(.*?)',
707 | re.S)
708 | link_regex = re.compile('(.*?) ', re.S)
709 | subdomains = []
710 |
711 | try:
712 | results_tbl = tbl_regex.findall(results)[0]
713 | except IndexError:
714 | results_tbl = ''
715 | links_list = link_regex.findall(results_tbl)
716 | links = list(set(links_list))
717 |
718 | for link in links:
719 | subdomain = link.strip()
720 | if not subdomain.endswith(value):
721 | continue
722 | if subdomain and subdomain not in subdomains and subdomain != value:
723 | subdomains.append(subdomain.strip())
724 |
725 | return sorted(set(subdomains))
726 |
727 |
def SpyseGetDomains(data):
    """Extract the subdomain names from a Spyse API response.

    :param data: parsed JSON from the Spyse subdomain endpoint; expected
        shape is {'data': {'items': [{'name': ...}, ...]}}
    :type data: dict
    :rtype: list
    """
    domains = []
    try:
        domains = [i['name'] for i in data['data']['items']]
    # Dict lookups on an unexpected payload raise KeyError (or TypeError when
    # a level is not a dict); the original only caught IndexError, which
    # these lookups never raise, so malformed data crashed the caller.
    except (KeyError, TypeError, IndexError):
        print("Unrecognised data returned by Spyse")

    return domains
740 |
741 |
742 | # ################## User Agent #################### #
743 |
def PickRandomUA(value):
    """Return a randomly chosen User-Agent string, or None when none given."""
    if not value:
        return None
    # SystemRandom draws from the OS entropy pool rather than the default PRNG.
    return random.SystemRandom().choice(value)
751 |
752 |
def PickRandomTimeout(value):
    """Return a randomly chosen timeout from *value*; default to 5 seconds."""
    if not value:
        return 5
    return random.SystemRandom().choice(value)
759 |
760 |
761 | # ####################### Common search ########################### #
762 |
def CommonSearch(value, urltemplate, quantity, step, limit, uas, proxies, timeouts):
    """Page through a search-engine URL template, accumulating response
    bodies, then extract emails and hostnames for *value*.

    Returns a (emails, hostnames) tuple; ([], []) on any non-200 response.
    """
    collected = ""
    offset = 0

    while offset <= limit:
        try:
            target = urltemplate.format(quantity=quantity, counter=offset, value=value)
            session = requests.Session()
            resp = session.get(target, verify=False,
                               headers={'User-Agent': PickRandomUA(uas)},
                               proxies=proxies)
            if resp.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(resp.status_code))
                return [], []
            collected += resp.text
        except Exception as e:
            print(e)

        # Random pause between pages to look less like a scraper.
        time.sleep(PickRandomTimeout(timeouts))
        offset += step

    return GetEmails(collected, value), GetHostnames(collected, value)
784 |
785 |
def CommonSearch2(value, urltemplate, step, limit, uas, proxies, timeouts):
    """Variant of CommonSearch for engines that paginate from page 1 and take
    no per-page quantity parameter in the URL template.

    Returns a (emails, hostnames) tuple; ([], []) on any non-200 response.
    """
    collected = ""
    page = 1

    while page <= limit:
        try:
            target = urltemplate.format(counter=page, value=value)
            session = requests.Session()
            resp = session.get(target, verify=False,
                               headers={'User-Agent': PickRandomUA(uas)},
                               proxies=proxies)
            if resp.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(resp.status_code))
                return [], []
            collected += resp.text
        except Exception as e:
            print(e)

        # Random pause between pages to look less like a scraper.
        time.sleep(PickRandomTimeout(timeouts))
        page += step

    return GetEmails(collected, value), GetHostnames(collected, value)
807 |
808 |
809 | # ###################### Search Engines & Services ######################### #
810 |
def GoogleSearch(value, limit, uas, proxies, timeouts):
    """Scrape Google results for '@"<value>"' mentions (100 per page)."""
    url = "https://www.google.com/search?num={quantity}&start={counter}&hl=en&meta=&q=%40%22{value}%22"
    return CommonSearch(value, url, 100, 100, limit, uas, proxies, timeouts)
817 |
818 |
def BingSearch(value, limit, uas, proxies, timeouts):
    """Scrape Bing results for '@<value>' mentions (50 per page)."""
    url = "https://www.bing.com/search?q=%40{value}&count={quantity}&first={counter}"
    return CommonSearch(value, url, 50, 50, limit, uas, proxies, timeouts)
825 |
826 |
def AskSearch(value, limit, uas, proxies, timeouts):
    """Scrape Ask.com results for '@"<value>"' mentions, one page at a time."""
    url = "https://www.ask.com/web?q=%40%22{value}%22&page={counter}"
    return CommonSearch2(value, url, 1, limit, uas, proxies, timeouts)
832 |
833 |
# todo captcha
def DogpileSearch(value, limit, uas, proxies, timeouts):
    """Scrape Dogpile results for '@<value>' mentions (15 per page)."""
    url = "https://www.dogpile.com/search/web?qsi={counter}&q=%40{value}"
    return CommonSearch2(value, url, 15, limit, uas, proxies, timeouts)
840 |
841 |
def YahooSearch(value, limit, uas, proxies, timeouts):
    """Scrape Yahoo results for '@<value>' mentions (10 per page)."""
    url = "https://search.yahoo.com/search?p=%40{value}&b={counter}&pz=10"
    return CommonSearch2(value, url, 10, limit, uas, proxies, timeouts)
847 |
848 |
def YandexSearch(value, limit, uas, proxies, timeouts):
    """Scrape yandex.com result pages for emails/hostnames of *value*.

    Unlike CommonSearch, Yandex paginates by page index (`p=`) rather than a
    result offset, so a separate `page` counter is kept alongside `counter`.
    Returns (emails, hostnames); ([], []) on any non-200 response.
    """
    server = "yandex.com"
    quantity = 50
    step = 50
    results = ""
    page = 0
    counter = 0

    while counter <= limit:
        try:
            # %22%40 is a URL-encoded quoted '@'; lr=10418 pins a Yandex
            # region id -- presumably chosen to stabilise results; confirm.
            url = "https://" + server + "/search/?text=%22%40" + value + "%22&numdoc=" \
                  + str(quantity) + "&p=" + str(page) + "&lr=10418"
            s = requests.Session()
            r = s.get(url, verify=False, headers={'User-Agent': PickRandomUA(uas)},
                      proxies=proxies)
            if r.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(r.status_code))
                return [], []
            results += r.text
        except Exception as e:
            print(e)

        # Random pause between pages to avoid rate limiting.
        time.sleep(PickRandomTimeout(timeouts))
        counter += step
        page += 1

    return GetEmails(results, value), GetHostnames(results, value)
876 |
877 |
def CrtSearch(value, uas, proxies):
    """Fetch crt.sh certificate-transparency results for *value* and extract
    hostnames. Note: a non-200 response returns ([], []) (original behavior).
    """
    page = ""

    try:
        session = requests.Session()
        resp = session.get("https://" + "crt.sh" + "/?q=" + value, verify=False,
                           headers={'User-Agent': PickRandomUA(uas)}, proxies=proxies)
        if resp.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(resp.status_code))
            return [], []
        page += resp.text
    except Exception as e:
        print(e)

    return GetHostnames(page, value)
894 |
895 |
def DNSDumpsterSearch(targetip, uas, proxies):
    """Query dnsdumpster.com for hostnames related to *targetip*.

    Flow: GET the page to harvest the CSRF token, then POST the target and
    parse the response with GetDNSDumpsterHostnames.
    NOTE(review): failure paths return three different shapes -- ([], []) on
    non-200, None when the token is missing or an exception fires -- while
    success returns whatever GetDNSDumpsterHostnames yields; confirm callers
    handle all three.
    """
    server = "dnsdumpster.com"
    results = ""  # unused in this function
    timeout = 25
    subdomains = None

    try:
        url = "https://" + server + "/"
        s = requests.Session()
        # The Referer header is required by dnsdumpster's CSRF protection.
        myheaders = {'User-Agent': PickRandomUA(uas), 'Referer': 'https://dnsdumpster.com'}
        r = s.get(url, verify=False, headers=myheaders, proxies=proxies, timeout=timeout)
        if r.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(r.status_code))
            return [], []

        # get csrf token
        csrf_regex = re.compile('name="csrfmiddlewaretoken" value="(.*?)"', re.S)
        try:
            token = csrf_regex.findall(r.text)[0]
        except IndexError:
            print("[-] CSRF Token not found")
            return None

        # The POST reuses the same session so the CSRF cookie matches the token.
        params = {'csrfmiddlewaretoken': token, 'targetip': targetip}
        pr = s.post(url, verify=False, headers=myheaders, proxies=proxies, data=params,
                    timeout=timeout)
        if pr.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(pr.status_code))
            return [], []

        subdomains = GetDNSDumpsterHostnames(targetip, pr.text)

    except Exception as e:
        print(e)

    return subdomains
932 |
933 |
# todo pgp.mit.edu lookup service seems down
def PGPSearch(value, uas, proxies):
    """Query the pgp.mit.edu key server index for *value* and extract emails
    and hostnames from the response."""
    page = ""

    try:
        session = requests.Session()
        resp = session.get("https://" + "pgp.mit.edu" + "/pks/lookup?search=" + value + "&op=index",
                           verify=False, headers={'User-Agent': PickRandomUA(uas)}, proxies=proxies)
        if resp.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(resp.status_code))
            return [], []
        page += resp.text
    except Exception as e:
        print(e)

    return GetEmails(page, value), GetHostnames(page, value)
951 |
952 |
def NetcraftSearch(value, uas, proxies):
    """Scrape searchdns.netcraft.com for hostnames ending with *value*.
    Note: a non-200 response returns ([], []) (original behavior)."""
    page = ""

    try:
        session = requests.Session()
        resp = session.get("https://" + "searchdns.netcraft.com" + "?restriction=site+ends+with&host=" + value,
                           verify=False, headers={'User-Agent': PickRandomUA(uas)}, proxies=proxies)
        if resp.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(resp.status_code))
            return [], []
        page += resp.text
    except Exception as e:
        print(e)

    return GetHostnames(page, value)
969 |
970 |
def VTSearch(value, uas, proxies):
    """Scrape the VirusTotal domain-information page for hostnames of *value*.
    Note: a non-200 response returns ([], []) (original behavior)."""
    page = ""

    try:
        session = requests.Session()
        resp = session.get("https://" + "www.virustotal.com" + "/en/domain/" + value + "/information/",
                           verify=False, headers={'User-Agent': PickRandomUA(uas)}, proxies=proxies)
        if resp.status_code != 200:
            print("[-] Something is going wrong (status code: {})".format(resp.status_code))
            return [], []
        page += resp.text
    except Exception as e:
        print(e)

    return GetHostnames(page, value)
987 |
988 |
def SpyseSearch(value, apikey, limit=100, proxies=None):
    """Query the Spyse v2 API for subdomains of *value*.

    :param value: target domain
    :param apikey: Spyse API bearer token
    :param limit: max results requested from the API (default 100)
    :returns: the parsed JSON response dict, or {} on any failure
    """
    server = "api.spyse.com"
    data = {}

    try:
        url = f'https://{server}/v2/data/domain/subdomain?limit={limit}&domain={value}'
        headers = {
            'accept': 'application/json',
            'Authorization': f'Bearer {apikey}',
        }
        s = requests.Session()
        r = s.get(url, verify=False, headers=headers, proxies=proxies)
        # DEBUG is presumably a module-level flag -- TODO confirm it exists;
        # if not, the NameError handler below fires.
        if DEBUG: print(r.text)
        if r.status_code != 200:
            print(f'[-] Something is going wrong - status code: {r.status_code}')
        else:
            data = json.loads(r.text)
    except (ValueError, TypeError):
        # json.loads raises ValueError on a non-JSON body.
        print("Unrecognised data returned by spyse api")
    except NameError as e:
        print("Error: Insufficient data passed to SpyseSearch")
        print(e)
    except Exception as e:
        print(e)

    return data
1015 |
1016 |
def CensysSearch(value, api_id, api_secret):
    """Search Censys certificate records whose parsed names match *value*.

    :returns: a set of subdomain strings
    NOTE(review): exits the whole process (exit(1)) on bad credentials or
    rate limiting instead of raising -- confirm that is intended for a
    library-style function.
    """
    try:
        censys_certificates = censys.certificates.CensysCertificates(api_id=api_id,
                                                                     api_secret=api_secret)
        certificate_query = 'parsed.names: %s' % value
        certificates_search_results = censys_certificates.search(certificate_query,
                                                                 fields=['parsed.names'])
        # Flatten every certificate's parsed.names into one de-duplicated set.
        subdomains = []
        for search_result in certificates_search_results:
            subdomains.extend(search_result['parsed.names'])
        return set(subdomains)
    except censys.base.CensysUnauthorizedException:
        sys.stderr.write('[-] Your Censys credentials look invalid.\n')
        exit(1)
    except censys.base.CensysRateLimitExceededException:
        sys.stderr.write('[-] Looks like you exceeded your Censys account limits rate. Exiting\n')
        exit(1)
1034 |
1035 |
def GoogleSearchEngine(value, site, limit, uas, proxies, timeouts):
    """Google search restricted to *site* for '@"<value>"' mentions.

    Same pagination scheme as CommonSearch (100 results per page, offset by
    `counter`); returns (emails, hostnames), or ([], []) on non-200.
    """
    server = "www.google.com"
    quantity = 100
    counter = 0
    step = 100
    results = ""

    while counter <= limit:
        try:
            # q=site%3A<site>%20%40%22<value>%22 == 'site:<site> "@<value>"'
            url = "https://" + server + "/search?num=" + str(quantity) + "&start=" + str(
                counter) + "&hl=en&meta=&q=site%3A" + site + "%20%40%22" + value + "%22"
            s = requests.Session()
            r = s.get(url, verify=False, headers={'User-Agent': PickRandomUA(uas)},
                      proxies=proxies)
            if r.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(r.status_code))
                return [], []
            results += r.text
        except Exception as e:
            print(e)

        # Random pause between pages to reduce the chance of blocking.
        time.sleep(PickRandomTimeout(timeouts))
        counter += step

    return GetEmails(results, value), GetHostnames(results, value)
1062 |
1063 |
def BingVHostsSearch(value, limit, uas, proxies, timeouts):
    """ Bing Virtual Hosts Search

    Queries Bing with 'ip:<value>' to discover virtual hosts sharing the IP,
    then extracts hostnames and scrubs leftover markup fragments.
    :returns: sorted list of unique vhost names, or ([], []) on non-200
    """
    server = "www.bing.com"
    quantity = 50
    step = 50
    counter = 0
    results = ""
    vhosts = []

    while counter <= limit:
        try:
            url = "https://" + server + "/search?q=ip%3A" + value + "&go=&count=" + str(
                quantity) + "&FORM=QBHL&qs=n&first=" + str(counter)
            s = requests.Session()
            r = s.get(url, verify=False, headers={'User-Agent': PickRandomUA(uas)},
                      proxies=proxies)
            if r.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(r.status_code))
                return [], []
            results += r.text
        except Exception as e:
            print(e)

        time.sleep(PickRandomTimeout(timeouts))
        counter += step

    all_hostnames = GetHostnamesAll(results)

    # Strip markup remnants (angle brackets / tag fragments) from each name.
    for x in all_hostnames:
        x = re.sub(r'[[\<\/?]*[\w]*>]*', '', x)
        x = re.sub('<', '', x)
        x = re.sub('>', '', x)
        vhosts.append(x)

    return sorted(set(vhosts))
1099 |
1100 |
# todo if the API is used (valid key), the rtype is `dict` else
# the search is done via the website and the rtype is a `set` of vhosts
def ShodanSearch(api_key, domain, value, uas, proxies, timeouts, limit=0):
    """Shodan lookup for *domain*.

    With a working API key, returns api.search()'s result dict immediately.
    On shodan.APIError it falls through to a website-scrape loop and returns
    a sorted list of vhosts instead (see todo above).
    NOTE(review): the fallback URL (shodan.io with Bing-style
    '&FORM=QBHL&qs=n' parameters) looks copied from BingVHostsSearch --
    confirm it is valid for shodan.io.
    """
    api = shodan.Shodan(api_key)
    server = "shodan.io"
    counter = 0
    quantity = 10
    step = 10
    vhosts = []
    results = ''

    # Warning: Shodan api needs a payment plan!
    # Wrap the request in a try/ except block to catch errors
    try:
        # Search by hostname
        query = 'hostname:' + domain
        return api.search(query)

    except shodan.APIError as e:
        print('Error: %s' % e)

    # Website-scrape fallback, reached only when the API call raised.
    while counter <= limit:
        try:
            url = "https://" + server + "/search?q=ip%3A" + value + "&go=&count=" + str(
                quantity) + "&FORM=QBHL&qs=n&first=" + str(counter)
            s = requests.Session()
            r = s.get(url, verify=False, headers={'User-Agent': PickRandomUA(uas)},
                      proxies=proxies)
            if r.status_code != 200:
                print("[-] Something is going wrong (status code: {})".format(r.status_code))
                return [], []
            results += r.text
        except Exception as e:
            print(e)

        time.sleep(PickRandomTimeout(timeouts))
        counter += step

    all_hostnames = GetHostnamesAll(results)

    # Strip markup remnants (angle brackets / tag fragments) from each name.
    for x in all_hostnames:
        x = re.sub(r'[[\<\/?]*[\w]*>]*', '', x)
        x = re.sub('<', '', x)
        x = re.sub('>', '', x)
        vhosts.append(x)

    return sorted(set(vhosts))
1149 |
1150 |
1151 | # ################## Reports ######################## #
1152 |
def Report(engine, emails, hostnames, output_basename):
    """ Emails & Hostnames Console report

    Prints emails/hostnames to the console; when *output_basename* is given,
    appends this engine's results to <basename>.txt/.md/.xml/.html.
    NOTE(review): the HTML/XML markup inside several write() literals appears
    stripped/garbled in this copy of the file -- verify against upstream.
    """
    print()
    print("Emails:")
    for email in emails:
        print(email)

    print()
    print("Hostnames:")
    for host in hostnames:
        print(host)
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        # 'a' mode: InfoReport creates the files, each engine appends.
        with open(output1, 'a') as txt, open(output2, 'a') as md, \
                open(output3, 'a') as xml, open(output4, 'a') as html:
            txt.write("[+] {} results\n".format(engine))
            txt.write("-------------------------\n")
            md.write("---\n\n")
            md.write("## {} results\n".format(engine))
            xml.write("<{}Results>\n".format(engine))
            html.write("{} results\n".format(engine))

            txt.write("\n")
            md.write("\n")

            txt.write("Emails:\n")
            md.write("### Emails\n\n")
            xml.write("\n")
            html.write("Emails\n\n")

            for email in emails:
                txt.write("{}\n".format(email))
                md.write("* {}\n".format(email))
                xml.write("{}\n".format(email))
                html.write("- {}\n".format(email))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")

            txt.write("Hostnames:\n")
            md.write("### Hostnames\n\n")
            xml.write("\n")
            html.write("Hostnames\n\n")

            for host in hostnames:
                txt.write("{}\n".format(host))
                md.write("* {}\n".format(host))
                xml.write("{}\n".format(host))
                html.write("- {}\n".format(host))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")
            xml.write("{}Results>\n".format(engine))
1217 |
def SubdomainsReport(engine, subdomains, output_basename):
    """ Subdomains Console report

    Prints the subdomain list and appends it to the four report files.
    """
    # NOTE(review): assert is stripped under `python -O`; an explicit check
    # would be safer for input validation.
    assert type(subdomains) in (list, tuple, dict)  # should be list
    if len(subdomains) == 0:
        print('[-] Did not find any subdomain')
        return

    print('')
    print('[*] Found %d subdomains' % (len(subdomains)))
    print('')
    for subdomain in subdomains:
        print(subdomain)
    print('')

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] {} results\n".format(engine))
            txt.write("-------------------------\n")
            md.write("---\n\n")
            md.write("## {} results\n".format(engine))
            xml.write("<{}Results>\n".format(engine))
            html.write("{} results\n".format(engine))

            txt.write("\n")
            md.write("\n")

            txt.write("Subdomains:\n")
            md.write("### Subdomains\n\n")
            xml.write("\n")
            html.write("Subdomains\n\n")

            for subdomain in subdomains:
                txt.write("{}\n".format(subdomain))
                md.write("* {}\n".format(subdomain))
                xml.write("{} \n".format(subdomain))
                html.write("{}\n".format(subdomain))

            html.write("\n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")
1265 |
1266 |
def ShodanReport(results, output_basename):
    """ Shodan Console Report

    *results* is either the API's result dict (with 'total'/'matches') or,
    when the website-scrape fallback ran, a set/list/tuple of vhost strings.
    """
    engine = "Shodan"
    if len(results) == 0:
        print('[-] Did not find any results')
        return

    # Website-scrape fallback: just dump the vhost names, no per-host detail.
    if type(results) in (set, list, tuple):
        print('Shodan Report is not available for the current results')
        print('Results found: %s' % len(results))
        for r in results:
            print(r)
        return

    print()
    print('Results found: %s' % results['total'])
    print('------------------')
    print()

    # pdb.set_trace()

    for result in results['matches']:

        # Print host info
        print('IP: %s' % result['ip_str'])
        print('-------------------')
        print('Hostnames: ' + ','.join(result['hostnames']))
        print('Organization: %s' % result.get('org', 'n/a'))
        print('Operating System: %s' % result.get('os', 'n/a'))
        print('Port: %s' % result['port'])

        # Print banner
        banner = result.get('data')
        if banner is not None:
            print('Banner:')
            print(result['data'])
            print()

    # Output to file
    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] {} results\n".format(engine))
            txt.write("-------------------------\n")
            md.write("---\n\n")
            md.write("## {} results\n".format(engine))
            xml.write("<{}Results>\n".format(engine))
            html.write("{} results\n".format(engine))

            txt.write("\n")
            md.write("\n")

            for result in results['matches']:
                ip = result['ip_str']
                hostnames = ','.join(result['hostnames'])
                organization = result.get('org', 'n/a')
                # NOTE(review): local `os` shadows the os module inside this loop.
                os = result.get('os', 'n/a')
                port = result['port']
                banner = result['data']

                # Print IP
                PrintField("IP", ip, txt, md, xml, html)

                # Print Hostnames
                PrintField("Hostnames", hostnames, txt, md, xml, html)

                # Print Organization
                PrintField("Organization", organization, txt, md, xml, html)

                # Print Operating System
                PrintField("OS", os, txt, md, xml, html)

                # Print Port
                PrintField("Port", port, txt, md, xml, html)

                # Print Banner
                PrintField("Banner", banner, txt, md, xml, html)

            xml.write("{}Results>\n".format(engine))
1351 |
1352 |
def PrintField(label, value, txt, md, xml, html):
    """ Print field to output files

    Writes one labelled value to each of the four open report file handles.
    NOTE(review): the XML/HTML literals here look like they lost their
    markup (e.g. a missing '</' before the closing tag) in this copy of the
    file -- verify against upstream before relying on the output format.
    """
    # Section header in each format.
    txt.write("{}:\n".format(label))
    md.write("### {}\n\n".format(label))
    xml.write("<{}>\n".format(label))
    html.write("{}\n\n".format(label))

    # The value itself.
    txt.write("{}\n".format(value))
    md.write("* {}\n".format(value))
    xml.write("<{}>{}{}>\n".format(label, value, label))
    html.write("- {}\n".format(value))

    # Section footers / separators.
    html.write(" \n")
    xml.write("{}>\n".format(label))
    txt.write("\n")
    md.write("\n")
1369 |
1370 |
def HostnamesReport(engine, hostnames, output_basename):
    """ Hostnames Console report

    Prints the hostname list and appends it to the four report files.
    """
    # NOTE(review): assert is stripped under `python -O`.
    assert type(hostnames) in (list, tuple, dict)  # should be list

    print()
    print("Hostnames:")
    for host in hostnames:
        print(host)
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] {} results\n".format(engine))
            txt.write("-------------------------\n")
            md.write("---\n\n")
            md.write("## {} results\n".format(engine))
            xml.write("<{}Results>\n".format(engine))
            html.write("{} results\n".format(engine))

            txt.write("\n")
            md.write("\n")

            txt.write("Hostnames:\n")
            md.write("### Hostnames\n\n")
            xml.write("\n")
            html.write("Hostnames\n\n")

            for host in hostnames:
                txt.write("{}\n".format(host))
                md.write("* {}\n".format(host))
                xml.write("{}\n".format(host))
                html.write("- {}\n".format(host))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")
            xml.write("{}Results>\n".format(engine))
1415 |
1416 |
def InfoReport(mode, limit, dnsserver, proxy, domain, ip, uas, output_basename):
    """ Information Console report

    Prints the run configuration and, when *output_basename* is given,
    creates (mode 'w', unlike the other reports which append) the four
    output files with a header section.
    """
    print("[+] Information gathering: {}".format(mode))
    print("[+] Looking into first {} search engines results".format(limit))
    print("[+] Using DNS server: {}".format(dnsserver))
    if proxy:
        print("[+] Using Proxy server: {}".format(proxy))
    print("[+] Target: {}:{}".format(domain, ip))
    print("[+] User-agent strings: {}".format(uas))
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'w') as txt, open(output2, 'w') as md, open(output3, 'w') as xml, open(
                output4, 'w') as html:
            # NOTE(review): `message` is not defined in this function or its
            # parameters -- presumably a module-level banner string; confirm,
            # otherwise every call with output_basename raises NameError.
            txt.write("{}\n".format(message))
            md.write("```\n")
            md.write("{}\n".format(message))
            md.write("```\n\n")
            xml.write('\n'.format(message))
            xml.write('\n')
            xml.write("\n")
            xml.write("\n")
            html.write("GasMasK Report\n")
            html.write("{} \n\n\n".format(message))

            txt.write("[+] Information gathering: {}\n".format(",".join(mode)))
            md.write("---\n\n")
            md.write("* Information gathering: {}\n".format(",".join(mode)))
            html.write("- Information gathering: {}\n".format(",".join(mode)))
            xml.write("\n")

            for m in mode:
                xml.write("{}\n".format(m))
            xml.write("\n")

            txt.write("[+] Looking into first {} search engine results\n".format(limit))
            md.write("* Looking into first {} search engine results\n".format(limit))
            xml.write("{}\n".format(limit))
            html.write("- Search Engine Results: {}\n".format(limit))

            txt.write("[+] Using DNS server: {}\n".format(dnsserver))
            md.write("* Using DNS server: {}\n".format(dnsserver))
            xml.write("{}\n".format(dnsserver))
            html.write("- Using DNS server: {}\n".format(dnsserver))

            if proxy:
                txt.write("[+] Using Proxy server: {}\n".format(proxy))
                md.write("* Using Proxy server: {}\n".format(proxy))
                xml.write("{}\n".format(proxy))
                html.write("- Using Proxy server: {}\n".format(proxy))

            txt.write("[+] Target: {}:{}\n".format(domain, ip))
            md.write("* Target: {}:{}\n".format(domain, ip))
            xml.write("\n")
            xml.write("{}\n".format(domain))
            xml.write("{}\n".format(ip))
            xml.write("\n")
            html.write("- Target: {}:{}\n".format(domain, ip))

            txt.write("[+] User-agent strings: {}\n".format(uas))
            md.write("* User-agent strings: {}\n".format(uas))
            xml.write("{}\n".format(uas))
            html.write("- User-agent strings: {}\n".format(uas))

            txt.write("\n")
            md.write("\n")
            xml.write(" \n")
            html.write("\n")
1490 |
1491 |
def WhoisReport(data, output_basename):
    """ Whois Console report

    *data* maps a key to a (value, label) pair where value is either a
    string or a list of strings (multi-line whois fields).
    """
    for key, value in data.items():
        if isinstance(value[0], list):
            print()
            print(value[1])
            for val in value[0]:
                print(val)
            print()
        else:
            print(value[1] + " " + value[0])
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] Whois lookup\n")
            txt.write("----------------\n")
            md.write("---\n\n")
            md.write("## Whois lookup\n\n")
            xml.write("\n")
            html.write("Whois lookup\n\n")

            for key, value in data.items():
                # Multi-line field: emit label then one line per entry.
                if isinstance(value[0], list):
                    txt.write("\n")
                    md.write("\n")

                    txt.write("{}\n".format(value[1]))
                    md.write("* {}\n".format(value[1]))
                    xml.write("<{}>\n".format(key))
                    html.write("- {}\n\n".format(value[1]))

                    for val in value[0]:
                        txt.write("{}\n".format(val))
                        md.write(" * {}\n".format(val))
                        xml.write("{}\n".format(val))
                        html.write("- {}\n".format(val))

                    xml.write("{}>\n".format(key))
                    html.write(" \n")

                    txt.write("\n")
                    md.write("\n")
                else:
                    # Single-line field.
                    txt.write("{} {}\n".format(value[1], value[0]))
                    md.write("* {} {}\n".format(value[1], value[0]))
                    xml.write("<{}>{}{}>\n".format(key, value[0], key))
                    html.write("- {} {}\n".format(value[1], value[0]))

            txt.write("\n")
            md.write("\n")
            xml.write(" \n")
            html.write("\n")
1551 |
1552 |
def DNSReport(data, output_basename):
    """ DNS Console report

    *data* maps a DNS record type (e.g. 'A', 'MX') to a list of values.
    """
    for key, value in data.items():
        if len(value) == 1:
            print(key + " DNS record: " + value[0])
        else:
            print()
            print(key + " DNS record: ")
            for val in value:
                print(val)
            print()
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] DNS queries\n")
            txt.write("---------------\n")
            md.write("---\n\n")
            md.write("## DNS queries\n\n")
            xml.write("\n")
            html.write("DNS queries\n\n")

            for key, value in data.items():
                # Single-value record on one line.
                if len(value) == 1:
                    txt.write("{} DNS record: {}\n".format(key, value[0]))
                    md.write("* {} DNS record: {}\n".format(key, value[0]))
                    xml.write("<{}>{}{}>\n".format(key, value[0], key))
                    html.write("- {} DNS record: {}\n".format(key, value[0]))

                else:
                    # Multi-value record: label followed by one line per value.
                    txt.write("\n")
                    md.write("\n")

                    txt.write("{} DNS record:\n".format(key))
                    md.write("* {} DNS record:\n".format(key))
                    xml.write("<{}>\n".format(key))
                    html.write("- {} DNS record:\n\n".format(key))

                    for val in value:
                        txt.write("{}\n".format(val))
                        md.write(" * {}\n".format(val))
                        xml.write("{}\n".format(val))
                        html.write("- {}\n".format(val))

                    html.write(" \n")
                    # NOTE(review): this writes an XML-style closing tag into
                    # the .md file -- looks like it should be xml.write; confirm.
                    md.write("{}>\n".format(key))
                    txt.write("\n")
                    md.write("\n")

            txt.write("\n")
            md.write("\n")
            xml.write(" \n")
            html.write("\n")
1613 |
def ReverseDNSReport(ip, data, output_basename):
    """ Reverse DNS Console report

    *data* is the resolved hostname string (may be empty/None when the
    reverse lookup failed).
    """
    if data:
        print(ip + ":" + data)
        print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] Reverse DNS Lookup\n")
            txt.write("----------------------\n")
            md.write("---\n\n")
            md.write("## Reverse DNS Lookup\n\n")
            xml.write("\n")
            html.write("Reverse DNS Lookup\n")

            if data:
                txt.write("{}:{}\n".format(ip, data))
                md.write("* {}:{}\n".format(ip, data))
                xml.write("{}\n".format(ip))
                xml.write("{}\n".format(data))
                # NOTE(review): format args are unused here -- the markup in
                # this literal appears stripped in this copy; verify upstream.
                html.write("\n".format(ip, data))

            txt.write("\n")
            md.write("\n")
            xml.write("\n")
1645 |
1646 |
def VHostsReport(data, output_basename):
    """ VHosts Console report

    Prints each discovered virtual host and appends the list to the four
    report files under a 'Bing Virtual Hosts' heading.
    """
    for host in data:
        print(host)
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] Bing Virtual Hosts\n")
            txt.write("----------------------\n")
            md.write("---\n\n")
            md.write("## Bing Virtual Hosts\n\n")
            xml.write("\n")
            html.write("Bing Virtual Hosts\n\n")

            for host in data:
                txt.write("{}\n".format(host))
                md.write("* {}\n".format(host))
                xml.write("{}\n".format(host))
                html.write("- {}\n".format(host))

            txt.write("\n")
            md.write("\n")
            xml.write(" \n")
            html.write("\n")
1678 |
1679 |
def FinalReport(info, output_basename):
    """ All major formats final report

    *info* is expected to carry 'all_emails', 'all_hosts' and 'domains'
    lists aggregated from the individual engine searches.
    """
    print()
    print("[+] Search engines results - Final Report")
    print("-----------------------------------------")

    if info['all_emails']:
        print()
        print("Emails:")
        print()
        for email in info['all_emails']:
            print(email)

    if info['all_hosts']:
        print()
        print("Hostnames:")
        print()
        for host in info['all_hosts']:
            print(host)

    if info['domains']:
        print()
        print("Subdomains:")
        print()
        for domain in info['domains']:
            print(domain)
    print()

    if output_basename:
        output1 = output_basename + ".txt"
        output2 = output_basename + ".md"
        output3 = output_basename + ".xml"
        output4 = output_basename + ".html"

        with open(output1, 'a') as txt, open(output2, 'a') as md, open(output3, 'a') as xml, open(
                output4, 'a') as html:
            txt.write("[+] Search engines results - Final Report\n")
            txt.write("-----------------------------------------\n")
            md.write("---\n\n")
            md.write("## Search engines results - Final Report\n")
            xml.write("\n")
            html.write("Search engines results - Final Report\n")

            txt.write("\n")
            md.write("\n")

            txt.write("Emails:\n")
            md.write("### Emails\n\n")
            xml.write("\n")
            html.write("Emails\n\n")

            for email in info['all_emails']:
                txt.write("{}\n".format(email))
                md.write("* {}\n".format(email))
                xml.write("{}\n".format(email))
                html.write("- {}\n".format(email))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")

            txt.write("Hostnames:\n")
            md.write("### Hostnames\n\n")
            xml.write("\n")
            html.write("Hostnames\n\n")

            for host in info['all_hosts']:
                txt.write("{}\n".format(host))
                md.write("* {}\n".format(host))
                xml.write("{}\n".format(host))
                html.write("- {}\n".format(host))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")

            txt.write("Subdomains:\n")
            md.write("### Subdomains\n\n")
            xml.write("\n")
            html.write("Subdomains\n\n")

            for host in info['domains']:
                txt.write("{}\n".format(host))
                md.write("* {}\n".format(host))
                xml.write("{}\n".format(host))
                html.write("- {}\n".format(host))

            html.write(" \n")
            xml.write("\n")
            txt.write("\n")
            md.write("\n")
            xml.write("\n")
1774 |
1775 | #######################################################
1776 |
1777 |
def _get_key(service_name):
    """
    Parses the api keys file and returns the corresponding key
    Doesn't work for censys. Works only for services with one key

    :param service_name: The online service e.g spyse
    :type service_name: :class: `str`
    """
    # Bail out early when the keys file is missing/unavailable.
    if not checkFile():
        return None

    key = None
    with open(KEYS_FILE, 'r') as fin:
        lines = fin.readlines()
        # Scan every line; a later matching line overwrites an earlier one.
        for entry in lines:
            if ':' in entry and entry.lower().startswith(service_name):
                try:
                    key = entry.strip(' \r\n').split(':')[1]
                except IndexError:
                    print(KEYS_FILE
                          + " doesnt follow the correct format name:value")

    return key
1799 |
1800 | #######################################################
1801 |
1802 |
def MainFunc():
    """Command-line entry point.

    Parses CLI arguments, loads User-Agent strings, then runs whichever
    information-gathering modules the selected mode(s) enable (whois, DNS,
    reverse DNS, Bing vhosts, a dozen search engines, crt.sh, DNSdumpster,
    PGP, Netcraft, VirusTotal, Spyse, Shodan, censys.io), accumulating
    emails/hosts/domains into ``info`` and writing per-module plus final
    reports.

    Depends on many module-level helpers defined elsewhere in this file
    (the *Search functions, the *Report writers, checkFile, KEYS_FILE,
    ``message``, etc.).
    """
    print(message)

    report_buckets = 50  # NOTE(review): dead value — unconditionally overwritten with 10 below
    info = {
        'all_emails': [],
        'all_hosts': [],
        'domains': [],
        'public': []
    }
    uas = []  # User-Agent strings, loaded from file further down

    user_agent_strings_file = 'common-ua.txt'
    # Candidate request timeouts (seconds); a random one is picked per request.
    timeouts = [5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
    report_buckets = 10
    modes = ['basic', 'nongoogle', 'whois', 'dns', 'revdns', 'vhosts', 'google', 'bing', 'yahoo',
             'ask', 'dogpile', 'yandex', 'linkedin', 'twitter', 'youtube', 'reddit',
             'github', 'instagram', 'crt', 'pgp', 'netcraft', 'virustotal', 'dnsdump', 'shodan',
             'censys', 'spyse']

    parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter)
    parser.add_argument("-d", '--domain', action="store", metavar='DOMAIN', dest='domain',
                        default=None, type=CheckDomain, help="Domain to search.")
    parser.add_argument("-s", '--server', action="store", metavar='NAMESERVER', dest='dnsserver',
                        default='8.8.8.8', type=CheckDomainOrIP, help="DNS server to use.")
    parser.add_argument('-x', '--proxy', action="store", metavar='PROXY', dest='proxy',
                        default=None, type=str, help="Use a proxy server when retrieving "
                                                     "results from search engines (eg. "
                                                     "'-x http://127.0.0.1:8080')")
    parser.add_argument("-l", '--limit', action="store", metavar='LIMIT', dest='limit', type=int,
                        default=100,
                        help="Limit the number of search engine results (default: 100).")
    parser.add_argument("-i", '--info', action="store", metavar='MODE', dest='mode', type=str,
                        default='basic',
                        help="Limit information gathering (" + ','.join(modes) + ").")
    parser.add_argument('-o', '--output', action='store', metavar='BASENAME', dest='basename',
                        type=str, default=None, help='Output in the four major formats at once '
                                                     '(markdown, txt, xml and html).')
    parser.add_argument('-k', '--shodan-key', action='store', metavar='API-KEY', dest='shodankey',
                        type=str, default=None,
                        help='API key to use with Shodan search (MODE="shodan")')
    parser.add_argument('-e', '--spyse-key', action='store', metavar='SPYSE_API_KEY',
                        dest='spysekey', type=str, default=None)
    # censys.io
    parser.add_argument('-m', '--match', default=None,
                        help='Highlight a string within an existing query result')
    parser.add_argument('-f', '--filter', default=None,
                        help="Filter the JSON keys to display for each result, use value 'help' "
                             "for interesting fields")
    parser.add_argument('--count', action='store_true', help='Print the count result and exit')
    parser.add_argument('-R', '--report', default=None,
                        help="Stats on given field (use value 'help' for listing interesting "
                             "fields)'")
    parser.add_argument('-B', '--report_bucket', default=report_buckets,
                        help='Bucket len in report mode (default: %s)' % report_buckets)
    # query filter shortcuts - censys.io
    parser.add_argument('-1', '--censys_api_id', action='store', metavar='CENSYS_API_ID',
                        dest='censys_api_id', type=str, default=None,
                        help='Provide the authentication ID for the censys.io search engine')
    parser.add_argument('-2', '--censys_api_secret', action='store', metavar='CENSYS_API_SECRET',
                        dest='censys_api_secret', type=str, default=None,
                        help='Provide the secret hash for the censys.io search engine')
    parser.add_argument('-r', '--read_api_keys', action='store_true',
                        help="Read the API Keys stored in api_keys.txt file. (e.g. '-i censys -r')")
    parser.add_argument('-u', '--update_api_keys', action='store_true',
                        help="Update the API Keys stored in api_keys.txt file. (e.g. '-i censys -u')")
    parser.add_argument('-a', '--asn', metavar='ASN', dest='asn', type=str, default=None,
                        help='Filter with ASN (e.g 5408 for GR-NET AS)')
    parser.add_argument('-c', '--country', metavar='COUNTRY', dest='country', type=str,
                        default=None,
                        help='Filter with country')
    parser.add_argument('-O', '--cert-org', metavar='CERT_ORG', dest='cert_org', type=str,
                        default=None,
                        help='Certificate issued to organization')
    parser.add_argument('-I', '--cert-issuer', metavar='CERT_ISSUER', dest='cert_issuer', type=str,
                        default=None,
                        help='Certificate issued by organization')
    parser.add_argument('-z', '--cert-host', metavar='CERT_HOST', dest='cert_host', type=str,
                        default=None,
                        help='hostname Certificate is issued to')
    parser.add_argument('-S', '--http-server', metavar='HTTP_SERVER', dest='http_server', type=str,
                        default=None,
                        help='Server header')
    parser.add_argument('-t', '--html-title', metavar='HTML_TITLE', dest='html_title', type=str,
                        default=None,
                        help='Filter on html page title')
    parser.add_argument('-b', '--html-body', metavar='HTML_BODY', dest='html_body', type=str,
                        default=None,
                        help='Filter on html body content')
    parser.add_argument('-T', '--tags', default=None,
                        help='Filter on specific tags. e.g: -T tag1,tag2,... (use keyword \'list\''
                             ' to list usual tags')
    parser.add_argument('-L', '--Limit', default=float('inf'), help='Limit to N results')
    parser.add_argument('-D', '--debug', action='store_true', help='Debug information')
    parser.add_argument('-v', '--verbose', action='store_true', help='Print raw JSON records')
    parser.add_argument('-H', '--html', action='store_true',
                        help='Renders html elements in a browser')
    parser.add_argument('arguments', metavar='arguments', nargs='*', help='Censys query')

    args = parser.parse_args()

    # NOTE(review): str(None) yields the string "None" when -m is omitted —
    # confirm the censys helpers expect that rather than an empty value.
    match = str(args.match)

    # TODO tags/fields missing:
    # fire help before doing any request
    # if args.tags in ['list', 'help']:
    #     pp.pprint(tags_available)
    #     sys.exit(0)
    # if args.report in ['list', 'help']:
    #     pp.pprint(report_fields)
    #     sys.exit(0)
    # if args.filter in ['list', 'help']:
    #     pp.pprint(filter_fields)
    #     sys.exit(0)

    if args.report_bucket:
        report_buckets = args.report_bucket

    # No arguments at all: show usage and stop.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit()

    # args = parser.parse_args()
    info['domain'] = args.domain
    info['proxies'] = {}

    #######################################################

    # Load User-Agents strings from file #
    if os.path.isfile(user_agent_strings_file):
        uas = [line.strip() for line in open(user_agent_strings_file)]
    else:
        print("[-] An error occured while loading user-agent strings from file")
        sys.exit()

    output_basename = None
    if args.basename is not None:
        output_basename = args.basename

    #######################################################

    # information #
    # Mode is a comma-separated list, normalized to lowercase.
    info['mode'] = [x.strip() for x in args.mode.lower().split(',')]
    info['limit'] = args.limit
    info['dnsserver'] = args.dnsserver
    info['ip'] = VerifyHostname(info['domain'])

    if args.proxy:
        print("[+] Proxy will ONLY be used during search engines searches")
        info['proxies'] = {
            'http': args.proxy,
            'https': args.proxy,
        }

    InfoReport(info['mode'], info['limit'], info['dnsserver'], args.proxy, info['domain'],
               info['ip'], len(uas), output_basename)

    #######################################################

    # Whois query report #
    if any(i in ['whois', 'basic', 'nongoogle'] for i in info['mode']):
        print("[+] Whois lookup")
        print("----------------")
        info['whois'] = WhoisQuery(info['domain'])
        WhoisReport(info['whois'], output_basename)

    #######################################################

    # DNS records report #
    if any(i in ['dns', 'basic', 'nongoogle'] for i in info['mode']):
        print("[+] DNS queries")
        print("---------------")
        info['dns'] = DnsQuery(info['domain'], info['dnsserver'])
        DNSReport(info['dns'], output_basename)

    #######################################################

    # IP Reverse DNS lookup report #
    if any(i in ['revdns', 'basic', 'nongoogle'] for i in info['mode']):
        print("[+] Reverse DNS Lookup")
        print("----------------------")
        info['revdns'] = ReverseIPQuery(info['ip'], info['dnsserver'])
        ReverseDNSReport(info['ip'], info['revdns'], output_basename)

    #######################################################

    # Bing Virtual Hosts search results report ##
    if any(i in ['vhosts', 'basic', 'nongoogle'] for i in info['mode']):
        print("[+] Bing Virtual Hosts")
        print("----------------------")
        info['bingvhosts'] = BingVHostsSearch(info['ip'], info['limit'], uas,
                                              info['proxies'], timeouts)
        VHostsReport(info['bingvhosts'], output_basename)

    #######################################################

    # Google search #
    if any(i in ['google'] for i in info['mode']):
        print("[+] Searching in Google..")
        temp1, temp2 = GoogleSearch(info['domain'], info['limit'], uas, info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Google", temp1, temp2, output_basename)

    #######################################################

    # Bing search #
    if any(i in ['bing', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Bing..")
        temp1, temp2 = BingSearch(info['domain'], info['limit'], uas, info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Bing", temp1, temp2, output_basename)

    #######################################################

    # Yahoo search #
    if any(i in ['yahoo', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Yahoo..")
        temp1, temp2 = YahooSearch(info['domain'], info['limit'], uas, info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Yahoo", temp1, temp2, output_basename)

    #######################################################

    # Shodan search #
    if any(i in ['shodan'] for i in info['mode']):

        # pdb.set_trace()

        # Shodan requires an API key on the command line.
        if args.shodankey is None:
            print("[-] API key required for the Shodan search: '-k API-KEY, --shodan-key API-KEY'")
            sys.exit()
        print("[+] Searching in Shodan..")
        print("-------------------")

        results = ShodanSearch(args.shodankey, info['domain'], info['ip'], uas, info['proxies'],
                               timeouts)
        ShodanReport(results, output_basename)

    #######################################################

    # ASK search #
    if any(i in ['ask', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in ASK..")
        temp1, temp2 = AskSearch(info['domain'], 5, uas, info['proxies'], timeouts)  # 5 pages
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("ASK", temp1, temp2, output_basename)

    #######################################################

    # Dogpile search #
    if any(i in ['dogpile', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Dogpile..")
        temp1, temp2 = DogpileSearch(info['domain'], info['limit'], uas, info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Dogpile", temp1, temp2, output_basename)

    #######################################################

    # Yandex search #
    if any(i in ['yandex', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Yandex..")
        temp1, temp2 = YandexSearch(info['domain'], info['limit'], uas, info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Yandex", temp1, temp2, output_basename)

    #######################################################

    # crt search #
    if any(i in ['crt', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Crt..")
        temp = CrtSearch(info['domain'], uas, info['proxies'])
        info['all_hosts'].extend(temp)
        HostnamesReport("CRT", temp, output_basename)

    #######################################################

    # dnsdumpster search #
    if any(i in ['dnsdump', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in DNSdumpster..")
        temp = DNSDumpsterSearch(info['domain'], uas, info['proxies'])
        info['all_hosts'].extend(temp)
        HostnamesReport("DNSdumpster", temp, output_basename)

    #######################################################

    # PGP search #
    if any(i in ['pgp', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in PGP..")
        temp1, temp2 = PGPSearch(info['domain'], uas, info['proxies'])
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("PGP", temp1, temp2, output_basename)

    #######################################################

    # netcraft search #
    if any(i in ['netcraft', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in Netcraft..")
        temp = NetcraftSearch(info['domain'], uas, info['proxies'])
        info['all_hosts'].extend(temp)
        HostnamesReport("Netcraft", temp, output_basename)

    #######################################################

    # virustotal search #
    if any(i in ['virustotal', 'nongoogle'] for i in info['mode']):
        print("[+] Searching in VirusTotal..")
        temp = VTSearch(info['domain'], uas, info['proxies'])
        info['all_hosts'].extend(temp)
        HostnamesReport("VirusTotal", temp, output_basename)

    #######################################################

    # spyse search #
    if any(i in ['spyse', 'nongoogle'] for i in info['mode']):
        # Key may come from the CLI or from the stored keys file.
        spysekey = args.spysekey or _get_key('spyse')
        if not spysekey:
            print("Api Key for spyse was neither provided in cmd line "
                  "nor located inside %s file" % KEYS_FILE)
        else:
            print("[+] Searching in Spyse..")
            temp = SpyseSearch(info['domain'], spysekey, args.limit, info['proxies'])
            subdomains = SpyseGetDomains(temp)
            info['all_hosts'].extend(subdomains)
            SubdomainsReport("SpyseSearch", subdomains, output_basename)

    # LinkedIn search #
    if any(i in ['linkedin'] for i in info['mode']):
        print("[+] Searching in LinkedIn..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], 'linkedin.com', info['limit'], uas,
                                          info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("LinkedIn", temp1, temp2, output_basename)

    #######################################################

    # Twitter search #
    if any(i in ['twitter'] for i in info['mode']):
        print("[+] Searching in Twitter..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], "twitter.com", info['limit'], uas,
                                          info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Twitter", temp1, temp2, output_basename)

    #######################################################

    # Youtube search #
    if any(i in ['youtube'] for i in info['mode']):
        print("[+] Searching in Youtube..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], "youtube.com", info['limit'], uas,
                                          info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Youtube", temp1, temp2, output_basename)

    #######################################################

    # Reddit search #
    if any(i in ['reddit'] for i in info['mode']):
        print("[+] Searching in Reddit..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], "reddit.com", info['limit'], uas,
                                          info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Reddit", temp1, temp2, output_basename)

    #######################################################

    # Github search #
    if any(i in ['github'] for i in info['mode']):
        print("[+] Searching in Github..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], "github.com", info['limit'], uas,
                                          info['proxies'], timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Github", temp1, temp2, output_basename)

    #######################################################

    # Instagram search #
    if any(i in ['instagram'] for i in info['mode']):
        print("[+] Searching in Instagram..")
        temp1, temp2 = GoogleSearchEngine(info['domain'], "instagram.com", info['limit'], uas,
                                          info['proxies'],
                                          timeouts)
        info['all_emails'].extend(temp1)
        info['all_hosts'].extend(temp2)
        Report("Instagram", temp1, temp2, output_basename)

    #######################################################

    # Censys.io search #
    if any(i in ['censys'] for i in info['mode']):
        api_id = args.censys_api_id
        api_secret = args.censys_api_secret
        if api_id is not None and api_secret is not None:
            # Both credentials supplied on the command line: search directly.
            print("[+] Searching in Censys.io..\n")
            res1 = DomainSearchCensys(info['domain'], api_id, api_secret,
                                      output_basename, info['domains'])
            res2 = CensysPublicScan(api_id, api_secret, output_basename, args,
                                    report_buckets, match, info['public'])
            if res1 == False and res2 == False:
                print(
                    "Please use the available censys.io options in order to perform scanning. "
                    "For more information use the '--help' option")
                print()
        else:
            # No CLI credentials: fall back to the stored keys file,
            # interactively offering to create/update/read it.
            # NOTE(review): leading indentation was stripped from this copy
            # of the file; the nesting below is reconstructed — verify
            # against the upstream source.
            chkstored = checkFile()
            flag = 0
            if chkstored == False:
                chkanswer = input(
                    "[!] API Keys not provided. Would you like to store your API keys ? [y/n]: ")
                if chkanswer == 'y':
                    stored = createFileAndStoreAPIKeys('censys')
                    if stored == "stored":
                        print()
                        readFileContents()
                        print()
                        answer1 = input(
                            "[*] would you like to continue searching with censys.io ? [y/n] ")
                        print()
                        if answer1 == 'n':
                            flag = 1
                            print("[*] Exiting...")
                            exit(0)
                        if answer1 == 'y':
                            # Each stored line is expected as name:id:secret.
                            with open(KEYS_FILE) as f:
                                lines = f.read().splitlines()
                            print("[+] Searching in Censys.io..")
                            print()
                            for line in lines:
                                res1 = DomainSearchCensys(
                                    info['domain'], line.split(":")[1], line.split(":")[2],
                                    output_basename, info['domains'])
                                res2 = CensysPublicScan(
                                    line.split(":")[1], line.split(":")[2], output_basename,
                                    args, report_buckets, match, info['public'])
                                if res1 == False and res2 == False:
                                    print('Please use the available censys.io options in order'
                                          'to perform scanning. For more information use the '
                                          '\'--help\' option')
                                    print()
                            flag = 1
                    else:
                        print("[x] API keys has not been stored..")
                        print("[*] Exiting...")
                        exit(0)
                if chkanswer == 'n':
                    print(
                        "[!] Please provide the API keys in the command line to continue searching")
                    print("[*] Exiting....")
                    exit(0)

            if chkstored is True:
                if args.update_api_keys is True and args.mode == 'censys' and flag != 1:
                    keysupdate = updateAPIKeys('censys')
                    if keysupdate == 'n':
                        print("[x] the keys have not been updated")
                        print("[*] Exiting...")
                        exit(0)
                        print()  # NOTE(review): unreachable after exit(0)
                    else:
                        print("[!] the keys have been successfully updated!")
                        answer1 = input(
                            "[*] would you like to continue searching with censys.io ? [y/n] ")
                        print()
                        if answer1 == 'y':
                            with open(KEYS_FILE) as f:
                                lines = f.read().splitlines()
                            print("[+] Searching in Censys.io..")
                            print()
                            for line in lines:
                                res1 = DomainSearchCensys(
                                    info['domain'], line.split(":")[1], line.split(":")[2],
                                    output_basename, info['domains'])
                                res2 = CensysPublicScan(
                                    line.split(":")[1], line.split(":")[2], output_basename,
                                    args, report_buckets, match, info['public'])
                                if res1 == False and res2 == False:
                                    print("Please use the available censys.io options in order"
                                          " to perform scanning. For more information use "
                                          " the '--help' option\n")
                        else:
                            print("[*] Exiting...")
                            exit(0)
                else:
                    if args.read_api_keys is not True:
                        with open(KEYS_FILE) as f:
                            lines = f.read().splitlines()
                        print("[+] Searching in Censys.io..")
                        print()
                        for line in lines:
                            res1 = DomainSearchCensys(
                                info['domain'], line.split(":")[1], line.split(":")[2],
                                output_basename, info['domains'])
                            res2 = CensysPublicScan(
                                line.split(":")[1], line.split(":")[2], output_basename, args,
                                report_buckets, match, info['public'])
                            if res1 == False and res2 == False:
                                print("Please use the available censys.io options in order to "
                                      "perform scanning. For more information use the '--help'"
                                      " option\n")

                if args.read_api_keys is True and args.mode == 'censys' and flag != 1:
                    print()
                    readFileContents()
                    print()
                    answer1 = input(
                        "[*] would you like to continue searching with censys.io ? [y/n] ")
                    print()
                    if answer1 == 'y':
                        with open(KEYS_FILE) as f:
                            lines = f.read().splitlines()
                        print("[+] Searching in Censys.io..")
                        print()
                        for line in lines:
                            res1 = DomainSearchCensys(
                                info['domain'], line.split(":")[1], line.split(":")[2],
                                output_basename, info['domains'])
                            res2 = CensysPublicScan(
                                line.split(":")[1], line.split(":")[2], output_basename, args,
                                report_buckets, match, info['public'])
                            if res1 is False and res2 is False:
                                print(
                                    "Please use the available censys.io options in order to "
                                    "perform scanning. For more information use the "
                                    "'--help' option\n")
                    else:
                        print("[*] Exiting...")
                        exit(0)

    #######################################################

    # Search Results Final Report #
    # Deduplicate and sort the aggregated findings before the final report.
    info['all_emails'] = sorted(set(info['all_emails']))
    info['all_hosts'] = sorted(set(info['all_hosts']))
    info['domains'] = sorted(set(info['domains']))
    FinalReport(info, output_basename)

    #######################################################

    # Close tags for xml and html #
    if output_basename:
        output = output_basename + ".xml"
        output1 = output_basename + ".html"

        with open(output, 'a') as xml, open(output1, 'a') as html:
            # NOTE(review): these literals presumably once contained the
            # closing xml/html tags; markup appears stripped in this copy.
            xml.write("\n")
            html.write("\n")
2360 |
2361 |
2362 | #######################################################
2363 |
if __name__ == '__main__':

    # Run the tool; Ctrl-C gets a friendly message, any other failure is
    # shown only in debug mode and the process exits quietly.
    try:
        MainFunc()
    except Exception as exc:
        if DEBUG:
            print(exc)
        sys.exit()
    except KeyboardInterrupt:
        print("Search interrupted by user..")
2376 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | validators
2 | python-whois
3 | dnspython
4 | requests
5 | shodan
6 | censys
--------------------------------------------------------------------------------
|