├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── ci.yml
├── .gitignore
├── CHANGELOG.md
├── Dockerfile
├── LICENSE
├── README.md
├── conf
│   ├── config.json
│   └── keys.json
├── finalrecon.py
├── metadata.json
├── modules
│   ├── __init__.py
│   ├── crawler.py
│   ├── dirrec.py
│   ├── dns.py
│   ├── export.py
│   ├── headers.py
│   ├── portscan.py
│   ├── sslinfo.py
│   ├── subdom.py
│   ├── subdomain_modules
│   │   ├── __init__.py
│   │   ├── alienvault_subs.py
│   │   ├── anubis_subs.py
│   │   ├── bevigil_subs.py
│   │   ├── binedge_subs.py
│   │   ├── certspot_subs.py
│   │   ├── crtsh_subs.py
│   │   ├── fb_subs.py
│   │   ├── htarget_subs.py
│   │   ├── hunter_subs.py
│   │   ├── netlas_subs.py
│   │   ├── shodan_subs.py
│   │   ├── sonar_subs.py
│   │   ├── thcrowd_subs.py
│   │   ├── thminer_subs.py
│   │   ├── urlscan_subs.py
│   │   ├── virustotal_subs.py
│   │   ├── wayback_subs.py
│   │   └── zoomeye_subs.py
│   ├── wayback.py
│   ├── whois.py
│   └── write_log.py
├── requirements.txt
├── settings.py
├── whois_servers.json
└── wordlists
    ├── dirb_big.txt
    ├── dirb_common.txt
    └── dirb_small.txt
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: thewhiteh4t
4 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on:
4 | push:
5 | branches:
6 | - 'master'
7 | workflow_dispatch:
8 |
9 | jobs:
10 | docker:
11 | runs-on: ubuntu-latest
12 | steps:
13 | -
14 | name: Set up QEMU
15 | uses: docker/setup-qemu-action@v3
16 | -
17 | name: Set up Docker Buildx
18 | uses: docker/setup-buildx-action@v3
19 | -
20 | name: Login to DockerHub
21 | uses: docker/login-action@v3
22 | with:
23 | username: ${{ secrets.DOCKERHUB_USERNAME }}
24 | password: ${{ secrets.DOCKERHUB_TOKEN }}
25 | -
26 | name: Build and push
27 | uses: docker/build-push-action@v5
28 | with:
29 | push: true
30 | tags: thewhiteh4t/finalrecon:latest
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # Distribution / packaging
7 | .Python
8 | build/
9 | develop-eggs/
10 | dist/
11 | downloads/
12 | eggs/
13 | .eggs/
14 | lib/
15 | lib64/
16 | parts/
17 | sdist/
18 | var/
19 | wheels/
20 | share/python-wheels/
21 | *.egg-info/
22 | .installed.cfg
23 | *.egg
24 | MANIFEST
25 |
26 | # Environments
27 | .env
28 | .venv
29 | env/
30 | venv/
31 | ENV/
32 | env.bak/
33 | venv.bak/
34 |
35 | .idea/
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | ## v1.1.7
4 |
5 | * Added option to hide banner
6 | * Added option to save API keys
7 | * Added option to specify custom export directory
8 | * Added option to read API keys from env directly
9 | * More sources added for subdomain enumeration :
10 | * BinaryEdge
11 | * Netlas
12 | * Hunter.How
13 | * ZoomEye
14 | * UrlScan
15 | * AlienVault
16 | * SSL info module optimized
17 | * Fixed TLDExtract issue with IP targets
18 | * Replaced dnslib with dnspython
19 | * Removed psycopg2
20 |
21 | ---
22 |
23 | ## v1.1.6
24 |
25 | * dependencies reduced
26 | * logger added
27 | * adjusted for new tldextract version
28 | * bevigil added for sub-domain enum
29 | * refactored
30 | * sonar sub-domain query disabled
31 | * improved exception handling in dns enum
32 |
33 | ---
34 |
35 | ## v1.1.5
36 |
37 | * fixed some url issues in crawler
38 | * threads added in port scanner
39 | * fixed status code issue in directory enumeration module
40 | * more sources added for subdomain enumeration
41 | * wayback
42 | * sonar
43 | * hackertarget
44 |
45 | ---
46 |
47 | ## v1.1.4
48 |
49 | * CHANGELOG.md added
50 | * export
51 | * output format changed
52 | * csv and xml export removed
53 | * subdomain enum
54 | * bufferover removed
55 | * shodan integrated
56 | * directory enum
57 | * module optimized
58 | * results are printed as they are found
59 | * port scanner
60 | * module optimized
61 | * dedicated wayback module added
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:latest
2 | WORKDIR /root
3 | RUN git clone https://github.com/thewhiteh4t/finalrecon.git
4 | WORKDIR /root/finalrecon/
5 | RUN pip install wheel
6 | RUN pip install -r requirements.txt
7 | ENTRYPOINT ["python3", "finalrecon.py"]
8 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 thewhiteh4t
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FinalRecon
2 |
3 | Twitter - Telegram - thewhiteh4t's Blog
4 |
17 | FinalRecon is an all-in-one **automatic web reconnaissance** tool written in Python. The goal of FinalRecon is to provide an **overview** of the target in a **short** amount of time while maintaining the **accuracy** of results. Instead of executing **several tools** one after another, it provides similar results while keeping dependencies **small and simple**.
18 |
19 | ## Available In
20 |
21 | * Kali Linux
22 | * BlackArch Linux
23 | * SecBSD
24 | * Docker
25 |
33 | ## Featured On
34 |
35 | ### Python For OSINT
36 | * Hakin9 April 2020
37 | * https://hakin9.org/product/python-for-osint-tooling/
38 |
39 | ### NullByte
40 | * https://null-byte.wonderhowto.com/how-to/conduct-recon-web-target-with-python-tools-0198114/
41 | * https://www.youtube.com/watch?v=F9lwzMPGIgo
42 |
43 | ### Hakin9
44 | * https://hakin9.org/final-recon-osint-tool-for-all-in-one-web-reconnaissance/
45 |
46 | ## Features
47 |
48 | FinalRecon provides detailed information such as :
49 |
50 | * Header Information
51 |
52 | * Whois
53 |
54 | * SSL Certificate Information
55 |
56 | * Crawler
57 | * html
58 | * CSS
59 | * Javascripts
60 | * Internal Links
61 | * External Links
62 | * Images
63 | * robots
64 | * sitemaps
65 | * Links inside Javascripts
66 | * Links from Wayback Machine from Last 1 Year
67 |
68 | * DNS Enumeration
69 | * Over 40 types of Records are queried
70 | * DMARC Records
71 |
72 | * Subdomain Enumeration
73 | * Over 10 reliable data sources
74 |
75 | * Directory Enumeration
76 | * Support for File Extensions
77 |
78 | * Wayback Machine
79 | * URLs from Last 5 Years
80 |
81 | * Port Scan
82 | * Fast
83 |   * Top 100+ Common Ports
84 |
85 | * Export
86 | * Formats
87 | * txt
88 | * json [Coming Soon]
89 |
90 | ## Configuration
91 |
92 | ### API Keys
93 |
94 | Some modules use API keys to fetch data from different resources. These keys are optional; any module for which no API key is configured is simply skipped.
95 |
96 | #### Environment Variables
97 |
98 | Keys are read from environment variables if they are set; otherwise they are loaded from the config directory.
99 |
100 | ```bash
101 | FR_BEVIGIL_KEY, FR_BINEDGE_KEY, FR_FB_KEY, FR_HUNTER_KEY,
102 | FR_NETLAS_KEY, FR_SHODAN_KEY, FR_VT_KEY, FR_ZOOMEYE_KEY
103 |
104 | # Example :
105 |
106 | export FR_SHODAN_KEY="kl32lcdqwcdfv"
107 | ```
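
As a rough illustration of that precedence (this is not FinalRecon's actual loader, just a minimal sketch using the environment variable names and key file described in this section):

```python
import os
import json
from pathlib import Path


def load_key(name, env_var):
    """Prefer the environment variable; fall back to the saved keys file."""
    value = os.environ.get(env_var)  # e.g. FR_SHODAN_KEY
    if value:
        return value
    keys_file = Path.home() / '.config' / 'finalrecon' / 'keys.json'
    if keys_file.exists():
        # keys.json maps source names to keys; missing/unset keys are null
        return json.loads(keys_file.read_text()).get(name)
    return None


shodan_key = load_key('shodan', 'FR_SHODAN_KEY')
```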
108 |
109 | #### Saved Keys
110 |
111 | You can use **`-k`** to add keys, which are saved to the config directory automatically.
112 |
113 | ```bash
114 | # Usage
115 | python3 finalrecon.py -k '@'
116 |
117 | Valid Keys : 'bevigil', 'binedge', 'facebook', 'hunter', 'netlas', 'shodan', 'virustotal', 'zoomeye'
118 |
119 | # Example :
120 | python3 finalrecon.py -k 'shodan@kl32lcdqwcdfv'
121 | ```
122 |
123 | `Path = $HOME/.config/finalrecon/keys.json`
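
For reference, once a key is saved the file simply maps each source name to its key, and unused sources stay `null` (the key below is the placeholder value from the example above):

```json
{
    "bevigil": null,
    "facebook": null,
    "virustotal": null,
    "shodan": "kl32lcdqwcdfv",
    "binedge": null,
    "netlas": null,
    "zoomeye": null,
    "hunter": null
}
```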
124 |
125 | | Source | Module | Link |
126 | |--------|--------|------|
127 | | Facebook | Sub Domain Enum | https://developers.facebook.com/docs/facebook-login/access-tokens |
128 | | VirusTotal | Sub Domain Enum | https://www.virustotal.com/gui/my-apikey |
129 | | Shodan | Sub Domain Enum | https://developer.shodan.io/api/requirements |
130 | | BeVigil | Sub Domain Enum | https://bevigil.com/osint-api |
131 | | BinaryEdge | Sub Domain Enum | https://app.binaryedge.io/ |
132 | | Netlas | Sub Domain Enum | https://docs.netlas.io/getting_started/ |
133 | | ZoomEye | Sub Domain Enum | https://www.zoomeye.hk/ |
134 | | Hunter | Sub Domain Enum | https://hunter.how/search-api |
135 |
136 | ### JSON Config File
137 |
138 | Default config file is available at `~/.config/finalrecon/config.json`
139 |
140 | ```json
141 | {
142 | "common": {
143 | "timeout": 30,
144 | "dns_servers": "8.8.8.8, 8.8.4.4, 1.1.1.1, 1.0.0.1"
145 | },
146 | "ssl_cert": {
147 | "ssl_port": 443
148 | },
149 | "port_scan": {
150 | "threads": 50
151 | },
152 | "dir_enum": {
153 | "threads": 50,
154 | "redirect": false,
155 | "verify_ssl": false,
156 | "extension": ""
157 | },
158 | "export": {
159 | "format": "txt"
160 | }
161 | }
162 | ```
163 |
164 | ## Tested on
165 |
166 | * Kali Linux
167 | * BlackArch Linux
168 |
169 | > FinalRecon is a tool for **Pentesters** and is designed for **Linux**-based operating systems; other platforms such as **Windows** and **Termux** are **NOT** supported.
170 |
171 | ## Installation
172 |
173 | ### Kali Linux
174 |
175 | ```
176 | sudo apt install finalrecon
177 | ```
178 |
179 | ### BlackArch Linux
180 |
181 | ```
182 | sudo pacman -S finalrecon
183 | ```
184 |
185 | ### SecBSD
186 |
187 | ```bash
188 | doas pkg_add finalrecon
189 | ```
190 |
191 | ### Other Linux
192 |
193 | ```bash
194 | git clone https://github.com/thewhiteh4t/FinalRecon.git
195 | cd FinalRecon
196 | pip3 install -r requirements.txt
197 | ```
198 |
199 | ### Docker
200 |
201 | ``` bash
202 | docker pull thewhiteh4t/finalrecon
203 | docker run -it --entrypoint /bin/sh thewhiteh4t/finalrecon
204 | ```
205 |
206 | Alternatively, Docker users can use the following alias to run FinalRecon like a normal CLI tool.
207 |
208 | ``` bash
209 | alias finalrecon="docker run -it --rm --name finalrecon --entrypoint 'python3' thewhiteh4t/finalrecon finalrecon.py"
210 | ```
211 |
212 | And then use `finalrecon` to start your scan.
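
For example, to run a header check through the container:

```bash
finalrecon --headers --url https://example.com
```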
213 |
214 | > Remark
215 | >
216 | > If you have added any API keys, you can commit that image locally to preserve them.
217 | >
218 | > This Docker usage requires root privileges to run the `docker` command.
219 |
220 | ## Usage
221 |
222 | ```bash
223 | FinalRecon - All in One Web Recon | v1.1.6
224 |
225 | options:
226 | -h, --help show this help message and exit
227 | --url URL Target URL
228 | --headers Header Information
229 | --sslinfo SSL Certificate Information
230 | --whois Whois Lookup
231 | --crawl Crawl Target
232 | --dns DNS Enumeration
233 | --sub Sub-Domain Enumeration
234 | --dir Directory Search
235 | --wayback Wayback URLs
236 | --ps Fast Port Scan
237 | --full Full Recon
238 |
239 | Extra Options:
240 | -nb Hide Banner
241 | -dt DT Number of threads for directory enum [ Default : 30 ]
242 | -pt PT Number of threads for port scan [ Default : 50 ]
243 | -T T Request Timeout [ Default : 30.0 ]
244 | -w W Path to Wordlist [ Default : wordlists/dirb_common.txt
245 | ]
246 | -r Allow Redirect [ Default : False ]
247 | -s Toggle SSL Verification [ Default : True ]
248 | -sp SP Specify SSL Port [ Default : 443 ]
249 | -d D Custom DNS Servers [ Default : 1.1.1.1 ]
250 | -e E File Extensions [ Example : txt, xml, php ]
251 | -o O Export Format [ Default : txt ]
252 | -cd CD Change export directory [ Default :
253 | ~/.local/share/finalrecon ]
254 | -k K Add API key [ Example : shodan@key ]
255 | ```
256 |
257 | ```bash
258 | # Check headers
259 |
260 | python3 finalrecon.py --headers --url https://example.com
261 |
262 | # Check SSL certificate
263 |
264 | python3 finalrecon.py --sslinfo --url https://example.com
265 |
266 | # Check Whois information
267 |
268 | python3 finalrecon.py --whois --url https://example.com
269 |
270 | # Crawl Target
271 |
272 | python3 finalrecon.py --crawl --url https://example.com
273 |
274 | # Directory Searching
275 |
276 | python3 finalrecon.py --dir --url https://example.com -e txt,php -w /path/to/wordlist
277 |
278 | # Full scan
279 |
280 | python3 finalrecon.py --full --url https://example.com
281 | ```
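
Subdomain enumeration follows the same pattern; it uses any API keys you have saved or exported and simply skips the sources that have none:

```bash
# Sub-Domain Enumeration

python3 finalrecon.py --sub --url https://example.com
```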
282 |
283 | ## Demo
284 | [What's new in FinalRecon v1.0.2 (video)](https://odysee.com/@thewhiteh4t:2/what%27s-new-in-finalrecon-v1.0.2-osint:c)
285 |
--------------------------------------------------------------------------------
/conf/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "common": {
3 | "timeout": 30,
4 | "dns_servers": "8.8.8.8, 8.8.4.4, 1.1.1.1, 1.0.0.1"
5 | },
6 | "ssl_cert": {
7 | "ssl_port": 443
8 | },
9 | "port_scan": {
10 | "threads": 50
11 | },
12 | "dir_enum": {
13 | "threads": 50,
14 | "redirect": false,
15 | "verify_ssl": false,
16 | "extension": ""
17 | },
18 | "export": {
19 | "format": "txt"
20 | }
21 | }
--------------------------------------------------------------------------------
/conf/keys.json:
--------------------------------------------------------------------------------
1 | {
2 | "bevigil": null,
3 | "facebook": null,
4 | "virustotal": null,
5 | "shodan": null,
6 | "binedge": null,
7 | "netlas": null,
8 | "zoomeye": null,
9 | "hunter": null
10 | }
11 |
--------------------------------------------------------------------------------
/finalrecon.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import sys
5 |
6 | R = '\033[31m' # red
7 | G = '\033[32m' # green
8 | C = '\033[36m' # cyan
9 | W = '\033[0m' # white
10 |
11 | from modules.write_log import log_writer
12 | log_writer('Importing config...')
13 | import settings as config
14 |
15 | home = config.home
16 | usr_data = config.usr_data
17 | conf_path = config.conf_path
18 | path_to_script = config.path_to_script
19 | src_conf_path = config.src_conf_path
20 | meta_file_path = config.meta_file_path
21 | keys_file_path = config.keys_file_path
22 |
23 | log_writer(
24 | f'PATHS = HOME:{home}, SCRIPT_LOC:{path_to_script},\
25 | METADATA:{meta_file_path}, KEYS:{config.keys_file_path},\
26 | CONFIG:{config.conf_file_path}, LOG:{config.log_file_path}'
27 | )
28 |
29 | import argparse
30 |
31 | VERSION = '1.1.7'
32 | log_writer(f'FinalRecon v{VERSION}')
33 |
34 | parser = argparse.ArgumentParser(description=f'FinalRecon - All in One Web Recon | v{VERSION}')
35 | parser.add_argument('--url', help='Target URL')
36 | parser.add_argument('--headers', help='Header Information', action='store_true')
37 | parser.add_argument('--sslinfo', help='SSL Certificate Information', action='store_true')
38 | parser.add_argument('--whois', help='Whois Lookup', action='store_true')
39 | parser.add_argument('--crawl', help='Crawl Target', action='store_true')
40 | parser.add_argument('--dns', help='DNS Enumeration', action='store_true')
41 | parser.add_argument('--sub', help='Sub-Domain Enumeration', action='store_true')
42 | parser.add_argument('--dir', help='Directory Search', action='store_true')
43 | parser.add_argument('--wayback', help='Wayback URLs', action='store_true')
44 | parser.add_argument('--ps', help='Fast Port Scan', action='store_true')
45 | parser.add_argument('--full', help='Full Recon', action='store_true')
46 |
47 | ext_help = parser.add_argument_group('Extra Options')
48 | ext_help.add_argument('-nb', action='store_false', help='Hide Banner')
49 | ext_help.add_argument('-dt', type=int, help='Number of threads for directory enum [ Default : 30 ]')
50 | ext_help.add_argument('-pt', type=int, help='Number of threads for port scan [ Default : 50 ]')
51 | ext_help.add_argument('-T', type=float, help='Request Timeout [ Default : 30.0 ]')
52 | ext_help.add_argument('-w', help='Path to Wordlist [ Default : wordlists/dirb_common.txt ]')
53 | ext_help.add_argument('-r', action='store_true', help='Allow Redirect [ Default : False ]')
54 | ext_help.add_argument('-s', action='store_false', help='Toggle SSL Verification [ Default : True ]')
55 | ext_help.add_argument('-sp', type=int, help='Specify SSL Port [ Default : 443 ]')
56 | ext_help.add_argument('-d', help='Custom DNS Servers [ Default : 1.1.1.1 ]')
57 | ext_help.add_argument('-e', help='File Extensions [ Example : txt, xml, php ]')
58 | ext_help.add_argument('-o', help='Export Format [ Default : txt ]')
59 | ext_help.add_argument('-cd', help='Change export directory [ Default : ~/.local/share/finalrecon ]')
60 | ext_help.add_argument('-of', help='Change export folder name [ Default :fr__ ]')
61 | ext_help.add_argument('-k', help='Add API key [ Example : shodan@key ]')
62 | ext_help.set_defaults(
63 | dt=config.dir_enum_th,
64 | pt=config.port_scan_th,
65 | T=config.timeout,
66 | w=config.dir_enum_wlist,
67 | r=config.dir_enum_redirect,
68 | s=config.dir_enum_sslv,
69 | sp=config.ssl_port,
70 | d=config.custom_dns,
71 | e=config.dir_enum_ext,
72 | o=config.export_fmt,
73 | cd=config.usr_data,
74 |     of=None,
75 | )
76 |
77 | try:
78 | args = parser.parse_args()
79 | except SystemExit:
80 | log_writer('[finalrecon] Help menu accessed')
81 | log_writer(f'{"-" * 30}')
82 | sys.exit()
83 |
84 | target = args.url
85 | headinfo = args.headers
86 | sslinfo = args.sslinfo
87 | whois = args.whois
88 | crawl = args.crawl
89 | dns = args.dns
90 | dirrec = args.dir
91 | wback = args.wayback
92 | pscan = args.ps
93 | full = args.full
94 | threads = args.dt
95 | pscan_threads = args.pt
96 | tout = args.T
97 | wdlist = args.w
98 | redir = args.r
99 | sslv = args.s
100 | sslp = args.sp
101 | dserv = args.d
102 | filext = args.e
103 | subd = args.sub
104 | output = args.o
105 | show_banner = args.nb
106 | add_key = args.k
107 | output_dir = args.cd
108 | folder_name = args.of
109 |
110 | import socket
111 | import datetime
112 | import ipaddress
113 | import tldextract
114 | from json import loads, dumps
115 | from urllib import parse
116 |
117 | type_ip = False
118 | data = {}
119 |
120 |
121 | def banner():
122 | with open(meta_file_path, 'r') as metadata:
123 | json_data = loads(metadata.read())
124 | twitter_url = json_data['twitter']
125 | comms_url = json_data['comms']
126 |
127 | art = r'''
128 | ______ __ __ __ ______ __
129 | /\ ___\/\ \ /\ "-.\ \ /\ __ \ /\ \
130 | \ \ __\\ \ \\ \ \-. \\ \ __ \\ \ \____
131 | \ \_\ \ \_\\ \_\\"\_\\ \_\ \_\\ \_____\
132 | \/_/ \/_/ \/_/ \/_/ \/_/\/_/ \/_____/
133 | ______ ______ ______ ______ __ __
134 | /\ == \ /\ ___\ /\ ___\ /\ __ \ /\ "-.\ \
135 | \ \ __< \ \ __\ \ \ \____\ \ \/\ \\ \ \-. \
136 | \ \_\ \_\\ \_____\\ \_____\\ \_____\\ \_\\"\_\
137 | \/_/ /_/ \/_____/ \/_____/ \/_____/ \/_/ \/_/'''
138 | print(f'{G}{art}{W}\n')
139 | print(f'{G}[>]{C} Created By :{W} thewhiteh4t')
140 | print(f'{G} |--->{C} Twitter :{W} {twitter_url}')
141 | print(f'{G} |--->{C} Community :{W} {comms_url}')
142 | print(f'{G}[>]{C} Version :{W} {VERSION}\n')
143 |
144 |
145 | def save_key(key_string):
146 | valid_keys = ['bevigil', 'binedge', 'facebook', 'netlas', 'shodan', 'virustotal', 'zoomeye', 'hunter']
147 | key_parts = key_string.split('@', 1)
148 | key_name = key_parts[0]
149 | key_str = key_parts[1]
150 | if key_name not in valid_keys:
151 | print(f'{R}[-] {C}Invalid key name!{W}')
152 | log_writer('Invalid key name, exiting')
153 | sys.exit(1)
154 | with open(keys_file_path, 'r') as keyfile:
155 | keys_json = loads(keyfile.read())
156 | keys_json[key_name] = key_str
157 | with open(keys_file_path, 'w') as key_update:
158 | key_update.write(dumps(keys_json))
159 | print(f'{G}[+] {W}{key_name} {C}Key Added!{W}')
160 | sys.exit(1)
161 |
162 |
163 | try:
164 | if show_banner:
165 | banner()
166 |
167 | if add_key:
168 | save_key(add_key)
169 |
170 | if not target:
171 | print(f'{R}[-] {C}No Target Specified!{W}')
172 | sys.exit(1)
173 |
174 |     if not target.startswith(('http://', 'https://')):
175 | print(f'{R}[-] {C}Protocol Missing, Include {W}http:// {C}or{W} https:// \n')
176 | log_writer(f'Protocol missing in {target}, exiting')
177 | sys.exit(1)
178 |
179 | if target.endswith('/'):
180 | target = target[:-1]
181 |
182 | print(f'{G}[+] {C}Target : {W}{target}')
183 |
184 | split_url = parse.urlsplit(target)
185 | extractor = tldextract.TLDExtract()
186 | parsed_url = extractor.extract_urllib(split_url)
187 | protocol = split_url.scheme
188 |
189 | if split_url.port:
190 | if not parsed_url.subdomain:
191 | netloc = parsed_url.domain # localhost:8000
192 | domain = netloc.split(':')[0]
193 | domain_suffix = ''
194 | hostname = domain
195 | else:
196 | netloc = f'{parsed_url.subdomain}.{parsed_url.domain}' # abc.com:8000
197 | domain = parsed_url.subdomain
198 | domain_suffix = parsed_url.domain.split(':')[0]
199 | hostname = f'{domain}.{domain_suffix}'
200 | else:
201 | if len(parsed_url.top_domain_under_public_suffix) == 0:
202 | netloc = parsed_url.domain # 8.8.8.8
203 | domain = ''
204 | domain_suffix = ''
205 | else:
206 | netloc = parsed_url.fqdn # abc.com
207 | domain = parsed_url.domain
208 | domain_suffix = parsed_url.suffix
209 | hostname = netloc
210 |
211 | try:
212 | ipaddress.ip_address(hostname)
213 | type_ip = True
214 | ip = hostname
215 | private_ip = ipaddress.ip_address(ip).is_private
216 | except Exception:
217 | try:
218 | ip = socket.gethostbyname(hostname)
219 | print(f'\n{G}[+] {C}IP Address : {W}{str(ip)}')
220 | private_ip = ipaddress.ip_address(ip).is_private
221 | except Exception as e:
222 | print(f'\n{R}[-] {C}Unable to Get IP : {W}{str(e)}')
223 | sys.exit(1)
224 |
225 | start_time = datetime.datetime.now()
226 |
227 | if output != 'None':
228 | fpath = output_dir
229 | if not folder_name:
230 | dt_now = str(datetime.datetime.now().strftime('%d-%m-%Y_%H:%M:%S'))
231 | fname = f'{fpath}fr_{hostname}_{dt_now}.{output}'
232 | respath = f'{fpath}fr_{hostname}_{dt_now}'
233 | else:
234 | fname = f'{fpath}{folder_name}.{output}'
235 | respath = f'{fpath}{folder_name}'
236 | if not os.path.exists(respath):
237 | os.makedirs(respath)
238 | out_settings = {
239 | 'format': output,
240 | 'directory': respath,
241 | 'file': fname
242 | }
243 | log_writer(f'OUTPUT = FORMAT: {output}, DIR: {respath}, FILENAME: {fname}')
244 |
245 | if full:
246 | log_writer('Starting full recon...')
247 |
248 | from modules.dns import dnsrec
249 | from modules.sslinfo import cert
250 | from modules.portscan import scan
251 | from modules.dirrec import hammer
252 | from modules.crawler import crawler
253 | from modules.headers import headers
254 | from modules.subdom import subdomains
255 | from modules.wayback import timetravel
256 | from modules.whois import whois_lookup
257 |
258 | headers(target, out_settings, data)
259 | cert(hostname, sslp, out_settings, data)
260 | whois_lookup(domain, domain_suffix, path_to_script, out_settings, data)
261 | dnsrec(hostname, dserv, out_settings, data)
262 | if not type_ip and not private_ip:
263 | subdomains(hostname, tout, out_settings, data, conf_path)
264 | scan(ip, out_settings, data, pscan_threads)
265 | crawler(target, protocol, netloc, out_settings, data)
266 | hammer(target, threads, tout, wdlist, redir, sslv, out_settings, data, filext)
267 | timetravel(target, data, out_settings)
268 |
269 | if headinfo:
270 | from modules.headers import headers
271 | log_writer('Starting header enum...')
272 | headers(target, out_settings, data)
273 |
274 | if sslinfo:
275 | from modules.sslinfo import cert
276 | log_writer('Starting SSL enum...')
277 | cert(hostname, sslp, out_settings, data)
278 |
279 | if whois:
280 | from modules.whois import whois_lookup
281 | log_writer('Starting whois enum...')
282 | whois_lookup(domain, domain_suffix, path_to_script, out_settings, data)
283 |
284 | if crawl:
285 | from modules.crawler import crawler
286 | log_writer('Starting crawler...')
287 | crawler(target, protocol, netloc, out_settings, data)
288 |
289 | if dns:
290 | from modules.dns import dnsrec
291 | log_writer('Starting DNS enum...')
292 | dnsrec(hostname, dserv, out_settings, data)
293 |
294 | if subd and not type_ip and not private_ip:
295 | from modules.subdom import subdomains
296 | log_writer('Starting subdomain enum...')
297 | subdomains(hostname, tout, out_settings, data, conf_path)
298 |
299 | elif subd and type_ip:
300 | print(f'{R}[-] {C}Sub-Domain Enumeration is Not Supported for IP Addresses{W}\n')
301 | log_writer('Sub-Domain Enumeration is Not Supported for IP Addresses, exiting')
302 | sys.exit(1)
303 |
304 | if wback:
305 | from modules.wayback import timetravel
306 | log_writer('Starting wayback enum...')
307 | timetravel(hostname, data, out_settings)
308 |
309 | if pscan:
310 | from modules.portscan import scan
311 | log_writer('Starting port scan...')
312 |         scan(ip, out_settings, data, pscan_threads)
313 |
314 | if dirrec:
315 | from modules.dirrec import hammer
316 | log_writer('Starting dir enum...')
317 | hammer(target, threads, tout, wdlist, redir, sslv, out_settings, data, filext)
318 |
319 | if not any([full, headinfo, sslinfo, whois, crawl, dns, subd, wback, pscan, dirrec]):
320 | print(f'\n{R}[-] Error : {C}At least One Argument is Required with URL{W}')
321 | log_writer('At least One Argument is Required with URL, exiting')
322 | output = 'None'
323 | sys.exit(1)
324 |
325 | end_time = datetime.datetime.now() - start_time
326 | print(f'\n{G}[+] {C}Completed in {W}{str(end_time)}\n')
327 | log_writer(f'Completed in {end_time}')
328 | print(f'{G}[+] {C}Exported : {W}{respath}')
329 | log_writer(f'Exported to {respath}')
330 | log_writer(f'{"-" * 30}')
331 | sys.exit()
332 | except KeyboardInterrupt:
333 | print(f'{R}[-] {C}Keyboard Interrupt.{W}\n')
334 | log_writer('Keyboard interrupt, exiting')
335 | log_writer(f'{"-" * 30}')
336 | sys.exit(130)
337 |
--------------------------------------------------------------------------------
/metadata.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "FinalRecon",
3 | "author": "thewhiteh4t",
4 | "version": "1.1.7",
5 | "twitter": "https://twitter.com/thewhiteh4t",
6 | "comms": "https://twc1rcle.com/"
7 | }
--------------------------------------------------------------------------------
/modules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thewhiteh4t/FinalRecon/ac4681c72613ef88a170d8f0ac9dfaa4714ade7b/modules/__init__.py
--------------------------------------------------------------------------------
/modules/crawler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import re
4 | import bs4
5 | import lxml
6 | import asyncio
7 | import requests
8 | import threading
9 | import tldextract
10 | from modules.export import export
11 | from modules.write_log import log_writer
12 | requests.packages.urllib3.disable_warnings()
13 |
14 | R = '\033[31m' # red
15 | G = '\033[32m' # green
16 | C = '\033[36m' # cyan
17 | W = '\033[0m' # white
18 | Y = '\033[33m' # yellow
19 |
20 | user_agent = {'User-Agent': 'FinalRecon'}
21 |
22 |
23 | def crawler(target, protocol, netloc, output, data):
24 | r_total = []
25 | sm_total = []
26 | css_total = []
27 | js_total = []
28 | int_total = []
29 | ext_total = []
30 | img_total = []
31 | sm_crawl_total = []
32 | js_crawl_total = []
33 | total = []
34 |
35 | print(f'\n{Y}[!] Starting Crawler...{W}\n')
36 |
37 | try:
38 | rqst = requests.get(target, headers=user_agent, verify=False, timeout=10)
39 | except Exception as exc:
40 | print(f'{R}[-] Exception : {C}{exc}{W}')
41 | log_writer(f'[crawler] Exception = {exc}')
42 | return
43 |
44 | status = rqst.status_code
45 | if status == 200:
46 | page = rqst.content
47 | soup = bs4.BeautifulSoup(page, 'lxml')
48 | r_url = f'{protocol}://{netloc}/robots.txt'
49 | sm_url = f'{protocol}://{netloc}/sitemap.xml'
50 | base_url = f'{protocol}://{netloc}'
51 | loop = asyncio.new_event_loop()
52 | asyncio.set_event_loop(loop)
53 | tasks = asyncio.gather(
54 | robots(r_url, r_total, sm_total, base_url, data, output),
55 | sitemap(sm_url, sm_total, data, output),
56 | css(target, css_total, data, soup, output),
57 | js_scan(target, js_total, data, soup, output),
58 | internal_links(target, int_total, data, soup, output),
59 | external_links(target, ext_total, data, soup, output),
60 | images(target, img_total, data, soup, output),
61 | sm_crawl(data, sm_crawl_total, sm_total, sm_url, output),
62 | js_crawl(data, js_crawl_total, js_total, output))
63 | loop.run_until_complete(tasks)
64 | loop.close()
65 | stats(output, r_total, sm_total, css_total, js_total,
66 | int_total, ext_total, img_total, sm_crawl_total,
67 | js_crawl_total, total, data, soup
68 | )
69 | log_writer('[crawler] Completed')
70 | else:
71 | print(f'{R}[-] {C}Status : {W}{status}')
72 | log_writer(f'[crawler] Status code = {status}, expected 200')
73 |
74 |
75 | def url_filter(target, link):
76 | if all([link.startswith('/') is True, link.startswith('//') is False]):
77 | ret_url = target + link
78 | return ret_url
79 |
80 | if link.startswith('//') is True:
81 | ret_url = link.replace('//', 'http://')
82 | return ret_url
83 |
84 | if all([
85 | link.find('//') == -1,
86 | link.find('../') == -1,
87 | link.find('./') == -1,
88 | link.find('http://') == -1,
89 | link.find('https://') == -1]
90 | ):
91 | ret_url = f'{target}/{link}'
92 | return ret_url
93 |
94 | if all([
95 | link.find('http://') == -1,
96 | link.find('https://') == -1]
97 | ):
98 |         ret_url = link.replace('//', 'http://')
99 |         ret_url = ret_url.replace('../', f'{target}/')
100 |         ret_url = ret_url.replace('./', f'{target}/')
101 | return ret_url
102 | return link
103 |
104 |
105 | async def robots(robo_url, r_total, sm_total, base_url, data, output):
106 | print(f'{G}[+] {C}Looking for robots.txt{W}', end='', flush=True)
107 |
108 | try:
109 | r_rqst = requests.get(robo_url, headers=user_agent, verify=False, timeout=10)
110 | r_sc = r_rqst.status_code
111 | if r_sc == 200:
112 | print(f'{G}{"[".rjust(9, ".")} Found ]{W}')
113 | print(f'{G}[+] {C}Extracting robots Links{W}', end='', flush=True)
114 | r_page = r_rqst.text
115 | r_scrape = r_page.split('\n')
116 | for entry in r_scrape:
117 | if any([
118 | entry.find('Disallow') == 0,
119 | entry.find('Allow') == 0,
120 | entry.find('Sitemap') == 0]):
121 |
122 | url = entry.split(': ', 1)[1].strip()
123 | tmp_url = url_filter(base_url, url)
124 |
125 | if tmp_url is not None:
126 | r_total.append(url_filter(base_url, url))
127 |
128 | if url.endswith('xml'):
129 | sm_total.append(url)
130 |
131 | r_total = set(r_total)
132 | print(f'{G}{"[".rjust(8, ".")} {len(r_total)} ]')
133 | exporter(data, output, r_total, 'robots')
134 |
135 | elif r_sc == 404:
136 | print(f'{R}{"[".rjust(9, ".")} Not Found ]{W}')
137 |
138 | else:
139 | print(f'{R}{"[".rjust(9, ".")} {r_sc} ]{W}')
140 |
141 | except Exception as exc:
142 | print(f'\n{R}[-] Exception : {C}{exc}{W}')
143 | log_writer(f'[crawler.robots] Exception = {exc}')
144 |
145 |
146 | async def sitemap(target_url, sm_total, data, output):
147 | print(f'{G}[+] {C}Looking for sitemap.xml{W}', end='', flush=True)
148 | try:
149 | sm_rqst = requests.get(target_url, headers=user_agent, verify=False, timeout=10)
150 | sm_sc = sm_rqst.status_code
151 | if sm_sc == 200:
152 | print(f'{G}{"[".rjust(8, ".")} Found ]{W}')
153 | print(f'{G}[+] {C}Extracting sitemap Links{W}', end='', flush=True)
154 | sm_page = sm_rqst.content
155 | sm_soup = bs4.BeautifulSoup(sm_page, 'xml')
156 | links = sm_soup.find_all('loc')
157 | for url in links:
158 | url = url.get_text()
159 | if url is not None:
160 | sm_total.append(url)
161 |
162 | sm_total = set(sm_total)
163 | print(f'{G}{"[".rjust(7, ".")} {len(sm_total)} ]{W}')
164 | exporter(data, output, sm_total, 'sitemap')
165 | elif sm_sc == 404:
166 | print(f'{R}{"[".rjust(8, ".")} Not Found ]{W}')
167 | else:
168 | print(f'{R}{"[".rjust(8, ".")} Status Code : {sm_sc} ]{W}')
169 | except Exception as exc:
170 | print(f'\n{R}[-] Exception : {C}{exc}{W}')
171 | log_writer(f'[crawler.sitemap] Exception = {exc}')
172 |
173 |
174 | async def css(target, css_total, data, soup, output):
175 | print(f'{G}[+] {C}Extracting CSS Links{W}', end='', flush=True)
176 | css_links = soup.find_all('link', href=True)
177 |
178 | for link in css_links:
179 | url = link.get('href')
180 | if url is not None and '.css' in url:
181 | css_total.append(url_filter(target, url))
182 |
183 | css_total = set(css_total)
184 | print(f'{G}{"[".rjust(11, ".")} {len(css_total)} ]{W}')
185 | exporter(data, output, css_total, 'css')
186 |
187 |
188 | async def js_scan(target, js_total, data, soup, output):
189 | print(f'{G}[+] {C}Extracting Javascript Links{W}', end='', flush=True)
190 | scr_tags = soup.find_all('script', src=True)
191 |
192 | for link in scr_tags:
193 | url = link.get('src')
194 | if url is not None and '.js' in url:
195 | tmp_url = url_filter(target, url)
196 | if tmp_url is not None:
197 | js_total.append(tmp_url)
198 |
199 | js_total = set(js_total)
200 | print(f'{G}{"[".rjust(4, ".")} {len(js_total)} ]{W}')
201 | exporter(data, output, js_total, 'javascripts')
202 |
203 |
204 | async def internal_links(target, int_total, data, soup, output):
205 | print(f'{G}[+] {C}Extracting Internal Links{W}', end='', flush=True)
206 |
207 | ext = tldextract.extract(target)
208 | domain = ext.registered_domain
209 |
210 | links = soup.find_all('a')
211 | for link in links:
212 | url = link.get('href')
213 | if url is not None:
214 | if domain in url:
215 | int_total.append(url)
216 |
217 | int_total = set(int_total)
218 | print(f'{G}{"[".rjust(6, ".")} {len(int_total)} ]{W}')
219 | exporter(data, output, int_total, 'internal_urls')
220 |
221 |
222 | async def external_links(target, ext_total, data, soup, output):
223 | print(f'{G}[+] {C}Extracting External Links{W}', end='', flush=True)
224 |
225 | ext = tldextract.extract(target)
226 | domain = ext.registered_domain
227 |
228 | links = soup.find_all('a')
229 | for link in links:
230 | url = link.get('href')
231 | if url is not None:
232 | if domain not in url and 'http' in url:
233 | ext_total.append(url)
234 |
235 | ext_total = set(ext_total)
236 | print(f'{G}{"[".rjust(6, ".")} {len(ext_total)} ]{W}')
237 | exporter(data, output, ext_total, 'external_urls')
238 |
239 |
240 | async def images(target, img_total, data, soup, output):
241 | print(f'{G}[+] {C}Extracting Images{W}', end='', flush=True)
242 | image_tags = soup.find_all('img')
243 |
244 | for link in image_tags:
245 | url = link.get('src')
246 | if url is not None and len(url) > 1:
247 | img_total.append(url_filter(target, url))
248 |
249 | img_total = set(img_total)
250 | print(f'{G}{"[".rjust(14, ".")} {len(img_total)} ]{W}')
251 | exporter(data, output, img_total, 'images')
252 |
253 |
254 | async def sm_crawl(data, sm_crawl_total, sm_total, sm_url, output):
255 | print(f'{G}[+] {C}Crawling Sitemaps{W}', end='', flush=True)
256 |
257 | threads = []
258 |
259 | def fetch(site_url):
260 | try:
261 | sm_rqst = requests.get(site_url, headers=user_agent, verify=False, timeout=10)
262 | sm_sc = sm_rqst.status_code
263 | if sm_sc == 200:
264 | sm_data = sm_rqst.content.decode()
265 | sm_soup = bs4.BeautifulSoup(sm_data, 'xml')
266 | links = sm_soup.find_all('loc')
267 | for url in links:
268 | url = url.get_text()
269 | if url is not None:
270 | sm_crawl_total.append(url)
271 | elif sm_sc == 404:
272 | # print(R + '['.rjust(8, '.') + ' Not Found ]' + W)
273 | pass
274 | else:
275 | # print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W)
276 | pass
277 | except Exception as exc:
278 | # print(f'\n{R}[-] Exception : {C}{exc}{W}')
279 | log_writer(f'[crawler.sm_crawl] Exception = {exc}')
280 |
281 | for site_url in sm_total:
282 | if site_url != sm_url:
283 | if site_url.endswith('xml') is True:
284 | task = threading.Thread(target=fetch, args=[site_url])
285 | task.daemon = True
286 | threads.append(task)
287 | task.start()
288 |
289 | for thread in threads:
290 | thread.join()
291 |
292 | sm_crawl_total = set(sm_crawl_total)
293 | print(f'{G}{"[".rjust(14, ".")} {len(sm_crawl_total)} ]{W}')
294 | exporter(data, output, sm_crawl_total, 'urls_inside_sitemap')
295 |
296 |
297 | async def js_crawl(data, js_crawl_total, js_total, output):
298 | print(f'{G}[+] {C}Crawling Javascripts{W}', end='', flush=True)
299 |
300 | threads = []
301 |
302 | def fetch(js_url):
303 | try:
304 | js_rqst = requests.get(js_url, headers=user_agent, verify=False, timeout=10)
305 | js_sc = js_rqst.status_code
306 | if js_sc == 200:
307 | js_data = js_rqst.content.decode()
308 | js_data = js_data.split(';')
309 | for line in js_data:
310 | if any(['http://' in line, 'https://' in line]):
311 | found = re.findall(r'\"(http[s]?://.*?)\"', line)
312 | for item in found:
313 | if len(item) > 8:
314 | js_crawl_total.append(item)
315 | except Exception as exc:
316 | # print(f'\n{R}[-] Exception : {C}{exc}{W}')
317 | log_writer(f'[crawler.js_crawl] Exception = {exc}')
318 |
319 | for js_url in js_total:
320 | task = threading.Thread(target=fetch, args=[js_url])
321 | task.daemon = True
322 | threads.append(task)
323 | task.start()
324 |
325 | for thread in threads:
326 | thread.join()
327 |
328 | js_crawl_total = set(js_crawl_total)
329 | print(f'{G}{"[".rjust(11, ".")} {len(js_crawl_total)} ]{W}')
330 | exporter(data, output, js_crawl_total, 'urls_inside_js')
331 |
332 |
333 | def exporter(data, output, list_name, file_name):
334 | data[f'module-crawler-{file_name}'] = {'links': list(list_name)}
335 | data[f'module-crawler-{file_name}'].update({'exported': False})
336 | fname = f'{output["directory"]}/{file_name}.{output["format"]}'
337 | output['file'] = fname
338 | export(output, data)
339 |
340 |
341 | def stats(output, r_total, sm_total, css_total, js_total, int_total, ext_total, img_total, sm_crawl_total, js_crawl_total, total, data, soup):
342 | total.extend(r_total)
343 | total.extend(sm_total)
344 | total.extend(css_total)
345 | total.extend(js_total)
346 | total.extend(js_crawl_total)
347 | total.extend(sm_crawl_total)
348 | total.extend(int_total)
349 | total.extend(ext_total)
350 | total.extend(img_total)
351 | total = set(total)
352 |
353 | print(f'\n{G}[+] {C}Total Unique Links Extracted : {W}{len(total)}')
354 |
355 | if output != 'None':
356 | if len(total) != 0:
357 | data['module-crawler-stats'] = {'Total Unique Links Extracted': str(len(total))}
358 | try:
359 | target_title = soup.title.string
360 | except AttributeError:
361 | target_title = 'None'
362 | data['module-crawler-stats'].update({'Title ': str(target_title)})
363 |
364 | data['module-crawler-stats'].update(
365 | {
366 | 'total_urls_robots': len(r_total),
367 | 'total_urls_sitemap': len(sm_total),
368 | 'total_urls_css': len(css_total),
369 | 'total_urls_js': len(js_total),
370 | 'total_urls_in_js': len(js_crawl_total),
371 | 'total_urls_in_sitemaps': len(sm_crawl_total),
372 | 'total_urls_internal': len(int_total),
373 | 'total_urls_external': len(ext_total),
374 | 'total_urls_images': len(img_total),
375 | 'total_urls': len(total)
376 | })
377 | data['module-crawler-stats'].update({'exported': False})
378 |
--------------------------------------------------------------------------------
/modules/dirrec.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import socket
4 | import aiohttp
5 | import asyncio
6 | from datetime import date
7 | from modules.export import export
8 | from modules.write_log import log_writer
9 |
10 | R = '\033[31m' # red
11 | G = '\033[32m' # green
12 | C = '\033[36m' # cyan
13 | W = '\033[0m' # white
14 | Y = '\033[33m' # yellow
15 |
16 | header = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0'}
17 | count = 0
18 | wm_count = 0
19 | exc_count = 0
20 | found = []
21 | responses = []
22 | curr_yr = date.today().year
23 | last_yr = curr_yr - 1
24 |
25 |
26 | async def fetch(url, session, redir):
27 | global responses, exc_count
28 | try:
29 | async with session.get(url, headers=header, allow_redirects=redir) as response:
30 | responses.append((url, response.status))
31 | return response.status
32 | except Exception as exc:
33 | exc_count += 1
34 | log_writer(f'[dirrec] Exception : {exc}')
35 |
36 |
37 | async def insert(queue, filext, target, wdlist, redir):
38 | if len(filext) == 0:
39 | url = target + '/{}'
40 | with open(wdlist, 'r') as wordlist:
41 | for word in wordlist:
42 | word = word.strip()
43 | await queue.put([url.format(word), redir])
44 | await asyncio.sleep(0)
45 | else:
46 | filext = ',' + filext
47 | filext = filext.split(',')
48 | with open(wdlist, 'r') as wordlist:
49 | for word in wordlist:
50 | for ext in filext:
51 | ext = ext.strip()
52 | if len(ext) == 0:
53 | url = target + '/{}'
54 | else:
55 | url = target + '/{}.' + ext
56 | word = word.strip()
57 | await queue.put([url.format(word), redir])
58 | await asyncio.sleep(0)
59 |
60 |
61 | async def consumer(queue, target, session, redir, total_num_words):
62 | global count
63 | while True:
64 | values = await queue.get()
65 | url = values[0]
66 | redir = values[1]
67 | status = await fetch(url, session, redir)
68 | await filter_out(target, url, status)
69 | queue.task_done()
70 | count += 1
71 | print(f'{Y}[!] {C}Requests : {W}{count}/{total_num_words}', end='\r')
72 |
73 |
74 | async def run(target, threads, tout, wdlist, redir, sslv, filext, total_num_words):
75 | queue = asyncio.Queue(maxsize=threads)
76 |
77 | conn = aiohttp.TCPConnector(limit=threads, family=socket.AF_INET, verify_ssl=sslv)
78 | timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
79 | async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
80 | distrib = asyncio.create_task(insert(queue, filext, target, wdlist, redir))
81 | workers = [
82 | asyncio.create_task(
83 | consumer(queue, target, session, redir, total_num_words)
84 | ) for _ in range(threads)]
85 |
86 | await asyncio.gather(distrib)
87 | await queue.join()
88 |
89 | for worker in workers:
90 | worker.cancel()
91 |
92 |
93 | async def filter_out(target, url, status):
94 | global found
95 | if status in {200}:
96 | if str(url) != target + '/':
97 | found.append(url)
98 | print(f'{G}{status} {C}|{W} {url}')
99 | elif status in {301, 302, 303, 307, 308}:
100 | found.append(url)
101 | print(f'{Y}{status} {C}|{W} {url}')
102 | elif status in {403}:
103 | found.append(url)
104 | print(f'{R}{status} {C}|{W} {url}')
105 |
106 |
107 | def dir_output(output, data):
108 | result = {}
109 |
110 | for entry in responses:
111 | if entry is not None:
112 | if entry[1] in {200}:
113 | if output != 'None':
114 | result.setdefault('Status 200', []).append(f'200, {entry[0]}')
115 | elif entry[1] in {301, 302, 303, 307, 308}:
116 | if output != 'None':
117 | result.setdefault(f'Status {entry[1]}', []).append(f'{entry[1]}, {entry[0]}')
118 | elif entry[1] in {403}:
119 | if output != 'None':
120 | result.setdefault('Status 403', []).append(f'{entry[1]}, {entry[0]}')
121 |
122 | print(f'\n\n{G}[+] {C}Directories Found : {W}{len(found)}\n')
123 | print(f'{Y}[!] {C}Exceptions : {W}{exc_count}')
124 |
125 | if output != 'None':
126 | result.update({'exported': False})
127 | data['module-Directory Search'] = result
128 | fname = f'{output["directory"]}/directory_enum.{output["format"]}'
129 | output['file'] = fname
130 | export(output, data)
131 |
132 |
133 | def hammer(target, threads, tout, wdlist, redir, sslv, output, data, filext):
134 | print(f'\n{Y}[!] Starting Directory Enum...{W}\n')
135 | print(f'{G}[+] {C}Threads : {W}{threads}')
136 | print(f'{G}[+] {C}Timeout : {W}{tout}')
137 | print(f'{G}[+] {C}Wordlist : {W}{wdlist}')
138 | print(f'{G}[+] {C}Allow Redirects : {W}{redir}')
139 | print(f'{G}[+] {C}SSL Verification : {W}{sslv}')
140 | with open(wdlist, 'r') as wordlist:
141 | num_words = sum(1 for i in wordlist)
142 | print(f'{G}[+] {C}Wordlist Size : {W}{num_words}')
143 | print(f'{G}[+] {C}File Extensions : {W}{filext}\n')
144 | if len(filext) != 0:
145 | total_num_words = num_words * (len(filext.split(',')) + 1)
146 | else:
147 | total_num_words = num_words
148 |
149 | loop = asyncio.new_event_loop()
150 | asyncio.set_event_loop(loop)
151 | loop.run_until_complete(run(target, threads, tout, wdlist, redir, sslv, filext, total_num_words))
152 | dir_output(output, data)
153 | loop.close()
154 | log_writer('[dirrec] Completed')
155 |
--------------------------------------------------------------------------------
/modules/dns.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import asyncio
4 | import dns.asyncresolver
5 | from modules.export import export
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | def dnsrec(domain, dns_servers, output, data):
16 | result = {}
17 | print(f'\n{Y}[!] Starting DNS Enumeration...{W}\n')
18 | dns_records = ['A', 'AAAA', 'AFSDB', 'APL', 'CAA', 'CDNSKEY', 'CDS', 'CERT',
19 | 'CNAME', 'CSYNC', 'DHCID', 'DLV', 'DNAME', 'DNSKEY', 'DS', 'EUI48',
20 | 'EUI64', 'HINFO', 'HIP', 'HTTPS', 'IPSECKEY', 'KEY', 'KX', 'LOC',
21 | 'MX', 'NAPTR', 'NS', 'NSEC', 'NSEC3', 'NSEC3PARAM', 'OPENPGPKEY', 'PTR',
22 | 'RP', 'RRSIG', 'SIG', 'SMIMEA', 'SOA', 'SRV', 'SSHFP', 'SVCB',
23 | 'TA', 'TKEY', 'TLSA', 'TSIG', 'TXT', 'URI', 'ZONEMD']
24 | full_ans = []
25 |
26 | res = dns.asyncresolver.Resolver(configure=False)
27 | res.nameservers = [sv.strip() for sv in dns_servers.split(',')]
28 |
29 |
30 |
31 | async def fetch_records(res, domain, record):
32 | answer = await res.resolve(domain, record)
33 | return answer
34 |
35 |
36 | for dns_record in dns_records:
37 | try:
38 | ans = asyncio.run(fetch_records(res, domain, dns_record))
39 | for record_data in ans:
40 | full_ans.append(f'{dns_record} : {record_data.to_text()}')
41 | except dns.resolver.NoAnswer as exc:
42 | log_writer(f'[dns] Exception = {exc}')
43 | except dns.resolver.NoMetaqueries as exc:
44 | log_writer(f'[dns] Exception = {exc}')
45 | except dns.resolver.NoNameservers as exc:
46 | log_writer(f'[dns] Exception = {exc}')
47 | except dns.resolver.NXDOMAIN as exc:
48 | log_writer(f'[dns] Exception = {exc}')
49 | print(f'{R}[-] {C}DNS Records Not Found!{W}')
50 | if output != 'None':
51 | result.setdefault('dns', ['DNS Records Not Found'])
52 | return
53 |
54 | for entry in full_ans:
55 | entry_parts = entry.split(' : ')
56 | print(f'{C}{entry_parts[0]} \t: {W}{entry_parts[1]}')
57 | if output != 'None':
58 | result.setdefault('dns', []).append(entry)
59 |
60 | dmarc_target = f'_dmarc.{domain}'
61 | try:
62 | dmarc_ans = asyncio.run(fetch_records(res, dmarc_target, 'TXT'))
63 | for entry in dmarc_ans:
64 | print(f'{C}DMARC \t: {W}{entry.to_text()}')
65 | if output != 'None':
66 | result.setdefault('dmarc', []).append(f'DMARC : {entry.to_text()}')
67 | except dns.resolver.NoAnswer as exc:
68 | log_writer(f'[dns.dmarc] Exception = {exc}')
69 | except dns.resolver.NoMetaqueries as exc:
70 | log_writer(f'[dns.dmarc] Exception = {exc}')
71 | except dns.resolver.NoNameservers as exc:
72 | log_writer(f'[dns.dmarc] Exception = {exc}')
73 | except dns.resolver.NXDOMAIN as exc:
74 | log_writer(f'[dns.dmarc] Exception = {exc}')
75 | print(f'\n{R}[-] {C}DMARC Record Not Found!{W}')
76 | if output != 'None':
77 | result.setdefault('dmarc', ['DMARC Record Not Found!'])
78 |
79 | result.update({'exported': False})
80 |
81 | if output != 'None':
82 | data['module-DNS Enumeration'] = result
83 | fname = f'{output["directory"]}/dns_records.{output["format"]}'
84 | output['file'] = fname
85 | export(output, data)
86 | log_writer('[dns] Completed')
87 |
--------------------------------------------------------------------------------
/modules/export.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import sys
4 |
5 | R = '\033[31m' # red
6 | G = '\033[32m' # green
7 | C = '\033[36m' # cyan
8 | W = '\033[0m' # white
9 | Y = '\033[33m' # yellow
10 |
11 |
12 | def export(output, data):
13 | if output['format'] != 'txt':
14 | print(f'{R}[-] {C}Invalid Output Format, Valid Formats : {W}txt')
15 | sys.exit()
16 |
17 | fname = output['file']
18 | with open(fname, 'w') as outfile:
19 | txt_export(data, outfile)
20 |
21 |
22 | def txt_unpack(outfile, val):
23 | def write_item(item):
24 | if isinstance(item, list):
25 | outfile.write(f'{item[0]}\t{item[1]}\t\t{item[2]}\n')
26 | else:
27 | outfile.write(f'{item}\n')
28 |
29 | if isinstance(val, list):
30 | for item in val:
31 | write_item(item)
32 |
33 | elif isinstance(val, dict):
34 | for sub_key, sub_val in val.items():
35 | if sub_key == 'exported':
36 | continue
37 | if isinstance(sub_val, list):
38 | txt_unpack(outfile, sub_val)
39 | else:
40 | outfile.write(f'{sub_key}: {sub_val}\n')
41 |
42 |
43 | def txt_export(data, outfile):
44 | for key, val in data.items():
45 | if key.startswith('module'):
46 | if not val['exported']:
47 | txt_unpack(outfile, val)
48 | val['exported'] = True
49 | elif key.startswith('Type'):
50 | outfile.write(f'\n{data[key]}\n')
51 | outfile.write(f'{"=" * len(data[key])}\n\n')
52 | else:
53 | outfile.write(f'{key}: {val}\n')
54 |
--------------------------------------------------------------------------------
/modules/headers.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import requests
4 | from modules.export import export
5 | from modules.write_log import log_writer
6 | requests.packages.urllib3.disable_warnings()
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | def headers(target, output, data):
16 | result = {}
17 | print(f'\n{Y}[!] Headers :{W}\n')
18 | try:
19 | rqst = requests.get(target, verify=False, timeout=10)
20 | for key, val in rqst.headers.items():
21 | print(f'{C}{key} : {W}{val}')
22 | if output != 'None':
23 | result.update({key: val})
24 | except Exception as exc:
25 | print(f'\n{R}[-] {C}Exception : {W}{exc}\n')
26 | if output != 'None':
27 | result.update({'Exception': str(exc)})
28 | log_writer(f'[headers] Exception = {exc}')
29 | result.update({'exported': False})
30 |
31 | if output != 'None':
32 | fname = f'{output["directory"]}/headers.{output["format"]}'
33 | output['file'] = fname
34 | data['module-headers'] = result
35 | export(output, data)
36 | log_writer('[headers] Completed')
37 |
--------------------------------------------------------------------------------
/modules/portscan.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import asyncio
4 | from modules.export import export
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 | counter = 0
14 | port_list = {
15 |
16 | 1: "tcpmux",
17 | 9: "Discard",
18 | 15: "netstat",
19 | 20: "FTP-CLI",
20 | 21: "FTP",
21 | 22: "SSH",
22 | 23: "Telnet",
23 | 25: "SMTP",
24 | 26: "rsftp",
25 | 53: "DNS",
26 | 67: "DHCP (Server)",
27 | 68: "DHCP (Client)",
28 | 69: "TFTP",
29 | 80: "HTTP",
30 | 110: "POP3",
31 | 119: "NNTP",
32 | 123: "NTP",
33 | 135: "Microsoft RPC",
34 | 137: "NetBIOS Name Service",
35 | 138: "NetBIOS Datagram Service",
36 | 139: "NetBIOS Session Service",
37 | 143: "IMAP",
38 | 161: "SNMP",
39 | 162: "SNMP Trap",
40 | 179: "BGP",
41 | 194: "IRC",
42 | 389: "LDAP",
43 | 443: "HTTPS",
44 | 445: "Microsoft-DS",
45 | 465: "SMTPS",
46 | 515: "LPD",
47 | 520: "RIP",
48 | 554: "RTSP (Real-Time Streaming)",
49 | 587: "SMTP (Submission)",
50 | 631: "IPP (CUPS)",
51 | 636: "LDAPS",
52 | 873: "rsync",
53 | 990: "FTPS",
54 | 993: "IMAPS",
55 | 995: "POP3S",
56 | 1024: "Dynamic/Private",
57 | 1080: "Socks Proxy",
58 | 1194: "OpenVPN",
59 | 1433: "Microsoft SQL Server",
60 | 1434: "Microsoft SQL Monitor",
61 | 1521: "Oracle DB",
62 | 1701: "L2TP",
63 | 1723: "PPTP",
64 | 1883: "MQTT",
65 | 2000: "Cisco-sccp",
66 | 2049: "NFS",
67 | 2222: "EtherNetIP-1",
68 | 2375: "Docker REST API",
69 | 2376: "Docker REST API (TLS)",
70 | 2483: "Oracle DB",
71 | 2484: "Oracle DB (TLS)",
72 | 3000: "Grafana",
73 | 3306: "MySQL",
74 | 3389: "RDP",
75 | 3690: "Subversion",
76 | 4373: "Remote Authenticated Command",
77 | 4443: "HTTPS-Alt",
78 | 4444: "Metasploit",
79 | 4567: "MySQL Group Replication",
80 | 4786: "Cisco Smart Install",
81 | 5060: "SIP",
82 | 5432: "PostgreSQL",
83 | 5672: "RabbitMQ",
84 | 5900: "VNC",
85 | 5938: "TeamViewer",
86 | 5984: "CouchDB",
87 | 6379: "Redis",
88 | 6443: "Kubernetes API",
89 | 6667: "IRC",
90 | 7000: "Couchbase",
91 | 7200: "Hazelcast",
92 | 8000: "HTTP-Alt",
93 | 8008: "HTTP-Alt",
94 | 8080: "HTTP-Proxy",
95 | 8081: "SonarQube",
96 | 8086: "InfluxDB",
97 | 8088: "Kibana",
98 | 8181: "HTTP-Alt",
99 | 8443: "HTTPS-Alt",
100 | 8444: "Jenkins",
101 | 8888: "HTTP-Alt",
102 | 9000: "SonarQube",
103 | 9090: "Openfire",
104 | 9092: "Kafka",
105 | 9093: "Prometheus Alertmanager",
106 | 9200: "Elasticsearch",
107 | 9300: "Elasticsearch",
108 | 9418: "Git",
109 | 9990: "JBoss Management",
110 | 9993: "Unreal Tournament",
111 | 9999: "NMAP",
112 | 10000: "Webmin",
113 | 10050: "Zabbix Agent",
114 | 10051: "Zabbix Server",
115 | 11211: "Memcached",
116 | 11300: "Beanstalkd",
118 | 25565: "Minecraft",
119 | 27015: "Source Engine Games",
120 | 27017: "MongoDB",
121 | 27018: "MongoDB",
122 | 5044: "Logstash",
123 | 50000: "SAP",
124 | 50030: "Hadoop",
125 | 50070: "Hadoop",
126 | 5555: "Open Remote",
127 | 61616: "ActiveMQ",
128 |
129 | }
130 | async def insert(queue):
131 | for port in port_list:
132 | await queue.put(port)
133 |
134 |
135 | async def consumer(queue, ip_addr, result):
136 | global counter
137 | while True:
138 | port = await queue.get()
139 | await sock_conn(ip_addr, port, result)
140 | queue.task_done()
141 | counter += 1
142 | print(f'{Y}[!] {C}Scanning : {W}{counter}/{len(port_list)}', end='\r')
143 |
144 |
145 | async def run(ip_addr, result, threads):
146 | queue = asyncio.Queue(maxsize=threads)
147 | distrib = asyncio.create_task(insert(queue))
148 | workers = [
149 | asyncio.create_task(
150 | consumer(queue, ip_addr, result)
151 | ) for _ in range(threads)]
152 |
153 | await asyncio.gather(distrib)
154 | await queue.join()
155 | for worker in workers:
156 | worker.cancel()
157 |
158 |
159 | def scan(ip_addr, output, data, threads):
160 | result = {}
161 | result['ports'] = []
162 | print(f'\n{Y}[!] Starting Port Scan...{W}\n')
163 | print(f'{G}[+] {C}Scanning Top 100+ Ports With {threads} Threads...{W}\n')
164 |
165 | loop = asyncio.new_event_loop()
166 | asyncio.set_event_loop(loop)
167 | loop.run_until_complete(run(ip_addr, result, threads))
168 | loop.close()
169 |
170 | print(f'\n{G}[+] {C}Scan Completed!{W}\n')
171 |
172 | if output != 'None':
173 | ps_output(output, data, result)
174 | log_writer('[portscan] Completed')
175 |
176 |
177 | async def sock_conn(ip_addr, port, result):
178 | try:
179 | connector = asyncio.open_connection(ip_addr, port)
180 | await asyncio.wait_for(connector, 1)
181 | port_name = port_list[port] # Get the port name from the port_list dictionary
182 | print(f'\x1b[K{G}[+] {C}{port} ({port_name}){W}')
183 | result['ports'].append(f"{port} ({port_name})")
184 | return True
185 | except TimeoutError:
186 | return False
187 | except Exception:
188 | pass
189 |
190 |
191 | def ps_output(output, data, result):
192 | data['module-Port Scan'] = result
193 | result.update({'exported': False})
194 | fname = f'{output["directory"]}/ports.{output["format"]}'
195 | output['file'] = fname
196 | export(output, data)
197 |
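198 | # Usage sketch (illustrative only, not called from this module); the target IP,
199 | # the 'None' output flag and the thread count below are placeholder assumptions:
200 | #
201 | #   from modules.portscan import scan
202 | #   scan('127.0.0.1', 'None', {}, 50)  # prints open ports; 'None' skips export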
--------------------------------------------------------------------------------
/modules/sslinfo.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import ssl
4 | import socket
5 | from modules.export import export
6 | from modules.write_log import log_writer
7 | from cryptography import x509
8 | from cryptography.hazmat.backends import default_backend
9 | from datetime import timezone
10 |
11 | R = '\033[31m' # red
12 | G = '\033[32m' # green
13 | C = '\033[36m' # cyan
14 | W = '\033[0m' # white
15 | Y = '\033[33m' # yellow
16 |
17 |
18 | def cert(hostname, sslp, output, data):
19 | result = {}
20 | presence = False
21 | print(f'\n{Y}[!] SSL Certificate Information : {W}\n')
22 |
23 | port_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
24 | port_test.settimeout(5)
25 | try:
26 | port_test.connect((hostname, sslp))
27 | port_test.close()
28 | presence = True
29 | except Exception:
30 | port_test.close()
31 | print(f'{R}[-] {C}SSL is not Present on Target URL...Skipping...{W}')
32 | result.update({'Error': 'SSL is not Present on Target URL'})
33 | log_writer('[sslinfo] SSL is not Present on Target URL...Skipping...')
34 |
35 | def unpack(nested_tuple, pair):
36 | for item in nested_tuple:
37 | if isinstance(item, tuple):
38 | if len(item) == 2:
39 | pair[item[0]] = item[1]
40 | else:
41 | unpack(item, pair)
42 | else:
43 | pair[nested_tuple.index(item)] = item
44 |
45 | def process_cert(info):
46 | pair = {}
47 | for key, val in info.items():
48 | if isinstance(val, tuple):
49 | print(f'{G}[+] {C}{key}{W}')
50 | unpack(val, pair)
51 | for sub_key, sub_val in pair.items():
52 | print(f'\t{G}└╴{C}{sub_key}: {W}{sub_val}')
53 | result.update({f'{key}-{sub_key}': sub_val})
54 | pair.clear()
55 | elif isinstance(val, dict):
56 | print(f'{G}[+] {C}{key}{W}')
57 | for sub_key, sub_val in val.items():
58 | print(f'\t{G}└╴{C}{sub_key}: {W}{sub_val}')
59 | result.update({f'{key}-{sub_key}': sub_val})
60 | elif isinstance(val, list):
61 | print(f'{G}[+] {C}{key}{W}')
62 | for sub_val in val:
63 | print(f'\t{G}└╴{C}{val.index(sub_val)}: {W}{sub_val}')
64 | result.update({f'{key}-{val.index(sub_val)}': sub_val})
65 | else:
66 | print(f'{G}[+] {C}{key} : {W}{val}')
67 | result.update({key: val})
68 |
69 | if presence:
70 | ctx = ssl.create_default_context()
71 | ctx.check_hostname = False
72 | ctx.verify_mode = ssl.CERT_NONE
73 | sock = socket.socket()
74 | sock.settimeout(5)
75 | ssl_conn = ctx.wrap_socket(sock, server_hostname=hostname)
76 | ssl_conn.connect((hostname, sslp))
77 | x509_cert = ssl_conn.getpeercert(binary_form=True)
78 | decoded_cert = x509.load_der_x509_certificate(x509_cert, default_backend())
79 |
80 | subject_dict = {}
81 | issuer_dict = {}
82 |
83 | def name_to_dict(attribute):
84 | attr_name = attribute.oid._name
85 | attr_value = attribute.value
86 | return attr_name, attr_value
87 |
88 | for attribute in decoded_cert.subject:
89 | name, value = name_to_dict(attribute)
90 | subject_dict[name] = value
91 |
92 | for attribute in decoded_cert.issuer:
93 | name, value = name_to_dict(attribute)
94 | issuer_dict[name] = value
95 |
96 | # Handle `not_valid_before` and `not_valid_after` with compatibility
97 | if hasattr(decoded_cert, 'not_valid_before_utc') and hasattr(decoded_cert, 'not_valid_after_utc'):
98 | not_before = decoded_cert.not_valid_before_utc
99 | not_after = decoded_cert.not_valid_after_utc
100 | else:
101 | # Fallback for older versions
102 | not_before = decoded_cert.not_valid_before.replace(tzinfo=timezone.utc)
103 | not_after = decoded_cert.not_valid_after.replace(tzinfo=timezone.utc)
104 |
105 | cert_dict = {
106 | 'protocol': ssl_conn.version(),
107 | 'cipher': ssl_conn.cipher(),
108 | 'subject': subject_dict,
109 | 'issuer': issuer_dict,
110 | 'version': decoded_cert.version,
111 | 'serialNumber': decoded_cert.serial_number,
112 | 'notBefore': not_before.strftime("%b %d %H:%M:%S %Y GMT"),
113 | 'notAfter': not_after.strftime("%b %d %H:%M:%S %Y GMT"),
114 | }
115 |
116 | extensions = decoded_cert.extensions
117 | for ext in extensions:
118 | if ext.oid != x509.ExtensionOID.SUBJECT_ALTERNATIVE_NAME:
119 | continue
120 | san_entries = ext.value
121 | subject_alt_names = []
122 | for entry in san_entries:
123 | if isinstance(entry, x509.DNSName):
124 | subject_alt_names.append(entry.value)
125 | cert_dict['subjectAltName'] = subject_alt_names
126 |
127 | process_cert(cert_dict)
128 | result.update({'exported': False})
129 |
130 |     if output != 'None':
131 | fname = f'{output["directory"]}/ssl.{output["format"]}'
132 | output['file'] = fname
133 | data['module-SSL Certificate Information'] = result
134 | export(output, data)
135 | log_writer('[sslinfo] Completed')
136 |
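137 | # Usage sketch (illustrative only); the hostname, port and the 'None' output flag
138 | # below are placeholder assumptions following the convention used by the other modules:
139 | #
140 | #   from modules.sslinfo import cert
141 | #   cert('example.com', 443, 'None', {})  # prints certificate details, no export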
--------------------------------------------------------------------------------
/modules/subdom.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import asyncio
4 | import aiohttp
5 | from re import match
6 | from modules.export import export
7 | from modules.write_log import log_writer
8 | from modules.subdomain_modules.bevigil_subs import bevigil
9 | from modules.subdomain_modules.anubis_subs import anubisdb
10 | from modules.subdomain_modules.thminer_subs import thminer
11 | from modules.subdomain_modules.fb_subs import fb_cert
12 | from modules.subdomain_modules.virustotal_subs import virust
13 | from modules.subdomain_modules.shodan_subs import shodan
14 | from modules.subdomain_modules.certspot_subs import certspot
15 | # from modules.subdomain_modules.wayback_subs import machine
16 | from modules.subdomain_modules.crtsh_subs import crtsh
17 | from modules.subdomain_modules.htarget_subs import hackertgt
18 | from modules.subdomain_modules.binedge_subs import binedge
19 | from modules.subdomain_modules.zoomeye_subs import zoomeye
20 | from modules.subdomain_modules.netlas_subs import netlas
21 | from modules.subdomain_modules.hunter_subs import hunter
22 | from modules.subdomain_modules.urlscan_subs import urlscan
23 | from modules.subdomain_modules.alienvault_subs import alienvault
24 |
25 | R = '\033[31m' # red
26 | G = '\033[32m' # green
27 | C = '\033[36m' # cyan
28 | W = '\033[0m' # white
29 | Y = '\033[33m' # yellow
30 |
31 | found = []
32 |
33 |
34 | async def query(hostname, tout, conf_path):
35 | timeout = aiohttp.ClientTimeout(total=tout)
36 | async with aiohttp.ClientSession(timeout=timeout) as session:
37 | await asyncio.gather(
38 | bevigil(hostname, conf_path, session),
39 | anubisdb(hostname, session),
40 | thminer(hostname, session),
41 | fb_cert(hostname, conf_path, session),
42 | virust(hostname, conf_path, session),
43 | shodan(hostname, conf_path, session),
44 | certspot(hostname, session),
45 | # machine(hostname, session),
46 | hackertgt(hostname, session),
47 | crtsh(hostname, session),
48 | binedge(hostname, conf_path, session),
49 | zoomeye(hostname, conf_path, session),
50 | netlas(hostname, conf_path, session),
51 | hunter(hostname, conf_path, session),
52 | urlscan(hostname, session),
53 | alienvault(hostname, session)
54 | )
55 | await session.close()
56 |
57 |
58 | def subdomains(hostname, tout, output, data, conf_path):
59 | global found
60 | result = {}
61 |
62 | print(f'\n{Y}[!] Starting Sub-Domain Enumeration...{W}\n')
63 |
64 | loop = asyncio.new_event_loop()
65 | asyncio.set_event_loop(loop)
66 | loop.run_until_complete(query(hostname, tout, conf_path))
67 | loop.close()
68 |
69 | found = [item for item in found if item.endswith(hostname)]
70 | valid = r"^[A-Za-z0-9._~()'!*:@,;+?-]*$"
71 | found = [item for item in found if match(valid, item)]
72 | found = set(found)
73 | total = len(found)
74 |
75 | if found:
76 | print(f'\n{G}[+] {C}Results : {W}\n')
77 |         for url in list(found)[:20]:
78 |             print(url)
79 |
80 | if len(found) > 20:
81 | print(f'\n{G}[+]{C} Results truncated...{W}')
82 |
83 | print(f'\n{G}[+] {C}Total Unique Sub Domains Found : {W}{total}')
84 |
85 | if output != 'None':
86 | result['Links'] = list(found)
87 | result.update({'exported': False})
88 | data['module-Subdomain Enumeration'] = result
89 | fname = f'{output["directory"]}/subdomains.{output["format"]}'
90 | output['file'] = fname
91 | export(output, data)
92 | log_writer('[subdom] Completed')
93 |
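94 | # Usage sketch (illustrative only); the domain, timeout and config path are placeholder
95 | # assumptions -- conf_path must contain a keys.json for the API-key based sources:
96 | #
97 | #   from modules.subdom import subdomains
98 | #   subdomains('example.com', 30, 'None', {}, '/home/user/.config/finalrecon')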
--------------------------------------------------------------------------------
/modules/subdomain_modules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thewhiteh4t/FinalRecon/ac4681c72613ef88a170d8f0ac9dfaa4714ade7b/modules/subdomain_modules/__init__.py
--------------------------------------------------------------------------------
/modules/subdomain_modules/alienvault_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def alienvault(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}AlienVault{W}')
16 | url = f'https://otx.alienvault.com/api/v1/indicators/domain/{hostname}/passive_dns'
17 | try:
18 | async with session.get(url) as resp:
19 | status = resp.status
20 | if status == 200:
21 | output = await resp.text()
22 | json_data = loads(output)['passive_dns']
23 | subdomains = []
24 | for entry in json_data:
25 | subdomains.append(entry['hostname'])
26 | parent.found.extend(subdomains)
27 | print(f'{G}[+] {Y}AlienVault {W}found {C}{len(subdomains)} {W}subdomains!')
28 | else:
29 | print(await resp.text())
30 | print(f'{R}[-] {C}AlienVault Status : {W}{status}')
31 | log_writer(f'[alienvault_subs] Status = {status}, expected 200')
32 |     except Exception as exc:
33 |         print(f'{R}[-] {C}AlienVault Exception : {W}{exc}')
34 |         log_writer(f'[alienvault_subs] Exception = {exc}')
35 |     log_writer('[alienvault_subs] Completed')
36 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/anubis_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def anubisdb(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}AnubisDB{W}')
16 | url = f'https://jldc.me/anubis/subdomains/{hostname}'
17 | try:
18 | async with session.get(url) as resp:
19 | status = resp.status
20 | if status == 200:
21 | output = await resp.text()
22 | json_out = loads(output)
23 | parent.found.extend(json_out)
24 | print(f'{G}[+] {Y}AnubisDB {W}found {C}{len(json_out)} {W}subdomains!')
25 | elif status == 300:
26 | print(f'{G}[+] {Y}AnubisDB {W}found {C}0 {W}subdomains!')
27 | log_writer(f'[anubis_subs] Status = {status}, no subdomains found')
28 | else:
29 | print(await resp.text())
30 | print(f'{R}[-] {C}AnubisDB Status : {W}{status}')
31 | log_writer(f'[anubis_subs] Status = {status}, expected 200')
32 | except Exception as exc:
33 | print(f'{R}[-] {C}AnubisDB Exception : {W}{exc}')
34 | log_writer('[anubis_subs] Completed')
35 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/bevigil_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def bevigil(hostname, conf_path, session):
16 | bevigil_key = environ.get('FR_BEVIGIL_KEY')
17 |
18 | if not bevigil_key:
19 | log_writer('[bevigil_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | bevigil_key = json_load['bevigil']
26 | except KeyError:
27 | log_writer('[bevigil_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['bevigil'] = None
30 | bevigil_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if bevigil_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}BeVigil{W}')
37 | url = f"https://osint.bevigil.com/api/{hostname}/subdomains/"
38 | header = {"X-Access-Token": bevigil_key}
39 |
40 | try:
41 | async with session.get(url, headers=header) as resp:
42 | status = resp.status
43 | if status == 200:
44 |                     json_data: dict = await resp.json()
45 | subdomains = json_data.get("subdomains")
46 | print(f'{G}[+] {Y}BeVigil {W}found {C}{len(subdomains)} {W}subdomains!')
47 | parent.found.extend(subdomains)
48 | else:
49 | print(f'{R}[-] {C}BeVigil Status : {W}{status}')
50 | log_writer(f'[bevigil_subs] Status = {status}, expected 200')
51 |
52 | except Exception as exc:
53 | print(f'{R}[-] {C}BeVigil Exception : {W}{exc}')
54 | log_writer(f'[bevigil_subs] Exception = {exc}')
55 | else:
56 | print(f'{Y}[!] Skipping BeVigil : {W}API key not found!')
57 | log_writer('[bevigil_subs] API key not found')
58 | log_writer('[bevigil_subs] Completed')
59 |
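60 | # Expected shape of keys.json as read by this and the other key-based modules
61 | # (key names inferred from the lookups above; values here are placeholders, and the
62 | # FR_*_KEY environment variables checked first take precedence over this file):
63 | #
64 | #   {
65 | #       "bevigil": "YOUR_BEVIGIL_KEY",
66 | #       "binedge": null,
67 | #       "facebook": null,
68 | #       "hunter": null,
69 | #       "netlas": null,
70 | #       "shodan": null,
71 | #       "virustotal": null,
72 | #       "zoomeye": null
73 | #   }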
--------------------------------------------------------------------------------
/modules/subdomain_modules/binedge_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def binedge(hostname, conf_path, session):
16 | binedge_key = environ.get('FR_BINEDGE_KEY')
17 |
18 | if not binedge_key:
19 | log_writer('[binedge_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | binedge_key = json_load['binedge']
26 | except KeyError:
27 | log_writer('[binedge_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['binedge'] = None
30 | binedge_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if binedge_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}BinaryEdge{W}')
37 | url = f'https://api.binaryedge.io/v2/query/domains/subdomain/{hostname}'
38 | header = {'X-key': binedge_key}
39 |
40 | try:
41 | async with session.get(url, headers=header) as resp:
42 | status = resp.status
43 | if status == 200:
44 | json_data = await resp.json()
45 | subdomains = json_data['events']
46 | print(f'{G}[+] {Y}binedge {W}found {C}{len(subdomains)} {W}subdomains!')
47 | parent.found.extend(subdomains)
48 | else:
49 | print(f'{R}[-] {C}binedge Status : {W}{status}')
50 | log_writer(f'[binedge_subs] Status = {status}, expected 200')
51 |
52 | except Exception as exc:
53 | print(f'{R}[-] {C}binedge Exception : {W}{exc}')
54 | log_writer(f'[binedge_subs] Exception = {exc}')
55 | else:
56 | print(f'{Y}[!] Skipping binedge : {W}API key not found!')
57 | log_writer('[binedge_subs] API key not found')
58 | log_writer('[binedge_subs] Completed')
59 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/certspot_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def certspot(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}CertSpotter{W}')
16 | url = 'https://api.certspotter.com/v1/issuances'
17 | cs_params = {
18 | 'domain': hostname,
19 | 'expand': 'dns_names',
20 | 'include_subdomains': 'true'
21 | }
22 |
23 | try:
24 | async with session.get(url, params=cs_params) as resp:
25 | status = resp.status
26 | if status == 200:
27 | json_data = await resp.text()
28 | json_read = loads(json_data)
29 | print(f'{G}[+] {Y}Certspotter {W}found {C}{len(json_read)} {W}subdomains!')
30 | for i in range(0, len(json_read)):
31 | domains = json_read[i]['dns_names']
32 | parent.found.extend(domains)
33 | else:
34 | print(f'{R}[-] {C}CertSpotter Status : {W}{status}')
35 | log_writer(f'[certspot_subs] Status = {status}, expected 200')
36 | except Exception as exc:
37 | print(f'{R}[-] {C}CertSpotter Exception : {W}{exc}')
38 | log_writer(f'[certspot_subs] Exception = {exc}')
39 | log_writer('[certspot_subs] Completed')
40 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/crtsh_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def crtsh(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}crt.sh{W}')
16 | url = f'https://crt.sh/?dNSName=%25.{hostname}&output=json'
17 |
18 | try:
19 | async with session.get(url) as resp:
20 | status = resp.status
21 | if status == 200:
22 | data = await resp.text()
23 | data_json = loads(data)
24 | tmp_list = []
25 | for entry in data_json:
26 | subdomain = entry['name_value']
27 | tmp_list.append(subdomain)
28 | print(f'{G}[+] {Y}crt.sh {W}found {C}{len(tmp_list)} {W}subdomains!')
29 | parent.found.extend(tmp_list)
30 | else:
31 |                     print(f'{R}[-] {C}crt.sh Status : {W}{status}')
32 |                     log_writer(f'[crtsh_subs] Status = {status}, expected 200')
33 | except Exception as exc:
34 | print(f'{R}[-] {C}crtsh Exception : {W}{exc}')
35 | log_writer(f'[crtsh_subs] Exception = {exc}')
36 | log_writer('[crtsh_subs] Completed')
37 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/fb_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def fb_cert(hostname, conf_path, session):
16 | fb_key = environ.get('FR_FB_KEY')
17 |
18 | if not fb_key:
19 | log_writer('[fb_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | fb_key = json_load['facebook']
26 | except KeyError:
27 | log_writer('[fb_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['facebook'] = None
30 | fb_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if fb_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}Facebook{W}')
37 | url = 'https://graph.facebook.com/certificates'
38 | fb_params = {
39 | 'query': hostname,
40 | 'fields': 'domains',
41 | 'access_token': fb_key
42 | }
43 | try:
44 | async with session.get(url, params=fb_params) as resp:
45 | status = resp.status
46 | if status == 200:
47 | json_data = await resp.text()
48 | json_read = loads(json_data)
49 | domains = json_read['data']
50 | print(f'{G}[+] {Y}Facebook {W}found {C}{len(domains)} {W}subdomains!')
51 | for i in range(0, len(domains)):
52 | parent.found.extend(json_read['data'][i]['domains'])
53 | else:
54 | print(f'{R}[-] {C}Facebook Status : {W}{status}')
55 | log_writer(f'[fb_subs] Status = {status}, expected 200')
56 | except Exception as exc:
57 | print(f'{R}[-] {C}Facebook Exception : {W}{exc}')
58 | log_writer(f'[fb_subs] Exception = {exc}')
59 | else:
60 | print(f'{Y}[!] Skipping Facebook : {W}API key not found!')
61 | log_writer('[fb_subs] API key not found')
62 | log_writer('[fb_subs] Completed')
63 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/htarget_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import modules.subdom as parent
4 | from modules.write_log import log_writer
5 |
6 | R = '\033[31m' # red
7 | G = '\033[32m' # green
8 | C = '\033[36m' # cyan
9 | W = '\033[0m' # white
10 | Y = '\033[33m' # yellow
11 |
12 |
13 | async def hackertgt(hostname, session):
14 | print(f'{Y}[!] {C}Requesting {G}HackerTarget{W}')
15 | url = f'https://api.hackertarget.com/hostsearch/?q={hostname}'
16 | try:
17 | async with session.get(url) as resp:
18 | status = resp.status
19 | if status == 200:
20 | data = await resp.text()
21 | data_list = data.split('\n')
22 | tmp_list = []
23 | for line in data_list:
24 | subdomain = line.split(',')[0]
25 | tmp_list.append(subdomain)
26 | print(f'{G}[+] {Y}HackerTarget {W}found {C}{len(tmp_list)} {W}subdomains!')
27 | parent.found.extend(tmp_list)
28 | else:
29 | print(f'{R}[-] {C}HackerTarget Status : {W}{status}')
30 | log_writer(f'[htarget_subs] Status = {status}, expected 200')
31 | except Exception as exc:
32 | print(f'{R}[-] {C}HackerTarget Exception : {W}{exc}')
33 | log_writer(f'[htarget_subs] Exception = {exc}')
34 | log_writer('[htarget_subs] Completed')
35 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/hunter_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from base64 import b64encode
5 | from json import loads, dumps
6 | import modules.subdom as parent
7 | from modules.write_log import log_writer
8 | from datetime import datetime, timedelta
9 |
10 | R = '\033[31m' # red
11 | G = '\033[32m' # green
12 | C = '\033[36m' # cyan
13 | W = '\033[0m' # white
14 | Y = '\033[33m' # yellow
15 |
16 |
17 | async def hunter(hostname, conf_path, session):
18 | hunter_key = environ.get('FR_HUNTER_KEY')
19 |
20 | if not hunter_key:
21 | log_writer('[hunter_subs] key missing in env')
22 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
23 | json_read = keyfile.read()
24 |
25 | json_load = loads(json_read)
26 | try:
27 | hunter_key = json_load['hunter']
28 | except KeyError:
29 | log_writer('[hunter_subs] key missing in keys.json')
30 | with open(f'{conf_path}/keys.json', 'w') as outfile:
31 | json_load['hunter'] = None
32 | hunter_key = None
33 | outfile.write(
34 | dumps(json_load, sort_keys=True, indent=4)
35 | )
36 |
37 | if hunter_key is not None:
38 | print(f'{Y}[!] {C}Requesting {G}Hunter{W}')
39 |         url = 'https://api.hunter.how/search'
40 | query = f'domain.suffix=="{hostname}"'
41 | query64 = b64encode(query.encode()).decode()
42 | start_year = datetime.today().year - 1
43 | start_month = datetime.today().month
44 | start_day = datetime.today().day
45 | try:
46 | start_date = datetime.strptime(
47 | f'{start_year}-{start_month}-{start_day}', '%Y-%m-%d').strftime('%Y-%m-%d')
48 | except ValueError:
49 | # handle leap year
50 | start_date = datetime.strptime(
51 | f'{start_year}-{start_month}-{start_day - 1}', '%Y-%m-%d').strftime('%Y-%m-%d')
52 | end_date = (datetime.today() - timedelta(days=2)).strftime('%Y-%m-%d')
53 |
54 | payload = {
55 | 'api-key': hunter_key,
56 | 'query': query64,
57 | 'page': 1,
58 | 'page_size': 1000,
59 | 'start_time': start_date,
60 | 'end_time': end_date
61 | }
62 | try:
63 | async with session.get(url, params=payload) as resp:
64 | status = resp.status
65 | if status == 200:
66 | json_data = await resp.json()
67 | resp_code = json_data['code']
68 | if resp_code != 200:
69 | resp_msg = json_data['message']
70 | print(f'{R}[-] {C}Hunter Status : {W}{resp_code}, {resp_msg}')
71 | log_writer(f'[hunter_subs] Status = {resp_code}, expected 200')
72 | return
73 | subdomain_list = json_data['data']['list']
74 | subdomains = []
75 | for entry in subdomain_list:
76 | subdomains.append(entry['domain'])
77 | print(f'{G}[+] {Y}hunter {W}found {C}{len(subdomains)} {W}subdomains!')
78 | parent.found.extend(subdomains)
79 | else:
80 | print(f'{R}[-] {C}Hunter Status : {W}{status}')
81 | log_writer(f'[hunter_subs] Status = {status}, expected 200')
82 | except Exception as exc:
83 | print(f'{R}[-] {C}Hunter Exception : {W}{exc}')
84 | log_writer(f'[hunter_subs] Exception = {exc}')
85 | else:
86 | print(f'{Y}[!] Skipping hunter : {W}API key not found!')
87 | log_writer('[hunter_subs] API key not found')
88 | log_writer('[hunter_subs] Completed')
89 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/netlas_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def netlas(hostname, conf_path, session):
16 | netlas_key = environ.get('FR_NETLAS_KEY')
17 |
18 | if not netlas_key:
19 | log_writer('[netlas_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | netlas_key = json_load['netlas']
26 | except KeyError:
27 | log_writer('[netlas_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['netlas'] = None
30 | netlas_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if netlas_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}Netlas{W}')
37 |         url = 'https://app.netlas.io/api/domains/download/'
38 | header = {'X-API-Key': netlas_key}
39 | payload = {
40 | 'q': f'domain: *.{hostname}',
41 | 'fields': ['domain'],
42 | 'source_type': 'include',
43 | 'size': '200'
44 | }
45 |
46 | try:
47 | async with session.post(url, headers=header, data=payload) as resp:
48 | status = resp.status
49 | if status == 200:
50 | json_data = loads(await resp.text())
51 | subdomains = []
52 | for entry in json_data:
53 | subdomain = entry['data']['domain']
54 | subdomains.append(subdomain)
55 | print(f'{G}[+] {Y}netlas {W}found {C}{len(subdomains)} {W}subdomains!')
56 | parent.found.extend(subdomains)
57 | else:
58 | print(f'{R}[-] {C}netlas Status : {W}{status}')
59 | log_writer(f'[netlas_subs] Status = {status}, expected 200')
60 |
61 | except Exception as exc:
62 | print(f'{R}[-] {C}netlas Exception : {W}{exc}')
63 | log_writer(f'[netlas_subs] Exception = {exc}')
64 | else:
65 | print(f'{Y}[!] Skipping netlas : {W}API key not found!')
66 | log_writer('[netlas_subs] API key not found')
67 | log_writer('[netlas_subs] Completed')
68 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/shodan_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def shodan(hostname, conf_path, session):
16 | sho_key = environ.get('FR_SHODAN_KEY')
17 |
18 | if not sho_key:
19 | log_writer('[shodan_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | sho_key = json_load['shodan']
26 | except KeyError:
27 | log_writer('[shodan_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['shodan'] = None
30 | sho_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if sho_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}Shodan{W}')
37 | url = f'https://api.shodan.io/dns/domain/{hostname}?key={sho_key}'
38 |
39 | try:
40 | async with session.get(url) as resp:
41 | status = resp.status
42 | if status == 200:
43 | json_data = await resp.text()
44 | json_read = loads(json_data)
45 | domains = json_read['subdomains']
46 | tmp_list = []
47 | for i in range(0, len(domains)):
48 | tmp_list.append(f'{domains[i]}.{hostname}')
49 | print(f'{G}[+] {Y}Shodan {W}found {C}{len(tmp_list)} {W}subdomains!')
50 | parent.found.extend(tmp_list)
51 | else:
52 | print(f'{R}[-] {C}Shodan Status : {W}{status}')
53 | log_writer(f'[shodan_subs] Status = {status}, expected 200')
54 | except Exception as exc:
55 | print(f'{R}[-] {C}Shodan Exception : {W}{exc}')
56 | log_writer(f'[shodan_subs] Exception = {exc}')
57 | else:
58 | print(f'{Y}[!] Skipping Shodan : {W}API key not found!')
59 | log_writer('[shodan_subs] API key not found')
60 | log_writer('[shodan_subs] Completed')
61 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/sonar_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def sonar(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}Sonar{W}')
16 | url = f'https://sonar.omnisint.io/subdomains/{hostname}'
17 | try:
18 | async with session.get(url) as resp:
19 | status = resp.status
20 | if status == 200:
21 | json_data = await resp.text()
22 | json_read = loads(json_data)
23 | print(f'{G}[+] {Y}Sonar {W}found {C}{len(json_read)} {W}subdomains!')
24 | parent.found.extend(json_read)
25 | else:
26 | print(f'{R}[-] {C}Sonar Status : {W}{status}')
27 | log_writer(f'[sonar_subs] Status = {status}, expected 200')
28 | except Exception as exc:
29 | print(f'{R}[-] {C}Sonar Exception : {W}{exc}')
30 | log_writer(f'[sonar_subs] Exception = {exc}')
31 | log_writer('[sonar_subs] Completed')
32 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/thcrowd_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def thcrowd(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}ThreatCrowd{W}')
16 | url = 'https://www.threatcrowd.org/searchApi/v2/domain/report/'
17 | thc_params = {
18 | 'domain': hostname
19 | }
20 | try:
21 | async with session.get(url, params=thc_params) as resp:
22 | status = resp.status
23 | if status == 200:
24 | output = await resp.text()
25 | json_out = loads(output)
26 | if json_out['response_code'] == '0':
27 | pass
28 | else:
29 | subd = json_out['subdomains']
30 | print(f'{G}[+] {Y}ThreatCrowd {W}found {C}{len(subd)} {W}subdomains!')
31 | parent.found.extend(subd)
32 | else:
33 | print(f'{R}[-] {C}ThreatCrowd Status : {W}{status}')
34 | log_writer(f'[thcrowd] Status = {status}, expected 200')
35 | except Exception as exc:
36 | print(f'{R}[-] {C}ThreatCrowd Exception : {W}{exc}')
37 | log_writer(f'[thcrowd] Exception = {exc}')
38 | log_writer('[thcrowd] Completed')
39 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/thminer_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def thminer(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}ThreatMiner{W}')
16 | url = 'https://api.threatminer.org/v2/domain.php'
17 | thm_params = {
18 | 'q': hostname,
19 | 'rt': '5'
20 | }
21 | try:
22 | async with session.get(url, params=thm_params) as resp:
23 | status = resp.status
24 | if status == 200:
25 | output = await resp.text()
26 | json_out = loads(output)
27 | subd = json_out['results']
28 | print(f'{G}[+] {Y}ThreatMiner {W}found {C}{len(subd)} {W}subdomains!')
29 | parent.found.extend(subd)
30 | else:
31 | print(f'{R}[-] {C}ThreatMiner Status : {W}{status}')
32 | log_writer(f'[thminer_subs] Status = {status}, expected 200')
33 | except Exception as exc:
34 | print(f'{R}[-] {C}ThreatMiner Exception : {W}{exc}')
35 | log_writer(f'[thminer_subs] Exception = {exc}')
36 | log_writer('[thminer_subs] Completed')
37 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/urlscan_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from json import loads
4 | import modules.subdom as parent
5 | from modules.write_log import log_writer
6 |
7 | R = '\033[31m' # red
8 | G = '\033[32m' # green
9 | C = '\033[36m' # cyan
10 | W = '\033[0m' # white
11 | Y = '\033[33m' # yellow
12 |
13 |
14 | async def urlscan(hostname, session):
15 | print(f'{Y}[!] {C}Requesting {G}UrlScan{W}')
16 | url = f'https://urlscan.io/api/v1/search/?q=domain:{hostname}'
17 | try:
18 | async with session.get(url) as resp:
19 | status = resp.status
20 | if status == 200:
21 | output = await resp.text()
22 | json_data = loads(output)['results']
23 | subdomains = []
24 | for entry in json_data:
25 | subdomains.append(entry['task']['domain'])
26 | parent.found.extend(subdomains)
27 | print(f'{G}[+] {Y}UrlScan {W}found {C}{len(subdomains)} {W}subdomains!')
28 | else:
29 | print(await resp.text())
30 | print(f'{R}[-] {C}UrlScan Status : {W}{status}')
31 | log_writer(f'[urlscan_subs] Status = {status}, expected 200')
32 |     except Exception as exc:
33 |         print(f'{R}[-] {C}UrlScan Exception : {W}{exc}')
34 |         log_writer(f'[urlscan_subs] Exception = {exc}')
35 |     log_writer('[urlscan_subs] Completed')
36 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/virustotal_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def virust(hostname, conf_path, session):
16 | vt_key = environ.get('FR_VT_KEY')
17 |
18 | if not vt_key:
19 | log_writer('[virustotal_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | vt_key = json_load['virustotal']
26 | except KeyError:
27 | log_writer('[virustotal_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['virustotal'] = None
30 | vt_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if vt_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}VirusTotal{W}')
37 | url = f'https://www.virustotal.com/api/v3/domains/{hostname}/subdomains'
38 | vt_headers = {
39 | 'x-apikey': vt_key
40 | }
41 | try:
42 | async with session.get(url, headers=vt_headers) as resp:
43 | status = resp.status
44 | if status == 200:
45 | json_data = await resp.text()
46 | json_read = loads(json_data)
47 | domains = json_read['data']
48 | tmp_list = []
49 | for i in range(0, len(domains)):
50 | tmp_list.append(domains[i]['id'])
51 | print(f'{G}[+] {Y}VirusTotal {W}found {C}{len(tmp_list)} {W}subdomains!')
52 | parent.found.extend(tmp_list)
53 | else:
54 | print(f'{R}[-] {C}VirusTotal Status : {W}{status}')
55 | log_writer(f'[virustotal_subs] Status = {status}')
56 | except Exception as exc:
57 | print(f'{R}[-] {C}VirusTotal Exception : {W}{exc}')
58 | log_writer(f'[virustotal_subs] Exception = {exc}')
59 | else:
60 | print(f'{Y}[!] Skipping VirusTotal : {W}API key not found!')
61 | log_writer('[virustotal_subs] API key not found')
62 | log_writer('[virustotal_subs] Completed')
63 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/wayback_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import modules.subdom as parent
4 | from modules.write_log import log_writer
5 |
6 | R = '\033[31m' # red
7 | G = '\033[32m' # green
8 | C = '\033[36m' # cyan
9 | W = '\033[0m' # white
10 | Y = '\033[33m' # yellow
11 |
12 |
13 | async def machine(hostname, session):
14 | print(f'{Y}[!] {C}Requesting {G}Wayback{W}')
15 | url = f'http://web.archive.org/cdx/search/cdx?url=*.{hostname}/*&output=txt&fl=original&collapse=urlkey'
16 | try:
17 | async with session.get(url) as resp:
18 | status = resp.status
19 | if status == 200:
20 | raw_data = await resp.text()
21 | lines = raw_data.split('\n')
22 | tmp_list = []
23 | for line in lines:
24 | subdomain = line.replace('http://', '').replace('https://', '').split('/')[0].split(':')[0]
25 | if len(subdomain) > len(hostname):
26 | tmp_list.append(subdomain)
27 | print(f'{G}[+] {Y}Wayback {W}found {C}{len(tmp_list)} {W}subdomains!')
28 | parent.found.extend(tmp_list)
29 | else:
30 | print(f'{R}[-] {C}Wayback Status : {W}{status}')
31 | log_writer(f'[wayback_subs] Status = {status}, expected 200')
32 | except Exception as exc:
33 | print(f'{R}[-] {C}Wayback Exception : {W}{exc}')
34 | log_writer(f'[wayback_subs] Exception = {exc}')
35 | log_writer('[wayback_subs] Completed')
36 |
--------------------------------------------------------------------------------
/modules/subdomain_modules/zoomeye_subs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import environ
4 | from json import loads, dumps
5 | import modules.subdom as parent
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def zoomeye(hostname, conf_path, session):
16 | zoomeye_key = environ.get('FR_ZOOMEYE_KEY')
17 |
18 | if not zoomeye_key:
19 | log_writer('[zoomeye_subs] key missing in env')
20 | with open(f'{conf_path}/keys.json', 'r') as keyfile:
21 | json_read = keyfile.read()
22 |
23 | json_load = loads(json_read)
24 | try:
25 | zoomeye_key = json_load['zoomeye']
26 | except KeyError:
27 | log_writer('[zoomeye_subs] key missing in keys.json')
28 | with open(f'{conf_path}/keys.json', 'w') as outfile:
29 | json_load['zoomeye'] = None
30 | zoomeye_key = None
31 | outfile.write(
32 | dumps(json_load, sort_keys=True, indent=4)
33 | )
34 |
35 | if zoomeye_key is not None:
36 | print(f'{Y}[!] {C}Requesting {G}ZoomEye{W}')
37 | url = f'https://api.zoomeye.hk/domain/search?q={hostname}&type=0'
38 | header = {
39 | 'API-KEY': zoomeye_key,
40 | 'User-Agent': 'curl'
41 | }
42 |
43 | try:
44 | async with session.get(url, headers=header) as resp:
45 | status = resp.status
46 | if status == 200:
47 | json_data = await resp.json()
48 | subdomain_list = json_data['list']
49 | subdomains = [subd['name'] for subd in subdomain_list]
50 | print(f'{G}[+] {Y}zoomeye {W}found {C}{len(subdomains)} {W}subdomains!')
51 | parent.found.extend(subdomains)
52 | else:
53 | print(f'{R}[-] {C}zoomeye Status : {W}{status}')
54 | log_writer(f'[zoomeye_subs] Status = {status}, expected 200')
55 |
56 | except Exception as exc:
57 | print(f'{R}[-] {C}zoomeye Exception : {W}{exc}')
58 | log_writer(f'[zoomeye_subs] Exception = {exc}')
59 | else:
60 | print(f'{Y}[!] Skipping zoomeye : {W}API key not found!')
61 | log_writer('[zoomeye_subs] API key not found')
62 | log_writer('[zoomeye_subs] Completed')
63 |
--------------------------------------------------------------------------------
/modules/wayback.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import json
4 | import requests
5 | from datetime import date
6 | from modules.export import export
7 | from modules.write_log import log_writer
8 |
9 | R = '\033[31m' # red
10 | G = '\033[32m' # green
11 | C = '\033[36m' # cyan
12 | W = '\033[0m' # white
13 | Y = '\033[33m' # yellow
14 |
15 |
16 | def timetravel(target, data, output):
17 | wayback_total = []
18 | result = {}
19 | is_avail = False
20 | domain_query = f'{target}/*'
21 |
22 | curr_yr = date.today().year
23 | last_yr = curr_yr - 5
24 |
25 | print(f'\n{Y}[!] Starting WayBack Machine...{W}\n')
26 | print(f'{Y}[!] {C}Checking Availability on Wayback Machine{W}', end='', flush=True)
27 | wm_avail = 'http://archive.org/wayback/available'
28 | avail_data = {'url': target}
29 |
30 | try:
31 | check_rqst = requests.get(wm_avail, params=avail_data, timeout=10)
32 | check_sc = check_rqst.status_code
33 | if check_sc == 200:
34 | check_data = check_rqst.text
35 | json_chk_data = json.loads(check_data)
36 | avail_data = json_chk_data['archived_snapshots']
37 | if avail_data:
38 | print(f'{G}{"[".rjust(5, ".")} Available ]{W}')
39 | else:
40 | print(f'{R}{"[".rjust(5, ".")} N/A ]{W}')
41 | else:
42 | print(f'\n{R}[-] Status : {C}{check_sc}{W}')
43 | log_writer(f'[wayback] Status = {check_sc}, expected 200')
44 |
45 | if avail_data:
46 | print(f'{Y}[!] {C}Fetching URLs{W}', end='', flush=True)
47 | wm_url = 'http://web.archive.org/cdx/search/cdx'
48 |
49 | payload = {
50 | 'url': domain_query,
51 | 'fl': 'original',
52 | 'fastLatest': 'true',
53 | 'from': str(last_yr),
54 | 'to': str(curr_yr)
55 | }
56 |
57 | rqst = requests.get(wm_url, params=payload, timeout=10)
58 | r_sc = rqst.status_code
59 | if r_sc == 200:
60 | r_data = rqst.text
61 |                 if r_data:
62 | r_data = set(r_data.split('\n'))
63 | print(f'{G}{"[".rjust(5, ".")} {len(r_data)} ]{W}')
64 | wayback_total.extend(r_data)
65 |
66 | if output != 'None':
67 | result.update({'links': list(r_data)})
68 | result.update({'exported': False})
69 | data['module-wayback_urls'] = result
70 | fname = f'{output["directory"]}/wayback_urls.{output["format"]}'
71 | output['file'] = fname
72 | export(output, data)
73 | else:
74 | print(f'{R}{"[".rjust(5, ".")} Not Found ]{W}')
75 | else:
76 | print(f'{R}{"[".rjust(5, ".")} {r_sc} ]{W}')
77 | except Exception as exc:
78 | print(f'\n{R}[-] Exception : {C}{exc}{W}')
79 | log_writer(f'[wayback] Exception = {exc}')
80 | log_writer('[wayback] Completed')
81 |
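82 | # Usage sketch (illustrative only); the target domain and the 'None' output flag
83 | # are placeholder assumptions:
84 | #
85 | #   from modules.wayback import timetravel
86 | #   timetravel('example.com', {}, 'None')  # prints availability and URL count, no export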
--------------------------------------------------------------------------------
/modules/whois.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import asyncio
4 | from json import load
5 | from modules.export import export
6 | from modules.write_log import log_writer
7 |
8 | R = '\033[31m' # red
9 | G = '\033[32m' # green
10 | C = '\033[36m' # cyan
11 | W = '\033[0m' # white
12 | Y = '\033[33m' # yellow
13 |
14 |
15 | async def get_whois(domain, server):
16 | whois_result = {}
17 | reader, writer = await asyncio.open_connection(server, 43)
18 | writer.write((domain + '\r\n').encode())
19 |
20 | raw_resp = b''
21 | while True:
22 | chunk = await reader.read(4096)
23 | if not chunk:
24 | break
25 | raw_resp += chunk
26 |
27 | writer.close()
28 | await writer.wait_closed()
29 | raw_result = raw_resp.decode()
30 |
31 | if 'No match for' in raw_result:
32 |         return {'whois': raw_result.strip()}  # unregistered domain; return the registry reply as-is
33 |
34 | res_parts = raw_result.split('>>>', 1)
35 | whois_result['whois'] = res_parts[0]
36 | return whois_result
37 |
38 |
39 | def whois_lookup(domain, tld, script_path, output, data):
40 | result = {}
41 | db_path = f'{script_path}/whois_servers.json'
42 | with open(db_path, 'r') as db_file:
43 | db_json = load(db_file)
44 | print(f'\n{Y}[!] Whois Lookup : {W}\n')
45 |
46 | try:
47 | whois_sv = db_json[tld]
48 | whois_info = asyncio.run(get_whois(f'{domain}.{tld}', whois_sv))
49 | print(whois_info['whois'])
50 | result.update(whois_info)
51 | except KeyError:
52 | print(f'{R}[-] Error : {C}This domain suffix is not supported.{W}')
53 | result.update({'Error': 'This domain suffix is not supported.'})
54 | log_writer('[whois] Exception = This domain suffix is not supported.')
55 | except Exception as exc:
56 | print(f'{R}[-] Error : {C}{exc}{W}')
57 | result.update({'Error': str(exc)})
58 | log_writer(f'[whois] Exception = {exc}')
59 |
60 | result.update({'exported': False})
61 |
62 | if output != 'None':
63 | fname = f'{output["directory"]}/whois.{output["format"]}'
64 | output['file'] = fname
65 | data['module-whois'] = result
66 | export(output, data)
67 | log_writer('[whois] Completed')
68 |
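69 | # Usage sketch (illustrative only); the domain, TLD and paths are placeholder
70 | # assumptions -- script_path must contain whois_servers.json:
71 | #
72 | #   from modules.whois import whois_lookup
73 | #   whois_lookup('example', 'com', '/opt/finalrecon', 'None', {})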
--------------------------------------------------------------------------------
/modules/write_log.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import settings
3 |
4 |
5 | def log_writer(message):
6 | logging.basicConfig(
7 | filename=settings.log_file_path,
8 | encoding='utf-8',
9 | level=logging.INFO,
10 | format='[%(asctime)s] : %(message)s',
11 | datefmt='%m/%d/%Y %I:%M:%S %p'
12 | )
13 | logging.info(message)
14 |
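15 | # Note: logging.basicConfig only configures the root logger on its first effective call;
16 | # later calls are no-ops (unless force=True), so invoking it on every log_writer() call is harmless.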
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | bs4
3 | lxml
4 | dnspython
5 | aiohttp
6 | tldextract
7 | cryptography
--------------------------------------------------------------------------------
/settings.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | from os import getenv, path, makedirs
4 | from json import loads
5 | from shutil import copytree
6 |
7 | home = getenv('HOME')
8 | usr_data = f'{home}/.local/share/finalrecon/dumps/'
9 | conf_path = f'{home}/.config/finalrecon'
10 | path_to_script = path.dirname(path.realpath(__file__))
11 | src_conf_path = f'{path_to_script}/conf/'
12 | meta_file_path = f'{path_to_script}/metadata.json'
13 | keys_file_path = f'{conf_path}/keys.json'
14 | conf_file_path = f'{conf_path}/config.json'
15 | log_file_path = f'{home}/.local/share/finalrecon/run.log'
16 |
17 | if not path.exists(conf_path):
18 | copytree(src_conf_path, conf_path, dirs_exist_ok=True)
19 |
20 | if not path.exists(usr_data):
21 | makedirs(usr_data, exist_ok=True)
22 |
23 | while True:
24 | with open(conf_file_path, 'r') as config_file:
25 | config_read = config_file.read()
26 | config_json = loads(config_read)
27 |
28 | try:
29 | timeout = config_json['common']['timeout']
30 | custom_dns = config_json['common']['dns_servers']
31 | ssl_port = config_json['ssl_cert']['ssl_port']
32 | port_scan_th = config_json['port_scan']['threads']
33 | dir_enum_th = config_json['dir_enum']['threads']
34 | dir_enum_redirect = config_json['dir_enum']['redirect']
35 | dir_enum_sslv = config_json['dir_enum']['verify_ssl']
36 | dir_enum_ext = config_json['dir_enum']['extension']
37 | dir_enum_wlist = f'{path_to_script}/wordlists/dirb_common.txt'
38 | export_fmt = config_json['export']['format']
39 | except KeyError:
40 | # reset conf.json
41 | copytree(src_conf_path, conf_path, dirs_exist_ok=True)
42 | else:
43 | break
44 |
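45 | # Expected shape of config.json as read by the loop above (key names and nesting are
46 | # inferred from the lookups; the values shown are illustrative placeholders only):
47 | #
48 | #   {
49 | #       "common": {"timeout": 30, "dns_servers": "1.1.1.1"},
50 | #       "ssl_cert": {"ssl_port": 443},
51 | #       "port_scan": {"threads": 50},
52 | #       "dir_enum": {"threads": 50, "redirect": false, "verify_ssl": false, "extension": ""},
53 | #       "export": {"format": "txt"}
54 | #   }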
--------------------------------------------------------------------------------
/whois_servers.json:
--------------------------------------------------------------------------------
1 | {
2 | "aarp": "whois.nic.aarp",
3 | "abb": "whois.nic.abb",
4 | "abbott": "whois.nic.abbott",
5 | "abbvie": "whois.nic.abbvie",
6 | "abc": "whois.nic.abc",
7 | "abogado": "whois.nic.abogado",
8 | "abudhabi": "whois.nic.abudhabi",
9 | "ac": "whois.nic.ac",
10 | "academy": "whois.nic.academy",
11 | "accountant": "whois.nic.accountant",
12 | "accountants": "whois.nic.accountants",
13 | "aco": "whois.nic.aco",
14 | "actor": "whois.nic.actor",
15 | "ads": "whois.nic.google",
16 | "adult": "whois.nic.adult",
17 | "ae": "whois.aeda.net.ae",
18 | "aeg": "whois.nic.aeg",
19 | "aero": "whois.aero",
20 | "af": "whois.nic.af",
21 | "afl": "whois.nic.afl",
22 | "africa": "whois.nic.africa",
23 | "ag": "whois.nic.ag",
24 | "agakhan": "whois.nic.agakhan",
25 | "agency": "whois.nic.agency",
26 | "ai": "whois.nic.ai",
27 | "airbus": "whois.nic.airbus",
28 | "airforce": "whois.nic.airforce",
29 | "airtel": "whois.nic.airtel",
30 | "akdn": "whois.nic.akdn",
31 | "alibaba": "whois.nic.alibaba",
32 | "alipay": "whois.nic.alipay",
33 | "allfinanz": "whois.nic.allfinanz",
34 | "allstate": "whois.nic.allstate",
35 | "ally": "whois.nic.ally",
36 | "alsace": "whois.nic.alsace",
37 | "alstom": "whois.nic.alstom",
38 | "am": "whois.amnic.net",
39 | "amazon": "whois.nic.amazon",
40 | "americanfamily": "whois.nic.americanfamily",
41 | "amfam": "whois.nic.amfam",
42 | "amsterdam": "whois.nic.amsterdam",
43 | "android": "whois.nic.google",
44 | "anquan": "whois.teleinfo.cn",
45 | "anz": "whois.nic.anz",
46 | "aol": "whois.nic.aol",
47 | "apartments": "whois.nic.apartments",
48 | "app": "whois.nic.google",
49 | "apple": "whois.nic.apple",
50 | "aquarelle": "whois.nic.aquarelle",
51 | "ar": "whois.nic.ar",
52 | "arab": "whois.nic.arab",
53 | "archi": "whois.nic.archi",
54 | "army": "whois.nic.army",
55 | "arpa": "whois.iana.org",
56 | "art": "whois.nic.art",
57 | "arte": "whois.nic.arte",
58 | "as": "whois.nic.as",
59 | "asda": "whois.nic.asda",
60 | "asia": "whois.nic.asia",
61 | "associates": "whois.nic.associates",
62 | "at": "whois.nic.at",
63 | "attorney": "whois.nic.attorney",
64 | "au": "whois.auda.org.au",
65 | "auction": "whois.nic.auction",
66 | "audi": "whois.nic.audi",
67 | "audible": "whois.nic.audible",
68 | "audio": "whois.nic.audio",
69 | "auspost": "whois.nic.auspost",
70 | "author": "whois.nic.author",
71 | "auto": "whois.nic.auto",
72 | "autos": "whois.nic.autos",
73 | "avianca": "whois.nic.avianca",
74 | "aw": "whois.nic.aw",
75 | "aws": "whois.nic.aws",
76 | "ax": "whois.ax",
77 | "baby": "whois.nic.baby",
78 | "baidu": "whois.gtld.knet.cn",
79 | "band": "whois.nic.band",
80 | "bank": "whois.nic.bank",
81 | "bar": "whois.nic.bar",
82 | "barcelona": "whois.nic.barcelona",
83 | "barclaycard": "whois.nic.barclaycard",
84 | "barclays": "whois.nic.barclays",
85 | "barefoot": "whois.nic.barefoot",
86 | "bargains": "whois.nic.bargains",
87 | "basketball": "whois.nic.basketball",
88 | "bauhaus": "whois.nic.bauhaus",
89 | "bayern": "whois.nic.bayern",
90 | "bbc": "whois.nic.bbc",
91 | "bbt": "whois.nic.bbt",
92 | "bbva": "whois.nic.bbva",
93 | "bcg": "whois.nic.bcg",
94 | "bcn": "whois.nic.bcn",
95 | "be": "whois.dns.be",
96 | "beats": "whois.nic.beats",
97 | "beauty": "whois.nic.beauty",
98 | "beer": "whois.nic.beer",
99 | "bentley": "whois.nic.bentley",
100 | "berlin": "whois.nic.berlin",
101 | "best": "whois.nic.best",
102 | "bestbuy": "whois.nic.bestbuy",
103 | "bet": "whois.nic.bet",
104 | "bf": "whois.registre.bf",
105 | "bg": "whois.register.bg",
106 | "bh": "whois.nic.bh",
107 | "bi": "whois1.nic.bi",
108 | "bible": "whois.nic.bible",
109 | "bid": "whois.nic.bid",
110 | "bike": "whois.nic.bike",
111 | "bingo": "whois.nic.bingo",
112 | "bio": "whois.nic.bio",
113 | "biz": "whois.nic.biz",
114 | "bj": "whois.nic.bj",
115 | "black": "whois.nic.black",
116 | "blackfriday": "whois.nic.blackfriday",
117 | "blockbuster": "whois.nic.blockbuster",
118 | "blog": "whois.nic.blog",
119 | "blue": "whois.nic.blue",
120 | "bm": "whois.afilias-srs.net",
121 | "bms": "whois.nic.bms",
122 | "bmw": "whois.nic.bmw",
123 | "bn": "whois.bnnic.bn",
124 | "bnpparibas": "whois.nic.bnpparibas",
125 | "bo": "whois.nic.bo",
126 | "boats": "whois.nic.boats",
127 | "boehringer": "whois.nic.boehringer",
128 | "bofa": "whois.nic.bofa",
129 | "bom": "whois.gtlds.nic.br",
130 | "bond": "whois.nic.bond",
131 | "boo": "whois.nic.google",
132 | "book": "whois.nic.book",
133 | "bosch": "whois.nic.bosch",
134 | "bostik": "whois.nic.bostik",
135 | "boston": "whois.nic.boston",
136 | "bot": "whois.nic.bot",
137 | "boutique": "whois.nic.boutique",
138 | "box": "whois.nic.box",
139 | "br": "whois.registro.br",
140 | "bradesco": "whois.nic.bradesco",
141 | "bridgestone": "whois.nic.bridgestone",
142 | "broadway": "whois.nic.broadway",
143 | "broker": "whois.nic.broker",
144 | "brother": "whois.nic.brother",
145 | "brussels": "whois.nic.brussels",
146 | "build": "whois.nic.build",
147 | "builders": "whois.nic.builders",
148 | "business": "whois.nic.business",
149 | "buy": "whois.nic.buy",
150 | "bw": "whois.nic.net.bw",
151 | "by": "whois.cctld.by",
152 | "bz": "whois.afilias-grs.info",
153 | "bzh": "whois.nic.bzh",
154 | "ca": "whois.cira.ca",
155 | "cab": "whois.nic.cab",
156 | "cafe": "whois.nic.cafe",
157 | "cal": "whois.nic.google",
158 | "call": "whois.nic.call",
159 | "cam": "whois.nic.cam",
160 | "camera": "whois.nic.camera",
161 | "camp": "whois.nic.camp",
162 | "canon": "whois.nic.canon",
163 | "capetown": "whois.nic.capetown",
164 | "capital": "whois.nic.capital",
165 | "capitalone": "whois.nic.capitalone",
166 | "car": "whois.nic.car",
167 | "cards": "whois.nic.cards",
168 | "care": "whois.nic.care",
169 | "career": "whois.nic.career",
170 | "careers": "whois.nic.careers",
171 | "cars": "whois.nic.cars",
172 | "casa": "whois.nic.casa",
173 | "case": "whois.nic.case",
174 | "cash": "whois.nic.cash",
175 | "casino": "whois.nic.casino",
176 | "cat": "whois.nic.cat",
177 | "catering": "whois.nic.catering",
178 | "catholic": "whois.nic.catholic",
179 | "cba": "whois.nic.cba",
180 | "cbs": "whois.nic.cbs",
181 | "cc": "ccwhois.verisign-grs.com",
182 | "cd": "whois.nic.cd",
183 | "center": "whois.nic.center",
184 | "ceo": "whois.nic.ceo",
185 | "cern": "whois.nic.cern",
186 | "cf": "whois.dot.cf",
187 | "cfa": "whois.nic.cfa",
188 | "cfd": "whois.nic.cfd",
189 | "ch": "whois.nic.ch",
190 | "chanel": "whois.nic.chanel",
191 | "channel": "whois.nic.google",
192 | "charity": "whois.nic.charity",
193 | "chat": "whois.nic.chat",
194 | "cheap": "whois.nic.cheap",
195 | "chintai": "whois.nic.chintai",
196 | "christmas": "whois.nic.christmas",
197 | "chrome": "whois.nic.google",
198 | "church": "whois.nic.church",
199 | "ci": "whois.nic.ci",
200 | "cipriani": "whois.nic.cipriani",
201 | "circle": "whois.nic.circle",
202 | "city": "whois.nic.city",
203 | "cityeats": "whois.nic.cityeats",
204 | "cl": "whois.nic.cl",
205 | "claims": "whois.nic.claims",
206 | "cleaning": "whois.nic.cleaning",
207 | "click": "whois.nic.click",
208 | "clinic": "whois.nic.clinic",
209 | "clinique": "whois.nic.clinique",
210 | "clothing": "whois.nic.clothing",
211 | "cloud": "whois.nic.cloud",
212 | "club": "whois.nic.club",
213 | "clubmed": "whois.nic.clubmed",
214 | "cm": "whois.netcom.cm",
215 | "cn": "whois.cnnic.cn",
216 | "co": "whois.nic.co",
217 | "coach": "whois.nic.coach",
218 | "codes": "whois.nic.codes",
219 | "coffee": "whois.nic.coffee",
220 | "college": "whois.nic.college",
221 | "cologne": "whois.ryce-rsp.com",
222 | "com": "whois.verisign-grs.com",
223 | "comcast": "whois.nic.comcast",
224 | "commbank": "whois.nic.commbank",
225 | "community": "whois.nic.community",
226 | "company": "whois.nic.company",
227 | "compare": "whois.nic.compare",
228 | "computer": "whois.nic.computer",
229 | "comsec": "whois.nic.comsec",
230 | "condos": "whois.nic.condos",
231 | "construction": "whois.nic.construction",
232 | "consulting": "whois.nic.consulting",
233 | "contact": "whois.nic.contact",
234 | "contractors": "whois.nic.contractors",
235 | "cooking": "whois.nic.cooking",
236 | "cool": "whois.nic.cool",
237 | "coop": "whois.nic.coop",
238 | "corsica": "whois.nic.corsica",
239 | "country": "whois.nic.country",
240 | "coupons": "whois.nic.coupons",
241 | "courses": "whois.nic.courses",
242 | "cpa": "whois.nic.cpa",
243 | "cr": "whois.nic.cr",
244 | "credit": "whois.nic.credit",
245 | "creditcard": "whois.nic.creditcard",
246 | "creditunion": "whois.nic.creditunion",
247 | "cricket": "whois.nic.cricket",
248 | "crown": "whois.nic.crown",
249 | "crs": "whois.nic.crs",
250 | "cruise": "whois.nic.cruise",
251 | "cruises": "whois.nic.cruises",
252 | "cuisinella": "whois.nic.cuisinella",
253 | "cx": "whois.nic.cx",
254 | "cymru": "whois.nic.cymru",
255 | "cyou": "whois.nic.cyou",
256 | "cz": "whois.nic.cz",
257 | "dabur": "whois.nic.dabur",
258 | "dad": "whois.nic.google",
259 | "dance": "whois.nic.dance",
260 | "data": "whois.nic.data",
261 | "date": "whois.nic.date",
262 | "dating": "whois.nic.dating",
263 | "datsun": "whois.nic.gmo",
264 | "day": "whois.nic.google",
265 | "dclk": "whois.nic.google",
266 | "dds": "whois.nic.dds",
267 | "de": "whois.denic.de",
268 | "deal": "whois.nic.deal",
269 | "dealer": "whois.nic.dealer",
270 | "deals": "whois.nic.deals",
271 | "degree": "whois.nic.degree",
272 | "delivery": "whois.nic.delivery",
273 | "deloitte": "whois.nic.deloitte",
274 | "delta": "whois.nic.delta",
275 | "democrat": "whois.nic.democrat",
276 | "dental": "whois.nic.dental",
277 | "dentist": "whois.nic.dentist",
278 | "desi": "whois.nic.desi",
279 | "design": "whois.nic.design",
280 | "dev": "whois.nic.google",
281 | "diamonds": "whois.nic.diamonds",
282 | "diet": "whois.nic.diet",
283 | "digital": "whois.nic.digital",
284 | "direct": "whois.nic.direct",
285 | "directory": "whois.nic.directory",
286 | "discount": "whois.nic.discount",
287 | "dish": "whois.nic.dish",
288 | "diy": "whois.nic.diy",
289 | "dk": "whois.punktum.dk",
290 | "dm": "whois.dmdomains.dm",
291 | "dnp": "whois.nic.dnp",
292 | "do": "whois.nic.do",
293 | "docs": "whois.nic.google",
294 | "doctor": "whois.nic.doctor",
295 | "dog": "whois.nic.dog",
296 | "domains": "whois.nic.domains",
297 | "dot": "whois.nic.dot",
298 | "download": "whois.nic.download",
299 | "drive": "whois.nic.google",
300 | "dtv": "whois.nic.dtv",
301 | "dubai": "whois.nic.dubai",
302 | "dunlop": "whois.nic.dunlop",
303 | "durban": "whois.nic.durban",
304 | "dvag": "whois.nic.dvag",
305 | "dvr": "whois.nic.dvr",
306 | "dz": "whois.nic.dz",
307 | "earth": "whois.nic.earth",
308 | "eat": "whois.nic.google",
309 | "ec": "whois.nic.ec",
310 | "eco": "whois.nic.eco",
311 | "edeka": "whois.nic.edeka",
312 | "edu": "whois.educause.edu",
313 | "education": "whois.nic.education",
314 | "ee": "whois.tld.ee",
315 | "email": "whois.nic.email",
316 | "emerck": "whois.afilias-srs.net",
317 | "energy": "whois.nic.energy",
318 | "engineer": "whois.nic.engineer",
319 | "engineering": "whois.nic.engineering",
320 | "enterprises": "whois.nic.enterprises",
321 | "epson": "whois.nic.epson",
322 | "equipment": "whois.nic.equipment",
323 | "ericsson": "whois.nic.ericsson",
324 | "erni": "whois.nic.erni",
325 | "es": "whois.nic.es",
326 | "esq": "whois.nic.google",
327 | "estate": "whois.nic.estate",
328 | "etisalat": "whois.centralnic.com",
329 | "eu": "whois.eu",
330 | "eurovision": "whois.nic.eurovision",
331 | "eus": "whois.nic.eus",
332 | "events": "whois.nic.events",
333 | "exchange": "whois.nic.exchange",
334 | "expert": "whois.nic.expert",
335 | "exposed": "whois.nic.exposed",
336 | "express": "whois.nic.express",
337 | "extraspace": "whois.nic.extraspace",
338 | "fage": "whois.nic.fage",
339 | "fail": "whois.nic.fail",
340 | "fairwinds": "whois.nic.fairwinds",
341 | "faith": "whois.nic.faith",
342 | "family": "whois.nic.family",
343 | "fan": "whois.nic.fan",
344 | "fans": "whois.nic.fans",
345 | "farm": "whois.nic.farm",
346 | "fashion": "whois.nic.fashion",
347 | "fast": "whois.nic.fast",
348 | "fedex": "whois.nic.fedex",
349 | "feedback": "whois.nic.feedback",
350 | "ferrari": "whois.nic.ferrari",
351 | "fi": "whois.fi",
352 | "fidelity": "whois.nic.fidelity",
353 | "fido": "whois.nic.fido",
354 | "film": "whois.nic.film",
355 | "final": "whois.gtlds.nic.br",
356 | "finance": "whois.nic.finance",
357 | "financial": "whois.nic.financial",
358 | "fire": "whois.nic.fire",
359 | "firestone": "whois.nic.firestone",
360 | "firmdale": "whois.nic.firmdale",
361 | "fish": "whois.nic.fish",
362 | "fishing": "whois.nic.fishing",
363 | "fit": "whois.nic.fit",
364 | "fitness": "whois.nic.fitness",
365 | "fj": "www.whois.fj",
366 | "flights": "whois.nic.flights",
367 | "florist": "whois.nic.florist",
368 | "flowers": "whois.nic.flowers",
369 | "fly": "whois.nic.google",
370 | "fm": "whois.nic.fm",
371 | "fo": "whois.nic.fo",
372 | "foo": "whois.nic.google",
373 | "football": "whois.nic.football",
374 | "forex": "whois.nic.forex",
375 | "forsale": "whois.nic.forsale",
376 | "forum": "whois.nic.forum",
377 | "foundation": "whois.nic.foundation",
378 | "fox": "whois.nic.fox",
379 | "fr": "whois.nic.fr",
380 | "free": "whois.nic.free",
381 | "fresenius": "whois.nic.fresenius",
382 | "frl": "whois.nic.frl",
383 | "frogans": "whois.nic.frogans",
384 | "frontdoor": "whois.nic.frontdoor",
385 | "fujitsu": "whois.nic.gmo",
386 | "fun": "whois.nic.fun",
387 | "fund": "whois.nic.fund",
388 | "furniture": "whois.nic.furniture",
389 | "futbol": "whois.nic.futbol",
390 | "fyi": "whois.nic.fyi",
391 | "gal": "whois.nic.gal",
392 | "gallery": "whois.nic.gallery",
393 | "gallo": "whois.nic.gallo",
394 | "gallup": "whois.nic.gallup",
395 | "game": "whois.nic.game",
396 | "games": "whois.nic.games",
397 | "garden": "whois.nic.garden",
398 | "gay": "whois.nic.gay",
399 | "gbiz": "whois.nic.google",
400 | "gd": "whois.nic.gd",
401 | "gdn": "whois.nic.gdn",
402 | "ge": "whois.nic.ge",
403 | "gea": "whois.nic.gea",
404 | "gent": "whois.nic.gent",
405 | "genting": "whois.nic.genting",
406 | "george": "whois.nic.george",
407 | "gf": "whois.mediaserv.net",
408 | "gg": "whois.gg",
409 | "ggee": "whois.nic.ggee",
410 | "gh": "whois.nic.gh",
411 | "gi": "whois2.afilias-grs.net",
412 | "gift": "whois.uniregistry.net",
413 | "gifts": "whois.nic.gifts",
414 | "gives": "whois.nic.gives",
415 | "giving": "whois.nic.giving",
416 | "gl": "whois.nic.gl",
417 | "glass": "whois.nic.glass",
418 | "gle": "whois.nic.google",
419 | "global": "whois.nic.global",
420 | "globo": "whois.gtlds.nic.br",
421 | "gmail": "whois.nic.google",
422 | "gmbh": "whois.nic.gmbh",
423 | "gmo": "whois.nic.gmo",
424 | "gmx": "whois.nic.gmx",
425 | "godaddy": "whois.nic.godaddy",
426 | "gold": "whois.nic.gold",
427 | "goldpoint": "whois.nic.goldpoint",
428 | "golf": "whois.nic.golf",
429 | "goo": "whois.nic.gmo",
430 | "goodyear": "whois.nic.goodyear",
431 | "goog": "whois.nic.google",
432 | "google": "whois.nic.google",
433 | "gop": "whois.nic.gop",
434 | "got": "whois.nic.got",
435 | "gov": "whois.dotgov.gov",
436 | "gp": "whois.nic.gp",
437 | "gq": "whois.dominio.gq",
438 | "graphics": "whois.nic.graphics",
439 | "gratis": "whois.nic.gratis",
440 | "green": "whois.nic.green",
441 | "gripe": "whois.nic.gripe",
442 | "grocery": "whois.nic.grocery",
443 | "group": "whois.nic.group",
444 | "gs": "whois.nic.gs",
445 | "gucci": "whois.nic.gucci",
446 | "guge": "whois.nic.google",
447 | "guide": "whois.nic.guide",
448 | "guitars": "whois.nic.guitars",
449 | "guru": "whois.nic.guru",
450 | "gy": "whois.registry.gy",
451 | "hair": "whois.nic.hair",
452 | "hamburg": "whois.nic.hamburg",
453 | "hangout": "whois.nic.google",
454 | "haus": "whois.nic.haus",
455 | "hdfc": "whois.nic.hdfc",
456 | "hdfcbank": "whois.nic.hdfcbank",
457 | "healthcare": "whois.nic.healthcare",
458 | "help": "whois.nic.help",
459 | "helsinki": "whois.nic.helsinki",
460 | "here": "whois.nic.google",
461 | "hermes": "whois.nic.hermes",
462 | "hiphop": "whois.nic.hiphop",
463 | "hisamitsu": "whois.nic.gmo",
464 | "hitachi": "whois.nic.gmo",
465 | "hiv": "whois.nic.hiv",
466 | "hk": "whois.hkirc.hk",
467 | "hkt": "whois.nic.hkt",
468 | "hm": "whois.registry.hm",
469 | "hn": "whois.nic.hn",
470 | "hockey": "whois.nic.hockey",
471 | "holdings": "whois.nic.holdings",
472 | "holiday": "whois.nic.holiday",
473 | "homedepot": "whois.nic.homedepot",
474 | "homes": "whois.nic.homes",
475 | "honda": "whois.nic.honda",
476 | "horse": "whois.nic.horse",
477 | "hospital": "whois.nic.hospital",
478 | "host": "whois.nic.host",
479 | "hosting": "whois.nic.hosting",
480 | "hot": "whois.nic.hot",
481 | "house": "whois.nic.house",
482 | "how": "whois.nic.google",
483 | "hr": "whois.dns.hr",
484 | "ht": "whois.nic.ht",
485 | "hu": "whois.nic.hu",
486 | "hughes": "whois.nic.hughes",
487 | "hyundai": "whois.nic.hyundai",
488 | "ibm": "whois.nic.ibm",
489 | "icbc": "whois.nic.icbc",
490 | "ice": "whois.nic.ice",
491 | "icu": "whois.nic.icu",
492 | "id": "whois.id",
493 | "ie": "whois.weare.ie",
494 | "ifm": "whois.nic.ifm",
495 | "ikano": "whois.nic.ikano",
496 | "il": "whois.isoc.org.il",
497 | "im": "whois.nic.im",
498 | "imamat": "whois.nic.imamat",
499 | "imdb": "whois.nic.imdb",
500 | "immo": "whois.nic.immo",
501 | "immobilien": "whois.nic.immobilien",
502 | "in": "whois.registry.in",
503 | "inc": "whois.nic.inc",
504 | "industries": "whois.nic.industries",
505 | "infiniti": "whois.nic.gmo",
506 | "info": "whois.nic.info",
507 | "ing": "whois.nic.google",
508 | "ink": "whois.nic.ink",
509 | "institute": "whois.nic.institute",
510 | "insurance": "whois.nic.insurance",
511 | "insure": "whois.nic.insure",
512 | "int": "whois.iana.org",
513 | "international": "whois.nic.international",
514 | "investments": "whois.nic.investments",
515 | "io": "whois.nic.io",
516 | "iq": "whois.cmc.iq",
517 | "ir": "whois.nic.ir",
518 | "irish": "whois.nic.irish",
519 | "is": "whois.isnic.is",
520 | "ismaili": "whois.nic.ismaili",
521 | "ist": "whois.nic.ist",
522 | "istanbul": "whois.nic.istanbul",
523 | "it": "whois.nic.it",
524 | "itv": "whois.nic.itv",
525 | "jaguar": "whois.nic.jaguar",
526 | "java": "whois.nic.java",
527 | "jcb": "whois.nic.gmo",
528 | "je": "whois.je",
529 | "jeep": "whois.nic.jeep",
530 | "jetzt": "whois.nic.jetzt",
531 | "jewelry": "whois.nic.jewelry",
532 | "jio": "whois.nic.jio",
533 | "jll": "whois.nic.jll",
534 | "jobs": "whois.nic.jobs",
535 | "joburg": "whois.nic.joburg",
536 | "jot": "whois.nic.jot",
537 | "joy": "whois.nic.joy",
538 | "jp": "whois.jprs.jp",
539 | "juegos": "whois.uniregistry.net",
540 | "juniper": "whois.nic.juniper",
541 | "kaufen": "whois.nic.kaufen",
542 | "kddi": "whois.nic.kddi",
543 | "ke": "whois.kenic.or.ke",
544 | "kerryhotels": "whois.nic.kerryhotels",
545 | "kerrylogistics": "whois.nic.kerrylogistics",
546 | "kerryproperties": "whois.nic.kerryproperties",
547 | "kfh": "whois.nic.kfh",
548 | "kg": "whois.kg",
549 | "ki": "whois.nic.ki",
550 | "kia": "whois.nic.kia",
551 | "kids": "whois.nic.kids",
552 | "kim": "whois.nic.kim",
553 | "kindle": "whois.nic.kindle",
554 | "kitchen": "whois.nic.kitchen",
555 | "kiwi": "whois.nic.kiwi",
556 | "kn": "whois.nic.kn",
557 | "koeln": "whois.ryce-rsp.com",
558 | "komatsu": "whois.nic.komatsu",
559 | "kosher": "whois.nic.kosher",
560 | "kr": "whois.kr",
561 | "krd": "whois.nic.krd",
562 | "kuokgroup": "whois.nic.kuokgroup",
563 | "kw": "whois.nic.kw",
564 | "ky": "whois.kyregistry.ky",
565 | "kyoto": "whois.nic.kyoto",
566 | "kz": "whois.nic.kz",
567 | "la": "whois.nic.la",
568 | "lacaixa": "whois.nic.lacaixa",
569 | "lamborghini": "whois.nic.lamborghini",
570 | "lamer": "whois.nic.lamer",
571 | "lancaster": "whois.nic.lancaster",
572 | "land": "whois.nic.land",
573 | "landrover": "whois.nic.landrover",
574 | "lasalle": "whois.nic.lasalle",
575 | "lat": "whois.nic.lat",
576 | "latino": "whois.nic.latino",
577 | "latrobe": "whois.nic.latrobe",
578 | "law": "whois.nic.law",
579 | "lawyer": "whois.nic.lawyer",
580 | "lb": "whois.lbdr.org.lb",
581 | "lc": "whois.afilias-grs.info",
582 | "lds": "whois.nic.lds",
583 | "lease": "whois.nic.lease",
584 | "leclerc": "whois.nic.leclerc",
585 | "lefrak": "whois.nic.lefrak",
586 | "legal": "whois.nic.legal",
587 | "lego": "whois.nic.lego",
588 | "lexus": "whois.nic.lexus",
589 | "lgbt": "whois.nic.lgbt",
590 | "li": "whois.nic.li",
591 | "lidl": "whois.nic.lidl",
592 | "life": "whois.nic.life",
593 | "lifestyle": "whois.nic.lifestyle",
594 | "lighting": "whois.nic.lighting",
595 | "like": "whois.nic.like",
596 | "limited": "whois.nic.limited",
597 | "limo": "whois.nic.limo",
598 | "link": "whois.uniregistry.net",
599 | "lipsy": "whois.nic.lipsy",
600 | "live": "whois.nic.live",
601 | "lk": "whois.nic.lk",
602 | "llc": "whois.nic.llc",
603 | "llp": "whois.nic.llp",
604 | "loan": "whois.nic.loan",
605 | "loans": "whois.nic.loans",
606 | "locker": "whois.nic.locker",
607 | "locus": "whois.nic.locus",
608 | "lol": "whois.nic.lol",
609 | "london": "whois.nic.london",
610 | "lotte": "whois.nic.lotte",
611 | "lotto": "whois.nic.lotto",
612 | "love": "whois.nic.love",
613 | "lpl": "whois.nic.lpl",
614 | "lplfinancial": "whois.nic.lplfinancial",
615 | "ls": "whois.nic.ls",
616 | "lt": "whois.domreg.lt",
617 | "ltd": "whois.nic.ltd",
618 | "ltda": "whois.nic.ltda",
619 | "lu": "whois.dns.lu",
620 | "lundbeck": "whois.nic.lundbeck",
621 | "luxe": "whois.nic.luxe",
622 | "luxury": "whois.nic.luxury",
623 | "lv": "whois.nic.lv",
624 | "ly": "whois.nic.ly",
625 | "ma": "whois.registre.ma",
626 | "madrid": "whois.nic.madrid",
627 | "maison": "whois.nic.maison",
628 | "makeup": "whois.nic.makeup",
629 | "man": "whois.nic.man",
630 | "management": "whois.nic.management",
631 | "mango": "whois.nic.mango",
632 | "map": "whois.nic.google",
633 | "market": "whois.nic.market",
634 | "marketing": "whois.nic.marketing",
635 | "markets": "whois.nic.markets",
636 | "marriott": "whois.nic.marriott",
637 | "mba": "whois.nic.mba",
638 | "mckinsey": "whois.nic.mckinsey",
639 | "md": "whois.nic.md",
640 | "me": "whois.nic.me",
641 | "med": "whois.nic.med",
642 | "media": "whois.nic.media",
643 | "meet": "whois.nic.google",
644 | "melbourne": "whois.nic.melbourne",
645 | "meme": "whois.nic.google",
646 | "memorial": "whois.nic.memorial",
647 | "men": "whois.nic.men",
648 | "menu": "whois.nic.menu",
649 | "mg": "whois.nic.mg",
650 | "miami": "whois.nic.miami",
651 | "mini": "whois.nic.mini",
652 | "mit": "whois.nic.mit",
653 | "mitsubishi": "whois.nic.gmo",
654 | "mk": "whois.marnet.mk",
655 | "ml": "whois.nic.ml",
656 | "mls": "whois.nic.mls",
657 | "mm": "whois.registry.gov.mm",
658 | "mma": "whois.nic.mma",
659 | "mn": "whois.nic.mn",
660 | "mo": "whois.monic.mo",
661 | "mobi": "whois.nic.mobi",
662 | "mobile": "whois.nic.mobile",
663 | "moda": "whois.nic.moda",
664 | "moe": "whois.nic.moe",
665 | "moi": "whois.nic.moi",
666 | "mom": "whois.nic.mom",
667 | "monash": "whois.nic.monash",
668 | "money": "whois.nic.money",
669 | "monster": "whois.nic.monster",
670 | "mormon": "whois.nic.mormon",
671 | "mortgage": "whois.nic.mortgage",
672 | "moscow": "whois.nic.moscow",
673 | "motorcycles": "whois.nic.motorcycles",
674 | "mov": "whois.nic.google",
675 | "movie": "whois.nic.movie",
676 | "mq": "whois.mediaserv.net",
677 | "mr": "whois.nic.mr",
678 | "ms": "whois.nic.ms",
679 | "mt": "whois.nic.org.mt",
680 | "mtn": "whois.nic.mtn",
681 | "mtr": "whois.nic.mtr",
682 | "mu": "whois.nic.mu",
683 | "museum": "whois.nic.museum",
684 | "music": "whois.nic.music",
685 | "mw": "whois.nic.mw",
686 | "mx": "whois.mx",
687 | "my": "whois.mynic.my",
688 | "mz": "whois.nic.mz",
689 | "na": "whois.na-nic.com.na",
690 | "nab": "whois.nic.nab",
691 | "nagoya": "whois.nic.nagoya",
692 | "name": "whois.nic.name",
693 | "natura": "whois.gtlds.nic.br",
694 | "navy": "whois.nic.navy",
695 | "nc": "whois.nc",
696 | "nec": "whois.nic.nec",
697 | "net": "whois.verisign-grs.com",
698 | "netbank": "whois.nic.netbank",
699 | "network": "whois.nic.network",
700 | "new": "whois.nic.google",
701 | "news": "whois.nic.news",
702 | "next": "whois.nic.next",
703 | "nextdirect": "whois.nic.nextdirect",
704 | "nexus": "whois.nic.google",
705 | "nf": "whois.nic.nf",
706 | "ng": "whois.nic.net.ng",
707 | "ngo": "whois.nic.ngo",
708 | "nhk": "whois.nic.nhk",
709 | "nico": "whois.nic.nico",
710 | "nikon": "whois.nic.nikon",
711 | "ninja": "whois.nic.ninja",
712 | "nissan": "whois.nic.gmo",
713 | "nissay": "whois.nic.nissay",
714 | "nl": "whois.domain-registry.nl",
715 | "no": "whois.norid.no",
716 | "nokia": "whois.nic.nokia",
717 | "norton": "whois.nic.norton",
718 | "now": "whois.nic.now",
719 | "nowruz": "whois.nic.nowruz",
720 | "nowtv": "whois.nic.nowtv",
721 | "nra": "whois.nic.nra",
722 | "nrw": "whois.nic.nrw",
723 | "nu": "whois.iis.nu",
724 | "nz": "whois.irs.net.nz",
725 | "obi": "whois.nic.obi",
726 | "observer": "whois.nic.observer",
727 | "okinawa": "whois.nic.okinawa",
728 | "olayan": "whois.nic.olayan",
729 | "olayangroup": "whois.nic.olayangroup",
730 | "ollo": "whois.nic.ollo",
731 | "om": "whois.registry.om",
732 | "omega": "whois.nic.omega",
733 | "one": "whois.nic.one",
734 | "ong": "whois.nic.ong",
735 | "onl": "whois.nic.onl",
736 | "online": "whois.nic.online",
737 | "ooo": "whois.nic.ooo",
738 | "oracle": "whois.nic.oracle",
739 | "orange": "whois.nic.orange",
740 | "org": "whois.publicinterestregistry.org",
741 | "organic": "whois.nic.organic",
742 | "origins": "whois.nic.origins",
743 | "osaka": "whois.nic.osaka",
744 | "otsuka": "whois.nic.otsuka",
745 | "ott": "whois.nic.ott",
746 | "ovh": "whois.nic.ovh",
747 | "page": "whois.nic.google",
748 | "panasonic": "whois.nic.gmo",
749 | "paris": "whois.nic.paris",
750 | "pars": "whois.nic.pars",
751 | "partners": "whois.nic.partners",
752 | "parts": "whois.nic.parts",
753 | "party": "whois.nic.party",
754 | "pay": "whois.nic.pay",
755 | "pccw": "whois.nic.pccw",
756 | "pe": "kero.yachay.pe",
757 | "pet": "whois.nic.pet",
758 | "pf": "whois.registry.pf",
759 | "pharmacy": "whois.nic.pharmacy",
760 | "phd": "whois.nic.google",
761 | "philips": "whois.nic.philips",
762 | "phone": "whois.nic.phone",
763 | "photo": "whois.nic.photo",
764 | "photography": "whois.nic.photography",
765 | "photos": "whois.nic.photos",
766 | "physio": "whois.nic.physio",
767 | "pics": "whois.nic.pics",
768 | "pictures": "whois.nic.pictures",
769 | "pid": "whois.nic.pid",
770 | "pin": "whois.nic.pin",
771 | "pink": "whois.nic.pink",
772 | "pioneer": "whois.nic.pioneer",
773 | "pizza": "whois.nic.pizza",
774 | "pk": "whois.pknic.net.pk",
775 | "pl": "whois.dns.pl",
776 | "place": "whois.nic.place",
777 | "play": "whois.nic.google",
778 | "playstation": "whois.nic.playstation",
779 | "plumbing": "whois.nic.plumbing",
780 | "plus": "whois.nic.plus",
781 | "pm": "whois.nic.pm",
782 | "pnc": "whois.nic.pnc",
783 | "pohl": "whois.nic.pohl",
784 | "poker": "whois.nic.poker",
785 | "politie": "whois.nic.politie",
786 | "porn": "whois.nic.porn",
787 | "post": "whois.dotpostregistry.net",
788 | "pr": "whois.afilias-srs.net",
789 | "press": "whois.nic.press",
790 | "prime": "whois.nic.prime",
791 | "pro": "whois.nic.pro",
792 | "prod": "whois.nic.google",
793 | "productions": "whois.nic.productions",
794 | "prof": "whois.nic.google",
795 | "progressive": "whois.nic.progressive",
796 | "promo": "whois.nic.promo",
797 | "properties": "whois.nic.properties",
798 | "property": "whois.nic.property",
799 | "protection": "whois.nic.protection",
800 | "ps": "whois.pnina.ps",
801 | "pt": "whois.dns.pt",
802 | "pub": "whois.nic.pub",
803 | "pw": "whois.nic.pw",
804 | "pwc": "whois.nic.pwc",
805 | "qa": "whois.registry.qa",
806 | "qpon": "whois.nic.qpon",
807 | "quebec": "whois.nic.quebec",
808 | "quest": "whois.nic.quest",
809 | "racing": "whois.nic.racing",
810 | "radio": "whois.nic.radio",
811 | "re": "whois.nic.re",
812 | "read": "whois.nic.read",
813 | "realestate": "whois.nic.realestate",
814 | "realtor": "whois.nic.realtor",
815 | "realty": "whois.nic.realty",
816 | "recipes": "whois.nic.recipes",
817 | "red": "whois.nic.red",
818 | "redstone": "whois.nic.redstone",
819 | "redumbrella": "whois.nic.redumbrella",
820 | "rehab": "whois.nic.rehab",
821 | "reise": "whois.nic.reise",
822 | "reisen": "whois.nic.reisen",
823 | "reit": "whois.nic.reit",
824 | "reliance": "whois.nic.reliance",
825 | "ren": "whois.nic.ren",
826 | "rent": "whois.nic.rent",
827 | "rentals": "whois.nic.rentals",
828 | "repair": "whois.nic.repair",
829 | "report": "whois.nic.report",
830 | "republican": "whois.nic.republican",
831 | "rest": "whois.nic.rest",
832 | "restaurant": "whois.nic.restaurant",
833 | "review": "whois.nic.review",
834 | "reviews": "whois.nic.reviews",
835 | "rexroth": "whois.nic.rexroth",
836 | "rich": "whois.nic.rich",
837 | "richardli": "whois.nic.richardli",
838 | "ricoh": "whois.nic.ricoh",
839 | "ril": "whois.nic.ril",
840 | "rio": "whois.gtlds.nic.br",
841 | "rip": "whois.nic.rip",
842 | "ro": "whois.rotld.ro",
843 | "rocks": "whois.nic.rocks",
844 | "rodeo": "whois.nic.rodeo",
845 | "rogers": "whois.nic.rogers",
846 | "room": "whois.nic.room",
847 | "rs": "whois.rnids.rs",
848 | "rsvp": "whois.nic.google",
849 | "ru": "whois.tcinet.ru",
850 | "rugby": "whois.nic.rugby",
851 | "ruhr": "whois.nic.ruhr",
852 | "run": "whois.nic.run",
853 | "rw": "whois.ricta.org.rw",
854 | "rwe": "whois.nic.rwe",
855 | "ryukyu": "whois.nic.ryukyu",
856 | "sa": "whois.nic.net.sa",
857 | "saarland": "whois.nic.saarland",
858 | "safe": "whois.nic.safe",
859 | "safety": "whois.nic.safety",
860 | "sale": "whois.nic.sale",
861 | "salon": "whois.nic.salon",
862 | "samsclub": "whois.nic.samsclub",
863 | "samsung": "whois.nic.samsung",
864 | "sandvik": "whois.nic.sandvik",
865 | "sandvikcoromant": "whois.nic.sandvikcoromant",
866 | "sanofi": "whois.nic.sanofi",
867 | "sap": "whois.nic.sap",
868 | "sarl": "whois.nic.sarl",
869 | "save": "whois.nic.save",
870 | "saxo": "whois.nic.saxo",
871 | "sb": "whois.nic.net.sb",
872 | "sbi": "whois.nic.sbi",
873 | "sbs": "whois.nic.sbs",
874 | "sc": "whois2.afilias-grs.net",
875 | "sca": "whois.nic.sca",
876 | "scb": "whois.nic.scb",
877 | "schaeffler": "whois.afilias-srs.net",
878 | "schmidt": "whois.nic.schmidt",
879 | "scholarships": "whois.nic.scholarships",
880 | "school": "whois.nic.school",
881 | "schule": "whois.nic.schule",
882 | "schwarz": "whois.nic.schwarz",
883 | "science": "whois.nic.science",
884 | "scot": "whois.nic.scot",
885 | "sd": "whois.sdnic.sd",
886 | "se": "whois.iis.se",
887 | "search": "whois.nic.google",
888 | "seat": "whois.nic.seat",
889 | "secure": "whois.nic.secure",
890 | "security": "whois.nic.security",
891 | "seek": "whois.nic.seek",
892 | "select": "whois.nic.select",
893 | "services": "whois.nic.services",
894 | "seven": "whois.nic.seven",
895 | "sew": "whois.nic.sew",
896 | "sex": "whois.nic.sex",
897 | "sexy": "whois.nic.sexy",
898 | "sfr": "whois.nic.sfr",
899 | "sg": "whois.sgnic.sg",
900 | "sh": "whois.nic.sh",
901 | "shangrila": "whois.nic.shangrila",
902 | "sharp": "whois.nic.gmo",
903 | "shaw": "whois.afilias-srs.net",
904 | "shell": "whois.nic.shell",
905 | "shia": "whois.nic.shia",
906 | "shiksha": "whois.nic.shiksha",
907 | "shoes": "whois.nic.shoes",
908 | "shop": "whois.nic.shop",
909 | "shopping": "whois.nic.shopping",
910 | "shouji": "whois.teleinfo.cn",
911 | "show": "whois.nic.show",
912 | "showtime": "whois.nic.showtime",
913 | "si": "whois.register.si",
914 | "silk": "whois.nic.silk",
915 | "sina": "whois.nic.sina",
916 | "singles": "whois.nic.singles",
917 | "site": "whois.nic.site",
918 | "sk": "whois.sk-nic.sk",
919 | "ski": "whois.nic.ski",
920 | "skin": "whois.nic.skin",
921 | "sky": "whois.nic.sky",
922 | "sl": "whois.nic.sl",
923 | "sling": "whois.nic.sling",
924 | "sm": "whois.nic.sm",
925 | "smart": "whois.nic.smart",
926 | "smile": "whois.nic.smile",
927 | "sn": "whois.nic.sn",
928 | "sncf": "whois.nic.sncf",
929 | "so": "whois.nic.so",
930 | "soccer": "whois.nic.soccer",
931 | "social": "whois.nic.social",
932 | "softbank": "whois.nic.softbank",
933 | "software": "whois.nic.software",
934 | "solar": "whois.nic.solar",
935 | "solutions": "whois.nic.solutions",
936 | "sony": "whois.nic.sony",
937 | "soy": "whois.nic.google",
938 | "spa": "whois.nic.spa",
939 | "space": "whois.nic.space",
940 | "sport": "whois.nic.sport",
941 | "spot": "whois.nic.spot",
942 | "srl": "whois.nic.srl",
943 | "ss": "whois.nic.ss",
944 | "st": "whois.nic.st",
945 | "stada": "whois.nic.stada",
946 | "star": "whois.nic.star",
947 | "statebank": "whois.nic.statebank",
948 | "stc": "whois.nic.stc",
949 | "stcgroup": "whois.nic.stcgroup",
950 | "stockholm": "whois.nic.stockholm",
951 | "storage": "whois.nic.storage",
952 | "store": "whois.nic.store",
953 | "stream": "whois.nic.stream",
954 | "studio": "whois.nic.studio",
955 | "study": "whois.nic.study",
956 | "style": "whois.nic.style",
957 | "su": "whois.tcinet.ru",
958 | "sucks": "whois.nic.sucks",
959 | "supplies": "whois.nic.supplies",
960 | "supply": "whois.nic.supply",
961 | "support": "whois.nic.support",
962 | "surf": "whois.nic.surf",
963 | "surgery": "whois.nic.surgery",
964 | "suzuki": "whois.nic.suzuki",
965 | "swatch": "whois.nic.swatch",
966 | "swiss": "whois.nic.swiss",
967 | "sx": "whois.sx",
968 | "sy": "whois.tld.sy",
969 | "sydney": "whois.nic.sydney",
970 | "systems": "whois.nic.systems",
971 | "tab": "whois.nic.tab",
972 | "taipei": "whois.nic.taipei",
973 | "talk": "whois.nic.talk",
974 | "taobao": "whois.nic.taobao",
975 | "tatamotors": "whois.nic.tatamotors",
976 | "tatar": "whois.nic.tatar",
977 | "tattoo": "whois.uniregistry.net",
978 | "tax": "whois.nic.tax",
979 | "taxi": "whois.nic.taxi",
980 | "tc": "whois.nic.tc",
981 | "tci": "whois.nic.tci",
982 | "td": "whois.nic.td",
983 | "tdk": "whois.nic.tdk",
984 | "team": "whois.nic.team",
985 | "tech": "whois.nic.tech",
986 | "technology": "whois.nic.technology",
987 | "tel": "whois.nic.tel",
988 | "temasek": "whois.nic.temasek",
989 | "tennis": "whois.nic.tennis",
990 | "teva": "whois.nic.teva",
991 | "tf": "whois.nic.tf",
992 | "tg": "whois.nic.tg",
993 | "th": "whois.thnic.co.th",
994 | "thd": "whois.nic.thd",
995 | "theater": "whois.nic.theater",
996 | "theatre": "whois.nic.theatre",
997 | "tiaa": "whois.nic.tiaa",
998 | "tickets": "whois.nic.tickets",
999 | "tienda": "whois.nic.tienda",
1000 | "tips": "whois.nic.tips",
1001 | "tires": "whois.nic.tires",
1002 | "tirol": "whois.nic.tirol",
1003 | "tk": "whois.dot.tk",
1004 | "tl": "whois.nic.tl",
1005 | "tm": "whois.nic.tm",
1006 | "tmall": "whois.nic.tmall",
1007 | "tn": "whois.ati.tn",
1008 | "to": "whois.tonic.to",
1009 | "today": "whois.nic.today",
1010 | "tokyo": "whois.nic.tokyo",
1011 | "tools": "whois.nic.tools",
1012 | "top": "whois.nic.top",
1013 | "toray": "whois.nic.toray",
1014 | "toshiba": "whois.nic.toshiba",
1015 | "total": "whois.nic.total",
1016 | "tours": "whois.nic.tours",
1017 | "town": "whois.nic.town",
1018 | "toyota": "whois.nic.toyota",
1019 | "toys": "whois.nic.toys",
1020 | "tr": "whois.trabis.gov.tr",
1021 | "trade": "whois.nic.trade",
1022 | "trading": "whois.nic.trading",
1023 | "training": "whois.nic.training",
1024 | "travel": "whois.nic.travel",
1025 | "travelers": "whois.nic.travelers",
1026 | "travelersinsurance": "whois.nic.travelersinsurance",
1027 | "trust": "whois.nic.trust",
1028 | "trv": "whois.nic.trv",
1029 | "tube": "whois.nic.tube",
1030 | "tui": "whois.nic.tui",
1031 | "tunes": "whois.nic.tunes",
1032 | "tushu": "whois.nic.tushu",
1033 | "tv": "whois.nic.tv",
1034 | "tvs": "whois.nic.tvs",
1035 | "tw": "whois.twnic.net.tw",
1036 | "tz": "whois.tznic.or.tz",
1037 | "ua": "whois.ua",
1038 | "ubank": "whois.nic.ubank",
1039 | "ubs": "whois.nic.ubs",
1040 | "ug": "whois.co.ug",
1041 | "uk": "whois.nic.uk",
1042 | "unicom": "whois.nic.unicom",
1043 | "university": "whois.nic.university",
1044 | "uno": "whois.nic.uno",
1045 | "uol": "whois.gtlds.nic.br",
1046 | "ups": "whois.nic.ups",
1047 | "us": "whois.nic.us",
1048 | "uy": "whois.nic.org.uy",
1049 | "uz": "whois.cctld.uz",
1050 | "vacations": "whois.nic.vacations",
1051 | "vana": "whois.nic.vana",
1052 | "vanguard": "whois.nic.vanguard",
1053 | "vc": "whois2.afilias-grs.net",
1054 | "ve": "whois.nic.ve",
1055 | "vegas": "whois.nic.vegas",
1056 | "ventures": "whois.nic.ventures",
1057 | "verisign": "whois.nic.verisign",
1058 | "vermögensberater": "whois.nic.xn--vermgensberater-ctb",
1059 | "vermögensberatung": "whois.nic.xn--vermgensberatung-pwb",
1060 | "versicherung": "whois.nic.versicherung",
1061 | "vet": "whois.nic.vet",
1062 | "vg": "whois.nic.vg",
1063 | "viajes": "whois.nic.viajes",
1064 | "video": "whois.nic.video",
1065 | "vig": "whois.nic.vig",
1066 | "viking": "whois.nic.viking",
1067 | "villas": "whois.nic.villas",
1068 | "vin": "whois.nic.vin",
1069 | "vip": "whois.nic.vip",
1070 | "virgin": "whois.nic.virgin",
1071 | "visa": "whois.nic.visa",
1072 | "vision": "whois.nic.vision",
1073 | "viva": "whois.nic.viva",
1074 | "vlaanderen": "whois.nic.vlaanderen",
1075 | "vodka": "whois.nic.vodka",
1076 | "volkswagen": "whois.nic.volkswagen",
1077 | "volvo": "whois.nic.volvo",
1078 | "vote": "whois.nic.vote",
1079 | "voting": "whois.nic.voting",
1080 | "voto": "whois.nic.voto",
1081 | "voyage": "whois.nic.voyage",
1082 | "vu": "whois.dnrs.vu",
1083 | "wales": "whois.nic.wales",
1084 | "walmart": "whois.nic.walmart",
1085 | "walter": "whois.nic.walter",
1086 | "wang": "whois.gtld.knet.cn",
1087 | "wanggou": "whois.nic.wanggou",
1088 | "watch": "whois.nic.watch",
1089 | "watches": "whois.nic.watches",
1090 | "webcam": "whois.nic.webcam",
1091 | "weber": "whois.nic.weber",
1092 | "website": "whois.nic.website",
1093 | "wed": "whois.nic.wed",
1094 | "wedding": "whois.nic.wedding",
1095 | "weibo": "whois.nic.weibo",
1096 | "weir": "whois.nic.weir",
1097 | "wf": "whois.nic.wf",
1098 | "whoswho": "whois.nic.whoswho",
1099 | "wien": "whois.nic.wien",
1100 | "wiki": "whois.nic.wiki",
1101 | "win": "whois.nic.win",
1102 | "wine": "whois.nic.wine",
1103 | "wme": "whois.nic.wme",
1104 | "wolterskluwer": "whois.nic.wolterskluwer",
1105 | "woodside": "whois.nic.woodside",
1106 | "work": "whois.nic.work",
1107 | "works": "whois.nic.works",
1108 | "world": "whois.nic.world",
1109 | "wow": "whois.nic.wow",
1110 | "ws": "whois.website.ws",
1111 | "wtc": "whois.nic.wtc",
1112 | "wtf": "whois.nic.wtf",
1113 | "xerox": "whois.nic.xerox",
1114 | "xfinity": "whois.nic.xfinity",
1115 | "xihuan": "whois.teleinfo.cn",
1116 | "xin": "whois.nic.xin",
1117 | "xn--11b4c3d": "whois.nic.xn--11b4c3d",
1118 | "xn--1qqw23a": "whois.ngtld.cn",
1119 | "xn--2scrj9c": "whois.registry.in",
1120 | "xn--30rr7y": "whois.gtld.knet.cn",
1121 | "xn--3bst00m": "whois.gtld.knet.cn",
1122 | "xn--3ds443g": "whois.teleinfo.cn",
1123 | "xn--3e0b707e": "whois.kr",
1124 | "xn--3hcrj9c": "whois.registry.in",
1125 | "xn--3pxu8k": "whois.nic.xn--3pxu8k",
1126 | "xn--42c2d9a": "whois.nic.xn--42c2d9a",
1127 | "xn--45br5cyl": "whois.registry.in",
1128 | "xn--45brj9c": "whois.registry.in",
1129 | "xn--45q11c": "whois.gtld.knet.cn",
1130 | "xn--4dbrk0ce": "whois.isoc.org.il",
1131 | "xn--4gbrim": "whois.nic.xn--4gbrim",
1132 | "xn--55qw42g": "whois.conac.cn",
1133 | "xn--55qx5d": "whois.ngtld.cn",
1134 | "xn--5su34j936bgsg": "whois.nic.xn--5su34j936bgsg",
1135 | "xn--5tzm5g": "whois.nic.xn--5tzm5g",
1136 | "xn--6frz82g": "whois.nic.xn--6frz82g",
1137 | "xn--6qq986b3xl": "whois.gtld.knet.cn",
1138 | "xn--80adxhks": "whois.nic.xn--80adxhks",
1139 | "xn--80ao21a": "whois.nic.kz",
1140 | "xn--80aqecdr1a": "whois.nic.xn--80aqecdr1a",
1141 | "xn--80asehdb": "whois.nic.xn--80asehdb",
1142 | "xn--80aswg": "whois.nic.xn--80aswg",
1143 | "xn--8y0a063a": "whois.nic.xn--8y0a063a",
1144 | "xn--90a3ac": "whois.rnids.rs",
1145 | "xn--90ae": "whois.imena.bg",
1146 | "xn--90ais": "whois.cctld.by",
1147 | "xn--9dbq2a": "whois.nic.xn--9dbq2a",
1148 | "xn--9et52u": "whois.gtld.knet.cn",
1149 | "xn--9krt00a": "whois.nic.xn--9krt00a",
1150 | "xn--b4w605ferd": "whois.nic.xn--b4w605ferd",
1151 | "xn--c1avg": "whois.nic.xn--c1avg",
1152 | "xn--c2br7g": "whois.nic.xn--c2br7g",
1153 | "xn--cckwcxetd": "whois.nic.xn--cckwcxetd",
1154 | "xn--cg4bki": "whois.kr",
1155 | "xn--clchc0ea0b2g2a9gcd": "whois.sgnic.sg",
1156 | "xn--czrs0t": "whois.nic.xn--czrs0t",
1157 | "xn--czru2d": "whois.gtld.knet.cn",
1158 | "xn--d1acj3b": "whois.nic.xn--d1acj3b",
1159 | "xn--d1alf": "whois.marnet.mk",
1160 | "xn--e1a4c": "whois.eu",
1161 | "xn--efvy88h": "whois.nic.xn--efvy88h",
1162 | "xn--fhbei": "whois.nic.xn--fhbei",
1163 | "xn--fiq228c5hs": "whois.teleinfo.cn",
1164 | "xn--fiq64b": "whois.gtld.knet.cn",
1165 | "xn--fiqs8s": "cwhois.cnnic.cn",
1166 | "xn--fiqz9s": "cwhois.cnnic.cn",
1167 | "xn--fjq720a": "whois.nic.xn--fjq720a",
1168 | "xn--flw351e": "whois.nic.google",
1169 | "xn--fpcrj9c3d": "whois.registry.in",
1170 | "xn--fzc2c9e2c": "whois.nic.lk",
1171 | "xn--fzys8d69uvgm": "whois.nic.xn--fzys8d69uvgm",
1172 | "xn--gecrj9c": "whois.registry.in",
1173 | "xn--h2breg3eve": "whois.registry.in",
1174 | "xn--h2brj9c": "whois.registry.in",
1175 | "xn--h2brj9c8c": "whois.registry.in",
1176 | "xn--hxt814e": "whois.gtld.knet.cn",
1177 | "xn--i1b6b1a6a2e": "whois.nic.xn--i1b6b1a6a2e",
1178 | "xn--io0a7i": "whois.ngtld.cn",
1179 | "xn--j1aef": "whois.nic.xn--j1aef",
1180 | "xn--j1amh": "whois.dotukr.com",
1181 | "xn--j6w193g": "whois.hkirc.hk",
1182 | "xn--jlq480n2rg": "whois.nic.xn--jlq480n2rg",
1183 | "xn--kcrx77d1x4a": "whois.nic.xn--kcrx77d1x4a",
1184 | "xn--kprw13d": "whois.twnic.net.tw",
1185 | "xn--kpry57d": "whois.twnic.net.tw",
1186 | "xn--kput3i": "whois.nic.xn--kput3i",
1187 | "xn--lgbbat1ad8j": "whois.nic.dz",
1188 | "xn--mgb9awbf": "whois.registry.om",
1189 | "xn--mgba3a4f16a": "whois.nic.ir",
1190 | "xn--mgba7c0bbn0a": "whois.nic.xn--mgba7c0bbn0a",
1191 | "xn--mgbaakc7dvf": "whois.centralnic.com",
1192 | "xn--mgbaam7a8h": "whois.aeda.net.ae",
1193 | "xn--mgbab2bd": "whois.nic.xn--mgbab2bd",
1194 | "xn--mgbah1a3hjkrd": "whois.nic.mr",
1195 | "xn--mgbbh1a": "whois.registry.in",
1196 | "xn--mgbbh1a71e": "whois.registry.in",
1197 | "xn--mgbca7dzdo": "whois.nic.xn--mgbca7dzdo",
1198 | "xn--mgberp4a5d4ar": "whois.nic.net.sa",
1199 | "xn--mgbgu82a": "whois.registry.in",
1200 | "xn--mgbi4ecexp": "whois.nic.xn--mgbi4ecexp",
1201 | "xn--mgbt3dhd": "whois.nic.xn--mgbt3dhd",
1202 | "xn--mgbtx2b": "whois.cmc.iq",
1203 | "xn--mgbx4cd0ab": "whois.mynic.my",
1204 | "xn--mix891f": "whois.monic.mo",
1205 | "xn--mk1bu44c": "whois.nic.xn--mk1bu44c",
1206 | "xn--mxtq1m": "whois.nic.xn--mxtq1m",
1207 | "xn--ngbc5azd": "whois.nic.xn--ngbc5azd",
1208 | "xn--ngbe9e0a": "whois.nic.xn--ngbe9e0a",
1209 | "xn--ngbrx": "whois.nic.xn--ngbrx",
1210 | "xn--node": "whois.itdc.ge",
1211 | "xn--nqv7f": "whois.nic.xn--nqv7f",
1212 | "xn--nqv7fs00ema": "whois.nic.xn--nqv7fs00ema",
1213 | "xn--o3cw4h": "whois.thnic.co.th",
1214 | "xn--ogbpf8fl": "whois.tld.sy",
1215 | "xn--p1acf": "whois.nic.xn--p1acf",
1216 | "xn--p1ai": "whois.tcinet.ru",
1217 | "xn--pgbs0dh": "whois.ati.tn",
1218 | "xn--pssy2u": "whois.nic.xn--pssy2u",
1219 | "xn--q7ce6a": "whois.nic.la",
1220 | "xn--q9jyb4c": "whois.nic.google",
1221 | "xn--qcka1pmc": "whois.nic.google",
1222 | "xn--qxa6a": "whois.eu",
1223 | "xn--rvc1e0am3e": "whois.registry.in",
1224 | "xn--s9brj9c": "whois.registry.in",
1225 | "xn--ses554g": "whois.nic.xn--ses554g",
1226 | "xn--t60b56a": "whois.nic.xn--t60b56a",
1227 | "xn--tckwe": "whois.nic.xn--tckwe",
1228 | "xn--tiq49xqyj": "whois.nic.xn--tiq49xqyj",
1229 | "xn--unup4y": "whois.nic.xn--unup4y",
1230 | "xn--vermgensberater-ctb": "whois.nic.xn--vermgensberater-ctb",
1231 | "xn--vermgensberatung-pwb": "whois.nic.xn--vermgensberatung-pwb",
1232 | "xn--vhquv": "whois.nic.xn--vhquv",
1233 | "xn--vuq861b": "whois.teleinfo.cn",
1234 | "xn--w4r85el8fhu5dnra": "whois.nic.xn--w4r85el8fhu5dnra",
1235 | "xn--w4rs40l": "whois.nic.xn--w4rs40l",
1236 | "xn--wgbh1c": "whois.dotmasr.eg",
1237 | "xn--wgbl6a": "whois.registry.qa",
1238 | "xn--xhq521b": "whois.ngtld.cn",
1239 | "xn--xkc2al3hye2a": "whois.nic.lk",
1240 | "xn--xkc2dl3a5ee0h": "whois.registry.in",
1241 | "xn--y9a3aq": "whois.amnic.net",
1242 | "xn--yfro4i67o": "whois.sgnic.sg",
1243 | "xn--ygbi2ammx": "whois.pnina.ps",
1244 | "xn--zfr164b": "whois.conac.cn",
1245 | "xxx": "whois.nic.xxx",
1246 | "xyz": "whois.nic.xyz",
1247 | "yachts": "whois.nic.yachts",
1248 | "yamaxun": "whois.nic.yamaxun",
1249 | "ye": "whois.y.net.ye",
1250 | "yodobashi": "whois.nic.gmo",
1251 | "yoga": "whois.nic.yoga",
1252 | "yokohama": "whois.nic.yokohama",
1253 | "you": "whois.nic.you",
1254 | "youtube": "whois.nic.google",
1255 | "yt": "whois.nic.yt",
1256 | "yun": "whois.teleinfo.cn",
1257 | "zappos": "whois.nic.zappos",
1258 | "zara": "whois.nic.zara",
1259 | "zip": "whois.nic.google",
1260 | "zm": "whois.zicta.zm",
1261 | "zone": "whois.nic.zone",
1262 | "zuerich": "whois.nic.zuerich",
1263 | "ευ": "whois.eu",
1264 | "бг": "whois.imena.bg",
1265 | "бел": "whois.cctld.by",
1266 | "дети": "whois.nic.xn--d1acj3b",
1267 | "ею": "whois.eu",
1268 | "католик": "whois.nic.xn--80aqecdr1a",
1269 | "ком": "whois.nic.xn--j1aef",
1270 | "қаз": "whois.nic.kz",
1271 | "мкд": "whois.marnet.mk",
1272 | "москва": "whois.nic.xn--80adxhks",
1273 | "онлайн": "whois.nic.xn--80asehdb",
1274 | "орг": "whois.nic.xn--c1avg",
1275 | "рус": "whois.nic.xn--p1acf",
1276 | "рф": "whois.tcinet.ru",
1277 | "сайт": "whois.nic.xn--80aswg",
1278 | "срб": "whois.rnids.rs",
1279 | "укр": "whois.dotukr.com",
1280 | "გე": "whois.itdc.ge",
1281 | "հայ": "whois.amnic.net",
1282 | "ישראל": "whois.isoc.org.il",
1283 | "קום": "whois.nic.xn--9dbq2a",
1284 | "ابوظبي": "whois.nic.xn--mgbca7dzdo",
1285 | "اتصالات": "whois.centralnic.com",
1286 | "الجزائر": "whois.nic.dz",
1287 | "السعودية": "whois.nic.net.sa",
1288 | "العليان": "whois.nic.xn--mgba7c0bbn0a",
1289 | "امارات": "whois.aeda.net.ae",
1290 | "ایران": "whois.nic.ir",
1291 | "بارت": "whois.registry.in",
1292 | "بازار": "whois.nic.xn--mgbab2bd",
1293 | "بھارت": "whois.registry.in",
1294 | "بيتك": "whois.nic.xn--ngbe9e0a",
1295 | "ڀارت": "whois.registry.in",
1296 | "تونس": "whois.ati.tn",
1297 | "سورية": "whois.tld.sy",
1298 | "شبكة": "whois.nic.xn--ngbc5azd",
1299 | "عراق": "whois.cmc.iq",
1300 | "عرب": "whois.nic.xn--ngbrx",
1301 | "عمان": "whois.registry.om",
1302 | "فلسطين": "whois.pnina.ps",
1303 | "قطر": "whois.registry.qa",
1304 | "كاثوليك": "whois.nic.xn--mgbi4ecexp",
1305 | "كوم": "whois.nic.xn--fhbei",
1306 | "مصر": "whois.dotmasr.eg",
1307 | "مليسيا": "whois.mynic.my",
1308 | "موريتانيا": "whois.nic.mr",
1309 | "موقع": "whois.nic.xn--4gbrim",
1310 | "همراه": "whois.nic.xn--mgbt3dhd",
1311 | "कॉम": "whois.nic.xn--11b4c3d",
1312 | "नेट": "whois.nic.xn--c2br7g",
1313 | "भारत": "whois.registry.in",
1314 | "भारतम्": "whois.registry.in",
1315 | "भारोत": "whois.registry.in",
1316 | "संगठन": "whois.nic.xn--i1b6b1a6a2e",
1317 | "ভারত": "whois.registry.in",
1318 | "ভাৰত": "whois.registry.in",
1319 | "ਭਾਰਤ": "whois.registry.in",
1320 | "ભારત": "whois.registry.in",
1321 | "ଭାରତ": "whois.registry.in",
1322 | "இந்தியா": "whois.registry.in",
1323 | "இலங்கை": "whois.nic.lk",
1324 | "சிங்கப்பூர்": "whois.sgnic.sg",
1325 | "భారత్": "whois.registry.in",
1326 | "ಭಾರತ": "whois.registry.in",
1327 | "ഭാരതം": "whois.registry.in",
1328 | "ලංකා": "whois.nic.lk",
1329 | "คอม": "whois.nic.xn--42c2d9a",
1330 | "ไทย": "whois.thnic.co.th",
1331 | "ລາວ": "whois.nic.la",
1332 | "닷넷": "whois.nic.xn--t60b56a",
1333 | "닷컴": "whois.nic.xn--mk1bu44c",
1334 | "삼성": "whois.kr",
1335 | "한국": "whois.kr",
1336 | "アマゾン": "whois.nic.xn--cckwcxetd",
1337 | "グーグル": "whois.nic.google",
1338 | "コム": "whois.nic.xn--tckwe",
1339 | "みんな": "whois.nic.google",
1340 | "中信": "whois.gtld.knet.cn",
1341 | "中国": "cwhois.cnnic.cn",
1342 | "中國": "cwhois.cnnic.cn",
1343 | "中文网": "whois.teleinfo.cn",
1344 | "亚马逊": "whois.nic.xn--jlq480n2rg",
1345 | "企业": "whois.nic.xn--vhquv",
1346 | "佛山": "whois.ngtld.cn",
1347 | "信息": "whois.teleinfo.cn",
1348 | "八卦": "whois.gtld.knet.cn",
1349 | "公司": "whois.ngtld.cn",
1350 | "公益": "whois.conac.cn",
1351 | "台湾": "whois.twnic.net.tw",
1352 | "台灣": "whois.twnic.net.tw",
1353 | "商城": "whois.gtld.knet.cn",
1354 | "商店": "whois.nic.xn--czrs0t",
1355 | "嘉里": "whois.nic.xn--w4rs40l",
1356 | "嘉里大酒店": "whois.nic.xn--w4r85el8fhu5dnra",
1357 | "在线": "whois.teleinfo.cn",
1358 | "大拿": "whois.nic.xn--pssy2u",
1359 | "天主教": "whois.nic.xn--tiq49xqyj",
1360 | "娱乐": "whois.nic.xn--fjq720a",
1361 | "广东": "whois.ngtld.cn",
1362 | "微博": "whois.nic.xn--9krt00a",
1363 | "慈善": "whois.gtld.knet.cn",
1364 | "我爱你": "whois.gtld.knet.cn",
1365 | "手机": "whois.nic.xn--kput3i",
1366 | "政务": "whois.conac.cn",
1367 | "政府": "whois.nic.xn--mxtq1m",
1368 | "新加坡": "whois.sgnic.sg",
1369 | "新闻": "whois.nic.xn--efvy88h",
1370 | "时尚": "whois.gtld.knet.cn",
1371 | "机构": "whois.nic.xn--nqv7f",
1372 | "淡马锡": "whois.nic.xn--b4w605ferd",
1373 | "游戏": "whois.nic.xn--unup4y",
1374 | "澳門": "whois.monic.mo",
1375 | "点看": "whois.nic.xn--3pxu8k",
1376 | "移动": "whois.nic.xn--6frz82g",
1377 | "组织机构": "whois.nic.xn--nqv7fs00ema",
1378 | "网址": "whois.nic.xn--ses554g",
1379 | "网店": "whois.gtld.knet.cn",
1380 | "网站": "whois.nic.xn--5tzm5g",
1381 | "网络": "whois.ngtld.cn",
1382 | "联通": "whois.nic.xn--8y0a063a",
1383 | "谷歌": "whois.nic.google",
1384 | "集团": "whois.gtld.knet.cn",
1385 | "電訊盈科": "whois.nic.xn--fzys8d69uvgm",
1386 | "飞利浦": "whois.nic.xn--kcrx77d1x4a",
1387 | "香格里拉": "whois.nic.xn--5su34j936bgsg",
1388 | "香港": "whois.hkirc.hk",
1389 | "co.uk": "whois.nic.uk",
1390 | "biz.ua": "whois.ua",
1391 | "br.com": "whois.verisign-grs.com",
1392 | "co.am": "whois.amnic.net",
1393 | "co.in": "whois.registry.in",
1394 | "co.ua": "whois.ua",
1395 | "com.am": "whois.amnic.net",
1396 | "com.kz": "whois.nic.kz",
1397 | "com.ua": "whois.ua",
1398 | "kiev.ua": "whois.ua",
1399 | "net.am": "whois.amnic.net",
1400 | "net.in": "whois.registry.in",
1401 | "org.am": "whois.amnic.net",
1402 | "org.in": "whois.registry.in",
1403 | "org.kz": "whois.nic.kz",
1404 | "pp.ua": "whois.ua",
1405 | "ru.com": "whois.verisign-grs.com"
1406 | }
--------------------------------------------------------------------------------
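Note: the TLD-to-whois-server mapping above is the data consumed during whois lookups. Below is a minimal sketch of how such a mapping could drive a raw port-43 query; the file path, function name, and the naive single-label TLD split are illustrative assumptions, not the project's actual whois module (multi-label keys such as "co.uk" would need a longest-suffix match).

# Hedged sketch: query the whois server mapped to a domain's TLD over TCP/43.
import json
import socket

def whois_query(domain, servers_path='whois_servers.json', timeout=10):
    with open(servers_path, encoding='utf-8') as fh:
        servers = json.load(fh)
    # Naive TLD extraction; real code should also try multi-label keys
    # like "co.uk" before falling back to the last label.
    tld = domain.rsplit('.', 1)[-1].lower()
    server = servers.get(tld)
    if server is None:
        return None  # TLD not present in the mapping
    with socket.create_connection((server, 43), timeout=timeout) as sock:
        sock.sendall(f'{domain}\r\n'.encode())
        chunks = []
        while True:
            data = sock.recv(4096)
            if not data:
                break
            chunks.append(data)
    return b''.join(chunks).decode(errors='replace')

# Example (hypothetical target): print(whois_query('example.com'))

--------------------------------------------------------------------------------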
/wordlists/dirb_big.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thewhiteh4t/FinalRecon/ac4681c72613ef88a170d8f0ac9dfaa4714ade7b/wordlists/dirb_big.txt
--------------------------------------------------------------------------------
/wordlists/dirb_small.txt:
--------------------------------------------------------------------------------
1 | 0
2 | 00
3 | 01
4 | 02
5 | 03
6 | 1
7 | 10
8 | 100
9 | 1000
10 | 123
11 | 2
12 | 20
13 | 200
14 | 2000
15 | 2001
16 | 2002
17 | 2003
18 | 2004
19 | 2005
20 | 3
21 | @
22 | Admin
23 | Administration
24 | CVS
25 | CYBERDOCS
26 | CYBERDOCS25
27 | CYBERDOCS31
28 | INSTALL_admin
29 | Log
30 | Logs
31 | Pages
32 | Servlet
33 | Servlets
34 | SiteServer
35 | Sources
36 | Statistics
37 | Stats
38 | W3SVC
39 | W3SVC1
40 | W3SVC2
41 | W3SVC3
42 | WEB-INF
43 | _admin
44 | _pages
45 | a
46 | aa
47 | aaa
48 | abc
49 | about
50 | academic
51 | access
52 | accessgranted
53 | account
54 | accounting
55 | action
56 | actions
57 | active
58 | adm
59 | admin
60 | admin_
61 | admin_login
62 | admin_logon
63 | administrat
64 | administration
65 | administrator
66 | adminlogin
67 | adminlogon
68 | adminsql
69 | admon
70 | adsl
71 | agent
72 | agents
73 | alias
74 | aliases
75 | all
76 | alpha
77 | analog
78 | analyse
79 | announcements
80 | answer
81 | any
82 | apache
83 | api
84 | app
85 | applet
86 | applets
87 | appliance
88 | application
89 | applications
90 | apps
91 | archive
92 | archives
93 | arrow
94 | asp
95 | aspadmin
96 | assets
97 | attach
98 | attachments
99 | audit
100 | auth
101 | auto
102 | automatic
103 | b
104 | back
105 | back-up
106 | backdoor
107 | backend
108 | backoffice
109 | backup
110 | backups
111 | bak
112 | bak-up
113 | bakup
114 | bank
115 | banks
116 | banner
117 | banners
118 | base
119 | basic
120 | bass
121 | batch
122 | bd
123 | bdata
124 | bea
125 | bean
126 | beans
127 | beta
128 | bill
129 | billing
130 | bin
131 | binaries
132 | biz
133 | blog
134 | blow
135 | board
136 | boards
137 | body
138 | boot
139 | bot
140 | bots
141 | box
142 | boxes
143 | broken
144 | bsd
145 | bug
146 | bugs
147 | build
148 | builder
149 | bulk
150 | buttons
151 | c
152 | cache
153 | cachemgr
154 | cad
155 | can
156 | captcha
157 | car
158 | card
159 | cardinal
160 | cards
161 | carpet
162 | cart
163 | cas
164 | cat
165 | catalog
166 | catalogs
167 | catch
168 | cc
169 | ccs
170 | cd
171 | cdrom
172 | cert
173 | certenroll
174 | certificate
175 | certificates
176 | certs
177 | cfdocs
178 | cfg
179 | cgi
180 | cgi-bin
181 | cgi-bin/
182 | cgi-win
183 | cgibin
184 | chan
185 | change
186 | changepw
187 | channel
188 | chart
189 | chat
190 | class
191 | classes
192 | classic
193 | classified
194 | classifieds
195 | client
196 | clients
197 | cluster
198 | cm
199 | cmd
200 | code
201 | coffee
202 | coke
203 | command
204 | commerce
205 | commercial
206 | common
207 | component
208 | compose
209 | composer
210 | compressed
211 | comunicator
212 | con
213 | config
214 | configs
215 | configuration
216 | configure
217 | connect
218 | connections
219 | console
220 | constant
221 | constants
222 | contact
223 | contacts
224 | content
225 | contents
226 | control
227 | controller
228 | controlpanel
229 | controls
230 | corba
231 | core
232 | corporate
233 | count
234 | counter
235 | cpanel
236 | create
237 | creation
238 | credit
239 | creditcards
240 | cron
241 | crs
242 | css
243 | customer
244 | customers
245 | cv
246 | cvs
247 | d
248 | daemon
249 | dat
250 | data
251 | database
252 | databases
253 | dav
254 | db
255 | dba
256 | dbase
257 | dbm
258 | dbms
259 | debug
260 | default
261 | delete
262 | deletion
263 | demo
264 | demos
265 | deny
266 | deploy
267 | deployment
268 | design
269 | details
270 | dev
271 | dev60cgi
272 | devel
273 | develop
274 | developement
275 | developers
276 | development
277 | device
278 | devices
279 | devs
280 | diag
281 | dial
282 | dig
283 | dir
284 | directory
285 | discovery
286 | disk
287 | dispatch
288 | dispatcher
289 | dms
290 | dns
291 | doc
292 | docs
293 | docs41
294 | docs51
295 | document
296 | documents
297 | down
298 | download
299 | downloads
300 | draft
301 | dragon
302 | dratfs
303 | driver
304 | dump
305 | dumpenv
306 | e
307 | easy
308 | ebriefs
309 | echannel
310 | ecommerce
311 | edit
312 | editor
313 | element
314 | elements
315 | email
316 | employees
317 | en
318 | eng
319 | engine
320 | english
321 | enterprise
322 | env
323 | environ
324 | environment
325 | error
326 | errors
327 | es
328 | esales
329 | esp
330 | established
331 | esupport
332 | etc
333 | event
334 | events
335 | example
336 | examples
337 | exchange
338 | exe
339 | exec
340 | executable
341 | executables
342 | explorer
343 | export
344 | external
345 | extra
346 | Extranet
347 | extranet
348 | fail
349 | failed
350 | fcgi-bin
351 | feedback
352 | field
353 | file
354 | files
355 | filter
356 | firewall
357 | first
358 | flash
359 | folder
360 | foo
361 | forget
362 | forgot
363 | forgotten
364 | form
365 | format
366 | formhandler
367 | formsend
368 | formupdate
369 | fortune
370 | forum
371 | forums
372 | frame
373 | framework
374 | ftp
375 | fun
376 | function
377 | functions
378 | games
379 | gate
380 | generic
381 | gest
382 | get
383 | global
384 | globalnav
385 | globals
386 | gone
387 | gp
388 | gpapp
389 | granted
390 | graphics
391 | group
392 | groups
393 | guest
394 | guestbook
395 | guests
396 | hack
397 | hacker
398 | handler
399 | hanlder
400 | happening
401 | head
402 | header
403 | headers
404 | hello
405 | helloworld
406 | help
407 | hidden
408 | hide
409 | history
410 | hits
411 | home
412 | homepage
413 | homes
414 | homework
415 | host
416 | hosts
417 | htdocs
418 | htm
419 | html
420 | htmls
421 | ibm
422 | icons
423 | idbc
424 | iis
425 | images
426 | img
427 | import
428 | inbox
429 | inc
430 | include
431 | includes
432 | incoming
433 | incs
434 | index
435 | index2
436 | index_adm
437 | index_admin
438 | indexes
439 | info
440 | information
441 | ingres
442 | ingress
443 | ini
444 | init
445 | input
446 | install
447 | installation
448 | interactive
449 | internal
450 | internet
451 | intranet
452 | intro
453 | inventory
454 | invitation
455 | invite
456 | ipp
457 | ips
458 | j
459 | java
460 | java-sys
461 | javascript
462 | jdbc
463 | job
464 | join
465 | jrun
466 | js
467 | jsp
468 | jsps
469 | jsr
470 | keep
471 | kept
472 | kernel
473 | key
474 | lab
475 | labs
476 | launch
477 | launchpage
478 | ldap
479 | left
480 | level
481 | lib
482 | libraries
483 | library
484 | libs
485 | link
486 | links
487 | linux
488 | list
489 | load
490 | loader
491 | lock
492 | lockout
493 | log
494 | logfile
495 | logfiles
496 | logger
497 | logging
498 | login
499 | logo
500 | logon
501 | logout
502 | logs
503 | lost%2Bfound
504 | ls
505 | magic
506 | mail
507 | mailbox
508 | maillist
509 | main
510 | maint
511 | makefile
512 | man
513 | manage
514 | management
515 | manager
516 | manual
517 | map
518 | market
519 | marketing
520 | master
521 | mbo
522 | mdb
523 | me
524 | member
525 | members
526 | memory
527 | menu
528 | message
529 | messages
530 | messaging
531 | meta
532 | metabase
533 | mgr
534 | mine
535 | minimum
536 | mirror
537 | mirrors
538 | misc
539 | mkstats
540 | model
541 | modem
542 | module
543 | modules
544 | monitor
545 | mount
546 | mp3
547 | mp3s
548 | mqseries
549 | mrtg
550 | ms
551 | ms-sql
552 | msql
553 | mssql
554 | music
555 | my
556 | my-sql
557 | mysql
558 | names
559 | navigation
560 | ne
561 | net
562 | netscape
563 | netstat
564 | network
565 | new
566 | news
567 | next
568 | nl
569 | nobody
570 | notes
571 | novell
572 | nul
573 | null
574 | number
575 | object
576 | objects
577 | odbc
578 | of
579 | off
580 | office
581 | ogl
582 | old
583 | oldie
584 | on
585 | online
586 | open
587 | openapp
588 | openfile
589 | operator
590 | oracle
591 | oradata
592 | order
593 | orders
594 | outgoing
595 | output
596 | pad
597 | page
598 | pages
599 | pam
600 | panel
601 | paper
602 | papers
603 | pass
604 | passes
605 | passw
606 | passwd
607 | passwor
608 | password
609 | passwords
610 | path
611 | pdf
612 | perl
613 | perl5
614 | personal
615 | personals
616 | pgsql
617 | phone
618 | php
619 | phpMyAdmin
620 | phpmyadmin
621 | pics
622 | ping
623 | pix
624 | pl
625 | pls
626 | plx
627 | pol
628 | policy
629 | poll
630 | pop
631 | portal
632 | portlet
633 | portlets
634 | post
635 | postgres
636 | power
637 | press
638 | preview
639 | print
640 | printenv
641 | priv
642 | private
643 | privs
644 | process
645 | processform
646 | prod
647 | production
648 | products
649 | professor
650 | profile
651 | program
652 | project
653 | proof
654 | properties
655 | protect
656 | protected
657 | proxy
658 | ps
659 | pub
660 | public
661 | publish
662 | publisher
663 | purchase
664 | purchases
665 | put
666 | pw
667 | pwd
668 | python
669 | query
670 | queue
671 | quote
672 | ramon
673 | random
674 | rank
675 | rcs
676 | readme
677 | redir
678 | redirect
679 | reference
680 | references
681 | reg
682 | reginternal
683 | regional
684 | register
685 | registered
686 | release
687 | remind
688 | reminder
689 | remote
690 | removed
691 | report
692 | reports
693 | requisite
694 | research
695 | reseller
696 | resource
697 | resources
698 | responder
699 | restricted
700 | retail
701 | right
702 | robot
703 | robotics
704 | root
705 | route
706 | router
707 | rpc
708 | rss
709 | rules
710 | run
711 | sales
712 | sample
713 | samples
714 | save
715 | saved
716 | schema
717 | scr
718 | scratc
719 | script
720 | scripts
721 | sdk
722 | search
723 | secret
724 | secrets
725 | section
726 | sections
727 | secure
728 | secured
729 | security
730 | select
731 | sell
732 | send
733 | sendmail
734 | sensepost
735 | sensor
736 | sent
737 | server
738 | server_stats
739 | servers
740 | service
741 | services
742 | servlet
743 | servlets
744 | session
745 | sessions
746 | set
747 | setting
748 | settings
749 | setup
750 | share
751 | shared
752 | shell
753 | shit
754 | shop
755 | shopper
756 | show
757 | showcode
758 | shtml
759 | sign
760 | signature
761 | signin
762 | simple
763 | single
764 | site
765 | sitemap
766 | sites
767 | small
768 | snoop
769 | soap
770 | soapdocs
771 | software
772 | solaris
773 | solutions
774 | somebody
775 | source
776 | sources
777 | spain
778 | spanish
779 | sql
780 | sqladmin
781 | src
782 | srchad
783 | srv
784 | ssi
785 | ssl
786 | staff
787 | start
788 | startpage
789 | stat
790 | statistic
791 | statistics
792 | stats
793 | status
794 | stop
795 | store
796 | story
797 | string
798 | student
799 | stuff
800 | style
801 | stylesheet
802 | stylesheets
803 | submit
804 | submitter
805 | sun
806 | super
807 | support
808 | supported
809 | survey
810 | svc
811 | svn
812 | svr
813 | sw
814 | sys
815 | sysadmin
816 | system
817 | table
818 | tag
819 | tape
820 | tar
821 | target
822 | tech
823 | temp
824 | template
825 | templates
826 | temporal
827 | temps
828 | terminal
829 | test
830 | testing
831 | tests
832 | text
833 | texts
834 | ticket
835 | tmp
836 | today
837 | tool
838 | toolbar
839 | tools
840 | top
841 | topics
842 | tour
843 | tpv
844 | trace
845 | traffic
846 | transactions
847 | transfer
848 | transport
849 | trap
850 | trash
851 | tree
852 | trees
853 | tutorial
854 | uddi
855 | uninstall
856 | unix
857 | up
858 | update
859 | updates
860 | upload
861 | uploader
862 | uploads
863 | usage
864 | user
865 | users
866 | usr
867 | ustats
868 | util
869 | utilities
870 | utility
871 | utils
872 | validation
873 | validatior
874 | vap
875 | var
876 | vb
877 | vbs
878 | vbscript
879 | vbscripts
880 | vfs
881 | view
882 | viewer
883 | views
884 | virtual
885 | visitor
886 | vpn
887 | w
888 | w3
889 | w3c
890 | warez
891 | wdav
892 | web
893 | webaccess
894 | webadmin
895 | webapp
896 | webboard
897 | webcart
898 | webdata
899 | webdav
900 | webdist
901 | webhits
902 | weblog
903 | weblogic
904 | weblogs
905 | webmail
906 | webmaster
907 | websearch
908 | website
909 | webstat
910 | webstats
911 | webvpn
912 | welcome
913 | wellcome
914 | whatever
915 | whatnot
916 | whois
917 | will
918 | win
919 | windows
920 | word
921 | wordpress
922 | work
923 | workplace
924 | workshop
925 | wp
926 | wstats
927 | wusage
928 | www
929 | wwwboard
930 | wwwjoin
931 | wwwlog
932 | wwwstats
933 | xcache
934 | xfer
935 | xml
936 | xmlrpc
937 | xsl
938 | xsql
939 | xyz
940 | zap
941 | zip
942 | zipfiles
943 | zips
944 | ~adm
945 | ~admin
946 | ~administrator
947 | ~bin
948 | ~ftp
949 | ~guest
950 | ~mail
951 | ~operator
952 | ~root
953 | ~sys
954 | ~sysadm
955 | ~sysadmin
956 | ~test
957 | ~user
958 | ~webmaster
959 | ~www
960 |
--------------------------------------------------------------------------------
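Note: a minimal sketch of how a flat wordlist like dirb_small.txt above could drive basic directory enumeration; the target URL, function name, status-code filter, and use of the requests library are illustrative assumptions, not the project's dirrec module.

# Hedged sketch: probe each wordlist entry as a path on the target and
# keep responses that are not 404.
import requests

def enum_dirs(base_url, wordlist_path='wordlists/dirb_small.txt', timeout=5):
    found = []
    with open(wordlist_path, encoding='utf-8') as fh:
        words = [line.strip() for line in fh if line.strip()]
    for word in words:
        url = f'{base_url.rstrip("/")}/{word}'
        try:
            resp = requests.get(url, timeout=timeout, allow_redirects=False)
        except requests.RequestException:
            continue  # skip unreachable paths / connection errors
        if resp.status_code != 404:
            found.append((resp.status_code, url))
    return found

# Example (hypothetical target):
# for status, url in enum_dirs('http://target.example'):
#     print(status, url)

--------------------------------------------------------------------------------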