├── .github
└── FUNDING.yml
├── CHANGELOG.md
├── Dockerfile
├── LICENSE
├── README.md
├── __init__.py
├── config.py
├── dichawk.txt
├── hawkscan.py
├── modules
├── __init__.py
├── after_fuzzing
│ ├── __init__.py
│ ├── scan_errors.py
│ └── send_notify.py
├── auto_update.py
├── before_fuzzing
│ ├── __init__.py
│ ├── before_run.py
│ ├── check_cms.py
│ ├── check_socketio.py
│ ├── check_subdomain.py
│ └── google_dorks.py
├── during_fuzzing
│ ├── bypass_forbidden.py
│ ├── check_backup.py
│ └── parsing_html.py
├── manage_dir.py
├── output.py
├── proxy
│ ├── check_proxy.py
│ └── test_proxies.py
├── resume.py
├── terminal_size.py
└── waf
│ ├── bypass_waf.py
│ └── detect_waf.py
├── report
├── __init__.py
├── creat_report.py
└── html
│ ├── fonts
│ ├── charte.css
│ └── page.css
│ ├── img
│ ├── share.png
│ └── share.svg
│ └── scripts
│ └── scripts.js
├── requirements.txt
├── run_modules.py
├── setup.py
├── sites
└── your scan website.txt
├── static
├── banner.py
└── logo_hawkscan.jpeg
├── sublist
└── _scan_of_sublist3r.txt
└── tools
└── Sublist3r
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── requirements.txt
├── setup.py
├── subbrute
├── __init.py__
├── names.txt
├── resolvers.txt
└── subbrute.py
└── sublist3r.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: c0dejump
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | Changelog:
2 | ----------
3 |
4 | - 2.6
5 | ---------
6 | Updated: dichawk endpoints
7 | New: lightmode option
8 | ---------
9 |
10 | - 2.5
11 | ---------
12 | Updated: half of the code has been refactored/reorganized
13 | Updated: dichawk endpoints
14 | Updated: JS sensitives endpoints
15 | Updated: Exclude option (referer to Readme)
16 | Fixed: Any error with exclude option
17 | New: Download all js link with specific keyword found
18 | ---------
19 |
20 | - 2.3.1
21 | ---------
22 | Updated: dichawk endpoints
23 | Updated: JS sensitives endpoints
24 | ---------
25 |
26 | - 2.3
27 | ---------
28 | Started: Add proxy function (--proxy proxy.lst) [In progress]
29 | New: Adding path disclosure recon
30 | New: Detecting potential hidden directory
31 | Updated: dichawk endpoints
32 | ---------
33 |
34 | - 2.2
35 | ---------
36 | New: Wiki created !
37 | Fixed: any bugs and redesign titles pre-scan
38 | Changed: waybacktool module deleted by a personal module (less slow)
39 | ---------
40 |
41 | - 2.1
42 | ---------
43 | New: Option -nfs (not first step) to pass the first recon steps
44 | Fixed: Any bug with the download file and bypass forbidden when differents options
45 | New: Google cse search (buckets...)
46 | New: Add LICENSE & PyPI version and stats
47 | ---------
48 |
49 | - 2.0
50 | ---------
51 | Redefining priorities/tasks
52 | New: Let's debug certificate subdomains results
53 | New: Display the current bypass number during scan ("CB:")
54 | New: Easter egg for xmas :)
55 | Updated: Fix any bugs
56 |
57 | ---------
58 |
59 | - 1.9.9
60 | ---------
61 | New: Cloudflare protection detection in live
62 | Updated: Bugs correction with backup extension scan
63 | ---------
64 |
65 | - 1.9.8
66 | ---------
67 | Updated: New ways to bypass forbidden (Thanks @yunem_se !)
68 | Updated: New socketio endpoints
69 | Updated: New words in dichawk.txt
70 | ---------
71 |
72 | - 1.9.7
73 | ---------
74 | Updated: New logo by @__PH4NTOM__!
75 | ---------
76 |
77 | - 1.9.6
78 | ---------
79 | Fixed: Any bugs
80 | Updated: Little style modifications
81 | ---------
82 |
83 | - 1.9.5
84 | ---------
85 | Fixed: A pass on the source code, more speedy
86 | ---------
87 |
88 | - 1.9.4
89 | ---------
90 | Added: Function "vim_backup" to test backup vim during scan when any -b option
91 | ---------
92 |
93 | - 1.9.3
94 | ---------
95 | Updated: New banner
96 | Fixed: Multiple website with file which contain url
97 |
98 | - 1.9.2:
99 | ---------
100 | Updated: code review & optimisation
101 | Updated: Multiple new paths/words in dichawk.txt
102 | ---------
103 |
104 | - 1.9.1
105 | ---------
106 | Added: Option "-f" for scanning multiple website to one time. Ex: ```-f urls_file.txt```
107 | Updated: Clean code & directory
108 | ---------
109 |
110 | - 1.9
111 | ---------
112 | Fixed: Fixed percentage & line count bug during scan
113 | Added: Display errors number in live during scan
114 | ---------
115 |
116 | - 1.8.8
117 | ---------
118 | Added: Output file format function. Available formats: json, csv, txt. Ex: ```-of json```, ```-o /tmp/Target -of csv```
119 | ---------
120 |
121 | - 1.8.7
122 | ---------
123 | Fixed: Reduction of false positives number
124 | Added: Header of Hawkscan when your typing "head hawkscan.py"
125 | ---------
126 |
127 | - 1.8.6
128 | ---------
129 | Fixed: Any bugs: Thread modification, header parameters, bypass forbidden & any others...
130 | Added: google module in requirements/setup
131 | Updated: Deleted degoogle modules/script, google dork works now with the "googlesearch" module
132 | Updated: A little style modification
133 | Updated: Default thread now 30
134 | ---------
135 |
136 | - 1.8.5:
137 | ---------
138 | Added: A new restriction bypass feature, that test "post,put,patch & option" requests methods on a restriction page
139 | Fixed: The little style display problems
140 | ---------
141 |
142 | - 1.8.4
143 | ---------
144 | Fixed: Better display of live lines
145 | Added: A new file to manage the modules to launches
146 | ---------
147 |
148 | - 1.8.3
149 | ---------
150 | Fixed: Bug in socketio module
151 | Fixed: Add size bytes during th error scan
152 | Added: Words in wordlist
153 | ---------
154 |
155 | - 1.8.2
156 | ---------
157 | Updated: New logo made by Cyber_Ph4ntoM
158 | Updated: Code review
159 | Updated: Add multiple words in dichawk.txt
160 | ---------
161 |
162 | - 1.8.1
163 | ---------
164 | Updated: Style refont
165 | Fixed: Multiple bugs
166 | ---------
167 |
168 | - 1.8
169 | ---------
170 | Update: you can choose your backup file when you scan, EX:
171 | -b: default all backup file
172 | -b .bck, .old, .bak: just these backups
173 |
174 | - 1.7.9
175 | ---------
176 | Updated: dico.txt → dichawk.txt (dico.txt it was to simple for a personal dictionary :)
177 | Fixed: Bug on parsing JS
178 | ---------
179 |
180 | - 1.7.8
181 | ---------
182 | Fixed: Bug on the exclude function
183 | Fixed: Bug on the bypass forbidden function
184 | Added: News header value in bypass forbidden function
185 | ---------
186 |
187 | - 1.7.7
188 | ---------
189 | Updated: Refont helping style
190 | Added: Notify when scan completed (Only work on Linux)
191 | ---------
192 |
193 | - 1.7.6
194 | ---------
195 | Added: New function added: check_socketio(), to check the websocket requests during the first step. That can potentially leak any informations (path, message, users...). Adding too in JS verification to check if any endpoint look like socketio.
196 | Fixed: Reducted false positive number
197 | ---------
198 |
199 | - 1.7.5
200 | ---------
201 | Deleted: WhoIs function (useless)
202 | Updated: Style refont
203 | ---------
204 |
205 | - 1.7.4
206 | ---------
207 | Updated: Dockerfile
208 | Added: A resume of commands (url, threads...) during the begin scan
209 | ---------
210 |
211 | - 1.7.3
212 | ---------
213 | Updated: setup.py
214 | ---------
215 |
216 | - 1.7.2
217 | ---------
218 | Updated: Add new content in google dork, dico and javascript recon
219 | Updated: Real setup.py :)
220 | ---------
221 |
222 | - 1.7.1
223 | ---------
224 | Fixed: Any bugs
225 | Modified: Raw output, modification for any integration
226 | ---------
227 |
228 | - 1.7
229 | ---------
230 | Added: Function "check_backup_domain" added, test before start "domain.zip/rar etc.."
231 | Added: New option (-ffs) to force the first step of scan during the first running (waf, vhosts, wayback etc...)
232 | ---------
233 |
234 | - 1.6.9
235 | ---------
236 | Added: multiple excludes (only number or bytes number), exemple: --exclude 403,500 // --exclude 1337b,500...
237 | ---------
238 |
239 | - 1.6.8
240 | ---------
241 | Fixed: Bugs
242 | Modified: function "check_ip" which check if the IP o the website was different by domain name => Now: check_vhost
243 | ---------
244 |
245 | - 1.6.7
246 | ---------
247 | Added: Option --auth for HTTP authentification. Exemple --auth admin:admin
248 | Fixed: Bugs
249 | ---------
250 |
251 | - 1.6.6
252 | ---------
253 | Faster
254 | Updated: Less FP with bypass forbidden function
255 | Updated: Less errors in princpal script
256 | Updated: Little modifications in report
257 | Fixed: Style and system bugs
258 | Added: Auto resize relative to window
259 | ---------
260 |
261 | - 1.6
262 | ---------
263 | Added: "degoogle" tools for google dork queries, more fast and less of google captcha
264 | Updated: Code optimization
265 | ---------
266 |
267 | - 1.5.9.1
268 | ---------
269 | Fixed: System bugs
270 | ---------
271 |
272 | - 1.5.9
273 | ---------
274 | Started: Code optimization
275 | Updated: Changed changelog.md + Readme.md
276 | ---------
277 |
278 | - 1.5.8
279 | ---------
280 | Updated: file and directory function management
281 | A directory of the website is automatically create like: "website_date/".
282 | And if the directory exist an other directory is created like: "website date_hour/"
283 | ---------
284 |
285 | - 1.5.7
286 | ---------
287 | Added: Function to try if the website IP is different of the website domain
288 | Updated: dico.txt
289 | ---------
290 |
291 | - 1.5.6
292 | ---------
293 | Fixed: system bugs
294 | Added: New exclude type, now you can exclude a number of byte with "b" at the end in your number, like: --exclude 550b
295 | ---------
296 |
297 | - 1.5.5
298 | ---------
299 | Added: Google dork requests at the first scan
300 | ---------
301 |
302 | - 1.5.4
303 | ---------
304 | Added: Option "--js" for scan and analyse JS
305 | Deleted: "dryscrape" librarie for the moment, many error with it, I'll remake it later
306 | ---------
307 |
308 | - 1.5.3
309 | ---------
310 | Added: Setup.py, you can just doing "python setup.py"
311 | ---------
312 |
313 | - 1.5.2
314 | ---------
315 | Added: Try differents bypass for 403 code error
316 | Updated: dico.txt
317 | ---------
318 |
319 | - 1.5.1
320 | ---------
321 | New banner
322 | Fix bugs
323 | ---------
324 |
325 | - 1.5
326 | ---------
327 | ~~Auto activate JS during scan if the webite is full JS (website 2.0)~~
328 | ---------
329 |
330 | - 1.4
331 | ---------
332 | Add: Dockerfile
333 | ---------
334 |
335 | - 1.3.3
336 | ---------
337 | Add: New function which try automatically if it's possible scanning with "localhost" host
338 | ---------
339 |
340 | - 1.3.2
341 | ---------
342 | Replace: "--cookie" by "-H" for different header values; ex: -H "Host:test" // -H "Authentification:cookie" (not space after ":" or "=")
343 | ---------
344 |
345 | - 1.3.1
346 | ---------
347 | Code review
348 | New logo
349 | Adding Changelog
350 | ---------
351 |
352 | - 1.2
353 | ---------
354 | Adding news words in dico.txt (old dico_extra.txt)
355 | Adding extensions in backup check test function, option -b (.json, .xml, .bkp...) => very long
356 | Test bypass of waf rate limited in real time (X-Originating-IP...)
357 | Exclude response http code (--exclude 403)
358 | Filter on response http code in report
359 | ---------
360 |
361 | - 1.0
362 | ---------
363 | Better management Threads
364 | Add news words in dico_extra.txt
365 | New style for the report
366 | Errors log management
367 | ---------
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.7-alpine
2 | RUN apk update
3 | RUN apk add libffi libffi-dev gcc g++ make openssl-dev openssl openssh curl
4 | WORKDIR /root
5 | COPY . /root/HawkScan
6 | WORKDIR /root/HawkScan
7 |
8 | # Get Rust for python cryptography
9 | RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
10 | ENV PATH="/root/.cargo/bin:${PATH}"
11 |
12 | RUN pip install -r requirements.txt && \
13 | rm -rf .git && \
14 | apk del libffi-dev gcc g++ make openssl-dev curl && \
15 | rm -rf /var/cache/apk/*
16 | ENTRYPOINT ["python", "hawkscan.py"]
17 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 c0dejump
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # HawkScan
2 |
3 | [](https://pypi.org/project/hawkscan)
4 | [](https://pypistats.org/packages/hawkscan)
5 | [](https://twitter.com/intent/follow?screen_name=c0dejump)
6 |
7 |
8 | 
9 |
10 | Security Tool for Reconnaissance and Information Gathering on a website. (python 3.x)
11 |
12 | - [News](https://github.com/c0dejump/HawkScan/#News)
13 | - [Installation](https://github.com/c0dejump/HawkScan/#Installation)
14 | - [Special features](https://github.com/c0dejump/HawkScan/#Special-features)
15 | - [TODO](https://github.com/c0dejump/HawkScan/#todo)
16 | - [Usage](https://github.com/c0dejump/HawkScan/#usage)
17 | - [Examples](https://github.com/c0dejump/HawkScan/#examples)
18 | - [Thanks](https://github.com/c0dejump/HawkScan/#thanks)
19 | - [Donations](https://github.com/c0dejump/HawkScan/#donations)
20 | - [Tools used](https://github.com/c0dejump/HawkScan/#tools-used)
21 | - [Wiki](https://github.com/c0dejump/HawkScan/wiki)
22 |
23 | # News v2.x
24 | - Add proxy function
25 | - Redefining priorities/tasks
26 | - Let's debug certificate subdomains results
27 | - Display the current bypass number during scan ("CB:")
28 | - Easter egg for xmas :)
29 | - Option -nfs (not first step) to pass the first recon steps
30 | - Google CSE before scan
31 | - Creation of WIKI
32 | - Detecting potential path disclosure into html webpage
33 | - Detecting potential hidden directory
34 | *(for more details go on CHANGELOG.md)*
35 |
36 | # Installation
37 | ```
38 |
39 | - git clone https://github.com/c0dejump/HawkScan.git && sudo python3 HawkScan/setup.py install
40 |
41 | - pip(3) install -r requirements.txt
42 |
43 | - python3 -m pip install -r requirements.txt
44 |
45 | ```
46 |
47 | # Special features
48 |
49 | ### Before scan
50 | - [x] Check header information
51 | - [x] Check DNS information
52 | - [x] Check Github
53 | - [x] CMS detection + version and vulns
54 | - [x] Check in waybackmachine
55 | - [x] Check if DataBase firebaseio existe and accessible
56 | - [x] Testing if it's possible scanning with "localhost" host
57 | - [x] Check Google Dork
58 | - [x] Check Host IP
59 | - [x] Check backup domain name (ex: www.domain.com/domain.zip)
60 | - [x] Check socketio connection
61 | - [x] cse google search (buckets...)
62 |
63 | ### During - After scan
64 | - [x] Test backup/old file on all the files found (index.php.bak, index.php~ ...)
65 | - [x] Backup system (if the script stopped, it take again in same place)
66 | - [x] WAF detection and Response error to WAF + Testing bypass it
67 | - [x] Option --exclude to exclude page, code error, bytes
68 | - [x] Option rate-limit if app is unstable (--timesleep)
69 | - [x] Search S3 buckets in source code page
70 | - [x] Try differents bypass for 403/401 code error
71 | - [x] JS parsing and analysis (option --js)
72 | - [x] Auto resize relative to window
73 | - [x] Notify when scan completed (Only work on Linux)
74 | - [x] Multiple output format. Available formats: json, csv, txt
75 | - [x] Multiple website scanning
76 | - [x] Prefix filename (old_, copy of...)
77 | - [x] Detecting potential path disclosure into html webpage
78 |
79 |
80 | # TODO
81 | **P1 is the most important**
82 |
83 | [WIP] Multiple exclude like: --exclude 403,1337b [P1] [In progress] (see [Examples](https://github.com/c0dejump/HawkScan/#examples))
84 | [WIP] Anonymous routing through some proxy (http/s proxy list) [P1] [In progress]
85 | [WIP] Re-build resport scan [P1]
86 | [WIP] HExHTTP replace "header information" before scan
87 | - [ ] asyncio instead of threading ? [PX]
88 | - [ ] Add crt.sh to check potential hidden subdomain (with letdebug module ?) [PX]
89 | - [ ] Push results into DB [P2]
90 | - [ ] If re-scan a website with an existing folder, just doing a diff btw the scan to the folder (like) // interesting ? [P2]
91 | - [ ] Pre-run to check the waf sensitive (by proxy with 40 threads for exemple) // add proxy funtion [P2]
92 | - [ ] Check source code and verify leak or sensitive data in Github // Other tool ? [P3]
93 | - [ ] Scan API endpoints/informations leaks [P3]
94 |
95 | # Usage
96 |
97 | ```
98 |
99 | usage: hawkscan.py [-h] [-u URL] [-f FILE_URL] [-t THREAD] [--exclude EXCLUDE [EXCLUDE ...]] [--auto] [--update] [-w WORDLIST] [-b [BACKUP ...]] [-p PREFIX] [-H HEADER_] [-a USER_AGENT] [--redirect] [--auth AUTH] [--timesleep TS] [--proxie PROXIE] [-r] [-s SUBDOMAINS] [--js] [--nfs] [--ffs] [--notify] [-o OUTPUT] [-of OUTPUT_TYPE]
100 |
101 | ```
102 |
103 | ```
104 | > General:
105 | -u URL URL to scan [required]
106 | -f FILE_URL file with multiple URLs to scan
107 | -t THREAD Number of threads to use for URL Fuzzing. Default: 30
108 | --exclude EXCLUDE [EXCLUDE ...] Exclude page, response code, response size. (Exemples: --exclude 500,337b)
109 | --auto Automatic threads depending response to website. Max: 30
110 | --update For automatic update
111 | --lightmode For a just simple fuzzing 1 request per second & a new session for each request
112 |
113 |
114 | > Wordlist Settings:
115 | -w WORDLIST Wordlist used for Fuzzing the desired webite. Default: dichawk.txt
116 | -b Adding prefix/suffix backup extensions during the scan. (Exemples: exemple.com/~ex/, exemple.com/ex.php.bak...) /!\ beware, take more longer
117 | -p PREFIX Add prefix in wordlist to scan
118 |
119 | > Request Settings:
120 | -H HEADER_ Modify header. (Exemple: -H "cookie: test")
121 | -a USER_AGENT Choice user-agent. Default: Random
122 | --redirect For scan with redirect response (301/302)
123 | --auth AUTH HTTP authentification. (Exemples: --auth admin:admin)
124 | --timesleep TS To define a timesleep/rate-limit if app is unstable during scan.
125 |
126 | > Tips:
127 | -r Recursive dir/files
128 | -s SUBDOMAINS Subdomain tester
129 | --js For try to found keys, token, sensitive endpoints... in the javascript page
130 | --nfs Not the first step of scan during the first running (waf, vhosts, wayback etc...)
131 | --ffs Force the first step of scan during the first running (waf, vhosts, wayback etc...)
132 | --notify To receive a notification when the scan is finished (only works on Linux)
133 |
134 | > Export Settings:
135 | -o OUTPUT Output to site_scan.txt (default in website directory)
136 | -of OUTPUT_TYPE Output file format. Available formats: json, csv, txt
137 | ```
138 |
139 | # Examples
140 |
141 | ```
142 | //Basic
143 | python hawkscan.py -u https://www.exemple.com/
144 |
145 | //With specific dico
146 | python hawkscan.py -u https://www.exemple.com/ -w dico_extra.txt
147 |
148 | //with 30 threads
149 | python hawkscan.py -u https://www.exemple.com/ -t 30
150 |
151 | //With backup files scan
152 | python hawkscan.py -u https://www.exemple.com/ -b
153 |
154 | //With an exclude page
155 | python hawkscan.py -u https://www.exemple.com/ --exclude profile.php
156 |
157 | //With an exclude response code
158 | python hawkscan.py -u https://www.exemple.com/ --exclude 403
159 |
160 | //With an exclude bytes number
161 | python hawkscan.py -u https://www.exemple.com/ --exclude 1337b
162 |
163 | //With two excludes type
164 | python hawkscan.py -u https://www.exemple.com/ --exclude 1337b,403
165 |
166 | ```
167 |
168 | # Thanks
169 | Layno (https://github.com/Clayno/) [Technical helper]
170 | Sanguinarius (https://twitter.com/sanguinarius_Bt) [Technical helper]
171 | Jamb0n69 (https://twitter.com/jamb0n69) [Technical helper]
172 | Cyber_Ph4ntoM (https://twitter.com/__PH4NTOM__) [Beta tester & Logo Graphist]
173 |
174 |
175 | # Donations
176 |
177 | https://www.paypal.me/c0dejump
178 |
179 | Or if you want to offer me a coffee :)
180 |
181 | https://ko-fi.com/c0dejump
182 |
183 |
184 | ## Tools used
185 |
186 | This script use "WafW00f" to detect the WAF in the first step (https://github.com/EnableSecurity/wafw00f)
187 |
188 | This script use "Sublist3r" to scan subdomains (https://github.com/aboul3la/Sublist3r)
189 |
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/__init__.py
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
# coding: utf-8

# ANSI-colored status tags used as prefixes throughout HawkScan's console output.
WARNING = "\033[31m[!] \033[0m"    # red    - warning
SERV_ERR = "\033[33m[!] \033[0m"   # yellow - server error
FORBI = "\033[31m[x] \033[0m"      # red    - forbidden response
PLUS = "\033[32m[+] \033[0m"       # green  - positive finding
INFO = "\033[34m[i] \033[0m"       # blue   - informational
LESS = "\033[33m[-] \033[0m"       # yellow - negative / minor result
LINE = "\033[34m\u2500\033[0m" * 40 + "\n"  # blue horizontal separator (40 box-drawing dashes)
BACK = "\033[36m[B] \033[0m"       # cyan   - backup file finding
S3 = "\033[33m[S3] \033[0m"        # yellow - S3 bucket finding
EXCL = "\033[33m[E] \033[0m"       # yellow - excluded result
BYP = "\033[34m[BYPASSED] \033[0m" # blue   - restriction bypass succeeded
JS = "\033[33m[JavaScript] \033[0m"# yellow - JavaScript analysis finding
WAF = "\033[36m[WAF] \033[0m"      # cyan   - WAF detection
INFO_MOD = "\033[34m\u251c \033[0m"# blue   - sub-item branch marker

# Default backup extensions/suffixes tried when scanning with the "-b" option
EXT_B = ['.db', '.swp', '.yml', '.xsd', '.xml', '.wml', '.bkp', '.rar', '.zip', '.7z', '.bak', '.bac', '.BAK', '.NEW', '.old',
        '.bkf', '.bok', '.cgi', '.dat', '.ini', '.log', '.key', '.conf', '.env', '_bak', '_old', '.bak1', '.json', '.lock',
        '.save', '.atom', '.action', '_backup', '.backup', '.config', '?stats=1', '/authorize/', '.md', '.gz',
        '.txt', '~', '%01', '(1)', '.sql.gz', '.tgz', 'tar.gz', '.gzip', '.tar', 'tar.bz2', '.war', '.jar', '.cab']

# Reduced extension list for quicker backup checks
MINI_B = ['.bkp', '.bak', '.old', '_bak', '_old', '~', '.bak1', '_backup', '.backup']

# Archive-type extensions (used e.g. for domain.zip/rar style checks)
ARCH = ['.sql.gz', '.tgz', 'tar.gz', '.gzip', '.rar', '.zip', '.7z']
--------------------------------------------------------------------------------
/modules/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/modules/__init__.py
--------------------------------------------------------------------------------
/modules/after_fuzzing/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/modules/after_fuzzing/__init__.py
--------------------------------------------------------------------------------
/modules/after_fuzzing/scan_errors.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | #modules in standard library
5 | import requests
6 | import sys, os, re
7 | from config import PLUS, WARNING, INFO, LESS, LINE, FORBI, BACK, EXCL, SERV_ERR, BYP, WAF, EXT_B, MINI_B
8 |
def scan_error(directory, forbi, filterManager):
    """
    Re-check every URL that errored out during the fuzzing phase.

    Reads <directory>/errors.txt (one URL per line), requests each link again
    and prints those that now answer with a status code other than 404/406.
    errors.txt is removed when no entry needed fixing.

    :param directory: scan output directory containing errors.txt
    :param forbi: forbidden-page marker forwarded to the exclude filter
    :param filterManager: class (not an instance) exposing check_exclude_page()

    NOTE(review): auth, exclude, req_p, s, HOUR, bp_current and output_scan are
    neither defined in this module nor imported — presumably they are globals
    injected by the caller; confirm, otherwise this raises NameError at runtime.
    """
    filterM = filterManager()  # instantiate the exclude-filter helper

    error_count = 0
    errors_stat = False  # set True as soon as one link is still in error
    print(LINE)
    print("{} Error check".format(INFO))
    print(LINE)
    path_error = directory + "/errors.txt"
    if os.path.exists(path_error):
        # First pass: count the recorded links for the summary line.
        with open(path_error) as read_links:
            for ec in read_links.read().splitlines():
                error_count += 1
        # Second pass: replay each request.
        with open(path_error) as read_links:
            print("{}[{}] Errors detected".format(INFO, error_count))
            for error_link in read_links.read().splitlines():
                try:
                    # Replay the request, adding HTTP basic auth when configured.
                    req = requests.get(error_link, verify=False, timeout=10) if not auth else requests.get(error_link, verify=False, auth=(auth.split(":")[0], auth.split(":")[1]), timeout=10)
                    len_req_error = len(req.content)
                    if exclude:
                        # NOTE(review): req_p is never assigned in this function;
                        # this branch looks like leftover code — verify intent.
                        if type(req_p) == int:
                            pass
                        else:
                            cep = filterM.check_exclude_page(s, req, error_link, directory, forbi, HOUR, bp_current)
                            if cep:
                                error_status = req.status_code
                                # 404/406 are considered resolved, skip them.
                                if error_status in [404, 406]:
                                    pass
                                else:
                                    print("{}[{}] [{}b] {}".format(INFO, req.status_code, len_req_error, error_link))
                                    output_scan(directory, error_link, len_req_error, req.status_code)
                                    errors_stat = True
                    else:
                        error_status = req.status_code
                        # 404/406 are considered resolved, skip them.
                        if error_status in [404, 406]:
                            pass
                        else:
                            print("{}[{}] [{}b] {}".format(INFO, req.status_code, len_req_error, error_link))
                            output_scan(directory, error_link, len_req_error, req.status_code)
                            errors_stat = True
                except Exception:
                    # Best-effort: show the failing link transiently, keep going.
                    pass
                    #traceback.print_exc()
                    sys.stdout.write("\033[34m[i] {}\033[0m\r".format(error_link))
                    sys.stdout.write("\033[K")
        if errors_stat == False:
            # NOTE(review): duplicated word in the message ("error error").
            print("{} Nothing error error need to be fixed".format(PLUS))
            os.system("rm {}".format(path_error))
    else:
        print("{} Nothing errors need to be fixed".format(PLUS))
--------------------------------------------------------------------------------
/modules/after_fuzzing/send_notify.py:
--------------------------------------------------------------------------------
1 |
2 | from notifypy import Notify
3 |
4 |
def notify_scan_completed():
    """
    notify_scan_completed: Emit a desktop notification announcing that the
    scan is finished (only works on Linux).
    """
    note = Notify()
    note.title = "Hawkscan"
    note.message = "Scan completed"
    note.send()


if __name__ == '__main__':
    notify_scan_completed()
--------------------------------------------------------------------------------
/modules/auto_update.py:
--------------------------------------------------------------------------------
1 | import os
2 |
def auto_update():
    """
    auto_update: check the upstream git repository for a newer HawkScan
    version and, when one is found, re-clone the project one directory up.

    Side effects: runs `git pull`, may delete and re-clone the HawkScan
    directory, and creates/removes a temporary git_status.txt file.
    """
    # Imported locally: the original referenced INFO without any import,
    # which raised NameError as soon as the function ran.
    from config import INFO

    updated = False
    print("{}Checking update...".format(INFO))
    # stdout of `git pull` is captured in git_status.txt, stderr discarded.
    # (The original chained `> /dev/null 2>&1 > git_status.txt`, which nets
    # out to the same thing but reads as if stdout were thrown away.)
    os.system("git pull origin master 2> /dev/null > git_status.txt")
    try:
        with open("git_status.txt", "r") as gs:
            # Any output line other than "Already up to date" means the pull
            # fetched something new.
            for line in gs:
                if "Already up to date" not in line:
                    updated = True
        if updated:
            print("{}A new version has been downloaded\n".format(INFO))
            os.system("cd ../ && rm -rf HawkScan && git clone https://github.com/c0dejump/HawkScan.git")
        else:
            print("{}No update found".format(INFO))
    finally:
        # Always clean up the temporary status file, even on error
        # (the original left it behind if the open/read raised).
        if os.path.exists("git_status.txt"):
            os.remove("git_status.txt")
--------------------------------------------------------------------------------
/modules/before_fuzzing/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/modules/before_fuzzing/__init__.py
--------------------------------------------------------------------------------
/modules/before_fuzzing/before_run.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from bs4 import BeautifulSoup
3 | import json
4 | import sys, re, os
5 | import ssl
6 | import socket
7 | import traceback
8 | from requests.exceptions import Timeout
9 | import time
10 | # External
11 | from config import PLUS, WARNING, INFO, LESS, LINE, FORBI, BACK, INFO_MOD
12 |
13 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
14 |
15 | class before_start:
16 |
17 |
18 | def get_header(self, url, directory):
19 | """Get header of website (cookie, link, etc...)"""
20 | r = requests.get(url, allow_redirects=False, verify=False)
21 | print("\033[36m HEADER\033[0m")
22 | print(LINE)
23 | print(" {} \n".format(r.headers).replace(',','\n'))
24 | print(LINE)
25 | with open(directory + '/header.csv', 'w+') as file:
26 | file.write(str(r.headers).replace(',','\n'))
27 |
28 |
29 | def gitpast(self, url):
30 | """
31 | Github: check github informations
32 | """
33 | print("\033[36m Github \033[0m")
34 | print(LINE)
35 | url = url.split(".")[1] if "www" in url else url.split("/")[2]
36 | url = "{}".format(url)
37 | print("search: {}\n".format(url))
38 | types = ["Commits", "Issues", "Repositories", "Topics", "Wikis", "Users", "Code"]
39 | try:
40 | for t in types:
41 | github = "https://github.com/search?q={}&type={}".format(url, t)
42 | req = requests.get(github, verify=False)
43 | soup = BeautifulSoup(req.text, "html.parser")
44 | search = soup.find('a', {"class":"menu-item selected"})
45 | if search:
46 | for s in search.find("span"):
47 | print(" {}{}: {}".format(INFO_MOD, t, s))
48 | else:
49 | print(" {}{}: not found".format(INFO_MOD, t))
50 | except:
51 | print("{}You need connection to check the github".format(WARNING))
52 | print("\n" + LINE)
53 |
54 |
    def get_dns(self, url, directory):
        """
        Print TLS-certificate-derived DNS information (subject, SAN entries,
        serial number) for the target and save the raw certificate dict to
        <directory>/dns_info.csv.

        :param url: target URL (scheme decides the port: 443 for https, 80 otherwise)
        :param directory: scan output directory for dns_info.csv
        """
        port = 0
        print("\033[36m DNS information \033[0m")
        print(LINE)
        try:
            # Reduce the URL to a bare hostname and pick the matching port.
            if "https" in url:
                url = url.replace('https://','').replace('/','')
                port = 443
            else:
                url = url.replace('http://','').replace('/','')
                port = 80
            # TLS handshake just to retrieve the peer certificate.
            # NOTE(review): on port 80 this wraps a plaintext port in TLS and
            # will fail into the except branch — presumably intentional
            # best-effort; confirm.
            context = ssl.create_default_context()
            conn = context.wrap_socket(socket.socket(socket.AF_INET), server_hostname=url)
            conn.connect((url, port))
            cert = conn.getpeercert()
            print(" \u251c Organization: {}".format(cert['subject']))
            print(" \u251c DNS: {}".format(cert['subjectAltName']))
            print(" \u251c SerialNumber: {}".format(cert['serialNumber']))
            conn.close()
            with open(directory + '/dns_info.csv', 'w+') as file:
                file.write(str(cert).replace(',','\n').replace('((','').replace('))',''))
        except:
            # Print exception type (stripped from "<class '...'>" form) and message.
            erreur = sys.exc_info()
            typerr = u"%s" % (erreur[0])
            typerr = typerr[typerr.find("'")+1:typerr.rfind("'")]
            print(typerr)
            msgerr = u"%s" % (erreur[1])
            print(msgerr + "\n")
        print(LINE)
85 |
86 |
    def letsdebug(self, url):
        """
        letsdebug: Get certificate of the website and potentialy found old certificate with old subdomain

        Submits the apex domain to letsdebug.net, polls the debug page until
        the check finishes, then extracts the bracketed hostname lists from the
        RateLimit-Debug section and prints each unique entry.
        """
        print("\033[36m Let's Debug information \033[0m")
        print(LINE)
        try:
            wait_finish = True
            list_result = []
            string_result = ""
            # Drop the leading label (e.g. "www.") for 3-label hosts, keep the
            # host as-is otherwise.
            domain = ".".join(url.split("/")[2].split(".")[1:]) if len(url.split("/")[2].split(".")) == 3 else ".".join(url.split("/")[2].split(".")[0:])
            url_ld = "https://letsdebug.net/"
            print(" {} {}".format(INFO_MOD, domain))
            datas = {"domain":domain,"method":"http-01"}
            req = requests.post(url_ld, data=datas, allow_redirects=True, verify=False)
            # The POST redirects to the per-test result page; append debug flag.
            url_debug = "{}?debug=y".format(req.url)
            # Poll once per second until the test page stops saying "please wait".
            while wait_finish:
                res = requests.get(url_debug, verify=False)
                if "please wait" in res.text:
                    time.sleep(1)
                else:
                    wait_finish = False
            soup = BeautifulSoup(res.text, "html.parser")
            search = soup.find('div', {"id":"RateLimit-Debug"})
            if search:
                # Flatten the div's children into one string, then pull every
                # "[...]" group (certificate SAN lists) and de-duplicate.
                for s in search:
                    if s != None and s != "\n":
                        string_result += str(s)
                result = re.findall(r'\[.*?\]', string_result)
                for r in result:
                    r = r.replace("[","").replace("]","")
                    if r not in list_result:
                        list_result.append(r)
                for rl in list_result:
                    print(" {} {}".format(INFO_MOD, rl))
            else:
                print(" {} Nothing certificate subdomain found".format(INFO_MOD))
        except:
            # Best-effort OSINT step: any network/parse failure is ignored.
            pass
        print(LINE)
127 |
128 |
129 | def firebaseio(self, url):
130 | """
131 | Firebaseio: To check db firebaseio
132 | ex: --firebase facebook
133 | """
134 | get_domain = url.split("/")[2]
135 | parse_domain = get_domain.split(".")
136 | if not "www" in get_domain:
137 | dire = "{}-{}".format(parse_domain[0], parse_domain[1]) if len(parse_domain) > 2 else "{}".format(parse_domain[0])
138 | else:
139 | dire = "{}".format(parse_domain[1])
140 | print("\033[36m Firebaseio \033[0m")
141 | print(LINE)
142 | url = 'https://{}.firebaseio.com/.json'.format(dire.split(".")[0])
143 | print(" Target: {}\n".format(url))
144 | try:
145 | r = requests.get(url, verify=False).json()
146 | if 'error' in r.keys():
147 | if r['error'] == 'Permission denied':
148 | print(" {}{} seems to be protected".format(FORBI, url)) #successfully protected
149 | elif r['error'] == '404 Not Found':
150 | print(" {}{} not found".format(LESS, url)) #doesn't exist
151 | elif "Firebase error." in r['error']:
152 | print(" {}{} Firebase error. Please ensure that you spelled the name of your Firebase correctly ".format(WARNING, url))
153 | else:
154 | print(" {}{} seems to be vulnerable !".format(PLUS, url)) #vulnerable
155 | except AttributeError:
156 | '''
157 | Some DBs may just return null
158 | '''
159 | print("{} null return".format(INFO))
160 | except:
161 | print("Error with the requests, please do a manual check")
162 | pass
163 | print(LINE)
164 |
165 |
166 | def wayback_check(self, url, directory):
167 | """
168 | Wayback_check:
169 | Check in a wayback machine to found old file on the website or other things...
170 | """
171 | print("\033[36m Wayback \033[0m")
172 | print(LINE)
173 | print(url + "\n")
174 | url_wayb = "http://web.archive.org/cdx/search?url=*.{}/*&output=list&fl=original,statuscode&collapse=urlkey&filter=!statuscode:404".format(url)
175 | try:
176 | req_way = requests.get(url_wayb, verify=False)
177 | urls_w = req_way.text.split("\n")
178 | #print(urls_w)
179 | for url_w in urls_w:
180 | try:
181 | status_c = url_w.split(" ")[1]
182 | url_ext = url_w.split(" ")[0]
183 | if "css" not in url_ext and "jpg" not in url_ext and "png" not in url_ext and \
184 | "jpeg" not in url_ext and "svg" not in url_ext and "JPG" not in url_ext and "gif" not in url_ext and \
185 | "mov" not in url_ext and "mp4" not in url_ext and "woff" not in url_ext and "ttf" not in url_ext and \
186 | "eot" not in url_ext:
187 | if status_c in ['200', '403', '401', '301', '302']:
188 | with open("{}/wayback.txt".format(directory), "a+") as w:
189 | w.write("{} {}\n".format(url_ext, status_c))
190 | if status_c == "200":
191 | print(" \033[32m\u251c\033[0m {} \033[32m{}".format(url_ext, status_c))
192 | elif status_c in ['403', '401']:
193 | print(" \033[31m\u251c\033[0m {} \033[31m{}".format(url_ext, status_c))
194 | elif status_c in ['301', '302']:
195 | print(" \033[33m\u251c\033[0m {} \033[33m{}".format(url_ext, status_c))
196 | else:
197 | pass
198 | except:
199 | pass
200 |
201 | except:
202 | #traceback.print_exc()
203 | print(" {} An error occurred please check manually ".format(LESS))
204 | print(LINE)
205 |
206 |
207 | def check_localhost(self, url):
208 | """
209 | Check_localhost: Function which try automatically if it's possible scanning with "localhost" host for discovery other files/directories
210 | """
211 | list_test = ["127.0.0.1", "localhost"]
212 | localhost = False
213 | print("\033[36m Localhost host \033[0m")
214 | print(LINE)
215 | for lt in list_test:
216 | header = {"Host": lt}
217 | try:
218 | req = requests.get(url, headers=header, verify=False, timeout=10)
219 | if req.status_code == 200:
220 | print(" \033[32m\u251c\033[0m You can potentialy try bf directories with this option '-H \"Host:{}\"' ".format(lt))
221 | localhost = True
222 | else:
223 | pass
224 | except:
225 | pass
226 | if not localhost:
227 | print(" {} Not seem possible to scan with localhost host".format(LESS))
228 | print(LINE)
229 |
230 |
231 | def check_vhost(self, domain, url):
232 | """
233 | check_ip:
234 | Check the host ip if this webpage is different or not
235 | """
236 | print("\033[36m Vhosts misconfiguration \033[0m")
237 | print(LINE)
238 | try:
239 | req_index = requests.get(url, verify=False, timeout=10)
240 | len_index = len(req_index.content)
241 | retrieve_ip = False
242 | dom = socket.gethostbyname(domain)
243 | ips = ["https://{}/".format(dom), "http://{}/".format(dom), "http://www2.{}/".format(domain), "http://www3.{}/".format(domain), "https://www2.{}/".format(domain),
244 | "https://www3.{}/".format(domain)]
245 | for ip in ips:
246 | try:
247 | req_ip = requests.get(ip, verify=False, timeout=10)
248 | if req_ip.status_code not in [404, 403, 425, 503, 500, 400] and len(req_ip.content) != len_index:
249 | retrieve_ip = True
250 | print(" \033[32m\u251c\033[0m The host IP seem to be different, check it: {} ".format(ip))
251 | except:
252 | #print(" \033[33m\u251c\033[0m The host IP have a problem, check it manualy please: {} ".format(ip))
253 | pass
254 | if not retrieve_ip:
255 | print(" {} IPs do not appear to be different from the host".format(LESS))
256 | print(LINE)
257 | except:
258 | pass
259 |
260 |
261 | def check_backup_domain(self, domain, url):
262 | """
263 | check_backup_domain:
264 | Check the backup domain, like exemple.com/exemple.zip
265 | """
266 | print("\033[36m Domain backup \033[0m")
267 | print(LINE)
268 | backup_dn_ext = ["zip", "rar", "iso", "tar", "gz", "tgz", "tar.gz", "7z", "jar", "sql.gz", "log", "bk", "bak", "bck", "old", "conf"]
269 | found_bdn = False
270 | len_response = 0
271 | try:
272 | req_index = requests.get(url, verify=False, timeout=10)
273 | len_index = len(req_index.content)
274 | domain = domain.split('.')[1] if len(domain.split('.')) > 2 else domain.split('.')[0]
275 | print(" {}List of backup extension for domain {}: {}\nExemple: {}{}.zip\n".format(INFO, domain, backup_dn_ext, url, domain.split('.')[0]))
276 | for bdn in backup_dn_ext:
277 | url_dn_ext = "{}{}.{}".format(url, domain.split('.')[0], bdn)
278 | try:
279 | req_dn_ext = requests.get(url_dn_ext, verify=False, timeout=10)
280 | if req_dn_ext.status_code not in [404, 403, 401, 500, 400, 425] and len(req_dn_ext.content) not in range(len_index - 10, len_index + 10):
281 | if len(req_dn_ext.content) not in range(len_response - 10, len_response + 10):
282 | print(" {} {} found ({}b)".format(PLUS, url_dn_ext, len(req_dn_ext.text)))
283 | len_response = len(req_dn_ext.content)
284 | found_bdn = True
285 | except:
286 | pass
287 | except:
288 | pass
289 | if not found_bdn:
290 | print(" {} No backup domain name found".format(LESS))
291 | print(LINE)
292 |
293 |
294 |
295 | def test_timeout(self, url, first=False):
296 | """
297 | Test_timeout: just a little function for test if the connection is good or not
298 | """
299 | try:
300 | req_timeout = requests.get(url, timeout=30, verify=False, headers={'User-agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko'})
301 | except Timeout:
302 | print("{}Service potentialy Unavailable, The site web seem unavailable please wait...\n".format(WARNING))
303 | if first:
304 | next_step = input("Do you want continue ?: [y:N] ")
305 | if next_step in ["y", "Y"]:
306 | pass
307 | else:
308 | sys.exit()
309 | else:
310 | time.sleep(180)
311 | except:
312 | pass
--------------------------------------------------------------------------------
/modules/before_fuzzing/check_cms.py:
--------------------------------------------------------------------------------
1 | import json
2 | import requests
3 | from bs4 import BeautifulSoup
4 | from config import PLUS, WARNING, INFO, LESS, LINE
5 |
6 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
7 |
class check_cms:
    """Detect the target's CMS via the whatcms.org API and look up known CVEs."""

    def detect_cms(self, url, directory):
        """
        CMS:
        Detect if the website use a CMS

        Returns (name, version) when both are detected, otherwise
        (False, False). The outcome is also written to <directory>/cms.txt.
        """
        print("\033[36m CMS \033[0m")
        print(LINE)
        try:
            whatscms_url = "https://whatcms.org/APIEndpoint/Detect?key=1481ff2f874c4942a734d9c499c22b6d8533007dd1f7005c586ea04efab2a3277cc8f2&url={}".format(url)
            req = requests.get(whatscms_url, timeout=10, allow_redirects=False, verify=False)
            if "Not Found" in req.text or "Theme not detected" in req.text:
                with open(directory + "/cms.txt", "w+") as cms_write:
                    cms_write.write("this website does not seem to use a CMS")
                print(" {} This website does not seem to use a CMS \n".format(LESS))
                print(LINE)
                return False, False
            reqt = json.loads(req.text)
            result = reqt["result"].get("name")
            v = reqt["result"].get("version")
            if v:
                with open(directory + "/cms.txt", "w+") as cms_write:
                    cms_write.write("This website use {} {}".format(result, v))
                print(" {} This website use \033[32m{} {} \033[0m\n".format(PLUS, result, v))
                return result, v
            # CMS name found but no version.
            with open(directory + "/cms.txt", "w+") as cms_write:
                # Bug fix: the original passed (LESS, result) to a
                # single-placeholder format string, writing the LESS marker to
                # the file instead of the CMS name.
                cms_write.write("This website use {} but nothing version found".format(result))
            print(" {} This website use \033[32m{}\033[0m but nothing version found \n".format(PLUS, result))
            print(LINE)
            return False, False
        except Exception:
            # Narrowed from a bare `except:` so Ctrl+C still aborts.
            print(" {} You need connection to check the CMS".format(WARNING))
            print(LINE)
            return False, False


    def cve_cms(self, result, v):
        """
        CVE_CMS:
        Check CVE with cms and version detected by the function 'detect_cms'.
        """
        url_comp = "https://www.cvedetails.com/version-search.php?vendor={}&product=&version={}".format(result, v)
        req = requests.get(url_comp, allow_redirects=True, verify=False, timeout=10)
        if not "matches" in req.text:
            print(" {} CVE found ! \n{}{}\n".format(WARNING, WARNING, url_comp))
            if 'WordPress' in req.text:
                # For WordPress, also enumerate wpvulndb's per-version entries.
                version = v.replace('.','')
                site = "https://wpvulndb.com/wordpresses/{}".format(version)
                req = requests.get(site, verify=False)
                soup = BeautifulSoup(req.text, "html.parser")
                search = soup.find_all('tr')
                if search:
                    for p in search:
                        dates = p.find("td").text.strip()
                        detail = p.find("a").text.strip()
                        print(" {}{} : {}".format(WARNING, dates, detail))
                else:
                    print(" {} Nothing wpvunldb found \n".format(LESS))
            # NOTE(review): the original had a second
            # `elif 'WordPress' in req.text` branch with the exact same
            # condition as the `if` above — unreachable dead code, removed.
        else:
            print(" {} Nothing CVE found \n".format(LESS))
            print(LINE)
--------------------------------------------------------------------------------
/modules/before_fuzzing/check_socketio.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #Thanks Jamb0n69 !
3 |
4 | import socketio
5 | import time
6 | import os, sys
7 | import traceback
8 | import argparse
9 | import json
10 |
11 | # External
12 | from config import PLUS, WARNING, INFO, LESS, LINE, FORBI, BACK
13 |
14 |
15 | socketio_paths = [
16 | "socket.io", "socketio", "io", "socket", "signalr", "xmpp-websocket", "websocket", ".ws", "ws"
17 | ]
18 |
19 |
class check_socketio:
    """
    check_socketio: Check socketio connection without authentification. Possible found message, logs or other traces
    """

    # Shared client; reconnection disabled so a failing endpoint fails fast.
    sio = socketio.Client(reconnection=False)
    # NOTE(review): not used within this class — presumably kept for callers.
    dynamic_function_number = 0



    def connect(self, url, path):
        """Try to open a Socket.IO connection on url + path.

        Returns True on success, the raised exception when the endpoint
        answered but refused, or False when nothing worked.
        """
        try:
            #print(url+path) #DEBUG
            self.sio.connect(url, socketio_path=path)
            return True
        except Exception as e:
            #print(e) #DEBUG
            if "www." in url:
                # Bug fix: the original discarded the result of this retry and
                # always returned the first exception, so a retry that
                # succeeded without "www." was still reported as a failure.
                retry = self.connect(url.replace("www.", ""), path)
                return True if retry is True else e
            return False

    def disconnect(self):
        # Best-effort teardown; errors on close are irrelevant here.
        try:
            self.sio.disconnect()
        except:
            pass


    def run_socketio(self, url, poutput):
        """
        run_socketio:
        Try socketio connection

        Probes every known socket path; on success prints a ready-to-use
        wscat command. With poutput=True, failures are printed too.
        """
        found_socket = False
        for path in socketio_paths:
            connect = self.connect(url, path)
            if type(connect) == bool and connect:
                print(" {} {}{} found !".format(PLUS, url, path))
                domain = url.split("/")[2] if not "www" in url else ".".join(url.split("/")[2].split(".")[1:])
                print(" {} Try this \"\033[36msudo apt install npm -y && npx wscat -c ws://{}/socket.io/?transport=websocket\033[0m\" \n If you have a 30X redirection you can try with 'wss://'".format(INFO, domain))
                self.disconnect()
                found_socket = True
            elif not found_socket and poutput:
                print(" \033[33m\u251c \033[0m {}{}: {}".format(url, path, connect))
        if not found_socket:
            print(" \033[33m{}\033[0m Nothing Socketio found".format(LESS))


    def main_socketio(self, url):
        """Entry point: probe the URL both with and without the www prefix."""
        print("\033[36m Websockets \033[0m")
        print(LINE)
        if "www." in url:
            urls = []
            urls.append(url)
            urls.append(url.replace("www.", ""))
            for u in urls:
                print(" \033[34m\u251c\033[0m {}".format(u))
                self.run_socketio(u, poutput=False)
        else:
            self.run_socketio(url, poutput=True)
82 |
83 |
84 | """if __name__ == '__main__':
85 | url = sys.argv[1]
86 |
87 | check_socketio = check_socketio()
88 | check_socketio.run_socketio(url)"""
--------------------------------------------------------------------------------
/modules/before_fuzzing/check_subdomain.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from config import INFO
3 | from tools.Sublist3r import sublist3r
4 |
def subdomain(subdomains):
    """
    Subdomains:
    Check subdomains with the option -s (-s google.fr)
    script use sublit3r to scan subdomain (it's a basic scan)
    """
    # Bug fix: LINE and time were referenced but never imported, so the
    # function crashed with NameError right after the scan. Imported locally
    # to keep this module's import block untouched.
    import time
    from config import LINE
    print("search subdomains:\n")
    sub_file = "sublist/" + subdomains + ".txt"
    # Results are both returned and written to sublist/<domain>.txt.
    sub = sublist3r.main(subdomains, 40, sub_file, ports=None, silent=False, verbose=False, enable_bruteforce=False, engines=None)
    print(LINE)
    time.sleep(2)
--------------------------------------------------------------------------------
/modules/before_fuzzing/google_dorks.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | import sys
5 | import requests
6 | from config import WARNING, INFO, LINE, INFO_MOD, LESS, PLUS
7 | import time
8 | import traceback
9 | from googlesearch import search
10 | import json
11 | import random
12 |
13 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
14 |
15 |
def timer(length):
    """Block for `length` seconds, refreshing a countdown on one terminal line."""
    deadline = time.time() + length
    while True:
        remaining = deadline - time.time()
        if remaining <= 0:
            break
        # \r rewinds to the start of the line so the countdown overwrites itself.
        sys.stdout.write("" + str(remaining) + " secondes...\r")
        sys.stdout.flush()
    print("\n")
27 |
28 |
def query_dork(domain, directory):
    """
    query_dork: function to search google dork

    Runs a battery of Google-dork queries against the target domain, fetches
    the first results of each, prints them color-coded by status and appends
    reachable hits to a raw-output file. A failing search (usually Google's
    captcha) aborts the remaining queries.
    """
    # NOTE(review): these three flags are never used in this function.
    key_break = False
    found = False
    answer_yes = False
    print("\033[36m Google Dork \033[0m")
    print(LINE)
    # Normalize the input URL to a bare "domain.tld" token for site: queries.
    if 'www' in domain:
        direct = domain.split('.')
        director = direct[1]
        domain = "{}.{}".format(direct[1], direct[2].replace("/",""))
    else:
        direct = domain.split('/')
        director = direct[2]
        domain = director
    ext = domain.split(".")[1]
    # French TLDs get the French word for invoice.
    bill = 'facture site:{} filetype:pdf'.format(domain) if "fr" in ext else 'bill site:{} filetype:pdf'.format(domain) #FR/EN
    #Didn't hesitate to add your queries
    queries = [
        bill,
        'site:{} filetype:bak'.format(domain),
        'budget site:{} filetype:pdf'.format(domain),
        'site:{} ext:action OR ext:adr OR ext:ascx OR ext:asmx OR ext:axd OR ext:backup OR ext:bak OR ext:bkf OR ext:bkp OR ext:bok OR ext:achee OR ext:cfg OR ext:cfm OR ext:cgi OR ext:cnf OR ext:conf OR ext:config OR ext:crt OR ext:csr OR ext:csv OR ext:dat OR ext:doc OR ext:docx OR ext:eml OR ext:env OR ext:exe OR ext:gz OR ext:ica OR ext:inf OR ext:ini OR ext:java'.format(domain),
        'site:{} ext:json OR ext:key OR ext:log OR ext:lst OR ext:mai OR ext:mbox OR ext:mbx OR ext:md OR ext:mdb OR ext:nsf OR ext:old OR ext:oraext: OR ext:pac OR ext:passwd OR ext:pcf OR ext:pem OR ext:pgp OR ext:pl OR ext:plist OR ext:pwd OR ext:rdp OR ext:reg OR ext:rtf OR ext:skr OR ext:sql OR ext:swf OR ext:tpl'.format(domain),
        'site:{} ext:txt OR ext:url OR ext:wml OR ext:xls OR ext:xlsx OR ext:xml OR ext:xsd OR ext:yml OR ext:NEW OR ext:save OR ext:ppt'.format(domain),
        'site:{} filetype:xls inurl:"email.xls"'.format(domain),
        'site:{} intitle:"index of"'.format(domain),
        'intitle:"Dashboard [Jenkins]" {}'.format(domain),
        '"{}" inurl:gitlab OR site:pastebin.com OR site:github.com'.format(domain),
        #'site:http://prezi.com "{}"'.format(domain),
        'site:http://codeshare.io "{}"'.format(domain),
        'site:http://sharecode.io "{}"'.format(domain),
        'site:http://bitbucket.org "{}"'.format(domain),
        'site:*.atlassian.net "{}"'.format(domain),
        #'"{}" language:bash pwd'.format(domain),
        #'site:http://box.com "{}"'.format(domain)
    ]
    for query in queries:
        print(" {}{} (Tape ctrl+c to pass)\n".format(INFO_MOD, query))
        try:
            # Top 5 Google hits per query, paced to avoid the captcha.
            for j in search(query, tld="com", num=5, stop=5, pause=2.6):
                try:
                    req_url_found = requests.get(j, verify=False, timeout=4)
                    if req_url_found.status_code not in [404, 408, 503, 405, 428, 412, 429, 403, 401]:
                        print(" \033[32m[{}]\033[0m {}".format(req_url_found.status_code, j))
                        try:
                            # NOTE(review): the path repeats `directory` twice —
                            # presumably meant to be a file name; confirm.
                            with open(directory+"/{}".format(directory), "a+") as raw:
                                raw.write("{}\n".format(j))
                        except:
                            pass
                            #traceback.print_exc() #DEBUG
                    elif req_url_found.status_code in [403, 401]:
                        print(" \033[31m[{}]\033[0m {}".format(req_url_found.status_code, j))
                    else:
                        print(" \033[31m[{}]\033[0m {}".format(req_url_found.status_code, j))
                except:
                    #traceback.print_exc() #DEBUG
                    print(" {}Error with URL {}".format(WARNING, j))
            print("")
        except:
            # search() raising usually means Google is rate-limiting us;
            # no point in running the remaining queries.
            print(" {} Google captcha seem to be activated, try it later...\n".format(WARNING))
            break
    print(LINE)
94 |
95 |
def query_cse(domain, directory):
    """Query a public Google CSE and print how many results mention the domain."""
    # Normalize to a bare "domain.tld" token.
    if 'www' in domain:
        pieces = domain.split('.')
        domain = "{}.{}".format(pieces[1], pieces[2].replace("/",""))
    else:
        domain = domain.split('/')[2]
    print("\033[36m Google CSE \033[0m")
    print(LINE)
    api_key = "AIzaSyBNVyJ7mixhF-MJ9RA8oDn4RzDye6fqTBg"
    url_cse = "https://www.googleapis.com/customsearch/v1?key={}&cx=002972716746423218710:veac6ui3rio&q={}".format(api_key, domain)
    #you can add your own API KEY for more requests (for this is limited to 100 requests)
    req_cse = requests.get(url_cse, verify=False)
    cse_result_decod = json.loads(req_cse.text)
    try:
        res = ""
        # Keep the last request descriptor; it carries totalResults.
        for entry in cse_result_decod["queries"]["request"]:
            res = entry
        print(" {}{} Result found, go on https://cse.google.com/cse?cx=002972716746423218710:veac6ui3rio#gsc.q={}".format(PLUS, res["totalResults"], domain))
    except:
        #traceback.print_exc()
        print(" {}No Results".format(LESS))
    print(LINE)
124 |
125 | """if __name__ == '__main__':
126 | domain = "https://www.bmw-motorrad.de/" #DEBUG
127 | directory = "test"
128 | query_cse(domain, "test")"""
--------------------------------------------------------------------------------
/modules/during_fuzzing/bypass_forbidden.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import traceback
3 | import socket
4 | from config import PLUS, WARNING, INFO, BYP
5 |
6 |
7 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
8 |
def post(res):
    """POST probe: (status code, verb label, body length)."""
    resp = requests.post(res, verify=False, allow_redirects=False, timeout=10)
    return resp.status_code, "POST", len(resp.content)

def put(res):
    """PUT probe: (status code, verb label, body length)."""
    resp = requests.put(res, verify=False, allow_redirects=False, timeout=10)
    return resp.status_code, "PUT", len(resp.content)

def patch(res):
    """PATCH probe: (status code, verb label, body length)."""
    resp = requests.patch(res, verify=False, allow_redirects=False, timeout=10)
    return resp.status_code, "PATCH", len(resp.content)

def options(res):
    """OPTIONS probe: (status code, verb label, body length)."""
    resp = requests.options(res, verify=False, allow_redirects=False, timeout=10)
    return resp.status_code, "OPTIONS", len(resp.content)
13 |
14 |
def method(res):
    """
    Try other method
    Ex: OPTIONS /admin

    Runs the POST/PUT/PATCH/OPTIONS probes and reports any verb that yields
    a non-blocked status with a non-empty body.
    """
    blocked = [403, 401, 404, 406, 421, 429, 301, 302, 400, 408, 503, 405, 428, 412, 666, 500, 501, 502, 307]
    outcomes = []
    for probe in (post, put, patch, options):
        try:
            outcomes.append(probe(res))
        except:
            pass
    for status, verb, body_len in outcomes:
        if status not in blocked and body_len != 0:
            print("{}[{}] Forbidden page {} Bypass with this requests type: {} [{}]".format(BYP, status, res, verb, body_len))
31 |
32 |
def original_url(s, res, page, url):
    """Probe the X-Originating-URL header trick against a forbidden page.

    Ex: http://lpage.com/admin header="X-Originating-URL": admin
    """
    blocked = [403, 401, 404, 406, 421, 429, 301, 302, 400, 408, 503, 405, 428, 412, 666, 500, 501, 410, 502, 307]
    resp = s.get(res, verify=False, headers={"X-Originating-URL": page}, allow_redirects=False, timeout=10)
    if resp.status_code not in blocked and len(resp.content) > 0:
        print("{}[{}] {} Forbidden Bypass with: 'X-Originating-URL: {}'".format(BYP, resp.status_code, url+page, page))
41 |
42 |
def IP_authorization(s, res, url, domain, page, exclude_len):
    """Try to reach a forbidden resource by spoofing client-IP style headers.

    Ex: http://lpage.com/admin header="X-Custom-IP-Authorization": 127.0.0.1

    For every IP-trust header, request the resource once per candidate IP
    (including the target's own resolved IP when DNS works) and report
    combinations whose status is not blocked and whose body size differs
    from the plain (un-spoofed) response.
    """
    headers_type = [
        "X-Originating-IP", "X-Forwarded", "Forwarded", "Forwarded-For", "Forwarded-For-IP", "X-Forwarder-For", "X-Forwarded-For", "X-Forwarded-For-Original",
        "X-Forwarded-By", "X-Forwarded-Host", "X-Remote-IP", "X-Remote-Addr", "X-Client-IP", "Client-IP", "Access-Control-Allow-Origin", "Origin",
        "X-Custom-IP-Authorization", "X-Forwarded-For ", "Host"
    ]
    # NOTE: "X-Forwarded-For " (trailing space) is presumably intentional —
    # some proxies treat it differently from the canonical header name.

    try:
        website_ip = socket.gethostbyname(domain)
        ips_type = [website_ip, "127.0.0.1", "*", "8.8.8.8", "null", "192.168.0.2", "10.0.0.1", "0.0.0.0", "localhost", "192.168.1.1", "::1","0:0:0:0:0:0:0:1", "127.0.0.2"]
    except:
        ips_type = ["127.0.0.1", "*", "8.8.8.8", "null", "192.168.0.2", "10.0.0.1", "0.0.0.0", "localhost", "192.168.1.1", "::1","0:0:0:0:0:0:0:1", "127.0.0.2"]

    # Bug fix: the original built `ranges` from the spoofed response's OWN
    # length, so `len not in ranges` was always False and no bypass could ever
    # be reported. Use the un-spoofed response as the baseline instead.
    try:
        req_base = s.get(res, verify=False, allow_redirects=False, timeout=10)
        base_len = len(req_base.content)
    except:
        base_len = 0
    ranges = range(base_len - 50, base_len + 50) if base_len < 100000 else range(base_len - 1000, base_len + 1000)

    for h in headers_type:
        for ip in ips_type:
            headers = {h : ip}
            # Robustness fix: a single timeout used to abort the whole scan;
            # now the failing combination is simply skipped.
            try:
                req_ip = s.get(res, verify=False, headers=headers, allow_redirects=False, timeout=10)
            except:
                continue
            len_req_ip = len(req_ip.content)
            if req_ip.status_code not in [403, 401, 404, 406, 421, 429, 301, 302, 400, 408, 503, 405, 428, 412, 666, 500, 501, 410, 502, 307] and len_req_ip not in ranges and len_req_ip > 0:
                if exclude_len:
                    if exclude_len != len_req_ip:
                        print("{}[{}] {} Forbidden Bypass with: {}".format(BYP, req_ip.status_code, url+page, headers))
                else:
                    print("{}[{}] {} Forbidden Bypass with: {}".format(BYP, req_ip.status_code, url+page, headers))
68 |
69 |
def other_bypass(s, url, page, req_url, exclude_len):
    """
    other_bypass: all other known bypass

    Appends/injects path-mangling payloads around the page name and reports
    responses that are neither blocked nor the same size as the site root.
    """
    payloads = [page+"/.", "/"+page+"/", "./"+page+"/./", "%2e/"+page, page+"/.;/", ".;/"+page, page+"..;", page+"/;/", page+"..%3B",
                page+"/%3B", page+".%3B/", page+"~", page+"/..;/", page+"%20", page+"%09", page+"%00", page+"??", page+"#", page+"/*", page+"/*/"] #http://exemple.com/+page+bypass

    blocked = [403, 401, 404, 406, 421, 429, 301, 302, 400, 408, 503, 405, 428, 412, 666, 500, 501, 410, 502, 307]
    base_len = len(req_url.content)
    # Tolerance band around the root page size (wider for big pages).
    ranges = range(base_len - 50, base_len + 50) if base_len < 100000 else range(base_len - 1000, base_len + 1000)
    for payload in payloads:
        target = url + payload
        resp = s.get(target, verify=False, allow_redirects=False, timeout=10)
        body_len = len(resp.content)
        if resp.status_code in blocked or body_len in ranges or body_len == 0:
            continue
        if not exclude_len or exclude_len != body_len:
            print("{}[{}] Forbidden Bypass with : {} [{}b]".format(BYP, resp.status_code, target, body_len))
90 |
91 | #@timeit #Debug
#@timeit #Debug
def bypass_forbidden(res, exclude_len=False):
    """
    Bypass_forbidden: function for try to bypass code response 403/401

    Fetches the forbidden resource and the site root, then runs every bypass
    technique (header tricks, alternate verbs, path mangling) unless the root
    answers normally AND the resource body matches the root's size.
    """
    s = requests.session()
    parts = res.split("/")
    url = "/".join(parts[:3]) + "/"
    # NOTE(review): looks like this was meant to be the bare host, but it is
    # built exactly like `url` (scheme included) — kept as-is.
    domain = "/".join(parts[:3]) + "/"
    tail = parts[3:]
    page = "/".join(tail) if len(tail) > 1 else "".join(tail)
    req_res = s.get(res, verify=False, timeout=10)
    req_url = s.get(url, verify=False, timeout=10)
    base_len = len(req_url.content)
    # Original had two identical branch bodies; collapsed into one guard:
    # skip only when the root is reachable and sizes match within +/-50 bytes.
    if req_url.status_code not in [403, 401] and len(req_res.content) in range(base_len - 50, base_len + 50):
        return
    original_url(s, res, page, url)
    IP_authorization(s, res, url, domain, page, exclude_len)
    method(res)
    other_bypass(s, url, page, req_url, exclude_len)
116 |
117 |
118 | """if __name__ == '__main__':
119 | res = ""
120 | bypass_forbidden(res)"""
--------------------------------------------------------------------------------
/modules/during_fuzzing/check_backup.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | import requests
5 | from config import PLUS, WARNING, INFO, LESS, LINE, FORBI, BACK, EXCL, SERV_ERR, BYP, WAF, EXT_B, MINI_B, ARCH
6 | from bs4 import BeautifulSoup
7 | from modules.output import multiple_outputs
8 |
9 |
def scan_backup(s, url, len_req, res, js, req_p, bp_current, exclude, backup, header_parsed, user_agent, directory, forbi, filterM, page, tw, parsing, authent, get_date):
    """Dispatch backup-file checks for the current fuzzed resource.

    Picks the extension list from the -b option ("min" / "arc" presets, an
    explicit comma-separated list, or the full default set), then probes
    prefix variants (old_, ~, "Copy of ") and suffix variants (res + ext).
    """
    if backup and backup[0] == "min":
        bckp = MINI_B
    elif backup and backup[0] == "arc":
        bckp = ARCH
    elif len(backup) == 1:
        # A single argument may carry several comma-separated extensions.
        bckp = backup[0].split(",")
    elif not backup:
        bckp = EXT_B
    else:
        bckp = [item.replace(",", "") for item in backup]

    size_check = len_req

    for prefix in ("old_", "~", "Copy of "):
        size_bckp = prefix_backup(s, url, res, req_p, bp_current, js, header_parsed, exclude, tw, user_agent, directory, forbi, get_date, filterM, parsing, prefix)

    for exton in bckp:
        size_bckp = suffix_backup(s, url, res, req_p, bp_current, js, header_parsed, exclude, tw, page, exton, size_check, directory, forbi, get_date, parsing, filterM, authent)
32 |
33 |
def prefix_backup(s, url, res, req_p, bp_current, js, header_parsed, exclude, tw, user_agent, directory, forbi, HOUR, filterM, parsing, ob):
    """
    prefix_backup:
    Like the function 'suffix_backup' but check if the type backup dir like '~articles/' exist.
    """
    mo = multiple_outputs()
    segments = res.split("/")
    # Keep the trailing slash when the original resource had one.
    suffix = "/" if segments[-1] == "" else ""
    hidd_tild = "{}{}{}{}".format(url, ob, segments[3], suffix)
    # Original issued the same request in both branches; only the header
    # merge is conditional.
    if header_parsed:
        user_agent.update(header_parsed)
    req_tild = requests.get(hidd_tild, headers=user_agent, allow_redirects=False, verify=False, timeout=10)
    status_tild = req_tild.status_code
    if status_tild not in [404, 403, 500, 400, 301, 302]:
        if exclude:
            filterM.exclude_type(req_p, s, req_tild, res, directory, forbi, HOUR, bp_current, parsing, len(req_tild.content))
        else:
            h_bytes_len = "[{}b]".format(len(req_tild.content))
            print("{} {} {:<13}{:<10}".format(HOUR, PLUS, h_bytes_len, hidd_tild))
            mo.raw_output(directory, hidd_tild, 200, len(req_tild.content))
55 |
56 |
57 |
def suffix_backup(s, url, res, req_p, bp_current, js, header_parsed, exclude, tw, page, exton, size_check, directory, forbi, HOUR, parsing, filterM, authent):
    """
    suffix_backup:
        During the scan, check if a backup file or dir exist.
        You can modify this in "config.py"

    Appends the backup suffix `exton` (e.g. ".bak") to the current result
    `res`/`page` and reports the suffixed URL depending on its HTTP status.
    When a 200 hit looks real, the response body is saved under
    <directory>/files/ and logged through multiple_outputs.

    Returns len(req_b.content) when the suffixed URL answered 200,
    None on every other status path.
    """

    mo = multiple_outputs()

    d_files = directory + "/files/" #directory to download backup file if exist

    res_b = res + exton
    page_b = page + exton
    #print(res_b) #DEBUG
    # keep only the last path segment (after scheme://host/) to name the local copy
    anti_sl = res_b.split("/")
    rep = anti_sl[3:]
    result = rep[-1]
    r_files = d_files + result

    # custom headers take precedence; otherwise plain request with optional basic auth
    if header_parsed:
        req_b = s.get(res_b, allow_redirects=False, verify=False, headers=header_parsed)
    else:
        req_b = s.get(res_b, allow_redirects=False, verify=False, timeout=10, auth=authent)

    soup = BeautifulSoup(req_b.text, "html.parser")
    req_b_status = req_b.status_code

    size_bytes = len(req_b.content)
    size_bytes_b = "[{}b]".format(size_bytes)


    if req_b_status == 200:
        # size_check is presumably the byte-size of the site's "soft 404"/index
        # page (TODO confirm against caller); responses within +/-60 bytes
        # (+/-1000 for big pages) of it are treated as false positives.
        ranges = range(size_check - 60, size_check + 60) if size_check < 10000 else range(size_check - 1000, size_check + 1000)
        if size_bytes == size_check or size_bytes in ranges:
            #if the number of bytes of the page equal to size_check variable and not bigger than size_check +5 and not smaller than size_check -5
            pass
        elif size_bytes != size_check:
            if js and size_bytes > 0:
                parsing.get_javascript(res, req_b, directory)
            if exclude:
                filterM.exclude_type(req_p, s, req_b, res_b, directory, forbi, HOUR, bp_current, parsing, size_bytes)
            else:
                print("{} {} {:<13}{:<10}".format(HOUR, BACK, size_bytes_b, res_b if tw > 120 else page_b))
                try:
                    # save a local copy of the discovered backup file
                    with open(r_files, 'w+') as fichier_bak:
                        fichier_bak.write(str(soup))
                    mo.raw_output(directory, res_b, 200, size_bytes)
                except:
                    pass
        return size_bytes
    elif req_b_status in [404, 406, 429, 503, 502, 500, 400]:
        # clearly-negative statuses: nothing to report
        pass
    elif req_b_status in [301, 302, 303, 307, 308]:
        # redirect handling kept disabled (string statement below is inert)
        """redirect_link = ""
        for rh in req_b.headers:
            if "location" in rh or "Location" in rh:
                loc = req_b.headers[rh]
                redirect_link = loc if "http" in loc else "{}{}".format(url, loc)
                req_loc = s.get(redirect_link, verify=False, allow_redirects=False)
                if "/".join(res.split("/")[1:]) == "/".join(loc.split("/")[1:-1]) and len(req_loc.content) != index_len and not "." in loc:
                    print("  \033[33m[<>]\033[0m {} redirect to \033[33m{}\033[0m [\033[33mPotential Hidden Directory\033[0m]".format(res, loc))
                else:
                    req_loc = s.get(res, verify=False, allow_redirects=True)
                    redirect_link = req_loc.url
                    print("{} {} {} → {}".format(HOUR, LESS, res_b if tw > 120 else page_b, redirect_link))"""
        pass
    elif req_b_status in [403, 401]:
        # access denied: still interesting unless it looks like the generic index size
        ranges = range(size_check - 50, size_check + 50) if size_check < 10000 else range(size_check - 1000, size_check + 1000)
        if size_bytes == size_check or size_bytes in ranges:
            #if the number of bytes of the page equal to size_check variable and not bigger than size_check +5 and not smaller than size_check -5
            pass
        else:
            if exclude:
                filterM.exclude_type(req_p, s, req_b, res_b, directory, forbi, HOUR, bp_current, parsing, size_bytes)
            else:
                print("{} {} [{}] {}".format(HOUR, FORBI, size_bytes, res_b))
                #bypass_forbidden(res_b)
                mo.raw_output(directory, res_b, req_b_status, size_bytes)
                #pass
    else:
        # any other status: report raw
        if exclude:
            filterM.exclude_type(req_p, s, req_b, res_b, directory, forbi, HOUR, bp_current, parsing, size_bytes)
        else:
            print("{}{} {}".format(HOUR, res_b if tw > 120 else page_b, req_b.status_code))
142 |
143 |
def vim_backup(s, url, res, user_agent, exclude):
    """
    vim_backup: Testing backup vim like ".plop.swp"

    Builds the swap-file name for the last path segment of `res`
    (e.g. "admin.php" -> ".admin.php.swp") and probes it through the
    shared session `s`. `exclude` is compared against the body length
    to filter a known false-positive size.
    """
    if "." in res:
        pars = res.split("/")
        vb = ".{}.swp".format(pars[-1])
        vim_b = "{}{}/".format(url, vb) if pars[-1] == "" else "{}{}".format(url, vb)
        req_vb = s.get(vim_b, headers=user_agent, allow_redirects=False, verify=False, timeout=10)
        # BUG fix: the original guard compared len(req_vb.content) with itself
        # (always False), so a hit could never be reported. Require a
        # non-empty body instead.
        if req_vb.status_code not in [404, 403, 401, 500, 406] and len(req_vb.content) != 0:
            size = len(req_vb.text)
            if exclude:
                if exclude != size and size != 0:
                    print("{} {} [{}b] Potential backup vim found {:<15}".format(get_date, PLUS, size, vim_b))
            else:
                if size != 0:
                    print("{} {} [{}b] Potential backup vim found {:<15}".format(get_date, PLUS, size, vim_b))
--------------------------------------------------------------------------------
/modules/during_fuzzing/parsing_html.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | from bs4 import BeautifulSoup
4 | import warnings
5 | import requests
6 | import csv
7 | import sys, re, os
8 | from config import S3, JS, WARNING
9 | import traceback
10 | from bs4 import MarkupResemblesLocatorWarning
11 |
12 |
13 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
14 | warnings.filterwarnings('ignore', category=MarkupResemblesLocatorWarning)
15 |
class parsing_html:
    """
    Parsing_html: class with all function who parse html
    """

    def get_links(self, req, directory):
        """
        Get_links: get all links on webpage during the scan and append the
        absolute http(s) ones to <directory>/links.txt.
        """
        soup = BeautifulSoup(req.text, "html.parser")
        for anchor in soup.find_all('a'):
            link = anchor.get("href")
            try:
                # BUG fix: the original pattern 'http(s)' only matched https://
                # links; 'https?' also records plain http:// ones.
                if re.match(r'https?', link):
                    with open(directory + "/links.txt", "a+") as links:
                        links.write(str(link+"\n"))
            except TypeError:
                # <a> without href -> link is None and re.match raises TypeError
                pass


    def html_recon(self, res, req, directory):
        """
        Check if S3 buckets and path disclosure are in html page
        """
        path_disclosure = ["file://", "tmp/", "var/www", "/usr/", "var/lib", "srv/www", "srv/data", "var/opt", "file:///", "var/run"]
        #archive extentions
        archives = ['db', 'swp', 'yml', 'xsd', 'wml', 'bkp', 'rar', 'zip', '7z', 'bak', 'bac', 'NEW',
        'old', 'bkf', 'bok', 'cgi', 'dat', 'ini', 'log', 'key', 'conf', 'env', '_bak', '_old', 'json', 'lock',
        'save', 'atom', 'action', '_backup', 'backup', 'config', '/authorize/', 'md', 'gz', 'txt', '%01',
        '(1)', 'sql.gz', 'tgz', 'tar.gz', 'gzip', 'war', 'jar', 'cab']

        s3_keyword = ["S3://", "s3-", "amazonaws", "aws.", "userPoolId"]

        reqtext = req.text.split(" ")  # hoisted: identical for every keyword
        for s3_f in s3_keyword:
            for req_key in reqtext:
                req_value = req_key.split('"')
                for rv in req_value:
                    if s3_f in rv: #TODO → and "dmoain" in rv
                        if not os.path.exists(directory + "/s3_links.txt"):
                            with open(directory + "/s3_links.txt", "a+") as s3_links:
                                s3_links.write(str(rv+"\n"))
                        else:
                            with open(directory + "/s3_links.txt", "r+") as read_links:
                                if any(rl.strip() == rv.strip() for rl in read_links.readlines()):
                                    pass
                                else:
                                    try:
                                        req_s3 = requests.get(rv, verify=False, headers={'User-agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko'})
                                        if req_s3.status_code == 200:
                                            print("{}[200] Potentialy s3 buckets found: {} in {}".format(S3, rv, res))
                                            read_links.write(rv + "\n")
                                    except Exception:
                                        # best-effort probe: rv may not be a fetchable URL
                                        pass
        for pad in path_disclosure:
            # BUG fix: '[a-zA-z]' also matched '[', ']', '^', '_' and backtick
            # (A-z spans punctuation in ASCII); use the explicit two ranges.
            m = re.search(r"{}[a-zA-Z/]+".format(pad), req.text)
            if m:
                print(" {}Possible path disclosure \033[34m{}\033[0m in {}".format(WARNING, m.group(0), res))
        for arc in archives:
            # BUG fix: extensions like '(1)' and '%01' contain regex metachars;
            # escape them so they are matched literally.
            a = re.search(r"\.{}$".format(re.escape(arc)), req.text)
            if a:
                print(" {}Possible archives \033[34m{}\033[0m in {}".format(WARNING, a.group(0), res))


    def sitemap(self, req, directory):
        """Get sitemap.xml of website"""
        soup = BeautifulSoup(req.text, "html.parser")
        with open(directory + '/sitemap.xml', 'w+') as file:
            file.write(str(soup).replace(' ','\n'))


    def get_javascript(self, url, req, directory):
        """search potentialy sensitive keyword in javascript"""
        # BUG fix: the AMAZON_URL_* patterns used r"\\." (a literal backslash
        # followed by any char) and could never match a real URL dot.
        REGEX_ = {
            "AMAZON_URL_1":r"[a-z0-9.-]+\.s3-[a-z0-9-]\.amazonaws\.com",
            "AMAZON_URL_2":r"[a-z0-9.-]+\.s3-website[.-](eu|ap|us|ca|sa|cn)",
            "AMAZON_URL_3":r"s3\.amazonaws\.com/[a-z0-9._-]+",
            "AMAZON_URL_4":r"s3-[a-z0-9-]+\.amazonaws\.com/[a-z0-9._-]+",
            "AMAZON_KEY":r"([^A-Z0-9]|^)(AKIA|A3T|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{12,}",
            "Authorization":r"^Bearer\s[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$",
            "accessToken":r"^acesstoken=[0-9]{13,17}",
            "vtex-key":r"vtex-api-(appkey|apptoken)",
            "google_api":r"AIza[0-9A-Za-z-_]{35}",
            "firebase":r"AAAA[A-Za-z0-9_-]{7}:[A-Za-z0-9_-]{140}",
            "paypal_braintree_access_token":r"access_token\$production\$[0-9a-z]{16}\$[0-9a-f]{32}",
            "github_access_token":r"[a-zA-Z0-9_-]*:[a-zA-Z0-9_\-]+@github\.com*",
            "json_web_token":r"ey[A-Za-z0-9-_=]+\.[A-Za-z0-9-_=]+\.?[A-Za-z0-9-_.+/=]*$",
            "SSH_privKey":r"([-]+BEGIN [^\s]+ PRIVATE KEY[-]+[\s]*[^-]*[-]+END [^\s]+ PRIVATE KEY[-]+)",
        }

        # scheme://host part of the scanned site, used to keep only same-site JS
        url_index = url.split("/")[0:3] if "http" in url else url
        url_index = "/".join(url_index)

        UNINTERESTING_EXTENSIONS = ['css', 'svg', 'png', 'jpeg', 'jpg', 'mp4', 'gif']

        UNINTERESTING_JS_FILES = ['bootstrap', 'jquery']

        INTERESTING_KEY = ['_public_key', '_token', '_password', '_database', 'secret_key', '_secret', 'api_key', 'apisettings',
        'sourcemappingurl', 'private_key', 'jwt_secret', 'api_secret_key', 'access_key', 'access_token', 'admin_pass', 'admin_user',
        'algolia_admin_key', 'algolia_api_key', 'alias_pass', 'alicloud_access_key', 'amazon_secret_access_key', 'amazonaws',
        'ansible_vault_password', 'aos_key', 'api_key_secret', 'api_key_sid', 'api_secret', 'api.googlemaps AIza', 'apidocs', 'apikey',
        'apiSecret', 'app_debug', 'app_id', 'app_key', 'app_log_level', 'app_secret', 'appkey', 'appkeysecret', 'application_key',
        'appsecret', 'appspot', 'auth_token', 'authorizationToken', 'authsecret', 'aws_access', 'aws_access_key_id', 'aws_bucket', 'aws_key',
        'aws_secret', 'aws_secret_key', 'aws_token', 'AWSSecretKey', 'b2_app_key', 'bashrc password', 'bintray_apikey', 'bintray_gpg_password',
        'bintray_key', 'bintraykey', 'bluemix_api_key', 'bluemix_pass', 'browserstack_access_key', 'bucket_password', 'bucketeer_aws_access_key_id',
        'bucketeer_aws_secret_access_key', 'built_branch_deploy_key', 'bx_password', 'cache_driver', 'cache_s3_secret_key', 'cattle_access_key',
        'cattle_secret_key', 'certificate_password', 'ci_deploy_password', 'client_secret', 'client_zpk_secret_key', 'clojars_password', 'cloud_api_key',
        'cloud_watch_aws_access_key', 'cloudant_password', 'cloudflare_api_key', 'cloudflare_auth_key', 'cloudinary_api_secret', 'cloudinary_name', 'codecov_token',
        'conn.login', 'connectionstring', 'consumer_key', 'consumer_secret', 'cypress_record_key', 'database_password', 'database_schema_test',
        'datadog_api_key', 'datadog_app_key', 'db_password', 'db_server', 'db_username', 'dbpasswd', 'dbpassword', 'dbuser', 'deploy_password', 'digitalocean_ssh_key_body',
        'digitalocean_ssh_key_ids', 'docker_hub_password', 'docker_key', 'docker_pass', 'docker_passwd', 'docker_password', 'dockerhub_password', 'dockerhubpassword',
        'dot-files', 'dotfiles', 'droplet_travis_password', 'dynamoaccesskeyid', 'dynamosecretaccesskey', 'elastica_host', 'elastica_port', 'elasticsearch_password',
        'encryption_key', 'encryption_password', 'env.heroku_api_key', 'env.sonatype_password', 'eureka.awssecretkey', 'apex', 'firebase', 'xoxp', 'hapikey', 'client_credentials',
        'amplitude', 'getKey', 'appcenter', 'TrackerToken', 'conversationspasskey', 'Passkey', 'accesstoken', 'verifycustomtoken', 'signInWithCustomToken', 'x-pendo-integration-key',
        'sendgrid_token', 'x-api-token', 'x-api-key', 'branch_secret', 'Idempotency-Key', 'cognito_config', 'cognito']

        SOCKET_END = ["socket.io", "socketio", "socket", "websocket", "app.module.ts", "ws://", "wss://", "xmpp-websocket"]

        text = req.content
        url = req.url
        regex = r'''((https?:)?[/]{1,2}[^'\"> ]{5,})|(\.(get|post|ajax|load)\s*\(\s*['\"](https?:)?[/]{1,2}[^'\"> ]{5,})'''
        page_text = text.decode('utf-8', errors="ignore")  # decode once, reuse below
        if ".js" in url:
            # the fetched page itself is a javascript file
            for keyword_match in INTERESTING_KEY:
                if keyword_match.lower() in page_text.lower():
                    try:
                        # BUG fix: 'w+' truncated js.txt on every call, wiping
                        # earlier findings and defeating the dedup read below.
                        with open("{}/js.txt".format(directory), 'a+') as js_write:
                            js_link = open("{}/js.txt".format(directory), 'r')
                            if "{}::{}".format(url, keyword_match) not in js_link.read():
                                print("{}Potential keyword found \033[33m[{}] \033[0min {}".format(JS, keyword_match, url))
                                js_write.write("{}::{}\n".format(url, keyword_match))
                            js_link.close()
                    except Exception:
                        traceback.print_exc()
            for socketio_ in SOCKET_END:
                if socketio_ in page_text:
                    print("{}Potential socketio endpoint found \033[33m[{}] \033[0min {}".format(JS, socketio_, url))
        else:
            # HTML page: extract same-site .js URLs and scan each of them
            matches = re.findall(regex, page_text)
            for match in matches:
                if not any('{}'.format(ext) in match[0] for ext in UNINTERESTING_EXTENSIONS) and url_index in match[0] and ".js" in match[0]:
                    req_js = requests.get(match[0], verify=False, headers={'User-agent': 'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko'})
                    for keyword_match in INTERESTING_KEY:
                        if keyword_match.lower() in req_js.text.lower():
                            try:
                                with open("{}/js.txt".format(directory), 'a+') as js_write:
                                    js_link = open("{}/js.txt".format(directory), 'r')
                                    if "{}::{}".format(match[0], keyword_match) not in js_link.read():
                                        print("{}Potential keyword found \033[33m[{}] \033[0min {}".format(JS, keyword_match, match[0]))
                                        js_write.write("{}::{}\n".format(match[0], keyword_match))
                                    js_link.close()
                            except Exception:
                                traceback.print_exc()

        for k, v in REGEX_.items():
            # NOTE(review): patterns with groups make findall return tuples;
            # the value is then printed as a tuple — confirm intended output.
            values_found = re.findall(v, page_text)
            if values_found:
                for val in values_found:
                    try:
                        with open("{}/js.txt".format(directory), 'a+') as js_write:
                            js_link = open("{}/js.txt".format(directory), 'r')
                            # BUG fix: the membership check used 'k::url::val' while
                            # the write used 'url::k::val', so every hit was
                            # re-reported; both now use the written format.
                            if "{}::{}::{}".format(url, k, val) not in js_link.read():
                                # BUG fix: closing ANSI reset was '\033[0' (missing 'm')
                                print("{}Keyword found \033[33m[{}] \033[0min {} with value \033[32m[{}] \033[0m".format(JS, k, url, val))
                                js_write.write("{}::{}::{}\n".format(url, k, val))
                            js_link.close()
                    except Exception:
                        traceback.print_exc()
195 |
196 |
197 |
198 | """if __name__ == '__main__':
199 | ph = parsing_html()
200 | url = "https://www..fr/"
201 | req = requests.get(url)
202 | ph.get_javascript(url, req)""" #DEBUG
--------------------------------------------------------------------------------
/modules/manage_dir.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | import os
5 | from config import PLUS, WARNING, INFO, LESS, LINE, FORBI, BACK
6 |
class manage_dir:

    def check_backup(self, directory):
        """Check if a backup file exist from function 'Status'.

        Asks the user whether to resume from <directory>/backup.txt.
        Returns True to continue from the backup, False to restart
        (removing output/raw.txt), and None when no backup file exists.
        """
        if not os.path.exists(directory + "/backup.txt"):
            return None
        bp = input("A backup file exists, do you want to Continue with it or Restart ? [c:r]: ")
        if bp.lower() == 'c':
            print("restart from last save of backup.txt ...")
            print(LINE)
            return True
        try:
            os.remove(directory + '/output/raw.txt')
        except OSError:
            # raw.txt may not exist yet; restarting without it is fine
            # (narrowed from a bare except that also swallowed Ctrl-C)
            pass
        print(LINE)
        return False
--------------------------------------------------------------------------------
/modules/output.py:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | import os
5 | import json
6 | import csv
7 |
class multiple_outputs:
    """
    multiple_outputs:
        To export in multiple file format.
        Available: txt, json, csv
        TODO: other format
    """

    def _ensure_output_dir(self, directory):
        """Create <directory>/output/ if missing and return its path."""
        out_dir = directory + "/output/"
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        return out_dir

    def raw_output(self, directory, res, stats, size_res):
        """Append one 'url, <res>, <status>, <size>b' line to output/raw.txt."""
        out_dir = self._ensure_output_dir(directory)
        with open(out_dir + "raw.txt", "a+") as raw:
            raw.write("url, {}, {}, {}b\n".format(res, stats, size_res))

    def json_output(self, directory, res, stats, size_res):
        """Append one JSON record (one per line) to output/json_output.txt."""
        out_dir = self._ensure_output_dir(directory)
        data = {
            'url': '{}'.format(res),
            'response_status': '{}'.format(stats),
            'size_bytes': '{}'.format(size_res)
        }
        with open(out_dir + "json_output.txt", "a+") as raw_json:
            json.dump(data, raw_json)
            # newline-delimited JSON: without it successive records were
            # concatenated into a single unparseable blob
            raw_json.write("\n")

    def csv_output(self, directory, res, stats, size_res):
        """Append one (url, status, size) row to output/csv_ouput.csv."""
        # BUG fix: unlike the other writers, csv_output never created the
        # output directory and crashed when called first.
        out_dir = self._ensure_output_dir(directory)
        data = ['{}'.format(res), '{}'.format(stats), '{}'.format(size_res)]
        # newline='' per csv module docs to avoid blank lines on Windows
        # (NOTE: file name keeps the historical 'csv_ouput.csv' typo — other
        # code may read it by that name)
        with open(out_dir + "csv_ouput.csv", 'a+', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(data)
--------------------------------------------------------------------------------
/modules/proxy/check_proxy.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | import random
4 | import sys
5 | import traceback
6 |
7 | try:
8 | from Queue import Queue
9 | except:
10 | import queue as Queue
11 | import threading
12 | from threading import Thread
13 | try:
14 | enclosure_queue = Queue()
15 | except:
16 | enclosure_queue = Queue.Queue()
17 |
18 |
19 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
20 |
21 |
def proxy(i, q, n, url):
    """Worker thread: pull proxies off queue `q` and keep the working ones.

    Tries each proxy for both http and https first, then falls back to
    https-only. Working proxies are appended to the module-global list_ips.
    `i` is the worker index (unused), `n` the total number of proxies.
    """
    session = requests.session()
    for _ in range(n):
        proxie = q.get()
        candidate = proxie.rstrip()
        try:
            proxies = {
                'http': candidate,
                'https': candidate
            }
            session.get(url, verify=False, timeout=10, proxies=proxies)
            list_ips.append(candidate)
        # narrowed from bare 'except:' which also swallowed KeyboardInterrupt
        except Exception:
            proxies = {
                'https': candidate
            }
            try:
                session.get(url, verify=False, timeout=10, proxies=proxies)
                list_ips.append(candidate)
            except Exception:
                # proxy is dead in both modes: just drop it
                pass
        q.task_done()
45 |
def check_proxy(proxy_list):
    """Validate every proxy listed in file `proxy_list` against httpbin.

    Spawns 10 daemon worker threads running proxy(); returns the list of
    proxies that answered (also stored in the module-global list_ips).
    """
    global list_ips
    list_ips = []

    url = "https://httpbin.org/ip"

    # count the proxies first so each worker knows how many gets to perform
    with open(proxy_list, "r") as datas:
        n = sum(1 for _ in datas)
    print(" Proxy IPs checking, please wait...")
    try:
        with open(proxy_list, "r") as datas:
            for d in datas:
                enclosure_queue.put(d.rstrip())
            for i in range(10):
                worker = Thread(target=proxy, args=(i, enclosure_queue, n, url))
                worker.daemon = True  # Thread.setDaemon() is deprecated
                worker.start()
            enclosure_queue.join()
    except KeyboardInterrupt:
        print(" Canceled by keyboard interrupt (Ctrl-C)")
        sys.exit()
    except Exception:
        traceback.print_exc()
    print(list_ips)
    return(list_ips)
--------------------------------------------------------------------------------
/modules/proxy/test_proxies.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import json
3 | import random
4 | import sys
5 | import traceback
6 |
7 | try:
8 | from Queue import Queue
9 | except:
10 | import queue as Queue
11 | import threading
12 | from threading import Thread
13 | try:
14 | enclosure_queue = Queue()
15 | except:
16 | enclosure_queue = Queue.Queue()
17 |
18 |
19 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
20 |
21 |
def proxy(i, q, n, url):
    """Worker thread: test each queued proxy and record the working ones.

    Verbose variant used by the standalone tester below: prints each
    attempt and a l/n progress counter on stdout.
    """
    session = requests.session()
    for l in range(n):
        proxie = q.get()
        candidate = proxie.rstrip()
        try:
            proxies = {
                'http': candidate,
                'https': candidate
            }
            print(proxies)
            req = session.get(url, verify=False, timeout=15, proxies=proxies)
            print(req)
            # BUG fix: appended d.rstrip() — 'd' is a leaked global from the
            # __main__ loop, not this worker's proxy (NameError on import)
            list_ips.append(candidate)
            print("ok")
        except Exception:
            proxies = {
                'https': candidate
            }
            try:
                req = session.get(url, verify=False, timeout=15, proxies=proxies)
                list_ips.append(candidate)  # same fix as above
                print(req)
                print("ok2")
            except Exception:
                pass
        q.task_done()
        sys.stdout.write(" {}/{}\r".format(l, n))
49 |
if __name__ == '__main__':
    # Standalone proxy-list tester: reads a hard-coded proxy list, feeds it
    # to 10 worker threads and prints the proxies that answered.
    global list_ips
    list_ips = []

    url = "https://httpbin.org/ip"

    # count lines first so each worker knows how many gets to perform
    with open("../../BB-TOOLS/Proxy List-1.txt", "r") as datas:
        n = sum(1 for _ in datas)
    print(" Proxy IPs checking, please wait...")
    try:
        with open("../../BB-TOOLS/Proxy List-1.txt", "r") as datas:
            for d in datas:
                enclosure_queue.put(d.rstrip())
            for i in range(10):
                worker = Thread(target=proxy, args=(i, enclosure_queue, n, url))
                worker.daemon = True  # Thread.setDaemon() is deprecated
                worker.start()
            enclosure_queue.join()
    except KeyboardInterrupt:
        print(" Canceled by keyboard interrupt (Ctrl-C)")
        sys.exit()
    except Exception:
        traceback.print_exc()
    print(list_ips)
--------------------------------------------------------------------------------
/modules/resume.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
def resume_options(url, thread, wordlist, recur, js, exclude, proxy, header, backup):
    """
    resume_scan: Just a little resume of the options for the scan.

    Prints a colored summary of the run configuration before fuzzing
    starts: boolean options render green "True" / red "False"; list
    options (backup, exclude) are joined space-separated in green.
    Output only — returns None.
    """
    recur_ = "\033[32mTrue\033[0m" if recur else "\033[31mFalse\033[0m"
    js_ = "\033[32mTrue\033[0m" if js else "\033[31mFalse\033[0m"
    backup_ = ' '.join(["\033[32m{}\033[0m".format(b) for b in backup]) if backup else "\033[31mFalse\033[0m"
    exclude_ = ' '.join(["\033[32m{}\033[0m".format(e) for e in exclude]) if exclude else "\033[31mFalse\033[0m"
    # a custom header is shown alongside the always-on random user agent
    header_ = "\033[32m{} + Random user agent\033[0m".format(header) if header else "\033[36mRandom user agent\033[0m"
    print("""
\033[34m Target: \033[0m \033[32m{}\033[0m
\033[34m Header: \033[0m {}
\033[34m Threads: \033[0m \033[32m{}\033[0m
\033[34m Wordlist: \033[0m \033[32m{}\033[0m
\033[34m Recursive: \033[0m {}
\033[34m Javascript Check: \033[0m {}
\033[34m Backup extension: \033[0m {}
\033[34m Exclude option: \033[0m {}
\033[34m Proxy: \033[0m {}
\033[31m___________________________________________________________________\033[0m
""".format(url, header_, thread, wordlist, recur_, js_, backup_, exclude_, "\033[32mTrue\033[0m" if proxy else "\033[31mFalse\033[0m"))
--------------------------------------------------------------------------------
/modules/terminal_size.py:
--------------------------------------------------------------------------------
1 | import fcntl, termios, struct
2 | import sys
3 |
def terminal_size():
    """Return (width, height) of the terminal attached to stdout.

    Returns (0, 0) when stdout is not a tty (piped/redirected output).
    """
    if sys.stdout.isatty():
        # BUG fix: the tty check is on stdout but the ioctl queried fd 0
        # (stdin), crashing when stdin is piped while stdout is a tty;
        # query the same descriptor we just checked.
        th, tw, hp, wp = struct.unpack('HHHH',
                         fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ,
                         struct.pack('HHHH', 0, 0, 0, 0)))
        return tw, th
    else:
        return 0, 0
--------------------------------------------------------------------------------
/modules/waf/bypass_waf.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import socket
3 | import traceback
4 | from modules.waf.detect_waf import verify_waf
5 | from config import PLUS, WARNING, INFO, WAF
6 |
7 | requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
8 |
def bypass_by_user_agent(req, res):
    """
    bypass_by_user_agent: placeholder for WAF evasion through crawler
    user agents (values still to be filled in). Currently a no-op.
    """
    #TODO
    user_agent_list = dict(Googlebot="")
14 |
15 |
def bypass_waf(req, res):
    """
    Bypass_waf: try if the waf can be bypass, using different payloads

    Sends one probe per (header, value) combination through verify_waf();
    as soon as a combination is no longer flagged as WAF-blocked, prints
    the header and returns that headers dict. Falls back to a single
    "Clear-Site-Data: *" attempt.

    Returns the winning headers dict, or False when nothing worked.
    """
    win = False
    domain = res.split("/")[2]
    website_ip = socket.gethostbyname(domain) # take ip website
    # headers commonly trusted by WAFs / reverse proxies for client-IP checks
    header_base = [
        "X-Originating-IP", "X-Forwarded","Forwarded","Forwarded-For","Forwarded-For-IP","X-Forwarder-For","X-Forwarded-For","X-Forwarded-For-Original",
        "X-Forwarded-By","X-Forwarded-Host","X-Remote-IP","X-Remote-Addr","X-Client-IP","Client-IP","Cookie","Access-Control-Allow-Origin","Origin",
        "Timing-Allow-Origin","X-Forwarded-For "
    ]
    # spoofed origins to try: the site itself, loopback, RFC1918 ranges, wildcards
    options = [website_ip, domain, "127.0.0.1", "127.0.0.2", "*", "8.8.8.8", "null", "192.168.0.2", "10.0.0.1", "localhost", "0.0.0.0","::1","0:0:0:0:0:0:0:1"]
    for hb in header_base:
        for o in options:
            headers = {
                hb : o
            }
            try:
                display = False
                # verify_waf() is expected to return False when the probe is
                # NOT blocked — i.e. the header combination bypassed the WAF
                vrfy = verify_waf(req, res, headers, display)
                #print(vrfy)
                if vrfy == False:
                    #win = True
                    for h in headers:
                        print("{}Potential bypass WAF rate limit with option:\033[36m -H \"{}:{}\" \033[0m".format(WAF, h, headers[h]))
                    return headers
            except Exception:
                pass
                #traceback.print_exc()
    # NOTE(review): `win` is never set to True (assignments are commented
    # out), so this fallback always runs when the loops found nothing.
    if not win:
        try:
            headers = {
                "Clear-Site-Data":"*"
            }
            display = False
            vrfy = verify_waf(req, res, headers, display)
            if vrfy == False:
                #win = True
                for h in headers:
                    print("{}Potential bypass WAF rate limit with option:\033[36m -H \"{}:{}\" \033[0m".format(WAF, h, headers[h]))
                return headers
            """else:
                bypass_by_user_agent(req, res)"""
        except:
            pass
            #traceback.print_exc()
    return win
64 |
65 | """if __name__ == '__main__':
66 | req = "plop"
67 | user_agent = False
68 | res = ""
69 | bypass_waf(req, res)""" #DEBUG
--------------------------------------------------------------------------------
/report/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/report/__init__.py
--------------------------------------------------------------------------------
/report/creat_report.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | import csv
3 | import traceback
4 |
5 | def create_report(directory, cookie_):
6 | """
7 | Create_report: make a html report with url, waf, email...
8 | """
9 | urls = ""
10 | waf = ""
11 | mails = ""
12 | nowdate = datetime.now()
13 | nowdate = "{}-{}-{}".format(nowdate.day, nowdate.month, nowdate.year)
14 | #directory = "../" + directory#DEBUG
15 | if cookie_:
16 | auth_stat = "Authenticated"
17 | else:
18 | auth_stat = "No Authenticated"
19 | with open("report/report_"+ directory.split("/")[-1] + ".html", "a+") as test:
20 | with open(directory + "/output/raw.txt", "r") as scan:
21 | for s in scan.read().splitlines():
22 | s = s.split(', ')
23 | if s[2] in ["301","302"]:
24 | urls += """
25 |
26 | {} |
27 | {} |
28 | {} |
29 | {} |
30 |
31 | """.format(nowdate, s[1], s[1], s[2], s[3])
32 |
33 | elif s[2] in ["401","403"]:
34 | urls += """
35 |
36 | {} |
37 | {} |
38 | {} |
39 | {} |
40 |
41 | """.format(nowdate, s[1], s[1], s[2], s[3])
42 | elif s[2] in ["400", "500"]:
43 | urls += """
44 |
45 | {} |
46 | {} |
47 | {} |
48 | {} |
49 |
50 | """.format(nowdate, s[1], s[1], s[2], s[3])
51 | else:
52 | urls += """
53 |
54 | {} |
55 | {} |
56 | {} |
57 | {} |
58 |
59 | """.format(nowdate, s[1], s[1], s[2], s[3])
60 | try:
61 | with open(directory + "/waf.txt", "r") as waff:
62 | waf_res = ""
63 | for w in waff.read().splitlines():
64 | if "The site" in w:
65 | waf_res = w
66 | if waf_res:
67 | waf += """
68 | {}
69 | """.format(waf_res)
70 | except:
71 | waf += """
72 | This site dosn't seem to use a WAF
73 | """
74 | try:
75 | with open(directory + "/google_dorks.txt", "r") as google_dork:
76 | reader = csv.reader(csvFile)
77 | for gd in google_dork:
78 | gd_link += """
79 |
80 | {} |
81 |
82 | """.format(l, l)
83 | except:
84 | gd_link = " No google dork result found |
"
85 | try:
86 | link = ""
87 | with open(directory + "/links.txt", "r") as links:
88 | for l in links.read().splitlines():
89 | link += """
90 |
91 | {} |
92 |
93 | """.format(l, l)
94 | except:
95 | link = " No links found |
"
96 | try:
97 | wayback = ""
98 | with open(directory + "/wayback.txt", "r") as waybacks:
99 | for wb in waybacks.read().splitlines():
100 | w = wb.split(" ")
101 | w_status = w[1]
102 | wayback += """
103 |
104 | {} |
105 | {} |
106 |
107 | """.format(w[0], w[0], w_status)
108 | except:
109 | pass
110 | try:
111 | with open(directory + "/cms.txt","r") as cmsFile:
112 | cms = ""
113 | for cms_read in cmsFile.read().splitlines():
114 | cms += """
115 | {}
116 | """.format(cms_read)
117 | except:
118 | cms = " This site dosn't seem to use a CMS "
119 | test.write('''
120 |
121 |
122 |
123 |
124 | Hawkscan Report
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
138 |
139 |
140 |
141 |
142 |
143 |
WAF
144 | {}
145 |
146 |
147 |
CMS
148 | {}
149 |
150 |
151 |
Status
152 | {}
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 | -
173 |
URLs
174 |
175 |
176 |
183 |
184 |
185 | Date |
186 | Url |
187 | Status |
188 | Bytes |
189 | {}
190 |
191 |
192 |
193 |
194 | -
195 |
MAIL
196 |
197 |
198 |
199 | Google Dork |
200 | {}
201 |
202 |
203 |
204 |
205 | -
206 |
Links
207 |
208 |
209 |
210 | {}
211 |
212 |
213 |
214 |
215 | -
216 |
Wayback
217 |
218 |
219 |
220 | {}
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
241 |
242 |
243 |
244 |
245 |
246 | '''.format(waf, cms, auth_stat, urls, gd_link, link, wayback))
247 |
248 | """if __name__ == '__main__':
249 | directory = "/sites/"
250 | cookie_ = None
251 | create_report(directory, cookie_)"""#DEBUG
--------------------------------------------------------------------------------
/report/html/fonts/charte.css:
--------------------------------------------------------------------------------
1 | /* ------
2 | SOMMAIRE
3 | ---------*/
4 | /*
5 |
6 | 00 - COULEURS
7 | 01 - TITRES
8 | 02 - LIENS
9 | 03 - BOUTONS
10 | 04 - PADDING ET LARGEURS
11 | */
12 | body{
13 | margin: 0px;
14 | }
15 | .container{
16 | margin: 0 auto;
17 | max-width: 95%;
18 | width: 1600px;
19 | }
20 | .containerplus{
21 | margin: 0 auto;
22 | max-width: 95%;
23 | width: 1450px;
24 | }
25 | .centerText{
26 | text-align: center;
27 | }
28 |
29 | @font-face{
30 | font-family: 'pr';/*poppins regular*/
31 | src: url('fonts/poppins/Poppins-Regular.woff2') format('woff2'),
32 | url('fonts/poppins/Poppins-Regular.woff') format('woff');
33 | font-weight: normal;
34 | font-style: normal;
35 | }
36 | @font-face{
37 | font-family: 'psb';/*poppins semi-bold*/
38 | src: url('fonts/poppins/Poppins-SemiBold.woff2') format('woff2'),
39 | url('fonts/poppins/Poppins-SemiBold.woff') format('woff');
40 | font-weight: normal;
41 | font-style: normal;
42 | }
43 | @font-face{
44 | font-family: 'pb';/*poppins bold*/
45 | src: url('fonts/poppins/Poppins-Bold.woff2') format('woff2'),
46 | url('fonts/poppins/Poppins-Bold.woff') format('woff');
47 | font-weight: normal;
48 | font-style: normal;
49 | }
50 |
51 |
52 |
53 |
54 | /* ----------
55 | 00 - COULEURS
56 | -------------*/
57 | :root{
58 | --color00: #fff; /*Blanc*/
59 | --color01: #000; /*Noir*/
60 | --color02: #00dffc; /*Bleu clair*/
61 | --color03: #008c9e; /*Bleu moyen*/
62 | --color04: #005f6b; /*Bleu foncé*/
63 | --color05: #343838; /*Gris clair*/
64 | --color06: #272625; /*Gris foncé*/
65 | }
66 | /*
67 | Utilisation :
68 | background: var(--color1);
69 | border-bottom: 3px solid var(--color1);
70 | color: var(--color1);
71 | */
72 |
73 |
74 | /* ------
75 | 01 - TITRES
76 | ---------*/
77 | h1, h2, h3, h4, h5, h6, .like-h1, .like-h2, .like-h3, .like-h4, .like-h5, .like-h6{
78 | line-height:1;
79 | margin: 0px;
80 | color: var(--color06);
81 | }
82 | h1, .like-h1{
83 | text-transform: uppercase;
84 | font-size: 3.5rem;
85 | font-family: "pb", sans-serif;
86 | }
h2, .like-h2{
    font-size: 2.5rem;
    /* NOTE(review): 'pm' is not declared by any @font-face above (only 'pr',
       'psb', 'pb' exist) — browsers fall back to sans-serif; confirm the
       intended family */
    font-family: "pm", sans-serif;
}
h3, .like-h3{
    font-size: 2rem;
    /* NOTE(review): 'psm' is likewise undeclared — possibly a typo for 'psb' */
    font-family: "psm", sans-serif;
}
95 | h4, .like-h4{
96 | font-size: 2rem;
97 | }
98 | h5, .like-h5{
99 | font-size: 1.8rem;
100 | }
101 | h6, .like-h6{
102 | font-size: 1.6rem;
103 | }
104 |
105 |
106 |
107 | /* ------------
108 | 02 - LIENS
109 | --------------*/
110 | a, a:link, a:visited{
111 | color: #4d4d4d;
112 | text-decoration: none;
113 | }
114 | a:hover, a:active{
115 | color: #000;
116 | }
117 |
118 |
119 | /* ------------
120 | 03 - BOUTONS
121 | --------------*/
122 |
/* Shared pill-button base for anchor buttons.
   BUG fix: the selector list ended with a trailing comma before '{'
   (".bouton3 a, {"), which is invalid CSS and made browsers drop the
   entire rule. */
a.bouton1, .bouton1 a,
a.bouton2, .bouton2 a,
a.bouton3, .bouton3 a {
    font-family: "mb", sans-serif;
    display: inline-block;
    padding: 10px 20px 10px 20px;
    color: #fff;
    margin: 15px 0 0 0;
    border-radius: 22px;
    cursor: pointer;
}
134 |
135 | a.bouton1, .bouton1 a {
136 | background-color: var(--color03);
137 | border: 3px solid var(--color03);
138 | transition: border 0.5s, background 0.5s, color 0.5s;
139 | }
140 | a.bouton1:hover, .bouton1 a:hover {
141 | background-color: var(--color02);
142 | color: var(--color03);
143 | border: 3px solid var(--color03);
144 | }
145 | a.bouton2, .bouton2 a {
146 | background-color: var(--color03);
147 | }
148 | a.bouton2:hover, .bouton2 a:hover {
149 |
150 | }
151 | a.bouton3, .bouton3 a {
152 | background-color: var(--color04);
153 | }
154 | a.bouton3:hover, .bouton3 a:hover {
155 |
156 | }
157 |
158 | button.bouton1, .bouton1 button,
159 | button.bouton2, .bouton2 button,
160 | button.bouton3, .bouton3 button,
161 | button.bouton4, .bouton4 button {
162 | font-family: "mb", sans-serif;
163 | display: inline-block;
164 | padding: 10px 20px 10px 20px;
165 | color: #fff;
166 | margin: 15px 0 0 0;
167 | border-radius: 22px;
168 | cursor: pointer;
169 | }
170 |
171 | button.bouton1, .bouton1 button {
172 | background-color: var(--color03);
173 | border: 3px solid var(--color03);
174 | transition: border 0.5s, background 0.5s, color 0.5s;
175 | }
176 | button.bouton1:hover, .bouton1 button:hover {
177 | background-color: var(--color02);
178 | color: var(--color03);
179 | border: 3px solid var(--color03);
180 | }
181 | button.bouton2, .bouton2 button {
182 | background-color: var(--color03);
183 | }
184 | button.bouton2:hover, .bouton2 button:hover {
185 |
186 | }
187 | button.bouton3, .bouton3 button {
188 | background-color: var(--color04);
189 | }
190 | button.bouton3:hover, .bouton3 button:hover {
191 |
192 | }
193 | button.bouton4, .bouton4 button {
194 | color: black;
195 | background-color: #c4c4c4;
196 | border: 2px solid #2A2A2A;
197 | border-radius: 0px;
198 | transition: border 0.5s, background 0.5s, color 0.5s;
199 | }
200 | button.bouton4:hover, .bouton4 button:hover {
201 | color: white;
202 | background-color: var(--color02)
203 | }
204 |
205 |
206 |
207 | /* ------------
208 | 04 - CONTENU WYSIWYG
209 | --------------*/
210 | p{
211 | margin: 0 0 10px 0;
212 | font-family: 'pr', sans-serif;
213 | color: #2A2A2A;
214 | line-height: 1.4;
215 | }
216 | strong{
217 | font-family: 'pb', sans-serif;
218 | font-weight: normal;
219 | }
220 | em{
221 | font-style:italic;
222 | }
223 | em strong, strong em{
224 | font-family: 'pb', sans-serif;
225 | font-style: italic;
226 | font-weight: normal;
227 | }
228 | cite{
229 | color:#000;
230 | font-family: Arial, sans-serif;
231 | font-size:1.1em;
232 | padding:0 3px 0 3px;
233 | }
234 | acronym{
235 | border-bottom:1px dashed #007aaa;
236 | cursor:help;
237 | }
238 | q{
239 | color:#007aaa;
240 | font-style:italic;
241 | }
242 | q:after{
243 | content: " ' Source: "attr(cite)" ";
244 | font-size:1em;
245 | }
246 |
247 | ul{
248 | color: #2A2A2A;
249 | margin: 5px 0 5px 15px;
250 | }
251 | ul li{
252 | list-style: none;
253 | padding: 0 0 10px 0px;
254 | position: relative;
255 | font-family: 'psb', sans-serif;
256 | color: #2A2A2A;
257 | }
hr{
    /* reset the shorthand first, THEN set the ridge; the previous order
       (border-bottom before border:0) zeroed the ridge out */
    border:0;
    border-bottom:2px ridge #666;
}
262 |
263 |
264 |
265 | /* CLASS GENERIQUES */
266 |
267 | /* Width */
268 |
269 |
270 | .w100 {
271 | width: 100%
272 | }
273 | .w90 {
274 | width: 90%
275 | }
276 | .w80 {
277 | width: 80%
278 | }
279 | .w70 {
280 | width: 70%
281 | }
282 | .w60 {
283 | width: 60%
284 | }
285 | .w50 {
286 | width: 50%
287 | }
288 | .w45 {
289 | width: 45%
290 | }
291 | .w40 {
292 | width: 40%
293 | }
294 | .w30 {
295 | width: 30%
296 | }
297 | .w25 {
298 | width: 25%
299 | }
300 | .w20 {
301 | width: 20%
302 | }
303 | .w10 {
304 | width: 10%
305 | }
306 |
307 | /* Width flex avec goutiere */
308 |
309 |
310 | .w1-2 {
311 | width: calc(100% / 2.1)
312 | }
313 |
314 | .w1-3 {
315 | width: calc(100% / 3.2)
316 | }
317 |
318 | .w1-4 {
319 | width: calc(100% / 4.3)
320 | }
321 |
322 | /* Flex */
323 |
324 |
325 | .flex {
326 | display: -webkit-box;
327 | display: -ms-flexbox;
328 | display: flex
329 | }
330 | .flex-wrap {
331 | -ms-flex-wrap: wrap;
332 | flex-wrap: wrap;
333 | }
334 |
335 | .flex-jcc {
336 | -webkit-box-pack: center;
337 | -ms-flex-pack: center;
338 | justify-content: center
339 | }
340 | .flex-jce{
341 | -webkit-box-pack: flex-end;
342 | -ms-flex-pack: flex-end;
343 | justify-content: flex-end
344 | }
345 | .flex-aic {
346 | -webkit-box-align: center;
347 | -ms-flex-align: center;
348 | align-items: center
349 | }
350 |
351 | .flex-aife {
352 | -webkit-box-align: end;
353 | -ms-flex-align: end;
354 | align-items: flex-end
355 | }
356 |
357 | .flex-jsb {
358 | -webkit-box-pack: justify;
359 | -ms-flex-pack: justify;
360 | justify-content: space-between
361 | }
362 |
363 | .flex-jsa {
364 | -webkit-box-pack: justify;
365 | -ms-flex-pack: justify;
366 | justify-content: space-around
367 | }
368 |
369 | .flex-dc {
370 | -webkit-box-orient: vertical;
371 | -webkit-box-direction: normal;
372 | -ms-flex-direction: column;
373 | flex-direction: column;
374 | }
375 |
376 |
377 | /* MEDIA // Cover */
378 |
379 | /* .cover {
380 | overflow: hidden;
381 | } */
382 | .cover img{
383 | object-fit: cover;
384 | font-family: 'object-fit: cover';
385 | height: 100%;
386 | width: 100%;
387 | }
388 |
389 |
390 | /*RESPONSIVE
391 | 01 - DESKTOP */
392 | @media screen and (max-width: 1400px), (max-device-width: 1400px){
393 | }
394 | @media screen and (max-width: 1200px), (max-device-width: 1200px){
395 |
396 | }
397 | /*02 TABLETTE*/
398 | @media screen and (max-width: 1024px), (max-device-width: 1024px){
399 |
400 | }
401 | /*03 MINI-TABLETTE*/
402 | @media screen and (max-width: 767px), (max-device-width: 767px){
403 |
404 | }
--------------------------------------------------------------------------------
/report/html/fonts/page.css:
--------------------------------------------------------------------------------
1 | /* ------
2 | SOMMAIRE
3 | ---------*/
4 | /*
5 |
6 | 00 - MAIN
7 |
8 | */
9 | header{
10 | padding: 35px 0px;
11 | border-bottom: 2px solid var(--color05);
12 | }
13 | header h1{
14 | text-align: center;
15 | }
16 |
17 |
18 |
19 | main .subTitle{
20 | padding: 40px 0px;
21 | }
22 | main .subTitle span.subText{
23 | color: var(--color02);
24 | font-family: 'pr', sans-serif;
25 | }
26 | main .subLink{
27 | background-color: var(--color06);
28 | padding: 20px 0px;
29 | }
30 | main .subLink h2{
31 | color: var(--color02);
32 | text-transform: uppercase;
33 | font-size: 2rem;
34 | }
35 | ul {
36 | list-style-type: none;
37 | }
38 |
39 | input[type=radio] {
40 | position: absolute;
41 | left: -9999px;
42 | top: -9999px;
43 | }
44 |
45 | .tabs {
46 | position: relative;
47 | z-index: 999;
48 | height: 42px;
49 | white-space: nowrap;
50 | font-size: 0;
51 | display: flex;
52 | align-items: center;
53 | }
54 |
55 | .tabs label {
56 | cursor: pointer;
57 | color: var(--color02);
58 | text-transform: uppercase;
59 | font-size: 2rem;
60 | font-family: "pm", sans-serif;
61 | display: block;
62 | padding: 8px 0px;
63 | transition: color 0.5s, background 0.5s;
64 | line-height: 36px;
65 | }
66 |
67 | .tabs label:first-child {
68 | margin-left: 0;
69 | }
70 |
71 | .tabs label:hover {
72 | background: var(--color02);
73 | color: var(--color06);
74 | }
75 |
76 | input:nth-child(1):checked ~ .tabs label:nth-child(1),
77 | input:nth-child(2):checked ~ .tabs label:nth-child(2),
78 | input:nth-child(3):checked ~ .tabs label:nth-child(3),
79 | input:nth-child(4):checked ~ .tabs label:nth-child(4) {
80 | background: var(--color02);
81 | color: var(--color06);
82 | }
83 |
84 | .sections {
85 | margin-top: 40px;
86 | }
87 |
88 | .sections li {
89 | display: none;
90 | width: 100%;
91 | padding: 1em;
92 | border: solid 1px #DDD;
93 | border-radius: 0 5px 5px 5px;
94 | background-color: #FFF;
95 | box-shadow: 1px 1px 20px rgba(0,0,0,0.4);
96 | }
97 | .sections li h3{
98 | margin-bottom: 20px;
99 | position: relative;
100 | margin-left: 30px;
101 | }
102 | .sections li h3::before{
103 | content: "";
104 | position: absolute;
105 | top: 50%;
106 | right: 100%;
107 | transform: translateY(-50%);
108 | width: 25px;
109 | height: 25px;
110 | background-image: url(img/share.png);
111 | background-size: contain;
112 | background-repeat: no-repeat;
113 | margin-right: 5px;
114 | }
115 |
116 | input:nth-child(1):checked ~ .sections li:nth-child(1),
117 | input:nth-child(2):checked ~ .sections li:nth-child(2),
118 | input:nth-child(3):checked ~ .sections li:nth-child(3),
119 | input:nth-child(4):checked ~ .sections li:nth-child(4) {
120 | display: block;
121 | }
122 |
123 | table {
124 | border: 3px solid var(--color03);
125 | border-collapse: collapse;
126 | width: calc(100% / 2.1);
127 | background-color: var(--color06);
128 | }
129 | th {
130 | font-family: monospace;
131 | border: 2px solid var(--color03);
132 | padding: 15px 25px;
133 | font-family: 'mb', sans-serif;
134 | color: var(--color03);
135 | text-transform: uppercase;
136 | text-align: center;
137 | background-color: var(--color06);
138 | background-image: url(sky.jpg);
139 | }
140 | td {
141 | font-family: sans-serif;
142 | border: 2px solid var(--color03);
143 | padding: 15px 25px;
144 | text-align: center;
145 | background-color: var(--color06);
146 | color: var(--color03);
147 | }
--------------------------------------------------------------------------------
/report/html/img/share.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/report/html/img/share.png
--------------------------------------------------------------------------------
/report/html/img/share.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/report/html/scripts/scripts.js:
--------------------------------------------------------------------------------
$(document).ready(function () {
    // Which result-row class each <select id="status_code"> option reveals.
    var ALL_ROWS = [".value200", ".value300", ".value403", ".value4500"];
    var ROW_FOR = {
        plus:       ".value200",
        redirect:   ".value300",
        forbi:      ".value403",
        serv_error: ".value4500"
    };

    $("#status_code").change(function () {
        var choice = $(this).val();
        if (ROW_FOR.hasOwnProperty(choice)) {
            // Show the selected status group, hide the other three.
            ALL_ROWS.forEach(function (row) {
                if (row === ROW_FOR[choice]) {
                    $(row).show();
                } else {
                    $(row).hide();
                }
            });
        } else {
            // Fallback option: historically only 200 and 403 are re-shown;
            // 300/4500 keep whatever visibility they already had.
            $(".value200").show();
            $(".value403").show();
        }
    });
});
33 |
34 |
35 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests
2 | pyopenssl
3 | queuelib
4 | fake_useragent
5 | beautifulsoup4
6 | argparse
7 | bs4
8 | dnspython
9 | wafw00f
10 | python-whois
11 | sockets
12 | python-engineio==3.14.2
13 | python-socketio[client]==4.6.0
14 | google
15 | notifypy
16 | notify-py
--------------------------------------------------------------------------------
/run_modules.py:
--------------------------------------------------------------------------------
1 | from modules.before_fuzzing.check_cms import check_cms
2 | from modules.before_fuzzing.check_socketio import check_socketio
3 | from modules.before_fuzzing.google_dorks import query_dork, query_cse
4 | from modules.waf.detect_waf import detect_wafw00f
5 |
class check_modules:
    """
    check_modules: To manage all module launches
    """

    def run_all_modules(self, beforeStart, url, directory, dire, thread):
        """Launch every built-in recon module against *url*, in a fixed order.

        Returns the wafw00f detection result when a WAF was identified,
        otherwise falls through and returns None implicitly.
        """

        ########## Native modules ##########

        cms_scanner = check_cms()
        socketio_scanner = check_socketio()

        # In progress / TODO: optional WAF-sensitivity pre-scan
        #   prescan = False
        #   if prescan:
        #       dw = detect_wafw00f(url, directory, thread)
        #       pre_scan(dw, url)  # pre-check waf sensitivity

        beforeStart.get_header(url, directory)
        beforeStart.get_dns(url, directory)
        beforeStart.letsdebug(url)
        cms_found, cms_version = cms_scanner.detect_cms(url, directory)
        if cms_found:
            cms_scanner.cve_cms(cms_found, cms_version)
        waf_result = detect_wafw00f(url, directory, thread)
        beforeStart.wayback_check(dire, directory)
        beforeStart.gitpast(url)
        query_cse(url, directory)
        query_dork(url, directory)
        beforeStart.firebaseio(url)
        beforeStart.check_localhost(url)
        beforeStart.check_vhost(dire, url)
        beforeStart.check_backup_domain(dire, url)
        socketio_scanner.main_socketio(url)
        if waf_result:
            return waf_result

        ########## Personal modules ##########
/setup.py:
--------------------------------------------------------------------------------
# Author:
#     Nathan Faillenot (codejump - @c0dejump)
#
# Packaging metadata for HawkScan.  Install with:  pip install .
# (Removed the unused `pathlib`, bare `setuptools` and `Extension` imports.)

from setuptools import setup, find_packages

# Reuse the repository README verbatim as the PyPI long description.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setup(
    name="HawkScan",
    # NOTE(review): static/banner.py advertises v2.6 — confirm which version
    # string is current before releasing.
    version="2.2",
    author="c0dejump",
    author_email="codejumpgame@gmail.com",
    description="Security Tool for Reconnaissance and Information Gathering on a website. (python 3.x)",
    long_description=long_description,
    long_description_content_type="text/markdown",
    # Ship every package except the static assets directory.
    packages=find_packages(exclude=["static"]),
    include_package_data=True,
    url="https://github.com/c0dejump/HawkScan/",
    # Keep in sync with requirements.txt (which also pins the same
    # engineio/socketio versions required for the socket.io probe).
    install_requires=[
        'requests',
        'pyopenssl',
        'queuelib',
        'fake_useragent',
        'argparse',
        'bs4',
        'dnspython',
        'wafw00f',
        'python-whois',
        'sockets',
        'python-engineio==3.14.2',
        'python-socketio[client]==4.6.0',
        'google',
        'notifypy'
    ],
    project_urls={
        "Bug Tracker": "https://github.com/c0dejump/HawkScan/issues",
    },
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
--------------------------------------------------------------------------------
/sites/your scan website.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/sites/your scan website.txt
--------------------------------------------------------------------------------
/static/banner.py:
--------------------------------------------------------------------------------
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Date-themed ASCII startup banner for HawkScan."""
from datetime import date  # was `from datetime import *`; only `date` is used

# Captured once at import time; banner() uses them as its defaults.
today = date.today()
day_date = today.day
month_date = today.month

def banner(month=None, day=None):
    """Print the HawkScan banner matching the given (or current) date.

    Both parameters default to the date captured at import time, so the
    historical no-argument call ``banner()`` is unchanged; passing them
    explicitly selects a theme deterministically (useful for testing).

    Fixes vs. previous revision:
      * Christmas window was ``range(1, 25)`` — an off-by-one that skipped
        December 25 itself; now inclusive of the 25th.
      * Christmas banner displayed v2.3 while the default displayed v2.6;
        unified to 2.6.
    """
    month = month_date if month is None else month
    day = day_date if day is None else day

    if month == 12 and 1 <= day <= 25:
        print("""
    \033[31m

             .:*~*:._.:*~*:._.:*~*:._.:*~*:._.:*~*:._.:*~*:.
             .        *                                    .
             .       /.\                                   .
             .      /..'\
             .      /'.'\          (_)           (_)     | |     / _____)
             .     /.''.'\      _______ _____ _ _ _| |  _( (____   ____ _____ ____  \033[0m \033[34m
             .     /.'.'.\     |  ___ (____ | | | | | |_/ )\____ \ / ___|____ |  _ \
             .    /'.''.'.\    | |   | / ___ | | | | |  _ ( _____) | (___/ ___ | | | |
             .    ^^^[_]^^^    |_|   |_\_____|\___/|_|  \_|______/ \____)_____|_|  |_|
             .                                             .
             .                                             .
             .                                             .\033[0m
             .:*~*:._.:*~*:._.:*~*:._.:*~*:._.:*~*:._.:*~*:.

                                                              v\033[34m2\033[0m.\033[31m6\033[0m \033[34mBy\033[0m \033[31mcodejump\033[0m
    ___________________________________________________________________
    """)

    elif month == 10 and day in range(10, 20):
        # NOTE(review): window is Oct 10-19, not Oct 31 (Halloween itself) —
        # confirm whether the spooky banner should cover the 31st too.
        print("""

    \033[31m
              ( /(                )
     )\())   )            (      (  /(     )
    ((_)\ ( /(   )\))(   )\())(   (  ( /(  (
     _((_))(_)|(_)()\((_)\ )\  )\ )(_))  )\ )
    | || ((_)__(()((_) |(_|(_)((_|(_)_ _(_/(
    | __ / _` \ V  V / / /(_-< _|/ _` | ' \))
    |_||_\__,_|\_/\_/|_\_\/__|__|\__,_|_||_|

    \033[0m """)

    else:
        print("""
    \033[31m
     _   _                _     _______
    (_)           (_)     | |     / _____)
     _______ _____ _ _ _| |  _( (____   ____ _____ ____
    | ___ (____ | | | | |_/ )\____ \ / ___|____ | _ \
    | | | / ___ | | | | _ ( _____) | (___/ ___ | | | |
    |_| |_\_____|\___/|_| \_|______/ \____)_____|_| |_|

                                                              v2.6 By codejump
    ___________________________________________________________________\033[0m
    """)
--------------------------------------------------------------------------------
/static/logo_hawkscan.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/static/logo_hawkscan.jpeg
--------------------------------------------------------------------------------
/sublist/_scan_of_sublist3r.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/sublist/_scan_of_sublist3r.txt
--------------------------------------------------------------------------------
/tools/Sublist3r/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 |
--------------------------------------------------------------------------------
/tools/Sublist3r/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 2, June 1991
3 |
4 | Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
5 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
6 | Everyone is permitted to copy and distribute verbatim copies
7 | of this license document, but changing it is not allowed.
8 |
9 | Preamble
10 |
11 | The licenses for most software are designed to take away your
12 | freedom to share and change it. By contrast, the GNU General Public
13 | License is intended to guarantee your freedom to share and change free
14 | software--to make sure the software is free for all its users. This
15 | General Public License applies to most of the Free Software
16 | Foundation's software and to any other program whose authors commit to
17 | using it. (Some other Free Software Foundation software is covered by
18 | the GNU Lesser General Public License instead.) You can apply it to
19 | your programs, too.
20 |
21 | When we speak of free software, we are referring to freedom, not
22 | price. Our General Public Licenses are designed to make sure that you
23 | have the freedom to distribute copies of free software (and charge for
24 | this service if you wish), that you receive source code or can get it
25 | if you want it, that you can change the software or use pieces of it
26 | in new free programs; and that you know you can do these things.
27 |
28 | To protect your rights, we need to make restrictions that forbid
29 | anyone to deny you these rights or to ask you to surrender the rights.
30 | These restrictions translate to certain responsibilities for you if you
31 | distribute copies of the software, or if you modify it.
32 |
33 | For example, if you distribute copies of such a program, whether
34 | gratis or for a fee, you must give the recipients all the rights that
35 | you have. You must make sure that they, too, receive or can get the
36 | source code. And you must show them these terms so they know their
37 | rights.
38 |
39 | We protect your rights with two steps: (1) copyright the software, and
40 | (2) offer you this license which gives you legal permission to copy,
41 | distribute and/or modify the software.
42 |
43 | Also, for each author's protection and ours, we want to make certain
44 | that everyone understands that there is no warranty for this free
45 | software. If the software is modified by someone else and passed on, we
46 | want its recipients to know that what they have is not the original, so
47 | that any problems introduced by others will not reflect on the original
48 | authors' reputations.
49 |
50 | Finally, any free program is threatened constantly by software
51 | patents. We wish to avoid the danger that redistributors of a free
52 | program will individually obtain patent licenses, in effect making the
53 | program proprietary. To prevent this, we have made it clear that any
54 | patent must be licensed for everyone's free use or not licensed at all.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | GNU GENERAL PUBLIC LICENSE
60 | TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
61 |
62 | 0. This License applies to any program or other work which contains
63 | a notice placed by the copyright holder saying it may be distributed
64 | under the terms of this General Public License. The "Program", below,
65 | refers to any such program or work, and a "work based on the Program"
66 | means either the Program or any derivative work under copyright law:
67 | that is to say, a work containing the Program or a portion of it,
68 | either verbatim or with modifications and/or translated into another
69 | language. (Hereinafter, translation is included without limitation in
70 | the term "modification".) Each licensee is addressed as "you".
71 |
72 | Activities other than copying, distribution and modification are not
73 | covered by this License; they are outside its scope. The act of
74 | running the Program is not restricted, and the output from the Program
75 | is covered only if its contents constitute a work based on the
76 | Program (independent of having been made by running the Program).
77 | Whether that is true depends on what the Program does.
78 |
79 | 1. You may copy and distribute verbatim copies of the Program's
80 | source code as you receive it, in any medium, provided that you
81 | conspicuously and appropriately publish on each copy an appropriate
82 | copyright notice and disclaimer of warranty; keep intact all the
83 | notices that refer to this License and to the absence of any warranty;
84 | and give any other recipients of the Program a copy of this License
85 | along with the Program.
86 |
87 | You may charge a fee for the physical act of transferring a copy, and
88 | you may at your option offer warranty protection in exchange for a fee.
89 |
90 | 2. You may modify your copy or copies of the Program or any portion
91 | of it, thus forming a work based on the Program, and copy and
92 | distribute such modifications or work under the terms of Section 1
93 | above, provided that you also meet all of these conditions:
94 |
95 | a) You must cause the modified files to carry prominent notices
96 | stating that you changed the files and the date of any change.
97 |
98 | b) You must cause any work that you distribute or publish, that in
99 | whole or in part contains or is derived from the Program or any
100 | part thereof, to be licensed as a whole at no charge to all third
101 | parties under the terms of this License.
102 |
103 | c) If the modified program normally reads commands interactively
104 | when run, you must cause it, when started running for such
105 | interactive use in the most ordinary way, to print or display an
106 | announcement including an appropriate copyright notice and a
107 | notice that there is no warranty (or else, saying that you provide
108 | a warranty) and that users may redistribute the program under
109 | these conditions, and telling the user how to view a copy of this
110 | License. (Exception: if the Program itself is interactive but
111 | does not normally print such an announcement, your work based on
112 | the Program is not required to print an announcement.)
113 |
114 | These requirements apply to the modified work as a whole. If
115 | identifiable sections of that work are not derived from the Program,
116 | and can be reasonably considered independent and separate works in
117 | themselves, then this License, and its terms, do not apply to those
118 | sections when you distribute them as separate works. But when you
119 | distribute the same sections as part of a whole which is a work based
120 | on the Program, the distribution of the whole must be on the terms of
121 | this License, whose permissions for other licensees extend to the
122 | entire whole, and thus to each and every part regardless of who wrote it.
123 |
124 | Thus, it is not the intent of this section to claim rights or contest
125 | your rights to work written entirely by you; rather, the intent is to
126 | exercise the right to control the distribution of derivative or
127 | collective works based on the Program.
128 |
129 | In addition, mere aggregation of another work not based on the Program
130 | with the Program (or with a work based on the Program) on a volume of
131 | a storage or distribution medium does not bring the other work under
132 | the scope of this License.
133 |
134 | 3. You may copy and distribute the Program (or a work based on it,
135 | under Section 2) in object code or executable form under the terms of
136 | Sections 1 and 2 above provided that you also do one of the following:
137 |
138 | a) Accompany it with the complete corresponding machine-readable
139 | source code, which must be distributed under the terms of Sections
140 | 1 and 2 above on a medium customarily used for software interchange; or,
141 |
142 | b) Accompany it with a written offer, valid for at least three
143 | years, to give any third party, for a charge no more than your
144 | cost of physically performing source distribution, a complete
145 | machine-readable copy of the corresponding source code, to be
146 | distributed under the terms of Sections 1 and 2 above on a medium
147 | customarily used for software interchange; or,
148 |
149 | c) Accompany it with the information you received as to the offer
150 | to distribute corresponding source code. (This alternative is
151 | allowed only for noncommercial distribution and only if you
152 | received the program in object code or executable form with such
153 | an offer, in accord with Subsection b above.)
154 |
155 | The source code for a work means the preferred form of the work for
156 | making modifications to it. For an executable work, complete source
157 | code means all the source code for all modules it contains, plus any
158 | associated interface definition files, plus the scripts used to
159 | control compilation and installation of the executable. However, as a
160 | special exception, the source code distributed need not include
161 | anything that is normally distributed (in either source or binary
162 | form) with the major components (compiler, kernel, and so on) of the
163 | operating system on which the executable runs, unless that component
164 | itself accompanies the executable.
165 |
166 | If distribution of executable or object code is made by offering
167 | access to copy from a designated place, then offering equivalent
168 | access to copy the source code from the same place counts as
169 | distribution of the source code, even though third parties are not
170 | compelled to copy the source along with the object code.
171 |
172 | 4. You may not copy, modify, sublicense, or distribute the Program
173 | except as expressly provided under this License. Any attempt
174 | otherwise to copy, modify, sublicense or distribute the Program is
175 | void, and will automatically terminate your rights under this License.
176 | However, parties who have received copies, or rights, from you under
177 | this License will not have their licenses terminated so long as such
178 | parties remain in full compliance.
179 |
180 | 5. You are not required to accept this License, since you have not
181 | signed it. However, nothing else grants you permission to modify or
182 | distribute the Program or its derivative works. These actions are
183 | prohibited by law if you do not accept this License. Therefore, by
184 | modifying or distributing the Program (or any work based on the
185 | Program), you indicate your acceptance of this License to do so, and
186 | all its terms and conditions for copying, distributing or modifying
187 | the Program or works based on it.
188 |
189 | 6. Each time you redistribute the Program (or any work based on the
190 | Program), the recipient automatically receives a license from the
191 | original licensor to copy, distribute or modify the Program subject to
192 | these terms and conditions. You may not impose any further
193 | restrictions on the recipients' exercise of the rights granted herein.
194 | You are not responsible for enforcing compliance by third parties to
195 | this License.
196 |
197 | 7. If, as a consequence of a court judgment or allegation of patent
198 | infringement or for any other reason (not limited to patent issues),
199 | conditions are imposed on you (whether by court order, agreement or
200 | otherwise) that contradict the conditions of this License, they do not
201 | excuse you from the conditions of this License. If you cannot
202 | distribute so as to satisfy simultaneously your obligations under this
203 | License and any other pertinent obligations, then as a consequence you
204 | may not distribute the Program at all. For example, if a patent
205 | license would not permit royalty-free redistribution of the Program by
206 | all those who receive copies directly or indirectly through you, then
207 | the only way you could satisfy both it and this License would be to
208 | refrain entirely from distribution of the Program.
209 |
210 | If any portion of this section is held invalid or unenforceable under
211 | any particular circumstance, the balance of the section is intended to
212 | apply and the section as a whole is intended to apply in other
213 | circumstances.
214 |
215 | It is not the purpose of this section to induce you to infringe any
216 | patents or other property right claims or to contest validity of any
217 | such claims; this section has the sole purpose of protecting the
218 | integrity of the free software distribution system, which is
219 | implemented by public license practices. Many people have made
220 | generous contributions to the wide range of software distributed
221 | through that system in reliance on consistent application of that
222 | system; it is up to the author/donor to decide if he or she is willing
223 | to distribute software through any other system and a licensee cannot
224 | impose that choice.
225 |
226 | This section is intended to make thoroughly clear what is believed to
227 | be a consequence of the rest of this License.
228 |
229 | 8. If the distribution and/or use of the Program is restricted in
230 | certain countries either by patents or by copyrighted interfaces, the
231 | original copyright holder who places the Program under this License
232 | may add an explicit geographical distribution limitation excluding
233 | those countries, so that distribution is permitted only in or among
234 | countries not thus excluded. In such case, this License incorporates
235 | the limitation as if written in the body of this License.
236 |
237 | 9. The Free Software Foundation may publish revised and/or new versions
238 | of the General Public License from time to time. Such new versions will
239 | be similar in spirit to the present version, but may differ in detail to
240 | address new problems or concerns.
241 |
242 | Each version is given a distinguishing version number. If the Program
243 | specifies a version number of this License which applies to it and "any
244 | later version", you have the option of following the terms and conditions
245 | either of that version or of any later version published by the Free
246 | Software Foundation. If the Program does not specify a version number of
247 | this License, you may choose any version ever published by the Free Software
248 | Foundation.
249 |
250 | 10. If you wish to incorporate parts of the Program into other free
251 | programs whose distribution conditions are different, write to the author
252 | to ask for permission. For software which is copyrighted by the Free
253 | Software Foundation, write to the Free Software Foundation; we sometimes
254 | make exceptions for this. Our decision will be guided by the two goals
255 | of preserving the free status of all derivatives of our free software and
256 | of promoting the sharing and reuse of software generally.
257 |
258 | NO WARRANTY
259 |
260 | 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
261 | FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
262 | OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
263 | PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
264 | OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
265 | MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
266 | TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
267 | PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
268 | REPAIR OR CORRECTION.
269 |
270 | 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
271 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
272 | REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
273 | INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
274 | OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
275 | TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
276 | YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
277 | PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
278 | POSSIBILITY OF SUCH DAMAGES.
279 |
280 | END OF TERMS AND CONDITIONS
281 |
282 | How to Apply These Terms to Your New Programs
283 |
284 | If you develop a new program, and you want it to be of the greatest
285 | possible use to the public, the best way to achieve this is to make it
286 | free software which everyone can redistribute and change under these terms.
287 |
288 | To do so, attach the following notices to the program. It is safest
289 | to attach them to the start of each source file to most effectively
290 | convey the exclusion of warranty; and each file should have at least
291 | the "copyright" line and a pointer to where the full notice is found.
292 |
293 | {description}
294 | Copyright (C) {year} {fullname}
295 |
296 | This program is free software; you can redistribute it and/or modify
297 | it under the terms of the GNU General Public License as published by
298 | the Free Software Foundation; either version 2 of the License, or
299 | (at your option) any later version.
300 |
301 | This program is distributed in the hope that it will be useful,
302 | but WITHOUT ANY WARRANTY; without even the implied warranty of
303 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
304 | GNU General Public License for more details.
305 |
306 | You should have received a copy of the GNU General Public License along
307 | with this program; if not, write to the Free Software Foundation, Inc.,
308 | 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
309 |
310 | Also add information on how to contact you by electronic and paper mail.
311 |
312 | If the program is interactive, make it output a short notice like this
313 | when it starts in an interactive mode:
314 |
315 | Gnomovision version 69, Copyright (C) year name of author
316 | Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
317 | This is free software, and you are welcome to redistribute it
318 | under certain conditions; type `show c' for details.
319 |
320 | The hypothetical commands `show w' and `show c' should show the appropriate
321 | parts of the General Public License. Of course, the commands you use may
322 | be called something other than `show w' and `show c'; they could even be
323 | mouse-clicks or menu items--whatever suits your program.
324 |
325 | You should also get your employer (if you work as a programmer) or your
326 | school, if any, to sign a "copyright disclaimer" for the program, if
327 | necessary. Here is a sample; alter the names:
328 |
329 | Yoyodyne, Inc., hereby disclaims all copyright interest in the program
330 | `Gnomovision' (which makes passes at compilers) written by James Hacker.
331 |
332 | {signature of Ty Coon}, 1 April 1989
333 | Ty Coon, President of Vice
334 |
335 | This General Public License does not permit incorporating your program into
336 | proprietary programs. If your program is a subroutine library, you may
337 | consider it more useful to permit linking proprietary applications with the
338 | library. If this is what you want to do, use the GNU Lesser General
339 | Public License instead of this License.
340 |
341 |
--------------------------------------------------------------------------------
/tools/Sublist3r/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE README.md
2 | include subbrute/*.txt
3 |
--------------------------------------------------------------------------------
/tools/Sublist3r/README.md:
--------------------------------------------------------------------------------
1 | ## About Sublist3r
2 |
3 | Sublist3r is a python tool designed to enumerate subdomains of websites using OSINT. It helps penetration testers and bug hunters collect and gather subdomains for the domain they are targeting. Sublist3r enumerates subdomains using many search engines such as Google, Yahoo, Bing, Baidu and Ask. Sublist3r also enumerates subdomains using Netcraft, Virustotal, ThreatCrowd, DNSdumpster and ReverseDNS.
4 |
5 | [subbrute](https://github.com/TheRook/subbrute) was integrated with Sublist3r to increase the possibility of finding more subdomains using bruteforce with an improved wordlist. The credit goes to TheRook who is the author of subbrute.
6 |
7 | ## Screenshots
8 |
9 | 
10 |
11 |
12 | ## Installation
13 |
14 | ```
15 | git clone https://github.com/aboul3la/Sublist3r.git
16 | ```
17 |
18 | ## Recommended Python Version:
19 |
20 | Sublist3r currently supports **Python 2** and **Python 3**.
21 |
22 | * The recommended version for Python 2 is **2.7.x**
23 | * The recommended version for Python 3 is **3.4.x**
24 |
25 | ## Dependencies:
26 |
27 | Sublist3r depends on the `requests`, `dnspython` and `argparse` python modules.
28 |
29 | These dependencies can be installed using the requirements file:
30 |
31 | - Installation on Windows:
32 | ```
33 | c:\python27\python.exe -m pip install -r requirements.txt
34 | ```
35 | - Installation on Linux
36 | ```
37 | sudo pip install -r requirements.txt
38 | ```
39 |
40 | Alternatively, each module can be installed independently as shown below.
41 |
42 | #### Requests Module (http://docs.python-requests.org/en/latest/)
43 |
44 | - Install for Windows:
45 | ```
46 | c:\python27\python.exe -m pip install requests
47 | ```
48 |
49 | - Install for Ubuntu/Debian:
50 | ```
51 | sudo apt-get install python-requests
52 | ```
53 |
54 | - Install for Centos/Redhat:
55 | ```
56 | sudo yum install python-requests
57 | ```
58 |
59 | - Install using pip on Linux:
60 | ```
61 | sudo pip install requests
62 | ```
63 |
64 | #### dnspython Module (http://www.dnspython.org/)
65 |
66 | - Install for Windows:
67 | ```
68 | c:\python27\python.exe -m pip install dnspython
69 | ```
70 |
71 | - Install for Ubuntu/Debian:
72 | ```
73 | sudo apt-get install python-dnspython
74 | ```
75 |
76 | - Install using pip:
77 | ```
78 | sudo pip install dnspython
79 | ```
80 |
81 | #### argparse Module
82 |
83 | - Install for Ubuntu/Debian:
84 | ```
85 | sudo apt-get install python-argparse
86 | ```
87 |
88 | - Install for Centos/Redhat:
89 | ```
90 | sudo yum install python-argparse
91 | ```
92 |
93 | - Install using pip:
94 | ```
95 | sudo pip install argparse
96 | ```
97 |
98 | **for coloring in windows install the following libraries**
99 | ```
100 | c:\python27\python.exe -m pip install win_unicode_console colorama
101 | ```
102 |
103 | ## Usage
104 |
105 | Short Form | Long Form | Description
106 | ------------- | ------------- |-------------
107 | -d | --domain | Domain name to enumerate subdomains of
108 | -b | --bruteforce | Enable the subbrute bruteforce module
109 | -p | --ports | Scan the found subdomains against specific tcp ports
110 | -v | --verbose | Enable the verbose mode and display results in realtime
111 | -t | --threads | Number of threads to use for subbrute bruteforce
112 | -e | --engines | Specify a comma-separated list of search engines
113 | -o | --output | Save the results to text file
114 | -h | --help | show the help message and exit
115 |
116 | ### Examples
117 |
118 | * To list all the basic options and switches use -h switch:
119 |
120 | ```python sublist3r.py -h```
121 |
122 | * To enumerate subdomains of specific domain:
123 |
124 | ``python sublist3r.py -d example.com``
125 |
126 | * To enumerate subdomains of specific domain and show only subdomains which have open ports 80 and 443 :
127 |
128 | ``python sublist3r.py -d example.com -p 80,443``
129 |
130 | * To enumerate subdomains of specific domain and show the results in realtime:
131 |
132 | ``python sublist3r.py -v -d example.com``
133 |
134 | * To enumerate subdomains and enable the bruteforce module:
135 |
136 | ``python sublist3r.py -b -d example.com``
137 |
138 | * To enumerate subdomains and use specific engines such as Google, Yahoo and Virustotal engines
139 |
140 | ``python sublist3r.py -e google,yahoo,virustotal -d example.com``
141 |
142 |
143 | ## Using Sublist3r as a module in your python scripts
144 |
145 | **Example**
146 |
147 | ```python
148 | import sublist3r
149 | subdomains = sublist3r.main(domain, no_threads, savefile, ports, silent, verbose, enable_bruteforce, engines)
150 | ```
151 | The main function will return a set of unique subdomains found by Sublist3r
152 |
153 | **Function Usage:**
154 | * **domain**: The domain you want to enumerate subdomains of.
155 | * **savefile**: save the output into text file.
156 | * **ports**: specify a comma-separated list of the tcp ports to scan.
157 | * **silent**: set sublist3r to work in silent mode during the execution (helpful when you don't need a lot of noise).
158 | * **verbose**: display the found subdomains in real time.
159 | * **enable_bruteforce**: enable the bruteforce module.
160 | * **engines**: (Optional) to choose specific engines.
161 |
162 | Example to enumerate subdomains of Yahoo.com:
163 | ```python
164 | import sublist3r
165 | subdomains = sublist3r.main('yahoo.com', 40, 'yahoo_subdomains.txt', ports= None, silent=False, verbose= False, enable_bruteforce= False, engines=None)
166 | ```
167 |
168 | ## License
169 |
170 | Sublist3r is licensed under the GNU GPL license. Take a look at the [LICENSE](https://github.com/aboul3la/Sublist3r/blob/master/LICENSE) for more information.
171 |
172 |
173 | ## Credits
174 |
175 | * [TheRook](https://github.com/TheRook) - The bruteforce module was based on his script **subbrute**.
176 | * [Bitquark](https://github.com/bitquark) - The Subbrute's wordlist was based on his research **dnspop**.
177 |
178 | ## Thanks
179 |
180 | * Special Thanks to [Ibrahim Mosaad](https://twitter.com/ibrahim_mosaad) for his great contributions that helped in improving the tool.
181 |
182 | ## Version
183 | **Current version is 1.0**
184 |
--------------------------------------------------------------------------------
/tools/Sublist3r/requirements.txt:
--------------------------------------------------------------------------------
1 | argparse
2 | dnspython
3 | requests
4 |
--------------------------------------------------------------------------------
/tools/Sublist3r/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages

# Trove classifiers advertising the supported platforms and Python versions.
_CLASSIFIERS = [
    'Development Status :: 5 - Production/Stable',
    'Environment :: Console',
    'Intended Audience :: Information Technology',
    'Intended Audience :: System Administrators',
    'Intended Audience :: Telecommunications Industry',
    'License :: OSI Approved :: GNU General Public License v2',
    'Operating System :: POSIX :: Linux',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Topic :: Security',
]

# Console entry point: `sublist3r` on the PATH runs sublist3r.interactive().
_ENTRY_POINTS = {
    'console_scripts': [
        'sublist3r = sublist3r:interactive',
    ],
}

setup(
    name='Sublist3r',
    version='1.0',
    python_requires='>=2.7',
    # The environment marker pulls in the argparse backport only on 2.7.
    install_requires=[
        'dnspython',
        'requests',
        "argparse; python_version=='2.7'",
    ],
    packages=find_packages() + ['.'],
    include_package_data=True,
    url='https://github.com/aboul3la/Sublist3r',
    license='GPL-2.0',
    description='Subdomains enumeration tool for penetration testers',
    classifiers=_CLASSIFIERS,
    keywords='subdomain dns detection',
    entry_points=_ENTRY_POINTS,
)
37 |
--------------------------------------------------------------------------------
/tools/Sublist3r/subbrute/__init.py__:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/c0dejump/HawkScan/a233b50ae4baaf26e330ac94ea3b846517b95b7c/tools/Sublist3r/subbrute/__init.py__
--------------------------------------------------------------------------------
/tools/Sublist3r/subbrute/resolvers.txt:
--------------------------------------------------------------------------------
1 | 103.20.188.35
2 | 103.20.188.83
3 | 103.22.248.62
4 | 103.3.46.105
5 | 106.186.17.181
6 | 106.51.255.133
7 | 109.69.8.34
8 | 109.69.8.51
9 | 110.170.117.15
10 | 110.76.151.17
11 | 114.114.114.114
12 | 114.114.114.119
13 | 114.114.115.115
14 | 114.114.115.119
15 | 115.68.100.102
16 | 115.68.100.103
17 | 115.68.62.210
18 | 115.68.62.222
19 | 115.85.69.162
20 | 117.102.224.154
21 | 117.102.224.230
22 | 119.160.208.251
23 | 119.160.208.252
24 | 119.18.159.222
25 | 119.252.167.229
26 | 121.152.231.196
27 | 121.194.2.2
28 | 12.127.16.67
29 | 12.127.17.72
30 | 121.52.206.130
31 | 121.52.87.128
32 | 122.0.0.12
33 | 122.155.12.41
34 | 122.155.167.38
35 | 122.155.167.70
36 | 122.155.3.119
37 | 122.210.229.161
38 | 122.255.96.132
39 | 124.107.135.126
40 | 1.2.4.8
41 | 128.199.248.105
42 | 129.250.35.250
43 | 129.250.35.251
44 | 129.7.1.1
45 | 129.7.1.6
46 | 130.180.228.2
47 | 131.155.140.130
48 | 131.191.7.12
49 | 134.48.1.32
50 | 134.60.1.111
51 | 137.82.1.1
52 | 139.0.27.186
53 | 139.130.4.4
54 | 139.175.55.244
55 | 141.1.1.1
56 | 141.1.27.249
57 | 141.211.125.15
58 | 141.211.125.17
59 | 141.211.144.15
60 | 141.211.144.17
61 | 142.103.1.1
62 | 142.46.1.130
63 | 142.46.128.130
64 | 144.76.202.253
65 | 147.235.250.2
66 | 147.235.251.3
67 | 147.29.10.55
68 | 147.29.10.6
69 | 148.233.151.6
70 | 148.233.151.8
71 | 148.243.65.17
72 | 149.156.64.210
73 | 149.211.153.50
74 | 151.11.85.5
75 | 152.99.1.10
76 | 152.99.200.6
77 | 152.99.78.136
78 | 153.19.1.254
79 | 158.43.128.1
80 | 158.43.128.72
81 | 158.43.192.1
82 | 158.43.240.3
83 | 158.43.240.4
84 | 159.90.200.7
85 | 160.7.240.20
86 | 164.124.101.2
87 | 164.124.107.9
88 | 165.166.142.42
89 | 165.21.100.88
90 | 165.21.83.88
91 | 165.87.13.129
92 | 165.87.201.244
93 | 168.126.63.1
94 | 168.188.1.1
95 | 168.213.3.10
96 | 168.213.3.11
97 | 168.215.165.186
98 | 168.215.210.50
99 | 168.95.1.1
100 | 170.51.255.100
101 | 170.56.58.53
102 | 173.44.32.2
103 | 174.34.129.34
104 | 178.151.86.169
105 | 178.161.146.10
106 | 178.254.21.113
107 | 180.211.129.42
108 | 185.46.7.100
109 | 185.46.7.110
110 | 187.115.52.83
111 | 187.73.241.67
112 | 189.90.16.20
113 | 190.11.32.199
114 | 192.116.16.26
115 | 192.172.250.8
116 | 192.190.173.40
117 | 192.43.161.22
118 | 192.76.144.66
119 | 193.101.111.10
120 | 193.111.144.145
121 | 193.111.144.161
122 | 193.111.200.191
123 | 193.111.238.5
124 | 193.138.78.117
125 | 193.142.218.3
126 | 193.148.29.100
127 | 193.148.29.103
128 | 193.151.32.40
129 | 193.16.255.2
130 | 193.17.213.10
131 | 193.189.114.254
132 | 193.200.68.230
133 | 193.201.185.3
134 | 193.205.136.1
135 | 193.22.119.195
136 | 193.226.128.129
137 | 193.226.61.1
138 | 193.228.86.5
139 | 193.230.161.3
140 | 193.230.161.4
141 | 193.230.183.201
142 | 193.230.230.1
143 | 193.231.112.1
144 | 193.231.249.1
145 | 193.231.80.7
146 | 193.232.69.22
147 | 193.252.247.52
148 | 193.252.247.53
149 | 193.254.232.1
150 | 193.255.146.53
151 | 193.26.6.130
152 | 193.27.192.98
153 | 193.33.114.2
154 | 193.33.220.3
155 | 193.33.236.1
156 | 193.41.10.1
157 | 193.41.59.151
158 | 193.43.108.3
159 | 193.43.108.62
160 | 193.43.17.4
161 | 193.58.204.59
162 | 193.58.251.251
163 | 193.67.79.39
164 | 193.78.240.12
165 | 193.86.86.2
166 | 193.89.221.124
167 | 193.89.221.2
168 | 193.89.248.1
169 | 193.95.93.243
170 | 193.95.93.77
171 | 194.102.106.1
172 | 194.113.160.68
173 | 194.1.154.37
174 | 194.117.245.2
175 | 194.12.224.34
176 | 194.126.130.7
177 | 194.132.119.151
178 | 194.132.32.32
179 | 194.141.12.1
180 | 194.141.45.4
181 | 194.145.147.194
182 | 194.145.240.6
183 | 194.146.136.1
184 | 194.149.133.11
185 | 194.149.146.2
186 | 194.149.156.140
187 | 194.150.168.168
188 | 194.153.232.17
189 | 194.158.206.205
190 | 194.158.206.206
191 | 194.164.181.2
192 | 194.169.239.10
193 | 194.169.244.33
194 | 194.169.244.34
195 | 194.172.160.4
196 | 194.179.109.10
197 | 194.179.1.100
198 | 194.18.231.5
199 | 194.187.164.20
200 | 194.190.225.2
201 | 194.20.0.24
202 | 194.213.193.5
203 | 194.226.211.11
204 | 194.246.126.68
205 | 194.246.127.11
206 | 194.250.223.1
207 | 194.250.223.2
208 | 194.25.0.52
209 | 194.25.0.60
210 | 194.39.185.10
211 | 194.50.10.2
212 | 194.52.202.98
213 | 194.54.181.90
214 | 194.6.240.1
215 | 194.72.9.61
216 | 194.75.147.212
217 | 194.77.8.1
218 | 194.88.202.11
219 | 194.88.203.6
220 | 194.98.65.165
221 | 195.112.96.34
222 | 195.113.144.194
223 | 195.114.173.153
224 | 195.12.4.247
225 | 195.129.12.114
226 | 195.129.12.122
227 | 195.129.12.83
228 | 195.13.38.3
229 | 195.137.162.149
230 | 195.140.236.250
231 | 195.140.236.253
232 | 195.14.50.21
233 | 195.146.81.130
234 | 195.153.19.10
235 | 195.153.19.5
236 | 195.158.239.4
237 | 195.167.98.3
238 | 195.170.96.2
239 | 195.170.97.254
240 | 195.175.121.10
241 | 195.175.39.39
242 | 195.175.39.40
243 | 195.177.223.3
244 | 195.177.240.3
245 | 195.178.123.130
246 | 195.182.110.132
247 | 195.182.192.10
248 | 195.182.192.2
249 | 195.186.1.110
250 | 195.186.1.111
251 | 195.186.4.110
252 | 195.186.4.111
253 | 195.189.130.1
254 | 195.189.131.1
255 | 195.198.214.72
256 | 195.20.193.11
257 | 195.2.195.1
258 | 195.22.192.252
259 | 195.24.228.3
260 | 195.243.214.4
261 | 195.244.25.3
262 | 195.245.76.6
263 | 195.27.1.1
264 | 195.35.110.4
265 | 195.5.125.3
266 | 195.60.70.5
267 | 195.67.15.102
268 | 195.67.15.73
269 | 195.67.160.3
270 | 195.67.27.18
271 | 195.69.65.98
272 | 195.70.237.42
273 | 195.70.248.1
274 | 195.74.128.6
275 | 195.7.64.3
276 | 195.88.84.100
277 | 195.96.208.1
278 | 195.99.66.220
279 | 196.41.225.11
280 | 198.60.22.2
281 | 198.82.247.34
282 | 199.249.18.1
283 | 199.249.19.2
284 | 199.44.194.2
285 | 199.80.64.202
286 | 200.113.185.227
287 | 200.118.2.88
288 | 200.175.3.232
289 | 200.221.11.100
290 | 200.221.11.101
291 | 200.221.137.40
292 | 200.221.137.41
293 | 200.221.137.42
294 | 200.221.137.43
295 | 200.221.137.44
296 | 200.221.137.45
297 | 200.221.137.46
298 | 200.221.137.47
299 | 200.35.174.126
300 | 200.40.230.36
301 | 200.49.160.31
302 | 200.49.160.35
303 | 200.53.250.1
304 | 200.56.224.11
305 | 200.57.2.108
306 | 200.57.7.61
307 | 200.69.193.2
308 | 200.85.0.105
309 | 200.85.35.158
310 | 200.85.61.90
311 | 200.88.127.22
312 | 200.88.127.23
313 | 200.95.144.3
314 | 201.131.4.5
315 | 201.131.4.9
316 | 202.120.111.3
317 | 202.130.97.65
318 | 202.130.97.66
319 | 202.136.162.11
320 | 202.138.120.4
321 | 202.138.120.6
322 | 202.138.120.87
323 | 202.148.202.3
324 | 202.148.202.4
325 | 202.152.162.66
326 | 202.180.160.1
327 | 202.181.224.2
328 | 202.199.160.206
329 | 202.248.20.133
330 | 202.248.37.74
331 | 202.28.162.1
332 | 202.30.143.11
333 | 202.38.128.58
334 | 202.43.178.244
335 | 202.43.178.245
336 | 202.44.204.63
337 | 202.44.55.193
338 | 202.46.1.2
339 | 202.51.96.5
340 | 202.62.224.2
341 | 202.83.20.101
342 | 202.83.30.5
343 | 202.86.8.100
344 | 202.91.8.234
345 | 203.109.129.67
346 | 203.109.129.68
347 | 203.113.11.37
348 | 203.115.130.74
349 | 203.115.71.66
350 | 203.115.81.38
351 | 203.119.36.106
352 | 203.119.8.106
353 | 203.130.2.3
354 | 203.133.1.7
355 | 203.133.1.8
356 | 203.146.237.222
357 | 203.146.237.237
358 | 203.156.104.21
359 | 203.176.144.12
360 | 203.176.144.20
361 | 203.189.88.10
362 | 203.189.88.11
363 | 203.189.88.133
364 | 203.189.88.148
365 | 203.189.88.151
366 | 203.189.88.152
367 | 203.189.88.154
368 | 203.189.88.156
369 | 203.189.88.211
370 | 203.189.88.212
371 | 203.189.88.213
372 | 203.189.88.214
373 | 203.189.88.54
374 | 203.189.89.1
375 | 203.189.89.134
376 | 203.189.89.15
377 | 203.189.89.209
378 | 203.189.89.241
379 | 203.189.89.36
380 | 203.189.89.65
381 | 203.193.139.150
382 | 203.196.0.6
383 | 203.198.7.66
384 | 203.2.193.67
385 | 203.239.131.1
386 | 203.248.252.2
387 | 203.250.129.214
388 | 203.253.31.1
389 | 203.41.44.20
390 | 203.63.8.27
391 | 203.80.96.10
392 | 203.89.226.24
393 | 203.89.226.26
394 | 203.90.78.65
395 | 204.116.57.2
396 | 204.117.214.10
397 | 204.174.120.45
398 | 204.95.160.2
399 | 205.134.162.209
400 | 205.151.222.250
401 | 205.152.6.20
402 | 205.171.2.65
403 | 205.172.19.193
404 | 205.172.19.79
405 | 205.236.148.130
406 | 205.236.148.131
407 | 205.242.187.234
408 | 206.124.0.254
409 | 206.124.1.254
410 | 206.124.64.1
411 | 206.124.64.253
412 | 206.248.95.194
413 | 206.253.194.65
414 | 206.253.33.130
415 | 206.253.33.131
416 | 206.51.143.55
417 | 206.80.254.4
418 | 206.80.254.68
419 | 207.17.190.5
420 | 207.17.190.7
421 | 207.179.3.25
422 | 207.241.160.34
423 | 207.248.224.71
424 | 207.248.224.72
425 | 207.248.57.10
426 | 207.249.163.155
427 | 207.91.130.4
428 | 207.91.250.34
429 | 208.116.30.21
430 | 208.38.1.15
431 | 208.48.253.106
432 | 208.59.89.20
433 | 208.67.220.220
434 | 208.67.220.222
435 | 208.67.222.220
436 | 208.67.222.222
437 | 208.72.120.204
438 | 208.78.24.238
439 | 208.79.56.204
440 | 208.90.237.9
441 | 209.0.205.11
442 | 209.143.0.10
443 | 209.143.22.182
444 | 209.172.128.2
445 | 209.191.129.65
446 | 209.195.95.95
447 | 209.197.128.2
448 | 209.213.223.18
449 | 209.216.160.131
450 | 209.216.160.2
451 | 209.252.33.101
452 | 209.51.161.14
453 | 209.51.161.58
454 | 209.55.0.110
455 | 209.55.1.220
456 | 209.63.0.18
457 | 209.87.64.70
458 | 209.87.79.232
459 | 210.180.98.69
460 | 210.220.163.82
461 | 210.2.4.8
462 | 210.29.96.33
463 | 210.34.0.18
464 | 210.34.48.34
465 | 210.44.112.66
466 | 210.80.58.3
467 | 210.80.58.66
468 | 210.94.0.7
469 | 211.115.194.2
470 | 211.115.194.3
471 | 211.161.46.84
472 | 211.172.208.2
473 | 211.175.82.66
474 | 211.237.65.21
475 | 211.237.65.31
476 | 211.41.128.70
477 | 211.41.128.71
478 | 211.60.155.5
479 | 211.63.64.11
480 | 211.67.112.1
481 | 211.78.130.10
482 | 211.78.130.11
483 | 211.78.130.3
484 | 212.102.225.2
485 | 212.110.122.132
486 | 212.1.118.3
487 | 212.112.39.22
488 | 212.112.39.25
489 | 212.116.76.76
490 | 212.118.0.2
491 | 212.118.241.1
492 | 212.118.241.33
493 | 212.122.224.10
494 | 212.14.253.242
495 | 212.15.86.12
496 | 212.181.124.8
497 | 212.19.149.226
498 | 212.192.128.3
499 | 212.19.96.2
500 | 212.203.32.11
501 | 212.203.33.12
502 | 212.211.132.4
503 | 212.214.229.170
504 | 212.216.172.222
505 | 212.230.255.1
506 | 212.230.255.129
507 | 212.236.250.4
508 | 212.245.158.66
509 | 212.26.6.11
510 | 212.28.34.90
511 | 212.30.96.211
512 | 212.31.253.69
513 | 212.31.32.130
514 | 212.31.32.131
515 | 212.34.194.211
516 | 212.36.24.3
517 | 212.37.208.3
518 | 212.40.0.10
519 | 212.40.5.50
520 | 212.40.5.51
521 | 212.49.128.65
522 | 212.51.16.1
523 | 212.51.17.1
524 | 212.54.160.7
525 | 212.57.190.166
526 | 212.58.3.2
527 | 212.58.3.7
528 | 212.58.3.8
529 | 212.59.199.2
530 | 212.59.199.6
531 | 212.62.98.10
532 | 212.66.0.1
533 | 212.66.1.1
534 | 212.66.129.98
535 | 212.66.160.2
536 | 212.67.131.4
537 | 212.73.209.34
538 | 212.73.65.40
539 | 212.82.225.7
540 | 212.82.226.212
541 | 212.85.112.32
542 | 212.85.32.3
543 | 212.89.130.180
544 | 212.9.160.1
545 | 212.94.162.33
546 | 212.94.32.32
547 | 212.94.34.34
548 | 212.96.1.70
549 | 212.97.32.2
550 | 212.98.160.50
551 | 212.98.160.65
552 | 213.0.76.5
553 | 213.0.77.5
554 | 213.0.77.8
555 | 213.115.244.69
556 | 213.128.194.2
557 | 213.131.178.10
558 | 213.135.67.1
559 | 213.151.109.1
560 | 213.157.0.194
561 | 213.157.196.130
562 | 213.157.196.131
563 | 213.157.196.132
564 | 213.158.72.1
565 | 213.16.104.61
566 | 213.164.38.66
567 | 213.171.220.209
568 | 213.172.33.34
569 | 213.178.66.2
570 | 213.184.242.6
571 | 213.211.50.1
572 | 213.211.50.2
573 | 213.218.117.85
574 | 213.234.128.211
575 | 213.235.248.228
576 | 213.239.204.35
577 | 213.241.193.250
578 | 213.244.72.31
579 | 213.27.209.53
580 | 213.27.209.8
581 | 213.55.96.166
582 | 213.8.145.133
583 | 213.88.195.146
584 | 213.88.195.147
585 | 213.88.195.148
586 | 216.106.1.2
587 | 216.106.184.6
588 | 216.131.94.5
589 | 216.131.95.20
590 | 216.136.95.2
591 | 216.138.119.6
592 | 216.146.35.230
593 | 216.147.131.33
594 | 216.17.128.1
595 | 216.17.128.2
596 | 216.175.203.51
597 | 216.181.31.11
598 | 216.184.96.4
599 | 216.184.96.5
600 | 216.184.96.6
601 | 216.185.64.6
602 | 216.186.27.15
603 | 216.194.28.33
604 | 216.198.139.68
605 | 216.21.128.22
606 | 216.21.129.22
607 | 216.218.221.6
608 | 216.218.226.238
609 | 216.235.1.3
610 | 216.237.221.42
611 | 216.244.192.3
612 | 216.244.192.32
613 | 216.254.141.13
614 | 216.254.141.2
615 | 216.254.95.2
616 | 216.27.175.2
617 | 216.47.160.12
618 | 216.47.160.13
619 | 216.52.126.1
620 | 216.52.129.1
621 | 216.52.161.33
622 | 216.52.169.1
623 | 216.52.190.33
624 | 216.52.254.1
625 | 216.52.254.33
626 | 216.52.41.1
627 | 216.52.41.33
628 | 216.52.65.1
629 | 216.52.65.33
630 | 216.52.94.1
631 | 216.52.94.33
632 | 216.52.97.33
633 | 216.54.201.11
634 | 216.58.97.20
635 | 216.58.97.21
636 | 216.66.22.2
637 | 216.66.38.58
638 | 216.66.80.26
639 | 216.66.80.30
640 | 216.66.80.98
641 | 216.81.128.132
642 | 216.81.96.67
643 | 216.81.96.68
644 | 217.107.10.254
645 | 217.107.11.35
646 | 217.113.48.1
647 | 217.115.16.2
648 | 217.115.16.3
649 | 217.117.0.38
650 | 217.117.111.1
651 | 217.144.144.211
652 | 217.144.6.6
653 | 217.148.0.17
654 | 217.149.155.180
655 | 217.149.17.1
656 | 217.15.17.2
657 | 217.156.106.1
658 | 217.173.198.3
659 | 217.17.34.68
660 | 217.174.252.116
661 | 217.18.206.12
662 | 217.18.206.22
663 | 217.18.80.105
664 | 217.18.90.105
665 | 217.196.1.5
666 | 217.196.1.6
667 | 217.219.236.8
668 | 217.22.209.254
669 | 217.24.112.2
670 | 217.27.240.20
671 | 217.28.113.13
672 | 217.28.98.62
673 | 217.31.204.130
674 | 217.32.105.66
675 | 217.64.163.1
676 | 217.64.167.1
677 | 217.65.192.1
678 | 217.66.226.8
679 | 217.69.160.18
680 | 217.69.169.25
681 | 217.72.1.2
682 | 217.72.168.34
683 | 217.73.17.110
684 | 217.76.240.2
685 | 217.78.80.70
686 | 217.78.80.74
687 | 217.79.225.8
688 | 217.8.180.98
689 | 218.102.23.228
690 | 218.192.240.2
691 | 218.223.32.1
692 | 218.232.110.36
693 | 218.232.110.37
694 | 219.250.36.130
695 | 219.252.2.100
696 | 220.128.173.228
697 | 220.227.60.12
698 | 220.233.0.1
699 | 221.139.13.130
700 | 24.154.1.4
701 | 24.154.1.5
702 | 35.8.2.41
703 | 35.8.2.42
704 | 35.8.2.45
705 | 35.8.98.43
706 | 37.19.5.135
707 | 37.235.1.174
708 | 37.235.1.177
709 | 42.62.176.30
710 | 4.79.132.219
711 | 50.21.174.18
712 | 58.68.121.230
713 | 58.96.3.34
714 | 61.19.252.238
715 | 61.208.115.242
716 | 61.56.211.185
717 | 61.63.0.66
718 | 61.70.87.96
719 | 62.105.17.252
720 | 62.108.161.161
721 | 62.109.182.2
722 | 62.116.30.200
723 | 62.128.1.42
724 | 62.128.1.53
725 | 62.129.252.215
726 | 62.129.252.252
727 | 62.134.11.4
728 | 62.140.239.1
729 | 62.141.38.230
730 | 62.149.128.2
731 | 62.165.32.250
732 | 62.165.33.250
733 | 62.168.59.67
734 | 62.177.42.174
735 | 62.196.2.70
736 | 62.20.15.234
737 | 62.20.57.226
738 | 62.231.76.49
739 | 62.233.128.17
740 | 62.24.228.202
741 | 62.33.203.33
742 | 62.3.32.16
743 | 62.3.32.17
744 | 62.36.225.150
745 | 62.37.225.56
746 | 62.37.225.57
747 | 62.37.228.20
748 | 62.40.32.34
749 | 62.76.76.62
750 | 62.77.85.100
751 | 62.77.85.98
752 | 62.77.94.72
753 | 62.8.96.38
754 | 62.94.0.41
755 | 62.94.0.42
756 | 62.95.15.107
757 | 62.97.84.4
758 | 63.105.204.164
759 | 63.171.232.38
760 | 63.171.232.39
761 | 63.218.44.186
762 | 63.251.129.33
763 | 63.251.161.1
764 | 63.251.161.33
765 | 63.251.62.1
766 | 63.251.62.33
767 | 64.105.163.106
768 | 64.105.172.26
769 | 64.105.179.138
770 | 64.105.189.26
771 | 64.105.199.74
772 | 64.105.199.76
773 | 64.105.202.138
774 | 64.105.97.90
775 | 64.119.60.5
776 | 64.119.60.9
777 | 64.13.115.12
778 | 64.132.61.131
779 | 64.132.94.250
780 | 64.13.48.12
781 | 64.135.1.20
782 | 64.135.1.22
783 | 64.254.99.13
784 | 64.56.129.2
785 | 64.61.99.2
786 | 64.79.224.3
787 | 64.81.127.2
788 | 64.81.159.2
789 | 64.94.1.1
790 | 64.94.1.33
791 | 64.94.33.33
792 | 65.163.107.11
793 | 65.203.109.2
794 | 65.39.139.53
795 | 65.74.130.5
796 | 65.74.130.6
797 | 66.118.80.4
798 | 66.119.93.10
799 | 66.119.93.4
800 | 66.163.0.161
801 | 66.163.0.173
802 | 66.165.177.69
803 | 66.165.183.87
804 | 66.182.208.5
805 | 66.203.72.10
806 | 66.207.160.111
807 | 66.216.18.222
808 | 66.218.245.13
809 | 66.218.44.5
810 | 66.232.139.10
811 | 66.252.170.3
812 | 66.28.0.45
813 | 66.28.0.61
814 | 66.51.206.100
815 | 66.80.130.18
816 | 66.81.0.252
817 | 66.92.159.2
818 | 66.92.224.2
819 | 66.92.64.2
820 | 66.93.87.2
821 | 67.100.88.27
822 | 67.214.64.6
823 | 68.179.203.94
824 | 69.146.17.3
825 | 69.16.169.11
826 | 69.16.170.11
827 | 69.24.112.11
828 | 69.25.1.1
829 | 69.25.1.33
830 | 69.26.129.2
831 | 69.28.104.5
832 | 69.28.136.102
833 | 69.28.148.102
834 | 69.28.97.4
835 | 69.54.70.15
836 | 69.67.97.18
837 | 69.7.192.1
838 | 69.7.192.2
839 | 70.36.0.5
840 | 70.36.0.6
841 | 72.11.150.10
842 | 72.11.150.74
843 | 72.52.104.74
844 | 74.222.30.2
845 | 74.82.46.6
846 | 75.94.255.12
847 | 76.73.18.50
848 | 77.240.144.164
849 | 77.241.112.23
850 | 77.247.176.114
851 | 77.41.229.2
852 | 77.72.192.3
853 | 77.73.104.3
854 | 77.87.152.9
855 | 77.88.8.1
856 | 77.88.8.2
857 | 77.88.8.8
858 | 77.88.8.88
859 | 78.159.224.224
860 | 78.159.232.232
861 | 78.31.96.2
862 | 79.132.192.2
863 | 79.141.81.250
864 | 79.141.82.250
865 | 79.141.83.250
866 | 80.149.86.20
867 | 80.254.79.157
868 | 80.67.169.12
869 | 80.72.146.2
870 | 80.73.1.1
871 | 80.74.160.11
872 | 80.79.179.2
873 | 80.84.72.20
874 | 80.88.171.16
875 | 80.92.178.98
876 | 80.94.48.254
877 | 81.17.66.14
878 | 81.17.72.70
879 | 81.180.201.98
880 | 81.18.242.100
881 | 81.189.212.129
882 | 81.18.97.50
883 | 81.200.80.11
884 | 81.222.80.2
885 | 81.23.144.250
886 | 81.24.128.146
887 | 81.25.152.2
888 | 81.27.133.50
889 | 81.27.135.50
890 | 81.28.128.34
891 | 8.15.12.5
892 | 81.7.200.80
893 | 81.92.96.22
894 | 81.92.97.12
895 | 81.95.128.218
896 | 82.115.163.2
897 | 82.141.136.2
898 | 82.144.181.1
899 | 82.145.160.140
900 | 82.145.163.1
901 | 82.151.90.1
902 | 82.198.129.138
903 | 82.199.32.36
904 | 82.212.67.100
905 | 82.212.67.101
906 | 82.96.65.2
907 | 82.96.81.10
908 | 82.96.86.20
909 | 82.99.211.195
910 | 83.137.41.8
911 | 83.137.41.9
912 | 83.142.192.2
913 | 83.142.9.30
914 | 83.143.12.246
915 | 83.143.8.220
916 | 83.149.244.194
917 | 83.151.112.193
918 | 83.166.8.18
919 | 83.240.154.200
920 | 83.242.140.10
921 | 83.97.97.3
922 | 84.200.69.80
923 | 84.200.70.40
924 | 84.8.2.11
925 | 85.114.105.3
926 | 85.115.224.18
927 | 85.119.136.158
928 | 85.119.72.2
929 | 85.124.252.33
930 | 85.132.32.41
931 | 85.132.32.42
932 | 85.158.50.50
933 | 85.174.190.2
934 | 8.5.244.5
935 | 85.88.19.10
936 | 85.88.19.11
937 | 87.103.133.167
938 | 87.104.254.135
939 | 87.104.254.39
940 | 87.197.40.58
941 | 87.204.12.130
942 | 87.204.28.12
943 | 87.229.99.1
944 | 88.147.158.1
945 | 88.255.242.6
946 | 88.255.96.196
947 | 8.8.4.4
948 | 88.82.84.129
949 | 8.8.8.8
950 | 89.107.129.15
951 | 89.107.16.2
952 | 89.185.75.244
953 | 89.186.66.6
954 | 89.186.66.7
955 | 89.233.250.137
956 | 89.249.224.1
957 | 90.189.109.2
958 | 91.143.20.6
959 | 91.144.248.227
960 | 91.185.2.10
961 | 91.185.6.10
962 | 91.188.0.35
963 | 91.188.0.5
964 | 91.194.112.10
965 | 91.197.164.11
966 | 91.198.154.133
967 | 91.199.139.1
968 | 91.203.177.4
969 | 91.203.188.1
970 | 91.207.40.2
971 | 91.210.24.22
972 | 91.211.16.6
973 | 91.212.56.5
974 | 91.214.72.33
975 | 91.214.72.34
976 | 91.98.128.112
977 | 92.43.224.1
978 | 93.157.14.65
979 | 93.157.233.3
980 | 93.188.152.3
981 | 94.247.200.2
982 | 94.247.200.3
983 | 95.158.128.2
984 | 95.158.129.2
985 | 95.173.193.3
986 | 95.85.9.86
987 |
--------------------------------------------------------------------------------
/tools/Sublist3r/subbrute/subbrute.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | #
3 | #SubBrute v1.2
4 | #A (very) fast subdomain enumeration tool.
5 | #
6 | #Maintained by rook
7 | #Contributors:
8 | #JordanMilne, KxCode, rc0r, memoryprint, ppaulojr
9 | #
10 | import re
11 | import optparse
12 | import os
13 | import signal
14 | import sys
15 | import uuid
16 | import random
17 | import ctypes
18 | import dns.resolver
19 | import dns.rdatatype
20 | import json
21 |
22 | #Python 2.x and 3.x compatiablity
23 | #We need the Queue library for exception handling
24 | try:
25 | import queue as Queue
26 | except:
27 | import Queue
28 |
29 | #The 'multiprocessing' library does not rely upon a Global Interpreter Lock (GIL)
30 | import multiprocessing
31 |
#Microsoft compatibility shim.
#On Windows, subbrute + multiprocessing throws exceptions, so we monkey-patch
#multiprocessing.Process to be threading.Thread — a drop-in replacement with
#the same start()/join() interface. All 'processes' below become threads there.
if sys.platform.startswith('win'):
    #Drop-in replacement, subbrute + multiprocessing throws exceptions on windows.
    import threading
    multiprocessing.Process = threading.Thread
class verify_nameservers(multiprocessing.Process):
    """Background process that validates DNS resolvers and feeds the good
    ones to the lookup() workers through a bounded queue.

    For every candidate resolver it also harvests that resolver's wildcard
    answers into the shared `wildcards` dict, so workers can reject bogus
    wildcard matches. A `False` sentinel on `resolver_q` marks the end of
    the resolver list.
    """

    def __init__(self, target, record_type, resolver_q, resolver_list, wildcards):
        """Set up the verifier.

        target        -- the domain being brute forced (used for wildcard tests).
        record_type   -- requested DNS record type; only "AAAA" overrides "A".
        resolver_q    -- bounded queue where verified nameservers are published.
        resolver_list -- candidate nameserver IPs read from resolvers.txt.
        wildcards     -- shared dict {wildcard_answer: None} populated here.
        """
        multiprocessing.Process.__init__(self, target = self.run)
        self.daemon = True
        signal_init()

        self.time_to_die = False
        self.resolver_q = resolver_q
        self.wildcards = wildcards
        #Do we need wildcards for other types of records?
        #This needs testing! Wildcard detection is only done with A records,
        #unless the caller explicitly asked for AAAA.
        self.record_type = "A"
        if record_type == "AAAA":
            self.record_type = record_type
        self.resolver_list = resolver_list
        resolver = dns.resolver.Resolver()
        #The domain provided by the user.
        self.target = target
        #1 website in the world, modify the following line when this status changes.
        #www.google.cn, I'm looking at you ;)
        self.most_popular_website = "www.google.com"
        #We shouldn't need the backup_resolver, but we can use them if need be.
        #We must have a resolver, and localhost can work in some environments.
        self.backup_resolver = resolver.nameservers + ['127.0.0.1', '8.8.8.8', '8.8.4.4']
        #Ideally a nameserver should respond in less than 1 sec.
        resolver.timeout = 1
        resolver.lifetime = 1
        try:
            #Lets test the latency of our connection.
            #Google's DNS server should be an ideal time test.
            resolver.nameservers = ['8.8.8.8']
            resolver.query(self.most_popular_website, self.record_type)
        except:
            #Our connection is slower than a junebug in molasses:
            #fall back to a default Resolver with stock (longer) timeouts.
            resolver = dns.resolver.Resolver()
        self.resolver = resolver

    def end(self):
        """Ask this process/thread to stop at the next opportunity."""
        self.time_to_die = True

    #This process cannot block forever, it needs to check if its time to die.
    def add_nameserver(self, nameserver):
        """Publish a verified nameserver on the bounded queue.

        Uses a 1s put() timeout so the loop can notice `time_to_die`
        instead of blocking forever on a full queue.
        """
        keep_trying = True
        while not self.time_to_die and keep_trying:
            try:
                self.resolver_q.put(nameserver, timeout = 1)
                trace("Added nameserver:", nameserver)
                keep_trying = False
            except Exception as e:
                #Queue.Full simply means "retry"; the str() comparison is a
                #workaround for frozen (cx_freeze) builds where the exception
                #class identity differs.
                if type(e) == Queue.Full or str(type(e)) == "":
                    keep_trying = True

    def verify(self, nameserver_list):
        """Try each nameserver in the list; queue the ones that pass the
        wildcard test. Returns True if at least one resolver was added."""
        added_resolver = False
        for server in nameserver_list:
            if self.time_to_die:
                #We are done here.
                break
            server = server.strip()
            if server:
                self.resolver.nameservers = [server]
                try:
                    #test_result = self.resolver.query(self.most_popular_website, "A")
                    #should throw an exception before this line.
                    if True:#test_result:
                        #Only add the nameserver to the queue if we can detect wildcards.
                        if(self.find_wildcards(self.target)):# and self.find_wildcards(".com")
                            #wildcards have been added to the set, it is now safe to be added to the queue.
                            #blocking queue, this process will halt on put() when the queue is full:
                            self.add_nameserver(server)
                            added_resolver = True
                        else:
                            trace("Rejected nameserver - wildcard:", server)
                except Exception as e:
                    #Rejected server :(
                    trace("Rejected nameserver - unreliable:", server, type(e))
        return added_resolver

    def run(self):
        """Process entry point: verify the shuffled resolver list, fall back
        to the backup resolvers if none worked, then signal end-of-list."""
        #Every user will get a different set of resolvers, this helps redistribute traffic.
        random.shuffle(self.resolver_list)
        if not self.verify(self.resolver_list):
            #This should never happen, inform the user.
            sys.stderr.write('Warning: No nameservers found, trying fallback list.\n')
            #Try and fix it for the user:
            self.verify(self.backup_resolver)
        #End of the resolvers list: push the False sentinel so workers know.
        try:
            self.resolver_q.put(False, timeout = 1)
        except:
            pass

    #Only add the nameserver to the queue if we can detect wildcards.
    #Returns False on error.
    def find_wildcards(self, host):
        """Probe the current resolver for wildcard behavior on *host*.

        Returns True when the resolver looks trustworthy (NXDOMAIN for
        random names, or a bounded set of wildcard answers that were
        recorded in self.wildcards); False when it is spammy or flaky.
        """
        #We want to solve the following three problems:
        #1)The target might have a wildcard DNS record.
        #2)The target maybe using geolocation-aware DNS.
        #3)The DNS server we are testing may respond to non-existent 'A' records with advertisements.
        #I have seen a CloudFlare Enterprise customer with the first two conditions.
        try:
            #This is case #3, these spam nameservers seem to be more trouble than they are worth.
            wildtest = self.resolver.query(uuid.uuid4().hex + ".com", "A")
            if len(wildtest):
                trace("Spam DNS detected:", host)
                return False
        except:
            pass
        test_counter = 8
        looking_for_wildcards = True
        while looking_for_wildcards and test_counter >= 0 :
            looking_for_wildcards = False
            #Don't get lost, this nameserver could be playing tricks.
            test_counter -= 1
            try:
                testdomain = "%s.%s" % (uuid.uuid4().hex, host)
                wildtest = self.resolver.query(testdomain, self.record_type)
                #This 'A' record may contain a list of wildcards.
                if wildtest:
                    for w in wildtest:
                        w = str(w)
                        if w not in self.wildcards:
                            #wildcards were detected.
                            self.wildcards[w] = None
                            #We found at least one wildcard, look for more.
                            looking_for_wildcards = True
            except Exception as e:
                if type(e) == dns.resolver.NXDOMAIN or type(e) == dns.name.EmptyLabel:
                    #not found: random names don't resolve — good resolver.
                    return True
                else:
                    #This resolver maybe flakey, we don't want it for our tests.
                    trace("wildcard exception:", self.resolver.nameservers, type(e))
                    return False
        #If we hit the end of our depth counter and,
        #there are still wildcards, then reject this nameserver because it smells bad.
        return (test_counter >= 0)
176 |
class lookup(multiprocessing.Process):
    """Worker process: pulls (hostname, record_type[, retries]) work items
    from in_q, resolves them, filters out wildcard answers, and pushes
    (hostname, record_type, addresses) result tuples to out_q.

    A `False` sentinel on in_q means "no more work"; each worker
    re-propagates it and posts `False` on out_q before exiting.
    """

    def __init__(self, in_q, out_q, resolver_q, domain, wildcards, spider_blacklist):
        multiprocessing.Process.__init__(self, target = self.run)
        signal_init()
        #Target size for this worker's private pool of nameservers.
        self.required_nameservers = 16
        self.in_q = in_q
        self.out_q = out_q
        self.resolver_q = resolver_q
        self.domain = domain
        self.wildcards = wildcards
        self.spider_blacklist = spider_blacklist
        self.resolver = dns.resolver.Resolver()
        #Force pydns to use our nameservers
        self.resolver.nameservers = []

    def get_ns(self):
        """Non-blocking: return [nameserver] if one is available, else [].

        BUG FIX: the original compared the wrapping *list* against False
        (`[x] == False` is never true), so the end-of-resolvers marker was
        never detected and `False` itself could be installed as a nameserver.
        """
        ret = []
        try:
            ns = self.resolver_q.get_nowait()
            if ns is False:
                #Queue is empty, inform the rest.
                self.resolver_q.put(False)
            else:
                ret = [ns]
        except:
            pass
        return ret

    def get_ns_blocking(self):
        """Block until a nameserver (or the end marker) is available.

        Returns [nameserver], or [] when the resolver list is exhausted
        (the False marker is re-queued for sibling workers).
        """
        ns = self.resolver_q.get()
        if ns is False:
            trace("get_ns_blocking - Resolver list is empty.")
            #Queue is empty, inform the rest.
            self.resolver_q.put(False)
            return []
        return [ns]

    def check(self, host, record_type = "A", retries = 0):
        """Resolve *host* for *record_type*.

        Returns the dnspython answer object (A records, with response
        spidering), a list of hosts (CNAME chain) or a one-element notice
        list (persistent timeouts), or False when the name does not exist
        or another worker should retry it.

        BUG FIX: the original reset `retries = 0` on entry, discarding the
        retry count handed over by other workers via the queue and allowing
        endless re-queues of persistently timing-out hosts.
        """
        trace("Checking:", host)
        cname_record = []
        if len(self.resolver.nameservers) <= self.required_nameservers:
            #This process needs more nameservers, lets see if we have one available.
            self.resolver.nameservers += self.get_ns()
        #Ok we should be good to go.
        while True:
            try:
                #Query the nameserver, this is not simple...
                if not record_type or record_type == "A":
                    resp = self.resolver.query(host)
                    #Crawl the response for additional in-scope hostnames.
                    hosts = extract_hosts(str(resp.response), self.domain)
                    for h in hosts:
                        if h not in self.spider_blacklist:
                            self.spider_blacklist[h] = None
                            trace("Found host with spider:", h)
                            self.in_q.put((h, record_type, 0))
                    return resp
                if record_type == "CNAME":
                    #Follow the CNAME chain, max 20 lookups.
                    for x in range(20):
                        try:
                            resp = self.resolver.query(host, record_type)
                        except dns.resolver.NoAnswer:
                            resp = False
                        if resp and resp[0]:
                            host = str(resp[0]).rstrip(".")
                            cname_record.append(host)
                        else:
                            return cname_record
                    #BUG FIX: chains longer than 20 hops used to fall out of
                    #the loop and implicitly return None.
                    return cname_record
                else:
                    #All other records:
                    return self.resolver.query(host, record_type)

            except Exception as e:
                if type(e) == dns.resolver.NoNameservers:
                    #We should never be here.
                    #We must block, another process should try this host.
                    #do we need a limit?
                    self.in_q.put((host, record_type, 0))
                    self.resolver.nameservers += self.get_ns_blocking()
                    return False
                elif type(e) == dns.resolver.NXDOMAIN:
                    #"Non-existent domain name."
                    return False
                elif type(e) == dns.resolver.NoAnswer:
                    #"The response did not contain an answer."
                    if retries >= 1:
                        trace("NoAnswer retry")
                        return False
                    retries += 1
                elif type(e) == dns.resolver.Timeout:
                    trace("lookup failure:", host, retries)
                    #Check if it is time to give up.
                    if retries >= 3:
                        if retries > 3:
                            #Sometimes 'internal use' subdomains will timeout for every request.
                            #As far as I'm concerned, the authoritative name server has told us this domain exists,
                            #we just can't know the address value using this method.
                            return ['Multiple Query Timeout - External address resolution was restricted']
                        else:
                            #Maybe another process can take a crack at it.
                            self.in_q.put((host, record_type, retries + 1))
                        return False
                    retries += 1
                    #retry...
                elif type(e) == IndexError:
                    #Some old versions of dnspython throw this error,
                    #doesn't seem to affect the results, and it was fixed in later versions.
                    pass
                elif type(e) == TypeError:
                    # We'll get here if the number procs > number of resolvers.
                    # This is an internal error do we need a limit?
                    self.in_q.put((host, record_type, 0))
                    return False
                elif type(e) == dns.rdatatype.UnknownRdatatype:
                    error("DNS record type not supported:", record_type)
                else:
                    trace("Problem processing host:", host)
                    #dnspython threw some strange exception...
                    raise e

    def run(self):
        """Worker main loop: drain in_q until the False end marker."""
        #This process needs one resolver before it can start looking.
        self.resolver.nameservers += self.get_ns_blocking()
        while True:
            found_addresses = []
            work = self.in_q.get()
            #Check if we have hit the end marker
            while not work:
                #Look for a re-queued lookup behind the end marker.
                try:
                    #BUG FIX: Queue.get takes 'block', not 'blocking'; the old
                    #kwarg raised TypeError which the bare except silently
                    #treated as an empty queue.
                    work = self.in_q.get(block = False)
                    #if we took the end marker off the queue we need to put it back
                    if work:
                        self.in_q.put(False)
                except:#Queue.Empty
                    trace('End of work queue')
                    #There isn't an item behind the end marker
                    work = False
                    break
            #Is this the end of all work that needs to be done?
            if not work:
                #Perpetuate the end marker for all workers to see
                self.in_q.put(False)
                #Notify the parent that we have died of natural causes
                self.out_q.put(False)
                break
            else:
                if len(work) == 3:
                    #keep track of how many times this lookup has timed out.
                    (hostname, record_type, timeout_retries) = work
                    response = self.check(hostname, record_type, timeout_retries)
                else:
                    (hostname, record_type) = work
                    response = self.check(hostname, record_type)
                sys.stdout.flush()
                trace(response)
                #self.wildcards is populated by the verify_nameservers() process.
                #No mutex is needed: the resolver queue guarantees a nameserver
                #cannot be used before its wildcard entries are recorded.
                reject = False
                if response:
                    for a in response:
                        a = str(a)
                        if a in self.wildcards:
                            trace("resolved wildcard:", hostname)
                            reject = True
                            #reject this domain.
                            break
                        else:
                            found_addresses.append(a)
                if not reject:
                    #This request is filled, send the results back
                    result = (hostname, record_type, found_addresses)
                    self.out_q.put(result)
355 |
#Extract relevant hosts
#The dot at the end of a domain signifies the root,
#and all TLDs are subs of the root.
host_match = re.compile(r"((?<=[\s])[a-zA-Z0-9_-]+\.(?:[a-zA-Z0-9_-]+\.?)+(?=[\s]))")
def extract_hosts(data, hostname):
    """Return the hostnames found in *data* that are within *hostname*'s scope.

    data     -- raw text (a stringified DNS response) to spider.
    hostname -- the target domain that defines the scope.

    BUG FIX: the original used a bare endswith(hostname), which also
    accepted look-alike domains such as 'notexample.com' for 'example.com'.
    We now require the exact domain or a proper dot-separated subdomain.
    """
    #made a global to avoid re-compilation
    global host_match
    ret = []
    hosts = re.findall(host_match, data)
    for fh in hosts:
        #Strip the trailing root dot, if any.
        host = fh.rstrip(".")
        #Is this host in scope?
        if host == hostname or host.endswith("." + hostname):
            ret.append(host)
    return ret
371 |
#Return a list of unique sub domains, sorted by frequency.
#Only match domains that have 3 or more sections subdomain.domain.tld
domain_match = re.compile(r"([a-zA-Z0-9_-]*\.[a-zA-Z0-9_-]*\.[a-zA-Z0-9_-]*)+")
def extract_subdomains(file_name):
    """Extract subdomain labels from the file *file_name*, sorted by
    frequency in descending order. Used to build names.txt.

    Raises whatever open() raises if the file cannot be read.
    """
    #Avoid re-compilation
    global domain_match
    subs = {}
    #BUG FIX: the original leaked the file handle; close it deterministically.
    with open(file_name) as sub_file:
        f_all = re.findall(domain_match, sub_file.read())
    for i in f_all:
        if i.find(".") >= 0:
            p = i.split(".")[0:-1]
            #gobble everything that might be a TLD
            while p and len(p[-1]) <= 3:
                p = p[0:-1]
            #remove the domain name
            p = p[0:-1]
            #do we have a subdomain.domain left?
            if len(p) >= 1:
                trace(str(p), " : ", i)
                for q in p:
                    if q:
                        #domain names can only be lower case.
                        q = q.lower()
                        if q in subs:
                            subs[q] += 1
                        else:
                            subs[q] = 1
    #Free some memory before the sort...
    del f_all
    #Sort by freq in desc order
    subs_sorted = sorted(subs.keys(), key = lambda x: subs[x], reverse = True)
    return subs_sorted
406 |
def print_target(target, record_type = None, subdomains = "names.txt", resolve_list = "resolvers.txt", process_count = 16, output = False, json_output = False, found_subdomains = None, verbose = False):
    """Brute force *target* and return the set of result strings.

    found_subdomains -- previously seen results to exclude (default: none).
    verbose          -- when True, print each new result as it is found.
    Note: 'output' and 'json_output' are accepted for interface
    compatibility with callers but are not used by this function.

    BUG FIX: the mutable default `found_subdomains=[]` was shared across
    calls; it is now None with an in-body fallback.
    BUG FIX: the original called run() twice; the first call created a
    generator that was never iterated (dead code), now removed.
    """
    if found_subdomains is None:
        found_subdomains = []
    subdomains_list = []
    for result in run(target, record_type, subdomains, resolve_list, process_count):
        (hostname, record_type, response) = result
        if not record_type:
            result = hostname
        else:
            #CSV-style line: hostname,addr1,addr2,...
            result = "%s,%s" % (hostname, ",".join(response).strip(","))
        if result not in found_subdomains:
            if verbose:
                print(result)
            subdomains_list.append(result)

    return set(subdomains_list)
423 |
def run(target, record_type = None, subdomains = "names.txt", resolve_list = "resolvers.txt", process_count = 16):
    """Generator: brute force subdomains of *target*, yielding
    (hostname, record_type, response) tuples as workers report them.

    target        -- domain to enumerate.
    record_type   -- DNS record type; None/False means A records + spidering.
    subdomains    -- file of candidate subdomain labels.
    resolve_list  -- file of DNS resolver IPs.
    process_count -- number of lookup() worker processes to spawn.
    """
    subdomains = check_open(subdomains)
    resolve_list = check_open(resolve_list)
    if (len(resolve_list) / 16) < process_count:
        sys.stderr.write('Warning: Fewer than 16 resolvers per thread, consider adding more nameservers to resolvers.txt.\n')
    if os.name == 'nt':
        #On Windows the workers are threads (see the monkey-patch at import
        #time), so plain dicts are shared directly.
        wildcards = {}
        spider_blacklist = {}
    else:
        wildcards = multiprocessing.Manager().dict()
        spider_blacklist = multiprocessing.Manager().dict()
    in_q = multiprocessing.Queue()
    out_q = multiprocessing.Queue()
    #have a buffer of at most two new nameservers that lookup processes can draw from.
    resolve_q = multiprocessing.Queue(maxsize = 2)

    #Make a source of fast nameservers available for other processes.
    verify_nameservers_proc = verify_nameservers(target, record_type, resolve_q, resolve_list, wildcards)
    verify_nameservers_proc.start()
    #Check the bare target itself first.
    in_q.put((target, record_type))
    spider_blacklist[target] = None
    #A list of subdomains is the input
    for s in subdomains:
        s = str(s).strip()
        if s:
            #BUG FIX: the original tested `if s.find(","):`, which is truthy
            #for -1 (comma absent); test membership instead.
            if "," in s:
                #SubBrute should be forgiving, a comma will never be in a url
                #but the user might try and use a CSV file as input.
                s = s.split(",")[0]
            if not s.endswith(target):
                hostname = "%s.%s" % (s, target)
            else:
                #A user might feed an output list as a subdomain list.
                hostname = s
            if hostname not in spider_blacklist:
                spider_blacklist[hostname] = None
                work = (hostname, record_type)
                in_q.put(work)
    #Terminate the queue
    in_q.put(False)
    for i in range(process_count):
        worker = lookup(in_q, out_q, resolve_q, target, wildcards, spider_blacklist)
        worker.start()
    threads_remaining = process_count
    while True:
        try:
            #The output is valid hostnames
            result = out_q.get(True, 10)
            #we will get an empty exception before this runs.
            if not result:
                threads_remaining -= 1
            else:
                #run() is a generator, and yields results from the work queue
                yield result
        except Exception as e:
            #The cx_freeze version uses queue.Empty instead of Queue.Empty :(
            if type(e) == Queue.Empty or str(type(e)) == "":
                pass
            else:
                raise(e)
        #make sure everyone is complete
        if threads_remaining <= 0:
            break
    trace("killing nameserver process")
    #We no longer require name servers.
    try:
        killproc(pid = verify_nameservers_proc.pid)
    except:
        #Windows threading.Thread has no pid; use the cooperative shutdown.
        verify_nameservers_proc.end()
    trace("End")
496 |
#exit handler for signals. So ctrl+c will work.
#With 'multiprocessing' each worker is its own process, which side-steps the GIL;
#if the user wants to exit prematurely, each process must be killed.
def killproc(signum = 0, frame = 0, pid = False):
    """Forcefully terminate *pid* (default: the current process).

    Doubles as a signal handler, hence the (signum, frame) parameters.
    """
    if not pid:
        pid = os.getpid()
    if sys.platform.startswith('win'):
        try:
            kernel32 = ctypes.windll.kernel32
            #1 == PROCESS_TERMINATE access right.
            handle = kernel32.OpenProcess(1, 0, pid)
            kernel32.TerminateProcess(handle, 0)
            #NOTE(review): the handle is never released with CloseHandle;
            #harmless here because the caller exits immediately after.
        except:
            #Oah windows.
            pass
    else:
        #SIGKILL cannot be caught or ignored (idiomatic spelling of 9).
        os.kill(pid, signal.SIGKILL)
513 |
#Toggle debug output
verbose = False
def trace(*args, **kwargs):
    """Emit *args* to stderr (space-separated, newline-terminated) when the
    module-level 'verbose' flag is on; otherwise do nothing."""
    if not verbose:
        return
    sys.stderr.write("".join(str(item) + " " for item in args) + "\n")
522 |
def error(*args, **kwargs):
    """Print *args* to stderr (space-separated, newline-terminated) and
    abort the program with exit status 1."""
    sys.stderr.write("".join(str(item) + " " for item in args) + "\n")
    sys.exit(1)
529 |
def check_open(input_file):
    """Read all lines of *input_file* and return them as a list.

    Aborts via error() (exit status 1) when the file is missing,
    unreadable, or empty.

    BUG FIX: the original never closed the file handle; use a context
    manager so it is released deterministically.
    """
    ret = []
    #If we can't find a resolver from an input file, then we need to improvise.
    try:
        with open(input_file) as handle:
            ret = handle.readlines()
    except:
        error("File not found:", input_file)
    if not len(ret):
        error("File is empty:", input_file)
    return ret
540 |
#Every 'multiprocessing' process needs a signal handler.
#All processes need to die, we don't want to leave zombies.
def signal_init():
    """Install killproc as the handler for interrupt/stop/quit signals so
    Ctrl+C (and friends) tear down this process instead of leaving zombies."""
    #Escalate the signal to a hard kill to prevent zombies.
    signal.signal(signal.SIGINT, killproc)
    try:
        #POSIX-only signals; absent on Windows, hence the try/except.
        signal.signal(signal.SIGTSTP, killproc)
        signal.signal(signal.SIGQUIT, killproc)
    except:
        #Windows
        pass
552 |
if __name__ == "__main__":
    #Locate the bundled data files (names.txt, resolvers.txt) next to the program.
    if getattr(sys, 'frozen', False):
        # cx_freeze windows:
        base_path = os.path.dirname(sys.executable)
        multiprocessing.freeze_support()
    else:
        #everything else:
        base_path = os.path.dirname(os.path.realpath(__file__))
    parser = optparse.OptionParser("usage: %prog [options] target")
    parser.add_option("-s", "--subs", dest = "subs", default = os.path.join(base_path, "names.txt"),
              type = "string", help = "(optional) list of subdomains, default = 'names.txt'")
    parser.add_option("-r", "--resolvers", dest = "resolvers", default = os.path.join(base_path, "resolvers.txt"),
              type = "string", help = "(optional) A list of DNS resolvers, if this list is empty it will OS's internal resolver default = 'resolvers.txt'")
    parser.add_option("-t", "--targets_file", dest = "targets", default = "",
              type = "string", help = "(optional) A file containing a newline delimited list of domains to brute force.")
    parser.add_option("-o", "--output", dest = "output", default = False, help = "(optional) Output to file (Greppable Format)")
    parser.add_option("-j", "--json", dest = "json", default = False, help = "(optional) Output to file (JSON Format)")
    parser.add_option("-a", "-A", action = 'store_true', dest = "ipv4", default = False,
              help = "(optional) Print all IPv4 addresses for sub domains (default = off).")
    parser.add_option("--type", dest = "type", default = False,
              type = "string", help = "(optional) Print all reponses for an arbitrary DNS record type (CNAME, AAAA, TXT, SOA, MX...)")
    parser.add_option("-c", "--process_count", dest = "process_count",
              default = 16, type = "int",
              help = "(optional) Number of lookup theads to run. default = 16")
    parser.add_option("-f", "--filter_subs", dest = "filter", default = "",
              type = "string", help = "(optional) A file containing unorganized domain names which will be filtered into a list of subdomains sorted by frequency. This was used to build names.txt.")
    parser.add_option("-v", "--verbose", action = 'store_true', dest = "verbose", default = False,
              help = "(optional) Print debug information.")
    (options, args) = parser.parse_args()

    verbose = options.verbose

    if len(args) < 1 and options.filter == "" and options.targets == "":
        #BUG FIX: typo 'provie' -> 'provide' in the user-facing error message.
        parser.error("You must provide a target. Use -h for help.")

    if options.filter != "":
        #cleanup this file and print it out
        for d in extract_subdomains(options.filter):
            print(d)
        sys.exit()

    if options.targets != "":
        targets = check_open(options.targets) #the domains
    else:
        #multiple arguments on the cli: ./subbrute.py google.com gmail.com yahoo.com
        targets = args

    output = False
    if options.output:
        try:
            output = open(options.output, "w")
        except:
            error("Failed writing to file:", options.output)

    json_output = False
    if options.json:
        try:
            json_output = open(options.json, "w")
        except:
            error("Failed writing to file:", options.json)

    record_type = False
    if options.ipv4:
        record_type = "A"
    if options.type:
        record_type = str(options.type).upper()

    threads = []
    for target in targets:
        target = target.strip()
        if target:

            #target => domain
            #record_type => DNS record type to query (False = default A)
            #options.subs => file that contains the subdomains list
            #options.process_count => process count, default = 16
            #options.resolvers => the resolvers file
            #options.output / options.json => open output handles (or False)
            #NOTE(review): this print looks like leftover debugging output;
            #kept as-is to preserve the existing CLI behavior.
            print(target, record_type, options.subs, options.resolvers, options.process_count, output, json_output)
            print_target(target, record_type, options.subs, options.resolvers, options.process_count, output, json_output)
636 |
--------------------------------------------------------------------------------