├── README.md
└── Dir-Xcan6.py
/README.md:
--------------------------------------------------------------------------------
1 | Dir-Xcan6
2 | ========
3 |
4 | Release Date: 18/12/2014
5 |
6 | Dir-Xcan6 is a free and open source scanner. Based on the OWASP's DirBuster project that is now closed. It is _mostly_ experimental software.
7 |
8 | This program is for finding hidden directories that are not directly linked on a website. It finds HTTP response code 200 directories and outputs the URLs to a file.
9 |
10 | Changelog:
11 |
12 | # [DONE] - Http Authentication
13 | # [DONE] - Add COLOR.
14 | # [DONE] - http Proxy options.
15 | # [DONE] - Kill threads on Ctrl+C.
16 | # [DONE] - Defaults added to Arguments.
17 | # [DONE] - Now using Requests instead of Urllib2.
18 | # [DONE] - Verbose modes added, prints found and Non-Authed folders.
19 | # [DONE] - Added User-Agent option.
20 | # [DONE] - Cookie Authentication (with multiple cookies)
21 | # [DONE] - SOCKS Proxy options # To use TOR socks5://127.0.0.1:9050 or socks4://127.0.0.1:9050
22 |
23 | TODO:
24 |
25 | # Change number of threads based on response time from server.
26 | # Fix error reporting for connection issues.
27 | # Add Pause/Stop/Start functions to script.
28 | # Add XML output option.
29 | # Custom 404 page option.
30 | # Add NTLM Authentication
31 |
32 | --**** Use at your own risk. ****--
33 |
34 | + Tested on: Linux 3.2.6 Ubuntu/Debian (Backtrack & Kali)
35 |
36 | ## Usage:
37 |
38 | root@bt:~# Dir-Xcan6.py -s https://testphp.vulnweb.com -d directorylist.txt -o Dir-Xcan-results.html -n 30 -p socks5://127.0.0.1:9050 -a username:admin
39 | -s http://192.168.0.1 -a admin:password -u Mozilla/4.0 -V
40 |
41 | -s = Target domain name or ip
42 | -d = Filename of the list you want to scan for (Default is "directorylist.txt" that's included in the repo)
43 | -o = Output Filename for logging of Code 200 Responses (Default is "Dir-Xcan-results.html")
44 | -n = Number of threads (Default is 5)
45 | -p = HTTP Proxy settings (ip:port)
46 | -a = HTTP Basic Authentication (Username:Password)
47 | -u = User-Agent String (Default is "Mozilla/5.0")
48 | -V = Verbose Mode, Prints 200 and 401 codes to the screen.
49 |
50 |
51 | Other Arguments:
52 |
53 | -v = Version information
54 | -h = Help menu
55 |
56 | The program will print out the code 200 HTTP Responses to the output file.
57 |
58 | It will feed you the percentage of the scan until completion and the amount of time it took
59 | to complete the task.
60 |
61 | Enjoy. :]
62 | ~/ NoobieDog
63 |
64 | ## Contact Information:
65 |
66 | [ NoobieDog ] - @NoobieDog on Twitter
67 | - stuart@sensepost.com
68 | - www.sensepost.com // @sensepost
69 |
70 | ## Original Header:
71 |
72 | - This was written for educational purpose and pentests only. Use it at your own risk.
73 | - Author will be not responsible for any damage!
74 | - Toolname : Dir-Xcan6.py
75 | - Coder : stuart@sensepost.com // @NoobieDog
76 | - Version : 6.0
77 |
--------------------------------------------------------------------------------
/Dir-Xcan6.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # LAST UPDATE 18/12/14
3 | #
4 | # DIR-XCAN5.PY
5 | # This program is for finding hidden directories that are not directly linked on a website. It finds HTTP response code 200 directories and outputs the URL to file.
6 |
7 | # THIS PROGRAM IS A PYTHON VERSION OF THE OWASP'S DIRBUSTER PROJECT THAT IS NOW CLOSED
8 | # https://www.owasp.org/index.php/Category:OWASP_DirBuster_Project
9 | #
10 | # This script uses OWASP's DirBuster list - directory-list-2.3-medium.txt
11 | #
12 | # Copyright 2007 James Fisher
13 | #
14 | # This work is licensed under the Creative Commons
15 | # Attribution-Share Alike 3.0 License. To view a copy of this
16 | # license, visit http://creativecommons.org/licenses/by-sa/3.0/
17 | # or send a letter to Creative Commons, 171 Second Street,
18 | # Suite 300, San Francisco, California, 94105, USA.
19 | #
20 | # ADD ME ON TWITTER @NOOBIEDOG
21 |
22 | #Changelog:
23 | # [DONE] - Http Authentication
24 | # [DONE] - Add COLOR.
25 | # [DONE] - http Proxy options.
26 | # [DONE] - Kill threads on Ctrl+C.
27 | # [DONE] - Defaults added to Arguments.
28 | # [DONE] - Now using Requests instead of Urllib2.
29 | # [DONE] - Verbose modes added, prints found and Non-Authed folders.
30 | # [DONE] - Added User-Agent option.
31 | # [DONE] - Cookie Authentication (with multiple cookies)
32 | # [DONE] - SOCKS Proxy options # To use TOR socks5://127.0.0.1:9050 or socks4://127.0.0.1:9050
33 | #TODO:
34 |
35 | # Change number of threads based on response time from server.
36 | # Fix error reporting for connection issues.
37 | # Add Pause/Stop/Start functions to script.
38 | # Add XML output option.
39 | # Custom 404 page option.
40 | # Add NTLM Authentication
41 |
42 | __author__ = '@NoobieDog'
43 |
44 | from sys import argv
45 | import argparse
46 | import Queue
47 | import sys
48 | import threading
49 | #import requests
50 | import requesocks
51 | import re
52 | import time
53 |
54 | # Console colors
55 | W = '\033[0m' # white (normal)
56 | R = '\033[31m' # red
57 | G = '\033[32m' # green
58 | O = '\033[33m' # orange
59 | B = '\033[34m' # blue
60 | GR = '\033[37m' # gray
61 | BB = '\033[1m' # Bold
62 | NB = '\033[0m' # Not bold
63 |
64 | def mapcount(listing):
65 | lines = 0
66 | with open(listing) as f:
67 | lines = sum(1 for line in f)
68 | return lines
69 |
70 | parser = argparse.ArgumentParser(
71 | version='5.0',
72 | description='A Python version of DirBuster',
73 | epilog='Dir-Xcan is a multi threaded python application designed to brute force directories on web/application servers.')
74 |
75 | parser.add_argument('-s', action="store", help='Website Domain or IP')
76 | parser.add_argument('-d', action="store", help='Directory word list', default="directorylist.txt")
77 | parser.add_argument('-o', action="store", help='Output file name (HTML)', default="Dir-Xcan-results.html")
78 | parser.add_argument('-n', action="store", help='Number of threads', default="5")
79 | parser.add_argument('-p', action="store", help='Proxy address and port (host:port)')
80 | parser.add_argument('-a', action="store", help='Authentication BasicHTTP(username:password)')
81 | parser.add_argument('-c', action="store", help='use a previously established sessions cookie', default=None)
82 | parser.add_argument('-u', action="store", help='User-Agent', default="Mozilla/5.0")
83 | parser.add_argument("-V", action="store_true", help="Output information about new data.")
84 |
85 | try:
86 | results = vars(parser.parse_args())
87 | #results = parser.parse_args()
88 |
89 | except IOError, msg:
90 | parser.error(str(msg))
91 |
92 |
93 | print O + '''
94 | %s _____ _____ _____ __ _______ _ _
95 | | __ \_ _| __ \ \ \ / / ____| /\ | \ | |
96 | | | | || | | |__) |____\ V / | / \ | \| |
97 | | | | || | | _ /______> <| | / /\ \ | . ` |
98 | | |__| || |_| | \ \ / . \ |____ / ____ \| |\ |
99 | |_____/_____|_| \_\ /_/ \_\_____/_/ \_\_| \_|%s
100 |
101 | %sRelease Date%s: 06/10/2014
102 | %sRelease Version%s: V.5.0
103 | %sCode%s: stuart@sensepost.com // @NoobieDog
104 | %sVisit%s: www.sensepost.com // @sensepost
105 | ''' %(BB,NB,R,W,R,W,R,W,R,W)
106 |
107 | ProxyOpt = False
108 | AuthOpt = False
109 | CookiesOpt = False
110 |
111 | if not results['s'] or not results['d']:
112 | parser.print_help()
113 | exit()
114 | else:
115 | target = results['s']
116 | if not target.startswith("http"):
117 | print R + ' Please include the http:// or https:// parts' + W
118 | exit()
119 | list_file = results['d']
120 | outputname = results['o']
121 | ThreadNumber = int(results['n'])
122 | Proxy_Addr = results['p']
123 | Auth_Data = results['a']
124 | Usr_Agent = results['u']
125 | if results['p']:
126 | ProxyOpt = True
127 | Proxies = {
128 | "http": Proxy_Addr,
129 | "https": Proxy_Addr
130 | }
131 | if results['a']:
132 | AuthOpt = True
133 | Auth_User, Auth_Pwd = results['a'].split(':', 1)
134 | if results['u']:
135 | headers = {
136 | 'User-Agent': Usr_Agent,
137 | }
138 | if results['c'] is not None: ####### need to change imput to : not ; as it fuckes with life!
139 | CookiesOpt = True
140 | cookies = {}
141 |
142 | # Check to see if the cookie has a semicolon, if so there might be mutiple cookies
143 | if re.search(';', results['c']):
144 | print results['c']
145 | cookielist = results['c'].split(';')
146 | # Loop through list of cookies
147 | for authcookies in cookielist:
148 |
149 | # If there isn't an equal and some sort of content, then it isn't a valid cookie, otherwise add to list of cookies
150 | if re.search('[":_-/a-zA-Z0-9]', authcookies) and re.search('[=]', authcookies): ##### Error here too, regex all fucked up
151 | cookieparts = authcookies.split('=')
152 | cookies[cookieparts[0]] = cookieparts[1]
153 |
154 | else:
155 | # Check to see if cookie has =, if not it is malformed and send dummy cookie
156 | # If so, split at the = into correct name/value pairs
157 | if re.search('=', results['c']):
158 | cookielist = results['c'].split('=')
159 | cookies[cookielist[0]] = cookielist[1]
160 | else:
161 | print ' Error in Cookie - Sort your shit out!'
162 | else:
163 | cookielist = results['c'].split('=')
164 | cookies = {cookielist[0]: cookielist[1],}
165 |
166 | print O + ' lines to try..' + str(mapcount(list_file)) + W
167 |
168 | with open(list_file) as f:
169 | directorys = f.readlines()
170 | queue = Queue.Queue()
171 | NotFound = 0
172 | NotAuthorised = 0
173 | Found = 0
174 | Forbidden = 0
175 | Other = 0
176 | LinesLeft = len(directorys)
177 | Lines = len(directorys)
178 |
179 | def GetURL(host, target):
180 | global NotFound, Found, Forbidden, Other, LinesLeft, Lines
181 | sys.stdout.write("\r\x1b[K \033[31m%d \033[0mFound, \033[33m%d \033[0mForbidden, \033[32m%d \033[0mNotFound, \033[37m%d \033[0mOther, \033[37m%d \033[0mPercent Left" % (Found, Forbidden, NotFound, Other, LinesLeft*100/Lines))
182 | sys.stdout.flush()
183 |
184 | try:
185 | if AuthOpt == True:
186 | url = requesocks.get(target + '/' + str(host.rstrip()), auth=(Auth_User, Auth_Pwd), headers=headers)
187 | elif ProxyOpt == True:
188 | url = requesocks.get(target + '/' + str(host.rstrip()), proxies=Proxies, headers=headers)
189 | elif AuthOpt and ProxyOpt == True:
190 | url = requesocks.get(target + '/' + str(host.rstrip()), proxies=Proxy_Addr, auth=(Auth_User, Auth_Pwd), headers=headers)
191 | elif CookiesOpt == True:
192 | url = requesocks.get(target + '/' + str(host.rstrip()), cookies=cookies, headers=headers)
193 | else:
194 | url = requesocks.get(target + '/' + str(host.rstrip()), headers=headers)
195 |
196 | code = url.status_code
197 | if code == 401:
198 | Other += 1
199 | LinesLeft -= 1
200 | outputfile.write("" + target + '/' + host + " - REQUIRES AUTHENTICATION
\n");
201 | if results['V']:
202 | sys.stdout.write("\r\x1b[K\033[33m %s/%s\033[0m-REQUIRES AUTHENTICATION" % (target, host)) # Doesnt print after value :S
203 | sys.stdout.flush()
204 |
205 | elif code == 403:
206 | Forbidden = Forbidden + 1
207 | LinesLeft -= 1
208 | elif code == 404: # Need to look at making this shizz better (array or list)
209 | NotFound += 1
210 | LinesLeft -= 1
211 | elif code == 200:
212 | Found += 1
213 | LinesLeft -= 1
214 | outputfile.write("" + target + '/' + host + "
\n");
215 | if results['V']:
216 | sys.stdout.write("\r\x1b[K\033[31m %s/%s\033[0m-FOUND" % (target, host)) # Doesnt print after value :S
217 | sys.stdout.flush()
218 | else:
219 | Other += 1
220 | LinesLeft -= 1
221 |
222 |
223 | except requesocks.ConnectionError, e:
224 | outputfile.write("We failed to reach a server.
Reason: Connection Error