")
132 | keys.update({key["name"]: key["name"]})
133 |
134 | else:
135 | Log.info("Form key name: "+G +
136 | key["name"]+N+" value: "+G+self.payload)
137 | keys.update({key["name"]: self.payload})
138 |
139 | except Exception as e:
140 | Log.info("Internal error: "+str(e))
141 | try:
142 | Log.info("Form key name: "+G +
143 | key["name"]+N+" value: "+G+self.payload)
144 | keys.update({key["name"]: self.payload})
145 | except KeyError as e:
146 | Log.info("Internal error: "+str(e))
147 |
148 | Log.info("Sending payload (GET) method...")
149 | req = self.session.get(urljoin(self.url, action), params=keys)
150 | if self.payload in req.text:
151 | Log.high("Detected XSS (GET) at " +
152 | urljoin(self.url, req.url))
153 | Log.high("GET data: "+str(keys))
154 | else:
155 | Log.info("Not vulnerable.")
156 |
157 | @classmethod
158 | def get_method(self):
159 | bsObj = BeautifulSoup(self.body, "html.parser")
160 | links = bsObj.find_all("a", href=True)
161 | for a in links:
162 | url = a["href"]
163 | if not (url.startswith("http://") or url.startswith("https://") or url.startswith("mailto:")):
164 | base = urljoin(self.url, a["href"])
165 | query = urlparse(base).query
166 | if query != "":
167 | Log.warning("Found link with query: "+G +
168 | query+N+" Maybe a vulnerable XSS point.")
169 |
170 | query_payload = query.replace(
171 | query[query.find("=")+1:], self.payload, 1)
172 | test = base.replace(query, query_payload, 1)
173 |
174 | query_all = base.replace(query, urlencode(
175 | {x: self.payload for x in parse_qs(query)}))
176 |
177 | Log.info("Query (GET) : "+test)
178 | Log.info("Query (GET) : "+query_all)
179 |
180 | _response = self.session.get(test)
181 | if self.payload in _response.text or self.payload in self.session.get(query_all).text:
182 | Log.high("Detected XSS (GET) at "+_response.url)
183 | else:
184 | Log.info(
185 | "No reflected XSS (GET) found, but this does not guarantee the page is safe...")
186 |
187 | @classmethod
188 | def main(self, url, proxy, headers, payload, cookie, method=2):
189 |
190 | print(W+"*"*15)
191 | self.payload = payload
192 | self.url = url
193 |
194 | self.session = session(proxy, headers, cookie)
195 | Log.info("Checking connection to: "+Y+url)
196 | try:
197 | ctr = self.session.get(url)
198 | self.body = ctr.text
199 | except Exception as e:
200 | Log.high("Internal error: "+str(e))
201 | return
202 |
203 | if ctr.status_code >= 400:
204 | Log.info("Connection failed "+G+str(ctr.status_code))
205 | return
206 | else:
207 | Log.info("Connection established "+G+str(ctr.status_code))
208 |
209 | if method >= 2:
210 | self.post_method()
211 | self.get_method()
212 | self.get_method_form()
213 |
214 | elif method == 1:
215 | self.post_method()
216 |
217 | elif method == 0:
218 | self.get_method()
219 | self.get_method_form()
220 |
--------------------------------------------------------------------------------
/crawler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # Import Modules
5 | import requests
6 | from core import *
7 | from Log import *
8 | from bs4 import BeautifulSoup
9 | from helper import *
10 | from urllib.parse import urljoin
11 | from multiprocessing import Process
12 |
13 | __Name__ = "scancss"
14 | __description__ = "scancss is an XSS vulnerability scanner tool."
15 | __author__ = "Md. Nur Habib"
16 | __copyright__ = "Copyright 2022."
17 | __license__ = "GNU v2.0"
18 | __version__ = "v1.0.1"
19 | __email__ = "thenurhabib@gmail.com"
20 |
21 |
22 |
23 | # Crawler Class.
24 | class crawler:
25 |
26 | visited = []
27 |
28 | @classmethod
29 | def getLinks(self, base, proxy, headers, cookie):
30 |
31 | lst = []
32 |
33 | conn = session(proxy, headers, cookie)
34 | text = conn.get(base).text
35 | isi = BeautifulSoup(text, "html.parser")
36 |
37 | for obj in isi.find_all("a", href=True):
38 | url = obj["href"]
39 |
40 | if url.startswith("http://") or url.startswith("https://"):
41 | continue
42 |
43 | elif url.startswith("mailto:") or url.startswith("javascript:"):
44 | continue
45 |
46 | elif urljoin(base, url) in self.visited:
47 | continue
48 |
49 | else:
50 | lst.append(urljoin(base, url))
51 | self.visited.append(urljoin(base, url))
52 |
53 | return lst
54 |
55 | @classmethod
56 | def crawl(self, base, depth, proxy, headers, level, method, cookie):
57 |
58 | urls = self.getLinks(base, proxy, headers, cookie)
59 |
60 | for url in urls:
61 |
62 | p = Process(target=core.main, args=(
63 | url, proxy, headers, level, cookie, method))
64 | p.start()
65 | p.join()
66 | if depth != 0:
67 | self.crawl(url, depth-1, proxy, headers, level, method, cookie)
68 |
69 | else:
70 | break
71 |
--------------------------------------------------------------------------------
/helper.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # Import Modules
5 | import json
6 | import requests
7 |
8 | __Name__ = "scancss"
9 | __description__ = "scancss is an XSS vulnerability scanner tool."
10 | __author__ = "Md. Nur Habib"
11 | __copyright__ = "Copyright 2022."
12 | __license__ = "GNU v2.0"
13 | __version__ = "v1.0.1"
14 | __email__ = "thenurhabib@gmail.com"
15 |
16 |
17 | # Style class
18 | class Style:
19 | reset = '\033[0m'
20 | bold = '\033[01m'
21 | underline = '\033[04m'
22 | red = '\033[31m'
23 | blue = '\033[34m'
24 | cyan = '\033[36m'
25 | lightgrey = '\033[37m'
26 | darkgrey = '\033[90m'
27 | yellow = '\033[93m'
28 |
29 |
30 | agent = {
31 | 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
32 | line = "—————————————————"
33 |
34 |
35 | def session(proxies, headers, cookie):
36 | s = requests.Session()
37 | s.proxies = proxies
38 | s.headers = headers
39 | s.cookies.update(json.loads(cookie))
40 | return s
41 |
42 |
43 | logo = f"""{Style.bold}{Style.yellow}
44 | ___ ___ ___ ___ ___ ___ ___
45 | /\ \ /\ \ /\ \ /\__\ /\ \ /\ \ /\ \
46 | /::\ \ /::\ \ /::\ \ /:| _|_ /::\ \ /::\ \ /::\ \
47 | /\:\:\__\ /:/\:\__\ /::\:\__\ /::|/\__\ /:/\:\__\ /\:\:\__\ /\:\:\__\\
48 | \:\:\/__/ \:\ \/__/ \/\::/ / \/|::/ / \:\ \/__/ \:\:\/__/ \:\:\/__/
49 | \::/ / \:\__\ /:/ / |:/ / \:\__\ \::/ / \::/ /
50 | \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ \/__/ {Style.reset}{Style.red}{__version__}{Style.yellow}
51 |
52 |
53 | 𝓕𝓪𝓼𝓽𝓮𝓼𝓽 𝓣𝓸𝓸𝓵 𝓽𝓸 𝓯𝓲𝓷𝓭 𝓒𝓻𝓸𝓼𝓼 𝓼𝓲𝓽𝓮 𝓼𝓬𝓻𝓲𝓹𝓽𝓲𝓷𝓰.
54 | ======================================================
55 | {Style.reset}{Style.darkgrey}
56 | Author : Md. Nur Habib
57 | Email : thenurhabib@gmail.com{Style.reset} \n\n"""
58 |
--------------------------------------------------------------------------------
/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thenurhabib/scancss/45dbc0a8ae3fff391ce0d273de9f631785785ef7/img/logo.png
--------------------------------------------------------------------------------
/img/ss1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thenurhabib/scancss/45dbc0a8ae3fff391ce0d273de9f631785785ef7/img/ss1.png
--------------------------------------------------------------------------------
/img/ss2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thenurhabib/scancss/45dbc0a8ae3fff391ce0d273de9f631785785ef7/img/ss2.png
--------------------------------------------------------------------------------
/img/ss3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/thenurhabib/scancss/45dbc0a8ae3fff391ce0d273de9f631785785ef7/img/ss3.png
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
19 | # scancss
20 |
21 | Fastest tool to find XSS.
22 |
23 |
24 |
25 |
26 |
27 | ### scancss is a fast tool that detects Cross-Site Scripting (XSS) automatically, and it also includes an intelligent payload generator.
28 |
29 |
30 |
31 | ### Main Features
32 | - Reflected XSS scanning
33 | - Blind XSS detection
34 | - Crawls all links on a website
35 | - POST and GET forms supported (see the example below)
36 | - Advanced error handling
37 | - Multiprocessing support
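
For instance, here is how the single-scan and crawling modes map onto the CLI (with a hypothetical placeholder URL; all flags are documented in the Usage section below):

```bash
# Single-page scan (no crawling), testing both GET and POST
python3 scancss.py --single "http://example.com/index.php" --method 2

# Crawl the whole site to depth 3 and scan every discovered link
python3 scancss.py -u "http://example.com" --depth 3
```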
38 |
39 |
40 |
41 | 
42 |
43 |
44 |
45 |
46 | ### Documentation
47 | #### Install
48 | ```bash
49 | git clone https://github.com/thenurhabib/scancss.git
50 | cd scancss
51 | python3 -m pip install -r requirements.txt
52 | python3 scancss.py --help
53 | ```
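
Optionally, install inside a virtual environment to keep the dependencies isolated (standard Python tooling, not specific to scancss):

```bash
python3 -m venv .venv
source .venv/bin/activate
python3 -m pip install -r requirements.txt
```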
54 |
55 |
56 | #### Usage
57 |
58 | ```text
59 | ========================================================================
60 | usage: scancss -u [options]
61 |
62 | Options:
63 | --help Show usage and help parameters
64 | -u Target URL (e.g. http://example.com)
65 | --depth Depth of web pages to crawl. Default: 2
66 | --payload-level Level for the payload generator {1...6}; 7 prompts for a custom payload. Default: 6
67 | --payload Load custom payload directly (e.g. )
68 | --method Method setting(s):
69 | 0: GET
70 | 1: POST
71 | 2: GET and POST (default)
72 | --user-agent Request user agent (e.g. Chrome/2.1.1/...)
73 | --single Single scan. No crawling, just one address
74 | --proxy Set proxy (e.g. {'https':'https://10.10.1.10:1080'})
75 | --about Print information about the scancss tool
76 | --cookie Set cookie (e.g. {'ID':'12464476836'})
77 |
78 | ========================================================================
79 | ```
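
As a concrete, hypothetical example combining several of these options in one scan (example values taken from the option descriptions above):

```bash
# Crawl example.com to depth 2 through a proxy, sending a session
# cookie and using payload generator level 4
python3 scancss.py -u "http://example.com" \
    --proxy "{'https':'https://10.10.1.10:1080'}" \
    --cookie '{"ID":"12464476836"}' \
    --payload-level 4
```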
80 |
81 |
82 | 
83 |
84 | ### Author
85 | ```yaml
86 | Name : Md. Nur Habib
87 | Medium : thenurhabib.medium.com
88 | Twitter : https://twitter.com/thenurhab1b
89 | HackerRank : https://www.hackerrank.com/thenurhabib
90 |
91 | ```
92 |
93 | ##### Thank You.
94 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | beautifulsoup4 >= 4.0.0
2 | requests >= 2.0.0
3 | click >= 8.0
4 |
--------------------------------------------------------------------------------
/scancss.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | __Name__ = "scancss"
5 | __description__ = "scancss is an XSS vulnerability scanner tool."
6 | __author__ = "Md. Nur Habib"
7 | __copyright__ = "Copyright 2022."
8 | __license__ = "GNU v2.0"
9 | __version__ = "v1.0.1"
10 | __email__ = "thenurhabib@gmail.com"
11 |
12 |
13 | # Import Modules.
14 | import argparse
15 | from core import *
16 | from random import randint
17 | from Log import *
18 | from helper import *
19 | from crawler import *
20 |
21 |
22 | # The Style class is provided by helper (imported above via "from helper import *")
33 |
34 |
35 | epilog = f"""
36 | ==================================================
37 | GitHub: https://www.github.com/thenurhabib/scancss
38 | Version : {__version__}
39 |
40 | """
41 |
42 |
43 | def check(getopt):
44 | payload = int(getopt.payload_level)
45 | if payload > 6 and getopt.payload is None:
46 | Log.info("Do you want use custom payload (Y/n)?")
47 | answer = input("> {W}")
48 | if answer.lower().strip() == "y":
49 | Log.info("Write the XSS payload below")
50 | payload = input("> {W}")
51 | else:
52 | payload = core.generate(randint(1, 6))
53 |
54 | else:
55 | payload = core.generate(payload)
56 |
57 | return payload if getopt.payload is None else getopt.payload
58 |
59 |
60 | def start():
61 | parse = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
62 | usage="scancss -u [options]", epilog=epilog, add_help=False)
63 |
64 | pos_opt = parse.add_argument_group("Options")
65 | pos_opt.add_argument("--help", action="store_true",
66 | default=False, help="Show usage and help parameters")
67 | pos_opt.add_argument(
68 | "-u", metavar="", help="Target URL (e.g. http://example.com)")
69 | pos_opt.add_argument("--depth", metavar="",
70 | help="Depth of web pages to crawl. Default: 2", default=2)
71 | pos_opt.add_argument("--payload-level", metavar="",
72 | help="Level for the payload generator {1...6}; 7 prompts for a custom payload. Default: 6", default=6)
73 | pos_opt.add_argument("--payload", metavar="",
74 | help="Load custom payload directly (e.g. )", default=None)
75 | pos_opt.add_argument("--method", metavar="",
76 | help="Method setting(s): \n\t0: GET\n\t1: POST\n\t2: GET and POST (default)", default=2, type=int)
77 | pos_opt.add_argument("--user-agent", metavar="",
78 | help="Request user agent (e.g. Chrome/2.1.1/...)", default=agent)
79 | pos_opt.add_argument("--single", metavar="",
80 | help="Single scan. No crawling just one address")
81 | pos_opt.add_argument("--proxy", default=None, metavar="",
82 | help="Set proxy (e.g. {'https':'https://10.10.1.10:1080'})")
83 | pos_opt.add_argument("--about", action="store_true",
84 | help="Print information about scancss tool")
85 | pos_opt.add_argument(
86 | "--cookie", help="Set cookie (e.g {'ID':'1094200543'})", default='''{"ID":"1094200543"}''', metavar="")
87 |
88 | getopt = parse.parse_args()
89 | print(logo)
90 | Log.info(f"{Style.bold}{Style.blue}--scancss{Style.reset}{Style.cyan}")
91 | if getopt.u:
92 | core.main(getopt.u, getopt.proxy, getopt.user_agent,
93 | check(getopt), getopt.cookie, getopt.method)
94 |
95 | crawler.crawl(getopt.u, int(getopt.depth), getopt.proxy,
96 | getopt.user_agent, check(getopt), getopt.method, getopt.cookie)
97 |
98 | elif getopt.single:
99 | core.main(getopt.single, getopt.proxy, getopt.user_agent,
100 | check(getopt), getopt.cookie, getopt.method)
101 |
102 | elif getopt.about:
103 | print(f"""
104 | ***************
105 | This tool was made for educational and legal purposes only.
106 | Please don't use it for any bad or illegal purpose.
107 | ***************
108 | {epilog}""")
109 | else:
110 | parse.print_help()
111 |
112 |
113 | if __name__ == "__main__":
114 | start()
115 |
--------------------------------------------------------------------------------