├── README.md
├── nova.txt
├── novaa.txt
├── parametri.txt
├── path-reflection.py
├── reflection.py
├── subs-dnsbruter-medium.txt
├── subs-dnsbruter-small.txt
└── xss0rRecon.sh

/README.md:
--------------------------------------------------------------------------------
1 | xss0rRecon
2 | 
3 | Welcome to the xss0rRecon tool repository! 🚀
4 | Installation Guide: https://xss0r.medium.com/tool-overview-6c255fe7ec9b
5 | 
6 | To use xss0rRecon effectively, all required files must be downloaded and placed in the same folder. Follow the instructions below to get started.
7 | 🛠️ Setup Instructions:
8 | 
9 | Download the necessary files:
10 | Visit https://store.xss0r.com
11 | Choose a plan (the PRO plan is free!). From the 10th to the 15th of each month, we provide a 5-day free license for the Professional plan so users can explore the tool before committing to a purchase. The license details are shown in a popup on store.xss0r.com and remain active only during this period; after the 15th, the license expires.
12 | 
13 | Download all the tools, wordlists, and the xss0r tool.
14 | 
15 | Extract everything:
16 | Ensure all the downloaded tools, wordlists, and the xss0r tool are extracted into the same folder where the xss0rRecon tool is located.
17 | 
18 | Run the tool:
19 | With everything in place, you're now ready to run xss0rRecon and start your recon tasks! 💻
20 | 
21 | If you have any questions or run into issues, feel free to reach out to me.
22 | 
--------------------------------------------------------------------------------
/nova.txt:
--------------------------------------------------------------------------------
1 | id
2 | admin
3 | listing
4 | administracija
5 | vijece
6 | dokumenti
7 | dokument
8 | opcina
9 | maglaj
10 | ls
11 | 12
12 | id
13 | number
14 | page
15 | stranica
--------------------------------------------------------------------------------
/novaa.txt:
--------------------------------------------------------------------------------
1 | id
2 | admin
3 | listing
4 | administracija
5 | vijece
6 | dokumenti
7 | webmail
8 | ns
9 | ns1
10 | mail
11 | dokument
12 | opcina
13 | artist
14 | maglaj
15 | ls
16 | cat
17 | 12
18 | id
19 | number
20 | page
21 | stranica
22 | 
--------------------------------------------------------------------------------
/path-reflection.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import threading
3 | import argparse
4 | from urllib.parse import urlparse, urlunparse
5 | import time
6 | import os
7 | 
8 | # Set the timeout limit (in seconds)
9 | TIMEOUT = 10
10 | 
11 | # Global variables to track progress
12 | total_urls = 0
13 | processed_urls = 0
14 | saved_urls = set()  # Set to track saved URLs
15 | 
16 | # ANSI escape sequences for color
17 | BOLD = '\033[1m'
18 | RED = '\033[91m'
19 | BOLD_RED = '\033[1;91m'
20 | GREEN = '\033[92m'
21 | BLUE = '\033[94m'
22 | CYAN = '\033[96m'
23 | RESET = '\033[0m'
24 | 
25 | def print_banner():
26 |     banner = f"""
27 |     {GREEN}#########################################{RESET}
28 |     {GREEN}#                                       #
29 |     {GREEN}#    {BOLD}XSS Reflection Checker V2{RESET}{GREEN}          #
30 |     {GREEN}#    {BOLD}Developed by Ibrahim{RESET}{GREEN}               #
31 |     {GREEN}#                                       #
32 |     {GREEN}#########################################{RESET}
33 |     """
34 |     print(banner)
35 | 
36 | def save_reflected_url(url, modified_path, output_file):
37 |     """Save reflected URLs with '{payload}' replacing 'ibrahimXSS'"""
38 |     payload_url = 
urlunparse(url._replace(path=modified_path)).replace("ibrahimXSS", "{payload}") 39 | if payload_url not in saved_urls: 40 | with open(output_file, 'a') as f: 41 | f.write(payload_url + '\n') 42 | saved_urls.add(payload_url) 43 | print(f"{GREEN}[SAVED] {payload_url}{RESET}") 44 | 45 | def check_reflection(url, output_file): 46 | global processed_urls 47 | 48 | # List of supported extensions 49 | SUPPORTED_EXTENSIONS = ['php', 'asp', 'aspx', 'jsp', 'jspx', 'html', 'htm', 'xhtml', 'shtml', 'js'] 50 | 51 | try: 52 | parsed_url = urlparse(url) 53 | path_segments = parsed_url.path.strip('/').split('/') 54 | 55 | # Test each path segment individually 56 | for i, segment in enumerate(path_segments): 57 | modified_segments = path_segments.copy() 58 | 59 | # Inject 'ibrahimXSS' only for supported extensions 60 | if '.' in segment: 61 | base, ext = segment.rsplit('.', 1) 62 | if ext in SUPPORTED_EXTENSIONS: 63 | modified_segments[i] = f'ibrahimXSS.{ext}' 64 | else: 65 | # For non-extension segments, inject 'ibrahimXSS' 66 | modified_segments[i] = 'ibrahimXSS' 67 | 68 | modified_path = '/' + '/'.join(modified_segments) 69 | modified_url = urlunparse(parsed_url._replace(path=modified_path)) 70 | 71 | # Send the request 72 | response = requests.get(modified_url, timeout=TIMEOUT) 73 | if 'ibrahimXSS' in response.text: 74 | print(f"{GREEN}[+] Reflection found: {modified_url}{RESET}") 75 | save_reflected_url(parsed_url, modified_path, output_file) 76 | 77 | # Test appending 'ibrahimXSS' at the end of the path 78 | appended_path = parsed_url.path.rstrip('/') + '/ibrahimXSS' 79 | appended_url = urlunparse(parsed_url._replace(path=appended_path)) 80 | response = requests.get(appended_url, timeout=TIMEOUT) 81 | if 'ibrahimXSS' in response.text: 82 | print(f"{GREEN}[+] Reflection found: {appended_url}{RESET}") 83 | save_reflected_url(parsed_url, appended_path, output_file) 84 | 85 | except requests.exceptions.Timeout: 86 | print(f"{RED}[!] Timeout: {url}{RESET}") 87 | time.sleep(2) 88 | except requests.exceptions.RequestException as e: 89 | print(f"{RED}[!] 
Error: {url} - {str(e)}{RESET}")
90 |         time.sleep(2)
91 |     finally:
92 |         processed_urls += 1
93 |         print(f"{BLUE}[INFO] Progress: {processed_urls}/{total_urls} URLs processed.{RESET}")
94 | 
95 | def post_process_urls(input_file, output_file):
96 |     """Post-process URLs to remove duplicates and replace 'ibrahimXSS' with '{payload}'"""
97 |     print(f"{CYAN}Processing URLs to replace 'ibrahimXSS' with '{{payload}}'...{RESET}")
98 |     temp_file = f"{output_file}.tmp"
99 |     with open(input_file, 'r') as infile, open(temp_file, 'w') as outfile:
100 |         urls = set()
101 |         for line in infile:
102 |             line = line.strip().replace("ibrahimXSS", "{payload}")
103 |             urls.add(line)
104 |         for url in sorted(urls):
105 |             outfile.write(url + '\n')
106 |     os.rename(temp_file, output_file)
107 |     print(f"{GREEN}Processed URLs saved to {output_file}{RESET}")
108 | 
109 | def main():
110 |     global total_urls
111 |     print_banner()
112 | 
113 |     parser = argparse.ArgumentParser(description="Path Reflection Checker")
114 |     parser.add_argument("file_path", type=str, help="Path to the text file containing URLs")
115 |     parser.add_argument("--threads", type=int, default=5, help="Number of threads to use (default: 5)")
116 |     args = parser.parse_args()
117 | 
118 |     output_file = "path-xss.txt"
119 |     processed_output = "path-xss-urls.txt"
120 | 
121 |     # Clear previous results
122 |     open(output_file, 'w').close()
123 | 
124 |     try:
125 |         with open(args.file_path, 'r') as f:
126 |             urls = [line.strip() for line in f if line.strip()]
127 |             total_urls = len(urls)
128 |     except Exception as e:
129 |         print(f"{RED}Error: {str(e)}{RESET}")
130 |         return
131 | 
132 |     # Start reflection checks
133 |     threads = []
134 |     for url in urls:
135 |         while threading.active_count() - 1 >= args.threads:
136 |             time.sleep(0.05)  # Sleep briefly instead of busy-spinning while waiting for a free thread slot
137 |         thread = threading.Thread(target=check_reflection, args=(url, output_file))
138 |         threads.append(thread)
139 |         thread.start()
140 | 
141 |     for thread in threads:
142 |         thread.join()
143 | 
144 |     # Post-process URLs
145 |     if os.path.isfile(output_file):
146 |         print(f"{CYAN}Reflection check complete. 
Post-processing results...{RESET}") 147 | post_process_urls(output_file, processed_output) 148 | else: 149 | print(f"{RED}Error: {output_file} was not generated.{RESET}") 150 | 151 | if __name__ == "__main__": 152 | main() 153 | -------------------------------------------------------------------------------- /reflection.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import threading 3 | import argparse 4 | from urllib.parse import urlparse, parse_qs, urlunparse, urlencode 5 | import time 6 | 7 | # Set the timeout limit (in seconds) 8 | TIMEOUT = 10 9 | 10 | # Global variables to track progress 11 | total_urls = 0 12 | processed_urls = 0 13 | 14 | # ANSI escape sequences for color 15 | BOLD = '\033[1m' 16 | RED = '\033[91m' 17 | BOLD_RED = '\033[1;91m' 18 | GREEN = '\033[92m' 19 | BLUE = '\033[94m' 20 | RESET = '\033[0m' 21 | 22 | def print_banner(): 23 | banner = f""" 24 | {GREEN}#########################################{RESET} 25 | {GREEN}# #{RESET} 26 | {GREEN}# {BOLD}XSS Reflection Checker V2 {RESET}{GREEN} #{RESET} 27 | {GREEN}# {BOLD}Developed by Ibrahim{RESET}{GREEN} #{RESET} 28 | {GREEN}# #{RESET} 29 | {GREEN}#########################################{RESET} 30 | {BOLD}Usage:{RESET} # 31 | python reflection.py urls.txt --threads 2 32 | """ 33 | print(banner) 34 | 35 | def save_reflected_url(original_url, param_name, modified_params, output_file): 36 | """Save the modified URL with {payload} replacing the specific parameter.""" 37 | temp_params = modified_params.copy() 38 | 39 | # Save with {payload} in place of the current parameter without encoding 40 | temp_params[param_name] = "{payload}" 41 | query = "&".join(f"{k}={','.join(v)}" if isinstance(v, list) else f"{k}={v}" for k, v in temp_params.items()) 42 | payload_url = urlunparse(urlparse(original_url)._replace(query=query)) 43 | 44 | # Save the clean payload URL to the output file 45 | with open(output_file, 'a') as f: 46 | f.write(payload_url + '\n') 47 | 48 | print(f"{GREEN}[SAVED] {payload_url}{RESET}") 49 | 50 | def check_reflection(url, output_file): 51 | global processed_urls 52 | 53 | try: 54 | parsed_url = urlparse(url) 55 | query_params = parse_qs(parsed_url.query) 56 | 57 | # Ensure empty parameters are handled 58 | for param in parsed_url.query.split("&"): 59 | key_value = param.split("=") 60 | if len(key_value) == 1 or key_value[1] == "": 61 | query_params[key_value[0]] = ["ibrahimXSS"] 62 | 63 | # Process each parameter individually 64 | for param in query_params: 65 | # Make a deep copy of query parameters to modify only one at a time 66 | modified_params = {k: v[:] for k, v in query_params.items()} 67 | 68 | # Temporarily set the current parameter to `ibrahimXSS` 69 | modified_params[param] = ['ibrahimXSS'] 70 | 71 | # Reconstruct the modified URL 72 | query = "&".join(f"{k}={','.join(v)}" if isinstance(v, list) else f"{k}={v}" for k, v in modified_params.items()) 73 | modified_url = urlunparse(parsed_url._replace(query=query)) 74 | 75 | # Make a request with a timeout 76 | response = requests.get(modified_url, timeout=TIMEOUT) 77 | 78 | # Check if 'ibrahimXSS' is reflected in the response 79 | if 'ibrahimXSS' in response.text: 80 | print(f"{GREEN}[+] Reflection found on {modified_url} for parameter '{BOLD_RED}{param}{RESET}'") 81 | 82 | # Save URL with {payload} replacing only the current parameter 83 | save_reflected_url(url, param, modified_params, output_file) 84 | 85 | except requests.exceptions.Timeout: 86 | print(f"{RED}[!] 
Connection Timeout: {url}{RESET}")
87 |         time.sleep(2)
88 |     except requests.exceptions.RequestException as e:
89 |         print(f"{RED}[!] Request Error: {url} - {str(e)}{RESET}")
90 |         time.sleep(2)
91 |     finally:
92 |         processed_urls += 1
93 |         print(f"{BLUE}[INFO] Progress: {processed_urls}/{total_urls} URLs processed.{RESET}")
94 | 
95 | def main():
96 |     global total_urls
97 | 
98 |     print_banner()
99 | 
100 |     parser = argparse.ArgumentParser(description="Reflection Checker")
101 |     parser.add_argument("file_path", type=str, help="Path to the text file containing URLs")
102 |     parser.add_argument("--threads", type=int, default=5, help="Number of threads to use (default: 5)")
103 | 
104 |     args = parser.parse_args()
105 | 
106 |     # Read URLs from the file
107 |     try:
108 |         with open(args.file_path, 'r') as f:
109 |             urls = [line.strip() for line in f if line.strip()]
110 |             total_urls = len(urls)
111 |     except Exception as e:
112 |         print(f"{RED}Error reading file: {str(e)}{RESET}")
113 |         return
114 | 
115 |     # Set the output file
116 |     output_file = 'xss.txt'
117 | 
118 |     # Clear previous results in the output file
119 |     open(output_file, 'w').close()
120 | 
121 |     threads = []
122 |     for url in urls:
123 |         while threading.active_count() - 1 >= args.threads:
124 |             time.sleep(0.05)  # Wait for an available thread slot without busy-spinning
125 | 
126 |         thread = threading.Thread(target=check_reflection, args=(url, output_file))
127 |         threads.append(thread)
128 |         thread.start()
129 | 
130 |     for thread in threads:
131 |         thread.join()
132 | 
133 | if __name__ == "__main__":
134 |     main()
135 | 
--------------------------------------------------------------------------------
/xss0rRecon.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # Install prerequisite packages
4 | # Check if python3-venv is installed
5 | if ! dpkg -l | grep -q python3-venv; then
6 |     echo "python3-venv not found. Installing..."
7 |     sudo apt install -y python3-venv
8 | else
9 |     echo "python3-venv is already installed."
10 | fi
11 | 
12 | # Create and activate virtual environment
13 | python3 -m venv .venv
14 | source .venv/bin/activate
15 | 
16 | # Function to handle errors with manual installation solutions
17 | handle_error_with_solution() {
18 |     echo -e "${RED}Error occurred during the execution of $1. Exiting step but continuing with the next installation.${NC}"
19 |     echo "Error during: $1" >> error.log
20 |     echo -e "${YELLOW}Possible solution for manual installation:${NC}"
21 |     echo -e "${BOLD_WHITE}$2${NC}"
22 | }
23 | 
24 | # Define colors
25 | BOLD_WHITE='\033[1;97m'
26 | BOLD_BLUE='\033[1;34m'
27 | RED='\033[0;31m'
28 | YELLOW='\033[1;33m'
29 | CYAN='\033[0;36m'
30 | NC='\033[0m' # No Color
31 | 
32 | # Function to handle errors
33 | handle_error() {
34 |     echo -e "${RED}Error occurred during the execution of $1. Exiting.${NC}"
35 |     echo "Error during: $1" >> error.log
36 |     exit 1
37 | }
38 | 
39 | # Function to show progress with emoji
40 | show_progress() {
41 |     echo -e "${BOLD_BLUE}Current process: $1...⌛️${NC}"
42 | }
43 | 
44 | # Function to check if a command exists and is executable
45 | check_command() {
46 |     if ! 
command -v "$1" &> /dev/null; then 47 | echo -e "${RED}$1 could not be found or is not installed correctly.${NC}" 48 | handle_error "$1 installation check" 49 | else 50 | echo -e "${BOLD_BLUE}$1 installed correctly.${NC}" 51 | fi 52 | } 53 | 54 | # Clear the terminal 55 | clear 56 | 57 | # Display banner 58 | echo -e "${BOLD_BLUE}" 59 | echo " ___ ____ _____ " 60 | echo "__ _____ ___ / _ \ _ __ | _ \ ___ ___ ___ _ __ __ _|___ / " 61 | echo "\ \/ / __/ __| | | | '__| | |_) / _ \/ __/ _ \| '_ \ \ \ / / |_ \ " 62 | echo " > <\__ \__ \ |_| | | | _ < __/ (_| (_) | | | | \ V / ___) |" 63 | echo "/_/\_\___/___/\___/|_| |_| \_\___|\___\___/|_| |_| \_/ |____/ " 64 | echo " xss0r Recon v3" 65 | echo -e "${NC}" 66 | 67 | # Centered Contact Information 68 | echo -e "${BOLD_BLUE} Website: store.xss0r.com${NC}" 69 | echo -e "${BOLD_BLUE} Free BlindXSS Testing: xss0r.com${NC}" 70 | echo -e "${BOLD_BLUE} X: x.com/xss0r${NC}" 71 | 72 | # Function to display options 73 | display_options() { 74 | echo -e "${BOLD_BLUE}Please select an option:${NC}" 75 | echo -e "${RED}1: Install all tools${NC}" 76 | echo -e "${RED}2: Enter a domain name of the target${NC}" 77 | echo -e "${YELLOW}3: Domain Enumeration and Filtering${NC}" 78 | echo -e "${YELLOW}4: URL Crawling and Filtering${NC}" 79 | echo -e "${YELLOW}5: In-depth URL Filtering${NC}" 80 | echo -e "${YELLOW}6: HiddenParamFinder${NC}" 81 | echo -e "${YELLOW}7: Preparing for XSS Detection and Query String URL Analysis${NC}" 82 | echo -e "${YELLOW}8: Launching xss0r Tool${NC}" 83 | echo -e "${YELLOW}9: Exit${NC}" 84 | echo -e "${YELLOW}10: Guide to Deploying xss0r on VPS Servers${NC}" 85 | echo -e "${YELLOW}11: Path-based XSS${NC}" 86 | echo -e "${YELLOW}12: Domains Search Inputs${NC}" 87 | } 88 | 89 | 90 | # Function to display Guide to Deploying xss0r on VPS Servers information with better formatting and crystal-like color 91 | show_vps_info() { 92 | echo -e "${CYAN}To run xss0r continuously on bug bounty programs and keep it running in the background, a VPS server is highly recommended.${NC}" 93 | echo -e "${CYAN}I personally recommend Contabo, which I've been using for the past three years. 
It has performed reliably without any restrictions.${NC}"
94 |     echo -e "${CYAN}Additionally, the pricing is very competitive.${NC}\n"
95 | 
96 |     echo -e "${CYAN}Here is the link to purchase the Contabo VPS 2 server with Debian OS:${NC}"
97 |     echo -e "${CYAN}Make sure to select DEBIAN OS 12 under the OS settings:${NC}"
98 |     echo -e "${CYAN}https://contabo.com/en/vps/cloud-vps-2/?image=debian.329&qty=1&contract=1&storage-type=vps2-400-gb-sdd${NC}\n"
99 |     echo -e "${CYAN}You can select any plan from Contabo Hosting https://contabo.com/en/vps/${NC}\n"
100 | 
101 |     echo -e "${CYAN}After completing the purchase, you can expect to receive your credentials via email within 15 minutes to 3 hours.${NC}\n"
102 | 
103 |     echo -e "${CYAN}Next, update your VPS and install tmux to allow xss0r to run in the background.${NC}\n"
104 | 
105 |     echo -e "${CYAN}Below are the essential tmux commands:${NC}\n"
106 | 
107 |     echo -e "${CYAN}#### Start a new tmux session:${NC}"
108 |     echo -e "${CYAN}apt install tmux                    # Install tmux${NC}"
109 |     echo -e "${CYAN}tmux new-session -s xss0r           # Create a new tmux session${NC}"
110 |     echo -e "${CYAN}tmux attach-session -t xss0r        # Reattach to an existing tmux session from another terminal tab${NC}"
111 |     echo -e "${CYAN}tmux detach -s xss0r                # Detach from the tmux session${NC}"
112 |     echo -e "${CYAN}tmux kill-session -t xss0r          # Terminate the xss0r tmux session${NC}"
113 |     echo -e "${CYAN}tmux kill-server                    # Terminate all tmux sessions${NC}"
114 |     echo -e "${CYAN}tmux ls                             # List all active tmux sessions${NC}\n"
115 | 
116 |     echo -e "${CYAN}#### Install and Configure Cockpit https://YourVpsIP:9090${NC}"
117 |     echo -e "${CYAN}#### Cockpit is a web GUI for SSH with many features, like Navigator (a file manager) for quick file uploads/downloads:${NC}"
118 |     echo -e "${CYAN}sudo apt install cockpit cockpit-podman -y    # Install Cockpit and Podman support${NC}"
119 |     echo -e "${CYAN}sudo systemctl start cockpit                  # Start Cockpit service${NC}"
120 |     echo -e "${CYAN}sudo systemctl enable cockpit                 # Enable Cockpit to start on boot${NC}"
121 |     echo -e "${CYAN}sudo apt install ufw -y                       # Install UFW firewall${NC}"
122 |     echo -e "${CYAN}sudo ufw enable                               # Enable UFW firewall${NC}"
123 |     echo -e "${CYAN}sudo ufw allow 9090                           # Allow Cockpit access on port 9090${NC}"
124 |     echo -e "${CYAN}sudo ufw allow 80                             # Allow HTTP access${NC}"
125 |     echo -e "${CYAN}sudo ufw allow 22                             # Allow SSH access${NC}"
126 |     echo -e "${CYAN}sudo ufw allow 3389                           # Allow RDP access${NC}"
127 |     echo -e "${CYAN}sudo ufw reload                               # Reload UFW rules${NC}\n"
128 |     echo -e "${CYAN}sudo ufw allow 22/tcp                         # Allow SSH over TCP${NC}\n"
129 |     echo -e "${CYAN}sudo ufw allow ssh                            # Allow SSH${NC}"
130 |     echo -e "${CYAN}# Configure Cockpit to allow unencrypted access and root login:${NC}"
131 |     echo -e "${CYAN}sudo nano /etc/cockpit/cockpit.conf           # Add settings to cockpit.conf${NC}"
132 |     echo -e "${CYAN}[WebService]\nAllowUnencrypted = true\nLogin = root${NC}\n"   # Configuration content for cockpit.conf
133 |     echo -e "${CYAN}sudo systemctl restart cockpit                # Restart Cockpit service to apply changes${NC}"
134 |     echo -e "${CYAN}sudo apt-get upgrade cockpit                  # Upgrade Cockpit${NC}"
135 |     echo -e "${CYAN}sudo nano /etc/cockpit/disallowed-users       # Delete 'root' user from disallowed-users${NC}"
136 |     echo -e "${CYAN}sudo nano /etc/pam.d/cockpit                  # Comment out the pam_listfile.so item=user sense=deny line${NC}"
137 |     echo -e "${CYAN}sudo mkdir -p /etc/cockpit/ws-certs.d         # Create directory for certificates${NC}"
138 |     echo -e "${CYAN}sudo rm /etc/cockpit/ws-certs.d/0-self-signed.cert # Remove 
self-signed cert${NC}" 139 | echo -e "${CYAN}sudo systemctl restart cockpit # Restart Cockpit service${NC}\n" 140 | 141 | echo -e "${CYAN}#### Install Cockpit Navigator Plugin & ZIP archive:${NC}" 142 | echo -e "${CYAN}sudo apt-get install rsync zip # Install rsync zip${NC}" 143 | echo -e "${CYAN}sudo apt-get install unzip # Install unzip ${NC}" 144 | echo -e "${CYAN}sudo apt-get install p7zip-full # Install p7zip${NC}" 145 | echo -e "${CYAN}wget https://github.com/45Drives/cockpit-navigator/releases/download/v0.5.10/cockpit-navigator_0.5.10-1focal_all.deb # Download Cockpit Navigator${NC}" 146 | echo -e "${CYAN}sudo dpkg -i cockpit-navigator_0.5.10-1focal_all.deb # Install Cockpit Navigator${NC}" 147 | echo -e "${CYAN}Navigate to https://YourVpsIP:9090/navigator # Access Cockpit Navigator in your browser${NC}\n" 148 | 149 | echo -e "${CYAN}#### Install Kali Linux and Desktop Environment:${NC}" 150 | echo -e "${CYAN}sudo nano /etc/apt/sources.list # Add Kali repository to sources.list${NC}" 151 | echo -e "${CYAN}deb http://http.kali.org/kali kali-rolling main contrib non-free non-free-firmware\n" 152 | echo -e "${CYAN}wget -q -O - https://archive.kali.org/archive-key.asc | gpg --dearmor > /etc/apt/trusted.gpg.d/kali-archive-keyring.gpg # Import Kali keyring${NC}" 153 | echo -e "${CYAN}sudo apt update # Update package list${NC}" 154 | echo -e "${CYAN}sudo apt upgrade # Upgrade packages${NC}" 155 | echo -e "${CYAN}sudo apt full-upgrade # Full system upgrade${NC}" 156 | echo -e "${CYAN}sudo apt dist-upgrade # Distribution upgrade${NC}" 157 | echo -e "${CYAN}sudo apt -y install kali-linux-everything # Install all Kali tools${NC}" 158 | echo -e "${CYAN}sudo apt install kali-desktop-gnome # Install Kali GNOME Desktop${NC}" 159 | echo -e "${CYAN}sudo apt install kali-linux-default # Install default Kali packages${NC}" 160 | echo -e "${CYAN}sudo apt update --fix-missing # Fix missing dependencies${NC}" 161 | echo -e "${CYAN}sudo apt --fix-broken install # Fix broken installations${NC}" 162 | echo -e "${CYAN}sudo dpkg --configure -a # Reconfigure dpkg${NC}" 163 | echo -e "${CYAN}sudo update-alternatives --config x-session-manager # Configure session manager${NC}" 164 | echo -e "${CYAN}sudo apt -y install kali-root-login # Enable root login${NC}" 165 | echo -e "${CYAN}sudo passwd # Set root password${NC}" 166 | echo -e "${CYAN}sudo apt autoremove # Remove unnecessary packages${NC}" 167 | echo -e "${CYAN}sudo apt clean # Clean up package cache${NC}\n" 168 | echo -e "${CYAN}sudo reboot # Update changes to VPS server${NC}\n" 169 | 170 | 171 | # Steps for installing xss0r on VPS 172 | echo -e "${CYAN}#### Steps for installing xss0r on VPS:${NC}" 173 | 174 | echo -e "${CYAN}1. Install Cockpit ${NC} # Install Cockpit for VPS management" 175 | echo -e "${CYAN}2. Install Debian ${NC} # Install the Debian OS" 176 | echo -e "${CYAN}3. nano /etc/apt/sources.list ${NC} # Edit source list in Debian OS" 177 | echo -e "${CYAN}4. deb http://asi-fs-d.contabo.net/debian bookworm main non-free-firmware ${NC} # Change 'bookworm' to 'testing'" 178 | echo -e "${CYAN}5. deb-src http://asi-fs-d.contabo.net/debian bookworm main non-free-firmware ${NC} # Change 'bookworm' to 'testing'" 179 | echo -e "${CYAN}6. Update & Upgrade ${NC} # sudo apt update && sudo apt install libc6 -y && sudo apt install gnome -y" 180 | echo -e "${CYAN}7. Install Kali OS ${NC} # Not needed any changes except updates & upgrades" 181 | echo -e "${CYAN}8. Upload all files to your VPS ${NC} # Upload xss0r + xss0rRecon files" 182 | echo -e "${CYAN}9. 
chmod +x xss0r ${NC}                          # Add execute permission to the xss0r tool"
183 |     echo -e "${CYAN}10. Install required Chrome version from the eBook ${NC}    # Install the required Chrome version as outlined in the eBook"
184 |     echo -e "${CYAN}11. Run xss0r and enter API License ${NC}                    # Run the xss0r tool and enter your API license"
185 |     echo -e "${CYAN}12. Run xss0rRecon and install all tools ${NC}               # Run xss0rRecon and install the necessary tools"
186 |     echo -e "${CYAN}13. Ensure all files are in the same folder ${NC}            # Make sure all files are inside the same folder"
187 |     echo -e "${CYAN}14. Run xss0r tool ${NC}                                     # Launch and run the xss0r tool"
188 | 
189 | }
190 | 
191 | # Initialize a variable for the domain name
192 | domain_name=""
193 | last_completed_option=1
194 | skip_order_check_for_option_4=false
195 | total_merged_urls=0
196 | 
197 | # Function to run step 1 (Install all tools)
198 | install_tools() {
199 |     # Find the current directory path
200 |     CURRENT_DIR=$(pwd)
201 | 
202 |     echo -e "${BOLD_WHITE}You selected: Install all tools${NC}"
203 | 
204 |     show_progress "Installing dependencies"
205 |     sudo apt-mark hold google-chrome-stable
206 |     sudo apt install -y git
207 |     sudo apt update && sudo apt install needrestart -y && sudo apt upgrade -y -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" && sudo apt dist-upgrade -y -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-confdef" && sudo dpkg --configure -a && sudo apt -f install -y && sudo needrestart -q -n && sudo apt update --fix-missing
208 |     # Check if the OS is Ubuntu
209 |     if grep -qi "ubuntu" /etc/*release; then
210 |         echo "Ubuntu detected! Running installation commands..."
211 | 
212 |         # Update and upgrade packages
213 |         apt update && apt upgrade -y
214 | 
215 |         # Install required dependencies
216 |         apt install software-properties-common -y
217 | 
218 |         # Add the deadsnakes PPA
219 |         add-apt-repository ppa:deadsnakes/ppa -y
220 | 
221 |         # Update package list again
222 |         apt update
223 | 
224 |         # Install Python 3.12
225 |         apt install python3.12 -y
226 | 
227 |         # Verify installation
228 |         sudo update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.12 1
229 |         sudo update-alternatives --config python3
230 |         sudo ln -sf /usr/bin/python3 /usr/bin/python
231 |         sudo apt install --reinstall python3-apt
232 |         sudo apt install python3-distutils
233 |         curl -sS https://bootstrap.pypa.io/get-pip.py | python3
234 |         sudo apt install --reinstall python3-pip
235 |         python3.12 --version
236 |     else
237 |         echo "This is not an Ubuntu system. Skipping installation."
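        # Optional sanity check (a minimal sketch added here; it assumes python3 and pip
        # were provided by the distribution or by the Ubuntu branch above). Confirming the
        # interpreter and pip that the later pip/pipx steps will use is a harmless no-op:
        python3 --version && python3 -m pip --version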
238 | fi 239 | sudo apt install python3.12-venv 240 | python3 -m venv .venv 241 | source .venv/bin/activate 242 | sudo apt install -y python3-pip 243 | sudo apt upgrade python3 244 | sudo apt install pip 245 | sudo apt install pip3 246 | pip3 install requests urllib3 247 | sudo pip uninstall -y subprober subdominator dnsbruter --break-system-packages 248 | pip install aiosqlite 249 | sudo apt install -y python3.12 250 | sudo apt install -y build-essential libssl-dev zlib1g-dev libncurses5-dev libnss3-dev libsqlite3-dev libreadline-dev libffi-dev curl libbz2-dev make 251 | sudo apt install -y pkg-config 252 | sudo apt install -y libssl-dev libffi-dev 253 | sudo pip install colorama --break-system-packages 254 | sudo pip install aiodns --break-system-packages 255 | sudo pip install aiofiles --break-system-packages 256 | sudo pip install -U bs4 --break-system-packages 257 | sudo pip install -U lxml --break-system-packages 258 | sudo pip install --upgrade cython 259 | sudo pip install aiojarm --break-system-packages 260 | sudo pip install playwright --break-system-packages 261 | sudo pip install subprober --break-system-packages --no-deps anyio==4.6.2 262 | sudo pip install uvloop --break-system-packages 263 | sudo pip install -U bs4 --break-system-packages 264 | sudo pip install -U lxml --break-system-packages 265 | sudo apt --fix-broken install 266 | sudo apt install -y python3 python3-pip python3-venv python3-setuptools git wget curl 267 | sudo apt-get install -y rsync zip unzip p7zip-full golang-go terminator pipx tmux 268 | 269 | # Remove conflicting package if it exists 270 | sudo apt remove -y python3-structlog 271 | 272 | # Set full permissions for the xss0rRecon script 273 | sudo chmod 755 xss0rRecon.sh 274 | 275 | # Step 1: Install Python3 virtual environment and structlog in venv 276 | show_progress "Installing python3-venv and setting up virtual environment" 277 | 278 | # Upgrade pip 279 | sudo pip install --upgrade pip 280 | sudo pip install tldextract --break-system-packages 281 | sudo pip install structlog requests uvloop setuptools pipx 282 | 283 | # Install necessary Python packages within the virtual environment 284 | sudo pip install structlog requests uvloop setuptools 285 | 286 | # Install pipx within the virtual environment 287 | sudo pip install pipx 288 | sudo pip install asynciolimiter 289 | sudo pip install aiojarm 290 | sudo pip install playwright 291 | 292 | 293 | # Install Dnsbruter, Subdominator, SubProber within the virtual environment 294 | sudo pip install git+https://github.com/RevoltSecurities/Dnsbruter 295 | sudo pip install git+https://github.com/RevoltSecurities/Subdominator --break-system-packages 296 | sudo pip install git+https://github.com/RevoltSecurities/Subdominator --no-deps httpx==0.25.2 297 | pipx install git+https://github.com/RevoltSecurities/Subdominator 298 | sudo pip install git+https://github.com/RevoltSecurities/Subprober --break-system-packages 299 | sudo pip install git+https://github.com/RevoltSecurities/Subprober --break-system-packages 300 | sudo pip install subprober --break-system-packages --no-deps anyio==4.6.2 301 | sudo pip install git+https://github.com/RevoltSecurities/Subprober.git --no-deps aiojarm 302 | sudo pip install git+https://github.com/RevoltSecurities/Subprober.git --no-deps playwright 303 | pipx install git+https://github.com/RevoltSecurities/Subprober --break-system-packages 304 | 305 | # Install Uro, Arjun, and other required Python packages 306 | sudo pip install uro 307 | sudo pip install arjun 308 | sudo pip 
install alive_progress ratelimit 309 | 310 | # Add Go bin to PATH 311 | export PATH=$PATH:$(go env GOPATH)/bin 312 | 313 | # Dynamically set the PATH based on the current user 314 | if [ "$EUID" -eq 0 ]; then 315 | echo "You are the root user." 316 | export PATH="$PATH:/root/.local/bin" 317 | else 318 | # Detect the username of the home user 319 | USERNAME=$(whoami) 320 | echo "You are the home user: $USERNAME" 321 | export PATH="$PATH:/home/$USERNAME/.local/bin" 322 | fi 323 | 324 | # Sleep for 3 seconds 325 | sleep 3 326 | 327 | # Print the updated PATH for confirmation 328 | echo "Updated PATH: $PATH" 329 | 330 | # Display installed tools 331 | echo -e "${BOLD_BLUE}All tools have been successfully installed within the virtual environment.${NC}" 332 | 333 | 334 | # Sleep for 3 seconds 335 | sleep 3 336 | 337 | # Print the updated PATH for confirmation 338 | echo "Updated PATH: $PATH" 339 | 340 | # Step 2: Install the latest version of pip 341 | show_progress "Installing/Upgrading pip" 342 | sudo apt update && sudo apt install python3-pip -y 343 | sudo pip3 install --upgrade pip --root-user-action=ignore 344 | sudo pip install tldextract --break-system-packages 345 | echo "managed by system package manager" | sudo tee /usr/lib/python3.12/EXTERNALLY-MANAGED 346 | sleep 3 347 | 348 | # Step 3: Install Go 349 | show_progress "Installing Go 1.22.5" 350 | 351 | # Step 1: Remove any existing Go installations 352 | echo "Removing existing Go installations and cache..." 353 | sudo apt remove --purge golang -y 354 | sudo apt autoremove --purge -y 355 | sudo apt clean 356 | sudo rm -rf /usr/local/go /usr/bin/go /usr/local/bin/go /root/go ~/go ~/.cache/go-build ~/.config/go ~/.config/gopls 357 | 358 | # Remove Go from PATH if previously added 359 | export PATH=$(echo "$PATH" | sed -e 's|:/usr/local/go/bin||' -e 's|:$HOME/go/bin||') 360 | 361 | # Confirm removal 362 | echo "Existing Go installations removed." 363 | 364 | # Step 2: Download and Install Go 365 | echo "Downloading Go 1.22.5..." 366 | sudo apt install golang -y 367 | wget https://go.dev/dl/go1.22.5.linux-amd64.tar.gz 368 | 369 | echo "Installing Go 1.22.5..." 370 | sudo tar -C /usr/local -xzf go1.22.5.linux-amd64.tar.gz 371 | 372 | # Clean up the downloaded tarball 373 | sudo rm -r go1.22.5.linux-amd64.tar.gz 374 | 375 | # Step 3: Set up environment variables 376 | echo "Configuring Go environment..." 377 | echo 'export PATH=$PATH:/usr/local/go/bin' | sudo tee -a /etc/profile.d/go.sh 378 | echo 'export GOPATH=$HOME/go' | sudo tee -a /etc/profile.d/go.sh 379 | echo 'export PATH=$PATH:$GOPATH/bin' | sudo tee -a /etc/profile.d/go.sh 380 | 381 | # Apply environment changes immediately 382 | source /etc/profile.d/go.sh 383 | 384 | # Make Go available globally for all users 385 | sudo ln -sf /usr/local/go/bin/go /usr/bin/go 386 | sudo ln -sf /usr/local/go/bin/gofmt /usr/bin/gofmt 387 | 388 | # Step 4: Verify the installation 389 | echo "Verifying Go installation..." 390 | if go version; then 391 | echo -e "Go 1.22.5 has been successfully installed and configured." 392 | else 393 | echo -e "Failed to install Go. Please check for errors and retry." 394 | exit 1 395 | fi 396 | 397 | # Step 5: Install dependencies for GVM (optional, for managing multiple Go versions) 398 | echo "Installing dependencies for GVM..." 399 | sudo apt install -y curl git mercurial make binutils bison gcc build-essential 400 | 401 | # Step 6: (Optional) Install and Configure GVM for Version Management 402 | echo "Installing GVM..." 403 | if [ ! 
-d "$HOME/.gvm" ]; then 404 | bash < <(curl -sSL https://raw.githubusercontent.com/moovweb/gvm/master/binscripts/gvm-installer) 405 | source ~/.gvm/scripts/gvm 406 | gvm install go1.22.5 407 | gvm use go1.22.5 --default 408 | else 409 | echo "GVM is already installed." 410 | fi 411 | 412 | # Final Step: Clean Go cache 413 | go clean 414 | echo "Go installation complete!" 415 | 416 | # Check if Go is installed and its version 417 | echo "Checking Go version..." 418 | if command -v go &> /dev/null; then 419 | GO_VERSION=$(go version) 420 | if [[ $GO_VERSION == go\ version\ go* ]]; then 421 | echo "Go is installed: $GO_VERSION" 422 | else 423 | echo "Go command exists, but the version could not be determined." 424 | fi 425 | else 426 | echo "Go is not installed on this system." 427 | fi 428 | # Confirm successful installation 429 | echo -e "${BOLD_BLUE}Go has been successfully installed and configured.${NC}" 430 | 431 | # Sleep to allow changes to take effect 432 | sleep 3 433 | 434 | # Install Python 3.12 435 | sudo apt install python3.12 -y 436 | 437 | # Install pip for Python 3.12 438 | curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py 439 | python3.12 get-pip.py 440 | 441 | # Install pipx and ensure it's in the PATH 442 | pip install pipx==1.7.1 --break-system-packages --root-user-action=ignore 443 | pipx ensurepath 444 | 445 | # Verify Python, pip, and pipx installations 446 | python3 --version 447 | pip --version 448 | pipx --version 449 | sudo pip install structlog requests 450 | sudo pip install --upgrade pip 451 | sudo pip install --upgrade pipx 452 | sudo apt install pipx -y 453 | pipx ensurepath 454 | subprober -up 455 | cp /root/.local/bin/subprober /usr/local/bin 456 | 457 | # Step 4: Install Dnsbruter (Skip if already installed) 458 | if ! command -v dnsbruter &> /dev/null; then 459 | show_progress "Installing Dnsbruter" 460 | 461 | # Try installing directly with pip 462 | python3 -m venv .venv 463 | source .venv/bin/activate 464 | sudo pip install --no-deps --force-reinstall --break-system-packages git+https://github.com/RevoltSecurities/Dnsbruter 465 | pipx install git+https://github.com/RevoltSecurities/Dnsbruter.git 466 | 467 | 468 | # Check if the installation was successful 469 | python3 -m venv .venv 470 | source .venv/bin/activate 471 | python3 -m pip install --upgrade dnsbruter 472 | python3 -m pip install --break-system-packages --upgrade dnsbruter 473 | dnsbruter -up 474 | if ! pip show dnsbruter &> /dev/null; then 475 | echo "Direct installation failed. Attempting installation via cloning the repository." 476 | 477 | # Clone the repository and install from source 478 | git clone https://github.com/RevoltSecurities/Dnsbruter.git 479 | cd Dnsbruter || exit 480 | 481 | # Install from the local cloned repository 482 | sudo pip install . --break-system-packages --root-user-action=ignore 483 | python3 -m venv .venv 484 | source .venv/bin/activate 485 | python3 -m pip install --upgrade dnsbruter 486 | python3 -m pip install --break-system-packages --upgrade dnsbruter 487 | dnsbruter -up 488 | 489 | # Clean up by removing the cloned directory after installation 490 | cd .. 491 | sudo rm -rf Dnsbruter 492 | else 493 | echo "Dnsbruter installed successfully using pip." 494 | fi 495 | 496 | # Final check to ensure dnsbruter is accessible globally 497 | if command -v dnsbruter &> /dev/null; then 498 | echo "Dnsbruter is successfully installed and globally available." 499 | dnsbruter -up && dnsbruter -h 500 | else 501 | echo "Dnsbruter installation failed. 
Please check the installation steps." 502 | fi 503 | 504 | show_progress "Dnsbruter installation complete." 505 | sleep 3 506 | python3 -m venv .venv 507 | source .venv/bin/activate 508 | sudo pip3 install dnsbruter "aiodns>=3.2.0" "aiofiles>=24.1.0" "alive_progress>=3.2.0" "art>=6.1" "asynciolimiter>=1.1.0.post3" "colorama>=0.4.4" "requests>=2.32.3" "setuptools>=75.2.0" "uvloop>=0.21.0" 509 | 510 | else 511 | show_progress "Dnsbruter is already installed. Skipping installation." 512 | fi 513 | 514 | # Step 5: Install Subdominator (Skip if the folder already exists) 515 | if [ ! -d "Subdominator" ]; then 516 | show_progress "Installing Subdominator" 517 | 518 | # Try installing directly with pip 519 | python3 -m venv .venv 520 | source .venv/bin/activate 521 | sudo pip uninstall uvloop -y && sudo pip3 uninstall uvloop -y && sudo pipx uninstall uvloop || true && sudo pip install uvloop --break-system-packages 522 | sudo pip install --upgrade aiodns pycares --break-system-packages 523 | sudo pip install git+https://github.com/RevoltSecurities/Subdominator --break-system-packages --root-user-action=ignore 524 | sudo pip install git+https://github.com/RevoltSecurities/Subdominator --no-deps httpx==0.25.2 525 | 526 | # Check if the installation was successful 527 | if ! pip show subdominator &> /dev/null; then 528 | echo "Direct installation failed. Attempting installation via cloning the repository." 529 | 530 | # Clone the repository and install from source 531 | git clone https://github.com/RevoltSecurities/Subdominator.git 532 | cd Subdominator || exit 533 | 534 | # Install from local cloned repository 535 | sudo pip install . --break-system-packages --root-user-action=ignore 536 | subdominator -up 537 | 538 | # Clean up by removing the cloned directory after installation 539 | cd .. 540 | sudo rm -rf Subdominator 541 | python3 -m venv .venv 542 | source .venv/bin/activate 543 | sudo pipx inject subdominator "aiofiles>=23.2.1" "aiosqlite" "aiohttp>=3.9.4" "appdirs>=1.4.4" "httpx>=0.27.2" "art>=6.1" "beautifulsoup4>=4.11.1" "colorama>=0.4.6" "fake_useragent>=1.5.0" "PyYAML>=6.0.1" "requests>=2.31.0" "rich>=13.7.1" "urllib3>=1.26.18" "tldextract>=5.1.2" 544 | 545 | else 546 | echo "Subdominator installed successfully using pip." 547 | fi 548 | 549 | show_progress "Subdominator installation complete." 550 | sleep 3 551 | else 552 | show_progress "Subdominator is already installed. Skipping installation." 553 | fi 554 | 555 | # Step 6: Install SubProber (Skip if the folder already exists) 556 | if [ ! -d "SubProber" ]; then 557 | show_progress "Installing SubProber" 558 | 559 | # Try installing directly with pip 560 | python3 -m venv .venv 561 | source .venv/bin/activate 562 | sudo pip install git+https://github.com/RevoltSecurities/Subprober --break-system-packages --root-user-action=ignore 563 | pipx install git+https://github.com/RevoltSecurities/Subprober.git 564 | 565 | # Check if the installation was successful 566 | if ! pip show subprober &> /dev/null; then 567 | echo "Direct installation failed. Attempting installation via cloning the repository." 568 | 569 | # Clone the repository and install from source 570 | git clone https://github.com/RevoltSecurities/Subprober.git 571 | cd Subprober || exit 572 | 573 | # Install from local cloned repository 574 | sudo pip install . --break-system-packages --root-user-action=ignore 575 | pip install subprober aiojarm 576 | subprober -up 577 | 578 | # Clean up by removing the cloned directory after installation 579 | cd .. 
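        # Having installed from the local clone with pip, the checkout itself is no longer
        # needed, so the directory is deleted below to keep the recon folder clean.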
580 | sudo rm -rf Subprober 581 | cp /root/.local/bin/subprober /usr/local/bin 582 | else 583 | echo "SubProber installed successfully using pip." 584 | fi 585 | 586 | show_progress "SubProber installation complete." 587 | python3 -m venv .venv 588 | source .venv/bin/activate 589 | sudo pip3 install --break-system-packages "subprober" "aiodns>=3.2.0" "aiofiles>=24.1.0" "aiojarm>=0.2.2" "alive_progress>=3.2.0" "appdirs>=1.4.4" "art>=6.4" "asynciolimiter>=1.1.1" "beautifulsoup4>=4.12.3" "colorama>=0.4.6" "cryptography>=44.0.0" "fake_useragent>=1.5.1" "httpx>=0.28.1" "mmh3>=5.0.1" "playwright>=1.49.1" "requests>=2.32.3" "rich>=13.9.4" "setuptools>=75.2.0" "simhash>=2.1.2" "urllib3>=1.26.18" "uvloop>=0.21.0" "websockets>=14.1" "bs4>=0.0.2" "lxml>=5.3.0" 590 | for t in dnsbruter subdominator subprober; do [ -f "$HOME/.local/bin/$t" ] && [ "$HOME/.local/bin/$t" != "/usr/local/bin/$t" ] && sudo cp "$HOME/.local/bin/$t" /usr/local/bin/; done 591 | pwd && ORIGIN="$(pwd)" && cd "$ORIGIN/.venv/bin" && sudo cp * /usr/local/bin && cd "$ORIGIN" 592 | pip install subprober 593 | sleep 3 594 | else 595 | show_progress "SubProber is already installed. Skipping installation." 596 | fi 597 | 598 | # Step 7: Install GoSpider 599 | python3 -m venv .venv 600 | source .venv/bin/activate 601 | show_progress "Installing GoSpider" 602 | 603 | 604 | # Attempt to install GoSpider using 'go install' 605 | echo -e "${BOLD_WHITE}Attempting to install GoSpider using 'go install'...${NC}" 606 | if go install github.com/jaeles-project/gospider@latest; then 607 | echo -e "${BOLD_BLUE}GoSpider installed successfully via 'go install'.${NC}" 608 | 609 | # Copy the binary to /usr/local/bin for system-wide access 610 | sudo cp "$(go env GOPATH)/bin/gospider" /usr/local/bin/ 611 | else 612 | echo -e "${YELLOW}Failed to install GoSpider via 'go install'. Attempting to install from source...${NC}" 613 | 614 | # Clone the GoSpider repository 615 | git clone https://github.com/jaeles-project/gospider.git 616 | cd gospider 617 | 618 | # Build the GoSpider binary 619 | if go build; then 620 | chmod +x gospider 621 | sudo mv gospider /usr/local/bin/ 622 | echo -e "${BOLD_BLUE}GoSpider installed successfully from source.${NC}" 623 | cd .. 624 | sudo rm -rf gospider 625 | else 626 | echo -e "${RED}Failed to build GoSpider from source.${NC}" 627 | cd .. 628 | rm -rf gospider 629 | exit 1 630 | fi 631 | fi 632 | 633 | # Ensure /usr/local/bin is in PATH 634 | if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then 635 | export PATH="$PATH:/usr/local/bin" 636 | fi 637 | 638 | # Verify that GoSpider is accessible 639 | if ! command -v gospider &> /dev/null; then 640 | echo -e "${RED}GoSpider is not in your PATH. Please ensure /usr/local/bin is in your PATH.${NC}" 641 | exit 1 642 | fi 643 | 644 | sleep 3 645 | 646 | # Step 8: Install Hakrawler 647 | python3 -m venv .venv 648 | source .venv/bin/activate 649 | show_progress "Installing Hakrawler" 650 | 651 | 652 | # Attempt to install Hakrawler using 'go install' 653 | echo -e "${BOLD_WHITE}Attempting to install Hakrawler using 'go install'...${NC}" 654 | if go install github.com/hakluke/hakrawler@latest; then 655 | echo -e "${BOLD_BLUE}Hakrawler installed successfully via 'go install'.${NC}" 656 | 657 | # Copy the binary to /usr/local/bin for system-wide access 658 | sudo cp "$(go env GOPATH)/bin/hakrawler" /usr/local/bin/ 659 | else 660 | echo -e "${YELLOW}Failed to install Hakrawler via 'go install'. 
Attempting to install from source...${NC}" 661 | 662 | # Clone the Hakrawler repository 663 | git clone https://github.com/hakluke/hakrawler.git 664 | cd hakrawler 665 | 666 | # Build the Hakrawler binary 667 | if go build; then 668 | chmod +x hakrawler 669 | sudo mv hakrawler /usr/local/bin/ 670 | echo -e "${BOLD_BLUE}Hakrawler installed successfully from source.${NC}" 671 | cd .. 672 | sudo rm -rf hakrawler 673 | else 674 | echo -e "${RED}Failed to build Hakrawler from source.${NC}" 675 | cd .. 676 | rm -rf hakrawler 677 | exit 1 678 | fi 679 | fi 680 | 681 | # Ensure /usr/local/bin is in PATH 682 | if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then 683 | export PATH="$PATH:/usr/local/bin" 684 | fi 685 | 686 | # Verify that Hakrawler is accessible 687 | if ! command -v hakrawler &> /dev/null; then 688 | echo -e "${RED}Hakrawler is not in your PATH. Please ensure /usr/local/bin is in your PATH.${NC}" 689 | exit 1 690 | fi 691 | 692 | sleep 3 693 | 694 | 695 | # Step 8.1: Install URLFinder 696 | python3 -m venv .venv 697 | source .venv/bin/activate 698 | show_progress "Installing URLFinder" 699 | 700 | 701 | # Attempt to install URLFinder using 'go install' 702 | echo -e "${BOLD_WHITE}Attempting to install URLFinder using 'go install'...${NC}" 703 | if go install -v github.com/projectdiscovery/urlfinder/cmd/urlfinder@latest; then 704 | echo -e "${BOLD_BLUE}URLFinder installed successfully via 'go install'.${NC}" 705 | 706 | # Copy the binary to /usr/local/bin for system-wide access 707 | sudo cp "$(go env GOPATH)/bin/urlfinder" /usr/local/bin/ 708 | else 709 | echo -e "${YELLOW}Failed to install URLFinder via 'go install'. Attempting to install manually...${NC}" 710 | 711 | # Clone the URLFinder repository 712 | git clone https://github.com/projectdiscovery/urlfinder.git 713 | cd urlfinder/cmd/urlfinder 714 | 715 | # Build the URLFinder binary 716 | if go build; then 717 | chmod +x urlfinder 718 | sudo cp urlfinder /usr/local/bin/ 719 | echo -e "${BOLD_BLUE}URLFinder installed successfully from source.${NC}" 720 | cd ../../../ 721 | sudo rm -rf urlfinder 722 | else 723 | echo -e "${RED}Failed to build URLFinder from source.${NC}" 724 | cd ../../../ 725 | rm -rf urlfinder 726 | exit 1 727 | fi 728 | fi 729 | 730 | # Ensure /usr/local/bin is in PATH 731 | if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then 732 | export PATH="$PATH:/usr/local/bin" 733 | fi 734 | 735 | # Verify that URLFinder is accessible 736 | if ! command -v urlfinder &> /dev/null; then 737 | echo -e "${RED}URLFinder is not in your PATH. Please ensure /usr/local/bin is in your PATH.${NC}" 738 | exit 1 739 | fi 740 | 741 | sleep 3 742 | 743 | 744 | 745 | # Step 9: Install Katana 746 | python3 -m venv .venv 747 | source .venv/bin/activate 748 | show_progress "Installing Katana" 749 | 750 | 751 | # Attempt to install Katana using 'go install' 752 | echo -e "${BOLD_WHITE}Attempting to install Katana using 'go install'...${NC}" 753 | if go install github.com/projectdiscovery/katana/cmd/katana@latest; then 754 | echo -e "${BOLD_BLUE}Katana installed successfully via 'go install'.${NC}" 755 | 756 | # Copy the binary to /usr/local/bin for system-wide access 757 | sudo cp "$(go env GOPATH)/bin/katana" /usr/local/bin/ 758 | else 759 | echo -e "${YELLOW}Failed to install Katana via 'go install'. 
Attempting to install from source...${NC}" 760 | 761 | # Clone the Katana repository 762 | git clone https://github.com/projectdiscovery/katana.git 763 | cd katana/cmd/katana 764 | 765 | # Build the Katana binary 766 | if go build; then 767 | chmod +x katana 768 | sudo mv katana /usr/local/bin/ 769 | echo -e "${BOLD_BLUE}Katana installed successfully from source.${NC}" 770 | cd ../../.. 771 | sudo rm -rf katana 772 | else 773 | echo -e "${RED}Failed to build Katana from source.${NC}" 774 | cd ../../.. 775 | rm -rf katana 776 | exit 1 777 | fi 778 | fi 779 | 780 | # Ensure /usr/local/bin is in PATH 781 | if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then 782 | export PATH="$PATH:/usr/local/bin" 783 | fi 784 | 785 | # Verify that Katana is accessible 786 | if ! command -v katana &> /dev/null; then 787 | echo -e "${RED}Katana is not in your PATH. Please ensure /usr/local/bin is in your PATH.${NC}" 788 | exit 1 789 | fi 790 | 791 | sleep 3 792 | 793 | 794 | # Install Gau 795 | python3 -m venv .venv 796 | source .venv/bin/activate 797 | show_progress "Installing Gau" 798 | 799 | 800 | # Attempt to install Gau using 'go install' 801 | echo -e "${BOLD_WHITE}Attempting to install Gau using 'go install'...${NC}" 802 | if go install github.com/lc/gau/v2/cmd/gau@latest; then 803 | echo -e "${BOLD_BLUE}Gau installed successfully via 'go install'.${NC}" 804 | 805 | # Copy the binary to /usr/local/bin for system-wide access 806 | sudo cp "$(go env GOPATH)/bin/gau" /usr/local/bin/ 807 | else 808 | echo -e "${YELLOW}Failed to install Gau via 'go install'. Attempting to install from source...${NC}" 809 | 810 | # Clone the Gau repository 811 | git clone https://github.com/lc/gau 812 | cd gau/cmd/gau 813 | 814 | # Build the Gau binary 815 | if go build; then 816 | chmod +x gau 817 | sudo mv gau /usr/local/bin/ 818 | echo -e "${BOLD_BLUE}Gau installed successfully from source.${NC}" 819 | cd ../../.. 820 | sudo rm -rf gau 821 | else 822 | echo -e "${RED}Failed to build Gau from source.${NC}" 823 | cd ../../.. 824 | rm -rf gau 825 | exit 1 826 | fi 827 | fi 828 | 829 | # Attempt to install Katana using 'go install' 830 | python3 -m venv .venv 831 | source .venv/bin/activate 832 | echo -e "${BOLD_WHITE}Attempting to install Katana using 'go install'...${NC}" 833 | if go install github.com/projectdiscovery/katana/cmd/katana@latest; then 834 | echo -e "${BOLD_BLUE}Katana installed successfully via 'go install'.${NC}" 835 | 836 | # Copy the binary to /usr/local/bin for system-wide access 837 | sudo cp "$(go env GOPATH)/bin/katana" /usr/local/bin/ 838 | else 839 | echo -e "${YELLOW}Failed to install Katana via 'go install'. Attempting to install from source...${NC}" 840 | 841 | # Clone the Katana repository 842 | git clone https://github.com/projectdiscovery/katana.git 843 | cd katana/cmd/katana 844 | 845 | # Build the Katana binary 846 | if go build; then 847 | chmod +x katana 848 | sudo mv katana /usr/local/bin/ 849 | echo -e "${BOLD_BLUE}Katana installed successfully from source.${NC}" 850 | cd ../../.. 851 | sudo rm -rf katana 852 | else 853 | echo -e "${RED}Failed to build Katana from source.${NC}" 854 | cd ../../.. 
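        # Build failed: we have already stepped back out of katana/cmd/katana (three levels),
        # so the partial clone is removed below before exiting with an error.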
855 | rm -rf katana 856 | exit 1 857 | fi 858 | fi 859 | 860 | # Attempt to install Waybackurls using 'go install' 861 | python3 -m venv .venv 862 | source .venv/bin/activate 863 | echo -e "${BOLD_WHITE}Attempting to install Waybackurls using 'go install'...${NC}" 864 | if go install github.com/tomnomnom/waybackurls@latest; then 865 | echo -e "${BOLD_BLUE}Waybackurls installed successfully via 'go install'.${NC}" 866 | 867 | # Copy the binary to /usr/local/bin for system-wide access 868 | sudo cp "$(go env GOPATH)/bin/waybackurls" /usr/local/bin/ 869 | else 870 | echo -e "${YELLOW}Failed to install Waybackurls via 'go install'. Attempting to install from source...${NC}" 871 | 872 | # Clone the Waybackurls repository 873 | git clone https://github.com/tomnomnom/waybackurls.git 874 | cd waybackurls 875 | 876 | # Build the Waybackurls binary 877 | if go build; then 878 | chmod +x waybackurls 879 | sudo mv waybackurls /usr/local/bin/ 880 | echo -e "${BOLD_BLUE}Waybackurls installed successfully from source.${NC}" 881 | cd .. 882 | sudo rm -rf waybackurls 883 | else 884 | echo -e "${RED}Failed to build Waybackurls from source.${NC}" 885 | cd .. 886 | rm -rf waybackurls 887 | pip uninstall pipx 888 | rm -rf /usr/local/bin/pipx 889 | rm -rf ~/.local/bin/pipx 890 | rm -rf ~/.local/pipx 891 | deactivate 892 | python3 -m pip install --user pipx 893 | python3 -m pipx ensurepath 894 | source ~/.bashrc 895 | rm -rf .venv 896 | python3 -m venv .venv 897 | source .venv/bin/activate 898 | pipx uninstall uro 899 | pip uninstall uro 900 | pipx install uro 901 | pip install --user uro 902 | export PATH=$HOME/.local/bin:$PATH 903 | pip install --upgrade pip setuptools wheel 904 | pip install git+https://github.com/RevoltSecurities/Dnsbruter 905 | pip install git+https://github.com/RevoltSecurities/Subprober 906 | pip install aiodns aiofiles alive_progress art asynciolimiter colorama requests uvloop 907 | pip install dnsbruter "aiodns>=3.2.0" "aiofiles>=24.1.0" "alive_progress>=3.2.0" "art>=6.1" "asynciolimiter>=1.1.0.post3" "colorama>=0.4.4" "requests>=2.32.3" "setuptools>=75.2.0" "uvloop>=0.21.0" 908 | sudo pipx inject subdominator "aiofiles>=23.2.1" "aiosqlite" "aiohttp>=3.9.4" "appdirs>=1.4.4" "httpx>=0.27.2" "art>=6.1" "beautifulsoup4>=4.11.1" "colorama>=0.4.6" "fake_useragent>=1.5.0" "PyYAML>=6.0.1" "requests>=2.31.0" "rich>=13.7.1" "urllib3>=1.26.18" "tldextract>=5.1.2" 909 | pip install "subprober" "aiodns>=3.2.0" "aiofiles>=24.1.0" "aiojarm>=0.2.2" "alive_progress>=3.2.0" "appdirs>=1.4.4" "art>=6.4" "asynciolimiter>=1.1.1" "beautifulsoup4>=4.12.3" "colorama>=0.4.6" "cryptography>=44.0.0" "fake_useragent>=1.5.1" "httpx>=0.28.1" "mmh3>=5.0.1" "playwright>=1.49.1" "requests>=2.32.3" "rich>=13.9.4" "setuptools>=75.2.0" "simhash>=2.1.2" "urllib3>=1.26.18" "uvloop>=0.21.0" "websockets>=14.1" "bs4>=0.0.2" "lxml>=5.3.0" 910 | exit 1 911 | fi 912 | fi 913 | 914 | 915 | # Ensure /usr/local/bin is in PATH 916 | if [[ ":$PATH:" != *":/usr/local/bin:"* ]]; then 917 | export PATH="$PATH:/usr/local/bin" 918 | fi 919 | 920 | # Confirm installation and configuration 921 | if command -v gau &> /dev/null; then 922 | echo -e "${BOLD_BLUE}Gau is successfully installed and globally available.${NC}" 923 | else 924 | echo -e "${RED}Gau installation failed. 
Please check the installation steps.${NC}"
925 |     exit 1
926 | fi
927 | 
928 | sleep 3
929 | 
930 | # Step 12: Install Uro
931 | show_progress "Installing Uro"
932 | pip install uro --break-system-packages --root-user-action=ignore
933 | uro -h   # Verify that Uro runs
934 | sleep 3
935 | 
936 | # Step 13: Install Arjun
937 | show_progress "Installing Arjun"
938 | sudo apt install -y arjun
939 | sudo pip3 install arjun --break-system-packages --root-user-action=ignore
940 | sudo pip install alive_progress --break-system-packages --root-user-action=ignore
941 | sudo pip install ratelimit --break-system-packages --root-user-action=ignore
942 | sudo mv /usr/lib/python3.12/EXTERNALLY-MANAGED /usr/lib/python3.12/EXTERNALLY-MANAGED.bak
943 | sleep 3
944 | 
945 | # Step 14: Install Tmux
946 | show_progress "Installing Tmux"
947 | sudo apt install -y tmux
948 | sudo apt --fix-broken install
949 | sudo apt update
950 | dnsbruter -up
951 | sleep 3
952 | 
953 | # Set specific permissions for installed tools
954 | sudo chmod 755 /usr/local/bin/waybackurls
955 | sudo chmod 755 /usr/local/bin/katana
956 | sudo chmod 755 /usr/local/bin/gau
957 | sudo chmod 755 /usr/local/bin/uro
958 | sudo chmod 755 /usr/local/bin/gospider
959 | sudo chmod 755 /usr/local/bin/hakrawler
960 | sudo chmod 755 /usr/local/bin/urlfinder
961 | 
962 | # Find paths for subprober, subdominator, and dnsbruter
963 | SUBPROBER_PATH=$(which subprober)
964 | SUBDOMINATOR_PATH=$(which subdominator)
965 | DNSBRUTER_PATH=$(which dnsbruter)
966 | 
967 | # Check if the tools are found and copy them to the .venv/bin directory
968 | if [ -n "$SUBPROBER_PATH" ]; then
969 |     sudo cp "$SUBPROBER_PATH" .venv/bin/
970 | else
971 |     echo "subprober not found!"
972 | fi
973 | 
974 | if [ -n "$SUBDOMINATOR_PATH" ]; then
975 |     sudo cp "$SUBDOMINATOR_PATH" .venv/bin/
976 | else
977 |     echo "subdominator not found!"
978 | fi
979 | 
980 | if [ -n "$DNSBRUTER_PATH" ]; then
981 |     sudo cp "$DNSBRUTER_PATH" .venv/bin/
982 | else
983 |     echo "dnsbruter not found!"
984 | fi
985 | 
986 | # Display installed tools
987 | echo -e "${BOLD_BLUE}All tools have been successfully installed.${NC}"
988 | 
989 | # Checking each tool with -h for verification
990 | echo -e "${BOLD_WHITE}Checking installed tools...${NC}"
991 | 
992 | echo -e "${BOLD_WHITE}1. Dnsbruter:${NC}"
993 | dnsbruter -h > /dev/null 2>&1 && echo "Dnsbruter is installed" || echo "Dnsbruter is not installed correctly"
994 | 
995 | echo -e "${BOLD_WHITE}2. Subdominator:${NC}"
996 | subdominator -h > /dev/null 2>&1 && echo "Subdominator is installed" || echo "Subdominator is not installed correctly"
997 | 
998 | echo -e "${BOLD_WHITE}3. SubProber:${NC}"
999 | subprober -h > /dev/null 2>&1 && echo "SubProber is installed" || echo "SubProber is not installed correctly"
1000 | 
1001 | echo -e "${BOLD_WHITE}4. GoSpider:${NC}"
1002 | gospider -h > /dev/null 2>&1 && echo "GoSpider is installed" || echo "GoSpider is not installed correctly"
1003 | 
1004 | echo -e "${BOLD_WHITE}5. Hakrawler:${NC}"
1005 | hakrawler --help > /dev/null 2>&1 && echo "Hakrawler is installed" || echo "Hakrawler is not installed correctly"
1006 | 
1007 | echo -e "${BOLD_WHITE}6. URLFinder:${NC}"
1008 | urlfinder --help > /dev/null 2>&1 && echo "URLFinder is installed" || echo "URLFinder is not installed correctly"
1009 | 
1010 | echo -e "${BOLD_WHITE}7. Katana:${NC}"
1011 | katana -h > /dev/null 2>&1 && echo "Katana is installed" || echo "Katana is not installed correctly"
1012 | 
1013 | echo -e "${BOLD_WHITE}8. 
Waybackurls:${NC}" 1014 | waybackurls -h > /dev/null 2>&1 && echo "Waybackurls is installed" || echo "Waybackurls is not installed correctly" 1015 | 1016 | echo -e "${BOLD_WHITE}9. Gau:${NC}" 1017 | gau -h > /dev/null 2>&1 && echo "Gau is installed" || echo "Gau is not installed correctly" 1018 | 1019 | echo -e "${BOLD_WHITE}10. Uro:${NC}" 1020 | uro -h > /dev/null 2>&1 && echo "Uro is installed" || echo "Uro is not installed correctly" 1021 | 1022 | echo -e "${BOLD_WHITE}11. Arjun:${NC}" 1023 | arjun -h > /dev/null 2>&1 && echo "Arjun is installed" || echo "Arjun is not installed correctly" 1024 | 1028 | echo -e "${BOLD_WHITE}12. Tmux:${NC}" 1029 | echo "Tmux is installed (skipping check)" 1030 | 1031 | # Cyan and White message with tool links for manual installation 1032 | echo -e "\n${BOLD_CYAN}If you encounter any issues or are unable to run any of the tools,${NC}" 1033 | echo -e "${BOLD_WHITE}please refer to the following links for manual installation:${NC}" 1034 | echo -e "${BOLD_WHITE}Waybackurls:${NC} https://github.com/tomnomnom/waybackurls" 1035 | echo -e "${BOLD_WHITE}Gau:${NC} https://github.com/lc/gau" 1036 | echo -e "${BOLD_WHITE}Uro:${NC} https://github.com/s0md3v/uro" 1037 | echo -e "${BOLD_WHITE}Katana:${NC} https://github.com/projectdiscovery/katana" 1038 | echo -e "${BOLD_WHITE}Hakrawler:${NC} https://github.com/hakluke/hakrawler" 1039 | echo -e "${BOLD_WHITE}GoSpider:${NC} https://github.com/jaeles-project/gospider" 1040 | echo -e "${BOLD_WHITE}Arjun:${NC} https://github.com/s0md3v/Arjun" 1041 | echo -e "${BOLD_WHITE}Dnsbruter:${NC} https://github.com/RevoltSecurities/Dnsbruter" 1042 | echo -e "${BOLD_WHITE}SubProber:${NC} https://github.com/RevoltSecurities/SubProber" 1043 | echo -e "${BOLD_WHITE}Subdominator:${NC} https://github.com/RevoltSecurities/Subdominator" 1044 | echo -e "${BOLD_WHITE}UrlFinder:${NC} https://github.com/projectdiscovery/urlfinder" 1045 | 1046 | # Adding extra space for separation 1047 | echo -e "\n\n" 1048 | 1049 | # Bold blue message surrounded by a rectangle of lines with extra spacing 1050 | echo -e "${BOLD_BLUE}=============================================================================================${NC}" 1051 | echo -e "${BOLD_BLUE}|                                                                                           |${NC}" 1052 | echo -e "${BOLD_BLUE}| NOTE: To use this tool, you must have the xss0r tool, which is an XSS detection           |${NC}" 1053 | echo -e "${BOLD_BLUE}| and exploitation tool for all types of XSS attacks, in the same directory.                |${NC}" 1054 | echo -e "${BOLD_BLUE}|                                                                                           |${NC}" 1055 | echo -e "${BOLD_BLUE}| Alongside the xss0r tool, you'll also need two wordlists and two Python reflection        |${NC}" 1056 | echo -e "${BOLD_BLUE}| detection tools. All of these can be found in any of the XSS plans available on the site. |${NC}" 1057 | echo -e "${BOLD_BLUE}|                                                                                           |${NC}" 1058 | echo -e "${BOLD_BLUE}| You can get them by visiting: https://store.xss0r.com/ and purchasing any plan that       |${NC}" 1059 | echo -e "${BOLD_BLUE}| fits your needs.                                                                          |${NC}" 1060 | echo -e "${BOLD_BLUE}|                                                                                           |${NC}" 1061 | echo -e "${BOLD_BLUE}| If you already have a plan, simply copy the xss0r tool, the wordlists, and the            |${NC}" 1062 | echo -e "${BOLD_BLUE}| reflection detection tool into the same folder where your xss0rRecon tool is located.     
|${NC}" 1063 | echo -e "${BOLD_BLUE}| |${NC}" 1064 | echo -e "${BOLD_BLUE}| Alternatively, if you don't have a plan or the tools, you can use the PRO plan for free |${NC}" 1065 | echo -e "${BOLD_BLUE}| for 5 days each month from the 10th to the 15th. |${NC}" 1066 | echo -e "${BOLD_BLUE}| |${NC}" 1067 | echo -e "${BOLD_BLUE}| The release of the key is posted on the homepage banner at store.xss0r.com, but this |${NC}" 1068 | echo -e "${BOLD_BLUE}| option is only available for those who have not yet tested the tool. |${NC}" 1069 | echo -e "${BOLD_BLUE}| |${NC}" 1070 | echo -e "${BOLD_BLUE}=============================================================================================${NC}" 1071 | 1072 | echo -e "\n\n" 1073 | 1074 | } 1075 | 1076 | 1077 | # Setup and activate Python virtual environment 1078 | setup_and_activate_venv() { 1079 | echo -e "${BOLD_WHITE}Setting up and activating Python virtual environment...${NC}" 1080 | # Create a virtual environment in the .venv directory if it doesn't already exist 1081 | if [ ! -d ".venv" ]; then 1082 | echo -e "${BOLD_BLUE}Creating Python virtual environment in .venv...${NC}" 1083 | python3 -m venv .venv 1084 | if [ $? -ne 0 ]; then 1085 | echo -e "${RED}Error: Failed to create virtual environment.${NC}" 1086 | exit 1 1087 | fi 1088 | fi 1089 | 1090 | # Activate the virtual environment 1091 | echo -e "${BOLD_BLUE}Activating virtual environment...${NC}" 1092 | source .venv/bin/activate 1093 | if [ $? -ne 0 ]; then 1094 | echo -e "${RED}Error: Failed to activate virtual environment.${NC}" 1095 | exit 1 1096 | fi 1097 | 1098 | echo -e "${BOLD_GREEN}Virtual environment activated successfully!${NC}" 1099 | } 1100 | 1101 | # Call the virtual environment setup before running step 3 1102 | setup_and_activate_venv 1103 | 1104 | # Function to run step 3 (Domain Enumeration and Filtering) 1105 | run_step_3() { 1106 | # Check if the user wants to skip the order check for step 3 1107 | source "$(pwd)/.venv/bin/activate" 1108 | if [ "$skip_order_check_for_option_4" = true ]; then 1109 | echo -e "${BOLD_BLUE}Skipping step 3 order check and directly using the domain list provided...${NC}" 1110 | if [ -f "${domain_name}-domains.txt" ]; then 1111 | echo -e "${BOLD_WHITE}Using your provided list of domains from ${domain_name}-domains.txt${NC}" 1112 | proceed_with_existing_file "${domain_name}-domains.txt" 1113 | else 1114 | echo -e "${RED}Error: File ${domain_name}-domains.txt not found. Please ensure the file is in the current directory.${NC}" 1115 | exit 1 1116 | fi 1117 | return 1118 | fi 1119 | 1120 | echo -e "${BOLD_WHITE}You selected: Domain Enumeration and Filtering for $domain_name${NC}" 1121 | echo -e "${BOLD_WHITE}Do you want to use your own list of domains or xss0rRecon to find it for you? Enter Y for your list or N for xss0rRecon list - domain list must be in format ${domain_name}-domains.txt: ${NC}" 1122 | read user_choice 1123 | 1124 | # Convert user input to uppercase 1125 | user_choice=$(echo "$user_choice" | tr '[:lower:]' '[:upper:]') 1126 | 1127 | if [[ "$user_choice" == "Y" ]]; then 1128 | if [ -f "${domain_name}-domains.txt" ]; then 1129 | echo -e "${BOLD_WHITE}Using your provided list of domains from ${domain_name}-domains.txt${NC}" 1130 | # Skip directly to the Y/N prompt for continuing the scan 1131 | read -p "$(echo -e "${BOLD_WHITE}Your domain file has been created. Would you like to continue scanning your target domain, including all its subdomains? If so, please enter 'Y'. 
If you prefer to modify the domain file first, so you can remove unwanted entries and add your own domains, enter 'N', and then proceed manually with step 4 afterwards. Do you want to continue scanning with all subdomains (Y/N)?: ${NC}")" continue_scan 1132 | if [[ "$continue_scan" =~ ^[Yy]$ ]]; then 1133 | # Step xx: Filtering ALIVE DOMAINS 1134 | show_progress "Filtering ALIVE DOMAINS" 1135 | python3 -m venv .venv 1136 | source .venv/bin/activate 1137 | subprober -f "${domain_name}-domains.txt" -sc -ar -o "${domain_name}-alive" -nc -c 20 || handle_error "subprober" 1138 | sleep 5 1139 | rm -r "${domain_name}-domains.txt" 1140 | mv "${domain_name}-alive" "${domain_name}-domains.txt" 1141 | 1142 | # Step xx: Filtering valid DOMAINS 1143 | show_progress "Filtering valid DOMAINS" 1144 | grep -oP 'http[^\s]*' "${domain_name}-domains.txt" > ${domain_name}-valid || handle_error "grep valid urls" 1145 | sleep 5 1146 | rm -r "${domain_name}-domains.txt" 1147 | mv ${domain_name}-valid "${domain_name}-domains.txt" 1148 | 1149 | # Step xx: Remove duplicates 1150 | show_progress "Removing duplicate domains" 1151 | initial_count=$(wc -l < "${domain_name}-domains.txt") 1152 | awk '{if (!seen[$0]++) print}' "${domain_name}-domains.txt" >> "subs-filtered.txt" || handle_error "Removing duplicates from ${domain_name}-domains.txt" 1153 | final_count_subs=$(wc -l < "subs-filtered.txt") 1154 | removed_count=$((initial_count - final_count_subs)) 1155 | rm -r "${domain_name}-domains.txt" 1156 | mv "subs-filtered.txt" "${domain_name}-domains.txt" 1157 | echo -e "${RED}Removed $removed_count duplicate domains.${NC}" 1158 | 1159 | # Step xx: Normalize to `http://` and remove `www.` 1160 | awk '{sub(/^https?:\/\//, "http://", $0); sub(/^http:\/\/www\./, "http://", $0); domain = $0; if (!seen[domain]++) print domain}' \ 1161 | "${domain_name}-domains.txt" > "final-${domain_name}-domains.txt" || handle_error "Final filtering" 1162 | rm -r "${domain_name}-domains.txt" 1163 | mv "final-${domain_name}-domains.txt" "${domain_name}-domains.txt" 1164 | sleep 5 1165 | 1166 | skip_order_check_for_option_4=true 1167 | echo -e "${BOLD_BLUE}Automatically continuing with step 4: URL Crawling and Filtering...${NC}" 1168 | run_step_4 # Automatically continue to step 4 1169 | else 1170 | echo -e "${BOLD_WHITE}Please edit your file ${domain_name}-domains.txt and remove any unwanted subdomains before continuing.${NC}" 1171 | skip_order_check_for_option_4=true 1172 | fi 1173 | return 1174 | else 1175 | echo -e "${RED}Error: File ${domain_name}-domains.txt not found. Please ensure the file is in the current directory.${NC}" 1176 | exit 1 1177 | fi 1178 | elif [[ "$user_choice" == "N" ]]; then 1179 | # Step 1: Active subdomain brute-forcing with a wordlist (dnsbruter) 1180 | show_progress "Active brute-forcing of subdomains with a wordlist" 1181 | python3 -m venv .venv 1182 | source .venv/bin/activate 1183 | dnsbruter -d "$domain_name" -w subs-dnsbruter-small.txt -c 150 -wt 80 -rt 500 -wd -ws wild.txt -o output-dnsbruter.txt || handle_error "dnsbruter" 1184 | sleep 5 1185 | 1186 | # Step 2: Passive subdomain enumeration (subdominator) 1187 | show_progress "Passive subdomain enumeration" 1188 | python3 -m venv .venv 1189 | source .venv/bin/activate 1190 | subdominator -d "$domain_name" -o output-subdominator.txt || handle_error "subdominator" 1191 | sleep 5 1192 | 1193 | # Step 3: Checking if output-dnsbruter.txt was created 1194 | if [ ! -f "output-dnsbruter.txt" ]; then 1195 | echo "Error: output-dnsbruter.txt not found. The dnsbruter command may have failed." 
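# NOTE (illustrative aside, not part of the original flow): the recovery logic
# below keys on output files rather than exit codes. A hypothetical variant
# that checks dnsbruter's status directly could look like:
#
#   if ! dnsbruter -d "$domain_name" -w subs-dnsbruter-small.txt -o output-dnsbruter.txt; then
#       echo "dnsbruter exited non-zero; falling back to subdominator results"
#   fi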
1196 | if [ -f "output-subdominator.txt" ]; then 1197 | echo "Moving output-subdominator.txt to ${domain_name}-domains.txt" 1198 | mv output-subdominator.txt "${domain_name}-domains.txt" 1199 | else 1200 | echo "Error: output-subdominator.txt not found. The subdominator command may have also failed." 1201 | exit 1 1202 | fi 1203 | else 1204 | if [ -f "output-subdominator.txt" ]; then 1205 | show_progress "Merging passive and active results into one file" 1206 | cat output-dnsbruter.txt output-subdominator.txt > "${domain_name}-domains.txt" || handle_error "Merging domains" 1207 | else 1208 | echo "Error: output-subdominator.txt not found. Proceeding with output-dnsbruter.txt only." 1209 | mv output-dnsbruter.txt "${domain_name}-domains.txt" 1210 | fi 1211 | fi 1212 | # Step 4: Removing old temporary files 1213 | show_progress "Removing old temporary files" 1214 | if [ -f "output-dnsbruter.txt" ]; then rm output-dnsbruter.txt || handle_error "Removing output-dnsbruter.txt"; fi 1215 | if [ -f "output-subdominator.txt" ]; then rm output-subdominator.txt || handle_error "Removing output-subdominator.txt"; fi 1216 | sleep 3 1217 | else 1218 | echo -e "${RED}Invalid choice entered. Please run the script again and choose Y or N.${NC}" 1219 | exit 1 1220 | fi 1221 | 1222 | # Step 6: Removing duplicate domains 1223 | show_progress "Removing duplicate domains" 1224 | remove_duplicates "${domain_name}-domains.txt" 1225 | } 1226 | 1227 | proceed_with_existing_file() { 1228 | file_path=$1 1229 | echo -e "${RED}Proceeding with file: $file_path${NC}" 1230 | remove_duplicates "$file_path" 1231 | } 1232 | 1233 | remove_duplicates() { 1234 | file_path=$1 1235 | initial_count=$(wc -l < "$file_path") 1236 | awk '{sub(/^https?:\/\//, "", $0); sub(/^www\./, "", $0); if (!seen[$0]++) print}' "$file_path" > "unique-$file_path" 1237 | final_count=$(wc -l < "unique-$file_path") 1238 | removed_count=$((initial_count - final_count)) 1239 | echo -e "${RED}Removed $removed_count duplicate domains. Total subdomains after processing: $final_count${NC}" 1240 | sleep 3 1241 | 1242 | # Step 6.1: Removing old domain list 1243 | show_progress "Removing old domain list" 1244 | rm -r "${domain_name}-domains.txt" || handle_error "Removing old domain list" 1245 | sleep 3 1246 | 1247 | # Step 7: Filtering ALIVE domain names 1248 | show_progress "Filtering ALIVE domain names" 1249 | subprober -f "unique-${domain_name}-domains.txt" -sc -ar -o "subprober-${domain_name}-domains.txt" -nc -c 20 || handle_error "subprober" 1250 | sleep 5 1251 | 1252 | 1253 | # Step 2y1: Filtering valid domain names 1254 | show_progress "Filtering valid domain names" 1255 | grep -oP 'http[^\s]*' "subprober-${domain_name}-domains.txt" > output-domains.txt || handle_error "grep valid domains" 1256 | sleep 3 1257 | 1258 | # Step 2y2: Replacing with valid domains 1259 | sudo mv output-domains.txt subs-subs.txt 1260 | echo "Replaced the old list with the validated domain names." 
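# Worked example of the 'grep -oP' extraction in the step above (sample line, made up):
#   input : [200] https://app.example.com/login [Login Page]
#   output: https://app.example.com/login
# i.e. only the whitespace-delimited URL token survives from each subprober line.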
1261 | sleep 3 1262 | 1263 | 1264 | # Step 8: Renaming final output 1265 | show_progress "Renaming final output to new file" 1266 | mv subs-subs.txt "${domain_name}-domains.txt" || handle_error "Renaming output file" 1267 | sleep 3 1268 | 1269 | # Step 9: Final filtering of unique domain names 1270 | show_progress "Last step filtering domains" 1271 | 1272 | # Normalize to `http://` and remove `www.` 1273 | awk '{sub(/^https?:\/\//, "http://", $0); sub(/^http:\/\/www\./, "http://", $0); domain = $0; if (!seen[domain]++) print domain}' \ 1274 | "${domain_name}-domains.txt" > "final-${domain_name}-domains.txt" || handle_error "Final filtering" 1275 | sleep 5 1276 | 1277 | # Step 10: Renaming final file to new file 1278 | show_progress "Renaming final file to new file" 1279 | 1280 | # Deduplication to remove duplicates, ensuring `www.` is not included 1281 | awk '{sub(/^http:\/\/www\./, "http://", $0); print}' "final-${domain_name}-domains.txt" | \ 1282 | awk '!seen[$0]++' > "${domain_name}-domains.txt" || handle_error "Removing duplicates and renaming output file" 1283 | # Delete the intermediate file 1284 | rm -r "final-${domain_name}-domains.txt" || handle_error "Deleting intermediate file" 1285 | sleep 3 1286 | 1287 | # Display the completion message in red 1288 | echo -e "${BOLD_RED}Enumeration and filtering process completed successfully. Final output saved as ${domain_name}-domains.txt.${NC}" 1289 | 1290 | 1291 | # Step 10.1: Deleting all unwanted files 1292 | show_progress "Deleting all unwanted files" 1293 | sudo rm -r "unique-${domain_name}-domains.txt" || echo "Some files could not be deleted. Please check permissions." 1294 | sleep 3 1295 | 1296 | 1297 | # New message for the user with Y/N option 1298 | read -p "$(echo -e "${BOLD_WHITE}Your domain file has been created. Would you like to continue scanning your target domain, including all its subdomains? If so, please enter 'Y'. If you prefer to modify the domain file first, so you can remove unwanted entries and add your own domains, enter 'N', and then proceed manually with step 4 afterwards. Do you want to continue scanning with all subdomains (Y/N)?: ${NC}")" continue_scan 1299 | if [[ "$continue_scan" =~ ^[Yy]$ ]]; then 1300 | skip_order_check_for_option_4=true 1301 | echo -e "${BOLD_BLUE}Automatically continuing with step 4: URL Crawling and Filtering...${NC}" 1302 | run_step_4 # Automatically continue to step 4 1303 | else 1304 | echo -e "${BOLD_WHITE}Please edit your file ${domain_name}-domains.txt and remove any unwanted subdomains before continuing.${NC}" 1305 | skip_order_check_for_option_4=true 1306 | fi 1307 | } 1308 | 1309 | 1310 | # Function to run step 4 (URL Crawling and Filtering) 1311 | run_step_4() { 1312 | echo -e "${BOLD_WHITE}You selected: URL Crawling and Filtering for $domain_name${NC}" 1313 | 1314 | # Ask user if they want to use their own crawled links file 1315 | echo -e "${BOLD_WHITE}Do you want to use your own crawled links file? (Y/N)${NC}" 1316 | read -r use_own_links_file 1317 | 1318 | if [[ "$use_own_links_file" =~ ^[Yy]$ ]]; then 1319 | echo -e "${BOLD_GREEN}Skipping default crawling steps. Proceeding with your own links file...${NC}" 1320 | echo -e "${BOLD_GREEN}Please save your list of URLs as ${domain_name}-links-final.txt${NC}" 1321 | 1322 | # Ensure the user's file is in the correct format 1323 | if [[ ! 
-f "${domain_name}-links-final.txt" ]]; then 1324 | echo -e "${BOLD_RED}Error: File ${domain_name}-links-final.txt not found!${NC}" 1325 | exit 1 1326 | fi 1327 | 1328 | # Create new folder 'urls' and assign permissions 1329 | show_progress "Creating 'urls' directory and setting permissions" 1330 | sudo mkdir -p urls 1331 | sudo chmod 777 urls 1332 | 1333 | # Copy the user's file to the 'urls' folder 1334 | show_progress "Copying ${domain_name}-links-final.txt to 'urls' directory" 1335 | sudo cp "${domain_name}-links-final.txt" urls/ 1336 | 1337 | # Display professional message about the URLs 1338 | echo -e "${BOLD_WHITE}All identified URLs have been successfully saved in the newly created 'urls' directory.${NC}" 1339 | echo -e "${CYAN}These URLs represent potential targets that were not filtered out during the previous steps.${NC}" 1340 | echo -e "${CYAN}You can use the file 'urls/${domain_name}-links-final.txt' for further vulnerability testing with tools like Nuclei or any other inspection frameworks to identify additional vulnerabilities.${NC}" 1341 | echo -e "${CYAN}We are now continuing with our main purpose of XSS filtration and vulnerability identification.${NC}" 1342 | 1343 | # Display the number of URLs in the final merged file 1344 | total_merged_urls=$(wc -l < "${domain_name}-links-final.txt") 1345 | echo -e "${BOLD_WHITE}Total URLs merged: ${RED}${total_merged_urls}${NC}" 1346 | sleep 3 1347 | 1348 | # Automatically start step 5 after completing step 4 1349 | run_step_5 1350 | fi 1351 | 1352 | echo -e "${BOLD_WHITE}You selected: URL Crawling and Filtering for $domain_name${NC}" 1353 | 1354 | # Step 1: Crawling with GoSpider 1355 | show_progress "Crawling links with GoSpider" 1356 | gospider -S "${domain_name}-domains.txt" -c 10 -d 5 | tee -a "${domain_name}-gospider.txt" || handle_error "GoSpider crawl" 1357 | sleep 3 1358 | 1359 | # Step 2: Crawling with Hakrawler 1360 | show_progress "Crawling links with Hakrawler" 1361 | cat "${domain_name}-domains.txt" | hakrawler -d 3 | tee -a "${domain_name}-hakrawler.txt" || handle_error "Hakrawler crawl" 1362 | sleep 3 1363 | 1364 | # Step 2.1: Crawling with URLFinder 1365 | show_progress "Crawling links with URLFinder" 1366 | urlfinder -all -d "${domain_name}-domains.txt" -o "${domain_name}-urlfinder.txt" || handle_error "URLFinder crawl" 1367 | sleep 3 1368 | 1369 | 1370 | # Step 3: Crawling with Katana 1371 | show_progress "Crawling links with Katana" 1372 | cat "${domain_name}-domains.txt" | katana -jc | tee -a "${domain_name}-katana.txt" || handle_error "Katana crawl" 1373 | sleep 3 1374 | 1375 | # Step 4: Crawling with Waybackurls 1376 | show_progress "Crawling links with Waybackurls" 1377 | cat "${domain_name}-domains.txt" | waybackurls | tee -a "${domain_name}-waybackurls.txt" || handle_error "Waybackurls crawl" 1378 | sleep 3 1379 | 1380 | # Step 5: Crawling with Gau 1381 | show_progress "Crawling links with Gau" 1382 | rm -r /root/.gau.toml 1383 | rm -r /home/$(whoami)/.gau.toml 1384 | # Perform crawling with Gau and save results 1385 | cat "${domain_name}-domains.txt" | gau | tee -a "${domain_name}-gau.txt" || handle_error "Gau crawl" 1386 | 1387 | sleep 3 1388 | 1389 | echo -e "${BOLD_BLUE}Crawling and filtering URLs completed successfully. 
Output files created for each tool.${NC}" 1390 | 1391 | # Step 6: Filter invalid links from Gospider, Hakrawler & UrlFinder output 1392 | show_progress "Filtering invalid links on Gospider & Hakrawler & UrlFinder" 1393 | grep -oP 'http[^\s]*' "${domain_name}-gospider.txt" > "${domain_name}-gospider1.txt" 1394 | grep -oP 'http[^\s]*' "${domain_name}-hakrawler.txt" > "${domain_name}-hakrawler1.txt" 1395 | grep -oP 'http[^\s]*' "${domain_name}-urlfinder.txt" > "${domain_name}-urlfinder1.txt" 1396 | sleep 3 1397 | 1398 | # Step 7: Remove old Gospider & Hakrawler & UrlFinder files 1399 | show_progress "Removing old Gospider & Hakrawler & UrlFinder files" 1400 | rm -r "${domain_name}-gospider.txt" "${domain_name}-hakrawler.txt" "${domain_name}-urlfinder.txt" 1401 | sleep 3 1402 | 1403 | # Step 8: Filter similar URLs with URO tool 1404 | show_progress "Filtering similar URLs with URO tool" 1405 | uro -i "${domain_name}-gospider1.txt" -o urogospider.txt & 1406 | uro_pid_gospider=$! 1407 | 1408 | uro -i "${domain_name}-hakrawler1.txt" -o urohakrawler.txt & 1409 | uro_pid_hakrawler=$! 1410 | 1411 | uro -i "${domain_name}-urlfinder1.txt" -o urourlfinder.txt & 1412 | uro_pid_urlfinder=$! 1413 | 1414 | uro -i "${domain_name}-katana.txt" -o urokatana.txt & 1415 | uro_pid_katana=$! 1416 | 1417 | uro -i "${domain_name}-waybackurls.txt" -o urowaybackurls.txt & 1418 | uro_pid_waybackurls=$! 1419 | 1420 | uro -i "${domain_name}-gau.txt" -o urogau.txt & 1421 | uro_pid_gau=$! 1422 | 1423 | # Monitor the processes 1424 | while kill -0 $uro_pid_gospider 2> /dev/null || kill -0 $uro_pid_hakrawler 2> /dev/null || \ 1425 | kill -0 $uro_pid_katana 2> /dev/null || kill -0 $uro_pid_waybackurls 2> /dev/null || \ 1426 | kill -0 $uro_pid_urlfinder 2> /dev/null || \ 1427 | kill -0 $uro_pid_gau 2> /dev/null; do 1428 | echo -e "${BOLD_BLUE}URO tool is still running...⌛️${NC}" 1429 | sleep 30 # Check every 30 seconds 1430 | done 1431 | 1432 | echo -e "${BOLD_BLUE}URO processing completed. 
Files created successfully.${NC}" 1433 | sleep 3 1434 | 1435 | # Step 9: Remove all previous files 1436 | show_progress "Removing all previous files" 1437 | sudo rm -r "${domain_name}-gospider1.txt" "${domain_name}-hakrawler1.txt" "${domain_name}-katana.txt" "${domain_name}-waybackurls.txt" "${domain_name}-gau.txt" "${domain_name}-urlfinder1.txt" 1438 | sleep 3 1439 | 1440 | # Step 10: Merge all URO files into one final file 1441 | show_progress "Merging all URO files into one final file" 1442 | cat urogospider.txt urohakrawler.txt urokatana.txt urowaybackurls.txt urogau.txt urourlfinder.txt > "${domain_name}-links-final.txt" 1443 | 1444 | # Create new folder 'urls' and assign permissions 1445 | show_progress "Creating 'urls' directory and setting permissions" 1446 | sudo mkdir -p urls 1447 | sudo chmod 777 urls 1448 | 1449 | # Copy the final file to the 'urls' folder 1450 | show_progress "Copying ${domain_name}-links-final.txt to 'urls' directory" 1451 | sudo cp "${domain_name}-links-final.txt" urls/ 1452 | 1453 | # Display professional message about the URLs 1454 | echo -e "${BOLD_WHITE}All identified URLs have been successfully saved in the newly created 'urls' directory.${NC}" 1455 | echo -e "${CYAN}These URLs represent potential targets that were not filtered out during the previous steps.${NC}" 1456 | echo -e "${CYAN}You can use the file 'urls/${domain_name}-links-final.txt' for further vulnerability testing with tools like Nuclei or any other inspection frameworks to identify additional vulnerabilities.${NC}" 1457 | echo -e "${CYAN}We are now continuing with our main purpose of XSS filtration and vulnerability identification.${NC}" 1458 | 1459 | # Display the number of URLs in the final merged file 1460 | total_merged_urls=$(wc -l < "${domain_name}-links-final.txt") 1461 | echo -e "${BOLD_WHITE}Total URLs merged: ${RED}${total_merged_urls}${NC}" 1462 | sleep 3 1463 | 1464 | # Step 11: Remove all 6 previous files 1465 | show_progress "Removing all 6 previous files" 1466 | sudo rm -r urokatana.txt urohakrawler.txt urowaybackurls.txt urogau.txt urogospider.txt urourlfinder.txt 1467 | sleep 3 1468 | 1469 | # Automatically start step 5 after completing step 4 1470 | run_step_5 1471 | } 1472 | 1473 | # Function to run step 5 (In-depth URL Filtering) 1474 | run_step_5() { 1475 | echo -e "${BOLD_WHITE}You selected: Filtering extensions from the URLs for $domain_name${NC}" 1476 | 1477 | # Step 14: Filtering extensions from the URLs 1478 | show_progress "Filtering extensions from the URLs" 1479 | cat ${domain_name}-links-final.txt | grep -E -v 
'\.css($|\s|\?|&|#|/|\.)|\.js($|\s|\?|&|#|/|\.)|\.jpg($|\s|\?|&|#|/|\.)|\.JPG($|\s|\?|&|#|/|\.)|\.PNG($|\s|\?|&|#|/|\.)|\.GIF($|\s|\?|&|#|/|\.)|\.avi($|\s|\?|&|#|/|\.)|\.dll($|\s|\?|&|#|/|\.)|\.pl($|\s|\?|&|#|/|\.)|\.webm($|\s|\?|&|#|/|\.)|\.c($|\s|\?|&|#|/|\.)|\.py($|\s|\?|&|#|/|\.)|\.bat($|\s|\?|&|#|/|\.)|\.tar($|\s|\?|&|#|/|\.)|\.swp($|\s|\?|&|#|/|\.)|\.tmp($|\s|\?|&|#|/|\.)|\.sh($|\s|\?|&|#|/|\.)|\.deb($|\s|\?|&|#|/|\.)|\.exe($|\s|\?|&|#|/|\.)|\.zip($|\s|\?|&|#|/|\.)|\.mpeg($|\s|\?|&|#|/|\.)|\.mpg($|\s|\?|&|#|/|\.)|\.flv($|\s|\?|&|#|/|\.)|\.wmv($|\s|\?|&|#|/|\.)|\.wma($|\s|\?|&|#|/|\.)|\.aac($|\s|\?|&|#|/|\.)|\.m4a($|\s|\?|&|#|/|\.)|\.ogg($|\s|\?|&|#|/|\.)|\.mp4($|\s|\?|&|#|/|\.)|\.mp3($|\s|\?|&|#|/|\.)|\.bat($|\s|\?|&|#|/|\.)|\.dat($|\s|\?|&|#|/|\.)|\.cfg($|\s|\?|&|#|/|\.)|\.cfm($|\s|\?|&|#|/|\.)|\.bin($|\s|\?|&|#|/|\.)|\.jpeg($|\s|\?|&|#|/|\.)|\.JPEG($|\s|\?|&|#|/|\.)|\.ps.gz($|\s|\?|&|#|/|\.)|\.gz($|\s|\?|&|#|/|\.)|\.gif($|\s|\?|&|#|/|\.)|\.tif($|\s|\?|&|#|/|\.)|\.tiff($|\s|\?|&|#|/|\.)|\.csv($|\s|\?|&|#|/|\.)|\.png($|\s|\?|&|#|/|\.)|\.ttf($|\s|\?|&|#|/|\.)|\.ppt($|\s|\?|&|#|/|\.)|\.pptx($|\s|\?|&|#|/|\.)|\.ppsx($|\s|\?|&|#|/|\.)|\.doc($|\s|\?|&|#|/|\.)|\.woff($|\s|\?|&|#|/|\.)|\.xlsx($|\s|\?|&|#|/|\.)|\.xls($|\s|\?|&|#|/|\.)|\.mpp($|\s|\?|&|#|/|\.)|\.mdb($|\s|\?|&|#|/|\.)|\.json($|\s|\?|&|#|/|\.)|\.woff2($|\s|\?|&|#|/|\.)|\.icon($|\s|\?|&|#|/|\.)|\.pdf($|\s|\?|&|#|/|\.)|\.docx($|\s|\?|&|#|/|\.)|\.svg($|\s|\?|&|#|/|\.)|\.txt($|\s|\?|&|#|/|\.)|\.jar($|\s|\?|&|#|/|\.)|\.0($|\s|\?|&|#|/|\.)|\.1($|\s|\?|&|#|/|\.)|\.2($|\s|\?|&|#|/|\.)|\.3($|\s|\?|&|#|/|\.)|\.4($|\s|\?|&|#|/|\.)|\.m4r($|\s|\?|&|#|/|\.)|\.kml($|\s|\?|&|#|/|\.)|\.pro($|\s|\?|&|#|/|\.)|\.yao($|\s|\?|&|#|/|\.)|\.gcn3($|\s|\?|&|#|/|\.)|\.PDF($|\s|\?|&|#|/|\.)|\.egy($|\s|\?|&|#|/|\.)|\.par($|\s|\?|&|#|/|\.)|\.lin($|\s|\?|&|#|/|\.)|\.yht($|\s|\?|&|#|/|\.)' > filtered-extensions-links.txt 1480 | sleep 5 1481 | 1482 | # Step 15: Renaming filtered extensions file 1483 | show_progress "Renaming filtered extensions file" 1484 | mv filtered-extensions-links.txt "${domain_name}-links-clean.txt" 1485 | sleep 3 1486 | 1487 | # Step 16: Filtering unwanted domains from the URLs 1488 | show_progress "Filtering unwanted domains from the URLs" 1489 | grep -E "^(https?://)?([a-zA-Z0-9.-]+\.)?${domain_name}" "${domain_name}-links-clean.txt" > "${domain_name}-links-clean1.txt" 1490 | sleep 3 1491 | 1492 | # Step 17: Removing old filtered file 1493 | show_progress "Removing old filtered file" 1494 | rm -r ${domain_name}-links-clean.txt ${domain_name}-links-final.txt 1495 | sleep 3 1496 | 1497 | # Step 18: Renaming new filtered file 1498 | show_progress "Renaming new filtered file" 1499 | mv ${domain_name}-links-clean1.txt ${domain_name}-links-clean.txt 1500 | sleep 3 1501 | 1502 | # Step 19: Running URO tool again to filter duplicate and similar URLs 1503 | show_progress "Running URO tool again to filter duplicate and similar URLs" 1504 | uro -i "${domain_name}-links-clean.txt" -o "${domain_name}-uro.txt" & 1505 | uro_pid_clean=$! 1506 | 1507 | # Monitor the URO process 1508 | while kill -0 $uro_pid_clean 2> /dev/null; do 1509 | echo -e "${BOLD_BLUE}URO tool is still running for clean URLs...⌛️${NC}" 1510 | sleep 30 # Check every 30 seconds 1511 | done 1512 | 1513 | echo -e "${BOLD_BLUE}URO processing completed. 
Files created successfully.${NC}" 1514 | sleep 3 1515 | 1516 | # Display the number of URLs in the URO output file 1517 | echo -e "${BOLD_WHITE}Total URLs in final output: ${RED}$(wc -l < "${domain_name}-uro.txt")${NC}" 1518 | sleep 3 1519 | 1520 | # Step 20: Removing old file 1521 | show_progress "Removing old file" 1522 | rm -r "${domain_name}-links-clean.txt" 1523 | sleep 3 1524 | 1525 | # Step 21: Removing 99% similar parameters with bash command 1526 | show_progress "Removing 99% similar parameters with bash command" 1527 | filtered_output="filtered_output.txt" 1528 | if [[ ! -f "${domain_name}-uro.txt" ]]; then 1529 | echo "File not found! Please check the path and try again." 1530 | exit 1 1531 | fi 1532 | awk -F'[?&]' '{gsub(/:80/, "", $1); base_url=$1; params=""; for (i=2; i<=NF; i++) {split($i, kv, "="); if (kv[1] != "id") {params = params kv[1]; if (i < NF) {params = params "&";}}} full_url=base_url"?"params; if (!seen[full_url]++) {print $0 > "'"$filtered_output"'";}}' "${domain_name}-uro.txt" 1533 | sleep 5 1534 | 1535 | # Display the number of URLs in the filtered output file 1536 | echo -e "${BOLD_WHITE}Total filtered URLs: ${RED}$(wc -l < "$filtered_output")${NC}" 1537 | sleep 3 1538 | 1539 | # Step 22: Removing old file 1540 | show_progress "Removing old file" 1541 | rm -r "${domain_name}-uro.txt" 1542 | sleep 3 1543 | 1544 | # Step 23: Rename to new file 1545 | show_progress "Rename to new file" 1546 | mv filtered_output.txt "${domain_name}-links.txt" 1547 | sleep 3 1548 | 1549 | # Step 24: Filtering ALIVE URLS 1550 | show_progress "Filtering ALIVE URLS" 1551 | python3 -m venv .venv 1552 | source .venv/bin/activate 1553 | subprober -f "${domain_name}-links.txt" -sc -ar -o "${domain_name}-links-alive.txt" -nc -mc 200,201,202,204,301,302,304,307,308,403,500,504,401,407 -c 20 || handle_error "subprober" 1554 | sleep 5 1555 | 1556 | # Step 25: Removing old file 1557 | show_progress "Removing old file" 1558 | rm -r ${domain_name}-links.txt 1559 | sleep 3 1560 | 1561 | # Step 26: Filtering valid URLS 1562 | show_progress "Filtering valid URLS" 1563 | grep -oP 'http[^\s]*' "${domain_name}-links-alive.txt" > ${domain_name}-links-valid.txt || handle_error "grep valid urls" 1564 | sleep 5 1565 | 1566 | # Step 27: Removing intermediate file and renaming final output 1567 | show_progress "Final cleanup and renaming" 1568 | rm -r ${domain_name}-links-alive.txt 1569 | mv ${domain_name}-links-valid.txt ${domain_name}-links.txt 1570 | sleep 3 1571 | 1572 | echo -e "${BOLD_BLUE}Filtering process completed successfully. 
Final output saved as ${domain_name}-links.txt.${NC}" 1573 | 1574 | # Automatically start step 6 after completing step 5 1575 | run_step_6 1576 | } 1577 | 1578 | # Function to run step 6 (HiddenParamFinder) 1579 | run_step_6() { 1580 | echo -e "${BOLD_WHITE}You selected: HiddenParamFinder for $domain_name${NC}" 1581 | 1582 | # Step 1: Preparing URLs with clean extensions 1583 | show_progress "Preparing URLs with clean extensions, created 2 files: arjun-urls.txt and output-php-links.txt" 1584 | 1585 | # Extract all URLs with specific extensions into arjun-urls.txt and output-php-links.txt 1586 | cat "${domain_name}-links.txt" | grep -E "\.php($|\s|\?|&|#|/|\.)|\.asp($|\s|\?|&|#|/|\.)|\.aspx($|\s|\?|&|#|/|\.)|\.cfm($|\s|\?|&|#|/|\.)|\.jsp($|\s|\?|&|#|/|\.)" | \ 1587 | awk '{print > "arjun-urls.txt"; print > "output-php-links.txt"}' 1588 | sleep 3 1589 | 1590 | # Step 2: Clean parameters from URLs in arjun-urls.txt 1591 | show_progress "Filtering and cleaning arjun-urls.txt to remove parameters and duplicates" 1592 | 1593 | # Clean parameters from URLs and save the cleaned version back to arjun-urls.txt 1594 | awk -F'?' '{print $1}' arjun-urls.txt | awk '!seen[$0]++' > temp_arjun_urls.txt 1595 | 1596 | # Replace arjun-urls.txt with the cleaned file 1597 | mv temp_arjun_urls.txt arjun-urls.txt 1598 | 1599 | show_progress "Completed cleaning arjun-urls.txt. All URLs are now clean, unique, and saved." 1600 | 1601 | 1602 | # Check if Arjun generated any files 1603 | if [ ! -s arjun-urls.txt ] && [ ! -s output-php-links.txt ]; then 1604 | echo -e "${RED}Arjun did not find any new links or did not create any files.${NC}" 1605 | echo -e "${BOLD_BLUE}Renaming ${domain_name}-links.txt to urls-ready.txt and continuing...${NC}" 1606 | mv "${domain_name}-links.txt" urls-ready.txt || handle_error "Renaming ${domain_name}-links.txt" 1607 | sleep 3 1608 | run_step_7 # Automatically proceed to step 7 1609 | return 1610 | fi 1611 | 1612 | echo -e "${BOLD_BLUE}URLs prepared successfully and files created.${NC}" 1613 | echo -e "${BOLD_BLUE}arjun-urls.txt and output-php-links.txt have been created.${NC}" 1614 | 1615 | # Step 2: Running Arjun on clean URLs if arjun-urls.txt is present 1616 | if [ -s arjun-urls.txt ]; then 1617 | show_progress "Running Arjun on clean URLs" 1618 | arjun -i arjun-urls.txt -oT arjun_output.txt -t 10 -w parametri.txt || handle_error "Arjun command" 1619 | 1620 | # Merge files and process .php links 1621 | if [ -f arjun-urls.txt ] || [ -f output-php-links.txt ] || [ -f arjun_output.txt ]; then 1622 | # Merge and extract only the base .php URLs, then remove duplicates 1623 | cat arjun-urls.txt output-php-links.txt arjun_output.txt 2>/dev/null | awk -F'?' '/\.php/ {print $1}' | sort -u > arjun-final.txt 1624 | 1625 | echo -e "${BOLD_BLUE}arjun-final.txt created successfully with merged and deduplicated links.${NC}" 1626 | else 1627 | echo -e "${YELLOW}No input files for merging. 
Skipping merge step.${NC}" 1628 | fi 1629 | 1630 | sleep 5 1631 | 1632 | # Count the number of new links discovered by Arjun 1633 | if [ -f arjun_output.txt ]; then 1634 | new_links_count=$(wc -l < arjun_output.txt) 1635 | echo -e "${BOLD_BLUE}Arjun has completed running on the clean URLs.${NC}" 1636 | echo -e "${BOLD_RED}Arjun discovered ${new_links_count} new links.${NC}" 1637 | echo -e "${CYAN}The new links discovered by Arjun are:${NC}" 1638 | cat arjun_output.txt 1639 | else 1640 | echo -e "${YELLOW}No output file was created by Arjun.${NC}" 1641 | fi 1642 | else 1643 | echo -e "${RED}No input file (arjun-urls.txt) found for Arjun.${NC}" 1644 | fi 1645 | 1646 | # Continue with other steps or clean up 1647 | show_progress "Cleaning up temporary files" 1648 | if [[ -f arjun-urls.txt || -f arjun_output.txt || -f output-php-links.txt ]]; then 1649 | [[ -f arjun-urls.txt ]] && rm -r arjun-urls.txt 1650 | [[ -f output-php-links.txt ]] && rm -r output-php-links.txt 1651 | sleep 3 1652 | else 1653 | echo -e "${RED}No Arjun files to remove.${NC}" 1654 | fi 1655 | 1656 | echo -e "${BOLD_BLUE}Files merged and cleanup completed. Final output saved as arjun-final.txt.${NC}" 1657 | 1658 | # Step 5: Creating a new file for XSS testing 1659 | if [ -f arjun-final.txt ]; then 1660 | show_progress "Creating a new file for XSS testing" 1661 | 1662 | # Ensure arjun-final.txt is added to urls-ready.txt 1663 | cat "${domain_name}-links.txt" arjun-final.txt > urls-ready1337.txt || handle_error "Creating XSS testing file" 1664 | rm -r "${domain_name}-links.txt" 1665 | mv urls-ready1337.txt "${domain_name}-links.txt" 1666 | sleep 3 1667 | mv "${domain_name}-links.txt" urls-ready.txt || handle_error "Renaming ${domain_name}-links.txt" 1668 | fi 1669 | 1670 | # Automatically start step 7 after completing step 6 1671 | run_step_7 1672 | } 1673 | 1674 | # Function to run step 7 (Getting ready for XSS & URLs with query strings) 1675 | run_step_7() { 1676 | echo -e "${BOLD_WHITE}You selected: Preparing for XSS Detection and Query String URL Analysis for $domain_name${NC}" 1677 | 1678 | # Step 1: Filtering URLs with query strings 1679 | show_progress "Filtering URLs with query strings" 1680 | grep '=' urls-ready.txt > "$domain_name-query.txt" 1681 | sleep 5 1682 | echo -e "${BOLD_BLUE}Filtering completed. Query URLs saved as ${domain_name}-query.txt.${NC}" 1683 | 1684 | # Step 2: Renaming the remaining URLs 1685 | show_progress "Renaming remaining URLs" 1686 | mv urls-ready.txt "$domain_name-ALL-links.txt" 1687 | sleep 3 1688 | echo -e "${BOLD_BLUE}All-links URLs saved as ${domain_name}-ALL-links.txt.${NC}" 1689 | 1690 | # Step 3: Analyzing and reducing the query URLs based on repeated parameters 1691 | show_progress "Analyzing query strings for repeated parameters" 1692 | 1693 | # Start the analysis in the background and get the process ID (PID) 1694 | (> ibro-xss.txt; > temp_param_names.txt; > temp_param_combinations.txt; while read -r url; do base_url=$(echo "$url" | cut -d'?' -f1); extension=$(echo "$base_url" | grep -oiE '\.php|\.asp|\.aspx|\.cfm|\.jsp'); if [[ -n "$extension" ]]; then echo "$url" >> ibro-xss.txt; else params=$(echo "$url" | grep -oE '\?.*' | tr '?' ' ' | tr '&' '\n'); param_names=$(echo "$params" | cut -d'=' -f1); full_param_string=$(echo "$url" | cut -d'?' -f2); if grep -qx "$full_param_string" temp_param_combinations.txt; then continue; else new_param_names=false; for param_name in $param_names; do if ! 
grep -qx "$param_name" temp_param_names.txt; then new_param_names=true; break; fi; done; if $new_param_names; then echo "$url" >> ibro-xss.txt; echo "$full_param_string" >> temp_param_combinations.txt; for param_name in $param_names; do echo "$param_name" >> temp_param_names.txt; done; fi; fi; fi; done < "${domain_name}-query.txt"; echo "Processed URLs with unique parameters: $(wc -l < ibro-xss.txt)") & 1695 | 1696 | # Save the process ID (PID) of the background task 1697 | analysis_pid=$! 1698 | 1699 | # Monitor the process in the background 1700 | while kill -0 $analysis_pid 2> /dev/null; do 1701 | echo -e "${BOLD_BLUE}Analysis tool is still running...⌛️${NC}" 1702 | sleep 30 # Check every 30 seconds 1703 | done 1704 | 1705 | # When finished 1706 | echo -e "${BOLD_GREEN}Analysis completed. $(wc -l < ibro-xss.txt) URLs with unique parameters have been saved.${NC}" 1707 | rm temp_param_names.txt temp_param_combinations.txt 1708 | sleep 3 1709 | 1710 | # Step 4: Cleanup and rename the output file 1711 | show_progress "Cleaning up intermediate files and setting final output" 1712 | rm -r "${domain_name}-query.txt" 1713 | mv ibro-xss.txt "${domain_name}-query.txt" 1714 | echo -e "${BOLD_BLUE}Cleaned up and renamed output to ${domain_name}-query.txt.${NC}" 1715 | sleep 3 1716 | 1720 | # Step 5: Filter the file ${domain_name}-query.txt using the specified awk command 1721 | show_progress "Filtering ${domain_name}-query.txt for unique and normalized URLs" 1722 | awk '{ gsub(/^https:/, "http:"); gsub(/^http:\/\/www\./, "http://"); if (!seen[$0]++) print }' "${domain_name}-query.txt" | tr -d '\r' > "${domain_name}-query1.txt" 1723 | 1724 | # Remove the old query file 1725 | rm -r "${domain_name}-query.txt" 1726 | 1727 | # Rename the filtered file to the original name 1728 | mv "${domain_name}-query1.txt" "${domain_name}-query.txt" 1729 | 1730 | # Count the number of URLs in the renamed file 1731 | url_count=$(wc -l < "${domain_name}-query.txt") 1732 | 1733 | # Final message with progress count 1734 | echo -e "${BOLD_BLUE}Cleaned up and renamed output to ${domain_name}-query.txt.${NC}" 1735 | echo -e "${BOLD_BLUE}Total URLs to be tested for Page Reflection: ${url_count}${NC}" 1736 | sleep 3 1737 | 1738 | # Add links from arjun_output.txt into ${domain_name}-query.txt 1739 | if [ -f "arjun_output.txt" ]; then 1740 | echo -e "${BOLD_WHITE}Adding links from arjun_output.txt into ${domain_name}-query.txt.${NC}" 1741 | cat arjun_output.txt >> "${domain_name}-query.txt" 1742 | echo -e "${BOLD_BLUE}Links from arjun_output.txt added to ${domain_name}-query.txt.${NC}" 1743 | else 1744 | echo -e "${YELLOW}No Arjun output links to add. 
Proceeding without additional links.${NC}" 1745 | fi 1746 | 1747 | # Extract unique subdomains and append search queries 1748 | echo -e "${BOLD_WHITE}Processing unique subdomains to append search queries...${NC}" 1749 | 1750 | # Define the list of search queries to append 1751 | search_queries=( 1752 | "search?q=aaa" 1753 | "?query=aaa" 1754 | "en-us/Search#/?search=aaa" 1755 | "Search/Results?q=aaa" 1756 | "q=aaa" 1757 | "search.php?query=aaa" 1758 | "en-us/search?q=aaa" 1759 | "s=aaa" 1760 | "find?q=aaa" 1761 | "result?q=aaa" 1762 | "query?q=aaa" 1763 | "search?term=aaa" 1764 | "search?query=aaa" 1765 | "search?keywords=aaa" 1766 | "search?text=aaa" 1767 | "search?word=aaa" 1768 | "find?query=aaa" 1769 | "result?query=aaa" 1770 | "search?input=aaa" 1771 | "search/results?query=aaa" 1772 | "search-results?q=aaa" 1773 | "search?keyword=aaa" 1774 | "results?query=aaa" 1775 | "search?search=aaa" 1776 | "search?searchTerm=aaa" 1777 | "search?searchQuery=aaa" 1778 | "search?searchKeyword=aaa" 1779 | "search.php?q=aaa" 1780 | "search/?query=aaa" 1781 | "search/?q=aaa" 1782 | "search/?search=aaa" 1783 | "search.aspx?q=aaa" 1784 | "search.aspx?query=aaa" 1785 | "search.asp?q=aaa" 1786 | "index.asp?id=aaa" 1787 | "dashboard.asp?user=aaa" 1788 | "blog/search/?query=aaa" 1789 | "pages/searchpage.aspx?id=aaa" 1790 | "search.action?q=aaa" 1791 | "search.json?q=aaa" 1792 | "search/index?q=aaa" 1793 | "lookup?q=aaa" 1794 | "browse?q=aaa" 1795 | "search-products?q=aaa" 1796 | "products/search?q=aaa" 1797 | "news?q=aaa" 1798 | "articles?q=aaa" 1799 | "content?q=aaa" 1800 | "explore?q=aaa" 1801 | "search/advanced?q=aaa" 1802 | "search-fulltext?q=aaa" 1803 | "products?query=aaa" 1804 | "search?product=aaa" 1805 | "catalog/search?q=aaa" 1806 | "store/search?q=aaa" 1807 | "shop?q=aaa" 1808 | "items?query=aaa" 1809 | "search?q=aaa&category=aaa" 1810 | "store/search?term=aaa" 1811 | "marketplace?q=aaa" 1812 | "blog/search?q=aaa" 1813 | "news?query=aaa" 1814 | "articles?search=aaa" 1815 | "topics?q=aaa" 1816 | "stories?q=aaa" 1817 | "newsfeed?q=" 1818 | "search-posts?q=aaa" 1819 | "blog/posts?q=aaa" 1820 | "search/article?q=aaa" 1821 | "api/search?q=aaa" 1822 | "en/search/explore?q=aaa" 1823 | "bs-latn-ba/Search/Results?q=aaa" 1824 | "en-us/marketplace/apps?search=aaa" 1825 | "search/node?keys=aaaa" 1826 | "v1/search?q=aaa" 1827 | "api/v1/search?q=aaa" 1828 | ) 1829 | 1830 | # Extract unique subdomains (normalize to remove protocol and www) 1831 | normalized_subdomains=$(awk -F/ '{print $1 "//" $3}' "${domain_name}-query.txt" | sed -E 's~(https?://)?(www\.)?~~' | sort -u) 1832 | 1833 | # Create a mapping of preferred protocols for unique subdomains 1834 | declare -A preferred_protocols 1835 | while read -r url; do 1836 | # Extract protocol, normalize subdomain 1837 | protocol=$(echo "$url" | grep -oE '^https?://') 1838 | subdomain=$(echo "$url" | sed -E 's~(https?://)?(www\.)?~~' | awk -F/ '{print $1}') 1839 | 1840 | # Set protocol preference: prioritize http over https 1841 | if [[ "$protocol" == "http://" ]]; then 1842 | preferred_protocols["$subdomain"]="http://" 1843 | elif [[ -z "${preferred_protocols["$subdomain"]}" ]]; then 1844 | preferred_protocols["$subdomain"]="https://" 1845 | fi 1846 | done < "${domain_name}-query.txt" 1847 | 1848 | # Create a new file for the appended URLs 1849 | append_file="${domain_name}-query-append.txt" 1850 | > "$append_file" 1851 | 1852 | # Append each search query to the preferred subdomains 1853 | for subdomain in $normalized_subdomains; do 1854 | 
protocol="${preferred_protocols[$subdomain]}" 1855 | for query in "${search_queries[@]}"; do 1856 | echo "${protocol}${subdomain}/${query}" >> "$append_file" 1857 | done 1858 | done 1859 | 1860 | # Combine the original file with the appended file 1861 | cat "${domain_name}-query.txt" "$append_file" > "${domain_name}-query-final.txt" 1862 | 1863 | # Replace the original file with the combined result 1864 | mv "${domain_name}-query-final.txt" "${domain_name}-query.txt" 1865 | 1866 | echo -e "${BOLD_BLUE}Appended URLs saved and combined into ${domain_name}-query.txt.${NC}" 1867 | 1868 | # Step 3: Checking page reflection on the URLs 1869 | if [ -f "reflection.py" ]; then 1870 | echo -e "${BOLD_WHITE}Checking page reflection on the URLs with command: python3 reflection.py ${domain_name}-query.txt --threads 2${NC}" 1871 | sudo python3 reflection.py "${domain_name}-query.txt" --threads 2 || handle_error "reflection.py execution" 1872 | sleep 5 1873 | 1874 | # Check if xss.txt is created after reflection.py 1875 | if [ -f "xss.txt" ]; then 1876 | # Check if xss.txt has any URLs (non-empty file) 1877 | total_urls=$(wc -l < xss.txt) 1878 | if [ "$total_urls" -eq 0 ]; then 1879 | # If no URLs were found, stop the tool 1880 | echo -e "\033[1;36mNo reflective URLs were identified. The process will terminate, and no further XSS testing will be conducted.\033[0m" 1881 | exit 0 1882 | else 1883 | echo -e "${BOLD_WHITE}Page reflection done! New file created: xss.txt${NC}" 1884 | 1885 | # Display the number of URLs affected by reflection 1886 | echo -e "${BOLD_WHITE}Total URLs reflected: ${RED}${total_urls}${NC}" 1887 | 1888 | # Filtering duplicate URLs 1889 | echo -e "${BOLD_BLUE}Filtering duplicate URLs...${NC}" 1890 | awk '{ gsub(/^https:/, "http:"); gsub(/^http:\/\/www\./, "http://"); if (!seen[$0]++) print }' "xss.txt" | tr -d '\r' > "xss1.txt" 1891 | sleep 3 1892 | 1893 | # Remove the original xss.txt file and the intermediate Arjun files 1894 | echo -e "${BOLD_BLUE}Removing xss.txt and the intermediate Arjun files...${NC}" 1895 | sudo rm -r xss.txt arjun_output.txt arjun-final.txt "${domain_name}-query-append.txt" 1896 | sleep 3 1897 | 1898 | # Removing 99% similar parameters with bash command 1899 | echo -e "${BOLD_BLUE}Removing 99% similar parameters...${NC}" 1900 | awk -F'[?&]' '{gsub(/:80/, "", $1); base_url=$1; domain=base_url; params=""; for (i=2; i<=NF; i++) {split($i, kv, "="); if (!seen[domain kv[1]]++) {params=params kv[1]; if (i < NF) {params=params "&";}}} if (params != "") {print $0 > "xss-urls.txt";}}' xss1.txt 1901 | sleep 5 1902 | 1903 | # Remove the intermediate xss1.txt file 1904 | echo -e "${BOLD_BLUE}Removing the intermediate xss1.txt file...${NC}" 1905 | sudo rm -r xss1.txt 1906 | sleep 3 1907 | 1908 | # Running URO for xss-urls.txt file 1909 | echo -e "${BOLD_BLUE}Running URO for xss-urls.txt file...${NC}" 1910 | uro -i xss-urls.txt -o xss-urls1337.txt 1911 | rm -r xss-urls.txt 1912 | mv xss-urls1337.txt xss-urls.txt 1913 | sleep 5 1914 | 1915 | # Final message with the total number of URLs in xss-urls.txt 1916 | total_urls=$(wc -l < xss-urls.txt) 1917 | echo -e "${BOLD_WHITE}New file is ready for XSS testing: xss-urls.txt with TOTAL URLs: ${total_urls}${NC}" 1918 | echo -e "${BOLD_WHITE}Initial Total Merged URLs in the beginning: ${RED}${total_merged_urls}${NC}" 1919 | echo -e "${BOLD_WHITE}Filtered Final URLs for XSS Testing: ${RED}${total_urls}${NC}" 1920 | 1921 | # Sorting URLs for xss0r: 1922 | echo -e "${BOLD_BLUE}Sorting valid format URLs for xss0r...${NC}" 1923 | awk '{sub("http://", "http://www."); sub("https://", "https://www."); print}' xss-urls.txt | sort -u > 
sorted-xss-urls.txt 1924 | rm -r xss-urls.txt 1925 | mv sorted-xss-urls.txt xss-urls.txt 1926 | sleep 5 1927 | 1928 | 1929 | # Automatically run the xss0r command after reflection step 1930 | ./xss0r --get --urls xss-urls.txt --payloads payloads.txt --shuffle --threads 10 --path || handle_error "Launching xss0r Tool" 1931 | fi 1932 | else 1933 | echo -e "${RED}xss.txt not found. No reflective URLs identified.${NC}" 1934 | echo -e "\033[1;36mNo reflective URLs were identified. The process will terminate, and no further XSS testing will be conducted.\033[0m" 1935 | exit 0 1936 | fi 1937 | else 1938 | echo -e "${RED}reflection.py not found in the current directory. Skipping page reflection step.${NC}" 1939 | fi 1940 | } 1941 | 1942 | # Function to run step 8 (Launching xss0r Tool) 1943 | run_step_8() { 1944 | echo -e "${BOLD_WHITE}You selected: Launching xss0r Tool for $domain_name${NC}" 1945 | 1946 | # Check if xss0r and xss-urls.txt files exist 1947 | if [ -f "xss0r" ] && [ -f "xss-urls.txt" ]; then 1948 | show_progress "Running xss0r for XSS vulnerabilities" 1949 | ./xss0r --get --urls xss-urls.txt --payloads payloads.txt --shuffle --threads 10 --path 1950 | if [[ $? -ne 0 ]]; then # Check if xss0r command failed 1951 | echo -e "${RED}The xss0r Tool encountered an error during execution.${NC}" 1952 | exit 1 1953 | fi 1954 | sleep 5 1955 | echo -e "${BOLD_BLUE}xss0r completed. Check the output files for results.${NC}" 1956 | else 1957 | # Custom error message when xss0r is missing 1958 | if [ ! -f "xss0r" ]; then 1959 | echo -e "${RED}The xss0r Tool is not present in the current directory.${NC}" 1960 | echo -e "${CYAN}Please ensure the xss0r tool is placed in the directory and run the script again.${NC}" 1961 | echo -e "${BOLD_WHITE}Alternatively, you can download or purchase the tool from store.xss0r.com. ${NC}" 1962 | echo -e "${BOLD_WHITE}After obtaining the tool, execute the xss0r to enter your API key, and then proceed with the xss0rRecon tool.${NC}" 1963 | fi 1964 | 1965 | # Check if xss-urls.txt file is missing 1966 | if [ ! -f "xss-urls.txt" ]; then 1967 | echo -e "${RED}The xss-urls.txt file is not present in the current directory. Please make sure the file is generated or placed in the directory and try again. Alternatively, you can download or purchase the tool from store.xss0r.com. 
After obtaining the tool, execute the xss0r to enter your API key, and then proceed with the xss0rRecon tool.${NC}" 1968 | fi 1969 | fi 1970 | } 1971 | 1972 | # Function for Path-based XSS 1973 | run_path_based_xss() { 1974 | echo -e "${BOLD_WHITE}You selected: Path-based XSS${NC}" 1975 | 1976 | # Check if any *-ALL-links.txt files are available 1977 | available_files=$(ls *-ALL-links.txt 2>/dev/null) 1978 | 1979 | # If no files are found, display a message and return 1980 | if [ -z "$available_files" ]; then 1981 | echo -e "${RED}No *-ALL-links.txt files found.${NC}" 1982 | echo -e "${BOLD_WHITE}Please start scanning your domain from step 2.${NC}" 1983 | echo -e "${BOLD_WHITE}After completing the crawling and filtering processes, a file for Path-based XSS (${domain_name}-ALL-links.txt) will be generated.${NC}" 1984 | return 1985 | fi 1986 | 1987 | # List available domain files if found 1988 | echo -e "${BOLD_WHITE}Available domain files:${NC}" 1989 | echo "$available_files" 1990 | 1991 | # Prompt the user to enter the domain name (without the -ALL-links.txt part) 1992 | read -p "Please enter the domain name (just the base, without '-ALL-links.txt'): " domain_name 1993 | 1994 | # Debugging output to check if domain_name is correctly set 1995 | echo "Debug: The domain name is set to '${domain_name}'" 1996 | 1997 | # Check if the required file exists 1998 | if [ ! -f "${domain_name}-ALL-links.txt" ]; then 1999 | echo -e "${CYAN}Error: There is no file available for scanning path-based XSS.${NC}" 2000 | echo -e "${CYAN}It appears that the necessary file, ${domain_name}-ALL-links.txt, has not been generated.${NC}" 2001 | echo -e "${BOLD_WHITE}This file is created after completing the crawling and filtering processes.${NC}" 2002 | echo -e "${BOLD_WHITE}Please return to Option 2 and follow the full process, including crawling and URL filtering.${NC}" 2003 | return 2004 | fi 2005 | 2006 | # Function to count and display the number of URLs after filtering 2007 | count_urls() { 2008 | local file=$1 2009 | local message=$2 2010 | local count=$(sudo wc -l < "$file") 2011 | echo -e "${CYAN}${message} After filtering, the number of URLs is: ${RED}${count}${NC}" 2012 | } 2013 | 2014 | # Step 0: Initial count of URLs in the main target file 2015 | show_progress "Analyzing the initial number of URLs in ${domain_name}-ALL-links.txt..." 2016 | count_urls "${domain_name}-ALL-links.txt" "Initial URL count before filtration." 2017 | 2018 | # Step 1: Filtering duplicate URLs 2019 | show_progress "Filtering duplicate URLs..." 2020 | sudo awk '{ gsub(/^https:/, "http:"); gsub(/^http:\/\/www\./, "http://"); if (!seen[$0]++) print }' "${domain_name}-ALL-links.txt" | sudo tr -d '\r' > "path1.txt" 2021 | sleep 3 2022 | count_urls "path1.txt" "Duplicate URLs filtered successfully." 2023 | 2024 | # Step 1.1: Filtering similar URLs with the same base path 2025 | show_progress "Filtering similar URLs with similar base paths..." 2026 | awk -F'/' '{base_path=$1"/"$2"/"$3"/"$4"/"$5"/"$6; if (!seen_base[base_path]++) print $0}' path1.txt > path1-filtered.txt 2027 | sleep 3 2028 | count_urls "path1-filtered.txt" "Similar URLs with the same base path filtered." 2029 | 2030 | # Step 2: Removing 99% similar parameters 2031 | show_progress "Removing 99% similar parameters..." 
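# The awk one-liner below keeps the first URL seen for every (base URL, parameter
# name) pair. A simplified standalone sketch of the same idea, keyed on the base
# URL plus the ordered parameter names (in.txt/out.txt are placeholder names):
#
#   awk -F'[?&]' '{k=$1; for (i=2; i<=NF; i++) {split($i, kv, "="); k=k "|" kv[1]}} !seen[k]++' in.txt > out.txt
#
# e.g. /page?id=1&s=2 and /page?id=9&s=8 collapse into a single surviving entry.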
2032 | awk -F'[?&]' '{gsub(/:80/, "", $1); base_url=$1; domain=base_url; params=""; for (i=2; i<=NF; i++) {split($i, kv, "="); if (!seen[domain kv[1]]++) {params=params kv[1]; if (i < NF) {params=params "&";}}} if (params != "") {print $0 > "path3.txt";}}' path1-filtered.txt 2033 | sleep 5 2034 | count_urls "path3.txt" "Parameters processed and URLs filtered." 2035 | 2036 | # Step 3: Including all domains from the URLs without filtering 2037 | show_progress "Including all domains from the URLs..." 2038 | cat "path3.txt" > "path4.txt" 2039 | sleep 3 2040 | count_urls "path4.txt" "All domains included successfully." 2041 | 2042 | # Step 4: Filtering extensions from the URLs 2043 | show_progress "Filtering extensions from the URLs..." 2044 | cat path4.txt | sudo grep -E -v '\.css($|\s|\?|&|#|/|\.)|\.jpg($|\s|\?|&|#|/|\.)|\.JPG($|\s|\?|&|#|/|\.)|\.PNG($|\s|\?|&|#|/|\.)|\.GIF($|\s|\?|&|#|/|\.)|\.avi($|\s|\?|&|#|/|\.)|\.dll($|\s|\?|&|#|/|\.)|\.pl($|\s|\?|&|#|/|\.)|\.webm($|\s|\?|&|#|/|\.)|\.c($|\s|\?|&|#|/|\.)|\.py($|\s|\?|&|#|/|\.)|\.bat($|\s|\?|&|#|/|\.)|\.tar($|\s|\?|&|#|/|\.)|\.swp($|\s|\?|&|#|/|\.)|\.tmp($|\s|\?|&|#|/|\.)|\.sh($|\s|\?|&|#|/|\.)|\.deb($|\s|\?|&|#|/|\.)|\.exe($|\s|\?|&|#|/|\.)|\.zip($|\s|\?|&|#|/|\.)|\.mpeg($|\s|\?|&|#|/|\.)|\.mpg($|\s|\?|&|#|/|\.)|\.flv($|\s|\?|&|#|/|\.)|\.wmv($|\s|\?|&|#|/|\.)|\.wma($|\s|\?|&|#|/|\.)|\.aac($|\s|\?|&|#|/|\.)|\.m4a($|\s|\?|&|#|/|\.)|\.ogg($|\s|\?|&|#|/|\.)|\.mp4($|\s|\?|&|#|/|\.)|\.mp3($|\s|\?|&|#|/|\.)|\.bat($|\s|\?|&|#|/|\.)|\.dat($|\s|\?|&|#|/|\.)|\.cfg($|\s|\?|&|#|/|\.)|\.cfm($|\s|\?|&|#|/|\.)|\.bin($|\s|\?|&|#|/|\.)|\.jpeg($|\s|\?|&|#|/|\.)|\.JPEG($|\s|\?|&|#|/|\.)|\.ps.gz($|\s|\?|&|#|/|\.)|\.gz($|\s|\?|&|#|/|\.)|\.gif($|\s|\?|&|#|/|\.)|\.tif($|\s|\?|&|#|/|\.)|\.tiff($|\s|\?|&|#|/|\.)|\.csv($|\s|\?|&|#|/|\.)|\.png($|\s|\?|&|#|/|\.)|\.ttf($|\s|\?|&|#|/|\.)|\.ppt($|\s|\?|&|#|/|\.)|\.pptx($|\s|\?|&|#|/|\.)|\.ppsx($|\s|\?|&|#|/|\.)|\.doc($|\s|\?|&|#|/|\.)|\.woff($|\s|\?|&|#|/|\.)|\.xlsx($|\s|\?|&|#|/|\.)|\.xls($|\s|\?|&|#|/|\.)|\.mpp($|\s|\?|&|#|/|\.)|\.mdb($|\s|\?|&|#|/|\.)|\.json($|\s|\?|&|#|/|\.)|\.woff2($|\s|\?|&|#|/|\.)|\.icon($|\s|\?|&|#|/|\.)|\.pdf($|\s|\?|&|#|/|\.)|\.docx($|\s|\?|&|#|/|\.)|\.svg($|\s|\?|&|#|/|\.)|\.txt($|\s|\?|&|#|/|\.)|\.jar($|\s|\?|&|#|/|\.)|\.0($|\s|\?|&|#|/|\.)|\.1($|\s|\?|&|#|/|\.)|\.2($|\s|\?|&|#|/|\.)|\.3($|\s|\?|&|#|/|\.)|\.4($|\s|\?|&|#|/|\.)|\.m4r($|\s|\?|&|#|/|\.)|\.kml($|\s|\?|&|#|/|\.)|\.pro($|\s|\?|&|#|/|\.)|\.yao($|\s|\?|&|#|/|\.)|\.gcn3($|\s|\?|&|#|/|\.)|\.PDF($|\s|\?|&|#|/|\.)|\.egy($|\s|\?|&|#|/|\.)|\.par($|\s|\?|&|#|/|\.)|\.lin($|\s|\?|&|#|/|\.)|\.yht($|\s|\?|&|#|/|\.)' > path5.txt 2045 | sleep 5 2046 | count_urls "path5.txt" "Extensions filtered and URLs cleaned." 2047 | 2048 | # Step 5: Running URO tool again to filter duplicate and similar URLs 2049 | show_progress "Running URO tool again to filter duplicate and similar URLs..." 2050 | uro -i path5.txt -o path6.txt & 2051 | uro_pid_clean=$! 2052 | 2053 | # Monitor the URO process 2054 | while kill -0 $uro_pid_clean 2> /dev/null; do 2055 | show_progress "URO tool is still running for clean URLs...⌛" 2056 | sleep 30 # Check every 30 seconds 2057 | done 2058 | 2059 | # Final message after URO processing completes 2060 | show_progress "URO processing completed. Files created successfully." 2061 | count_urls "path6.txt" "Final cleaned URLs after URO filtering." 2062 | 2063 | # Step 6: Deleting all previous files except the last one (path6.txt) 2064 | show_progress "Deleting all intermediate files..." 
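# Aside: the 'kill -0' polling loop above is mainly a progress-message pattern;
# for a single background job, the shell builtin 'wait' is a simpler equivalent:
#
#   uro -i path5.txt -o path6.txt &
#   wait $!    # blocks until uro exits, no polling loop needed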
2065 | rm -f path1.txt path1-filtered.txt path3.txt path4.txt path5.txt ${domain_name}-unique-links.txt 2066 | 2067 | # Step 7: Renaming path6.txt to path-ready.txt 2068 | show_progress "Renaming path6.txt to path-ready.txt..." 2069 | mv path6.txt path-ready.txt 2070 | 2071 | # Step 8: Final message with the new file 2072 | echo -e "${CYAN}New file created: path-ready.txt for path-based XSS.${NC}" 2073 | 2074 | # Step 9: Running Python script for reflection checks 2075 | show_progress "Running Python script for reflection checks on filtered URLs..." 2076 | sudo python3 path-reflection.py path-ready.txt --threads 2 2077 | 2078 | # Step 9.1: Checking if the new file is generated 2079 | if [ -f path-xss-urls.txt ]; then 2080 | echo -e "${CYAN}New file generated: path-xss-urls.txt.${NC}" 2081 | count_urls "path-xss-urls.txt" "Final URL count in path-xss-urls.txt after Python processing." 2082 | else 2083 | echo -e "${RED}Error: path-xss-urls.txt was not generated! Please check the Python script.${NC}" 2084 | fi 2085 | 2086 | # Run the URL processing function 2087 | process_urls 2088 | 2089 | # Remove duplicate entries and normalize slashes in the output file, 2090 | # ensuring the protocol part (https:// or http://) is not affected 2091 | sort "$output_file" | sudo uniq | sudo sed -E 's|(https?://)|\1|; s|//|/|g' | sudo sed 's|:/|://|g' > "$output_file.tmp" && sudo mv "$output_file.tmp" "$output_file" 2092 | 2093 | # Final message for processed URLs 2094 | echo -e "${CYAN}Processed URLs have been saved to $output_file.${NC}" 2095 | 2096 | # Step 11: Deleting intermediate files 2097 | show_progress "Deleting intermediate files path-ready.txt and path-xss.txt..." 2098 | rm -f path-ready.txt path-xss.txt 2099 | 2100 | echo -e "${CYAN}Intermediate files deleted. Final output is $output_file.${NC}" 2101 | 2102 | # Step 12: Launch the xss0r tool for path-based XSS testing 2103 | echo -e "${BOLD_BLUE}Launching the xss0r tool on path-xss-urls.txt...${NC}" 2104 | ./xss0r --get --urls path-xss-urls.txt --payloads payloads.txt --shuffle --threads 10 --path 2105 | if [[ $? -ne 0 ]]; then 2106 | echo -e "${RED}The xss0r tool encountered an error during execution.${NC}" 2107 | exit 1 2108 | else 2109 | echo -e "${BOLD_GREEN}xss0r tool executed successfully! Check the output for results.${NC}" 2110 | fi 2111 | } 2112 | 2113 | # Function to handle script interruption 2114 | trap_interrupt() { 2115 | echo -e "\n${RED}Script interrupted. 
# Function for Domains Search Input with Query Appending
run_domains_search_input() {
    echo -e "${BOLD_WHITE}You selected: Domains Search Input with Query Appending${NC}"

    # Define search queries
    domains_queries=(
        "search?q=aaa"
        "?query=aaa"
        "en-us/Search#/?search=aaa"
        "Search/Results?q=aaa"
        "q=aaa"
        "search.php?query=aaa"
        "en-us/search?q=aaa"
        "s=aaa"
        "find?q=aaa"
        "result?q=aaa"
        "query?q=aaa"
        "search?term=aaa"
        "search?query=aaa"
        "search?keywords=aaa"
        "search?text=aaa"
        "search?word=aaa"
        "find?query=aaa"
        "result?query=aaa"
        "search?input=aaa"
        "search/results?query=aaa"
        "search-results?q=aaa"
        "search?keyword=aaa"
        "results?query=aaa"
        "search?search=aaa"
        "search?searchTerm=aaa"
        "search?searchQuery=aaa"
        "search?searchKeyword=aaa"
        "search.php?q=aaa"
        "search/?query=aaa"
        "search/?q=aaa"
        "search/?search=aaa"
        "search.aspx?q=aaa"
        "search.aspx?query=aaa"
        "search.asp?q=aaa"
        "index.asp?id=aaa"
        "dashboard.asp?user=aaa"
        "blog/search/?query=aaa"
        "pages/searchpage.aspx?id=aaa"
        "search.action?q=aaa"
        "search.json?q=aaa"
        "search/index?q=aaa"
        "lookup?q=aaa"
        "browse?q=aaa"
        "search-products?q=aaa"
        "products/search?q=aaa"
        "news?q=aaa"
        "articles?q=aaa"
        "content?q=aaa"
        "explore?q=aaa"
        "search/advanced?q=aaa"
        "search-fulltext?q=aaa"
        "products?query=aaa"
        "search?product=aaa"
        "catalog/search?q=aaa"
        "store/search?q=aaa"
        "shop?q=aaa"
        "items?query=aaa"
        "search?q=aaa&category=aaa"
        "store/search?term=aaa"
        "marketplace?q=aaa"
        "blog/search?q=aaa"
        "news?query=aaa"
        "articles?search=aaa"
        "topics?q=aaa"
        "stories?q=aaa"
        "newsfeed?q=aaa"
        "search-posts?q=aaa"
        "blog/posts?q=aaa"
        "search/article?q=aaa"
        "/api/search?q=aaa"
        "en/search/explore?q=aaa"
        "bs-latn-ba/Search/Results?q=aaa"
        "en-us/marketplace/apps?search=aaa"
        "v1/search?q=aaa"
        "search/node?keys=aaaa"
        "api/v1/search?q=aaa"
    )

    normalize_domain() {
        local domain="$1"
        # Lowercase and strip any scheme or leading www. before re-prefixing http://
        domain=$(echo "$domain" | tr '[:upper:]' '[:lower:]' | sed -e 's/^http:\/\///' -e 's/^https:\/\///' -e 's/^www\.//')
        echo "http://$domain"
    }

    append_and_save() {
        local domain="$1"
        local output_file="$2"
        normalized_domain=$(normalize_domain "$domain")
        for query in "${domains_queries[@]}"; do
            if [[ $query == /* ]]; then
                echo "$normalized_domain$query" >> "$output_file"
            else
                echo "$normalized_domain/$query" >> "$output_file"
            fi
        done
    }
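    # Usage sketch (hypothetical input): append_and_save "HTTPS://WWW.Example.COM" out.txt
    # normalizes the domain to http://example.com and writes one line per
    # template, e.g. http://example.com/search?q=aaa; templates that already
    # begin with "/" (like /api/search?q=aaa) are appended without an extra slash.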
    # Prompt for domains file
    read -p "Enter the path to your domains .txt file: " domains_file
    if [[ ! -f $domains_file ]]; then
        echo -e "${RED}The file does not exist - please use your domains file from step 3.${NC}"
        return
    fi

    # Prepare output file
    output_file="appended-domains.txt"
    > "$output_file"

    echo -e "${BOLD_BLUE}Processing domains from $domains_file and appending queries...${NC}"

    # Process each domain and append queries
    while IFS= read -r domain || [[ -n "$domain" ]]; do
        append_and_save "$domain" "$output_file"
    done < "$domains_file"

    echo -e "${BOLD_GREEN}All domains appended with queries and saved to $output_file.${NC}"

    # Run the reflection.py script
    reflection_script="reflection.py"
    if [[ -f $reflection_script ]]; then
        echo -e "${BOLD_BLUE}Formatting URLs in $output_file to http://www format...${NC}"

        # Preprocess $output_file so every URL uses the http://www. prefix
        # (normalize_domain has already stripped any existing www.)
        temp_file="formatted_$output_file"
        awk -F'://' '{print "http://www." $2}' "$output_file" > "$temp_file"

        # Replace the original file with the formatted version
        mv "$temp_file" "$output_file"

        echo -e "${BOLD_GREEN}URLs formatted successfully.${NC}"
        echo -e "${BOLD_BLUE}Running reflection.py on $output_file...${NC}"
        sudo python3 "$reflection_script" "$output_file" --threads 3
        echo -e "${BOLD_GREEN}Reflection check done; reflecting URLs saved to xss.txt.${NC}"

        # Run the xss0r command
        if [[ -x ./xss0r ]]; then
            echo -e "${BOLD_BLUE}Running xss0r Tool:${NC}"
            ./xss0r --get --urls xss.txt --payloads payloads.txt --shuffle --threads 10
        else
            echo -e "${RED}xss0r executable not found in the current directory.${NC}"
        fi
    else
        echo -e "${RED}Reflection script $reflection_script not found.${NC}"
    fi
}

while true; do
    # Display options
    display_options
    read -p "Enter your choice [1-12]: " choice

    # Enforce running Options 2-8 in order (Option 4 is exempt): an option may
    # only run once every lower-numbered option has completed.
    if [[ $choice -ge 2 && $choice -le 8 && $choice -ne 4 ]]; then
        if [[ $choice -gt $((last_completed_option + 1)) ]]; then
            echo -e "${RED}Please run Options 1-8 in order, one by one; you can't skip earlier options.${NC}"
            continue
        fi
    fi
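    # Example: with last_completed_option=3, choosing 5 is rejected (5 > 3+1),
    # while choosing 4 always passes because it is exempt from the check above.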
    case $choice in
        1)
            install_tools
            last_completed_option=1
            ;;
        2)
            read -p "Please enter a domain name (example.com): " domain_name
            echo -e "${BOLD_WHITE}You selected: Domain name set to $domain_name${NC}"
            last_completed_option=2

            # Automatically proceed to Step 3 after setting the domain name
            read -p "$(echo -e "${BOLD_WHITE}Do you want to proceed with domain enumeration and filtering for $domain_name (Y/N)?: ${NC}")" proceed_to_step_3
            if [[ "$proceed_to_step_3" =~ ^[Yy]$ ]]; then
                echo -e "${BOLD_BLUE}Automatically continuing with step 3: Domain Enumeration and Filtering for $domain_name...${NC}"
                run_step_3
                last_completed_option=3
            else
                echo -e "${BOLD_WHITE}You can manually start Step 3 whenever you are ready.${NC}"
            fi
            ;;
        3)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_3
                last_completed_option=3
            fi
            ;;
        4)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_4
                last_completed_option=4
            fi
            ;;
        5)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_5
                last_completed_option=5
            fi
            ;;
        6)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_6
                last_completed_option=6
            fi
            ;;
        7)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_7
                last_completed_option=7
            fi
            ;;
        8)
            if [ -z "$domain_name" ]; then
                echo "Domain name is not set. Please select option 2 to set the domain name."
            else
                run_step_8
                last_completed_option=8
            fi
            ;;
        9)
            echo "Exiting script."
            exit 0
            ;;
        10)
            echo -e "${BOLD_WHITE}You selected: Guide to Deploying xss0r on VPS Servers${NC}"
            show_vps_info
            ;;
        11) # Execute Path-based XSS
            run_path_based_xss
            last_completed_option=11
            ;;
        12) # Domains Search Input
            run_domains_search_input
            last_completed_option=12
            ;;
        *)
            echo "Invalid option. Please select a number between 1 and 12."
            ;;
    esac
done
--------------------------------------------------------------------------------