├── README.md
├── alienvault.sh
├── dorking.py
├── lostfuzzer.sh
├── punycode_gen.py
├── urlscan.py
├── virustotal.sh
└── wayback.sh

/README.md:
--------------------------------------------------------------------------------
⚠️ **Disclaimer**: The content in this repository is for educational and informational purposes only; the authors hold no responsibility for misuse. Ensure proper authorization before use, act responsibly at your own risk, and comply with all legal and ethical guidelines.
--------------------------------------------------------------------------------
/alienvault.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Check if jq is installed.
if ! command -v jq &>/dev/null; then
    echo "jq is required but not installed. Please install jq and rerun this script."
    exit 1
fi

# Check if a domain is provided as an argument.
if [ -z "$1" ]; then
    echo "Usage: $0 <domain>"
    exit 1
fi

# Get domain from argument.
domain=$1

# Set initial pagination parameters.
page=1
limit=500

echo "Fetching URLs from AlienVault OTX for domain: $domain"

while true; do
    echo "Fetching page $page..."

    # Retrieve the JSON response for the current page.
    response=$(curl -s "https://otx.alienvault.com/api/v1/indicators/hostname/${domain}/url_list?limit=${limit}&page=${page}")

    # Extract URLs from the JSON response.
    urls=$(echo "$response" | jq -r '.url_list[]?.url')

    # If no URLs were returned, break the loop.
    if [[ -z "$urls" ]]; then
        echo "No more URLs found on page $page. Finishing."
        break
    fi

    # Print the retrieved URLs.
    echo "$urls"

    # Count how many URLs were returned on this page.
    count=$(echo "$response" | jq -r '.url_list | length')
    echo "Found $count URL(s) on page $page."

    # If fewer URLs than the limit were returned, assume it's the last page.
    if (( count < limit )); then
        echo "Reached the last page."
        break
    fi

    # Increment page number for the next iteration.
    page=$((page + 1))
done

echo "Done fetching URLs."
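# Example invocation (illustrative sketch; example.com and the output
# filename are placeholders, not part of the original script):
#   ./alienvault.sh example.com > otx_urls.txt
# The OTX url_list endpoint is queried without an API key here, so heavy
# use may be rate-limited.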
--------------------------------------------------------------------------------
/dorking.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from __future__ import print_function
import sys

# Attempt to import the googlesearch module
try:
    from googlesearch import search
except ImportError:
    print("\033[91m[ERROR] Missing dependency: googlesearch-python\033[0m")
    print("\033[93m[INFO] Install it using: pip install googlesearch-python\033[0m")
    sys.exit(1)

# Check for Python version
if sys.version_info[0] < 3:
    print("\n\033[91m[ERROR] This script requires Python 3.x\033[0m\n")
    sys.exit(1)

# ANSI color codes for styling output
class Colors:
    RED = "\033[91m"
    BLUE = "\033[94m"
    GREEN = "\033[92m"
    YELLOW = "\033[93m"
    RESET = "\033[0m"

# Default output filename
log_file = "dorks_output.txt"

def logger(data):
    """Logs data to a file."""
    with open(log_file, "a", encoding="utf-8") as file:
        file.write(data + "\n")

def dorks():
    """Main function for handling Google Dorking."""
    global log_file
    try:
        dork = input(f"{Colors.BLUE}\n[+] Enter The Dork Search Query: {Colors.RESET}")

        user_choice = input(f"{Colors.BLUE}[+] Enter Total Number of Results You Want (or type 'all' to fetch everything): {Colors.RESET}").strip().lower()

        if user_choice == "all":
            total_results = float("inf")
        else:
            try:
                total_results = int(user_choice)
                if total_results <= 0:
                    raise ValueError("Number must be greater than zero.")
            except ValueError:
                print(f"{Colors.RED}[ERROR] Invalid number entered! Please enter a positive integer or 'all'.{Colors.RESET}")
                return

        save_output = input(f"{Colors.BLUE}\n[+] Do You Want to Save the Output? (Y/N): {Colors.RESET}").strip().lower()
        if save_output == "y":
            log_file = input(f"{Colors.BLUE}[+] Enter Output Filename: {Colors.RESET}").strip()
            if not log_file:
                log_file = "dorks_output.txt"
            if not log_file.endswith(".txt"):
                log_file += ".txt"

        print(f"\n{Colors.GREEN}[INFO] Searching... Please wait...{Colors.RESET}\n")

        fetched = 0

        for result in search(dork):
            if fetched >= total_results:
                break
            print(f"{Colors.YELLOW}[+] {Colors.RESET}{result}")

            if save_output == "y":
                logger(result)

            fetched += 1

    except KeyboardInterrupt:
        print(f"\n{Colors.RED}[!] User Interruption Detected! Exiting...{Colors.RESET}\n")
        sys.exit(1)
    except Exception as e:
        print(f"{Colors.RED}[ERROR] {str(e)}{Colors.RESET}")

    print(f"{Colors.GREEN}\n[✔] Automation Done.{Colors.RESET}")
    sys.exit()

if __name__ == "__main__":
    dorks()
--------------------------------------------------------------------------------
/lostfuzzer.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Colors
RED='\033[91m'
GREEN='\033[92m'
YELLOW='\033[93m'
RESET='\033[0m'

# ASCII Banner
echo -e "${RED}"
cat << "EOF"
______ _____________
___ /______________ /___ __/___ _________________________
__ /_ __ \_ ___/ __/_ /_ _ / / /__ /__ /_ _ \_ ___/
_ / / /_/ /(__ )/ /_ _ __/ / /_/ /__ /__ /_/ __/ /
_/ \____//____/ \__/ /_/ \__,_/ _____/____/\___//_/

by ~/.coffinxp@lostsec
EOF
echo -e "${RESET}"

# ===== Functions =====

usage() {
    echo -e "${YELLOW}Usage: $0 -d domain.com | -l subdomains.txt [-t threads]${RESET}"
    exit 1
}

check_tools() {
    REQUIRED_TOOLS=("gau" "uro" "httpx-toolkit" "nuclei")
    for tool in "${REQUIRED_TOOLS[@]}"; do
        if ! command -v "$tool" &>/dev/null; then
            echo -e "${RED}[ERROR] $tool is not installed. Please install it and try again.${RESET}"
            exit 1
        fi
    done
}

summary() {
    echo -e "\n${GREEN}===== SUMMARY =====${RESET}"
    echo "Total URLs fetched: $(wc -l < "$GAU_FILE" 2>/dev/null || echo 0)"
    echo "URLs with params: $(wc -l < "$FILTERED_URLS_FILE" 2>/dev/null || echo 0)"
    echo "Live URLs: $(wc -l < "$LIVE_URLS" 2>/dev/null || echo 0)"
    echo "Vulnerabilities: $(wc -l < "$NUCLEI_RESULTS" 2>/dev/null || echo 0)"
    echo "Results saved in: $OUTPUT_DIR/"
    echo "============================="
}

# ===== Argument Parsing =====
DOMAIN=""
LIST=""
THREADS=10

while getopts "d:l:t:" opt; do
    case "$opt" in
        d) DOMAIN=$OPTARG ;;
        l) LIST=$OPTARG ;;
        t) THREADS=$OPTARG ;;
        *) usage ;;
    esac
done

if [ -z "$DOMAIN" ] && [ -z "$LIST" ]; then
    usage
fi

check_tools

# ===== Setup =====
OUTPUT_DIR="results_$(date +%F_%H-%M-%S)"
mkdir -p "$OUTPUT_DIR"

GAU_FILE="$OUTPUT_DIR/gau_urls.txt"
FILTERED_URLS_FILE="$OUTPUT_DIR/filtered_urls.txt"
LIVE_URLS="$OUTPUT_DIR/live_urls.txt"
NUCLEI_RESULTS="$OUTPUT_DIR/nuclei_results.txt"

trap 'rm -f "$GAU_FILE.tmp"' EXIT

# ===== Collect Targets =====
if [ -n "$DOMAIN" ]; then
    TARGETS="$DOMAIN"
elif [ -f "$LIST" ]; then
    TARGETS=$(cat "$LIST")
else
    echo -e "${RED}[ERROR] List file not found.${RESET}"
    exit 1
fi

# Strip protocols
TARGETS=$(echo "$TARGETS" | sed 's|https\?://||g')

# ===== Step 1: Fetch URLs =====
echo -e "${GREEN}[INFO] Fetching URLs with gau...${RESET}"
echo "$TARGETS" | xargs -P"$THREADS" -I{} gau "{}" >> "$GAU_FILE"

if [ ! -s "$GAU_FILE" ]; then
    echo -e "${RED}[ERROR] No URLs found. Exiting.${RESET}"
    exit 1
fi

# ===== Step 2: Filter with params =====
echo -e "${GREEN}[INFO] Filtering URLs with parameters...${RESET}"
grep -E '\?[^=]+=.+$' "$GAU_FILE" | uro | awk '!seen[$0]++' > "$FILTERED_URLS_FILE"

# ===== Step 3: Live check =====
echo -e "${GREEN}[INFO] Checking live URLs...${RESET}"
httpx-toolkit -silent -t 300 -rl 200 < "$FILTERED_URLS_FILE" > "$LIVE_URLS"

# ===== Step 4: Run nuclei =====
echo -e "${GREEN}[INFO] Running nuclei scan...${RESET}"
nuclei -dast -retries 2 -silent -o "$NUCLEI_RESULTS" < "$LIVE_URLS"

# ===== Final Summary =====
summary
--------------------------------------------------------------------------------
/punycode_gen.py:
--------------------------------------------------------------------------------
def encode_punycode(char):
    try:
        return char.encode('idna').decode('ascii')
    except Exception:
        try:
            return 'xn--' + char.encode('punycode').decode('ascii')
        except Exception:
            return None

homoglyphs_map = {
    'a': ['à','á','â','ã','ä','å','ɑ','А','Α','Ꭺ','A','𝔄','𝕬','𝒜','𝐀','𝐴','𝘈','𝙰','𝖠','𝗔','𝘼','𝚨','𝑨','ⓐ','Ⓐ','🅐','🅰','𝔞','𝖆','𝒶','𝗮','𝘢'],
    'b': ['Ь','Ꮟ','Ƅ','ᖯ','𝐛','𝑏','𝒃','𝓫','𝔟','𝕓','𝖇','𝗯','𝘣','𝙗','𝚋'],
    'c': ['ϲ','с','ƈ','ȼ','ḉ','ⲥ','𝐜','𝑐','𝒄','𝓬','𝔠','𝕔','𝖈','𝗰','𝘤','𝙘','𝚌'],
    'd': ['ԁ','ժ','Ꮷ','𝐝','𝑑','𝒅','𝓭','𝔡','𝕕','𝖉','𝗱','𝘥','𝙙','𝚍'],
    'e': ['е','ҽ','℮','ḛ','ḝ','ẹ','é','è','ê','ë','ē','ė','ę','𝐞','𝑒','𝒆','𝓮','𝔢','𝕖','𝖊','𝗲','𝘦','𝙚','𝚎'],
    'f': ['ғ','𝐟','𝑓','𝒇','𝓯','𝔣','𝕗','𝖋','𝗳','𝘧','𝙛','𝚏'],
    'g': ['ɡ','ց','𝐠','𝑔','𝒈','𝓰','𝔤','𝕘','𝖌','𝗴','𝘨','𝙜','𝚐'],
    'h': ['һ','հ','Ꮒ','ℎ','𝐡','𝒉','𝒽','𝓱','𝔥','𝕙','𝖍','𝗵','𝘩','𝙝','𝚑'],
    'i': ['і','ɩ','Ꭵ','Ⅰ','ı','í','ì','î','ï','ī','į','𝐢','𝑖','𝒊','𝓲','𝔦','𝕚','𝖎','𝗶','𝘪','𝙞','𝚒'],
    'j': ['ј','ʝ','ϳ','𝐣','𝑗','𝒋','𝓳','𝔧','𝕛','𝖏','𝗷','𝘫','𝙟','𝚓'],
    'k': ['κ','𝐤','𝑘','𝒌','𝓴','𝔨','𝕜','𝖐','𝗸','𝘬','𝙠','𝚔'],
    'l': ['ⅼ','ӏ','Ɩ','ʟ','𝐥','𝑙','𝒍','𝓵','𝔩','𝕝','𝖑','𝗹','𝘭','𝙡','𝚕'],
    'm': ['м','ṃ','ᴍ','𝐦','𝑚','𝒎','𝓶','𝔪','𝕞','𝖒','𝗺','𝘮','𝙢','𝚖'],
    'n': ['ո','п','ռ','ṅ','ṇ','ṋ','𝐧','𝑛','𝒏','𝓷','𝔫','𝕟','𝖓','𝗻','𝘯','𝙣','𝚗'],
    'o': ['ο','օ','ӧ','ö','ó','ò','ô','õ','ō','ő','ⲟ','𝐨','𝑜','𝓸','𝔬','𝕠','𝖔','𝗼','𝘰','𝙤','𝚘'],
    'p': ['р','ρ','⍴','𝐩','𝑝','𝒑','𝓹','𝔭','𝕡','𝖕','𝗽','𝘱','𝙥','𝚙'],
    'q': ['զ','ԛ','գ','𝐪','𝑞','𝒒','𝓺','𝔮','𝕢','𝖖','𝗾','𝘲','𝙦','𝚚'],
    'r': ['ᴦ','г','ř','ȓ','ṛ','ⲅ','𝐫','𝑟','𝒓','𝓻','𝔯','𝕣','𝖗','𝗿','𝘳','𝙧','𝚛'],
    's': ['ѕ','ʂ','ṡ','ṣ','𝐬','𝑠','𝒔','𝓼','𝔰','𝕤','𝖘','𝘴','𝙨','𝚜'],
    't': ['т','τ','ṭ','ț','ⲧ','𝐭','𝑡','𝒕','𝓽','𝔱','𝕥','𝖙','𝘵','𝙩','𝚝'],
    'u': ['υ','ս','ü','ú','ù','û','ū','ⲩ','𝐮','𝑢','𝒖','𝓾','𝔲','𝕦','𝖚','𝘶','𝙪','𝚞'],
    'v': ['ν','ѵ','ⴸ','𝐯','𝑣','𝒗','𝓿','𝔳','𝕧','𝖛','𝗏','𝘷','𝙫','𝚟'],
    'w': ['ԝ','ա','ѡ','ⲱ','𝐰','𝑤','𝒘','𝔀','𝕨','𝖜','𝗐','𝘸','𝙬','𝚠'],
    'x': ['х','ҳ','ӿ','𝐱','𝑥','𝒙','𝔁','𝕩','𝖝','𝗑','𝘹','𝙭','𝚡'],
    'y': ['у','ү','ӯ','ý','ÿ','ⲩ','𝐲','𝑦','𝒚','𝔂','𝕪','𝖞','𝗒','𝘺','𝙮','𝚢'],
    'z': ['ᴢ','ż','ź','ž','𝐳','𝑧','𝒛','𝔃','𝕫','𝖟','𝗓','𝘻','𝙯','𝚣']
}

def generate_punycode_variants(letter):
    print(f"\n🔎 Punycode variants for letter: '{letter}'\n")
    letter = letter.lower()
    glyphs = homoglyphs_map.get(letter, [])
    if not glyphs:
        print("❌ No homoglyphs found for this letter.")
        return

    for glyph in glyphs:
        punycode = encode_punycode(glyph)
        if punycode:
            print(f"{glyph} -> {punycode}")

# User input
letter = input("Enter a letter (a-z): ").strip()
input("Enter a letter (a-z): ").strip() 54 | if len(letter) == 1 and letter.isalpha(): 55 | generate_punycode_variants(letter) 56 | else: 57 | print("❗ Please enter a single valid letter.") 58 | -------------------------------------------------------------------------------- /urlscan.py: -------------------------------------------------------------------------------- 1 | #!/usr/local/python3.13/bin/python3.13 2 | 3 | import requests 4 | import argparse 5 | import re 6 | import os 7 | 8 | API_KEY = "xxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" # ← Insert your API Key here 9 | 10 | if not API_KEY: 11 | print("[ERROR] Please add your API key in the script before running.") 12 | exit(1) 13 | 14 | parser = argparse.ArgumentParser(description="Automating usage of urlscanner.io") 15 | parser.add_argument('-m', '--mode', required=True, choices=['subdomains', 'urls'], help="Mode to scan: subdomains or urls.") 16 | parser.add_argument('-d', '--domain', help="Single domain to scan.") 17 | parser.add_argument('-df', '--domain-file', help="File containing multiple domains to scan.") 18 | args = parser.parse_args() 19 | 20 | def print_banner(): 21 | print(r''' 22 | __ _ 23 | __ _______/ /_____________ _____ (_)___ 24 | / / / / ___/ / ___/ ___/ __ `/ __ \ / / __ \ 25 | / /_/ / / / (__ ) /__/ /_/ / / / / / / /_/ / 26 | \__,_/_/ /_/____/\___/\__,_/_/ /_(_)_/\____/ 27 | 28 | 29 | Created by: coffinxp 30 | ''') 31 | 32 | def sanitize_domain(domain): 33 | domain = domain.strip().lower() 34 | domain = re.sub(r'^https?://', '', domain) # remove http/https 35 | return domain if domain and not domain.startswith('#') else None 36 | 37 | def safe_request(url, headers): 38 | try: 39 | return requests.get(url, headers=headers, timeout=10) 40 | except requests.RequestException: 41 | return None 42 | 43 | def dedup_and_sort(items): 44 | return sorted(set(items)) 45 | 46 | def scan_domain(domain, mode, api_key): 47 | domain = sanitize_domain(domain) 48 | if not domain: 49 | return 50 | 51 | url = f"https://urlscan.io/api/v1/search/?q=page.domain:{domain}&size=100" 52 | headers = {"API-Key": api_key} 53 | response = safe_request(url, headers=headers) 54 | 55 | if not response: 56 | return 57 | 58 | results = [] 59 | if mode == "subdomains": 60 | matched = re.findall(rf"https?://((?:[a-zA-Z0-9_-]+\.)+{re.escape(domain)})", response.text) 61 | stripped = [re.sub(r"^https?://", "", url) for url in matched] 62 | filtered = [url for url in stripped if url != domain] 63 | results = [url.split("/")[0] for url in filtered] 64 | 65 | elif mode == "urls": 66 | matched = re.findall(rf"https?://(?:[a-zA-Z0-9_-]+\.)+{re.escape(domain)}/[^\s\"'>]+", response.text) 67 | results = matched 68 | 69 | unique = dedup_and_sort(results) 70 | 71 | for item in unique: 72 | print(item) # Show on screen 73 | 74 | # ---------------- MAIN ---------------- 75 | print_banner() 76 | 77 | domains_to_scan = [] 78 | 79 | # Use single domain 80 | if args.domain: 81 | single = sanitize_domain(args.domain) 82 | if single: 83 | domains_to_scan = [single] 84 | else: 85 | print("[!] Invalid domain input.") 86 | exit(1) 87 | 88 | # Use domain file 89 | elif args.domain_file: 90 | if os.path.isfile(args.domain_file): 91 | with open(args.domain_file, 'r') as f: 92 | domains_to_scan = [sanitize_domain(line) for line in f if sanitize_domain(line)] 93 | else: 94 | print(f"[!] File not found: {args.domain_file}") 95 | exit(1) 96 | 97 | else: 98 | print("[!] 
    exit(1)

for domain in domains_to_scan:
    scan_domain(domain, args.mode, API_KEY)

print(f"\n[✔] Completed scan for {len(domains_to_scan)} domain(s).")
--------------------------------------------------------------------------------
/virustotal.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Function to check if input is an IP address
is_ip() {
    local input=$1
    [[ $input =~ ^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$ ]]
}

# Function to fetch and display undetected URLs and hostnames for IP or domain
fetch_undetected_urls() {
    local input=$1
    local api_key_index=$2
    local api_key

    if [ $api_key_index -eq 1 ]; then
        api_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    elif [ $api_key_index -eq 2 ]; then
        api_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    else
        api_key="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
    fi

    if is_ip "$input"; then
        local URL="https://www.virustotal.com/vtapi/v2/ip-address/report?apikey=$api_key&ip=$input"
        echo -e "\nFetching data for IP: \033[1;34m$input\033[0m (using API key $api_key_index)"
    else
        local URL="https://www.virustotal.com/vtapi/v2/domain/report?apikey=$api_key&domain=$input"
        echo -e "\nFetching data for domain: \033[1;34m$input\033[0m (using API key $api_key_index)"
    fi

    response=$(curl -s "$URL")
    if [[ $? -ne 0 || -z "$response" ]]; then
        echo -e "\033[1;31mError fetching data for: $input\033[0m"
        return
    fi

    # Print hostnames if input is IP
    if is_ip "$input"; then
        hostnames=$(echo "$response" | jq -r '.resolutions[].hostname' 2>/dev/null)
        if [[ -n "$hostnames" ]]; then
            echo -e "\033[1;35mHostnames resolved for IP: $input\033[0m"
            echo "$hostnames"
        else
            echo -e "\033[1;33mNo hostnames found for IP: $input\033[0m"
        fi
    fi

    # Extract and print undetected URLs
    undetected_urls=$(echo "$response" | jq -r '.undetected_urls[][0]' 2>/dev/null)
    if [[ -z "$undetected_urls" ]]; then
        echo -e "\033[1;33mNo undetected URLs found for: $input\033[0m"
    else
        echo -e "\033[1;32mUndetected URLs for: $input\033[0m"
        echo "$undetected_urls"
    fi
}

# Function to display a countdown
countdown() {
    local seconds=$1
    while [ $seconds -gt 0 ]; do
        echo -ne "\033[1;36mWaiting for $seconds seconds...\033[0m\r"
        sleep 1
        : $((seconds--))
    done
    echo -ne "\033[0K" # Clear the countdown line
}

# Check if an argument is provided
if [ -z "$1" ]; then
    echo -e "\033[1;31mUsage: $0 <ip|domain|file>\033[0m"
    exit 1
fi

# Initialize variables for API key rotation
api_key_index=1
request_count=0

# Check if the argument is a file
if [ -f "$1" ]; then
    while IFS= read -r input; do
        input=$(echo "$input" | sed 's|https\?://||') # Strip scheme if present
        fetch_undetected_urls "$input" $api_key_index
        countdown 20

        # Rotate API key every 5 requests
        request_count=$((request_count + 1))
        if [ $request_count -ge 5 ]; then
            request_count=0
            if [ $api_key_index -eq 1 ]; then
                api_key_index=2
            elif [ $api_key_index -eq 2 ]; then
                api_key_index=3
            else
                api_key_index=1
            fi
        fi
    done < "$1"
"$1" 99 | else 100 | input=$(echo "$1" | sed 's|https\?://||') # Strip scheme if present 101 | fetch_undetected_urls "$input" $api_key_index 102 | fi 103 | 104 | echo -e "\033[1;32mAll done!\033[0m" 105 | 106 | -------------------------------------------------------------------------------- /wayback.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Check if domain was provided 4 | if [ -z "$1" ]; then 5 | echo "Usage: $0 domain.com [-s] [-e] [-sc codes] [-scx codes]" 6 | echo "Examples:" 7 | echo " $0 example.com -s -sc 200" 8 | echo " $0 example.com -sc 200,302,403" 9 | echo " $0 example.com -scx 404,500" 10 | echo " $0 example.com -e extensions only" 11 | exit 1 12 | fi 13 | 14 | domain=$1 15 | shift # shift args so $2 becomes the flags 16 | 17 | # Defaults 18 | subdomains=false 19 | extensions=false 20 | status_code="" 21 | exclude_status_code="" 22 | 23 | # Parse flags 24 | while [[ $# -gt 0 ]]; do 25 | case "$1" in 26 | -s) subdomains=true ;; 27 | -e) extensions=true ;; 28 | -sc) status_code=$2; shift ;; # include codes 29 | -scx) exclude_status_code=$2; shift ;; # exclude codes 30 | esac 31 | shift 32 | done 33 | 34 | # Regex for sensitive extensions 35 | ext_regex='xls|xml|xlsx|json|pdf|sql|doc|docx|pptx|txt|git|zip|tar\.gz|tgz|bak|7z|rar|log|cache|secret|db|backup|yml|gz|config|csv|yaml|md|md5|exe|dll|bin|ini|bat|sh|tar|deb|rpm|iso|img|env|apk|msi|dmg|tmp|crt|pem|key|pub|asc' 36 | 37 | # Decide base URL 38 | if $subdomains; then 39 | base_url="https://web.archive.org/cdx/search/cdx?url=*.$domain/*&collapse=urlkey&output=text&fl=original,statuscode" 40 | echo "Fetching results for $domain including subdomains..." 41 | else 42 | base_url="https://web.archive.org/cdx/search/cdx?url=$domain/*&collapse=urlkey&output=text&fl=original,statuscode" 43 | echo "Fetching results for $domain (main domain only)..." 44 | fi 45 | 46 | # Add extension filter 47 | if $extensions; then 48 | echo "Filtering by specific file extensions..." 49 | base_url="$base_url&filter=original:.*\.($ext_regex)$" 50 | fi 51 | 52 | # Add status code include filter 53 | if [ -n "$status_code" ]; then 54 | status_code_regex=$(echo "$status_code" | sed 's/,/|/g') 55 | echo "Including only HTTP status code(s): $status_code" 56 | base_url="$base_url&filter=statuscode:($status_code_regex)" 57 | fi 58 | 59 | # Add status code exclude filter 60 | if [ -n "$exclude_status_code" ]; then 61 | exclude_status_code_regex=$(echo "$exclude_status_code" | sed 's/,/|/g') 62 | echo "Excluding HTTP status code(s): $exclude_status_code" 63 | base_url="$base_url&filter=!statuscode:($exclude_status_code_regex)" 64 | fi 65 | 66 | # Run query 67 | urls=$(curl -s "$base_url") 68 | 69 | # Show results (only URLs, strip status codes) 70 | if [ -z "$urls" ]; then 71 | echo "No results found." 72 | else 73 | echo "$urls" | awk '{print $1}' 74 | fi 75 | --------------------------------------------------------------------------------