├── config ├── README.md ├── ReconCobra.jpeg └── ReconCobra.desktop ├── sub1.sh ├── maahro.sh ├── vasl.sh ├── netblock.sh ├── ShaheenX.sh ├── dnsbrute.sh ├── router.sh ├── shaheenx.sh ├── cname.sh ├── scrambler.sh ├── xwin2.sh ├── crtstart.sh ├── smbclient1.sh ├── snmp.sh ├── snmp1.sh ├── snmp2.sh ├── snmp3.sh ├── snmp4.sh ├── cobb.sh ├── ftp.sh ├── imap.sh ├── pop3.sh ├── pptp.sh ├── smtp.sh ├── http-form.sh ├── smbvul.sh ├── theharvester.sh ├── virustotal.sh ├── enumdom.sh ├── certspot.sh ├── nmapasnlookup.sh ├── cors.sh ├── entrust.sh ├── smbclient.sh ├── smbclient2.sh ├── firewalka.sh ├── firewalkb.sh ├── firewalkc.sh ├── googlegct.sh ├── email.sh ├── resolve.sh ├── linkfinder2.sh ├── xwin.sh ├── xwin3.sh ├── censys.sh ├── xwin4.sh ├── bile.sh ├── wayback.sh ├── mireer.sh ├── common2.sh ├── nmapmetasploitoutput.sh ├── checkurls.sh ├── xwin1.sh ├── mdsse.sh ├── traceroute-function ├── screenshostinstall.sh ├── common_crawl_download.sh ├── linkfinder3.sh ├── common_crawl_reverse.sh ├── endpoint_extraction.sh ├── common1.sh ├── fb.sh ├── passive.sh ├── inteli.sh ├── cobe.sh ├── googl.pl ├── ip2bin.sh ├── cobd.sh ├── robots.sh ├── useddatabasefiles.sh ├── sss.sh ├── sitemap.sh ├── mascan.sh ├── certificate.sh ├── ssl.sh ├── nmapcommonscan.sh ├── linkfinder1.sh ├── screenshot.sh ├── bruter.sh ├── takeoverscanner.sh ├── hostnametoip.sh ├── cobc.sh ├── coba.sh ├── entrust.go ├── emailtls.pl ├── findasn.sh ├── crt.sh ├── cob_a.sh ├── cc.sh ├── sub1.pl ├── cob_b.sh ├── browser.sh ├── censys.py ├── Termux_fixme.sh ├── dork.pl ├── sixth.pl ├── ip.pl ├── README.md ├── Termux_Installer.sh ├── certspot.go ├── metacrawler.pl ├── js_linkfinder.py ├── js_linkfinder └── js_linkfinder.py ├── gct.py ├── cobra_webmeta_crawler.py ├── geotagging_crawler └── cobra_geotagging_crawler.py ├── BlackArch_Installer.sh ├── ParrotOS_Installer.sh ├── Kali_Installer.sh └── ftp.pl /config/README.md: -------------------------------------------------------------------------------- 1 | Files 2 | -------------------------------------------------------------------------------- /sub1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | perl sub1.pl -------------------------------------------------------------------------------- /maahro.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd maahro 3 | perl maahro.pl -------------------------------------------------------------------------------- /vasl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd vasl 3 | python2 vasl.py -------------------------------------------------------------------------------- /netblock.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | nmap -iL input.txt > /dev/null -------------------------------------------------------------------------------- /ShaheenX.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd ShaheenX 3 | perl ShaheenX.pl -------------------------------------------------------------------------------- /dnsbrute.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd panthera 3 | perl dnsbrute.pl -------------------------------------------------------------------------------- /router.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd routersploit 3 | python3 
rsf.py -------------------------------------------------------------------------------- /shaheenx.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd ShaheenX 3 | perl ShaheenX.pl -------------------------------------------------------------------------------- /cname.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd vasl 3 | perl netblock_cname_finder.pl -------------------------------------------------------------------------------- /scrambler.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd HackTheWorld 3 | python2 HackTheWorld.py -------------------------------------------------------------------------------- /xwin2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target : " Target 3 | xspy $Target -------------------------------------------------------------------------------- /crtstart.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Enter Target: " 3 | read Target 4 | ./crt.sh $Target -------------------------------------------------------------------------------- /smbclient1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | smbclient -L ///$Target -------------------------------------------------------------------------------- /snmp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | snmp-check $Target -c public -------------------------------------------------------------------------------- /snmp1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | snmp-check $Target -c public -------------------------------------------------------------------------------- /snmp2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | snmp-check $Target -c private -------------------------------------------------------------------------------- /snmp3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | snmp-check $Target -c public -v2c -------------------------------------------------------------------------------- /snmp4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | snmp-check $Target -c private -v2c -------------------------------------------------------------------------------- /cobb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "Enter victim url: " victim 3 | echo "$victim" 4 | perl metacrawler.pl -------------------------------------------------------------------------------- /config/ReconCobra.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Agent00049/ReconCobra/HEAD/config/ReconCobra.jpeg -------------------------------------------------------------------------------- /ftp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 
3 | read Target 4 | nmap --script ftp-brute -p 21 $Target -------------------------------------------------------------------------------- /imap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap -p143 --script imap-brute $Target -------------------------------------------------------------------------------- /pop3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap -p110 --script pop3-brute $Target -------------------------------------------------------------------------------- /pptp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target : " Target 3 | cat wordlist.txt | thc-pptp-bruter $Target -------------------------------------------------------------------------------- /smtp.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap -p25 --script smtp-enum-users $Target -------------------------------------------------------------------------------- /http-form.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap --script http-form-brute -p 80 $Target -------------------------------------------------------------------------------- /smbvul.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap --script smb-vuln* -p 139,445 $Target 5 | -------------------------------------------------------------------------------- /theharvester.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | theharvester -d $Target -l 200 -b google -------------------------------------------------------------------------------- /virustotal.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd vasl 3 | echo " [+] Enter URL" 4 | read Target 5 | ruby virustotal.rb $Target -------------------------------------------------------------------------------- /enumdom.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | echo "enumdomusers" | rpcclient -U "" -N $Target -------------------------------------------------------------------------------- /certspot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Enter Target: " 3 | read Target 4 | go run ./certspot.go $Target > ./crt/$Target.list -------------------------------------------------------------------------------- /nmapasnlookup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Enter asnlookup.list" 3 | read Target 4 | nmap -iL $Target > $Target.nmapresult -------------------------------------------------------------------------------- /cors.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -n "[ + ] Enter Target: " 3 | read Target 4 | cd CORScanner 5 | python2 cors_scan.py -u $Target --verbose -------------------------------------------------------------------------------- /entrust.sh:
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Enter Target: " 3 | read Target 4 | go run ./entrust.go -domain $Target > ./crt/$Target.list -------------------------------------------------------------------------------- /smbclient.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | smbclient -L ///$Target\\ipc\$ -U administrator -n "justatest" -------------------------------------------------------------------------------- /smbclient2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | smbclient -L ///$Target\\ipc\$ -U administrator -n "justatest" -------------------------------------------------------------------------------- /firewalka.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | hping3 -S -c 1 -s 5151 -p 80 $Target > Firewalk_results/$Target.lista -------------------------------------------------------------------------------- /firewalkb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | hping3 -A -c 1 -s 5151 -p 80 $Target > Firewalk_results/$Target.listb -------------------------------------------------------------------------------- /firewalkc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | hping3 -2 -c 1 -s 5151 -p 80 $Target > Firewalk_results/$Target.listc -------------------------------------------------------------------------------- /googlegct.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Enter Target: " 3 | read Target 4 | mkdir crt 5 | python2 gct.py -d $Target -e show > ./crt/gct.list -------------------------------------------------------------------------------- /email.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | python infoga.py --domain $Target --source all --breach -v 2 --report results.txt -------------------------------------------------------------------------------- /resolve.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target, example 192.168.1.0-24" 3 | read Target 4 | nmap $Target > input.txt 5 | nmap -sP -v -iL input.txt -------------------------------------------------------------------------------- /linkfinder2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd LinkFinder 3 | read -p "[ + ] Enter victim url: " victim 4 | echo "$victim" 5 | python2 linkfinder.py -i $victim -d 6 | -------------------------------------------------------------------------------- /xwin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target : " Target 3 | read -p "[+] Enter Number : " Screen 4 | xwininfo -root -tree -display $Target:$Screen 5 | -------------------------------------------------------------------------------- /xwin3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd xrdp 3 | read -p "[+] Enter Target : " Target 4
| read -p "[+] Enter Screen : " Screen 5 | ./xrdp.py $Target:$Screen --no-disp 6 | -------------------------------------------------------------------------------- /censys.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Targets example: google or msn or microsoft" 3 | echo " [+] Enter Search Target: " 4 | read Target 5 | python2 censys.py -f $Target -------------------------------------------------------------------------------- /xwin4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target : " Target 3 | read -p "[+] Enter Screen : " Screen 4 | xwininfo -display $Target:$Screen -all -root | grep \" 5 | -------------------------------------------------------------------------------- /bile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd BiLE-suite 3 | echo "Enter Target" 4 | read Target 5 | perl BiLE.pl $Target out.txt 6 | perl BiLE-weigh.pl sp_bile_out.txt.mine out.txt 7 | cd .. -------------------------------------------------------------------------------- /wayback.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir wayback_results 3 | cd wayback_results 4 | echo -n "Enter URL: " ; read URL 5 | wayback_machine_downloader http://$URL > $URL.list 6 | cd .. -------------------------------------------------------------------------------- /mireer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "[ + ] Enter Target: " 3 | read Target 4 | mkdir mirror_websites 5 | cd mirror_websites 6 | mkdir $Target 7 | cd $Target && httrack -w $Target 8 | cd .. 9 | cd .. 10 | -------------------------------------------------------------------------------- /common2.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -n "[ + ] Enter File with Path: " ; read File 3 | mkdir json-results 4 | cd json-describe/target/debug 5 | ./json-describe "$File" > ../../../json-results/$(basename "$File").list 6 | cd ../../..
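A quick usage sketch for common2.sh (the sample path below is hypothetical; json-describe is assumed to be a locally built binary living at json-describe/target/debug):

    ./common2.sh
    # [ + ] Enter File with Path: /tmp/sample.json
    # the summary then lands in json-results/sample.json.list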
-------------------------------------------------------------------------------- /nmapmetasploitoutput.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Enter Target" 3 | read Target 4 | nmap -n -D192.168.1.5,10.5.1.2,172.1.2.4,3.4.2.1 $Target > report.data 5 | nmap -T4 -A -p 1-1000 -oX - $Target > report.xml -------------------------------------------------------------------------------- /checkurls.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | while read url 3 | do 4 | urlstatus=$(curl -o /dev/null --silent --head --write-out '%{http_code}' "$url" ) 5 | echo "$url $urlstatus" >> urlstatus.txt 6 | done < $1 -------------------------------------------------------------------------------- /xwin1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target : " Target 3 | read -p "[+] Enter Number : " Screen 4 | xwd -root -screen -silent -display $Target:$Screen > screenshot.xwd 5 | convert screenshot.xwd screenshot.png 6 | 7 | -------------------------------------------------------------------------------- /mdsse.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Remember the saved file path" 3 | echo " [+] Targets example: google or msn or microsoft" 4 | echo " [+] Enter Search Target: " 5 | read Target 6 | cd Asnlookup 7 | python2 asnlookup.py -o $Target -------------------------------------------------------------------------------- /traceroute-function: -------------------------------------------------------------------------------- 1 | # function 2 | function traceroute-mapper() { 3 | source /usr/share/ReconCobra/mapper.sh 4 | xdg-open "https://stefansundin.github.io/traceroute-mapper/?trace=$(traceroute -q1 $* | sed ':a;N;$!ba;s/\n/%0A/g')" 5 | } -------------------------------------------------------------------------------- /screenshostinstall.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo apt-get install xfonts-75dpi 3 | wget http://downloads.sourceforge.net/project/wkhtmltopdf/0.12.2.1/wkhtmltox-0.12.2.1_linux-trusty-i386.deb 4 | sudo dpkg -i wkhtmltox-0.12.2.1_linux-trusty-i386.deb -------------------------------------------------------------------------------- /common_crawl_download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir common_crawl_results 3 | cd common_crawl_results 4 | echo -e "Enter URL in reverse order only: " 5 | read Domain 6 | curl "http://urlsearch.commoncrawl.org/download?q=$Domain" -o $Domain.list 7 | cd ..
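common_crawl_download.sh expects the domain with its labels already reversed (com.google, pk.daraz, per the examples in common1.sh); common_crawl_reverse.sh produces exactly that form, so the two helpers can be chained. A minimal sketch, assuming both scripts sit in the current directory and using an illustrative domain:

    Domain=$(./common_crawl_reverse.sh google.com)   # -> com.google
    curl "http://urlsearch.commoncrawl.org/download?q=$Domain" -o "$Domain.list"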
-------------------------------------------------------------------------------- /linkfinder3.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd LinkFinder 3 | echo "" 4 | echo "[ + ] Before using this tool" 5 | echo "[ + ] copy all js files into LinkFinder folder" 6 | echo "[ + ] loading results" 7 | echo "" 8 | python linkfinder.py -i '*.js' -r ^/api/ -o results.html 9 | -------------------------------------------------------------------------------- /common_crawl_reverse.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | input="${1}" reverse="" 3 | 4 | IFS=. read -ra labels <<< "$input" 5 | for (( i=${#labels[@]}-1; i>=0; i-- )) 6 | do reverse="${reverse}${labels[$i]}." 7 | done 8 | echo "${reverse%.}" 9 | 10 | #http://urlsearch.commoncrawl.org/download?q=com.google 2> /dev/null -------------------------------------------------------------------------------- /endpoint_extraction.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir endpoint_extraction_results 3 | cd endpoint_extraction_results 4 | echo -n "Enter URL to search : " ; read URL 5 | curl "http://web.archive.org/cdx/search/cdx?url=$URL/*&output=text&fl=original&collapse=urlkey" > $URL.list 6 | cd .. 7 | -------------------------------------------------------------------------------- /common1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir common_crawl_results 3 | cd common_crawl_results 4 | echo -e "[ + ] URL Examples, com.google, pk.daraz " 5 | echo -n "[ + ] Enter URL in reverse order only: " ; read Domain 6 | curl http://urlsearch.commoncrawl.org/download?q=$Domain -o $Domain.list 7 | cd .. -------------------------------------------------------------------------------- /fb.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | BROWSER="xdg-open" 3 | VER="1.9" 4 | OKBLUE='\033[94m' 5 | OKRED='\033[91m' 6 | OKGREEN='\033[92m' 7 | OKORANGE='\033[93m' 8 | DELAY=1 9 | RESET='\e[0m' 10 | 11 | # LOAD WEBSITE IN A WEB BROWSER 12 | $BROWSER "https://pitoolbox.com.au/facebook-tool/" 2> /dev/null 13 | 14 | -------------------------------------------------------------------------------- /passive.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | # LOAD WEBSITE IN A WEB BROWSER 13 | $BROWSER "https://passivedns.mnemonic.no" 2> /dev/null -------------------------------------------------------------------------------- /inteli.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | echo "Enter Target" 13 | read Target 14 | 15 | $BROWSER "https://intelx.io/?s=$Target" 2> /dev/null 16 | -------------------------------------------------------------------------------- /cobe.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW="\033[1;33m" 3 | ENDCOLOR="\033[0m" 4 | clear 5 | echo -e 6 | echo -e $YELLOW"[ + ] Points [ + ]"$ENDCOLOR 7 | echo "1.
Extracting all location data" 8 | echo "" 9 | cd exiftool 10 | mkdir -p ../geotagging_crawler 11 | ./exiftool -w filelocationsinformation.txt -location:all ../geotagging_crawler/* 12 | -------------------------------------------------------------------------------- /config/ReconCobra.desktop: -------------------------------------------------------------------------------- 1 | [Desktop Entry] 2 | Name=ReconCobra 3 | Comment=Web Footprinting Tool 4 | Encoding=UTF-8 5 | Exec=sh -c "reconcobra;${SHELL:-bash}" 6 | Icon=ReconCobra.png 7 | StartupNotify=false 8 | Terminal=true 9 | Type=Application 10 | Categories=01-info-gathering; 11 | X-Kali-Package=ReconCobra 12 | Name[C]=ReconCobra 13 | -------------------------------------------------------------------------------- /googl.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | use strict; 3 | use warnings; 4 | use 5.010; 5 | 6 | use WWW::Mechanize; 7 | my $w = WWW::Mechanize->new(); 8 | $w->get('http://www.google.com/'); 9 | $w->submit_form( 10 | fields => { 11 | q => 'test automation using perl' 12 | }, 13 | ); 14 | $w->follow_link( n => 5 ); 15 | say $w->title; -------------------------------------------------------------------------------- /ip2bin.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | function convip() 4 | { 5 | CONV=({0..1}{0..1}{0..1}{0..1}{0..1}{0..1}{0..1}{0..1}) 6 | 7 | ip="" 8 | for byte in `echo ${1} | tr "." " "`; do 9 | ip="${ip}.${CONV[${byte}]}" 10 | done 11 | echo ${ip:1} 12 | } 13 | 14 | read ip1 15 | 16 | a=`convip "${ip1}"` 17 | 18 | echo "${a}" 19 | -------------------------------------------------------------------------------- /cobd.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW="\033[1;33m" 3 | ENDCOLOR="\033[0m" 4 | clear 5 | echo -e 6 | echo -e $YELLOW"[ + ] Points [ + ]"$ENDCOLOR 7 | echo "1.
Extracting all GPS data" 8 | echo "" 9 | cd exiftool 10 | mkdir -p ../geotagging_crawler 11 | ./exiftool ../geotagging_crawler/* | awk '/Gps|GPS|gps/{ print $0 }' > ../geotagging_crawler/filesgpsinformation.txt 12 | 13 | -------------------------------------------------------------------------------- /robots.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | echo "Enter Target" 13 | read Target 14 | # LOAD WEBSITE IN A WEB BROWSER 15 | $BROWSER "https://www.google.com/search?q=site:$Target+ext:txt+robots.txt" 2> /dev/null 16 | 17 | -------------------------------------------------------------------------------- /useddatabasefiles.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | read -p "[+] Enter Target: " Target 13 | # LOAD WEBSITE IN A WEB BROWSER 14 | $BROWSER "https://www.google.com/search?q=site:$Target+sql+%7C+phpinfo" 2> /dev/null 15 | 16 | -------------------------------------------------------------------------------- /sss.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | echo "Enter Target" 13 | read Target 14 | # LOAD WEBSITE IN A WEB BROWSER 15 | $BROWSER "https://www.social-searcher.com/google-social-search/?q=$Target&fb=on&tw=on&gp=on&in=on&li=on&pi=on" 2> /dev/null 16 | 17 | -------------------------------------------------------------------------------- /sitemap.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | echo "Enter Target" 13 | read Target 14 | # LOAD WEBSITE IN A WEB BROWSER 15 | $BROWSER "https://www.google.com/search?q=site:$Target+ext:xml+|+ext:conf+|+ext:cnf+|+ext:reg+|+ext:inf+|+ext:rdp+|+ext:cfg+|+ext:txt+|+ext:ora+|+ext:ini" 2> /dev/null 16 | 17 | -------------------------------------------------------------------------------- /mascan.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Enter Target in IP/CIDR/Block: " Target 3 | masscan -p21,22,23,25,43,53,68,80,110,111,115,119,123,137,139,143,161,220,389,443,445,513,514,691,1433,1521,1701,1723,2049,3306,3388,3389,4125,5060,5061,5062,5063,5064,5432,5800,5801,5802,5804,5900,5901,5903,5905,5986,6000,6001,6002,6003,6060,6061,6062,6063,6379,7000,7001,7199,8000,8080,8090,8443,9001,9042,27017,27018,27019 --banners --open-only --rate 1000 --range $Target > mascanresults.txt -------------------------------------------------------------------------------- /certificate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | apt-get install xdg-utils 3 | TARGET="$1" 4 | BROWSER="xdg-open" 5 | VER="1.9" 6 | OKBLUE='\033[94m' 7 | OKRED='\033[91m' 8 | OKGREEN='\033[92m' 9 | OKORANGE='\033[93m' 10
| DELAY=1 11 | RESET='\e[0m' 12 | 13 | if [ -z $TARGET ]; then 14 | echo -e "$OKORANGE + -- --=[Usage: certificate.sh <target> $RESET" 15 | exit 16 | fi 17 | 18 | # LOAD WEBSITE IN A WEB BROWSER 19 | $BROWSER "https://censys.io/certificates?q=$TARGET" 2> /dev/null 20 | -------------------------------------------------------------------------------- /ssl.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | #apt-get install xdg-utils 3 | TARGET="$1" 4 | BROWSER="xdg-open" 5 | VER="1.9" 6 | OKBLUE='\033[94m' 7 | OKRED='\033[91m' 8 | OKGREEN='\033[92m' 9 | OKORANGE='\033[93m' 10 | DELAY=1 11 | RESET='\e[0m' 12 | 13 | if [ -z $TARGET ]; then 14 | echo -e "$OKORANGE + -- --=[Usage: ssl.sh <target> $RESET" 15 | exit 16 | fi 17 | 18 | # LOAD WEBSITE IN A WEB BROWSER 19 | $BROWSER "https://ssltools.digicert.com/checker/views/checkInstallation.jsp" 2> /dev/null -------------------------------------------------------------------------------- /nmapcommonscan.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -n "[+] Enter Target : " 3 | read Target 4 | nmap -p 21,22,23,25,43,53,68,80,110,111,115,119,123,137,139,143,161,220,389,443,445,513,514,691,1433,1521,1701,1723,2049,3306,3388,3389,4125,5060,5061,5062,5063,5064,5432,5800,5801,5802,5804,5900,5901,5903,5905,5986,6000,6001,6002,6003,6060,6061,6062,6063,6379,7000,7001,7199,8000,8080,8090,8443,9001,9042,27017,27018,27019 $Target > "nmapcommonscan_data.txt" 5 | echo -e "Data is saved as nmapcommonscan_data.txt" 6 | -------------------------------------------------------------------------------- /linkfinder1.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | mkdir js_linkfinder 3 | mkdir results 4 | cd js_linkfinder 5 | 6 | echo "[ + ] Enter TERMUX option for Termux else press RETURN :" 7 | read first 8 | 9 | if [ "$first" == 'TERMUX' ] 10 | 11 | then 12 | 13 | read -p "[ + ] Enter victim url: " victim 14 | echo "$victim" 15 | python js_linkfinder.py --wait=2 --download $victim 16 | 17 | else 18 | 19 | read -p "[ + ] Enter victim url: " victim 20 | echo "$victim" 21 | python3 js_linkfinder.py --wait=2 --download $victim 22 | 23 | fi 24 | 25 | cp *.js ../results 26 | cd .. 27 | -------------------------------------------------------------------------------- /screenshot.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | read -p "[+] Provide File Path: " path 3 | echo "[+] Do Port and Block Scan before using this tool" 4 | echo "[+] Loading" 5 | cd EyeWitness 6 | python EyeWitness.py -f $path -d screens --active-scan --web --only-ports '21,22,23,25,43,53,68,80,110,111,115,119,123,137,139,143,161,220,389,443,445,513,514,691,1433,1521,1701,1723,2049,3306,3388,3389,4125,5060,5061,5062,5063,5064,5432,5800,5801,5802,5804,5900,5901,5903,5905,5986,6000,6001,6002,6003,6060,6061,6062,6063,6379,7000,7001,7199,8000,8080,8090,8443,9001,9042,27017,27018,27019' 7 | cd ..
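screenshot.sh wants a file of live hosts gathered by the earlier port and block scans; one hedged way to derive such a file from the grepable nmap output that hostnametoip.sh writes (status.txt is assumed to exist from that step):

    grep -oP 'Host: \K\S+' status.txt | sort -u > targets.txt
    # then hand targets.txt to screenshot.sh when it prompts for the file path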
8 | -------------------------------------------------------------------------------- /bruter.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo -e "" 3 | echo -e "[+] Example: SSH/22, RDP/3389, FTP/21, Telnet/23, HTTP(S)/443, POP3(S)/110, IMAP/143" 4 | echo -e " SMB/139, VNC/5900-5, SIP/5060-5065, Redis/6379, PostgreSQL/5432, MySQL/3306" 5 | echo -e " MSSQL/1433, MongoDB/27017-19, Cassandra/7000-7001-9042-7199-8080, WinRM/5986" 6 | echo -e " OWA/80-443-4125-3389" 7 | echo -e "" 8 | read -p "[+] Enter Target User (root/administrator): " User 9 | read -p "[+] Enter Target IP: " Target 10 | read -p "[+] Enter Target Port: " Port 11 | ncrack -v --user $User $Target:$Port 12 | -------------------------------------------------------------------------------- /takeoverscanner.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo " [+] Copy all subdomain results into a file first, then use this feature" 3 | echo " [+] Enter the subdomain list file you created" 4 | echo " [+] New file will be created as a complete subdomains list" 5 | echo " [+] File will provide the fullest possible information on takeovers" 6 | echo " [+] Output file will show various services, from famous to unknown" 7 | echo " [+] Services that are vulnerable to takeovers" 8 | echo " [+] Enter file path: " 9 | read Target 10 | dig -f $Target 2> /dev/null | awk '/CNAME/ {print}' > takeoverlist_complete_subdomains.txt 11 | -------------------------------------------------------------------------------- /hostnametoip.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | cd ShaheenX 3 | perl ShaheenX.pl 4 | nmap -sn -oG status.txt -v -iL bingsubdomain.txt > bigsubdomain_hostnametoip.txt 5 | nmap -sn -oG status.txt -v -iL robtexsubdomain.txt > robtexsubdomain_hostnametoip.txt 6 | nmap -sn -oG status.txt -v -iL 51_subdomains.txt > 51_subdomains_hostnametoip.txt 7 | nmap -sn -oG status.txt -v -iL asksubdomain.txt > asksubdomain_hostnametoip.txt 8 | nmap -sn -oG status.txt -v -iL baidusubdomain.txt > baidusubdomain_hostnametoip.txt 9 | nmap -sn -oG status.txt -v -iL netcraftsubdomain.txt > netcraftsubdomain_hostnametoip.txt 10 | nmap -sn -oG status.txt -v -iL yahoosubdomain.txt > yahoosubdomain_hostnametoip.txt 11 | -------------------------------------------------------------------------------- /cobc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW="\033[1;33m" 3 | ENDCOLOR="\033[0m" 4 | clear 5 | echo -e 6 | echo -e $YELLOW"[ + ] Downloading [ + ]"$ENDCOLOR 7 | echo "1.
Crawling for files with location and GPS co-ordinate data" 8 | echo "" 9 | 10 | echo "[ + ] Enter TERMUX option for Termux else press RETURN :" 11 | read first 12 | 13 | if [ "$first" == 'TERMUX' ] 14 | 15 | then 16 | 17 | read -p "[ + ] Enter victim url in absolute format: " victim 18 | echo "$victim" 19 | cd geotagging_crawler 20 | python cobra_geotagging_crawler.py --wait=2 --download $victim 21 | 22 | else 23 | 24 | read -p "[ + ] Enter victim url: " victim 25 | echo "$victim" 26 | cd geotagging_crawler 27 | python3 cobra_geotagging_crawler.py --wait=2 --download $victim 28 | 29 | fi 30 | -------------------------------------------------------------------------------- /coba.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | YELLOW="\033[1;33m" 3 | ENDCOLOR="\033[0m" 4 | clear 5 | echo -e 6 | echo -e $YELLOW"[ + ] Points [ + ]"$ENDCOLOR 7 | echo "1. Author = Computer Users" 8 | echo "2. Last/Modified By = Computer Users" 9 | echo "3. Producer or Creator = Software / OS Versions" 10 | echo "4. Title = Path Information" 11 | echo "5. Creator = Software / OS Versions" 12 | echo "6. Mime = Content Information" 13 | echo "7. SVG = image/svg+xml; charset=us-ascii means XSS" 14 | echo "" 15 | 16 | echo "[ + ] Enter TERMUX option for Termux else press RETURN :" 17 | read first 18 | 19 | if [ "$first" == 'TERMUX' ] 20 | 21 | then 22 | 23 | read -p "[ + ] Enter victim url in absolute format: " victim 24 | echo "$victim" 25 | python cobra_webmeta_crawler.py --wait=2 --download $victim 26 | 27 | else 28 | 29 | read -p "[ + ] Enter victim url: " victim 30 | echo "$victim" 31 | python3 cobra_webmeta_crawler.py --wait=2 --download $victim 32 | 33 | fi 34 | -------------------------------------------------------------------------------- /entrust.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "io/ioutil" 7 | "net/http" 8 | ) 9 | 10 | func main() { 11 | var domain string 12 | flag.StringVar(&domain, "domain", "", "Domain to search for CT logs") 13 | flag.Parse() 14 | fmt.Println(domain) 15 | entrustcertsearch(domain) 16 | } 17 | 18 | func entrustcertsearch(domain string) { 19 | APIURL := fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed&domain=%s&includeExpired=false&exactMatch=false&limit=5000", domain) 20 | 21 | req, err := http.NewRequest(http.MethodGet, APIURL, nil) 22 | if err != nil { 23 | panic(err) 24 | } 25 | 26 | client := http.DefaultClient 27 | resp, err := client.Do(req) 28 | if err != nil { 29 | panic(err) 30 | } 31 | 32 | defer resp.Body.Close() 33 | body, err := ioutil.ReadAll(resp.Body) 34 | if err != nil { 35 | panic(err) 36 | } 37 | 38 | fmt.Print(string(body)) 39 | } -------------------------------------------------------------------------------- /emailtls.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl -w 2 | use strict; 3 | use LWP::UserAgent; 4 | use HTTP::Request::Common; 5 | use XML::XPath; 6 | 7 | chomp(my $domain = <STDIN>); 8 | my $ua = LWP::UserAgent->new; 9 | my $request = POST( 'https://www.checktls.com/TestReceiver', [ 10 | CUSTOMERCODE => $domain, 11 | CUSTOMERPASS => 'IllNeverTell', 12 | EMAIL => 'test@CheckTLS.com', 13 | LEVEL => 'XML_DETAIL', 14 | ] ); 15 | my $response = $ua->request($request); 16 | unless(
$response->is_success) { 17 | print $response->status_line; 18 | } 19 | my $XML = $response->content(); 20 | my $xp = XML::XPath->new(xml => $XML); 21 | print 'Target = ' . $xp->find('/CheckTLS/eMailAddress') . "\n"; 22 | print 'Score = ' . $xp->find('/CheckTLS/ConfidenceFactor') . "\n"; 23 | my $nodeset = $xp->findnodes('/CheckTLS/MX'); 24 | print 'MX Count = ' . $nodeset->size() . "\n"; 25 | foreach my $mx ($nodeset->get_nodelist()) { 26 | print 'MX = ' . $mx->find('@exchange') . "\n"; 27 | print 'MX SSL Version = ' . $mx->find('SSL/SSLVersion') . "\n"; 28 | } 29 | print 'MX1.Cert1.Subject.commonName = ' . $xp->find('/CheckTLS/MX[1]/SSL/Certs/Cert[1]/Subject/commonName') . "\n"; -------------------------------------------------------------------------------- /findasn.sh: -------------------------------------------------------------------------------- 1 | #***************************************************************************************# 2 | #----------------------- EULA LICENSE AGREEMENT NOTICE ---------------------------------# 3 | #1. This software uses an EULA-based software agreement that grants users rights to use# 4 | #for any purpose, modify, and redistribute creative works about this perl software. # 5 | #2. This software comes with an "as-is" warranty; author "Haroon Awan" takes no # 6 | #responsibility for what you do with this software. You're free to use this software # 7 | #as it is for any purpose that suits, as long as it is not related to crime. # 8 | #***************************************************************************************# 9 | 10 | #!/bin/bash 11 | echo "[ + ] This is an automatic net block finder script" 12 | echo "[ + ] Integrated with the net block finder Perl software, which requires nmap" 13 | echo "[ + ] Use this net block step by step with the net block finder Perl for more results" 14 | echo -n "[ + ] Enter IP address to find associated AS Blocks: " 15 | 16 | read IP 17 | whois -h whois.radb.net -i origin -T route $(whois -h whois.radb.net ${IP} | grep origin: | awk '{print $NF}' | head -1) | grep -w "route:" | awk '{print $NF}' | sort -n > input.txt 18 | echo "file written, input.txt" 19 | -------------------------------------------------------------------------------- /crt.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Author: Tyler Wrightson 4 | # Updated: Haroon Awan 5 | # Date: June/8/2019 6 | 7 | TARGET="$1" 8 | DIR="$PWD/crt" 9 | 10 | if [ -z $TARGET ]; then 11 | echo -e "Usage: crt.sh URL" 12 | exit 13 | fi 14 | 15 | echo -e "[ + ] Creating directory $DIR" 16 | mkdir $DIR 17 | echo -e "[ + ] Downloading from https://crt.sh" 18 | TARGET=${TARGET// /+} 19 | echo -e "[ + ] url: https://crt.sh/?q=$TARGET" 20 | curl -s https://crt.sh/?q=$TARGET > $DIR/curl.txt 21 | echo -e "[ + ] Saving Certificate IDs to $DIR/crt.ids" 22 | cat $DIR/curl.txt | grep ?id= | cut -d \" -f5 | cut -d '<' -f1 | cut -d '>' -f2 >> $DIR/crt.ids 23 | 24 | TOTAL=`wc -l $DIR/crt.ids` 25 | echo -e "[ + ] Total Number of Certs: $TOTAL" 26 | cat $DIR/crt.ids| while read line 27 | do 28 | echo "[ + ] Downloading Certificate ID: $line" 29 | curl -s https://crt.sh/?id=$line > $DIR/$line.txt 30 | done 31 | 32 | # Note that the (.*?) makes the search 'ungreedy' - which matches 33 | # only the first occurrence of the
<BR> right after our search string 34 | cat $DIR/* | grep -oP '(DNS)(.*?)(<BR>
)' | cut -d ":" -f2 | cut -d "<" -f1 | sort -u >> $DIR/domains.txt 35 | 36 | echo -e "[ + ] Domains saved to: $DIR/domains.txt" 37 | echo -e "[ + ] Fixing and tweaking domains.txt" 38 | cd crt 39 | sed -ie 's/*.//g' domains.txt 40 | cp domains.txt domains.list 41 | rm *.txt 42 | rm *.txte 43 | rm *.ids 44 | cd .. 45 | echo -e "[ + ] Done" -------------------------------------------------------------------------------- /cob_a.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | wget -i metacrawler_bingfiles.txt 3 | cd exiftool 4 | ./exiftool ../*.pdf > pdfmetacrawler_results.txt 5 | ./exiftool ../*.svg > svgmetacrawler_results.txt 6 | ./exiftool ../*.doc > docmetacrawler_results.txt 7 | ./exiftool ../*.xls > xlsmetacrawler_results.txt 8 | ./exiftool ../*.xlsx > xslxmetacrawler_results.txt 9 | ./exiftool ../*.ppt > pptmetacrawler_results.txt 10 | ./exiftool ../*.pptx > pptxmetacrawler_results.txt 11 | ./exiftool ../*.csv > csvmetacrawler_results.txt 12 | ./exiftool ../*.rtf > rtfmetacrawler_results.txt 13 | ./exiftool ../*.zip > zipmetacrawler_results.txt 14 | ./exiftool ../*.rtf > rtfmetacrawler_results.txt 15 | ./exiftool ../*.rar > rarmetacrawler_results.txt 16 | ./exiftool ../*.tgz > tgzmetacrawler_results.txt 17 | ./exiftool ../*.gz > gzmetacrawler_results.txt 18 | ./exiftool ../*.xz > xzmetacrawler_results.txt 19 | mkdir metacrawler_results 20 | cp ../*.pdf metacrawler_results/ 21 | cp ../*.svg metacrawler_results/ 22 | cp ../*.doc metacrawler_results/ 23 | cp ../*.xls metacrawler_results/ 24 | cp ../*.xlsx metacrawler_results/ 25 | cp ../*.ppt metacrawler_results/ 26 | cp ../*.pptx metacrawler_results/ 27 | cp ../*.csv metacrawler_results/ 28 | cp ../*.rtf metacrawler_results/ 29 | cp ../*.tgz metacrawler_results/ 30 | cp ../*.gz metacrawler_results/ 31 | cp ../*.xz metacrawler_results/ 32 | cd .. 33 | rm *.pdf 34 | rm *.svg 35 | rm *.doc 36 | rm *.xls 37 | rm *.xlsx 38 | rm *.ppt 39 | rm *.pptx 40 | rm *.csv 41 | rm *.rtf 42 | rm *.tgz 43 | rm *.gz 44 | rm *.xz -------------------------------------------------------------------------------- /cc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Author: Tyler Wrightson 4 | # Updated: Haroon Awan 5 | # Date: June/8/2019 6 | 7 | TARGET="$1" 8 | DIR="$PWD/crt" 9 | 10 | if [ -z $TARGET ]; then 11 | echo -e "Usage: cc.sh URL" 12 | exit 13 | fi 14 | 15 | echo -e "[ + ] Creating directory $DIR" 16 | mkdir $DIR 17 | echo -e "[ + ] Downloading from http://urlsearch.commoncrawl.org" 18 | TARGET=${TARGET// /+} 19 | echo -e "[ + ] url: http://urlsearch.commoncrawl.org/?q=$TARGET" 20 | curl -s http://urlsearch.commoncrawl.org/?q=$TARGET > $DIR/curl.txt 21 | echo -e "[ + ] Saving Certificate IDs to $DIR/crt.ids" 22 | cat $DIR/curl.txt | grep ?q= | cut -d \" -f5 | cut -d '<' -f1 | cut -d '>' -f2 >> $DIR/crt.ids 23 | 24 | TOTAL=`wc -l $DIR/crt.ids` 25 | echo -e "[ + ] Total Number of Common Crawls: $TOTAL" 26 | cat $DIR/crt.ids| while read line 27 | do 28 | echo "[ + ] Downloading Common Crawl IDs: $line" 29 | curl -s http://urlsearch.commoncrawl.org/?q=$line > $DIR/$line.txt 30 | done 31 | 32 | # Note that the (.*?) makes the search 'ungreedy' - which matches 33 | # only the first occurrence of the
<BR> right after our search string 34 | cat $DIR/* | grep -oP '(DNS)(.*?)(<BR>
)' | cut -d ":" -f2 | cut -d "<" -f1 | sort -u >> $DIR/domains.txt 35 | 36 | echo -e "[ + ] Domains saved to: $DIR/domains.txt" 37 | echo -e "[ + ] Fixing and tweaking domains.txt" 38 | cd crt 39 | sed -ie 's/*.//g' domains.txt 40 | cp domains.txt domains.list 41 | rm *.txt 42 | rm *.txte 43 | rm *.ids 44 | cd .. 45 | echo -e "[ + ] Done" -------------------------------------------------------------------------------- /sub1.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | use HTTP::Request; 3 | use LWP::Simple; 4 | use LWP::UserAgent; 5 | use IO::Socket::INET; 6 | use Term::ANSIColor; 7 | use IO::Select; 8 | use HTTP::Response; 9 | use HTTP::Request::Common qw(POST); 10 | use HTTP::Request::Common qw(GET); 11 | use URI::URL; 12 | 13 | # CODE START ###### 14 | my $ua = LWP::UserAgent->new; 15 | $ua = LWP::UserAgent->new(keep_alive => 1); 16 | $ua->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.63 Safari/537.31"); 17 | print color("bold Green")," \n\n Enter domain: "; 18 | chomp($site8=<STDIN>); 19 | 20 | if ($site8) { subdomain(); } 21 | sub subdomain { 22 | $url = "https://www.pagesinventory.com/search/?s=$site8"; 23 | $request = $ua->get($url); 24 | $response = $request->content; 25 | 26 | $ip= (gethostbyname($site8))[4]; 27 | my ($a,$b,$c,$d) = unpack('C4',$ip); 28 | $ip_address ="$a.$b.$c.$d"; 29 | if($response =~ /Search result for/){ 30 | print "Website: $site8\n"; 31 | print "IP: $ip_address\n\n"; 32 | 33 | while($response =~ m/(.*?)/g ) { 34 | print "Subdomain: $1\n"; 35 | print "IP: $3\n"; 36 | open(OUT, ">>subdomains_links.txt"); print OUT "$1\n, $3\n"; close(OUT); 37 | sleep(1); 38 | } 39 | }elsif($ip_address =~ /[0-9]/){ 40 | if($response =~ /Nothing was found/){ 41 | print "Website: $site8\n"; 42 | print "IP: $ip_address\n"; 43 | print "No Subdomains Found For This Domain\n"; 44 | }}else { 45 | print "\nThere Is A Problem\n"; 46 | } 47 | } -------------------------------------------------------------------------------- /cob_b.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | wget -i metacrawler_googlefiles.txt 3 | mkdir downloaded_meta_files 4 | mkdir metacrawler_results 5 | cd exiftool 6 | ./exiftool ../*.pdf > ../metacrawler_results/pdfmetacrawler_results.txt 7 | ./exiftool ../*.svg > ../metacrawler_results/svgmetacrawler_results.txt 8 | ./exiftool ../*.doc > ../metacrawler_results/docmetacrawler_results.txt 9 | ./exiftool ../*.xls > ../metacrawler_results/xlsmetacrawler_results.txt 10 | ./exiftool ../*.xlsx > ../metacrawler_results/xslxmetacrawler_results.txt 11 | ./exiftool ../*.ppt > ../metacrawler_results/pptmetacrawler_results.txt 12 | ./exiftool ../*.pptx > ../metacrawler_results/pptxmetacrawler_results.txt 13 | ./exiftool ../*.csv > ../metacrawler_results/csvmetacrawler_results.txt 14 | ./exiftool ../*.rtf > ../metacrawler_results/rtfmetacrawler_results.txt 15 | ./exiftool ../*.zip > ../metacrawler_results/zipmetacrawler_results.txt 16 | ./exiftool ../*.rtf > ../metacrawler_results/rtfmetacrawler_results.txt 17 | ./exiftool ../*.rar > ../metacrawler_results/rarmetacrawler_results.txt 18 | ./exiftool ../*.tgz > ../metacrawler_results/tgzmetacrawler_results.txt 19 | ./exiftool ../*.gz > ../metacrawler_results/gzmetacrawler_results.txt 20 | ./exiftool ../*.xz > ../metacrawler_results/xzmetacrawler_results.txt 21 | cp ../*.pdf ../downloaded_meta_files/ 22 | cp ../*.svg ../downloaded_meta_files/ 23 | cp ../*.doc ../downloaded_meta_files/ 24 | cp ../*.xls
../downloaded_meta_files/ 25 | cp ../*.xlsx ../downloaded_meta_files/ 26 | cp ../*.ppt ../downloaded_meta_files/ 27 | cp ../*.pptx ../downloaded_meta_files/ 28 | cp ../*.csv ../downloaded_meta_files/ 29 | cp ../*.rtf ../downloaded_meta_files/ 30 | cp ../*.tgz ../downloaded_meta_files/ 31 | cp ../*.gz ../downloaded_meta_files/ 32 | cp ../*.xz ../downloaded_meta_files/ 33 | rm ../*.pdf 34 | rm ../*.svg 35 | rm ../*.doc 36 | rm ../*.xls 37 | rm ../*.xlsx 38 | rm ../*.ppt 39 | rm ../*.pptx 40 | rm ../*.csv 41 | rm ../*.rtf 42 | rm ../*.tgz 43 | rm ../*.gz 44 | rm ../*.xz 45 | cd .. -------------------------------------------------------------------------------- /browser.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | TARGET="$1" 3 | BROWSER="xdg-open" 4 | VER="1.9" 5 | OKBLUE='\033[94m' 6 | OKRED='\033[91m' 7 | OKGREEN='\033[92m' 8 | OKORANGE='\033[93m' 9 | DELAY=1 10 | RESET='\e[0m' 11 | 12 | echo "[ + ] Enter Url" 13 | read URL 14 | if [ -z $URL ]; then 15 | echo -e "$OKORANGE + browser.sh URL $RESET" 16 | exit 17 | fi 18 | TARGET="${TARGET:-$URL}" 19 | # LOAD WEBSITE IN A WEB BROWSER 20 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:cgi+OR+ext:php+OR+ext:asp+OR+ext:aspx+OR+ext:jsp+OR+ext:jspx+OR+ext:swf+OR+ext:fla+OR+ext:xml" 2> /dev/null 21 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:doc+OR+ext:docx+OR+ext:csv+OR+ext:pdf+OR+ext:txt+OR+ext:log+OR+ext:bak" 2> /dev/null 22 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:action+OR+struts" 2> /dev/null 23 | $BROWSER "https://www.google.com/search?q=site:pastebin.com+$TARGET" 2> /dev/null 24 | $BROWSER "https://www.google.com/search?q=site:linkedin.com+employees+$TARGET" 2> /dev/null 25 | $BROWSER "https://www.google.com/search?q=site:$TARGET+intitle:index.of" 2> /dev/null 26 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:xml+|+ext:conf+|+ext:cnf+|+ext:reg+|+ext:inf+|+ext:rdp+|+ext:cfg+|+ext:txt+|+ext:ora+|+ext:ini" 2> /dev/null 27 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:sql+|+ext:dbf+|+ext:mdb" 2> /dev/null 28 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:log" 2> /dev/null 29 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:bkf+|+ext:bkp+|+ext:bak+|+ext:old+|+ext:backup" 2> /dev/null 30 | $BROWSER "https://www.google.com/search?q=site:$TARGET+intext:%22sql+syntax+near%22+|+intext:%22syntax+error+has+occurred%22+|+intext:%22incorrect+syntax+near%22+|+intext:%22unexpected+end+of+SQL+command%22+|+intext:%22Warning:+mysql_connect()%22+|+intext:%22Warning:+mysql_query()%22+|+intext:%22Warning:+pg_connect()%22" 2> /dev/null 31 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:doc+|+ext:docx+|+ext:odt+|+ext:pdf+|+ext:rtf+|+ext:sxw+|+ext:psw+|+ext:ppt+|+ext:pptx+|+ext:pps+|+ext:csv" 2> /dev/null 32 | $BROWSER "https://www.google.com/search?q=site:$TARGET+ext:php+|+ext:asp+|+ext:aspx+|+ext:jspf+|+ext:jspa+|+ext:txt+|+ext:swf" 2> /dev/null 33 | -------------------------------------------------------------------------------- /censys.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # Name: censys.py 4 | # Purpose: CensysIO API Script 5 | # By: Jerry Gamblin 6 | # Date: 18.12.15 7 | # Rev Level 0.9 (With some great fixes by @BurnItWithTruth) 8 | # ----------------------------------------------- 9 | 10 | from termcolor import colored 11 | import argparse 12 | import json 13 | import requests 14 | import codecs 15 | import locale 16 | import os 17 | import sys 18 | import ast 19
| 20 | 21 | class Censys: 22 | 23 | def __init__(self, ip): 24 | 25 | self.API_URL = "https://www.censys.io/api/v1" 26 | self.UID = "87e236b1-4e2c-41bd-9691-0ffd90d6af17" 27 | self.SECRET = "kykASr9kJyld7SnyANt8o0mj11rNC9ux" 28 | self.ip = ip 29 | 30 | def search(self): 31 | 32 | pages = float('inf') 33 | page = 1 34 | 35 | while page <= pages: 36 | 37 | params = {'query' : self.ip, 'page' : page} 38 | res = requests.post(self.API_URL + "/search/ipv4", json = params, auth = (self.UID, self.SECRET)) 39 | payload = res.json() 40 | 41 | 42 | for r in payload['results']: 43 | 44 | ip = r["ip"] 45 | proto = r["protocols"] 46 | proto = [p.split("/")[0] for p in proto] 47 | proto.sort(key=float) 48 | protoList = ','.join(map(str, proto)) 49 | 50 | 51 | print '[%s] IP: %s - Protocols: %s' % (colored('*', 'red'), ip, protoList) 52 | 53 | 54 | if '80' in protoList: 55 | self.view(ip) 56 | 57 | pages = payload['metadata']['pages'] 58 | page += 1 59 | 60 | def view(self, server): 61 | 62 | res = requests.get(self.API_URL + ("/view/ipv4/%s" % server), auth = (self.UID, self.SECRET)) 63 | payload = res.json() 64 | 65 | try: 66 | if 'title' in payload['80']['http']['get'].keys(): 67 | print "[+] Title: %s" % payload['80']['http']['get']['title'] 68 | if 'server' in payload['80']['http']['get']['headers'].keys(): 69 | print "[+] Server: %s" % payload['80']['http']['get']['headers']['server'] 70 | except Exception as error: 71 | print error 72 | 73 | parser = argparse.ArgumentParser(description = 'CENSYS.IO Web Server Search') 74 | parser.add_argument('-f', '--find', help='CENSYS Search', required = True) 75 | 76 | 77 | args = parser.parse_args() 78 | ip = args.find 79 | 80 | censys = Censys(ip) 81 | censys.search() -------------------------------------------------------------------------------- /Termux_fixme.sh: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You" (user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software you're bound to these points. # 6 | # 2. This End User License Agreement (EULA) is a legal agreement between the software # 7 | # author "Haroon Awan" and (YOU) the user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. # 10 | # 4. This software comes with an "as-is" warranty; author "Haroon Awan" takes no # 11 | # responsibility for what you do with this software, as you're free to use it. # 12 | # 5. Any other purpose(s) that it suits, as long as it is not related to any kind of # 13 | # crime or use in an unauthorized environment. # 14 | # 6. You can use this software to protect and secure your data and information in any # 15 | # environment. # 16 | # 7. It can also be used as a state of protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures to achieve protection.
# 19 | # *************************************************************************************** # 20 | 21 | #!/bin/bash 22 | 23 | clear 24 | 25 | echo "Ultimate Recon and Foot Printing Software Version 1.0a"; 26 | echo "Termux Installer By: Haroon Awan and HackerUniversee"; 27 | echo "Coded By: Haroon Awan"; 28 | echo "Mail: mrharoonawan@gmail.com"; 29 | echo ""; 30 | 31 | 32 | echo -e "Installing prerequisites" 33 | apt-get install -y xrdp 34 | apt-get install -y xdotool 35 | apt-get install -y ccrypt 36 | echo "y" | git clone https://github.com/haroonawanofficial/vasl.git 37 | 38 | echo -e "Installing Extra Perl Modules ..."; 39 | echo "y" | cpan install WWW::Mechanize 40 | echo "y" | cpan install HTML::TokeParser 41 | echo "y" | cpan install HTML::Parser 42 | echo "y" | apt-get install python 43 | echo "y" | apt-get install python2 44 | echo "y" | apt-get install python3 45 | 46 | echo "y" | wget https://cpan.metacpan.org/authors/id/B/BP/BPS/HTTP-Server-Simple-0.52.tar.gz 47 | tar -xvf HTTP-Server-Simple-0.52.tar.gz 48 | cd HTTP-Server-Simple-0.52 49 | perl Makefile.PL 50 | make 51 | make install 52 | cd .. 53 | echo "y" | wget https://cpan.metacpan.org/authors/id/G/GA/GAAS/HTML-Parser-3.72.tar.gz 54 | tar -xvf HTML-Parser-3.72.tar.gz 55 | cd HTML-Parser-3.72 56 | perl Makefile.PL 57 | make 58 | make install 59 | cd .. -------------------------------------------------------------------------------- /dork.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | #Google Search Tool . 3 | #sh0utz: oTo + #slotin + #bhf @ irc.bluehell.org - we 0wn your f4ce! ;p 4 | #Written by dni 5 | #much thanks to TuXeD & Syn4ps3 for their help ;) 6 | 7 | use LWP::UserAgent; 8 | 9 | @google = ("www.google.com","www.google.ae","www.google.at","www.google.be","www.google.ca","www.google.ch","www.google.cl","www.google.co.cr","www.google.co.hu","www.google.co.il","www.google.co.jp","www.google.co.kr","www.google.co.nz","www.google.co.th","www.google.co.uk","www.google.co.ve","www.google.com.ar","www.google.com.au","www.google.com.br","www.google.com.gr","www.google.com.hk","www.google.com.mx","www.google.com.ni","www.google.com.pa","www.google.com.pe","www.google.com.pr","www.google.com.py","www.google.com.ru","www.google.com.sg","www.google.com.tj","www.google.com.tr","www.google.com.tw","www.google.com.ua","www.google.com.uy","www.google.de","www.google.dk","www.google.es","www.google.fi","www.google.fr","www.google.gl","www.google.ie","www.google.it","www.google.kz","www.google.lv","www.google.nl","www.google.pl","www.google.uz"); 10 | 11 | @google2 = ("(U.S)","(Emiratos Arabes)","(Austria)","(Belgica)","(Canada)","(Suiza)","(Chile)","(Costa Rica)","(Hungria)","(Israel)","(Japon)","(Corea)","(Nueva Zelanda)","(Tailandia)","(Reino Unido)","(Venezuela)","(Argentina)","(Australia)","(Brasil)","(Grecia)","(Hong Kong)","(Mexico)","(Nicaragua)","(Panama)","(Peru)","(Puerto Rico)","(Paraguay)","(Rusia)","(Singapur)","(Tajikistan)","(Turquia)","(Taiwan)","(Ucrania)","(Uruguay)","(Alemania)","(Dinamarca)","(Espana)","(Finlandia)","(Francia)","(Groelandia)","(Irlanda)","(Italia)","(Kazajistan)","(Letonia)","(Holanda)","(Polonia)","(Uzbekistan)"); 12 | 13 | 14 | if(!$ARGV[0]) 15 | { 16 | print "Usage: perl d0rk.pl <query> <engine>\n"; 17 | print "perl $0 \"Powered by your mom\" www.google.nl\n"; 18 | print "Now you must enter your query: "; 19 | chomp($param0=<STDIN>); 20 | } 21 | if(!$ARGV[1]) { 22 | print "perl $0 <query> <engine>\n"; 23 | print "perl $0 \"Powered by your mom\" www.google.nl\n"; 24 | print
"Stand by for a list of google engines:..\n"; 25 | $i=0; 26 | while ($i < scalar(@google)) 27 | { 28 | printf "$i. %-15s %-15s ", $google[$i], $google2[$i]; 29 | $m = ($i+1) % 3; 30 | if ($m == 0 && $i != 0) { print "\n";} 31 | $i++; 32 | } 33 | 34 | print "\nJust press the number of the Search Engine you are going to use and then press ENTER: "; 35 | $aux=; 36 | $param1 = $google[$aux]; 37 | } 38 | else 39 | { 40 | $param1 = $ARGV[1]; 41 | } 42 | 43 | if ($ARGV[0]) { $param0 = $ARGV[0];} 44 | $query = $param0; 45 | print "Searching $param1 for: $query\n"; 46 | sleep 2; 47 | $counter = 0; 48 | $ua = new LWP::UserAgent; 49 | $ua->timeout(30); 50 | $ua->agent(" Mozilla/5.0 (0wn3d; U; PPC Sik OS XXX Mach-O; en-US; rv:1.8.1) Gecko/20061010 Firefox/2.0"); 51 | $a=0; 52 | 53 | while($results !~ /results/) { 54 | $url = "http://$param1/search?q=\"$query\"&hl=id&lr=&start=$a&sa=N"; 55 | $response = $ua->get($url); 56 | $counter=$counter+10; 57 | if(!($response->is_success)) { 58 | print ($response->status_line. " Error! \n"); } 59 | else { 60 | @results = $response->as_string; 61 | $results= "@results"; 62 | 63 | while($results =~ m/.*?<\/a>/g) 64 | { 65 | 66 | $results1 =~ s/ .*?<\/a>/$1/; 67 | $host =$1; 68 | print "$host\n"; 69 | open (OUT, ">>scan_results.txt"); 70 | print OUT "$host\n"; 71 | close(OUT); 72 | } 73 | 74 | $a = $a + 10; 75 | } 76 | 77 | } 78 | 79 | print "Results saved to scan_results.txt\n"; -------------------------------------------------------------------------------- /sixth.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl 2 | use Net::RawIP qw(:pcap); 3 | require 'getopts.pl'; 4 | 5 | # Proof of concept thingum, for AntiRez's scanning through 6 | # host ip ID's - Allows for totally invisible port scan 7 | # Select dummy_ip where dummy_IP is a host on the internet, that 8 | # isn't actively communicating (or even one thats communicating, but 9 | # very consistantly 10 | 11 | $a = new Net::RawIP ({tcp =>{}}); 12 | $b = new Net::RawIP; 13 | $c = new Net::RawIP; 14 | 15 | Getopts('d:t:s:f:i:p'); 16 | 17 | $dummy = $opt_d; 18 | $target = $opt_t; 19 | $startport = $opt_s; 20 | $device = $opt_i; 21 | $device = 'ppp0' if($device eq ""); 22 | ($opt_f == "") ? ($finalport = $startport) : ($finalport = $opt_f); 23 | 24 | $my_int=${ifaddrlist()}{$device}; 25 | 26 | unless ($opt_d && $opt_t && $opt_s ) 27 | { 28 | print "\nUsage $0 -d -t -s -f -i \n"; 29 | exit; 30 | } 31 | 32 | print "\n Launching SixthSense by MH\n\n"; 33 | print " Dummy : $dummy\n Target : $target\n"; 34 | print " Device : $device\n Startport : $startport\n"; 35 | print " Endport : $finalport\n"; 36 | 37 | # Initiate lipbcap 38 | $filt = 'ip proto \\tcp and dst '.$my_int.' 
39 | $size = 1500;
40 | $tout = 30;
41 | $pcap = $a->pcapinit($device,$filt,$size,$tout);
42 | $offset = linkoffset($pcap);
43 | 
44 | print "\nScanning Dumb Host \(for Dumbness\)\n";
45 | 
46 | for($m=0;$m<4;$m++){sleep 1;&send_packet;loop $pcap,1,\&dmp,\@a;}
47 | &dumb_scan;
48 | 
49 | if($winner==1){
50 | for ($j=$startport;$j<$finalport+1;$j++)
51 | {
52 | undef @seqs;
53 | $port = $j;
54 | for($m=0;$m<4;$m++){sleep 1;&send_packet;&send_bad_packet($port);loop $pcap,1,\&dmp,\@a;}
55 | &post_spoof;
56 | }
57 | }
58 | 
59 | sub dmp{
60 | $a->bset(substr($_[2],$offset));
61 | my ($eyedee) = $a->get({ip => [qw(id)]});
62 | printf(" %u\n",$eyedee);
63 | push(@seqs,$eyedee);
64 | }
65 | 
66 | sub dumb_scan{
67 | # Make sure you have 4 non-zero id's before going on
68 | do{sleep 1;}while($seqs[3]==0 || $seqs[2]==0 || $seqs[1]==0 || $seqs[0]==0);
69 | 
70 | # Check consistency of ip id increments
71 | if($seqs[3]-$seqs[2] == $seqs[2]-$seqs[1]
72 | && $seqs[2]-$seqs[1] == $seqs[1]-$seqs[0])
73 | {
74 | $diff = $seqs[2]-$seqs[1];
75 | print "\nWe have a consistent ",$diff," increment host\n";
76 | print "*** Injecting Spoofed Packet ***\n\n";
77 | undef @seqs;
78 | $winner=1;
79 | }
80 | else {print "\n\n Dumb host not dumb enough... exiting..\n\n";}
81 | }
82 | 
83 | sub post_spoof{
84 | # Make sure we have 4 non-zero id's
85 | do{sleep 1;}
86 | while($seqs[3]==0 || $seqs[2]==0 || $seqs[1]==0 || $seqs[0]==0);
87 | 
88 | # Check if id increments remained constant, despite our spoofed
89 | # packet
90 | if($seqs[3]-$seqs[2] == $seqs[2]-$seqs[1] &&
91 | $seqs[2]-$seqs[1] == $seqs[1]-$seqs[0] &&
92 | $seqs[2]-$seqs[1] == $diff)
93 | {print "\nNope... doesn't look like $port is open on $target \n\n";}
94 | else {
95 | print "\n*** Yup, looks like $port is open on $target ***\n\n";
96 | push(@disoop,$port);
97 | }
98 | }
99 | 
100 | sub send_packet{
101 | $b->set({ ip => {saddr => $my_int,
102 | daddr => $dummy},tcp=> {dest => 0,
103 | source => 0, ack_seq => "0",}});
104 | $b->send;
105 | }
106 | 
107 | sub send_bad_packet{
108 | $c->set({ ip => {saddr => $dummy,
109 | daddr => $target},tcp=> {dest => $port,
110 | source => '80',psh => 1,
111 | syn => 1}});
112 | $c->send;
113 | }
114 | 
115 | sub END{
116 | if($winner==1 && @disoop != ""){
117 | print " *** ";
118 | foreach(@disoop){print "$_ ";}
119 | print " appear to be open on $target\n\n";
120 | }
121 | } -------------------------------------------------------------------------------- /ip.pl: --------------------------------------------------------------------------------
1 | #!/usr/bin/perl
2 | use Net::RawIP qw(:pcap);
3 | require 'getopts.pl';
4 | 
5 | # Proof of concept thingum, for AntiRez's scanning through
6 | # host ip ID's - Allows for a totally invisible port scan.
7 | # Select dummy_ip where dummy_ip is a host on the internet that
8 | # isn't actively communicating (or even one that is communicating, but
9 | # very consistently)
10 | 
11 | $a = new Net::RawIP ({tcp =>{}});
12 | $b = new Net::RawIP;
13 | $c = new Net::RawIP;
14 | 
15 | Getopts('d:t:s:f:i:p');
16 | 
17 | $dummy = $opt_d;
18 | $target = $opt_t;
19 | $startport = $opt_s;
20 | $device = $opt_i;
21 | $device = 'wlan0' if($device eq "");
22 | ($opt_f == "") ? ($finalport = $startport) : ($finalport = $opt_f);
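# When -f is omitted, $finalport falls back to $startport, so exactly one port is probed.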
23 | 
24 | $my_int=${ifaddrlist()}{$device};
25 | 
26 | unless ($opt_d && $opt_t && $opt_s )
27 | {
28 | print "\nUsage $0 -d <dummy> -t <target> -s <startport> -f <endport> -i <interface>\n";
29 | exit;
30 | }
31 | 
32 | print "\n Launching SixthSense by MH\n\n";
33 | print " Dummy : $dummy\n Target : $target\n";
34 | print " Device : $device\n Startport : $startport\n";
35 | print " Endport : $finalport\n";
36 | 
37 | # Initiate libpcap
38 | $filt = 'ip proto \\tcp and dst '.$my_int.' and src '.$dummy;
39 | $size = 1500;
40 | $tout = 30;
41 | $pcap = $a->pcapinit($device,$filt,$size,$tout);
42 | $offset = linkoffset($pcap);
43 | 
44 | print "\nScanning Dumb Host \(for Dumbness\)\n";
45 | 
46 | for($m=0;$m<4;$m++){sleep 1;&send_packet;loop $pcap,1,\&dmp,\@a;}
47 | &dumb_scan;
48 | 
49 | if($winner==1){
50 | for ($j=$startport;$j<$finalport+1;$j++)
51 | {
52 | undef @seqs;
53 | $port = $j;
54 | for($m=0;$m<4;$m++){sleep 1;&send_packet;&send_bad_packet($port);loop $pcap,1,\&dmp,\@a;}
55 | &post_spoof;
56 | }
57 | }
58 | 
59 | sub dmp{
60 | $a->bset(substr($_[2],$offset));
61 | my ($eyedee) = $a->get({ip => [qw(id)]});
62 | printf(" %u\n",$eyedee);
63 | push(@seqs,$eyedee);
64 | }
65 | 
66 | sub dumb_scan{
67 | # Make sure you have 4 non-zero id's before going on
68 | do{sleep 1;}while($seqs[3]==0 || $seqs[2]==0 || $seqs[1]==0 || $seqs[0]==0);
69 | 
70 | # Check consistency of ip id increments
71 | if($seqs[3]-$seqs[2] == $seqs[2]-$seqs[1]
72 | && $seqs[2]-$seqs[1] == $seqs[1]-$seqs[0])
73 | {
74 | $diff = $seqs[2]-$seqs[1];
75 | print "\nWe have a consistent ",$diff," increment host\n";
76 | print "*** Injecting Spoofed Packet ***\n\n";
77 | undef @seqs;
78 | $winner=1;
79 | }
80 | else {print "\n\n Dumb host not dumb enough... exiting..\n\n";}
81 | }
82 | 
83 | sub post_spoof{
84 | # Make sure we have 4 non-zero id's
85 | do{sleep 1;}
86 | while($seqs[3]==0 || $seqs[2]==0 || $seqs[1]==0 || $seqs[0]==0);
87 | 
88 | # Check if id increments remained constant, despite our spoofed
89 | # packet
90 | if($seqs[3]-$seqs[2] == $seqs[2]-$seqs[1] &&
91 | $seqs[2]-$seqs[1] == $seqs[1]-$seqs[0] &&
92 | $seqs[2]-$seqs[1] == $diff)
93 | {print "\nNope... doesn't look like $port is open on $target \n\n";}
94 | else {
95 | print "\n*** Yup, looks like $port is open on $target ***\n\n";
96 | push(@disoop,$port);
97 | }
98 | }
99 | 
100 | sub send_packet{
101 | $b->set({ ip => {saddr => $my_int,
102 | daddr => $dummy},tcp=> {dest => 0,
103 | source => 0, ack_seq => "0",}});
104 | $b->send;
105 | }
106 | 
107 | sub send_bad_packet{
108 | $c->set({ ip => {saddr => $dummy,
109 | daddr => $target},tcp=> {dest => $port,
110 | source => '80',psh => 1,
111 | syn => 1}});
112 | $c->send;
113 | }
114 | 
115 | sub END{
116 | if($winner==1 && @disoop != ""){
117 | print " *** ";
118 | foreach(@disoop){print "$_ ";}
119 | print " appear to be open on $target\n\n";
120 | }
121 | } -------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # LICENSE
2 | EULA
3 | 
4 | # ReconCobra
5 | - ReconCobra is footprinting software for ultimate information gathering
6 | - Kali, Parrot OS, Black Arch, Termux, Android LED TV
7 | 
8 | 
9 | # Interface
10 | - The software has 82 options, fully automated, with powerful information-gathering capability
11 | 
12 | [interface screenshots]
18 | 
19 | # In-Action
20 | 
21 | [in-action screenshots]
41 | 
42 | 
43 | 
44 | # Brief Introduction
45 | - ReconCobra is useful to banks, private organisations, and ethical hackers performing legal audits.
46 | - It works as a defensive exercise: it gathers as much of the information an intruder could use for unauthorised access as possible.
47 | - With the emergence of more advanced technology, cybercriminals have also found more ways to get into the systems of many organizations.
48 | - ReconCobra can audit firewall behaviour (for example, whether it leaks backend machines/servers or replies to pings), and it can map internal and external networks where software such as ERP systems and mail firewalls is installed, exposing servers. It performs footprinting, scanning, and enumeration of the target as far as possible, discovering and collecting information such as usernames, web technologies, files, endpoints, APIs, and much more.
49 | - It is the first step to stopping cybercriminals: secure the information your infrastructure is leaking before they find it.
50 | ReconCobra aims to be free of false positives: when something exists it will be shown no matter what, and when nothing is found it gives blank results rather than errors.
51 | 
52 | 
53 | # University Course
54 | - ReconCobra is now a part of International Hacking Trainings for OSINT
55 | - Cybersecurity365.com OSINT for Reconnaissance training for CEH, CISSP, Security+, ITPA
56 | 
57 | # Appeared
58 | - https://codeby.net/threads/reconcobra.68782
59 | - https://www.facebook.com/1470285456587684/posts/reconcobra-ultimate-recon-software-for-information-gatheringbrief-introductionre/2351883108427910/
60 | - https://raidforums.com/Thread-reconcobra-Ultimate-Recon-Software-for-Information-Gathering
61 | - Kelvin Security
62 | - National Cyber Services Security
63 | - https://psdrepo.blogspot.com/2019/08/codebynet_14
64 | - Cyber Junkies
65 | - https://vaultdomain.com/reconcobrathe-ultimate-recon-software-for-information-gathering-osint/
66 | - rdp4you
67 | - Digital Union Media in OSINT category
68 | 
69 | # Tweets
70 | - Over 1,000 tweets about ReconCobra
71 | 
72 | # Integrations
73 | - Tigerman Root Software Package
74 | 
75 | # Fan-Made YouTube Videos
76 | - https://www.youtube.com/watch?v=kfykYEMS8YU
77 | - https://www.youtube.com/watch?v=j2DsDi43jO8
78 | - https://www.facebook.com/MRRobotZalla/videos/769801130133298/?t=0
79 | 
80 | # Official YouTube Video
81 | - https://www.youtube.com/watch?v=TupCmgzp6hg
82 | 
87 | # Kali Installation
88 | - git clone https://github.com/haroonawanofficial/ReconCobra.git
89 | - cd ReconCobra
90 | - sudo chmod u+x *.sh
91 | - ./Kali_Installer.sh
92 | - ReconCobra will integrate as system software
93 | - Dependencies will be handled automatically
94 | - Third party software(s)/dependencies/modules will be handled automatically
95 | 
96 | # Parrot OS Installation
97 | - git clone https://github.com/haroonawanofficial/ReconCobra.git
98 | - cd ReconCobra
99 | - chmod u+x *.sh
100 | - bash ParrotOS_Installer.sh
101 | - ReconCobra will integrate as system software
102 | - Dependencies will be handled automatically
103 | - Third party software(s)/dependencies/modules will be handled automatically
104 | 
105 | # Termux Installation
106 | - git clone https://github.com/haroonawanofficial/ReconCobra.git
107 | - cd ReconCobra
108 | - chmod u+x *.sh
109 | - pkg install proot
110 | - Type: termux-chroot
111 | - ./Termux_Installer.sh
112 | - ./Termux_fixme.sh
113 | - Reboot your Termux
114 | - perl ReconCobraTermux.pl
115 | - Dependencies will be handled automatically for Termux
116 | - Third party software(s)/dependencies/modules will be handled automatically for Termux
117 | 
118 | # Android LED TV Installation
119 | - Install Termux
120 | - Connect a USB keyboard for input
121 | - git clone https://github.com/haroonawanofficial/ReconCobra.git
122 | - cd ReconCobra
123 | - chmod u+x *.sh
124 | - pkg install proot
125 | - Type: termux-chroot
126 | - ./Termux_Installer.sh
127 | - ./Termux_fixme.sh
128 | - Reboot your Termux
129 | - perl ReconCobraTermux.pl
130 | - Dependencies will be handled automatically for Termux
131 | - Third party software(s)/dependencies/modules will be handled automatically for Termux
132 | 
133 | # Black Arch Installation
134 | - Open an issue if an error occurs
135 | - git clone https://github.com/haroonawanofficial/ReconCobra.git
136 | - cd ReconCobra
137 | - chmod u+x *.sh
138 | - ./BlackArch_Installer.sh
139 | - ReconCobra will integrate as system software
140 | - Dependencies will be handled automatically
141 | - Third party software(s)/dependencies/modules will be handled automatically
142 | 
143 | # Developer
144 | - Haroon Awan
145 | - mrharoonawan@gmail.com
146 | 
147 | # Co-developer & Senior Tester
148 | - Arun S
149 | 
150 | # Sponsor and Support via BTC
151 | - 3BuUYgEgsRuEra4GwqNVLKnDCTjLEDfptu
152 | 
-------------------------------------------------------------------------------- /Termux_Installer.sh: --------------------------------------------------------------------------------
1 | # *************************************************************************************** # 
2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 
3 | # Agreement between "Haroon Awan" and "You" (user). # 
4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 
5 | # 1. By using this piece of software you are bound to these points. # 
6 | # 2. This End User License Agreement (EULA) is a legal agreement between the software # 
7 | # author "Haroon Awan" and you, the user of this software. # 
8 | # 3. This software grants users the right to use it for any purpose, and to modify and # 
9 | # redistribute it as a creative work. # 
10 | # 4. This software comes with an "as-is" warranty; the author "Haroon Awan" takes no # 
11 | # responsibility for what you do with this software, as you are free to use it. # 
12 | # 5. It may be used for any other purpose(s) that suit you, as long as they are not # 
13 | # related to any kind of crime or use in an unauthorized environment. # 
14 | # 6. You can use this software to protect and secure your data and information in any # 
15 | # environment. # 
16 | # 7. It can also be used as a safeguard against the unauthorized use of # 
17 | # information. # 
18 | # 8. It can be used to take measures to achieve protection. #
19 | # *************************************************************************************** # 
20 | 
21 | #!/bin/bash
22 | 
23 | clear
24 | 
25 | echo "Ultimate Recon and Foot Printing Software Version 1.0a";
26 | echo "Termux Installer By: Haroon Awan and HackerUniversee";
27 | echo "Coded By: Haroon Awan";
28 | echo "Mail: mrharoonawan@gmail.com";
29 | echo "";
30 | 
31 | 
32 | echo -e "prerequisite install"
33 | apt-get install wget
34 | apt-get install make
35 | apt-get install clang
36 | apt-get install unzip
37 | apt-get install tar
38 | apt-get install -y xrdp
39 | apt-get install -y ccrypt
40 | 
41 | echo -e "Installing Perl ...";
42 | apt-get install -y perl
43 | echo -e "Installing JSON Module ...";
44 | cpan install JSON
45 | echo -e "Installing Extra Perl Modules ...";
46 | echo "y" | wget https://cpan.metacpan.org/authors/id/B/BP/BPS/HTTP-Server-Simple-0.52.tar.gz
47 | tar -xvf HTTP-Server-Simple-0.52.tar.gz
48 | cd HTTP-Server-Simple-0.52
49 | perl Makefile.PL
50 | make
51 | make install
52 | cd ..
53 | echo "y" | cpan install WWW::Mechanize
54 | echo "y" | cpan install HTML::TokeParser
55 | echo "y" | cpan install Term::ANSIColor
56 | echo "y" | cpan install Mojo::DOM
57 | echo "y" | cpan install Data::Dumper
58 | echo "y" | cpan install Win32::Console::ANSI
59 | echo "y" | cpan install HTML::TableExtract
60 | echo "y" | cpan install Data::Validate::Domain
61 | echo "y" | cpan install LWP::Protocol::https
62 | echo "y" | cpan install Mozilla::CA
63 | echo "y" | cpan install Bundle::LWP
64 | 
65 | 
66 | echo -e "Installing dependencies ...";
67 | echo "y" | apt-get install xdg-utils
68 | echo "y" | apt-get install python-yaml
69 | echo "y" | apt-get install hping3
70 | echo "y" | apt-get install python
71 | echo "y" | apt-get install golang
72 | echo "y" | apt-get install curl
73 | echo "y" | apt-get install nfs-common
74 | echo "y" | apt-get install smbclient
75 | echo "y" | apt-get install x11-utils xutils-dev imagemagick libxext-dev xspy
76 | echo "y" | apt-get install cargo
77 | echo "y" | apt-get install gem
78 | gem install wayback_machine_downloader
79 | echo "y" | apt-get install perl-LWP-Protocol-https
80 | echo "y" | git clone https://github.com/haroonawanofficial/cobra.git
81 | curl -LO https://raw.githubusercontent.com/Hax4us/httrack_In_termux/master/httrack
82 | cd httrack
83 | chmod u+x *
84 | sh httrack
85 | cd ..
86 | echo "y" | git clone https://github.com/haroonawanofficial/maahro.git
87 | echo "y" | git clone https://github.com/haroonawanofficial/ShaheenX.git
88 | echo "y" | git clone https://github.com/stormshadow07/HackTheWorld.git
89 | cd HackTheWorld
90 | chmod +x install.sh && ./install.sh
91 | cd ..
92 | echo "y" | git clone https://github.com/chenjj/CORScanner.git
93 | cd CORScanner
94 | pip install -r requirements.txt
95 | cd ..
96 | echo "y" | git clone https://github.com/yassineaboukir/Asnlookup.git
97 | echo "y" | git clone https://github.com/exiftool/exiftool.git
98 | echo "y" | git clone https://github.com/sensepost/BiLE-suite.git
99 | echo "y" | git clone https://github.com/GerbenJavado/LinkFinder.git
100 | cd LinkFinder
101 | python setup.py install
102 | cd ..
103 | echo "y" | git clone https://github.com/heycam/json-describe
104 | cd json-describe
105 | cargo build
106 | cd ..
107 | echo "y" | git clone https://github.com/haroonawanofficial/vasl.git
108 | echo "y" | apt-get install nmap
109 | echo "y" | git clone https://github.com/stormshadow07/HackTheWorld.git
110 | cd HackTheWorld
111 | chmod +x install.sh && ./install.sh
112 | cd ..
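# Note: the clone-and-build steps in this installer are not idempotent; on a re-run,
# "git clone" aborts when its target directory already exists. A minimal guard could
# look like the sketch below ("clone_once" is an illustrative helper, not part of the
# original installer):
clone_once() {
    # derive the checkout directory from the repository URL and clone only if absent
    local dir
    dir="$(basename "$1" .git)"
    [ -d "$dir" ] || git clone "$1"
}
# e.g.: clone_once https://github.com/threat9/routersploit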
112 | echo "y" | git clone https://github.com/threat9/routersploit 113 | cd routersploit 114 | sudo easy_install pip 115 | sudo pip install -r requirements.txt 116 | cd .. 117 | echo "y" | git clone https://github.com/haroonawanofficial/panthera.git 118 | echo "y" | git clone https://github.com/naqushab/SearchEngineScrapy.git 119 | pip install jsbeautifier 120 | pip install argparse 121 | pip install requests 122 | pip install request 123 | cd SearchEngineScrapy 124 | pip install -r requirements.txt 125 | virtualenv --python="2" env 126 | env/bin/activate 127 | cd .. 128 | echo "y" | git clone https://github.com/FortyNorthSecurity/EyeWitness.git 129 | cd EyeWitness/setup 130 | chmod u+x setup.sh 131 | ./setup.sh 132 | cd .. 133 | chmod u+x *.sh 134 | cat traceroute-function >> ~/.bashrc 135 | source ~/.bashrc 136 | 137 | 138 | echo -e "[+] Installed Success!"; 139 | echo -e "[+] Reboot Termux"; 140 | echo -e "[+] Upon successful reboot enter for interface, perl ReconCobratermux.pl"; -------------------------------------------------------------------------------- /certspot.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "fmt" 7 | "io/ioutil" 8 | "log" 9 | "math/rand" 10 | "net/http" 11 | "net/url" 12 | "os" 13 | "strings" 14 | "time" 15 | ) 16 | 17 | var _ = fmt.Println 18 | 19 | // Log Certficate logs 20 | type Log struct { 21 | ID string `json:"id"` 22 | Index int64 `json:"index"` 23 | Timestamp string `json:"timestamp"` 24 | } 25 | 26 | // Certificate Certspotter Certificate Object 27 | type Certificate struct { 28 | Type string `json:"type"` 29 | Sha256 string `json:"sha256"` 30 | DNSNames []string `json:"dns_names"` 31 | PubkeySha256 string `json:"pubkey_sha256"` 32 | Issuer string `json:"issuer"` 33 | NotBefore string `json:"not_before"` 34 | NotAfter string `json:"not_after"` 35 | Logs []Log `json:"logs"` 36 | Data string `json:"data"` 37 | } 38 | 39 | type header struct { 40 | name string 41 | value string 42 | } 43 | 44 | type query struct { 45 | key string 46 | value string 47 | } 48 | 49 | // used for get or post request 50 | type request struct { 51 | method string 52 | url *url.URL 53 | body []byte 54 | headers []header 55 | params []query 56 | } 57 | 58 | func randomUserAgent() string { 59 | 60 | userAgents := make([]string, 0) 61 | userAgents = append(userAgents, 62 | "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36", 63 | "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.19 (KHTML, like Gecko) Chrome/1.0.154.53 Safari/525.19", 64 | "Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US) AppleWebKit/533.4 (KHTML, like Gecko) Chrome/5.0.375.86 Safari/533.4", 65 | "Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/540.0 (KHTML,like Gecko) Chrome/9.1.0.0 Safari/540.0", 66 | "Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Ubuntu/10.10 Chromium/8.0.552.237 Chrome/8.0.552.237 Safari/534.10", 67 | "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16", 68 | "Mozilla/5.0 (Windows; U; Windows NT 6.1; x64; fr; rv:1.9.2.13) Gecko/20101203 Firebird/3.6.13", 69 | "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A", 70 | "Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25", 71 | "Mozilla/5.0 (Windows NT 5.2; RW; 
rv:7.0a1) Gecko/20091211 SeaMonkey/9.23a1pre",
72 |         "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1",
73 |         "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0",
74 |         "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0",
75 |         "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0",
76 |     )
77 | 
78 |     // Fisher–Yates shuffle
79 |     // shuffle without allocating any additional slices.
80 |     for i := range userAgents {
81 |         j := rand.Intn(i + 1)
82 |         userAgents[i], userAgents[j] = userAgents[j], userAgents[i]
83 |     }
84 | 
85 |     return userAgents[rand.Intn(len(userAgents))]
86 | }
87 | 
88 | func bakeRequest(r request) *http.Request {
89 | 
90 |     req, err := http.NewRequest(r.method, r.url.String(), bytes.NewBuffer(r.body))
91 |     if err != nil {
92 |         panic(err)
93 |     }
94 | 
95 |     for _, header := range r.headers {
96 | 
97 |         req.Header.Set(header.name, header.value)
98 |     }
99 | 
100 |     if len(r.params) > 0 {
101 | 
102 |         query := req.URL.Query()
103 | 
104 |         for _, q := range r.params {
105 | 
106 |             query.Add(q.key, q.value)
107 |         }
108 | 
109 |         req.URL.RawQuery = query.Encode()
110 |     }
111 | 
112 |     return req
113 | }
114 | 
115 | // makes an http GET or POST query
116 | func makeRequest(r request) *http.Response {
117 | 
118 |     req := bakeRequest(r)
119 |     client := &http.Client{}
120 | 
121 |     resp, err := client.Do(req)
122 | 
123 |     if err != nil {
124 |         log.Fatal(err)
125 |     }
126 | 
127 |     return resp
128 | }
129 | 
130 | // CertScraper requests the API for related certificates
131 | func CertScraper(search string) []Certificate {
132 | 
133 |     url, err := url.Parse("https://certspotter.com/api/v0/certs")
134 | 
135 |     if err != nil {
136 |         log.Fatal(err)
137 |     }
138 | 
139 |     body := []byte("")
140 |     headers := []header{
141 |         header{name: "Host", value: "certspotter.com"},
142 |         header{name: "User-Agent", value: randomUserAgent()},
143 |         header{name: "Accept", value: "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"},
144 |         header{name: "Accept-Language", value: "en-US,en;q=0.5"},
145 |         header{name: "Accept-Encoding", value: "gzip, deflate, br"},
146 |         header{name: "Connection", value: "keep-alive"},
147 |     }
148 | 
149 |     params := []query{
150 |         query{key: "domain", value: search},
151 |     }
152 | 
153 |     resp := makeRequest(request{method: "GET", url: url, body: body, headers: headers, params: params})
154 |     defer resp.Body.Close()
155 | 
156 |     certificates := make([]Certificate, 0)
157 | 
158 |     rbody, err := ioutil.ReadAll(resp.Body)
159 | 
160 |     if err != nil {
161 |         log.Fatal(err)
162 |     }
163 | 
164 |     err = json.Unmarshal(rbody, &certificates)
165 | 
166 |     if err != nil {
167 |         log.Fatal(err)
168 |     }
169 | 
170 |     return certificates
171 | }
172 | 
173 | func contains(s []string, e string) bool {
174 |     for _, a := range s {
175 |         if a == e {
176 |             return true
177 |         }
178 |     }
179 |     return false
180 | }
181 | 
182 | // init will be called before the main function
183 | // It's the right place to initialize the seed value
184 | func init() {
185 | 
186 |     // note:
187 |     // Each time you set the same seed, you get the same sequence
188 |     // You have to set the seed only once
189 |     // you simply call Intn to get the next random integer
190 |     rand.Seed(time.Now().UTC().UnixNano())
191 | }
192 | 
193 | func main() {
194 | 
195 |     if len(os.Args) != 2 {
196 |         log.Fatal("Usage: certdomainfinder example.com")
197 |     }
198 | 
199 |     domain := os.Args[1]
200 |     duplicateDomains := make([]string, 0)
201 | 
202 |     certificates :=
CertScraper(domain) 203 | 204 | for _, certificate := range certificates { 205 | for _, dnsName := range certificate.DNSNames { 206 | if !strings.Contains(dnsName, "*") && strings.Contains(dnsName, domain) && !contains(duplicateDomains, dnsName) { 207 | 208 | duplicateDomains = append(duplicateDomains, dnsName) 209 | fmt.Println(dnsName) 210 | } 211 | } 212 | } 213 | } -------------------------------------------------------------------------------- /metacrawler.pl: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You"(user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software your bound to these point. # 6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 7 | # author "Haroon Awan" and (YOU) user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. # 10 | # 4. This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 11 | # what you do with by/this software as your free to use this software. # 12 | # 5. Any other purpose(s) that it suites as long as it is not related to any kind of # 13 | # crime or using it in un-authorized environment. # 14 | # 6. You can use this software to protect and secure your data information in any # 15 | # environment. # 16 | # 7. It can also be used in state of being protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures achieve protection. 
# 19 | # *************************************************************************************** # 20 | 21 | #!/usr/bin/perl 22 | 23 | use HTML::TokeParser; 24 | use Mojo; 25 | use Mojo::DOM; 26 | use HTML::TokeParser; 27 | use HTTP::Request; 28 | use LWP::Simple; 29 | use LWP::UserAgent; 30 | use IO::Socket::INET; 31 | use Term::ANSIColor; 32 | use IO::Select; 33 | use HTTP::Response; 34 | use HTTP::Request::Common qw(POST); 35 | use HTTP::Request::Common qw(GET); 36 | use URI::URL; 37 | use feature ':5.10'; 38 | use LWP::UserAgent; 39 | no warnings 'uninitialized'; 40 | use Term::ANSIColor; 41 | use Data::Validate::Domain qw(is_domain); 42 | 43 | system "clear"; 44 | print color('bold red'); 45 | print "\n\n Project\n"; 46 | print "\n : ShaheenX :\n\n\n"; 47 | print color('bold yellow'); 48 | print "[ + ] Programmer: Haroon Awan\n"; 49 | print "[ + ] License: EULA\n"; 50 | print "[ + ] Version: 1.0\n"; 51 | print "[ + ] Contact: mrharoonawan\@gmail\.com \n"; 52 | print "[ + ] Environment: Shell & Perl for Debian/Kali\n"; 53 | print "[ + ] Github: Https://www.github.com/haroonawanofficial\n"; 54 | print "[ + ] Design Scheme: Get meta data from google\n"; 55 | print "[ + ] Usage: Read README.MD before using\n\n\n"; 56 | print color('reset'); 57 | print color("bold white"),"[ + ] 1 - Download Meta Data from Bing Search Engine\n"; 58 | print color("bold white"),"[ + ] 2 - Download Meta Data from Google Search Engine\n"; 59 | print color("bold white"),"[ + ] Enter desired search engine option: "; 60 | print color("green"); 61 | print color 'reset'; 62 | chomp($name=); 63 | 64 | if ($name=~ "1") 65 | { 66 | if ($^O =~ /MSWin32/) {system("cls"); system("color A"); 67 | }else {} 68 | 69 | # USER AGENT ALGORITHM ###### 70 | $ag = LWP::UserAgent->new(); 71 | $ag->agent("Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.3) Gecko/20010801"); 72 | $ag->timeout(10); 73 | #$ag->agent("Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.157 Safari/537.36"); 74 | #Extra user-agent in case, google block any kind of request 75 | 76 | # DORK AND QUERY ALGORITHM ###### 77 | print color("bold Green")," \n\n [ + ] Enter domain name only: "; 78 | chomp($dork=); 79 | print color("yellow"), "\n"; 80 | 81 | 82 | # PAGE SCRAPE ALROGITHM ###### 83 | for (my $i=1; $i<=2000; $i+=10) { 84 | $url = "https://www.bing.com/search?q=site%3A$dork+ext:xml | ext:conf | ext:cnf | ext:reg | ext:inf | ext:pdf | ext:rdp | ext:cfg | ext:txt | ext:ora | ext:ini | ext:doc | ext:docx | ext:odt | ext:pdf | ext:rtf | ext:sxw | ext:psw | ext:ppt | ext:pptx | ext:pps | ext:csv&filt=all&first=$i&FORM=PERE"; 85 | $resp = $ag->request(HTTP::Request->new(GET => $url)); 86 | $rrs = $resp->content; 87 | 88 | # ERROR HANDLGING ALGORITHM ###### 89 | if ($rrs =~ m/Enter captcha/i) { 90 | print "[!] 
Error: Bing is blocking our requests, change your IP and clear cache [!]\n\n"; 91 | exit; 92 | } 93 | else {} 94 | 95 | $p = HTML::TokeParser->new(\$rrs); 96 | while ($p->get_tag("cite")) { 97 | my @link = $p->get_trimmed_text("/cite"); 98 | foreach(@link) { print "$_\n"; } 99 | open(OUT, ">>metacrawler_bingfiles.txt"); print OUT "@link\n"; close(OUT); 100 | } 101 | } 102 | print "[+] Finished enumerating Bing\n"; 103 | $cleaner = system("./cob_a.sh"); 104 | exit; 105 | } 106 | 107 | 108 | if ($name=~ "2") 109 | { 110 | if ($^O =~ /MSWin32/) {system("cls"); system("color A"); 111 | }else {} 112 | 113 | # USER AGENT ALGORITHM ###### 114 | $ag = LWP::UserAgent->new(); 115 | $ag->agent("Mozilla/5.0 (X11; U; Linux i686; en-US; rv:0.9.3) Gecko/20010801"); 116 | $ag->timeout(10); 117 | 118 | # DORK AND QUERY ALGORITHM ###### 119 | print color("bold Green")," \n\n [ + ] Enter domain name : "; 120 | chomp($dork=); 121 | print color("yellow"), "\n"; 122 | 123 | 124 | # PAGE SCRAPE ALROGITHM ###### 125 | for (my $i=1; $i<=2000; $i+=10) { 126 | #$url = "https://google.com/search?q=site%3A$dork.com+ext:xml | ext:conf | ext:cnf | ext:reg | ext:inf | ext:pdf | ext:rdp | ext:cfg | ext:txt | ext:ora | ext:ini | ext:doc | ext:docx | ext:odt | ext:pdf | ext:rtf | ext:sxw | ext:psw | ext:ppt | ext:pptx | ext:pps | ext:csv&btnG=Search&hl=en-US&biw=&bih=&gbv=1&start=$i&filter=0"; 127 | $url = "https://google.com/search?q=site%3A$dork+ext:xml+%7C+ext:conf+%7C+ext:cnf+%7C+ext:reg+%7C+ext:inf+%7C+ext:pdf+%7C+ext:rdp+%7C+ext:cfg+%7C+ext:txt+%7C+ext:ora+%7C+ext:ini+%7C+ext:doc+%7C+ext:docx+%7C+ext:odt+%7C+ext:pdf+%7C+ext:rtf+%7C+ext:sxw+%7C+ext:psw+%7C+ext:ppt+%7C+ext:pptx+%7C+ext:pps+%7C+ext:csv&btnG=Search&hl=en-US&biw=&bih=&gbv=1&start=$i&filter=0"; 128 | $resp = $ag->request(HTTP::Request->new(GET => $url)); 129 | $rrs = $resp->content; 130 | 131 | # ERROR HANDLGING ALGORITHM ###### 132 | if ($rrs =~ m/Our systems have detected unusual traffic/i) { 133 | print "[!] Error: Google is blocking our requests, change your IP and clear cache [!]\n\n"; 134 | exit; 135 | } 136 | else {} 137 | 138 | $p = HTML::TokeParser->new(\$rrs); 139 | while ($p->get_tag("cite")) { 140 | my @link = $p->get_trimmed_text("/cite"); 141 | foreach(@link) { print "$_\n"; } 142 | open(OUT, ">>metacrawler_googlefiles.txt"); print OUT "@link\n"; close(OUT); 143 | } 144 | } 145 | print "[+] Finished enumerating Google\n"; 146 | $cleaner = system("./cob_b.sh"); 147 | exit; 148 | } 149 | -------------------------------------------------------------------------------- /js_linkfinder.py: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You"(user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software your bound to these point. # 6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 7 | # author "Haroon Awan" and (YOU) user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. # 10 | # 4. This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 11 | # what you do with by/this software as your free to use this software. # 12 | # 5. 
Any other purpose(s) that it suites as long as it is not related to any kind of # 
13 | # crime or using it in un-authorized environment. # 
14 | # 6. You can use this software to protect and secure your data information in any # 
15 | # environment. # 
16 | # 7. It can also be used in state of being protection against the unauthorized use of # 
17 | # information. # 
18 | # 8. It can be used to take measures achieve protection. # 
19 | # *************************************************************************************** # 
20 | 
21 | 
22 | #!/usr/bin/env python3
23 | # -*- coding: utf-8 -*-
24 | # Name: cobra.py
25 | # Original Author: Simon Descarpentries
26 | # Modified By: Haroon Awan
27 | # Licence: EULA
28 | 
29 | import argparse, logging, os, sys, threading
30 | from sys import argv, stderr
31 | from random import randint
32 | from time import sleep
33 | from urllib.parse import urljoin
34 | import requests, re, yaml, datetime
35 | 
36 | cmd = "clear"
37 | returned_value = os.system(cmd)
38 | sys.stdout.write("\033[1;37m")
39 | 
40 | print ("""\
41 | [ Syntax ]
42 | python3 js_linkfinder.py --wait=2 --download https://www.victim.com
43 | """)
44 | 
45 | 
46 | __all__ = ['boss', 'download_files', 'download_file', 'run_cmd']
47 | WANTED_EXT = '\.(js?|o(d|t)[js]|js?)$'
48 | BIN_EXT = re.compile(
49 |     '\.?(jpe?g|png|gif|ico|swf|flv|exe|mpe?.|h26.|avi|m.v|zip|rar|t?gz|xz|js)$', re.I)
50 | RE_FIND_LINKS = re.compile('(href|src)="(.*?)"|url\("?\'?(.*?)\'?"?\)', re.I)
51 | RE_REL_LINK = re.compile('^https?://', re.I)
52 | RE_CONTENT_TYPE = re.compile('text/(html|css)', re.I)
53 | 
54 | def run_cmd(argv):
55 |     regext = WANTED_EXT
56 |     do_dl = False
57 |     do_journal = False
58 |     do_wait = 5
59 |     do_random_wait = True
60 |     single_page = False
61 | 
62 |     for i, arg in enumerate(argv):
63 |         if i == 0:  # 1st arg of argv is the program name
64 |             continue
65 |         elif arg == '--download':
66 |             do_dl = True
67 |         elif arg.startswith('--wait'):
68 |             do_wait = int(arg[len('--wait='):])
69 |         elif arg.startswith('http'):
70 |             continue
71 |         elif arg == '--download-file':
72 |             if len(argv) < 3:
73 |                 raise SystemExit("Argument missing, check usage\n")
74 |             else:
75 |                 download_file(argv[-1], do_wait, do_random_wait)
76 |                 raise SystemExit
77 |         elif arg == '--download-files':
78 |             if len(argv) < 3:
79 |                 raise SystemExit("Argument missing, check usage\n")
80 |             else:
81 |                 download_files(argv[-1], do_wait, do_random_wait)
82 |                 raise SystemExit
83 |         elif arg.startswith('--test'):
84 |             import doctest
85 |             doctest.run_docstring_examples(globals()[arg[len('--test='):]], globals())
86 |             raise SystemExit()
87 |         else:
88 |             raise SystemExit("Invalid argument "+arg+"\n")
89 | 
90 |     if len(argv) < 2:
91 |         raise SystemExit("")
92 | 
93 |     boss(argv[-1], re.compile(regext, re.I), do_dl, do_journal, do_wait, do_random_wait, single_page)
94 | 
95 | 
96 | 
97 | 
98 | def boss(base_url, wanted_ext=WANTED_EXT, do_dl=False, do_journal=False,
99 |         do_wait=False, do_random_wait=False, single_page=False):
100 |     journal = 0
101 | 
102 |     if do_journal:
103 |         # logging.config.dictConfig(yaml.load(LOGGING))
104 |         journal = logging.getLogger('journal')
105 |     found_pages_list = [base_url]
106 |     found_pages_set = set(found_pages_list)
107 |     regurgited_pages = set()
108 |     caught_docs = set()
109 |     for page_url in found_pages_list:
110 |         do_wait and controlled_sleep(do_wait, do_random_wait)
111 |         do_journal and journal.info("tries page " + page_url)
112 |         try:
113 |             page = requests.get(page_url, stream=True)
114 |         except Exception as e:
115 |             do_journal and journal.error(e)
116 |             print(e, file=stderr)
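            # a failed fetch is reported but never aborts the crawl; skip to the next queued page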
117 |             continue
118 |         if (page.status_code == requests.codes.ok and
119 |                 RE_CONTENT_TYPE.search(page.headers.get('content-type', ''))):
120 |             found_pages_list, found_pages_set, regurgited_pages, caught_docs = explore_page(
121 |                 base_url, page_url, str(page.content), wanted_ext, journal, do_dl,
122 |                 found_pages_list, found_pages_set, regurgited_pages, caught_docs)
123 |             page.close()
124 |         if single_page:
125 |             break
126 |     if do_journal:
127 |         journal.info("found %d pages, %d doc(s)" % (len(found_pages_set), len(caught_docs)))
128 | 
129 | 
130 | def explore_page(base_url, page_url, page_str, wanted_ext, journal, do_dl,
131 |         found_pages_list, found_pages_set, regurgited_pages, caught_docs):
132 |     # extract links
133 |     for a_href in RE_FIND_LINKS.finditer(page_str):
134 |         a_href = a_href.group(a_href.lastindex)
135 |         if not RE_REL_LINK.search(a_href):  # if it's a relative link
136 |             a_href = urljoin(page_url, a_href)
137 |         if wanted_ext.search(a_href) and a_href not in caught_docs:  # wanted doc ?
138 |             caught_docs.add(a_href)
139 |             do_dl and download_file(a_href) or print(a_href)
140 |         elif base_url in a_href and not BIN_EXT.search(a_href):  # next page ?
141 |             if a_href not in found_pages_set:
142 |                 journal and journal.info("will explore "+a_href)
143 |                 found_pages_list.append(a_href)
144 |                 found_pages_set.add(a_href)
145 |         elif a_href not in regurgited_pages:  # junk link ?
146 |             journal and journal.debug("regurgited link "+a_href)
147 |             regurgited_pages.add(a_href)
148 |     return found_pages_list, found_pages_set, regurgited_pages, caught_docs
149 | 
150 | 
151 | def controlled_sleep(seconds=1, do_random_wait=False):
152 |     sleep(randint(1, seconds) if do_random_wait else seconds)
153 | 
154 | 
155 | def download_file(URL, do_wait=False, do_random_wait=False):
156 |     do_wait and controlled_sleep(do_wait, do_random_wait)
157 |     with open(URL.split('/')[-1], 'wb') as f:
158 |         f.write(requests.get(URL, stream=True).content)
159 | 
160 | 
161 | def download_files(URLs_file, do_wait=False, do_random_wait=False):
162 |     line_nb = 0
163 |     downloaded_files = 0
164 |     with open(URLs_file) as f:
165 |         for line in f:
166 |             line = line.rstrip('\n')
167 |             if line == '':
168 |                 continue
169 |             line_nb += 1
170 |             print('download %d - %s' % (line_nb, line))
171 |             try:
172 |                 download_file(line, do_wait, do_random_wait)
173 |                 downloaded_files += 1
174 |             except Exception as e:
175 |                 print(e, file=stderr)
176 |     print('downloaded %d / %d' % (downloaded_files, line_nb))
177 | 
178 | 
179 | if __name__ == '__main__':
180 |     run_cmd(argv)
181 | -------------------------------------------------------------------------------- /js_linkfinder/js_linkfinder.py: --------------------------------------------------------------------------------
1 | # *************************************************************************************** # 
2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 
3 | # Agreement between "Haroon Awan" and "You"(user). # 
4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 
5 | # 1. By using this piece of software your bound to these point. # 
6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 
7 | # author "Haroon Awan" and (YOU) user of this software. # 
8 | # 3. This software application grants users rights to use for any purpose or modify and # 
9 | # redistribute creative works. # 
10 | # 4. 
This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 
11 | # what you do with by/this software as your free to use this software. # 
12 | # 5. Any other purpose(s) that it suites as long as it is not related to any kind of # 
13 | # crime or using it in un-authorized environment. # 
14 | # 6. You can use this software to protect and secure your data information in any # 
15 | # environment. # 
16 | # 7. It can also be used in state of being protection against the unauthorized use of # 
17 | # information. # 
18 | # 8. It can be used to take measures achieve protection. # 
19 | # *************************************************************************************** # 
20 | 
21 | 
22 | #!/usr/bin/env python3
23 | # -*- coding: utf-8 -*-
24 | # Name: cobra.py
25 | # Original Author: Simon Descarpentries
26 | # Modified By: Haroon Awan
27 | # Licence: EULA
28 | 
29 | import argparse, logging, os, sys, threading
30 | from sys import argv, stderr
31 | from random import randint
32 | from time import sleep
33 | from urllib.parse import urljoin
34 | import requests, re, yaml, datetime
35 | 
36 | cmd = "clear"
37 | returned_value = os.system(cmd)
38 | sys.stdout.write("\033[1;37m")
39 | 
40 | print ("""\
41 | [ Syntax ]
42 | python3 js_linkfinder.py --wait=2 --download https://www.victim.com
43 | """)
44 | 
45 | 
46 | __all__ = ['boss', 'download_files', 'download_file', 'run_cmd']
47 | WANTED_EXT = '\.(js?|o(d|t)[js]|js?)$'
48 | BIN_EXT = re.compile(
49 |     '\.?(jpe?g|png|gif|ico|swf|flv|exe|mpe?.|h26.|avi|m.v|zip|rar|t?gz|xz|js)$', re.I)
50 | RE_FIND_LINKS = re.compile('(href|src)="(.*?)"|url\("?\'?(.*?)\'?"?\)', re.I)
51 | RE_REL_LINK = re.compile('^https?://', re.I)
52 | RE_CONTENT_TYPE = re.compile('text/(html|css)', re.I)
53 | 
54 | def run_cmd(argv):
55 |     regext = WANTED_EXT
56 |     do_dl = False
57 |     do_journal = False
58 |     do_wait = 5
59 |     do_random_wait = True
60 |     single_page = False
61 | 
62 |     for i, arg in enumerate(argv):
63 |         if i == 0:  # 1st arg of argv is the program name
64 |             continue
65 |         elif arg == '--download':
66 |             do_dl = True
67 |         elif arg.startswith('--wait'):
68 |             do_wait = int(arg[len('--wait='):])
69 |         elif arg.startswith('http'):
70 |             continue
71 |         elif arg == '--download-file':
72 |             if len(argv) < 3:
73 |                 raise SystemExit("Argument missing, check usage\n")
74 |             else:
75 |                 download_file(argv[-1], do_wait, do_random_wait)
76 |                 raise SystemExit
77 |         elif arg == '--download-files':
78 |             if len(argv) < 3:
79 |                 raise SystemExit("Argument missing, check usage\n")
80 |             else:
81 |                 download_files(argv[-1], do_wait, do_random_wait)
82 |                 raise SystemExit
83 |         elif arg.startswith('--test'):
84 |             import doctest
85 |             doctest.run_docstring_examples(globals()[arg[len('--test='):]], globals())
86 |             raise SystemExit()
87 |         else:
88 |             raise SystemExit("Invalid argument "+arg+"\n")
89 | 
90 |     if len(argv) < 2:
91 |         raise SystemExit("")
92 | 
93 |     boss(argv[-1], re.compile(regext, re.I), do_dl, do_journal, do_wait, do_random_wait, single_page)
94 | 
95 | 
96 | 
97 | 
98 | def boss(base_url, wanted_ext=WANTED_EXT, do_dl=False, do_journal=False,
99 |         do_wait=False, do_random_wait=False, single_page=False):
100 |     journal = 0
101 | 
102 |     if do_journal:
103 |         # logging.config.dictConfig(yaml.load(LOGGING))
104 |         journal = logging.getLogger('journal')
105 |     found_pages_list = [base_url]
106 |     found_pages_set = set(found_pages_list)
107 |     regurgited_pages = set()
108 |     caught_docs = set()
109 |     for page_url in found_pages_list:
110 |         do_wait and controlled_sleep(do_wait, do_random_wait)
111 |         do_journal and journal.info("tries page " + page_url)
112 |         try:
113 |             page = requests.get(page_url, stream=True)
114 |         except Exception as e:
115 |             do_journal and journal.error(e)
116 |             print(e, file=stderr)
117 |             continue
118 |         if (page.status_code == requests.codes.ok and
119 |                 RE_CONTENT_TYPE.search(page.headers.get('content-type', ''))):
120 |             found_pages_list, found_pages_set, regurgited_pages, caught_docs = explore_page(
121 |                 base_url, page_url, str(page.content), wanted_ext, journal, do_dl,
122 |                 found_pages_list, found_pages_set, regurgited_pages, caught_docs)
123 |             page.close()
124 |         if single_page:
125 |             break
126 |     if do_journal:
127 |         journal.info("found %d pages, %d doc(s)" % (len(found_pages_set), len(caught_docs)))
128 | 
129 | 
130 | def explore_page(base_url, page_url, page_str, wanted_ext, journal, do_dl,
131 |         found_pages_list, found_pages_set, regurgited_pages, caught_docs):
132 |     # extract links
133 |     for a_href in RE_FIND_LINKS.finditer(page_str):
134 |         a_href = a_href.group(a_href.lastindex)
135 |         if not RE_REL_LINK.search(a_href):  # if it's a relative link
136 |             a_href = urljoin(page_url, a_href)
137 |         if wanted_ext.search(a_href) and a_href not in caught_docs:  # wanted doc ?
138 |             caught_docs.add(a_href)
139 |             do_dl and download_file(a_href) or print(a_href)
140 |         elif base_url in a_href and not BIN_EXT.search(a_href):  # next page ?
141 |             if a_href not in found_pages_set:
142 |                 journal and journal.info("will explore "+a_href)
143 |                 found_pages_list.append(a_href)
144 |                 found_pages_set.add(a_href)
145 |         elif a_href not in regurgited_pages:  # junk link ?
146 |             journal and journal.debug("regurgited link "+a_href)
147 |             regurgited_pages.add(a_href)
148 |     return found_pages_list, found_pages_set, regurgited_pages, caught_docs
149 | 
150 | 
151 | def controlled_sleep(seconds=1, do_random_wait=False):
152 |     sleep(randint(1, seconds) if do_random_wait else seconds)
153 | 
154 | 
155 | def download_file(URL, do_wait=False, do_random_wait=False):
156 |     do_wait and controlled_sleep(do_wait, do_random_wait)
157 |     with open(URL.split('/')[-1], 'wb') as f:
158 |         f.write(requests.get(URL, stream=True).content)
159 | 
160 | 
161 | def download_files(URLs_file, do_wait=False, do_random_wait=False):
162 |     line_nb = 0
163 |     downloaded_files = 0
164 |     with open(URLs_file) as f:
165 |         for line in f:
166 |             line = line.rstrip('\n')
167 |             if line == '':
168 |                 continue
169 |             line_nb += 1
170 |             print('download %d - %s' % (line_nb, line))
171 |             try:
172 |                 download_file(line, do_wait, do_random_wait)
173 |                 downloaded_files += 1
174 |             except Exception as e:
175 |                 print(e, file=stderr)
176 |     print('downloaded %d / %d' % (downloaded_files, line_nb))
177 | 
178 | 
179 | if __name__ == '__main__':
180 |     run_cmd(argv)
181 | -------------------------------------------------------------------------------- /gct.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | 
3 | # GCTsubDomainDownloader
4 | 
5 | import requests
6 | import re
7 | import json
8 | import sys,os
9 | import traceback
10 | import argparse
11 | import time,datetime
12 | from tqdm import tqdm
13 | 
14 | proxies = None
15 | 
16 | sess = requests.Session()
17 | 
18 | #text highlight
19 | class Colored(object):
20 |     RED = '\033[31m'
21 |     GREEN = '\033[32m'
22 |     YELLOW = '\033[33m'
23 |     BLUE = '\033[34m'
24 |     FUCHSIA = '\033[35m'
25 |     CYAN = '\033[36m'
26 |     WHITE = '\033[37m'
27 | 
28 |     #: no color
29 |     RESET = '\033[0m'
30 | 
31 |     def color_str(self, color, s):
32 |         return '{}{}{}'.format(
33 |             getattr(self, color),
34 |             s,
35 |             self.RESET
self.RESET 36 | ) 37 | 38 | def red(self, s): 39 | return self.color_str('RED', s) 40 | 41 | def green(self, s): 42 | return self.color_str('GREEN', s) 43 | 44 | def yellow(self, s): 45 | return self.color_str('YELLOW', s) 46 | 47 | def blue(self, s): 48 | return self.color_str('BLUE', s) 49 | 50 | def fuchsia(self, s): 51 | return self.color_str('FUCHSIA', s) 52 | 53 | def cyan(self, s): 54 | return self.color_str('CYAN', s) 55 | 56 | def white(self, s): 57 | return self.color_str('WHITE', s) 58 | 59 | #domainfinde function 60 | class Domain: 61 | def __init__(self,search_domain,save_path,show_expired): 62 | self.search_domain = search_domain 63 | self.save_path = save_path 64 | self.show_expired = show_expired 65 | self.domains = {} 66 | self.total_num = 0 67 | self.page_token = '' 68 | self.flag = 0 69 | self.headers = {"User-Agent":"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36"} 70 | self.indexUrl = 'https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?include_subdomains=true' 71 | self.nextUrl = 'https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch/page?p=' 72 | 73 | def get_domain(self): 74 | c = Colored() 75 | 76 | do = 10 77 | while do > 0: 78 | if self.page_token != '': 79 | req = sess.get(self.nextUrl+self.page_token,headers=self.headers,proxies=proxies,verify=True) 80 | else: 81 | if self.show_expired == 'show': 82 | req = sess.get(self.indexUrl+'&domain='+self.search_domain+'&include_expired=true',headers=self.headers,proxies=proxies,verify=True) 83 | else: 84 | req = sess.get(self.indexUrl+'&domain='+self.search_domain,headers=self.headers,proxies=proxies,verify=True) 85 | rep = (req.text).lstrip(")]}'") 86 | rep = re.sub(r'\[\[\"https\.ct\.cdsr\"\,','[',rep) 87 | rep = rep.replace('\n','').replace('\\','') 88 | rep = rep[:-1] 89 | rep = json.loads(rep) 90 | 91 | if self.total_num == 0: 92 | break 93 | else: 94 | t = rep[2][0] 95 | p = re.match( r'[a-zA-Z0-9]{10}',t) 96 | if p: 97 | break 98 | else: 99 | do = do - 1 100 | print((" "+c.red("Occurred an Connection error...retry count down %d" %(do)))) 101 | 102 | if self.total_num == 0: 103 | for x in rep[1]: 104 | self.total_num += x[3] 105 | if self.total_num != 0: 106 | print((" "+c.red(str(self.total_num))+c.green(" subdomain certificate logs found"))) 107 | else: 108 | print((" "+c.red(str(self.total_num))+c.green(" subdomain certificate logs found"))) 109 | print((c.red("[+]No subdomain certificate log found"))) 110 | exit() 111 | 112 | for y in rep[0]: 113 | if y[1] not in self.domains: 114 | self.domains[y[1]] = {} 115 | self.domains[y[1]]['expired_time'] = int((str(y[4]))[:-3]) 116 | self.domains[y[1]]['is_expired'] = 0 117 | else: 118 | if self.domains[y[1]]['expired_time'] < int((str(y[4]))[:-3]): 119 | self.domains[y[1]]['expired_time'] = int((str(y[4]))[:-3]) 120 | now = time.time() 121 | if now >int((str(y[4]))[:-3]): 122 | self.domains[y[1]]['is_expired'] = 1 123 | else: 124 | self.domains[y[1]]['is_expired'] = 0 125 | else: 126 | continue 127 | 128 | pageNum = (self.total_num//10) + 1 129 | with tqdm(total=self.total_num,ncols=80) as pbar: 130 | if self.flag < pageNum: 131 | if self.total_num - (self.flag)*10 <10: 132 | pbar.update(self.total_num) 133 | else: 134 | pbar.update((self.flag+1)*10) 135 | self.flag = self.flag+1 136 | if rep[2][1] != None: 137 | self.page_token = rep[2][1] 138 | self.get_domain() 139 | 140 | def run(self): 141 | c = Colored() 142 | 
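# run() drives the whole lookup: it crawls Google's certificate-transparency
# report for the domain, prints every discovered subdomain (flagging expired
# certificates when invoked with "-e show"), then persists the list via write_log().
# Example invocation (the argparse options below require -d and -e):
#   python3 gct.py -d example.com -e hide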
print(("[+]Searching subdomains for "+c.cyan(self.search_domain))) 143 | self.get_domain() 144 | print((c.fuchsia("[+]Printing subdomains for ")+c.cyan(self.search_domain))) 145 | for key,value in list(self.domains.items()): 146 | if value['is_expired'] == 1 and self.show_expired == 'show': 147 | print((key+" "+c.red("[Expired on "+datetime.datetime.fromtimestamp(value['expired_time']).strftime('%Y-%m-%d')+"]"))) 148 | else: 149 | print(key) 150 | self.write_log() 151 | 152 | def write_log(self): 153 | c = Colored() 154 | if(os.name == 'posix'): 155 | path = os.getcwd()+"/"+self.save_path+"/" 156 | else: 157 | path = os.getcwd()+"\\"+self.save_path+"\\" 158 | if os.path.exists(path) == False: 159 | os.mkdir(path) 160 | with open(path+self.search_domain+'.txt', 'w') as f: 161 | for key,value in list(self.domains.items()): 162 | if value['is_expired'] == 1 and self.show_expired == 'show': 163 | f.write(key+" "+"[Expired on "+datetime.datetime.fromtimestamp(value['expired_time']).strftime('%Y-%m-%d')+"]\r\n") 164 | f.flush() 165 | else: 166 | f.write(key+"\r\n") 167 | f.flush() 168 | print((c.fuchsia("[+]The ")+c.cyan(self.search_domain)+c.fuchsia("'s subdomains have been saved in ")+c.cyan(path+self.search_domain+".txt"))) 169 | f.close() 170 | 171 | if '__main__' == __name__: 172 | print(''' 173 | ''') 174 | 175 | c = Colored() 176 | parser = argparse.ArgumentParser() 177 | parser.add_argument('-d', '--domain', dest='search_domain', action='store',required=True,help='The domain you want to search(input example: google.com/twitter.com),no need to add http/https') 178 | parser.add_argument('-s', '--save', dest='save_path', action='store', default='log',required=False,help='The folder that subdomains will be saved under current path,(default:log),no need to /') 179 | parser.add_argument('-e', '--expired', dest='show_expired', action='store', required=True,help='show the subdomains which have an expired Security certificate(input choices:show/hide)') 180 | args = parser.parse_args() 181 | 182 | if re.match(r"^([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}$",args.search_domain): 183 | try: 184 | if args.show_expired == 'show' or args.show_expired == 'hide': 185 | d = Domain(args.search_domain, args.save_path,args.show_expired) 186 | d.run() 187 | else: 188 | print((c.red("[+]argument --expired/-e is illegal!"))) 189 | exit() 190 | except KeyboardInterrupt: 191 | print((c.red("[+]Ctrl+c exit..."))) 192 | exit() 193 | except Exception: 194 | traceback.print_tb(sys.exc_info()[2]) 195 | print((c.red("[+]Error exit..."))) 196 | else: 197 | print((c.red("[+]argument --domain/-d is illegal!"))) 198 | exit() -------------------------------------------------------------------------------- /cobra_webmeta_crawler.py: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You"(user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software your bound to these point. # 6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 7 | # author "Haroon Awan" and (YOU) user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. 
# 10 | # 4. This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 11 | # what you do with by/this software as your free to use this software. # 12 | # 5. Any other purpose(s) that it suites as long as it is not related to any kind of # 13 | # crime or using it in un-authorized environment. # 14 | # 6. You can use this software to protect and secure your data information in any # 15 | # environment. # 16 | # 7. It can also be used in state of being protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures achieve protection. # 19 | # *************************************************************************************** # 20 | 21 | 22 | #!/usr/bin/env python3 23 | # -*- coding: utf-8 -*- 24 | # Name: cobra.py 25 | # Original Author: Simon Descarpentries 26 | # Modified By: Haroon Awan 27 | # Licence: EULA 28 | 29 | import argparse, os, sys, threading 30 | from sys import argv, stderr 31 | from random import randint 32 | from time import sleep 33 | from urllib.parse import urljoin 34 | import requests, re, yaml, datetime 35 | 36 | cmd = "clear" 37 | returned_value = os.system(cmd) 38 | sys.stdout.write("\033[1;37m") 39 | 40 | print ("""\ 41 | 42 | _.--.... 43 | _....---;:'::' ^__/ 44 | .' `'`___....---=-'` 45 | /::' (` 46 | \' `:. 47 | `\::. ';-"":::-._ {} 48 | _.--'`\:' .'`-.`'`.' `{I} 49 | .-' `' .;;`\::. '. _: {-I}`\\ 50 | .' .:. `:: _)::: _;' `{=I}.:|| 51 | /. ::::`":::` ':'.-'`':. {_I}::// 52 | |:. ':' ::::: .':'`:. `'|':||:' 53 | \: .:. ''' .:| .:, _:./':.| 54 | jgs '--.:::...---'\:'.:`':`':./ 55 | '-::..:::-' 56 | 57 | 58 | # ------------ -------- ----------- ----------- ------ 59 | # ************ ********** *********** *********** ******** 60 | # --- ---- ---- ---- ** ---- --- ---------- 61 | # *** *** *** *********** ********* **** **** 62 | # --- --- --- ----------- --------- ------------ 63 | # *** **** **** **** ** **** **** ************ 64 | # ------------ ---------- ----------- ---- ---- ---- ---- 65 | # ************ ******** *********** **** **** **** **** 66 | # crawler for metadata v1.0a 67 | 68 | [ Syntax ] 69 | python3 cobra.py --wait=2 --download https://www.victim.com 70 | """) 71 | 72 | 73 | __all__ = ['boss', 'download_files', 'download_file', 'run_cmd'] 74 | WANTED_EXT = '\.(pdf?|docx?|xlsx?|svg?|swf?|pptx?|o(d|t)[cgmpst]|csv|rtf|zip|rar|t?gz|xz|gz?)$' 75 | BIN_EXT = re.compile( 76 | '\.?(jpe?g|png|gif|ico|swf|flv|exe|mpe?.|h26.|avi|m.v|zip|rar|t?gz|xz|js)$', re.I) 77 | RE_FIND_LINKS = re.compile('(href|src)="(.*?)"|url\("?\'?(.*?)\'?"?\)', re.I) 78 | RE_REL_LINK = re.compile('^https?://', re.I) 79 | RE_CONTENT_TYPE = re.compile('text/(html|css)', re.I) 80 | 81 | def run_cmd(argv): 82 | regext = WANTED_EXT 83 | do_dl = False 84 | do_journal = False 85 | do_wait = 5 86 | do_random_wait = True 87 | single_page = False 88 | 89 | for i, arg in enumerate(argv): 90 | if i == 0: # 1st arg of argv is the program name 91 | continue 92 | elif arg == '--download': 93 | do_dl = True 94 | elif arg.startswith('--wait'): 95 | do_wait = int(arg[len('--wait='):]) 96 | elif arg.startswith('http'): 97 | continue 98 | elif arg == '--download-file': 99 | if len(argv) < 3: 100 | raise SystemExit("Argument missing, check usage\n") 101 | else: 102 | download_file(argv[-1], do_wait, do_random_wait) 103 | raise SystemExit 104 | elif arg == '--download-files': 105 | if len(argv) < 3: 106 | raise SystemExit("Argument missing, check usage\n") 107 | else: 108 | download_files(argv[-1], do_wait, do_random_wait) 109 | 
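# --download-files mode fetches every URL listed in the given file and then exits;
# the crawler below is never reached in this mode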
raise SystemExit 110 | elif arg.startswith('--test'): 111 | import doctest 112 | doctest.run_docstring_examples(globals()[arg[len('--test='):]], globals()) 113 | raise SystemExit() 114 | else: 115 | raise SystemExit("Invalid argument "+arg+"\n") 116 | 117 | if len(argv) < 2: 118 | raise SystemExit("") 119 | 120 | boss(argv[-1], re.compile(regext, re.I), do_dl, do_journal, do_wait, do_random_wait, single_page) 121 | 122 | 123 | 124 | 125 | def boss(base_url, wanted_ext=WANTED_EXT, do_dl=False, do_journal=False, 126 | do_wait=False, do_random_wait=False, single_page=False): 127 | journal = 0 128 | 129 | if do_journal: 130 | import logging # logging.config.dictConfig(yaml.load(LOGGING)) 131 | journal = logging.getLogger('journal') 132 | found_pages_list = [base_url] 133 | found_pages_set = set(found_pages_list) 134 | regurgited_pages = set() 135 | caught_docs = set() 136 | for page_url in found_pages_list: 137 | do_wait and controlled_sleep(do_wait, do_random_wait) 138 | do_journal and journal.info("tries page " + page_url) 139 | try: 140 | page = requests.get(page_url, stream=True) 141 | except Exception as e: 142 | do_journal and journal.error(e) 143 | print(e, file=stderr) 144 | continue 145 | if (page.status_code == requests.codes.ok and 146 | RE_CONTENT_TYPE.search(page.headers.get('content-type', ''))): 147 | found_pages_list, found_pages_set, regurgited_pages, caught_docs = explore_page( 148 | base_url, page_url, str(page.content), wanted_ext, journal, do_dl, 149 | found_pages_list, found_pages_set, regurgited_pages, caught_docs) 150 | page.close() 151 | if single_page: 152 | break 153 | if do_journal: 154 | journal.info("found %d pages, %d doc(s)" % (len(found_pages_set), len(caught_docs))) 155 | 156 | 157 | def explore_page(base_url, page_url, page_str, wanted_ext, journal, do_dl, 158 | found_pages_list, found_pages_set, regurgited_pages, caught_docs): 159 | # extract links 160 | for a_href in RE_FIND_LINKS.finditer(page_str): 161 | a_href = a_href.group(a_href.lastindex) 162 | if not RE_REL_LINK.search(a_href): # if it's a relative link 163 | a_href = urljoin(page_url, a_href) 164 | if wanted_ext.search(a_href) and a_href not in caught_docs: # wanted doc ? 165 | caught_docs.add(a_href) 166 | download_file(a_href) if do_dl else print(a_href) 167 | elif base_url in a_href and not BIN_EXT.search(a_href): # next page ? 168 | if a_href not in found_pages_set: 169 | journal and journal.info("will explore "+a_href) 170 | found_pages_list.append(a_href) 171 | found_pages_set.add(a_href) 172 | elif a_href not in regurgited_pages: # junk link ?
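            # this branch completes a three-way triage: every extracted link is either
            # a wanted document (downloaded or printed), a same-site non-binary page
            # queued for the crawl, or an off-site/binary link remembered once and skipped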
173 | journal and journal.debug("regurgited link "+a_href) 174 | regurgited_pages.add(a_href) 175 | return found_pages_list, found_pages_set, regurgited_pages, caught_docs 176 | 177 | 178 | def controlled_sleep(seconds=1, do_random_wait=False): 179 | sleep(randint(1, seconds) if do_random_wait else seconds) 180 | 181 | 182 | def download_file(URL, do_wait=False, do_random_wait=False): 183 | do_wait and controlled_sleep(do_wait, do_random_wait) 184 | with open(URL.split('/')[-1], 'wb') as f: 185 | f.write(requests.get(URL, stream=True).content) 186 | 187 | 188 | def download_files(URLs_file, do_wait=False, do_random_wait=False): 189 | line_nb = 0 190 | downloaded_files = 0 191 | with open(URLs_file) as f: 192 | for line in f: 193 | line = line.rstrip('\n') 194 | if line == '': 195 | continue 196 | line_nb += 1 197 | print('download %d - %s' % (line_nb, line)) 198 | try: 199 | download_file(line, do_wait, do_random_wait) 200 | downloaded_files += 1 201 | except Exception as e: 202 | print(e, file=stderr) 203 | print('downloaded %d / %d' % (downloaded_files, line_nb)) 204 | 205 | 206 | if __name__ == '__main__': 207 | run_cmd(argv) 208 | -------------------------------------------------------------------------------- /geotagging_crawler/cobra_geotagging_crawler.py: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You" (the user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software you are bound to these points. # 6 | # 2. This End User License Agreement (EULA) is a legal agreement between the software # 7 | # author "Haroon Awan" and you, the user of this software. # 8 | # 3. This software grants users the right to use it for any purpose, and to modify and # 9 | # redistribute it as a creative work. # 10 | # 4. This software comes with an "as-is" warranty; the author "Haroon Awan" takes no # 11 | # responsibility for what you do with this software, as you are free to use it. # 12 | # 5. It may serve any other purpose(s) it suits, as long as it is not related to any # 13 | # kind of crime or used in an unauthorized environment. # 14 | # 6. You can use this software to protect and secure your data and information in any # 15 | # environment. # 16 | # 7. It can also be used as a means of protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures to achieve protection. # 19 | # *************************************************************************************** # 20 | 21 | 22 | #!/usr/bin/env python3 23 | # -*- coding: utf-8 -*- 24 | # Name: cobra.py 25 | # Original Author: Simon Descarpentries 26 | # Modified By: Haroon Awan 27 | # Licence: EULA 28 | 29 | import argparse, logging, os, sys, threading 30 | from sys import argv, stderr 31 | from random import randint 32 | from time import sleep 33 | from urllib.parse import urljoin 34 | import requests, re, yaml, datetime 35 | 36 | cmd = "clear" 37 | returned_value = os.system(cmd) 38 | sys.stdout.write("\033[1;37m") 39 | 40 | print ("""\ 41 | 42 | _.--.... 43 | _....---;:'::' ^__/ 44 | .' `'`___....---=-'` 45 | /::' (` 46 | \' `:. 47 | `\::. ';-"":::-._ {} 48 | _.--'`\:' .'`-.`'`.' `{I} 49 | .-' `' .;;`\::. '. _: {-I}`\\ 50 | .' .:. `:: _)::: _;' `{=I}.:|| 51 | /. ::::`":::` ':'.-'`':. 
{_I}::// 52 | |:. ':' ::::: .':'`:. `'|':||:' 53 | \: .:. ''' .:| .:, _:./':.| 54 | jgs '--.:::...---'\:'.:`':`':./ 55 | '-::..:::-' 56 | 57 | 58 | # ------------ -------- ----------- ----------- ------ 59 | # ************ ********** *********** *********** ******** 60 | # --- ---- ---- ---- ** ---- --- ---------- 61 | # *** *** *** *********** ********* **** **** 62 | # --- --- --- ----------- --------- ------------ 63 | # *** **** **** **** ** **** **** ************ 64 | # ------------ ---------- ----------- ---- ---- ---- ---- 65 | # ************ ******** *********** **** **** **** **** 66 | # crawler for metadata v1.0a 67 | 68 | [ Syntax ] 69 | python3 cobra.py --wait=2 --download https://www.victim.com 70 | """) 71 | 72 | 73 | __all__ = ['boss', 'download_files', 'download_file', 'run_cmd'] 74 | 75 | WANTED_EXT = '\.(3gp?|mp4?|m4a?|aac?|ts?|swf?|ico?|gif?|png?|jpe?g|h26.|avi?|m.v?|flac?|gsm?|mid?|xmf?|mxmf?|rtttl?|rtx?|ota?|imy?|mp3?|mkv?|wav?|ogg?|mkv?|webm?|mkv?|webm?|bmp?|gi?|jpeg?|png?|webp?|heic?|heif?|gif?)$' 76 | BIN_EXT = re.compile( 77 | '\.?(3gp?|mp4?|m4a?|aac?|ts?|swf?|ico?|gif?|png?|jpe?g|h26.|avi?|m.v?|flac?|gsm?|mid?|xmf?|mxmf?|rtttl?|rtx?|ota?|imy?|mp3?|mkv?|wav?|ogg?|mkv?|webm?|mkv?|webm?|bmp?|gi?|jpeg?|png?|webp?|heic?|heif?|gif?)$', re.I) 78 | RE_FIND_LINKS = re.compile('(href|src)="(.*?)"|url\("?\'?(.*?)\'?"?\)', re.I) 79 | RE_REL_LINK = re.compile('^https?://', re.I) 80 | RE_CONTENT_TYPE = re.compile('text/(html|css)', re.I) 81 | 82 | def run_cmd(argv): 83 | regext = WANTED_EXT 84 | do_dl = False 85 | do_journal = False 86 | do_wait = 5 87 | do_random_wait = True 88 | single_page = False 89 | 90 | for i, arg in enumerate(argv): 91 | if i == 0: # 1st arg of argv is the program name 92 | continue 93 | elif arg == '--download': 94 | do_dl = True 95 | elif arg.startswith('--wait'): 96 | do_wait = int(arg[len('--wait='):]) 97 | elif arg.startswith('http'): 98 | continue 99 | elif arg == '--download-file': 100 | if len(argv) < 3: 101 | raise SystemExit("Argument missing, check usage\n") 102 | else: 103 | download_file(argv[-1], do_wait, do_random_wait) 104 | raise SystemExit 105 | elif arg == '--download-files': 106 | if len(argv) < 3: 107 | raise SystemExit("Argument missing, check usage\n") 108 | else: 109 | download_files(argv[-1], do_wait, do_random_wait) 110 | raise SystemExit 111 | elif arg.startswith('--test'): 112 | import doctest 113 | doctest.run_docstring_examples(globals()[arg[len('--test='):]], globals()) 114 | raise SystemExit() 115 | else: 116 | raise SystemExit("Invalid argument "+arg+"\n") 117 | 118 | if len(argv) < 2: 119 | raise SystemExit("") 120 | 121 | boss(argv[-1], re.compile(regext, re.I), do_dl, do_journal, single_page) 122 | 123 | 124 | 125 | 126 | def boss(base_url, wanted_ext=WANTED_EXT, do_dl=False, do_journal=False, 127 | do_wait=False, do_random_wait=False, single_page=False): 128 | journal = 0 129 | 130 | if do_journal: 131 | # logging.config.dictConfig(yaml.load(LOGGING)) 132 | journal = logging.getLogger('journal') 133 | found_pages_list = [base_url] 134 | found_pages_set = set(found_pages_list) 135 | regurgited_pages = set() 136 | caught_docs = set() 137 | for page_url in found_pages_list: 138 | do_wait and controlled_sleep(do_wait, do_random_wait) 139 | do_journal and journal.info("tries page " + page_url) 140 | try: 141 | page = requests.get(page_url, stream=True) 142 | except Exception as e: 143 | do_journal and journal.error(e) 144 | stderr(e) 145 | continue 146 | if (page.status_code == requests.codes.ok and 147 | 
RE_CONTENT_TYPE.search(page.headers.get('content-type', ''))): 148 | found_pages_list, found_pages_set, regurgited_pages, caught_docs = explore_page( 149 | base_url, page_url, str(page.content), wanted_ext, journal, do_dl, 150 | found_pages_list, found_pages_set, regurgited_pages, caught_docs) 151 | page.close() 152 | if single_page: 153 | break 154 | if do_journal: 155 | journal.info("found %d pages, %d doc(s)" % (len(found_pages_set), len(caught_docs))) 156 | 157 | 158 | def explore_page(base_url, page_url, page_str, wanted_ext, journal, do_dl, 159 | found_pages_list, found_pages_set, regurgited_pages, caught_docs): 160 | # extract links 161 | for a_href in RE_FIND_LINKS.finditer(page_str): 162 | a_href = a_href.group(a_href.lastindex) 163 | if not RE_REL_LINK.search(a_href): # if it's a relative link 164 | a_href = urljoin(page_url, a_href) 165 | if wanted_ext.search(a_href) and a_href not in caught_docs: # wanted doc ? 166 | caught_docs.add(a_href) 167 | download_file(a_href) if do_dl else print(a_href) 168 | elif base_url in a_href and not BIN_EXT.search(a_href): # next page ? 169 | if a_href not in found_pages_set: 170 | journal and journal.info("will explore "+a_href) 171 | found_pages_list.append(a_href) 172 | found_pages_set.add(a_href) 173 | elif a_href not in regurgited_pages: # junk link ? 174 | journal and journal.debug("regurgited link "+a_href) 175 | regurgited_pages.add(a_href) 176 | return found_pages_list, found_pages_set, regurgited_pages, caught_docs 177 | 178 | 179 | def controlled_sleep(seconds=1, do_random_wait=False): 180 | sleep(randint(1, seconds) if do_random_wait else seconds) 181 | 182 | 183 | def download_file(URL, do_wait=False, do_random_wait=False): 184 | do_wait and controlled_sleep(do_wait, do_random_wait) 185 | with open(URL.split('/')[-1], 'wb') as f: 186 | f.write(requests.get(URL, stream=True).content) 187 | 188 | 189 | def download_files(URLs_file, do_wait=False, do_random_wait=False): 190 | line_nb = 0 191 | downloaded_files = 0 192 | with open(URLs_file) as f: 193 | for line in f: 194 | line = line.rstrip('\n') 195 | if line == '': 196 | continue 197 | line_nb += 1 198 | print('download %d - %s' % (line_nb, line)) 199 | try: 200 | download_file(line, do_wait, do_random_wait) 201 | downloaded_files += 1 202 | except Exception as e: 203 | print(e, file=stderr) 204 | print('downloaded %d / %d' % (downloaded_files, line_nb)) 205 | 206 | 207 | if __name__ == '__main__': 208 | run_cmd(argv) 209 | -------------------------------------------------------------------------------- /BlackArch_Installer.sh: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You" (the user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software you are bound to these points. # 6 | # 2. This End User License Agreement (EULA) is a legal agreement between the software # 7 | # author "Haroon Awan" and you, the user of this software. # 8 | # 3. This software grants users the right to use it for any purpose, and to modify and # 9 | # redistribute it as a creative work. # 10 | # 4. This software comes with an "as-is" warranty; the author "Haroon Awan" takes no # 11 | # responsibility for what you do with this software, as you are free to use it. # 12 | # 5. 
Any other purpose(s) that it suites as long as it is not related to any kind of # 13 | # crime or using it in un-authorized environment. # 14 | # 6. You can use this software to protect and secure your data information in any # 15 | # environment. # 16 | # 7. It can also be used in state of being protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures achieve protection. # 19 | # *************************************************************************************** # 20 | 21 | #!/bin/bash 22 | 23 | red="\e[0;31m" 24 | green="\e[0;32m" 25 | off="\e[0m" 26 | 27 | function banner() { 28 | clear 29 | echo " "; 30 | echo " "; 31 | echo " "; 32 | echo " .########...########..######.....#######...##....##.....######.....#######....######....########......########.. "; 33 | echo " .##.....##..##.......##....##..##.....##..###...##....##....##...##.....##...##...##...##.....##....##.....##. "; 34 | echo " .##.....##..##.......##........##.....##..####..##....##.........##.....##...##..##....##......##...##.....##. "; 35 | echo " .########...######...##........##.....##..##.##.##....##.........##.....##...##..##....#########....#########. "; 36 | echo " .##...##....##.......##........##.....##..##..####....##.........##.....##...##...##...##....##.....##.....##. "; 37 | echo " .##....##...##.......##....##..##.....##..##...###....##....##...##.....##...##...###..##.....##....##.....##. "; 38 | echo " .##.....##..########..######....#######...##....##.....######.....#######....#######...##......##...##.....##. "; 39 | echo " Ultimate Recon and Foot Printing Software Version 1.0a "; 40 | echo " [Coded By: Haroon Awan] "; 41 | echo " [Contact: mrharoonawan@gmail.com] "; 42 | echo " "; 43 | echo " "; 44 | echo " "; 45 | } 46 | 47 | function linux() { 48 | echo -e "$red [$green+$red]$off Installing APT-GET ..."; 49 | pacman -S pacaur 50 | pacaur -S apt 51 | echo -e "$red [$green+$red]$off Installing Perl ..."; 52 | apt-get install -y perl 53 | echo -e "$red [$green+$red]$off Installing JSON Module ..."; 54 | cpan install JSON 55 | echo -e "$red [$green+$red]$off Installing Extra Perl Modules ..."; 56 | perl -MCPAN -e "WWW::Mechanize" 57 | perl -MCPAN -e "use HTML::TokeParser" 58 | perl -MCPAN -e "Term::ANSIColor" 59 | perl -MCPAN -e "Mojo::DOM" 60 | perl -MCPAN -e "Data::Dumper" 61 | perl -MCPAN -e "Win32::Console::ANSI" 62 | perl -MCPAN -e "HTML::TableExtract" 63 | perl -MCPAN -e "Data::Validate::Domain" 64 | perl -MCPAN -e "LWP::Protocol::https" 65 | perl -MCPAN -e "Mozilla::CA" 66 | perl -MCPAN -e "Bundle::LWP" 67 | 68 | 69 | echo -e "$red [$green+$red]$off Checking directories..." 70 | if [ -d "/usr/share/ReconCobra" ]; then 71 | echo -e "$red [$green+$red]$off A Directory ReconCobra Was Found! Do You Want To Replace It? 
[Y/n]:" ; 72 | read replace 73 | if [ "$replace" = "Y" ]; then 74 | rm -r "/usr/share/ReconCobra" 75 | rm "/usr/share/icons/ReconCobra.png" 76 | rm "/usr/share/applications/ReconCobra.desktop" 77 | rm "/usr/local/bin/ReconCobra" 78 | 79 | else 80 | echo -e "$red [$green+$red]$off If You Want To Install You Must Remove Previous Installations"; 81 | exit 82 | fi 83 | fi 84 | 85 | echo -e "$red [$green+$red]$off Installing ..."; 86 | echo -e "$red [$green+$red]$off Creating Symbolic Link ..."; 87 | echo -e "#!/bin/bash 88 | perl /usr/share/ReconCobra/ReconCobra.pl" '${1+"$@"}' > "ReconCobra"; 89 | chmod +x "ReconCobra"; 90 | mkdir "/usr/share/ReconCobra" 91 | cp "installer.sh" "/usr/share/ReconCobra" 92 | cp "ReconCobra.pl" "/usr/share/ReconCobra" 93 | cp "config/ReconCobra.jpeg" "/usr/share/icons" 94 | cp "config/ReconCobra.desktop" "/usr/share/applications" 95 | cp "ReconCobra" "/usr/local/bin/" 96 | rm "ReconCobra"; 97 | 98 | echo -e "$red [$green+$red]$off Installing dependencies..." 99 | echo "y" | apt-get install xdg-utils 100 | echo "y" | apt-get install cargo 101 | echo "y" | apt-get install python-yaml 102 | echo "y" | apt-get install hping3 103 | echo "y" | apt-get install python2.7 104 | echo "y" | apt-get install python3 105 | echo "y" | apt-get install x11-utils xutils-dev imagemagick libxext-dev xspy 106 | echo "y" | apt-get install golang 107 | echo "y" | apt-get install curl 108 | echo "y" | apt-get install nfs-common 109 | echo "y" | apt-get install smbclient 110 | echo "y" | apt-get install gem 111 | gem install wayback_machine_downloader 112 | echo "y" | apt-get install perl-LWP-Protocol-https 113 | echo "y" | git clone https://github.com/xroche/httrack.git --recurse 114 | cd httrack 115 | ./configure --prefix=$HOME/usr && make -j8 && make install 116 | cd .. 117 | echo "y" | git clone https://github.com/haroonawanofficial/cobra.git 118 | echo "y" | git clone https://github.com/haroonawanofficial/maahro.git 119 | echo "y" | git clone https://github.com/haroonawanofficial/ShaheenX.git 120 | echo "y" | git clone https://github.com/chenjj/CORScanner.git 121 | cd CORScanner 122 | pip install -r requirements.txt 123 | cd .. 124 | echo "y" | git clone https://github.com/stormshadow07/HackTheWorld.git 125 | cd HackTheWorld 126 | chmod +x install.sh && ./install.sh 127 | cd .. 128 | echo "y" | git clone https://github.com/threat9/routersploit 129 | cd routersploit 130 | easy_install pip 131 | pip install -r requirements.txt 132 | cd .. 133 | echo "y" | git clone https://github.com/yassineaboukir/Asnlookup.git 134 | echo "y" | git clone https://github.com/exiftool/exiftool.git 135 | echo "y" | git clone https://github.com/GerbenJavado/LinkFinder.git 136 | echo "y" | git clone https://github.com/sensepost/BiLE-suite.git 137 | echo "y" | git clone https://github.com/haroonawanofficial/vasl.git 138 | echo "y" | git clone https://github.com/haroonawanofficial/panthera.git 139 | echo "y" | git clone https://github.com/naqushab/SearchEngineScrapy.git 140 | echo "y" | git clone https://github.com/heycam/json-describe 141 | cd json-describe 142 | cargo build 143 | cd .. 144 | echo "y" | apt-get install nmap 145 | echo "y" | apt-get install xrdp 146 | pip install jsbeautifier 147 | pip install argparse 148 | pip install requests 149 | pip install request 150 | cd SearchEngineScrapy 151 | pip install -r requirements.txt 152 | virtualenv --python="2" env 153 | env/bin/activate 154 | cd .. 
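# Note: "virtualenv --python=2 env" above creates SearchEngineScrapy's virtualenv, but
# "env/bin/activate" only changes the environment when it is sourced into the current
# shell (". env/bin/activate"); run as a bare command, as above, it has no effect.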
155 | echo "y" | git clone https://github.com/FortyNorthSecurity/EyeWitness.git 156 | cd EyeWitness/setup 157 | chmod u+x setup.sh 158 | ./setup.sh 159 | cd .. 160 | cd .. 161 | chmod u+x *.sh 162 | cp * -r /usr/share/ReconCobra 163 | cp *.sh /usr/share/ReconCobra 164 | cat traceroute-function >> ~/.bashrc 165 | source ~/.bashrc 166 | 167 | if [ -d "/usr/share/ReconCobra" ] ; 168 | then 169 | echo -e "$red [$green+$red]$off ReconCobra Successfully Installed, Starting"; 170 | sleep 2; 171 | ReconCobra 172 | else 173 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 174 | exit 175 | fi 176 | } 177 | 178 | if [ -d "/usr/bin/" ];then 179 | banner 180 | echo -e "$red [$green+$red]$off ReconCobra Will Be Installed In Your System"; 181 | linux 182 | else 183 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 184 | exit 185 | fi 186 | -------------------------------------------------------------------------------- /ParrotOS_Installer.sh: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You"(user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software your bound to these point. # 6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 7 | # author "Haroon Awan" and (YOU) user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. # 10 | # 4. This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 11 | # what you do with by/this software as your free to use this software. # 12 | # 5. Any other purpose(s) that it suites as long as it is not related to any kind of # 13 | # crime or using it in un-authorized environment. # 14 | # 6. You can use this software to protect and secure your data information in any # 15 | # environment. # 16 | # 7. It can also be used in state of being protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures achieve protection. # 19 | # *************************************************************************************** # 20 | 21 | #!/bin/bash 22 | 23 | red="\e[0;31m" 24 | green="\e[0;32m" 25 | off="\e[0m" 26 | 27 | function banner() { 28 | clear 29 | echo " "; 30 | echo " "; 31 | echo " "; 32 | echo " .########...########..######.....#######...##....##.....######.....#######....######....########......########.. "; 33 | echo " .##.....##..##.......##....##..##.....##..###...##....##....##...##.....##...##...##...##.....##....##.....##. "; 34 | echo " .##.....##..##.......##........##.....##..####..##....##.........##.....##...##..##....##......##...##.....##. "; 35 | echo " .########...######...##........##.....##..##.##.##....##.........##.....##...##..##....#########....#########. "; 36 | echo " .##...##....##.......##........##.....##..##..####....##.........##.....##...##...##...##....##.....##.....##. "; 37 | echo " .##....##...##.......##....##..##.....##..##...###....##....##...##.....##...##...###..##.....##....##.....##. 
"; 38 | echo " .##.....##..########..######....#######...##....##.....######.....#######....#######...##......##...##.....##. "; 39 | echo " Ultimate Recon and Foot Printing Software Version 1.0a "; 40 | echo " [Coded By: Haroon Awan] "; 41 | echo " [Contact: mrharoonawan@gmail.com] "; 42 | echo " "; 43 | echo " "; 44 | echo " "; 45 | } 46 | 47 | function linux() { 48 | echo -e "$red [$green+$red]$off Installing Perl ..."; 49 | sudo apt-get install -y perl 50 | echo -e "$red [$green+$red]$off Installing JSON Module ..."; 51 | cpan -fi JSON 52 | echo -e "$red [$green+$red]$off Installing Extra Perl Modules ..."; 53 | echo "y" | cpan -fi WWW::Mechanize 54 | echo "y" | cpan -fi use HTML::TokeParser 55 | echo "y" | cpan -fi Term::ANSIColor 56 | echo "y" | cpan -fi Mojo::DOM 57 | echo "y" | cpan -fi Data::Dumper 58 | echo "y" | cpan -fi Win32::Console::ANSI 59 | echo "y" | cpan -fi HTML::TableExtract 60 | echo "y" | cpan -fi Data::Validate::Domain 61 | echo "y" | cpan -fi LWP::Protocol::https 62 | echo "y" | cpan -fi Mozilla::CA 63 | echo "y" | cpan -fi Bundle::LWP 64 | 65 | 66 | echo -e "$red [$green+$red]$off Checking directories..." 67 | if [ -d "/usr/share/ReconCobra" ]; then 68 | echo -e "$red [$green+$red]$off A Directory ReconCobra Was Found! Do You Want To Replace It? [Y/n]:" ; 69 | read replace 70 | if [ "$replace" = "Y" ]; then 71 | sudo rm -r "/usr/share/ReconCobra" 72 | sudo rm "/usr/share/icons/ReconCobra.png" 73 | sudo rm "/usr/share/applications/ReconCobra.desktop" 74 | sudo rm "/usr/local/bin/ReconCobra" 75 | 76 | else 77 | echo -e "$red [$green+$red]$off If You Want To Install You Must Remove Previous Installations"; 78 | exit 79 | fi 80 | fi 81 | 82 | echo -e "$red [$green+$red]$off Installing ..."; 83 | echo -e "$red [$green+$red]$off Creating Symbolic Link ..."; 84 | echo -e "#!/bin/bash 85 | perl /usr/share/ReconCobra/ReconCobra.pl" '${1+"$@"}' > "ReconCobra"; 86 | chmod +x "ReconCobra"; 87 | sudo mkdir "/usr/share/ReconCobra" 88 | sudo cp "installer.sh" "/usr/share/ReconCobra" 89 | sudo cp "ReconCobra.pl" "/usr/share/ReconCobra" 90 | sudo cp "config/ReconCobra.jpeg" "/usr/share/icons" 91 | sudo cp "config/ReconCobra.desktop" "/usr/share/applications" 92 | sudo cp "ReconCobra" "/usr/local/bin/" 93 | rm "ReconCobra"; 94 | 95 | echo -e "$red [$green+$red]$off Installing dependencies..." 96 | echo "y" | apt-get install xdg-utils 97 | echo "y" | apt-get install xrdp 98 | echo "y" | apt-get install cargo 99 | echo "y" | apt-get install x11-utils xutils-dev imagemagick libxext-dev xspy 100 | echo "y" | apt-get install python-yaml 101 | echo "y" | apt-get install hping3 102 | echo "y" | apt-get install ccrypt 103 | echo "y" | apt-get install python2.7 104 | echo "y" | apt-get install python3 105 | echo "y" | apt-get install golang 106 | echo "y" | apt-get install curl 107 | echo "y" | apt-get install nfs-common 108 | echo "y" | apt-get install smbclient 109 | echo "y" | apt-get install gem 110 | gem install wayback_machine_downloader 111 | echo "y" | apt-get install perl-LWP-Protocol-https 112 | echo "y" | git clone https://github.com/xroche/httrack.git --recurse 113 | cd httrack 114 | ./configure --prefix=$HOME/usr && make -j8 && make install 115 | cd .. 
116 | echo "y" | git clone https://github.com/haroonawanofficial/cobra.git 117 | echo "y" | git clone https://github.com/haroonawanofficial/maahro.git 118 | echo "y" | git clone https://github.com/haroonawanofficial/ShaheenX.git 119 | echo "y" | git clone https://github.com/chenjj/CORScanner.git 120 | cd CORScanner 121 | pip install -r requirements.txt 122 | cd .. 123 | echo "y" | git clone https://github.com/yassineaboukir/Asnlookup.git 124 | echo "y" | git clone https://github.com/exiftool/exiftool.git 125 | echo "y" | git clone https://github.com/GerbenJavado/LinkFinder.git 126 | echo "y" | git clone https://github.com/sensepost/BiLE-suite.git 127 | echo "y" | git clone https://github.com/stormshadow07/HackTheWorld.git 128 | cd HackTheWorld 129 | chmod +x install.sh && ./install.sh 130 | cd .. 131 | echo "y" | git clone https://github.com/haroonawanofficial/vasl.git 132 | echo "y" | git clone https://github.com/threat9/routersploit 133 | cd routersploit 134 | sudo easy_install pip 135 | sudo pip install -r requirements.txt 136 | cd .. 137 | echo "y" | git clone https://github.com/haroonawanofficial/panthera.git 138 | echo "y" | git clone https://github.com/naqushab/SearchEngineScrapy.git 139 | echo "y" | git clone https://github.com/heycam/json-describe 140 | cd json-describe 141 | cargo build 142 | cd .. 143 | echo "y" | apt-get install nmap 144 | pip install jsbeautifier 145 | pip install argparse 146 | pip install requests 147 | pip install request 148 | cd SearchEngineScrapy 149 | pip install -r requirements.txt 150 | sudo virtualenv --python="2" env 151 | sudo env/bin/activate 152 | cd .. 153 | echo "y" | git clone https://github.com/FortyNorthSecurity/EyeWitness.git 154 | cd EyeWitness/setup 155 | chmod u+x setup.sh 156 | ./setup.sh 157 | cd .. 158 | cd .. 159 | chmod u+x *.sh 160 | cp * -r /usr/share/ReconCobra 161 | cp *.sh /usr/share/ReconCobra 162 | cat traceroute-function >> ~/.bashrc 163 | source ~/.bashrc 164 | 165 | if [ -d "/usr/share/ReconCobra" ] ; 166 | then 167 | echo -e "$red [$green+$red]$off ReconCobra Successfully Installed, Starting"; 168 | sleep 2; 169 | ReconCobra 170 | else 171 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 172 | exit 173 | fi 174 | } 175 | 176 | if [ -d "/usr/bin/" ];then 177 | banner 178 | echo -e "$red [$green+$red]$off ReconCobra Will Be Installed In Your System"; 179 | linux 180 | else 181 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 182 | exit 183 | fi 184 | -------------------------------------------------------------------------------- /Kali_Installer.sh: -------------------------------------------------------------------------------- 1 | # *************************************************************************************** # 2 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 3 | # Agreement between "Haroon Awan" and "You"(user). # 4 | # ---------------------------------- EULA NOTICE ---------------------------------------- # 5 | # 1. By using this piece of software your bound to these point. # 6 | # 2. This an End User License Agreement (EULA) is a legal between a software application # 7 | # author "Haroon Awan" and (YOU) user of this software. # 8 | # 3. This software application grants users rights to use for any purpose or modify and # 9 | # redistribute creative works. # 10 | # 4. 
This software comes in "is-as" warranty, author "Haroon Awan" take no responsbility # 11 | # what you do with by/this software as your free to use this software. # 12 | # 5. Any other purpose(s) that it suites as long as it is not related to any kind of # 13 | # crime or using it in un-authorized environment. # 14 | # 6. You can use this software to protect and secure your data information in any # 15 | # environment. # 16 | # 7. It can also be used in state of being protection against the unauthorized use of # 17 | # information. # 18 | # 8. It can be used to take measures achieve protection. # 19 | # *************************************************************************************** # 20 | 21 | #!/bin/bash 22 | 23 | red="\e[0;31m" 24 | green="\e[0;32m" 25 | off="\e[0m" 26 | 27 | function banner() { 28 | clear 29 | echo " "; 30 | echo " "; 31 | echo " "; 32 | echo " .########...########..######.....#######...##....##.....######.....#######....######....########......########.. "; 33 | echo " .##.....##..##.......##....##..##.....##..###...##....##....##...##.....##...##...##...##.....##....##.....##. "; 34 | echo " .##.....##..##.......##........##.....##..####..##....##.........##.....##...##..##....##......##...##.....##. "; 35 | echo " .########...######...##........##.....##..##.##.##....##.........##.....##...##..##....#########....#########. "; 36 | echo " .##...##....##.......##........##.....##..##..####....##.........##.....##...##...##...##....##.....##.....##. "; 37 | echo " .##....##...##.......##....##..##.....##..##...###....##....##...##.....##...##...###..##.....##....##.....##. "; 38 | echo " .##.....##..########..######....#######...##....##.....######.....#######....#######...##......##...##.....##. "; 39 | echo " Ultimate Recon and Foot Printing Software Version 1.0a "; 40 | echo " [Coded By: Haroon Awan] "; 41 | echo " [Contact: mrharoonawan@gmail.com] "; 42 | echo " "; 43 | echo " "; 44 | echo " "; 45 | } 46 | 47 | function linux() { 48 | echo -e "$red [$green+$red]$off Installing Perl ..."; 49 | sudo apt-get install -y perl 50 | echo -e "$red [$green+$red]$off Installing JSON Module ..."; 51 | cpan install JSON 52 | echo -e "$red [$green+$red]$off Installing Extra Perl Modules ..."; 53 | echo "y" | cpan install WWW::Mechanize 54 | echo "y" | cpan install use HTML::TokeParser 55 | echo "y" | cpan install Term::ANSIColor 56 | echo "y" | cpan install Mojo::DOM 57 | echo "y" | cpan install Data::Dumper 58 | echo "y" | cpan install Win32::Console::ANSI 59 | echo "y" | cpan install HTML::TableExtract 60 | echo "y" | cpan install Data::Validate::Domain 61 | echo "y" | cpan install LWP::Protocol::https 62 | echo "y" | cpan install Mozilla::CA 63 | echo "y" | cpan install Bundle::LWP 64 | 65 | 66 | echo -e "$red [$green+$red]$off Checking directories..." 67 | if [ -d "/usr/share/ReconCobra" ]; then 68 | echo -e "$red [$green+$red]$off A Directory ReconCobra Was Found! Do You Want To Replace It? 
[Y/n]:" ; 69 | read replace 70 | if [ "$replace" = "Y" ]; then 71 | sudo rm -r "/usr/share/ReconCobra" 72 | sudo rm "/usr/share/icons/ReconCobra.png" 73 | sudo rm "/usr/share/applications/ReconCobra.desktop" 74 | sudo rm "/usr/local/bin/ReconCobra" 75 | 76 | else 77 | echo -e "$red [$green+$red]$off If You Want To Install You Must Remove Previous Installations"; 78 | exit 79 | fi 80 | fi 81 | 82 | echo -e "$red [$green+$red]$off Installing ..."; 83 | echo -e "$red [$green+$red]$off Creating Symbolic Link ..."; 84 | echo -e "#!/bin/bash 85 | perl /usr/share/ReconCobra/ReconCobra.pl" '${1+"$@"}' > "ReconCobra"; 86 | chmod +x "ReconCobra"; 87 | sudo mkdir "/usr/share/ReconCobra" 88 | sudo cp "installer.sh" "/usr/share/ReconCobra" 89 | sudo cp "ReconCobra.pl" "/usr/share/ReconCobra" 90 | sudo cp "config/ReconCobra.jpeg" "/usr/share/icons" 91 | sudo cp "config/ReconCobra.desktop" "/usr/share/applications" 92 | sudo cp "ReconCobra" "/usr/local/bin/" 93 | rm "ReconCobra"; 94 | 95 | echo -e "$red [$green+$red]$off Installing dependencies..." 96 | echo "y" | apt-get install xdg-utils 97 | echo "y" | apt-get install cargo 98 | echo "y" | apt-get install python-yaml 99 | echo "y" | apt-get install hping3 100 | echo "y" | apt-get install python2.7 101 | echo "y" | apt-get install python3 102 | echo "y" | apt-get install x11-utils xutils-dev imagemagick libxext-dev xspy 103 | echo "y" | apt-get install golang 104 | echo "y" | apt-get install curl 105 | echo "y" | apt-get install nfs-common 106 | echo "y" | apt-get install smbclient 107 | echo "y" | apt-get install gem 108 | gem install wayback_machine_downloader 109 | echo "y" | apt-get install perl-LWP-Protocol-https 110 | echo "y" | git clone https://github.com/xroche/httrack.git --recurse 111 | cd httrack 112 | ./configure --prefix=$HOME/usr && make -j8 && make install 113 | cd .. 114 | echo "y" | git clone https://github.com/haroonawanofficial/cobra.git 115 | echo "y" | git clone https://github.com/haroonawanofficial/maahro.git 116 | echo "y" | git clone https://github.com/haroonawanofficial/ShaheenX.git 117 | echo "y" | git clone https://github.com/chenjj/CORScanner.git 118 | cd CORScanner 119 | echo "y" | apt-get install python-pip 120 | echo "y" | apt-get install python3-pip 121 | pip install -r requirements.txt 122 | cd .. 123 | echo "y" | git clone https://github.com/stormshadow07/HackTheWorld.git 124 | cd HackTheWorld 125 | chmod +x install.sh && ./install.sh 126 | cd .. 127 | echo "y" | git clone https://github.com/threat9/routersploit 128 | cd routersploit 129 | sudo apt-get install python-setuptools 130 | pip install setuptools 131 | python setup.py install 132 | sudo pip install -r requirements.txt 133 | cd .. 134 | echo "y" | git clone https://github.com/yassineaboukir/Asnlookup.git 135 | echo "y" | git clone https://github.com/exiftool/exiftool.git 136 | echo "y" | git clone https://github.com/GerbenJavado/LinkFinder.git 137 | echo "y" | git clone https://github.com/sensepost/BiLE-suite.git 138 | echo "y" | git clone https://github.com/haroonawanofficial/vasl.git 139 | echo "y" | git clone https://github.com/haroonawanofficial/panthera.git 140 | echo "y" | git clone https://github.com/naqushab/SearchEngineScrapy.git 141 | echo "y" | git clone https://github.com/heycam/json-describe 142 | cd json-describe 143 | cargo build 144 | cd .. 
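# "cargo build" above compiles json-describe in debug mode; the resulting binary
# stays under json-describe/target/debug/ and is not installed onto the PATH.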
145 | echo "y" | apt-get install nmap 146 | echo "y" | apt-get install xrdp 147 | pip install jsbeautifier 148 | pip install argparse 149 | pip install requests 150 | pip install request 151 | cd SearchEngineScrapy 152 | pip install -r requirements.txt 153 | sudo virtualenv --python="2" env 154 | sudo env/bin/activate 155 | cd .. 156 | echo "y" | git clone https://github.com/FortyNorthSecurity/EyeWitness.git 157 | cd EyeWitness/setup 158 | chmod u+x setup.sh 159 | ./setup.sh 160 | cd .. 161 | cd .. 162 | chmod u+x *.sh 163 | cp * -r /usr/share/ReconCobra 164 | cp *.sh /usr/share/ReconCobra 165 | cat traceroute-function >> ~/.bashrc 166 | source ~/.bashrc 167 | 168 | if [ -d "/usr/share/ReconCobra" ] ; 169 | then 170 | echo -e "$red [$green+$red]$off ReconCobra Successfully Installed, Starting"; 171 | sleep 2; 172 | ReconCobra 173 | else 174 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 175 | exit 176 | fi 177 | } 178 | 179 | if [ -d "/usr/bin/" ];then 180 | banner 181 | echo -e "$red [$green+$red]$off ReconCobra Will Be Installed In Your System"; 182 | linux 183 | else 184 | echo -e "$red [$green+$red]$off ReconCobra Cannot Be Installed. Trying using Portable Edition !"; 185 | exit 186 | fi 187 | -------------------------------------------------------------------------------- /ftp.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/perl -w 2 | # ftp-user-enum - Brute Force Usernames 3 | # Copyright (C) 2006 pentestmonkey@pentestmonkey.net 4 | # 5 | # This program is free software; you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License version 2 as 7 | # published by the Free Software Foundation. 8 | # 9 | # This program is distributed in the hope that it will be useful, 10 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | # GNU General Public License for more details. 13 | # 14 | # You should have received a copy of the GNU General Public License along 15 | # with this program; if not, write to the Free Software Foundation, Inc., 16 | # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 17 | # 18 | # This tool may be used for legal purposes only. Users take full responsibility 19 | # for any actions performed using this tool. If these terms are not acceptable to 20 | # you, then do not use this tool. 
21 | # 22 | # You are encouraged to send comments, improvements or suggestions to 23 | # me at ftp-user-enum@pentestmonkey.net 24 | # 25 | # This program is derived from dns-grind v1.0 ( http://pentestmonkey.net/tools/ftp-user-enum ) 26 | 27 | use strict; 28 | use Socket; 29 | use IO::Handle; 30 | use IO::Select; 31 | use IO::Socket::INET; 32 | use Getopt::Std; 33 | $| = 1; 34 | 35 | my $VERSION = "1.0"; 36 | my $debug = 0; 37 | my @child_handles = (); 38 | my $verbose = 0; 39 | my $max_procs = 5; 40 | my $ftp_port = 21; 41 | my @usernames = (); 42 | my @hosts = (); 43 | my $recursive_flag = 1; 44 | my $query_timeout = 15; 45 | my $start_time = time(); 46 | my $end_time; 47 | my $mode = "sol"; 48 | my $kill_child_string = "\x00"; 49 | $SIG{CHLD} = 'IGNORE'; # auto-reap 50 | my %opts; 51 | my $usage=<; 122 | } 123 | 124 | if (defined($host_file)) { 125 | open(FILE, "<$host_file") or die "ERROR: Can't open hosts file $host_file: $!\n"; 126 | @hosts = map { chomp($_); $_ } <FILE>; 127 | } 128 | 129 | if (defined($username)) { 130 | push @usernames, $username; 131 | } 132 | 133 | if (defined($host)) { 134 | push @hosts, $host; 135 | } 136 | 137 | if (defined($host_file) and not @hosts) { 138 | print "ERROR: Targets file $host_file was empty\n"; 139 | exit 1; 140 | } 141 | 142 | if (defined($username_file) and not @usernames) { 143 | print "ERROR: Username file $username_file was empty\n"; 144 | exit 1; 145 | } 146 | 147 | print "Starting ftp-user-enum v$VERSION ( http://pentestmonkey.net/tools/ftp-user-enum )\n"; 148 | print "\n"; 149 | print " ----------------------------------------------------------\n"; 150 | print "| Scan Information |\n"; 151 | print " ----------------------------------------------------------\n"; 152 | print "\n"; 153 | print "Mode ..................... $mode\n"; 154 | print "Worker Processes ......... $max_procs\n"; 155 | print "Targets file ............. $host_file\n" if defined($host_file); 156 | print "Usernames file ........... $username_file\n" if defined($username_file); 157 | print "Target count ............. " . scalar(@hosts) . "\n" if @hosts; 158 | print "Username count ........... " . scalar(@usernames) . "\n" if @usernames; 159 | print "Target TCP port .......... $ftp_port\n"; 160 | print "Query timeout ............ $query_timeout secs\n"; 161 | print "\n"; 162 | print "######## Scan started at " . scalar(localtime()) . " #########\n"; 163 | 164 | # Spawn off correct number of children 165 | foreach my $proc_count (1..$max_procs) { 166 | socketpair(my $child, my $parent, AF_UNIX, SOCK_STREAM, PF_UNSPEC) or die "socketpair: $!"; 167 | $child->autoflush(1); 168 | $parent->autoflush(1); 169 | 170 | # Parent executes this 171 | if (my $pid = fork) { 172 | close $parent; 173 | print "[Parent] Spawned child with PID $pid to do resolving\n" if $debug; 174 | push @child_handles, $child; 175 | 176 | # Child executes this 177 | } else { 178 | close $child; 179 | while (1) { 180 | my $timed_out = 0; 181 | 182 | # Read host and username from parent 183 | my $line = <$parent>; 184 | chomp($line); 185 | my ($host, $username) = $line =~ /^(\S+)\t(.*)$/; 186 | 187 | # Exit if told to by parent 188 | if ($line eq $kill_child_string) { 189 | print "[Child $$] Exiting\n" if $debug; 190 | exit 0; 191 | } 192 | 193 | # Sanity check host and username 194 | if (defined($host) and defined($username)) { 195 | print "[Child $$] Passed host $host and username $username\n" if $debug; 196 | } else { 197 | print "[Child $$] WARNING: Passed garbage. 
Ignoring: $line\n"; 198 | next; 199 | } 200 | 201 | # Do ftp query with timeout 202 | my $response; 203 | eval { 204 | local $SIG{ALRM} = sub { die "alarm\n" }; 205 | alarm $query_timeout; 206 | my $s = IO::Socket::INET->new( PeerAddr => $host, 207 | PeerPort => $ftp_port, 208 | Proto => 'tcp' 209 | ) 210 | or die "Can't connect to $host:$ftp_port: $!\n"; 211 | if ($mode eq "sol") { 212 | wait_for_banner($s); 213 | $s->send("cwd ~$username\r\n"); 214 | my $buffer; 215 | $s->recv($buffer, 10000); 216 | $response .= $buffer; 217 | my $wait = 0.1; 218 | select(undef, undef, undef, $wait); 219 | $s->recv($buffer, 10000); 220 | $response .= $buffer; 221 | } elsif ($mode eq "iu") { 222 | wait_for_banner($s); 223 | $s->send("USER $username\r\n"); 224 | $response = get_line($s); 225 | } else { 226 | die "ERROR: Incorrect mode. This shouldn't happen.\n"; 227 | } 228 | alarm 0; 229 | }; 230 | 231 | if ($@) { 232 | $timed_out = 1; 233 | print "[Child $$] Timeout for username $username on host $host\n" if $debug; 234 | } 235 | 236 | my $trace; 237 | if ($debug) { 238 | $trace = "[Child $$] $username\@$host: "; 239 | } else { 240 | $trace = "$username\@$host: "; 241 | } 242 | 243 | if ($mode eq "sol") { 244 | if ($response and not $timed_out) { 245 | 246 | # Negative result 247 | if ($response =~ /550 Unknown user name after ~/s) { 248 | print $parent $trace . "\n"; 249 | next; 250 | } 251 | 252 | # Positive result 253 | if ($response =~ /530 Please login with USER and PASS./) { 254 | print $parent $trace . "$username\n"; 255 | next; 256 | } 257 | 258 | # Unknown response 259 | $response =~ s/[\n\r]/./g; 260 | print $parent $trace . "$response\n"; 261 | next; 262 | } 263 | } elsif ($mode eq "iu") { 264 | if ($response and not $timed_out) { 265 | 266 | # Positive result 267 | if ($response =~ /530 User.*access denied./) { 268 | print $parent $trace . "$username\n"; 269 | next; 270 | } 271 | 272 | # Negative result 273 | if ($response =~ /530 /s) { 274 | print $parent $trace . "\n"; 275 | next; 276 | } 277 | 278 | # Positive result 279 | if ($response =~ /331 Password required for/) { 280 | print $parent $trace . "$username\n"; 281 | next; 282 | } 283 | 284 | # Unknown response 285 | $response =~ s/[\n\r]/./g; 286 | print $parent $trace . "$response\n"; 287 | next; 288 | } 289 | } else { 290 | die "ERROR: Incorrect mode. This shouldn't happen.\n"; 291 | } 292 | 293 | if ($timed_out) { 294 | print $parent $trace . "\n"; 295 | } else { 296 | if (!$response) { 297 | print $parent $trace . "\n"; 298 | } 299 | } 300 | } 301 | exit; 302 | } 303 | } 304 | 305 | # Fork once more to make a process that will feed us usernames and hosts 306 | socketpair(my $get_next_query, my $parent, AF_UNIX, SOCK_STREAM, PF_UNSPEC) or die "socketpair: $!"; 307 | $get_next_query->autoflush(1); 308 | $parent->autoflush(1); 309 | 310 | # Parent executes this 311 | if (my $pid = fork) { 312 | close $parent; 313 | 314 | # Child executes this 315 | } else { 316 | # Generate queries from username-host pairs and send to parent 317 | foreach my $username (@usernames) { 318 | foreach my $host (@hosts) { 319 | my $query = $host . "\t" . 
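                        # (wire format: the parent sends one "host\tusername\n" record
                        #  per query; the child splits it back apart with /^(\S+)\t(.*)$/
                        #  before probing the FTP service)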
$username; 320 | print "[Query Generator] Sending $query to parent\n" if $debug; 321 | print $parent "$query\n"; 322 | } 323 | } 324 | 325 | exit 0; 326 | } 327 | 328 | printf "Created %d child processes\n", scalar(@child_handles) if $debug; 329 | my $s = IO::Select->new(); 330 | my $s_in = IO::Select->new(); 331 | $s->add(@child_handles); 332 | $s_in->add(\*STDIN); 333 | my $timeout = 0; # non-blocking 334 | my $more_queries = 1; 335 | my $outstanding_queries = 0; 336 | my $query_count = 0; 337 | my $result_count = 0; 338 | 339 | # Write to each child process once 340 | writeloop: foreach my $write_handle (@child_handles) { 341 | my $query = <$get_next_query>; 342 | if ($query) { 343 | chomp($query); 344 | print "[Parent] Sending $query to child\n" if $debug; 345 | print $write_handle "$query\n"; 346 | $outstanding_queries++; 347 | } else { 348 | print "[Parent] Quitting main loop. All queries have been read.\n" if $debug; 349 | last writeloop; 350 | } 351 | } 352 | 353 | # Keep reading from child processes until there are no more queries left 354 | # Write to a child only after it has been read from 355 | mainloop: while (1) { 356 | # Wait until there's a child that we can either read from or written to. 357 | my ($rh_aref) = IO::Select->select($s, undef, undef); # blocking 358 | 359 | print "[Parent] There are " . scalar(@$rh_aref) . " children that can be read from\n" if $debug; 360 | 361 | foreach my $read_handle (@$rh_aref) { 362 | # Read from child 363 | chomp(my $line = <$read_handle>); 364 | if ($verbose == 1 or $debug == 1 or not ($line =~ // or $line =~ /no result/ or $line =~ //)) { 365 | print "$line\n"; 366 | $result_count++ unless ($line =~ // or $line =~ /no result/ or $line =~ //); 367 | } 368 | $outstanding_queries--; 369 | $query_count++; 370 | 371 | # Write to child 372 | my $query = <$get_next_query>; 373 | if ($query) { 374 | chomp($query); 375 | print "[Parent] Sending $query to child\n" if $debug; 376 | print $read_handle "$query\n"; 377 | $outstanding_queries++; 378 | } else { 379 | print "DEBUG: Quitting main loop. All queries have been read.\n" if $debug; 380 | last mainloop; 381 | } 382 | } 383 | } 384 | 385 | # Wait to get replies back from remaining children 386 | my $count = 0; 387 | readloop: while ($outstanding_queries) { 388 | my @ready_to_read = $s->can_read(1); # blocking 389 | foreach my $child_handle (@ready_to_read) { 390 | print "[Parent] Outstanding queries: $outstanding_queries\n" if $debug; 391 | chomp(my $line = <$child_handle>); 392 | if ($verbose == 1 or $debug == 1 or not ($line =~ // or $line =~ /no result/ or $line =~ //)) { 393 | print "$line\n"; 394 | $result_count++ unless ($line =~ // or $line =~ /no result/ or $line =~ //); 395 | } 396 | print $child_handle "$kill_child_string\n"; 397 | $s->remove($child_handle); 398 | $outstanding_queries--; 399 | $query_count++; 400 | } 401 | } 402 | 403 | # Tell any remaining children to exit 404 | foreach my $handle ($s->handles) { 405 | print "[Parent] Telling child to exit\n" if $debug; 406 | print $handle "$kill_child_string\n"; 407 | } 408 | 409 | # Wait for all children to terminate 410 | while(wait != -1) {}; 411 | 412 | print "######## Scan completed at " . scalar(localtime()) . 
" #########\n"; 413 | print "$result_count results.\n"; 414 | print "\n"; 415 | $end_time = time(); # Second granularity only to avoid depending on hires time module 416 | my $run_time = $end_time - $start_time; 417 | $run_time = 1 if $run_time < 1; # Avoid divide by zero 418 | printf "%d queries in %d seconds (%0.1f queries / sec)\n", $query_count, $run_time, $query_count / $run_time; 419 | 420 | sub wait_for_banner { 421 | my $sock = shift; 422 | my $banner = ""; 423 | # print "$$: waiting for banner\n"; 424 | 425 | while ($banner !~ /220 .*\n/s) { 426 | my $buffer; 427 | $sock->read($buffer, 1); 428 | $banner .= $buffer; 429 | # print "$$: $banner\n"; 430 | } 431 | 432 | # print "$$: final banner: $banner\n"; 433 | } 434 | 435 | sub get_line { 436 | my $sock = shift; 437 | my $line = ""; 438 | while ($line !~ /\d\d\d .*\n/s) { 439 | my $buffer; 440 | $sock->read($buffer, 1); 441 | $line .= $buffer; 442 | # print "$$: $banner\n"; 443 | } 444 | 445 | return $line; 446 | } --------------------------------------------------------------------------------