├── README.md
├── phoenix
└── querycrt

/README.md:
--------------------------------------------------------------------------------
# BashitRecon

## Phoenix file

Common passive ways to extract subdomains, using a domain as the input.

## CIDR to IP range with ipcalc

```
prips() {
    # ipcalc prints HostMin/HostMax; split their octets and expand the range
    # with brace expansion (approximate: each octet is expanded independently).
    read -r a b c d e f g h <<< $(
        ipcalc -nb "$1" | awk '/HostM/{print $2}' | tr '\n .' ' '
    );
    eval "echo {$a..$e}.{$b..$f}.{$c..$g}.{$d..$h}"
}
```

## Filtering by internal IPs only

xargs -a hostslist -I@ sh -c 'ip=$(dig +short @); [ -z "${ip##*10.*}" ] && echo @'

## Filtering by live hosts

xargs -P 500 -a hostslist -I@ sh -c 'dig @ | grep -q NOERROR && echo @'

xargs -P 500 -a hostslist -I@ sh -c 'nc -w1 -z -v @ 80 2>/dev/null && echo @'

## Extract only HTTP URLs using gospider (requires anew installed)

xargs -P 500 -a hostslist -I@ sh -c 'nc -w1 -z -v @ 443 2>/dev/null && echo @' | xargs -I@ -P10 sh -c 'gospider -a -s "https://@" -d 2 | grep -Eo "(http|https)://[^/\"]+" | anew'

## Extract only JS files using gospider (requires anew installed)

xargs -P 500 -a hostslist -I@ sh -c 'nc -w1 -z -v @ 8443 2>/dev/null && echo @' | xargs -I@ -P10 sh -c 'gospider -a -s "https://@" -d 2 | grep -Eo "(http|https)://[^/\"].*\.js" | sed "s#\] \- #\n#g" | anew'

## Extract domains using openssl (requires anew installed)

xargs -P100 -a hostslist -I@ sh -c 'ip=$(dig +short @); [ ! -z "$ip" ] && printf "GET / HTTP/1.1\r\nHost: $ip\r\n\r\n" | timeout 2 openssl s_client -connect $ip:443 2>/dev/null' | sed 's# \|/\|=#\n#g' | grep paypal | anew

Replace paypal with a keyword that matches your target's domains.

## Getting domains using reverse DNS

### Command

xargs -P 500 -a hostslist -I@ sh -c 'dig @' 2>/dev/null | awk -F'<<>>' '{print $3}' | xargs -n1 | tee -a hosts

## Getting domains which resolve to an IP (avoids false positives)

### Command

cat hostUnicos.txt | while read line; do xargs -P 500 -a ../../subbrute/names_small.txt -I@ sh -c "dig +noidnout +noidnin +short @.$line | grep -c '^' 1>/dev/null && echo @.$line | tee -a hostsDig"; done

## Getting subdomains by SSL

### Command

xargs -P 100 -a hostUnicos -I@ sh -c 'sanssl @ 2>/dev/null | grep -v "No domains"'

Source: https://raw.githubusercontent.com/appsecco/the-art-of-subdomain-enumeration/master/san_subdomain_enum.py

## Getting subdomains from all DNS records plus subdomain brute force

### Command

dnsrecon -d alertmanager-1.staging.rtcdn.caffeine.tv -D /opt/SecLists/Discovery/DNS/subdomains-top1million-5000.txt -t brt

(Doesn't seem to be very useful)

### Command

altdns -t 600 -w /opt/SecLists/Discovery/DNS/subdomains-top1million-5000.txt -i hostUnicos.txt -o hosts -r -s results_output.txt
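
## Example: chaining the CIDR expansion with the live-host probe

A minimal sketch that feeds the `prips` helper above into the `nc` check from "Filtering by live hosts". The range 10.0.0.0/28 and the output file https_up are only placeholders; ipcalc and nc must be installed. prips prints the whole range on one line, so tr splits it into one IP per line for xargs.

```
prips 10.0.0.0/28 | tr ' ' '\n' | xargs -P 50 -I@ sh -c 'nc -w1 -z -v @ 443 2>/dev/null && echo @' | tee https_up
```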
--------------------------------------------------------------------------------
/phoenix:
--------------------------------------------------------------------------------
#!/bin/bash

# Check that the following are installed:
# unfurl - https://github.com/tomnomnom/unfurl
# jq - apt install jq
# psql - apt install postgresql-client

function wayback() {
    curl -s "http://web.archive.org/cdx/search/cdx?url=*.$1/&output=txt&fl=original&collapse=urlkey" | grep -vi '.svg\|.png\|.img\|.ttf\|.eot\|.woff\|.ico\|.css\|.jpg\|.jpeg\|.pdf\|.doc' | unfurl domains | sort -u
}

function crt() {
    # Old version =). Now we use a direct psql connection, which has no result limit.
    # curl -s "https://crt.sh/?q=$1&output=json" | jq -r '.[]["name_value"]' | sed 's#\*\.##g; s#www\.##g' | sort -u
    if [ ! -f querycrt ]; then
        wget https://raw.githubusercontent.com/bminossi/BashitRecon/master/querycrt &>/dev/null
    fi
    sed "s#uber.com#$1#g" querycrt >tmpsql
    psql -f tmpsql -h crt.sh -p 5432 -U guest certwatch | grep "$1" | cut -d "|" -f3 | tr -d " \|+" | sort -u
    rm tmpsql
}

function host2ip() {
    ip=$(dig +short $1)
}

function ip2org() {
    host2ip $1
    org=$(curl -s https://api.iptoasn.com/v1/as/ip/$ip | jq -r ".as_description")
}

function org2ranges() {
    ip2org $1
    curl -s "http://asnlookup.com/api/lookup?org=$org" | jq -r ".[]"
}

# Recon by grepping the JS of the page's Wayback snapshot (kept commented out)
#function spiderWaybackJs(){
#cat up | while read dominio;do
#    echo -ne "\n[$dominio]\n"
#    wayback=$(curl -sf http://archive.org/wayback/available?url=$dominio | jq -r ".archived_snapshots.closest.url")
#    curl -sf $wayback | grep -Eo '(http|https)://[^/"].*js' | grep "$dominio" |sort -u
#    done
#}

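# gau: enumerates hostnames from the Common Crawl index. It lists every CDX
# API endpoint advertised by index.commoncrawl.org, pages through the results
# for *.<domain> and prints the unique domains found in the archived URLs.
# Very slow, which is why the call at the bottom of the script is commented out.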
function gau() {
    availableApis=$(curl -sf http://index.commoncrawl.org/collinfo.json | jq -r '.[] | .["cdx-api"]')
    echo $availableApis | sed 's# #\n#g' | while read apiAvailable; do
        apiResult="$apiAvailable?url=*.$1"
        totalPages=$(curl -sf "$apiResult&showNumPages=true" | jq -r ".pages")
        for page in $(eval echo "{0..$totalPages}"); do
            result=$(curl -sf "$apiResult&output=json&fl=url&page=$page" | jq -r ".url")
            if [ -n "$result" ]; then
                echo $result | sed 's# #\n#g' | unfurl domains | sort -u
            fi
        done
    done
}

function abuseipdb() {
    curl -s "https://www.abuseipdb.com/whois/$1" | grep "<li>.*</li>" | grep -v "=" | sed 's/<[^>]*>//g' | sed "s/$/\.$1/g"
}

function sublist3r() {
    curl -s "https://api.sublist3r.com/search.php?domain=$1" | jq -r ".[]"
}

function threatcrowd() {
    curl -s "https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=$1" | jq -r ".subdomains|.[]"
}

function hackertarget() {
    # Replace the cut parameter f1 with f2 to get the IPs from the API instead!
    curl -s "http://api.hackertarget.com/hostsearch/?q=$1" | sort -u | cut -d "," -f1
}

function dnsbufferover() {
    # Replace f2 with f1 to get the IPs instead :)
    curl -s "http://dns.bufferover.run/dns?q=.$1" | jq -r ".FDNS_A|.[]" | cut -d "," -f2
}

function askengine() {
    # Run it separately. It's a search engine scraper, so it will run almost forever =)
    for page in {1..999}; do
        curl -i -s "https://www.ask.com/web?q=site:$1%20-www.$1&o=0&l=dir&qo=pagination&page=$page" |
            grep "PartialSearchResults.*$1" |
            sed 's/<[^>]*>//g' |
            unfurl domains
    done
}

function rapiddns() {
    # It could take a while ...
    curl -s "https://rapiddns.io/subdomain/$1?full=1" |
        grep "href.*$1" |
        sed 's/<[^>]*>//g'
}

#wayback $1
crt $1
abuseipdb $1
sublist3r $1
threatcrowd $1
hackertarget $1
dnsbufferover $1
#org2ranges $1

# Slow tools
rapiddns $1

# Eternal tools. Trust me.
#gau $1
#askengine $1
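
# Example invocation (assuming the script is executable; the output file name
# is just a suggestion):
#   ./phoenix example.com | sort -u | tee subdomains.txt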
--------------------------------------------------------------------------------
/querycrt:
--------------------------------------------------------------------------------
WITH ci AS (
    SELECT min(sub.CERTIFICATE_ID) ID,
        min(sub.ISSUER_CA_ID) ISSUER_CA_ID,
        array_agg(DISTINCT sub.NAME_VALUE) NAME_VALUES,
        x509_subjectName(sub.CERTIFICATE) SUBJECT_NAME,
        x509_notBefore(sub.CERTIFICATE) NOT_BEFORE,
        x509_notAfter(sub.CERTIFICATE) NOT_AFTER
    FROM (select *
        FROM certificate_and_identities cai
        WHERE plainto_tsquery('certwatch', '%uber.com%') @@ identities(cai.CERTIFICATE)
        AND cai.NAME_VALUE ILIKE ('%' || 'uber.com' || '%')
        LIMIT 9000000
    ) sub
    GROUP BY sub.CERTIFICATE
)
SELECT ci.ISSUER_CA_ID,
    ca.NAME ISSUER_NAME,
    array_to_string(ci.NAME_VALUES, chr(10)) NAME_VALUE,
    ci.ID ID,
    le.ENTRY_TIMESTAMP,
    ci.NOT_BEFORE,
    ci.NOT_AFTER
FROM ci
    LEFT JOIN LATERAL (
        SELECT min(ctle.ENTRY_TIMESTAMP) ENTRY_TIMESTAMP
        FROM ct_log_entry ctle
        WHERE ctle.CERTIFICATE_ID = ci.ID
    ) le ON TRUE,
    ca
WHERE ci.ISSUER_CA_ID = ca.ID
ORDER BY le.ENTRY_TIMESTAMP DESC NULLS LAST;
--------------------------------------------------------------------------------