├── GOscripts ├── cnames.go ├── cors.go ├── go.go ├── inturl.go └── trace.go ├── README.md ├── cors ├── go ├── html.sh ├── httprobe ├── hussh.sh ├── inturl ├── screens ├── 1.png └── hussh.png ├── trace └── waybackurls /GOscripts/cnames.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "math/rand" 7 | "net" 8 | "os" 9 | "strings" 10 | "sync" 11 | "time" 12 | 13 | "github.com/miekg/dns" 14 | ) 15 | 16 | func main() { 17 | 18 | servers := []string{ 19 | //"209.244.0.3", 20 | //"209.244.0.4", 21 | //"64.6.64.6", 22 | //"64.6.65.6", 23 | "8.8.8.8", 24 | "8.8.4.4", 25 | "9.9.9.9", 26 | // "149.112.112.112", 27 | // "84.200.69.80", 28 | // "84.200.70.40", 29 | // "8.26.56.26", 30 | // "8.20.247.20", 31 | // "208.67.222.222", 32 | // "208.67.220.220", 33 | // "199.85.126.10", 34 | // "199.85.127.10", 35 | // "81.218.119.11", 36 | // "209.88.198.133", 37 | // "195.46.39.39", 38 | // "195.46.39.40", 39 | // "69.195.152.204", 40 | // "23.94.60.240", 41 | // "208.76.50.50", 42 | // "208.76.51.51", 43 | // "216.146.35.35", 44 | // "216.146.36.36", 45 | // "37.235.1.174", 46 | // "37.235.1.177", 47 | // "198.101.242.72", 48 | // "23.253.163.53", 49 | // "77.88.8.8", 50 | // "77.88.8.1", 51 | // "91.239.100.100", 52 | // "89.233.43.71", 53 | // "74.82.42.42", 54 | // "109.69.8.51", 55 | // "156.154.70.1", 56 | // "156.154.71.1", 57 | "1.1.1.1", 58 | "1.0.0.1", 59 | // "45.77.165.194", 60 | } 61 | 62 | rand.Seed(time.Now().Unix()) 63 | 64 | type job struct{ domain, server string } 65 | jobs := make(chan job) 66 | 67 | var wg sync.WaitGroup 68 | for i := 0; i < 20; i++ { 69 | wg.Add(1) 70 | 71 | go func() { 72 | for j := range jobs { 73 | 74 | cname, err := getCNAME(j.domain, j.server) 75 | if err != nil { 76 | //fmt.Println(err) 77 | continue 78 | } 79 | 80 | if !resolves(cname) { 81 | fmt.Printf("%s does not resolve (pointed at by %s)\n", cname, j.domain) 82 | } 83 | } 84 | wg.Done() 85 
| }() 86 | } 87 | 88 | sc := bufio.NewScanner(os.Stdin) 89 | 90 | for sc.Scan() { 91 | target := strings.ToLower(strings.TrimSpace(sc.Text())) 92 | if target == "" { 93 | continue 94 | } 95 | server := servers[rand.Intn(len(servers))] 96 | 97 | jobs <- job{target, server} 98 | } 99 | close(jobs) 100 | 101 | wg.Wait() 102 | 103 | } 104 | 105 | func resolves(domain string) bool { 106 | _, err := net.LookupHost(domain) 107 | return err == nil 108 | } 109 | 110 | func getCNAME(domain, server string) (string, error) { 111 | c := dns.Client{} 112 | 113 | m := dns.Msg{} 114 | if domain[len(domain)-1:] != "." { 115 | domain += "." 116 | } 117 | m.SetQuestion(domain, dns.TypeCNAME) 118 | m.RecursionDesired = true 119 | 120 | r, _, err := c.Exchange(&m, server+":53") 121 | if err != nil { 122 | return "", err 123 | } 124 | 125 | if len(r.Answer) == 0 { 126 | return "", fmt.Errorf("no answers for %s", domain) 127 | } 128 | 129 | for _, ans := range r.Answer { 130 | if r, ok := ans.(*dns.CNAME); ok { 131 | return r.Target, nil 132 | } 133 | } 134 | return "", fmt.Errorf("no cname for %s", domain) 135 | 136 | } -------------------------------------------------------------------------------- /GOscripts/cors.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "crypto/tls" 6 | "fmt" 7 | "io" 8 | "io/ioutil" 9 | "net" 10 | "net/http" 11 | "net/url" 12 | "os" 13 | "sync" 14 | "time" 15 | ) 16 | 17 | func main() { 18 | urls := make(chan string) 19 | 20 | // workers 21 | var wg sync.WaitGroup 22 | for i := 0; i < 20; i++ { 23 | wg.Add(1) 24 | 25 | c := getClient() 26 | go func() { 27 | defer wg.Done() 28 | 29 | for u := range urls { 30 | testOrigins(c, u) 31 | } 32 | }() 33 | } 34 | 35 | sc := bufio.NewScanner(os.Stdin) 36 | 37 | for sc.Scan() { 38 | urls <- sc.Text() 39 | } 40 | close(urls) 41 | 42 | wg.Wait() 43 | 44 | } 45 | 46 | func getClient() *http.Client { 47 | tr := &http.Transport{ 48 | 
MaxIdleConns: 30, 49 | IdleConnTimeout: time.Second, 50 | TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, 51 | DialContext: (&net.Dialer{ 52 | Timeout: time.Second * 10, 53 | KeepAlive: time.Second, 54 | }).DialContext, 55 | } 56 | 57 | re := func(req *http.Request, via []*http.Request) error { 58 | return http.ErrUseLastResponse 59 | } 60 | 61 | return &http.Client{ 62 | Transport: tr, 63 | CheckRedirect: re, 64 | Timeout: time.Second * 10, 65 | } 66 | } 67 | 68 | func testOrigins(c *http.Client, u string) { 69 | 70 | pp, err := getPermutations(u) 71 | 72 | if err != nil { 73 | fmt.Fprintf(os.Stderr, "%s\n", err) 74 | return 75 | } 76 | 77 | for _, p := range pp { 78 | 79 | req, err := http.NewRequest("GET", u, nil) 80 | if err != nil { 81 | return 82 | } 83 | req.Header.Set("Origin", p) 84 | 85 | resp, err := c.Do(req) 86 | if resp != nil { 87 | io.Copy(ioutil.Discard, resp.Body) 88 | resp.Body.Close() 89 | } 90 | if err != nil { 91 | fmt.Fprintf(os.Stderr, "error requesting %s: %s\n", u, err) 92 | return 93 | } 94 | 95 | acao := resp.Header.Get("Access-Control-Allow-Origin") 96 | acac := resp.Header.Get("Access-Control-Allow-Credentials") 97 | 98 | if acao == p { 99 | fmt.Printf("%s %s %s\n", u, p, acac) 100 | } 101 | } 102 | } 103 | 104 | func getPermutations(raw string) ([]string, error) { 105 | u, err := url.Parse(raw) 106 | if err != nil { 107 | return []string{}, err 108 | } 109 | 110 | fixed := []string{ 111 | "https://evil.com", 112 | } 113 | 114 | patterns := []string{ 115 | "https://%s.evil.com", 116 | "https://%sevil.com", 117 | } 118 | 119 | for i, p := range patterns { 120 | patterns[i] = fmt.Sprintf(p, u.Hostname()) 121 | } 122 | 123 | return append(fixed, patterns...), nil 124 | 125 | } -------------------------------------------------------------------------------- /GOscripts/go.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "crypto/tls" 6 | "flag" 7 | 
"fmt" 8 | "io" 9 | "io/ioutil" 10 | "net/http" 11 | "net/url" 12 | "os" 13 | "strings" 14 | "sync" 15 | "time" 16 | "unicode/utf8" 17 | ) 18 | 19 | var ( 20 | aliveHosts = make(map[string]struct{}) 21 | aliveMutex = &sync.Mutex{} 22 | 23 | client *http.Client 24 | transport *http.Transport 25 | wg sync.WaitGroup 26 | 27 | concurrency = 50 28 | maxSize = int64(1024000) 29 | ) 30 | 31 | func main() { 32 | flag.Parse() 33 | 34 | var input io.Reader 35 | input = os.Stdin 36 | 37 | if flag.NArg() > 0 { 38 | file, err := os.Open(flag.Arg(0)) 39 | if err != nil { 40 | fmt.Printf("failed to open file: %s\n", err) 41 | os.Exit(1) 42 | } 43 | input = file 44 | } 45 | 46 | sc := bufio.NewScanner(input) 47 | 48 | client = &http.Client{ 49 | Transport: &http.Transport{ 50 | MaxIdleConns: concurrency, 51 | MaxIdleConnsPerHost: concurrency, 52 | MaxConnsPerHost: concurrency, 53 | TLSClientConfig: &tls.Config{ 54 | InsecureSkipVerify: true, 55 | }, 56 | }, 57 | Timeout: 5 * time.Second, 58 | CheckRedirect: func(_ *http.Request, _ []*http.Request) error { 59 | return http.ErrUseLastResponse 60 | }, 61 | } 62 | 63 | semaphore := make(chan bool, concurrency) 64 | 65 | for sc.Scan() { 66 | raw := sc.Text() 67 | wg.Add(1) 68 | semaphore <- true 69 | go func(raw string) { 70 | defer wg.Done() 71 | u, err := url.ParseRequestURI(raw) 72 | if err != nil { 73 | return 74 | } 75 | resp, ws, err := fetchURL(u) 76 | if err != nil { 77 | return 78 | } 79 | if resp.StatusCode < 400 || resp.StatusCode >=500 { 80 | fmt.Printf("%-3d %-9d %-5d %s\n", resp.StatusCode, resp.ContentLength, ws, u.String()) 81 | } 82 | }(raw) 83 | <-semaphore 84 | } 85 | 86 | wg.Wait() 87 | 88 | if sc.Err() != nil { 89 | fmt.Printf("error: %s\n", sc.Err()) 90 | } 91 | } 92 | 93 | func fetchURL(u *url.URL) (*http.Response, int, error) { 94 | wordsSize := 0 95 | 96 | req, err := http.NewRequest("GET", u.String(), nil) 97 | if err != nil { 98 | return nil, 0, err 99 | } 100 | 101 | req.Header.Set("User-Agent", "burl/0.1") 
102 | 103 | resp, err := client.Do(req) 104 | if err != nil { 105 | return nil, 0, err 106 | } 107 | 108 | defer resp.Body.Close() 109 | 110 | if resp.ContentLength <= maxSize { 111 | if respbody, err := ioutil.ReadAll(resp.Body); err == nil { 112 | resp.ContentLength = int64(utf8.RuneCountInString(string(respbody))) 113 | wordsSize = len(strings.Split(string(respbody), " ")) 114 | } 115 | } 116 | 117 | io.Copy(ioutil.Discard, resp.Body) 118 | 119 | return resp, wordsSize, err 120 | } -------------------------------------------------------------------------------- /GOscripts/inturl.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "net/url" 7 | "os" 8 | "sort" 9 | "strings" 10 | ) 11 | 12 | // Ideas: 13 | // More than, say, 3 query string parameteres (exluding utm_*?) 14 | // Popular app names (phpmyadmin etc) in path 15 | // Filenames from configfiles list / seclist 16 | // dev/stage/test in path or hostname 17 | // jenkins, graphite etc in hostname or path 18 | 19 | type urlCheck func(*url.URL) bool 20 | 21 | func main() { 22 | 23 | checks := []urlCheck{ 24 | // query string stuff 25 | func(u *url.URL) bool { 26 | 27 | interesting := 0 28 | for k, vv := range u.Query() { 29 | for _, v := range vv { 30 | if qsCheck(k, v) { 31 | interesting++ 32 | } 33 | } 34 | } 35 | return interesting > 0 36 | }, 37 | 38 | // extensions 39 | func(u *url.URL) bool { 40 | exts := []string{ 41 | ".php", 42 | ".phtml", 43 | ".asp", 44 | ".aspx", 45 | ".asmx", 46 | ".ashx", 47 | ".cgi", 48 | ".pl", 49 | ".json", 50 | ".xml", 51 | ".rb", 52 | ".py", 53 | ".sh", 54 | ".yaml", 55 | ".yml", 56 | ".toml", 57 | ".ini", 58 | ".md", 59 | ".mkd", 60 | ".do", 61 | ".jsp", 62 | ".jspa", 63 | ".txt", 64 | } 65 | 66 | p := strings.ToLower(u.EscapedPath()) 67 | for _, e := range exts { 68 | if strings.HasSuffix(p, e) { 69 | return true 70 | } 71 | } 72 | 73 | return false 74 | }, 75 | 76 | // path bits 77 
| func(u *url.URL) bool { 78 | p := strings.ToLower(u.EscapedPath()) 79 | return strings.Contains(p, "ajax") || 80 | strings.Contains(p, "jsonp") || 81 | strings.Contains(p, "admin") || 82 | strings.Contains(p, "include") || 83 | strings.Contains(p, "src") || 84 | strings.Contains(p, "redirect") || 85 | strings.Contains(p, "proxy") || 86 | strings.Contains(p, "test") || 87 | strings.Contains(p, "tmp") || 88 | strings.Contains(p, "auth") || 89 | strings.Contains(p, "temp") 90 | }, 91 | 92 | // non-standard port 93 | func(u *url.URL) bool { 94 | return (u.Port() != "80" && u.Port() != "443" && u.Port() != "") 95 | }, 96 | } 97 | 98 | seen := make(map[string]bool) 99 | 100 | sc := bufio.NewScanner(os.Stdin) 101 | for sc.Scan() { 102 | 103 | u, err := url.Parse(sc.Text()) 104 | if err != nil { 105 | //fmt.Fprintf(os.Stderr, "failed to parse url %s [%s]\n", sc.Text(), err) 106 | continue 107 | } 108 | 109 | if isBoringStaticFile(u) { 110 | continue 111 | } 112 | 113 | // Go's maps aren't ordered, but we want to use all the param names 114 | // as part of the key to output only unique requests. To do that, put 115 | // them into a slice and then sort it. 
116 | pp := make([]string, 0) 117 | for p, _ := range u.Query() { 118 | pp = append(pp, p) 119 | } 120 | sort.Strings(pp) 121 | 122 | key := fmt.Sprintf("%s%s?%s", u.Hostname(), u.EscapedPath(), strings.Join(pp, "&")) 123 | 124 | // Only output each host + path + params combination once 125 | if _, exists := seen[key]; exists { 126 | continue 127 | } 128 | seen[key] = true 129 | 130 | interesting := 0 131 | 132 | for _, check := range checks { 133 | if check(u) { 134 | interesting++ 135 | } 136 | } 137 | 138 | if interesting > 0 { 139 | fmt.Println(sc.Text()) 140 | } 141 | 142 | } 143 | 144 | } 145 | 146 | // qsCheck looks a key=value pair from a query 147 | // string and returns true if it looks interesting 148 | func qsCheck(k, v string) bool { 149 | k = strings.ToLower(k) 150 | v = strings.ToLower(v) 151 | 152 | // the super-common utm_referrer etc 153 | // are rarely interesting 154 | if strings.HasPrefix(k, "utm_") { 155 | return false 156 | } 157 | 158 | // value checks 159 | return strings.HasPrefix(v, "http") || 160 | strings.Contains(v, "{") || 161 | strings.Contains(v, "[") || 162 | strings.Contains(v, "/") || 163 | strings.Contains(v, "\\") || 164 | strings.Contains(v, "<") || 165 | strings.Contains(v, "(") || 166 | // shoutout to liveoverflow ;) 167 | strings.Contains(v, "eyj") || 168 | 169 | // key checks 170 | strings.Contains(k, "redirect") || 171 | strings.Contains(k, "debug") || 172 | strings.Contains(k, "password") || 173 | strings.Contains(k, "passwd") || 174 | strings.Contains(k, "file") || 175 | strings.Contains(k, "fn") || 176 | strings.Contains(k, "template") || 177 | strings.Contains(k, "include") || 178 | strings.Contains(k, "require") || 179 | strings.Contains(k, "url") || 180 | strings.Contains(k, "uri") || 181 | strings.Contains(k, "src") || 182 | strings.Contains(k, "href") || 183 | strings.Contains(k, "func") || 184 | strings.Contains(k, "callback") 185 | } 186 | 187 | func isBoringStaticFile(u *url.URL) bool { 188 | exts := []string{ 
189 | // OK, so JS could be interesting, but 99% of the time it's boring. 190 | ".js", 191 | 192 | ".html", 193 | ".htm", 194 | ".svg", 195 | ".eot", 196 | ".ttf", 197 | ".woff", 198 | ".woff2", 199 | ".png", 200 | ".jpg", 201 | ".jpeg", 202 | ".gif", 203 | ".ico", 204 | } 205 | 206 | p := strings.ToLower(u.EscapedPath()) 207 | for _, e := range exts { 208 | if strings.HasSuffix(p, e) { 209 | return true 210 | } 211 | } 212 | 213 | return false 214 | } -------------------------------------------------------------------------------- /GOscripts/trace.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "os" 7 | ) 8 | 9 | func main() { 10 | counts := make(map[string]int) 11 | lineCountsInFiles := make(map[string]map[string]int) 12 | 13 | for _, filename := range os.Args[1:] { 14 | f, err := os.Open(filename) 15 | if err != nil { 16 | fmt.Fprintf(os.Stderr, "Problem reading %v: %v\n", filename, err) 17 | continue 18 | } 19 | input := bufio.NewScanner(f) 20 | for input.Scan() { 21 | line := input.Text() 22 | counts[line]++ 23 | if lineCountsInFiles[line] == nil { 24 | lineCountsInFiles[line] = make(map[string]int) 25 | } 26 | lineCountsInFiles[line][filename]++ 27 | } 28 | f.Close() 29 | } 30 | 31 | for line, n := range counts { 32 | if n > 1 { 33 | fmt.Printf("%v\n", line) 34 | } 35 | 36 | if n==1 { 37 | fmt.Printf("%v\n", line) 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HussH! 2 | a script which can be used for recon and exploitation purpose and is a combination of various tools . this tool works only on kali for oither distributions use docker 3 |
4 | **Install** 5 | 6 | ``` 7 | git clone https://github.com/harshnandwana/hussh 8 | ``` 9 | --- 10 | **Docker** 11 | ``` 12 | $ docker run -it -d --name hussh shubham9672/hussh 13 | ``` 14 | Now whenever you want to run your Docker container just use the command given below 15 | ``` 16 | $ docker exec -it hussh /bin/bash 17 | ``` 18 | Command for using hussh in docker 19 | ``` 20 | $ cd hussh 21 | $ ./hussh 22 | ``` 23 | **Usage** 24 | ``` 25 | $ chmod +x * 26 | $ ./hussh 27 | ``` 28 | ---- 29 | **Dependencies** 30 | along with all files present here use 31 | ``` 32 | $ sudo apt-get install sublist3r 33 | $ sudo apt-get install subfinder 34 | $ sudo apt-get install gospider 35 | $ sudo apt-get install golang 36 | ``` 37 | ---- 38 | 39 | **Output** 40 |
41 | To see the output move to /target/hussh-date/ and track for the file target.html and open it with your browser and you will se a page like this. 42 |
43 | ![web results](/screens/1.png) 44 |
45 | The process of work is shown here feel free to do changes and commit here. 46 |
47 | ![Process_dia](/screens/hussh.png) 48 |
49 |

HussH

50 | 51 | what this tool does. 52 | 1. it gathers subdomains using 53 | * sublist3r 54 | * subfinder 55 | append these data to a text file namely combined.txt in the folder /target/hussh-date/combined.txt and trim the duplicates 56 | 2. check for active subdomains 57 | * check all the active domains in the combined.txt using a tool httprobe by tomnomnom and then keep only https:// sources there as these are the ultimate target. 58 | * use a go script named go to find out the status code of all the domains this script is also the modified version of a script named _ by tomnomnom and then grep out the url having status code 200 and 302. append these data to a file named usefull.txt 59 | 3. way back machine 60 | * check for the url from usefull.txt in the wayback machine and write the output in /target/hussh-date/wb.txt 61 | * now using a file inturl which is also inspired from _ by tomnomnom it sorts out only interesting files from wb.txt 62 | 4. spider the subdomains 63 | * using gospider as my long companion to spider the subdomains and find all other subdomains that may be available there 64 | * now filtering out the useful data such as sites with url|robots|javascript|subdomains|forms using grep and appending them to a file /target/hussh-date/vulnd.txt 65 | 5. finding interesting files 66 | * appending usefull.txt and vulnd.txt to a file named vuln.txt and removing the duplicates. 67 | * again finding interesting files from vuln.txt 68 | 6. 
hunting or vulnerability 69 | * hunt for cors vulnerability in vulns.txt using cors script by tomnomnom 70 | 71 | please add other vulnerability 72 | and fork if using 73 | -------------------------------------------------------------------------------- /cors: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/cors -------------------------------------------------------------------------------- /go: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/go -------------------------------------------------------------------------------- /html.sh: -------------------------------------------------------------------------------- 1 | html(){ 2 | rm ./$1/$foldername/$1.html 3 | touch ./$1/$foldername/$1.html 4 | echo " report for $1 " >> ./$1/$foldername/$1.html 5 | echo "" >> ./$1/$foldername/$1.html 6 | echo "" >> ./$1/$foldername/$1.html 7 | echo "" >> ./$1/$foldername/$1.html 8 | echo "" >> ./$1/$foldername/$1.html 9 | echo " " >> ./$1/$foldername/$1.html 10 | echo "

Recon Report for $1

" >> ./$1/$foldername/$1.html 11 | echo "" 12 | echo "
" >> ./$1/$foldername/$1.html 13 | echo "
" >> ./$1/$foldername/$1.html 14 | echo "" >> ./$1/$foldername/$1.html 15 | echo "

see subdomains enumeration reports Here

" >> ./$1/$foldername/$1.html 16 | echo "
" >> ./$1/$foldername/$1.html 17 | echo "

" >> ./$1/$foldername/$1.html 18 | echo "

see active website here Here

" >> ./$1/$foldername/$1.html 19 | echo "

" >> ./$1/$foldername/$1.html 20 | echo "

" >> ./$1/$foldername/$1.html 21 | echo "

all usefull domains Here

" >> ./$1/$foldername/$1.html 22 | echo "

" >> ./$1/$foldername/$1.html 23 | echo "

" >> ./$1/$foldername/$1.html 24 | echo "

all domains with cors vulnerability Here

" >> ./$1/$foldername/$1.html 25 | echo "

" >> ./$1/$foldername/$1.html 26 | echo "

" >> ./$1/$foldername/$1.html 27 | echo "

postspider data Here

" >> ./$1/$foldername/$1.html 28 | echo "

" >> ./$1/$foldername/$1.html 29 | echo "

" >> ./$1/$foldername/$1.html 30 | echo "

all robots file Here

" >> ./$1/$foldername/$1.html 31 | echo "

" >> ./$1/$foldername/$1.html 32 | echo "

" >> ./$1/$foldername/$1.html 33 | echo "

URL files Here

" >> ./$1/$foldername/$1.html 34 | echo "

" >> ./$1/$foldername/$1.html 35 | echo "

" >> ./$1/$foldername/$1.html 36 | echo "

all javascript file Here

" >> ./$1/$foldername/$1.html 37 | echo "

" >> ./$1/$foldername/$1.html 38 | echo "

" >> ./$1/$foldername/$1.html 39 | echo "

all form files Here

" >> ./$1/$foldername/$1.html 40 | echo "

" >> ./$1/$foldername/$1.html 41 | echo "

" >> ./$1/$foldername/$1.html 42 | echo "

all subdomains internally listed files Here

" >> ./$1/$foldername/$1.html 43 | 44 | echo "

" >> ./$1/$foldername/$1.html 45 | 46 | 47 | 48 | } 49 | 50 | foldername=recon-$(date +"%Y-%m-%d") 51 | html $1 52 | 53 | -------------------------------------------------------------------------------- /httprobe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/httprobe -------------------------------------------------------------------------------- /hussh.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | logo(){ 4 | echo "$(tput setaf 2) 5 | _ _ _ _ _ 6 | | | | | _ _ ___ ___ | | | | | | 7 | | |_| | | | | | / __| / __| | |_| | | | 8 | | _ | | |_| | \__ \ \__ \ | _ | |_| 9 | |_| |_| \__,_| |___/ |___/ |_| |_| (_) 10 | 11 | $(tput sgr 0)" 12 | } 13 | 14 | domenum(){ 15 | subfinder -d $1 -o ./$1/$foldername/findf.txt> /dev/null #use subfinder for domain enum 16 | clear 17 | logo 18 | sublist3r -d $1 -o ./$1/$foldername/listf.txt> /dev/null #use sublist3r for domain enum > here is append 19 | clear 20 | logo 21 | cat ./$1/$foldername/findf.txt ./$1/$foldername/listf.txt > ./$1/$foldername/combined.txt #combine all domains 22 | ./trace ./$1/$foldername/combined.txt >> ./$1/$foldername/filtered.txt #remove duplicate to save resources 23 | } 24 | 25 | activedom(){ 26 | cat ./$1/$foldername/filtered.txt |./httprobe |tee -a ./$1/$foldername/httpf.txt #check which domains are with port 80 or 443 open 27 | cat ./$1/$foldername/httpf.txt |grep "https" |tee -a ./$1/$foldername/httpxf.txt 28 | sort ./$1/$foldername/httpxf.txt > ./$1/$foldername/httprobe.txt 29 | ./go ./$1/$foldername/httprobe.txt| tee -a ./$1/$foldername/statf.txt #used to detect status code 30 | cat ./$1/$foldername/statf.txt|grep "200" |awk '{print $4}'| tee -a ./$1/$foldername/usefull.txt #list all url with "200" to usefull.txt 31 | cat ./$1/$foldername/statf.txt|grep "302" |awk '{print $4}'| tee -a 
./$1/$foldername/usefull.txt #list all url with "302" to usefull.txt 32 | } 33 | waybackmachine(){ 34 | cat ./$1/$foldername/usefull.txt|./waybackurls >>./$1/$foldername/wb.txt 35 | cat ./$1/$foldername/wb.txt |./inturl >>./$1/$foldername/wbinterest.txt 36 | } 37 | spider(){ 38 | gospider -o ./$1/$foldername/gspoutput -S ./$1/$foldername/usefull.txt #spider all domains. 39 | cd ./$1/$foldername/gspoutput 40 | cat * >> all.txt 41 | cat all.txt |grep "url" |awk '{print $5}' | grep -P '[(?:https:\/\/|www\.|https:\/\/)]([^\/]+)' >> urls.txt 42 | cat all.txt |grep "robots" |awk '{print $3}' | grep -P '[(?:https:\/\/|www\.|https:\/\/)]([^\/]+)' >> robots.txt 43 | cat all.txt |grep "javascript" |awk '{print $3}' | grep -P '[(?:https:\/\/|www\.|https:\/\/)]([^\/]+)' >> javas.txt 44 | cat all.txt |grep "subdomains" |awk '{print $3}' | grep -P '[^(?:https:\/\/|www\.|https:\/\/)]([^\/]+)' >> subd.txt 45 | cat all.txt |grep "form" |awk '{print $3}' | grep -P '[(?:https:\/\/|www\.|https:\/\/)]([^\/]+)' >> forms.txt 46 | cat urls.txt robots.txt javas.txt subd.txt forms.txt >>vulnd.txt 47 | #rm all.txt 48 | cd ../../.. 49 | } 50 | interesting(){ 51 | pwd 52 | cat ./$1/$foldername/usefull.txt ./$1/$foldername/gspoutput/vulnd.txt >>./$1/$foldername/vulna.txt 53 | ./trace ./$1/$foldername/vulna.txt >> ./$1/$foldername/vulns.txt 54 | cat ./$1/$foldername/vulns.txt |./inturl >>./$1/$foldername/interes.txt 55 | } 56 | 57 | vuln(){ 58 | 59 | cat ./$1/$foldername/vulns.txt| ./cors |tee -a ./$1/$foldername/cors.txt # check for cors 60 | 61 | } 62 | 63 | 64 | html(){ 65 | touch ./$1/$foldername/$1.html 66 | echo " HussH! " >> ./$1/$foldername/$1.html 67 | echo "" >> ./$1/$foldername/$1.html 68 | echo "" >> ./$1/$foldername/$1.html 69 | echo "" >> ./$1/$foldername/$1.html 70 | echo "" >> ./$1/$foldername/$1.html 71 | echo " " >> ./$1/$foldername/$1.html 72 | echo "

Recon Report for $1

" >> ./$1/$foldername/$1.html 73 | echo "" 74 | echo "
" >> ./$1/$foldername/$1.html 75 | echo "
" >> ./$1/$foldername/$1.html 76 | echo "" >> ./$1/$foldername/$1.html 77 | echo "

see subdomains enumeration reports Here

" >> ./$1/$foldername/$1.html 78 | echo "
" >> ./$1/$foldername/$1.html 79 | echo "

" >> ./$1/$foldername/$1.html 80 | echo "

see active website here Here

" >> ./$1/$foldername/$1.html 81 | echo "

" >> ./$1/$foldername/$1.html 82 | echo "

" >> ./$1/$foldername/$1.html 83 | echo "

all usefull domains Here

" >> ./$1/$foldername/$1.html 84 | echo "

" >> ./$1/$foldername/$1.html 85 | echo "

" >> ./$1/$foldername/$1.html 86 | echo "

all domains with cors vulnerability Here

" >> ./$1/$foldername/$1.html 87 | echo "

" >> ./$1/$foldername/$1.html 88 | echo "

" >> ./$1/$foldername/$1.html 89 | echo "

all interesting wayback domainsHere

" >> ./$1/$foldername/$1.html 90 | echo "

" >> ./$1/$foldername/$1.html 91 | echo "

" >> ./$1/$foldername/$1.html 92 | echo "

postspider data Here

" >> ./$1/$foldername/$1.html 93 | echo "

" >> ./$1/$foldername/$1.html 94 | echo "

" >> ./$1/$foldername/$1.html 95 | echo "

all robots file Here

" >> ./$1/$foldername/$1.html 96 | echo "

" >> ./$1/$foldername/$1.html 97 | echo "

" >> ./$1/$foldername/$1.html 98 | echo "

URL files Here

" >> ./$1/$foldername/$1.html 99 | echo "

" >> ./$1/$foldername/$1.html 100 | echo "

" >> ./$1/$foldername/$1.html 101 | echo "

all javascript file Here

" >> ./$1/$foldername/$1.html 102 | echo "

" >> ./$1/$foldername/$1.html 103 | echo "

" >> ./$1/$foldername/$1.html 104 | echo "

all form files Here

" >> ./$1/$foldername/$1.html 105 | echo "

" >> ./$1/$foldername/$1.html 106 | echo "

" >> ./$1/$foldername/$1.html 107 | echo "

all subdomains internally listed files Here

" >> ./$1/$foldername/$1.html 108 | echo "

" >> ./$1/$foldername/$1.html 109 | echo "

" >> ./$1/$foldername/$1.html 110 | echo "

all interesting domans/subdomains Here

" >> ./$1/$foldername/$1.html 111 | echo "

" >> ./$1/$foldername/$1.html 112 | 113 | 114 | 115 | } 116 | 117 | main(){ 118 | clear 119 | logo 120 | if [ -d "./$1" ] 121 | then 122 | echo "This is a known target." 123 | else 124 | mkdir ./$1 125 | fi 126 | mkdir ./$1/$foldername/ 127 | 128 | domenum $1 129 | clear 130 | logo 131 | activedom $1 132 | clear 133 | logo 134 | waybackmachine $1 135 | spider $1 136 | clear 137 | logo 138 | vuln $1 139 | clear 140 | logo 141 | interesting $1 142 | #clear 143 | #logo 144 | html $1 145 | 146 | } 147 | 148 | logo 149 | 150 | if [ ! "$1" ]; then 151 | echo "enter domain" 152 | echo "Usage $ ./hussh " 153 | fi 154 | foldername=hussh-$(date +"%Y-%m-%d") 155 | main $1 -------------------------------------------------------------------------------- /inturl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/inturl -------------------------------------------------------------------------------- /screens/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/screens/1.png -------------------------------------------------------------------------------- /screens/hussh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/screens/hussh.png -------------------------------------------------------------------------------- /trace: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/trace -------------------------------------------------------------------------------- /waybackurls: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/harshnandwana/hussh/165a48d82e38573fcd2fd360767595cc2998c8f2/waybackurls --------------------------------------------------------------------------------