├── .github
│   └── workflows
│       └── semgrep.yml
├── .gitignore
├── README.md
├── build.sh
├── const.go
├── former.go
├── go.mod
├── go.sum
├── loot.go
├── lootdb.json
├── main.go
├── process.go
├── regexes.json
├── utils.go
├── worker.go
└── writer.go

/.github/workflows/semgrep.yml:
--------------------------------------------------------------------------------
 1 | on:
 2 |   workflow_dispatch: {}
 3 |   pull_request: {}
 4 |   push:
 5 |     branches:
 6 |       - main
 7 |       - master
 8 |     paths:
 9 |       - .github/workflows/semgrep.yml
10 |   schedule:
11 |     # random HH:MM to avoid a load spike on GitHub Actions at 00:00
12 |     - cron: 9 6 * * *
13 | name: Semgrep
14 | jobs:
15 |   semgrep:
16 |     name: semgrep/ci
17 |     runs-on: ubuntu-20.04
18 |     env:
19 |       SEMGREP_APP_TOKEN: ${{ secrets.SEMGREP_APP_TOKEN }}
20 |     container:
21 |       image: returntocorp/semgrep
22 |     steps:
23 |       - uses: actions/checkout@v4
24 |       - run: semgrep ci
25 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
 1 | *.exe
 2 | *.exe~
 3 | *.dll
 4 | *.so
 5 | *.dylib
 6 | 
 7 | # Test binary, built with `go test -c`
 8 | *.test
 9 | 
10 | # Output of the go coverage tool, specifically when used with LiteIDE
11 | *.out
12 | 
13 | # Dependency directories (remove the comment below to include it)
14 | # vendor/
15 | 
16 | # Go workspace file
17 | go.work
18 | 
19 | # Custom
20 | httploot-*
21 | .colly_cache/
22 | result.json
23 | httploot-results.csv
24 | checksums.txt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
 1 | # HTTPLoot
 2 | An automated tool that can simultaneously crawl, fill forms, trigger error/debug pages, and "loot" secrets out of the client-facing code of sites.
 3 | 
 4 | ## Usage
 5 | To use the tool, you can grab any one of the pre-built binaries from the [Releases](https://github.com/redhuntlabs/HTTPLoot/releases) section of the repository. If you want to build the source code yourself, you will need Go 1.17 or later (as declared in `go.mod`). Simply running `go build` will output a usable binary for you.
 6 | 
 7 | Additionally, you will need two JSON files ([lootdb.json](https://github.com/redhuntlabs/HTTPLoot/blob/master/lootdb.json) and [regexes.json](https://github.com/redhuntlabs/HTTPLoot/blob/master/regexes.json)) along with the binary, both of which you can get from the repo itself. Once you have all 3 files in the same folder, you can go ahead and fire up the tool.
 8 | 
 9 | Video demo:
10 | 
11 | [![video](https://user-images.githubusercontent.com/39941993/168653593-9551b6be-0eb7-4fa8-85ee-0de8e4506fe6.png)](https://www.youtube.com/watch?v=qc8Mm2O5t6Q)
12 | 
13 | Here is the help output of the tool:
14 | ```s
15 | $ ./httploot --help
16 |      _____
17 |       )=(
18 |      /   \  H T T P L O O T
19 |     ( $   )         v0.1
20 |      \___/
21 | 
22 | [+] HTTPLoot by RedHunt Labs - A Modern Attack Surface (ASM) Management Company
23 | [+] Author: Pinaki Mondal (RHL Research Team)
24 | [+] Continuously Track Your Attack Surface using https://redhuntlabs.com/nvadr.
25 | 
26 | Usage of ./httploot:
27 |   -concurrency int
28 |         Maximum number of sites to process concurrently (default 100)
29 |   -depth int
30 |         Maximum depth limit to traverse while crawling (default 3)
31 |   -form-length int
32 |         Length of the string to be randomly generated for filling form fields (default 5)
33 |   -form-string string
34 |         Value with which the tool will auto-fill forms, strings will be randomly generated if no value is supplied
35 |   -input-file string
36 |         Path of the input file containing domains to process
37 |   -output-file string
38 |         CSV output file path to write the results to (default "httploot-results.csv")
39 |   -parallelism int
40 |         Number of URLs per site to crawl in parallel (default 15)
41 |   -submit-forms
42 |         Whether to auto-submit forms to trigger debug pages
43 |   -timeout int
44 |         The default timeout for HTTP requests (default 10)
45 |   -user-agent string
46 |         User agent to use during HTTP requests (default "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:98.0) Gecko/20100101 Firefox/98.0")
47 |   -verify-ssl
48 |         Verify SSL certificates while making HTTP requests
49 |   -wildcard-crawl
50 |         Allow crawling of links outside of the domain being scanned
51 | ```
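For instance, a quick scan of a single site with mostly default settings might look like the following (the targets shown are purely illustrative):

```s
$ ./httploot https://example.com
$ ./httploot -input-file targets.txt -output-file loot.csv -concurrency 50
```

Results land in the CSV file specified by `-output-file` (by default `httploot-results.csv`).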
52 | 
53 | ### Concurrent scanning
54 | There are two flags which help with concurrent scanning:
55 | - `-concurrency`: Specifies the maximum number of sites to process concurrently.
56 | - `-parallelism`: Specifies the number of links per site to crawl in parallel.
57 | 
58 | Both `-concurrency` and `-parallelism` are crucial to the performance and reliability of the tool's results.
59 | 
60 | ### Crawling
61 | The crawl depth can be specified using the `-depth` flag. The integer value supplied is the maximum depth of link chains the crawler will follow from a site's starting page.
62 | 
63 | An important flag, `-wildcard-crawl`, can be used to specify whether to crawl URLs outside the domain in scope.
64 | 
65 | > __NOTE__: Using this flag might lead to infinite crawling in worst-case scenarios if the crawler continuously finds links to other domains.
66 | 
67 | ### Filling forms
68 | If you want the tool to scan for debug pages, you need to specify the `-submit-forms` argument. This will direct the tool to auto-submit forms and try to trigger error/debug pages _once a tech stack has been identified successfully_.
69 | 
70 | If the `-submit-forms` flag is enabled, you can control the string to be submitted in the form fields. The `-form-string` flag specifies the string to be submitted, while `-form-length` controls the length of the randomly generated string that will be filled into the forms. An example invocation is shown below.
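As an illustration, a form-fuzzing run against a single staging host could be launched like this (the domain is a placeholder; `httpl00t` mirrors the tool's built-in marker string from const.go):

```s
$ ./httploot -submit-forms -form-string "httpl00t" -depth 2 https://staging.example.com
```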
71 | 
72 | ### Network tuning
73 | A few flags help tune network behaviour:
74 | - `-timeout` - specifies the HTTP timeout of requests.
75 | - `-user-agent` - specifies the user-agent to use in HTTP requests.
76 | - `-verify-ssl` - specifies whether or not to verify SSL certificates.
77 | 
78 | ### Input/Output
79 | The input file to read can be specified using the `-input-file` argument. You can specify a file path containing a list of URLs to scan with the tool. The `-output-file` flag can be used to specify the result output file path -- which by default goes into a file called `httploot-results.csv`.
80 | 
81 | ## Further Details
82 | Further details about the research which led to the development of the tool can be found on our [RedHunt Labs Blog](https://redhuntlabs.com/blog/the-http-facet-httploot.html).
83 | 
84 | ## License & Version
85 | The tool is licensed under the MIT license. See LICENSE.
86 | 
87 | Currently the tool is at v0.1.
88 | 
89 | ## Credits
90 | The RedHunt Labs Research Team would like to extend credit to the creators & maintainers of [shhgit](https://github.com/eth0izzle/shhgit) for the regular expressions provided in their repository.
91 | 
92 | **[`To know more about our Attack Surface Management platform, check out NVADR.`](https://redhuntlabs.com/nvadr)**
93 | 
--------------------------------------------------------------------------------
/build.sh:
--------------------------------------------------------------------------------
1 | export CGO_ENABLED=0
2 | GOOS=linux GOARCH=amd64 go build -o httploot-linux64
3 | GOOS=darwin GOARCH=amd64 go build -o httploot-darwin64
4 | GOOS=windows GOARCH=amd64 go build -o httploot-windows64.exe
5 | GOOS=windows GOARCH=386 go build -o httploot-windows32.exe
6 | GOOS=freebsd GOARCH=amd64 go build -o httploot-freebsd64
7 | GOOS=openbsd GOARCH=amd64 go build -o httploot-openbsd64
8 | shasum -a 256 httploot-* > checksums.txt
--------------------------------------------------------------------------------
/const.go:
--------------------------------------------------------------------------------
 1 | package main
 2 | 
 3 | import "regexp"
 4 | 
 5 | type (
 6 | 	FoundIssue struct {
 7 | 		Issue  string `json:"issue"`
 8 | 		Path   string `json:"path"`
 9 | 		Type   string `json:"type"`
10 | 		Secret string `json:"secret"`
11 | 	}
12 | 	DBData struct {
13 | 		Issue      string   `json:"issue"`
14 | 		Severity   string   `json:"severity"`
15 | 		Detectors  []string `json:"detectors"`
16 | 		Validators struct {
17 | 			Status []int    `json:"status"`
18 | 			Regex  []string `json:"regex"`
19 | 		} `json:"validators"`
20 | 		Extractors []struct {
21 | 			Regex   string `json:"regex"`
22 | 			Cgroups string `json:"cgroups"`
23 | 		} `json:"extractors"`
24 | 	}
25 | )
26 | 
27 | var (
28 | 	httpTimeout, baseFormLen               int
29 | 	maxWorkers, concurrentURLs, crawlDepth int
30 | 	wildcardCrawl, submitForm, verifySSL   bool
31 | 	inpFile, userAgent, formString, outCsv string
32 | 
33 | 	dbData    map[string]DBData
34 | 	regexData map[string]string
35 | 
36 | 	crawlProgress = 0
37 | 	reJSScript    = regexp.MustCompile(`(?i)<script[^>]+src=['"]?([^'"\s>]+)`)
38 | 
39 | 	FORM_STRING = "httpl00t"
40 | 	VERSION     = "0.1"
41 | 	DATAFILE    = "lootdb.json"
42 | 	OUTCSV      = "httploot-results.csv"
43 | 	REGEXFILE   = "regexes.json"
44 | 	USERAGENT   = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:98.0) Gecko/20100101 Firefox/98.0"
45 | 	BYTES       = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
46 | 	LACKOFART   = `
47 |      _____
48 |       )=(
49 |      /   \  H T T P L O O T
50 |     ( $   )         v%s
51 |      \___/
52 | 
53 | [+] HTTPLoot by RedHunt Labs - A Modern Attack Surface (ASM) Management Company
54 | [+] Author: Pinaki Mondal (RHL Research Team)
55 | [+] Continuously Track Your Attack Surface using https://redhuntlabs.com/nvadr.
56 | `
57 | 	// MAXCRAWLVAL int
58 | )
--------------------------------------------------------------------------------
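The `DBData` type above is the in-memory shape of a `lootdb.json` record: detector regexes for fingerprinting, response validators, and capture-group extractors. As a minimal sketch of that mapping, the snippet below unmarshals a trimmed, fabricated record; the `Entry` struct and the JSON here are illustrative, not part of the tool. `serializeDB` in utils.go performs the same unmarshalling of the real file into `map[string]DBData`.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Entry mirrors the shape of a lootdb.json record (a trimmed copy of DBData).
type Entry struct {
	Issue      string   `json:"issue"`
	Severity   string   `json:"severity"`
	Detectors  []string `json:"detectors"`
	Validators struct {
		Status []int    `json:"status"`
		Regex  []string `json:"regex"`
	} `json:"validators"`
}

func main() {
	raw := []byte(`{"Demo":{"issue":"Debug Mode Enabled","severity":"high",
	  "detectors":["(?i)x-powered-by: demo"],
	  "validators":{"status":[500],"regex":["stack trace"]}}}`)
	var db map[string]Entry
	if err := json.Unmarshal(raw, &db); err != nil {
		panic(err)
	}
	fmt.Println(db["Demo"].Issue, db["Demo"].Validators.Status) // Debug Mode Enabled [500]
}
```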
/former.go:
--------------------------------------------------------------------------------
 1 | package main
 2 | 
 3 | import (
 4 | 	"math/rand"
 5 | 	"net/url"
 6 | 	"strings"
 7 | 
 8 | 	"github.com/PuerkitoBio/goquery"
 9 | 	"golang.org/x/net/html"
10 | )
11 | 
12 | type htmlForm struct {
13 | 	Action string
14 | 	Method string
15 | 	Values url.Values
16 | }
17 | 
18 | // parseForms collects every <form> in a parsed HTML document along with the
19 | // pre-set values of its inputs and textareas.
20 | func parseForms(node *html.Node) (forms []htmlForm) {
21 | 	if node == nil {
22 | 		return nil
23 | 	}
24 | 
25 | 	doc := goquery.NewDocumentFromNode(node)
26 | 	doc.Find("form").Each(func(_ int, s *goquery.Selection) {
27 | 		form := htmlForm{Values: url.Values{}}
28 | 		form.Action, _ = s.Attr("action")
29 | 		form.Method, _ = s.Attr("method")
30 | 
31 | 		s.Find("input").Each(func(_ int, s *goquery.Selection) {
32 | 			name, _ := s.Attr("name")
33 | 			if name == "" {
34 | 				return
35 | 			}
36 | 
37 | 			typ, _ := s.Attr("type")
38 | 			typ = strings.ToLower(typ)
39 | 			_, checked := s.Attr("checked")
40 | 			if (typ == "radio" || typ == "checkbox") && !checked {
41 | 				return
42 | 			}
43 | 
44 | 			value, _ := s.Attr("value")
45 | 			form.Values.Add(name, value)
46 | 		})
47 | 		s.Find("textarea").Each(func(_ int, s *goquery.Selection) {
48 | 			name, _ := s.Attr("name")
49 | 			if name == "" {
50 | 				return
51 | 			}
52 | 
53 | 			value := s.Text()
54 | 			form.Values.Add(name, value)
55 | 		})
56 | 		forms = append(forms, form)
57 | 	})
58 | 
59 | 	return forms
60 | }
61 | 
62 | // randStringGen returns a random alphabetic string of length n, used to fill
63 | // form fields when no -form-string value is supplied.
64 | func randStringGen(n int) string {
65 | 	b := make([]byte, n)
66 | 	for i := range b {
67 | 		b[i] = BYTES[rand.Intn(len(BYTES))]
68 | 	}
69 | 	return string(b)
70 | }
71 | 
72 | // setValues maps every field of a form to a freshly generated random string.
73 | func setValues(form *url.Values) map[string]string {
74 | 	dcombo := make(map[string]string)
75 | 	for key := range *form {
76 | 		dcombo[key] = randStringGen(baseFormLen)
77 | 	}
78 | 	return dcombo
79 | }
--------------------------------------------------------------------------------
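To see the helpers above in action, here is a hypothetical driver (not part of the tool) that parses a static page instead of a crawled response. It assumes it sits in the same package as former.go; `exampleFormFill` is an invented name.

```go
package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// exampleFormFill demonstrates parseForms/setValues on a canned page.
func exampleFormFill() {
	baseFormLen = 5 // normally set via the -form-length flag
	page := `<form action="/login" method="post">
	  <input type="text" name="username">
	  <input type="password" name="password">
	</form>`
	root, err := html.Parse(strings.NewReader(page))
	if err != nil {
		panic(err)
	}
	for _, form := range parseForms(root) {
		fmt.Println(form.Method, form.Action) // post /login
		fmt.Println(setValues(&form.Values))  // e.g. map[password:QwErT username:aBcDe]
	}
}
```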
/go.mod:
--------------------------------------------------------------------------------
 1 | module httploot
 2 | 
 3 | go 1.17
 4 | 
 5 | require github.com/gocolly/colly v1.2.0
 6 | 
 7 | require (
 8 | 	github.com/PuerkitoBio/goquery v1.8.0
 9 | 	github.com/andybalholm/cascadia v1.3.1 // indirect
10 | 	github.com/antchfx/htmlquery v1.2.4 // indirect
11 | 	github.com/antchfx/xmlquery v1.3.9 // indirect
12 | 	github.com/antchfx/xpath v1.2.0 // indirect
13 | 	github.com/gobwas/glob v0.2.3 // indirect
14 | 	github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
15 | 	github.com/golang/protobuf v1.3.1 // indirect
16 | 	github.com/kennygrant/sanitize v1.2.4 // indirect
17 | 	github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect
18 | 	github.com/temoto/robotstxt v1.1.2 // indirect
19 | 	golang.org/x/net v0.0.0-20220225172249-27dd8689420f
20 | 	golang.org/x/text v0.3.7 // indirect
21 | 	google.golang.org/appengine v1.6.7 // indirect
22 | )
--------------------------------------------------------------------------------
/go.sum:
--------------------------------------------------------------------------------
 1 | github.com/PuerkitoBio/goquery v1.8.0 h1:PJTF7AmFCFKk1N6V6jmKfrNH9tV5pNE6lZMkG0gta/U=
 2 | github.com/PuerkitoBio/goquery v1.8.0/go.mod h1:ypIiRMtY7COPGk+I/YbZLbxsxn9g5ejnI2HSMtkjZvI=
 3 | github.com/andybalholm/cascadia v1.3.1 h1:nhxRkql1kdYCc8Snf7D5/D3spOX+dBgjA6u8x004T2c=
 4 | github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
 5 | github.com/antchfx/htmlquery v1.2.4 h1:qLteofCMe/KGovBI6SQgmou2QNyedFUW+pE+BpeZ494=
 6 | github.com/antchfx/htmlquery v1.2.4/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc=
 7 | github.com/antchfx/xmlquery v1.3.9 h1:Y+zyMdiUZ4fasTQTkDb3DflOXP7+obcYEh80SISBmnQ=
 8 | github.com/antchfx/xmlquery v1.3.9/go.mod h1:wojC/BxjEkjJt6dPiAqUzoXO5nIMWtxHS8PD8TmN4ks=
 9 | github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8=
10 | github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
11 | github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
12 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
13 | github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
14 | github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
15 | github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI=
16 | github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA=
17 | github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
18 | github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
19 | github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
20 | github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
21 | github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o=
22 | github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak=
23 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
24 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
25 | github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
26 | github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
27 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
28 | github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
29 | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
30 | github.com/temoto/robotstxt v1.1.2 h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg=
31 | github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo=
32 | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
33 | golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
34 | golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
35 | golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
36 | golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
37 | golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
38 | golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
39 | golang.org/x/net v0.0.0-20220225172249-27dd8689420f h1:oA4XRj0qtSt8Yo1Zms0CUlsT3KG69V2UGQWPBxujDmc=
40 | golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
41 | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
42 | golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
43 | golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
44 | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
45 | golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
46 | golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
47 | golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
48 | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
49 | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
50 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
51 | golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
52 | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
53 | golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
54 | golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
55 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
56 | google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
57 | google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
--------------------------------------------------------------------------------
Exiting...") 15 | } 16 | } 17 | for typerex, regexstr := range regexData { 18 | demoRex := regexp.MustCompile(regexstr) 19 | x := demoRex.FindAllSubmatch(*content, -1) 20 | if len(x) > 0 { 21 | for _, y := range x { 22 | // fmt.Print("\n") 23 | // log.Printf("Secrets found -> Type: %s | Secret: %s", typerex, string(y[0])) 24 | writer.Write([]string{typerex + " Exposed", origPath, secPath, string(y[0])}) 25 | } 26 | } 27 | } 28 | } 29 | 30 | func getJavascript(path url.URL, mbody *[]byte, wg *sync.WaitGroup) { 31 | defer wg.Done() 32 | xscript := reJSScript.FindAllSubmatch(*mbody, -1) 33 | if len(xscript) < 1 { 34 | return 35 | } 36 | for _, script := range xscript { 37 | jsscript := string(script[1]) 38 | murl, err := url.Parse(jsscript) 39 | if err != nil { 40 | log.Println(err.Error()) 41 | continue 42 | } 43 | if !strings.Contains(jsscript, "://") { 44 | murl = path.ResolveReference(murl) 45 | } 46 | _, body, err := makeRequest(murl.String()) 47 | if err != nil { 48 | log.Println(err.Error()) 49 | continue 50 | } 51 | findSecrets(path.String(), murl.String(), body) 52 | } 53 | } 54 | 55 | func executeLoot(domainstacks, path string, status int, body *[]byte) { 56 | stacks := strings.Split(strings.Split(domainstacks, ":::")[1], ":") 57 | validated := false 58 | for _, stack := range stacks { 59 | for dbstack, sigs := range dbData { 60 | if dbstack != stack { 61 | continue 62 | } 63 | // initial validation of error/stack trace 64 | for _, krex := range sigs.Validators.Regex { 65 | validatorRex := regexp.MustCompile(krex) 66 | if validateStatusCode(status, sigs.Validators.Status) && 67 | validatorRex.Match(*body) { 68 | validated = true 69 | } 70 | } 71 | if validated { 72 | for _, kext := range sigs.Extractors { 73 | mstr := "" 74 | extractRex := regexp.MustCompile(kext.Regex) 75 | jkrex := extractRex.FindAllStringSubmatch(string(*body), -1) 76 | if jkrex != nil { 77 | for _, kl := range jkrex { 78 | mstr += strings.Join(kl[1:], " : ") + "\\n" 79 | } 80 | writer.Write([]string{kext.Cgroups + " Exposed", path, path, strings.TrimSpace(mstr), strings.Join(stacks, ":")}) 81 | } 82 | } 83 | } 84 | } 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /lootdb.json: -------------------------------------------------------------------------------- 1 | { 2 | "Django": { 3 | "issue": "Django Debug Mode Enabled Exposing Internal File Paths", 4 | "severity": "high", 5 | "detectors": [ 6 | "(?:powered by ]+>Django ?([\\d.]+)?<\\/a>|]*name=[\"']csrfmiddlewaretoken[\"'][^>]*>)", 7 | "(?s)You're seeing this error because you have <.{1,6}>DEBUG\\s=\\sTrue<\\/.{1,6}> in\\s*your Django settings file\\.", 8 | "(?s)Django\\s*will display a standard 404 page\\." 9 | ], 10 | "validators": { 11 | "status": [ 12 | 404 13 | ], 14 | "regex": [ 15 | "(?:Django tried these URL patterns|your Django settings file|empty path didn't match any of these)" 16 | ] 17 | }, 18 | "extractors": [ 19 | { 20 | "regex": "(?s)
  • \\s*([^\\s<]+)\\s*
  • ", 21 | "cgroups": "Django Internal Paths" 22 | } 23 | ] 24 | }, 25 | "Laravel": { 26 | "issue": "Laravel Debug Mode Enabled Exposing Secrets On Error Page", 27 | "severity": "critical", 28 | "detectors": [ 29 | "(?i)set-cookie: .*;?\\s?laravel_session=" 30 | ], 31 | "validators": { 32 | "status": [ 33 | 500, 34 | 501, 35 | 502, 36 | 503 37 | ], 38 | "regex": [ 39 | "(?:Environment &[a-z]{3}. details:|DB_DATABASE|DB_PASSWORD)" 40 | ] 41 | }, 42 | "extractors": [ 43 | { 44 | "regex": "(?s)[\\s]+([^>]+?).*?(.*?)", 45 | "cgroups": "Laravel Environment Variables" 46 | } 47 | ] 48 | } 49 | } -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "flag" 6 | "fmt" 7 | "log" 8 | "os" 9 | "os/signal" 10 | "time" 11 | ) 12 | 13 | func main() { 14 | fmt.Println(fmt.Sprintf(LACKOFART, VERSION)) 15 | flag.IntVar(&httpTimeout, "timeout", 10, "The default timeout for HTTP requests") 16 | flag.IntVar(&baseFormLen, "form-length", 5, "Length of the string to be randomly generated for filling form fields") 17 | flag.BoolVar(&submitForm, "submit-forms", false, "Whether to auto-submit forms to trigger debug pages") 18 | flag.IntVar(&maxWorkers, "concurrency", 100, "Maximum number of sites to process concurrently") 19 | flag.IntVar(&concurrentURLs, "parallelism", 15, "Number of URLs per site to crawl parallely") 20 | flag.BoolVar(&wildcardCrawl, "wildcard-crawl", false, "Allow crawling of links outside of the domain being scanned") 21 | flag.BoolVar(&verifySSL, "verify-ssl", false, "Verify SSL certificates while making HTTP requests") 22 | flag.IntVar(&crawlDepth, "depth", 3, "Maximum depth limit to traverse while crawling") 23 | flag.StringVar(&formString, "form-string", "", "Value with which the tool will auto-fill forms, strings will be randomly generated if no value is supplied") 24 | // flag.IntVar(&MAXCRAWLVAL, "max-crawl", 1000, "Maximum number of links to traverse per site") 25 | flag.StringVar(&outCsv, "output-file", "httploot-results.csv", "CSV output file path to write the results to") 26 | flag.StringVar(&userAgent, "user-agent", "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:98.0) Gecko/20100101 Firefox/98.0", "User agent string to use during HTTP requests") 27 | flag.StringVar(&inpFile, "input-file", "", "Path of the input file containing domains to process") 28 | 29 | flag.Parse() 30 | args := flag.Args() 31 | if len(args) < 1 && len(inpFile) < 1 { 32 | log.Fatalln("You need to supply at least a target for the tool to work!") 33 | } 34 | 35 | _, cancel := context.WithCancel(context.Background()) 36 | 37 | writer.Write([]string{"key", "asset", "secret_url", "secret", "stack"}) 38 | if len(dbData) < 1 { 39 | if err := serializeDB(DATAFILE); err != nil { 40 | log.Fatalln("Cannot serialize database. 
exiting...", err.Error()) 41 | } 42 | } 43 | 44 | c := make(chan os.Signal, 1) 45 | signal.Notify(c, os.Interrupt) 46 | go handleInterrupt(c, &cancel) 47 | 48 | tnoe := time.Now() 49 | log.Println("Starting scan at:", tnoe.Local().String()) 50 | go ProcessHosts(args) 51 | 52 | InitDispatcher(maxWorkers) 53 | dnoe := time.Now() 54 | log.Println("Scan finished at:", dnoe.Local().String()) 55 | log.Println("Total time taken:", time.Since(tnoe).String()) 56 | writer.Flush() 57 | } 58 | -------------------------------------------------------------------------------- /process.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bytes" 5 | "crypto/tls" 6 | "fmt" 7 | "log" 8 | "net/http" 9 | "net/url" 10 | "regexp" 11 | "strings" 12 | "sync" 13 | "time" 14 | 15 | "github.com/gocolly/colly" 16 | "golang.org/x/net/html" 17 | ) 18 | 19 | func (p *ProcJob) FingerPrint() { 20 | log.Println("Trying to identify tech stack:", p.Host) 21 | heads, body, err := makeRequest(p.Host) 22 | if err != nil { 23 | log.Println("Error during tech stack detection:", err.Error()) 24 | return 25 | } 26 | fresp := heads + "\n\n" + string(*body) 27 | outBreak: 28 | for tech, ddata := range dbData { 29 | for _, drex := range ddata.Detectors { 30 | rex := regexp.MustCompile(drex) 31 | if rex.MatchString(fresp) { 32 | log.Printf("Identified tech stack for %s: %s", p.Host, tech) 33 | p.Host = p.Host + ":::" + tech 34 | break outBreak 35 | } 36 | } 37 | } 38 | } 39 | 40 | func (p *ProcJob) ExecuteCrawler() { 41 | hastechs := false 42 | mainurl := p.Host 43 | 44 | if strings.Contains(p.Host, ":::") { 45 | hastechs = true 46 | mainurl = strings.Split(p.Host, ":::")[0] 47 | } 48 | log.Println("Processing:", p.Host) 49 | 50 | if !strings.Contains(p.Host, "://") { 51 | mainurl = "http://" + p.Host 52 | } else { 53 | p.Host = strings.Split(p.Host, "://")[1] 54 | } 55 | 56 | var c *colly.Collector 57 | if !wildcardCrawl { 58 | c = colly.NewCollector( 59 | colly.AllowedDomains(strings.Split(mainurl, "://")[1]), 60 | colly.UserAgent(USERAGENT), 61 | colly.ParseHTTPErrorResponse(), 62 | colly.MaxDepth(crawlDepth), 63 | colly.CacheDir(".colly_cache/"), 64 | ) 65 | } else { 66 | c = colly.NewCollector( 67 | colly.UserAgent(USERAGENT), 68 | colly.ParseHTTPErrorResponse(), 69 | colly.MaxDepth(crawlDepth), 70 | colly.CacheDir(".colly_cache/"), 71 | ) 72 | } 73 | 74 | c.WithTransport(&http.Transport{ 75 | TLSClientConfig: &tls.Config{ 76 | InsecureSkipVerify: !verifySSL, 77 | }, 78 | DisableKeepAlives: true, // we disable keep alive targets 79 | }) 80 | c.SetRequestTimeout(time.Duration(httpTimeout) * time.Second) 81 | c.Limit(&colly.LimitRule{ 82 | Parallelism: concurrentURLs, 83 | DomainGlob: "*", 84 | }) 85 | 86 | c.OnHTML("a[href]", func(e *colly.HTMLElement) { 87 | link := e.Attr("href") 88 | c.Visit(e.Request.AbsoluteURL(link)) 89 | }) 90 | 91 | c.OnResponse(func(r *colly.Response) { 92 | crawlProgress++ 93 | /* 94 | if crawlProgress > MAXCRAWLVAL { 95 | log.Println("Stopping crawling due to max URLs visit exceeded!") 96 | return 97 | } 98 | */ 99 | thisTime := time.Now() 100 | fmt.Printf("\r%d/%02d/%02d %02d:%02d:%02d Total processed: %d | Current: %s", 101 | thisTime.Year(), thisTime.Month(), thisTime.Day(), thisTime.Hour(), 102 | thisTime.Minute(), thisTime.Second(), crawlProgress, r.Request.URL.String()) 103 | 104 | wg := new(sync.WaitGroup) 105 | // start finding secrets on the html sources 106 | findSecrets(mainurl, r.Request.URL.String(), &r.Body) 107 | // 
/process.go:
--------------------------------------------------------------------------------
  1 | package main
  2 | 
  3 | import (
  4 | 	"bytes"
  5 | 	"crypto/tls"
  6 | 	"fmt"
  7 | 	"log"
  8 | 	"net/http"
  9 | 	"net/url"
 10 | 	"regexp"
 11 | 	"strings"
 12 | 	"sync"
 13 | 	"time"
 14 | 
 15 | 	"github.com/gocolly/colly"
 16 | 	"golang.org/x/net/html"
 17 | )
 18 | 
 19 | func (p *ProcJob) FingerPrint() {
 20 | 	log.Println("Trying to identify tech stack:", p.Host)
 21 | 	heads, body, err := makeRequest(p.Host)
 22 | 	if err != nil {
 23 | 		log.Println("Error during tech stack detection:", err.Error())
 24 | 		return
 25 | 	}
 26 | 	fresp := heads + "\n\n" + string(*body)
 27 | outBreak:
 28 | 	for tech, ddata := range dbData {
 29 | 		for _, drex := range ddata.Detectors {
 30 | 			rex := regexp.MustCompile(drex)
 31 | 			if rex.MatchString(fresp) {
 32 | 				log.Printf("Identified tech stack for %s: %s", p.Host, tech)
 33 | 				p.Host = p.Host + ":::" + tech
 34 | 				break outBreak
 35 | 			}
 36 | 		}
 37 | 	}
 38 | }
 39 | 
 40 | func (p *ProcJob) ExecuteCrawler() {
 41 | 	hastechs := false
 42 | 	mainurl := p.Host
 43 | 
 44 | 	if strings.Contains(p.Host, ":::") {
 45 | 		hastechs = true
 46 | 		mainurl = strings.Split(p.Host, ":::")[0]
 47 | 	}
 48 | 	log.Println("Processing:", p.Host)
 49 | 
 50 | 	if !strings.Contains(p.Host, "://") {
 51 | 		mainurl = "http://" + p.Host
 52 | 	} else {
 53 | 		p.Host = strings.Split(p.Host, "://")[1]
 54 | 	}
 55 | 
 56 | 	var c *colly.Collector
 57 | 	if !wildcardCrawl {
 58 | 		c = colly.NewCollector(
 59 | 			colly.AllowedDomains(strings.Split(mainurl, "://")[1]),
 60 | 			colly.UserAgent(USERAGENT),
 61 | 			colly.ParseHTTPErrorResponse(),
 62 | 			colly.MaxDepth(crawlDepth),
 63 | 			colly.CacheDir(".colly_cache/"),
 64 | 		)
 65 | 	} else {
 66 | 		c = colly.NewCollector(
 67 | 			colly.UserAgent(USERAGENT),
 68 | 			colly.ParseHTTPErrorResponse(),
 69 | 			colly.MaxDepth(crawlDepth),
 70 | 			colly.CacheDir(".colly_cache/"),
 71 | 		)
 72 | 	}
 73 | 
 74 | 	c.WithTransport(&http.Transport{
 75 | 		TLSClientConfig: &tls.Config{
 76 | 			InsecureSkipVerify: !verifySSL,
 77 | 		},
 78 | 		DisableKeepAlives: true, // disable keep-alive connections to targets
 79 | 	})
 80 | 	c.SetRequestTimeout(time.Duration(httpTimeout) * time.Second)
 81 | 	c.Limit(&colly.LimitRule{
 82 | 		Parallelism: concurrentURLs,
 83 | 		DomainGlob:  "*",
 84 | 	})
 85 | 
 86 | 	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
 87 | 		link := e.Attr("href")
 88 | 		c.Visit(e.Request.AbsoluteURL(link))
 89 | 	})
 90 | 
 91 | 	c.OnResponse(func(r *colly.Response) {
 92 | 		crawlProgress++
 93 | 		/*
 94 | 			if crawlProgress > MAXCRAWLVAL {
 95 | 				log.Println("Stopping crawling due to max URLs visit exceeded!")
 96 | 				return
 97 | 			}
 98 | 		*/
 99 | 		thisTime := time.Now()
100 | 		fmt.Printf("\r%d/%02d/%02d %02d:%02d:%02d Total processed: %d | Current: %s",
101 | 			thisTime.Year(), thisTime.Month(), thisTime.Day(), thisTime.Hour(),
102 | 			thisTime.Minute(), thisTime.Second(), crawlProgress, r.Request.URL.String())
103 | 
104 | 		wg := new(sync.WaitGroup)
105 | 		// start finding secrets on the html sources
106 | 		findSecrets(mainurl, r.Request.URL.String(), &r.Body)
107 | 		// start finding secrets within JS files
108 | 		wg.Add(1)
109 | 		go getJavascript(*r.Request.URL, &r.Body, wg)
110 | 		if hastechs {
111 | 			executeLoot(p.Host, r.Request.URL.String(), r.StatusCode, &r.Body)
112 | 			if submitForm {
113 | 				buff := bytes.NewReader(r.Body)
114 | 				root, err := html.Parse(buff)
115 | 				if err != nil {
116 | 					log.Println("error parsing html body:", err.Error(), r.Request.URL.String())
117 | 					return
118 | 				}
119 | 				forms := parseForms(root)
120 | 				for _, form := range forms {
121 | 					actionURL, err := url.Parse(form.Action)
122 | 					if err != nil {
123 | 						log.Println(err.Error())
124 | 						continue
125 | 					}
126 | 					actionURL = r.Request.URL.ResolveReference(actionURL)
127 | 					mval := setValues(&form.Values)
128 | 					err = c.Post(actionURL.String(), mval)
129 | 					if err != nil {
130 | 						log.Printf("Error posting form %s: %s", r.Request.URL.String(), err.Error())
131 | 					}
132 | 				}
133 | 			}
134 | 		}
135 | 		wg.Wait()
136 | 	})
137 | 
138 | 	c.Visit(mainurl)
139 | 	// c.Wait()
140 | 	fmt.Print("\n")
141 | }
--------------------------------------------------------------------------------
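process.go above centres on the colly collector configuration. As a stripped-down reference, a collector wired with the same depth, timeout, and parallelism knobs looks roughly like this (a sketch with hard-coded values and a placeholder target, not the tool's exact code):

```go
package main

import (
	"fmt"
	"time"

	"github.com/gocolly/colly"
)

func main() {
	c := colly.NewCollector(
		colly.MaxDepth(3),              // same role as the -depth flag
		colly.ParseHTTPErrorResponse(), // keep 4xx/5xx bodies for secret scanning
	)
	c.SetRequestTimeout(10 * time.Second) // same role as the -timeout flag
	c.Limit(&colly.LimitRule{
		DomainGlob:  "*",
		Parallelism: 15, // same role as the -parallelism flag
	})
	c.OnHTML("a[href]", func(e *colly.HTMLElement) {
		c.Visit(e.Request.AbsoluteURL(e.Attr("href")))
	})
	c.OnResponse(func(r *colly.Response) {
		fmt.Println(r.StatusCode, r.Request.URL)
	})
	c.Visit("https://example.com") // placeholder target
}
```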
"https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}", 29 | "SonarQube Docs API Key": "(?i)sonar.{0,50}(\\\"|'|`)?[0-9a-f]{40}(\\\"|'|`)?", 30 | "HockeyApp": "(?i)hockey.{0,50}(\\\"|'|`)?[0-9a-f]{32}(\\\"|'|`)?", 31 | "Username and password in URI": "([\\w+]{1,24})(://)([^$<]{1})([^\\s\";]{1,}):([^$<]{1})([^\\s\";/]{1,})@[-a-zA-Z0-9@:%._\\+~#=]{1,256}\\.[a-zA-Z0-9()]{1,24}([^\\s]+)", 32 | "NuGet API Key": "oy2[a-z0-9]{43}", 33 | "StackHawk API Key": "hawk\\.[0-9A-Za-z\\-_]{20}\\.[0-9A-Za-z\\-_]{20}", 34 | "Contains a private key": "-----BEGIN (EC|RSA|DSA|OPENSSH|PGP) PRIVATE KEY", 35 | "WP-Config": "define(.{0,20})?(DB_CHARSET|NONCE_SALT|LOGGED_IN_SALT|AUTH_SALT|NONCE_KEY|DB_HOST|DB_PASSWORD|AUTH_KEY|SECURE_AUTH_KEY|LOGGED_IN_KEY|DB_NAME|DB_USER)(.{0,20})?['|\"].{10,120}['|\"]", 36 | "AWS cred file info": "(?i)(aws_access_key_id|aws_secret_access_key)(.{0,20})?=.[0-9a-zA-Z\\/+]{20,40}", 37 | "Facebook Secret Key": "(?i)(facebook|fb)(.{0,20})?(?-i)['\\\"][0-9a-f]{32}['\\\"]", 38 | "Facebook Client ID": "(?i)(facebook|fb)(.{0,20})?['\\\"][0-9]{13,17}['\\\"]", 39 | "Twitter Secret Key": "(?i)twitter(.{0,20})?['\\\"][0-9a-z]{35,44}['\\\"]", 40 | "Twitter Client ID": "(?i)twitter(.{0,20})?['\\\"][0-9a-z]{18,25}['\\\"]", 41 | "Github Key": "(?i)github(.{0,20})?(?-i)['\\\"][0-9a-zA-Z]{35,40}['\\\"]", 42 | "Heroku API key": "(?i)heroku(.{0,20})?['\"][0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}['\"]", 43 | "Linkedin Client ID": "(?i)linkedin(.{0,20})?(?-i)['\\\"][0-9a-z]{12}['\\\"]", 44 | "LinkedIn Secret Key": "(?i)linkedin(.{0,20})?['\\\"][0-9a-z]{16}['\\\"]" 45 | } -------------------------------------------------------------------------------- /utils.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crypto/tls" 5 | "encoding/json" 6 | "fmt" 7 | "io/ioutil" 8 | "log" 9 | "net/http" 10 | "strings" 11 | "time" 12 | ) 13 | 14 | func serializeDB(fname string) error { 15 | dbdata, err := ioutil.ReadFile(fname) 16 | if err != nil { 17 | return err 18 | } 19 | if err := json.Unmarshal(dbdata, &dbData); err != nil { 20 | return err 21 | } 22 | return nil 23 | } 24 | 25 | func validateStatusCode(code int, codes []int) bool { 26 | for _, x := range codes { 27 | if x == code { 28 | return true 29 | } 30 | } 31 | return false 32 | } 33 | 34 | func serializeRegexDB(fname string) bool { 35 | dbdata, err := ioutil.ReadFile(fname) 36 | if err != nil { 37 | log.Println(err.Error()) 38 | return false 39 | } 40 | if err := json.Unmarshal(dbdata, ®exData); err != nil { 41 | log.Println(err.Error()) 42 | return false 43 | } 44 | return true 45 | } 46 | 47 | func makeRequest(url string) (string, *[]byte, error) { 48 | hostheader := strings.Split(url, "://")[1] 49 | client := &http.Client{ 50 | Transport: &http.Transport{ 51 | TLSClientConfig: &tls.Config{ 52 | InsecureSkipVerify: !verifySSL, 53 | ServerName: hostheader, 54 | }, 55 | }, 56 | Timeout: time.Duration(httpTimeout) * time.Second, 57 | /* 58 | CheckRedirect: func(req *http.Request, via []*http.Request) error { 59 | return http.ErrUseLastResponse 60 | }, 61 | */ 62 | } 63 | req, _ := http.NewRequest("GET", url, nil) 64 | req.Host = hostheader 65 | req.Header.Add("User-Agent", `Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36`) 66 | req.Header.Add("Accept", `text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9`) 
/utils.go:
--------------------------------------------------------------------------------
 1 | package main
 2 | 
 3 | import (
 4 | 	"crypto/tls"
 5 | 	"encoding/json"
 6 | 	"fmt"
 7 | 	"io/ioutil"
 8 | 	"log"
 9 | 	"net/http"
10 | 	"strings"
11 | 	"time"
12 | )
13 | 
14 | func serializeDB(fname string) error {
15 | 	dbdata, err := ioutil.ReadFile(fname)
16 | 	if err != nil {
17 | 		return err
18 | 	}
19 | 	if err := json.Unmarshal(dbdata, &dbData); err != nil {
20 | 		return err
21 | 	}
22 | 	return nil
23 | }
24 | 
25 | func validateStatusCode(code int, codes []int) bool {
26 | 	for _, x := range codes {
27 | 		if x == code {
28 | 			return true
29 | 		}
30 | 	}
31 | 	return false
32 | }
33 | 
34 | func serializeRegexDB(fname string) bool {
35 | 	dbdata, err := ioutil.ReadFile(fname)
36 | 	if err != nil {
37 | 		log.Println(err.Error())
38 | 		return false
39 | 	}
40 | 	if err := json.Unmarshal(dbdata, &regexData); err != nil {
41 | 		log.Println(err.Error())
42 | 		return false
43 | 	}
44 | 	return true
45 | }
46 | 
47 | func makeRequest(url string) (string, *[]byte, error) {
48 | 	hostheader := strings.Split(url, "://")[1]
49 | 	client := &http.Client{
50 | 		Transport: &http.Transport{
51 | 			TLSClientConfig: &tls.Config{
52 | 				InsecureSkipVerify: !verifySSL,
53 | 				ServerName:         hostheader,
54 | 			},
55 | 		},
56 | 		Timeout: time.Duration(httpTimeout) * time.Second,
57 | 		/*
58 | 			CheckRedirect: func(req *http.Request, via []*http.Request) error {
59 | 				return http.ErrUseLastResponse
60 | 			},
61 | 		*/
62 | 	}
63 | 	req, _ := http.NewRequest("GET", url, nil)
64 | 	req.Host = hostheader
65 | 	req.Header.Add("User-Agent", `Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36`)
66 | 	req.Header.Add("Accept", `text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9`)
67 | 	req.Header.Add("Accept-Language", `en-GB,en-US;q=0.9,en;q=0.8`)
68 | 	req.Header.Add("Accept-Encoding", `identity`)
69 | 	req.Header.Add("DNT", `1`)
70 | 	conn, err := client.Do(req)
71 | 	if err != nil {
72 | 		// log.Printf("Error making request to %s: %s", url, err.Error())
73 | 		return "", nil, err
74 | 	}
75 | 	var respHeaders string
76 | 	for key, val := range conn.Header {
77 | 		respHeaders += fmt.Sprintf("%s: %s", key, strings.Join(val, ""))
78 | 	}
79 | 	body, err := ioutil.ReadAll(conn.Body)
80 | 	if err != nil {
81 | 		return "", nil, err
82 | 	}
83 | 	return respHeaders, &body, nil
84 | }
--------------------------------------------------------------------------------
/worker.go:
--------------------------------------------------------------------------------
 1 | package main
 2 | 
 3 | import (
 4 | 	"bufio"
 5 | 	"log"
 6 | 	"os"
 7 | 	"sync"
 8 | )
 9 | 
10 | var (
11 | 	ProcChan  = make(chan *ProcJob, maxWorkers)
12 | 	writer, _ = NewCSVWriter(OUTCSV)
13 | )
14 | 
15 | type ProcJob struct {
16 | 	Host string
17 | }
18 | 
19 | func ProcessHosts(args []string) {
20 | 	if len(inpFile) > 0 {
21 | 		file, err := os.Open(inpFile)
22 | 		if err != nil {
23 | 			log.Println(err)
24 | 			return
25 | 		}
26 | 		defer file.Close()
27 | 
28 | 		scanner := bufio.NewScanner(file)
29 | 		scanner.Split(bufio.ScanLines)
30 | 		for scanner.Scan() {
31 | 			ProcChan <- &ProcJob{
32 | 				Host: scanner.Text(),
33 | 			}
34 | 		}
35 | 
36 | 		if err := scanner.Err(); err != nil {
37 | 			log.Fatalln("Error reading from target file:", err.Error())
38 | 		}
39 | 	} else {
40 | 		for _, target := range args {
41 | 			ProcChan <- &ProcJob{
42 | 				Host: target,
43 | 			}
44 | 		}
45 | 	}
46 | 	close(ProcChan)
47 | }
48 | 
49 | func execWorker(wg *sync.WaitGroup) {
50 | 	for job := range ProcChan {
51 | 		job.RunChecks()
52 | 	}
53 | 	wg.Done()
54 | }
55 | 
56 | func InitDispatcher(workerno int) {
57 | 	wg := new(sync.WaitGroup)
58 | 	for i := 0; i < workerno; i++ {
59 | 		wg.Add(1)
60 | 		go execWorker(wg)
61 | 	}
62 | 	wg.Wait()
63 | }
64 | 
65 | func (p *ProcJob) RunChecks() {
66 | 	p.FingerPrint()
67 | 	p.ExecuteCrawler()
68 | }
--------------------------------------------------------------------------------
/writer.go:
--------------------------------------------------------------------------------
 1 | package main
 2 | 
 3 | import (
 4 | 	"context"
 5 | 	"encoding/csv"
 6 | 	"log"
 7 | 	"os"
 8 | 	"sync"
 9 | )
10 | 
11 | type CSVWriter struct {
12 | 	mutex     *sync.Mutex
13 | 	csvWriter *csv.Writer
14 | }
15 | 
16 | func NewCSVWriter(fileName string) (*CSVWriter, error) {
17 | 	csvFile, err := os.Create(fileName)
18 | 	if err != nil {
19 | 		return nil, err
20 | 	}
21 | 	w := csv.NewWriter(csvFile)
22 | 	return &CSVWriter{
23 | 		csvWriter: w,
24 | 		mutex:     &sync.Mutex{},
25 | 	}, nil
26 | }
27 | 
28 | func (w *CSVWriter) Write(row []string) {
29 | 	w.mutex.Lock()
30 | 	w.csvWriter.Write(row)
31 | 	w.mutex.Unlock()
32 | }
33 | 
34 | func (w *CSVWriter) Flush() {
35 | 	w.mutex.Lock()
36 | 	w.csvWriter.Flush()
37 | 	w.mutex.Unlock()
38 | }
39 | 
40 | func handleInterrupt(c chan os.Signal, cancel *context.CancelFunc) {
41 | 	<-c
42 | 	(*cancel)()
43 | 	writer.Flush()
44 | 	log.Fatalln("Scan cancelled by user.")
45 | }
--------------------------------------------------------------------------------