├── .github └── FUNDING.yml ├── .gitignore ├── README.md ├── arank └── main.go ├── bparse ├── README.md └── main.go ├── cdnfilter ├── README.md ├── cdncheck.go ├── go.mod ├── go.sum ├── main.go ├── ranges.go └── ranges_test.go ├── chrunk ├── README.md └── main.go ├── cinfo ├── README.md ├── go.mod ├── go.sum └── main.go ├── cleansub ├── README.md └── main.go ├── durl ├── README.md └── main.go ├── eip ├── README.md ├── go.mod ├── go.sum └── main.go ├── favinfo ├── go.mod ├── go.sum └── main.go ├── ftld ├── README.md ├── go.mod ├── go.sum ├── main.go ├── pkged.go └── public_suffix_list.dat ├── ghd ├── README.md ├── dorks.txt └── main.go ├── hparse └── main.go ├── junique ├── go.mod ├── go.sum └── main.go ├── nin └── main.go ├── oic ├── README.md ├── go.mod ├── go.sum └── main.go ├── ourl ├── README.md ├── go.mod ├── go.sum └── main.go ├── purl └── main.go ├── qscreenshot └── main.go ├── rdns └── main.go ├── strr ├── README.md └── main.go ├── urp ├── README.md └── main.go └── wlimit ├── go.mod └── main.go /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: j3ssie 5 | open_collective: osmedeus 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: [ 'https://docs.osmedeus.org/donation/', 'https://paypal.me/j3ssiejjj', 'https://www.buymeacoffee.com/j3ssie' ] -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_STORE 2 | configs 3 | .idea 4 | out 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Collection of auxiliary command line tools 2 | === 3 | 4 | ### Featured tools that you should try 5 | 6 | | Tools | Description | 7 | |-------------------|-------------------------------------| 8 | | **cinfo** | Extract domain from SSL info | 9 | | **eip** | Extend the IP range by CIDR | 10 | | **cleansub** | Clean up garbage subdomain from list of subdomain | 11 | | **durl** | Strip out similar URLs by unique hostname-path-paramName | 12 | | **urp** | Parse URLs in fuzz format | 13 | | **qscreenshot** | Do screenshot from list of URLs | 14 | | **bparse** | parsing burp XML file | 15 | | **arank** | Get alexa rank of list of urls | 16 | | **chrunk** | Run your command against really really big file. 
| 17 | | **cdnfilter** | Cleaning CDN IP Address and Private IPs from list of inputs | 18 | 19 | *** 20 | 21 | ### Install 22 | 23 | All the tools 24 | 25 | ```shell 26 | go install -u github.com/j3ssie/go-auxs/...@latest 27 | ``` 28 | 29 | or single tool 30 | 31 | ```shell 32 | go install -v github.com/j3ssie/go-auxs/@latest 33 | ``` 34 | 35 | ## Donation 36 | 37 | [![paypal](https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif)](https://paypal.me/j3ssiejjj) 38 | 39 | -------------------------------------------------------------------------------- /arank/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "encoding/xml" 6 | "flag" 7 | "fmt" 8 | "io/ioutil" 9 | "net/http" 10 | "os" 11 | "strings" 12 | "sync" 13 | ) 14 | 15 | // Get alexa rank of list of urls 16 | // Usage: echo '1.2.3.4/24' | arank -c 50 17 | var concurrency int 18 | 19 | func main() { 20 | // cli arguments 21 | flag.IntVar(&concurrency, "c", 30, "concurrency") 22 | 23 | // custom help 24 | flag.Usage = func() { 25 | os.Exit(1) 26 | } 27 | flag.Parse() 28 | 29 | var wg sync.WaitGroup 30 | jobs := make(chan string, concurrency) 31 | 32 | wg.Add(1) 33 | go func() { 34 | defer wg.Done() 35 | for job := range jobs { 36 | rank, _ := getAlexaRank(job) 37 | fmt.Printf("%v,%v\n", job, rank) 38 | } 39 | }() 40 | 41 | sc := bufio.NewScanner(os.Stdin) 42 | go func() { 43 | for sc.Scan() { 44 | url := strings.TrimSpace(sc.Text()) 45 | jobs <- url 46 | } 47 | close(jobs) 48 | }() 49 | wg.Wait() 50 | 51 | } 52 | 53 | func getAlexaRank(url string) (string, error) { 54 | rank := "-1" 55 | 56 | resp, err := http.Get("http://data.alexa.com/data?cli=10&dat=snbamz&url=" + url) 57 | if err != nil { 58 | return rank, err 59 | } 60 | 61 | defer resp.Body.Close() 62 | 63 | alexaData, err := ioutil.ReadAll(resp.Body) 64 | if err != nil { 65 | return rank, err 66 | } 67 | 68 | decoder := xml.NewDecoder(strings.NewReader(string(alexaData))) 69 | for { 70 | token, _ := decoder.Token() 71 | if token == nil { 72 | break 73 | } 74 | 75 | switch startElement := token.(type) { 76 | case xml.StartElement: 77 | if startElement.Name.Local == "POPULARITY" { 78 | if len(startElement.Attr) >= 2 { 79 | rank = startElement.Attr[1].Value 80 | } 81 | } 82 | } 83 | } 84 | return rank, nil 85 | } 86 | -------------------------------------------------------------------------------- /bparse/README.md: -------------------------------------------------------------------------------- 1 | ## Bparse 2 | parsing burp XML file 3 | 4 | ## Install 5 | ``` 6 | go get -u github.com/j3ssie/go-auxs/bparse 7 | ``` 8 | 9 | ## Usage 10 | ``` 11 | Usage: 12 | bparse -o output.csv burp-file 13 | bparse -n -o output.csv burp-file 14 | bparse -i burp-file -f -o output 15 | ``` 16 | 17 | -------------------------------------------------------------------------------- /bparse/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "encoding/base64" 6 | "encoding/csv" 7 | "encoding/xml" 8 | "flag" 9 | "fmt" 10 | "io" 11 | "io/ioutil" 12 | "net/http" 13 | "net/url" 14 | "os" 15 | "sort" 16 | "strings" 17 | 18 | "github.com/mitchellh/go-homedir" 19 | ) 20 | 21 | type Items struct { 22 | XMLName xml.Name `xml:"items"` 23 | Text string `xml:",chardata"` 24 | BurpVersion string `xml:"burpVersion,attr"` 25 | ExportTime string `xml:"exportTime,attr"` 26 | Item []struct { 27 | Text string `xml:",chardata"` 28 | Time 
string `xml:"time"` 29 | URL string `xml:"url"` 30 | Host struct { 31 | Text string `xml:",chardata"` 32 | Ip string `xml:"ip,attr"` 33 | } `xml:"host"` 34 | Port string `xml:"port"` 35 | Protocol string `xml:"protocol"` 36 | Method string `xml:"method"` 37 | Path string `xml:"path"` 38 | Extension string `xml:"extension"` 39 | Request struct { 40 | Text string `xml:",chardata"` 41 | Base64 string `xml:"base64,attr"` 42 | } `xml:"request"` 43 | Status string `xml:"status"` 44 | Responselength string `xml:"responselength"` 45 | Mimetype string `xml:"mimetype"` 46 | Response struct { 47 | Text string `xml:",chardata"` 48 | Base64 string `xml:"base64,attr"` 49 | } `xml:"response"` 50 | Comment string `xml:"comment"` 51 | } `xml:"item"` 52 | } 53 | 54 | var ( 55 | burpFile string 56 | output string 57 | isBase64 bool 58 | noBody bool 59 | flat bool 60 | stripComma bool 61 | ) 62 | 63 | // Usage: 64 | // bparse -o output.csv burp-file 65 | // bparse -n -o output.csv burp-file 66 | // bparse -i burp-file -f -o output 67 | 68 | func main() { 69 | // cli arguments 70 | flag.StringVar(&burpFile, "i", "", "Burp file (default is last argument)") 71 | flag.StringVar(&output, "o", "out", "Output file") 72 | flag.BoolVar(&noBody, "n", false, "Don't store body in csv output") 73 | flag.BoolVar(&isBase64, "b", true, "is Burp XML base64 encoded") 74 | flag.BoolVar(&flat, "f", false, "Store raw request base64 line by line") 75 | flag.BoolVar(&stripComma, "s", true, "Encode ',' in case it appear in data") 76 | flag.Parse() 77 | args := os.Args[1:] 78 | sort.Strings(args) 79 | if burpFile == "" { 80 | burpFile = args[len(args)-1] 81 | } 82 | content := GetFileContent(burpFile) 83 | if content == "" { 84 | fmt.Printf("failed to read content of %v \n", burpFile) 85 | return 86 | } 87 | 88 | // parsing content 89 | r := &Items{} 90 | err := xml.Unmarshal([]byte(content), r) 91 | if err != nil { 92 | fmt.Printf("failed to parse Burp XML file: %v \n", err) 93 | return 94 | } 95 | 96 | // URL, Host, Method, Path, IP:Port, Status, Responselength, Body 97 | csvHeader := []string{"URL", "Method", "Host", "Path", "Protocol", "IP:Port", "Status", "Responselength", "Body"} 98 | if noBody { 99 | csvHeader = []string{"URL", "Method", "Host", "Path", "Protocol", "IP:Port", "Status", "Responselength"} 100 | } 101 | csvData := [][]string{ 102 | csvHeader, 103 | } 104 | if !flat { 105 | fmt.Println(strings.Join(csvHeader, ",")) 106 | } 107 | 108 | var flatOutput []string 109 | // loop through data 110 | for _, item := range r.Item { 111 | if flat { 112 | flatOutput = append(flatOutput, item.Request.Text) 113 | fmt.Println(item.Request.Text) 114 | continue 115 | } 116 | if stripComma { 117 | if strings.Contains(item.URL, ",") { 118 | item.URL = strings.Replace(item.URL, ",", "%2c", -1) 119 | } 120 | if strings.Contains(item.Path, ",") { 121 | item.Path = strings.Replace(item.Path, ",", "%2c", -1) 122 | } 123 | } 124 | 125 | dest := fmt.Sprintf("%v:%v", item.Host.Ip, item.Port) 126 | data := []string{ 127 | item.URL, item.Method, item.Host.Text, item.Path, item.Protocol, dest, item.Status, item.Responselength, 128 | } 129 | if !noBody { 130 | body := GetReqBody(item.Request.Text) 131 | data = []string{ 132 | item.URL, item.Method, item.Host.Text, item.Path, item.Protocol, dest, item.Status, item.Responselength, body, 133 | } 134 | } 135 | csvData = append(csvData, data) 136 | fmt.Println(strings.Join(data, ",")) 137 | } 138 | if flat { 139 | WriteToFile(output, strings.Join(flatOutput, "\n")) 140 | return 141 | } 142 | 143 | // 
write to CSV 144 | csvFile, err := os.Create(output) 145 | if err != nil { 146 | fmt.Printf("failed to write csv data: %s \n", err) 147 | return 148 | } 149 | csvWriter := csv.NewWriter(csvFile) 150 | for _, empRow := range csvData { 151 | _ = csvWriter.Write(empRow) 152 | } 153 | csvWriter.Flush() 154 | csvFile.Close() 155 | } 156 | 157 | // GetFileContent Reading file and return content of it 158 | func GetFileContent(filename string) string { 159 | var result string 160 | if strings.Contains(filename, "~") { 161 | filename, _ = homedir.Expand(filename) 162 | } 163 | file, err := os.Open(filename) 164 | if err != nil { 165 | return result 166 | } 167 | defer file.Close() 168 | b, err := ioutil.ReadAll(file) 169 | if err != nil { 170 | return result 171 | } 172 | return string(b) 173 | } 174 | 175 | // WriteToFile write string to a file 176 | func WriteToFile(filename string, data string) (string, error) { 177 | file, err := os.Create(filename) 178 | if err != nil { 179 | return "", err 180 | } 181 | defer file.Close() 182 | 183 | _, err = io.WriteString(file, data+"\n") 184 | if err != nil { 185 | return "", err 186 | } 187 | return filename, file.Sync() 188 | } 189 | 190 | // GetReqBody parse burp style request 191 | func GetReqBody(raw string) string { 192 | var body string 193 | if isBase64 { 194 | raw, _ = Base64Decode(raw) 195 | } 196 | reader := bufio.NewReader(strings.NewReader(raw)) 197 | parsedReq, err := http.ReadRequest(reader) 198 | if err != nil { 199 | return raw 200 | } 201 | rBody, _ := ioutil.ReadAll(parsedReq.Body) 202 | body = string(rBody) 203 | if strings.Contains(body, ",") || strings.Contains(body, "\n") { 204 | body = URLEncode(body) 205 | } 206 | body = Base64Encode(body) 207 | return body 208 | } 209 | 210 | // Base64Encode just Base64 Encode 211 | func Base64Encode(raw string) string { 212 | return base64.StdEncoding.EncodeToString([]byte(raw)) 213 | } 214 | 215 | // URLEncode just URL Encode 216 | func URLEncode(raw string) string { 217 | return url.QueryEscape(raw) 218 | } 219 | 220 | // Base64Decode just Base64 Encode 221 | func Base64Decode(raw string) (string, error) { 222 | data, err := base64.StdEncoding.DecodeString(raw) 223 | if err != nil { 224 | return raw, err 225 | } 226 | return string(data), nil 227 | } 228 | -------------------------------------------------------------------------------- /cdnfilter/README.md: -------------------------------------------------------------------------------- 1 | # CDN Filter 2 | 3 | Cleaning CDN IP Address and Private IPs from list of inputs 4 | 5 | ## Install 6 | 7 | ```shell 8 | go get -u github.com/j3ssie/go-auxs/cdnfilter 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```shell 14 | cat list_of_ips.txt | cdnfilter -c cdn_out.txt -n not_cdn_out.txt 15 | ``` 16 | 17 | ## Credit 18 | 19 | Created by my friend @thebl4ckturtle 20 | -------------------------------------------------------------------------------- /cdnfilter/cdncheck.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crypto/tls" 5 | "net" 6 | "net/http" 7 | "time" 8 | 9 | "github.com/yl2chen/cidranger" 10 | ) 11 | 12 | // Client checks for CDN based IPs which should be excluded 13 | // during scans since they belong to third party firewalls. 
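//
// A minimal usage sketch (illustrative only; assumes network access for the scrapers wired up below):
//
//	client, err := NewCDNCheck()
//	if err != nil {
//		// handle error
//	}
//	isCDN, _ := client.Check(net.ParseIP("1.2.3.4"))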
14 | type Client struct { 15 | Data map[string]struct{} 16 | ranger cidranger.Ranger 17 | } 18 | 19 | var defaultScrapers = map[string]scraperFunc{ 20 | "akamai": scrapeAkamai, 21 | "cloudflare": scrapeCloudflare, 22 | "incapsula": scrapeIncapsula, 23 | "sucuri": scrapeSucuri, 24 | "cloudfront": scrapeCloudfront, 25 | "fastly": scrapeFastly, 26 | "maxcdn": scrapeMaxCDN, 27 | "ddosguard": scrapeDDOSGuard, 28 | } 29 | 30 | var cachedScrapers = map[string]scraperFunc{ 31 | "projectdiscovery": scrapeProjectDiscovery, 32 | } 33 | 34 | // New creates a new firewall IP checking client. 35 | func NewCDNCheck() (*Client, error) { 36 | return new(false) 37 | } 38 | 39 | // NewWithCache creates a new firewall IP with cached data from project discovery (faster) 40 | func NewWithCache() (*Client, error) { 41 | return new(true) 42 | } 43 | 44 | func new(cache bool) (*Client, error) { 45 | httpClient := &http.Client{ 46 | Transport: &http.Transport{ 47 | MaxIdleConns: 100, 48 | MaxIdleConnsPerHost: 100, 49 | TLSClientConfig: &tls.Config{ 50 | Renegotiation: tls.RenegotiateOnceAsClient, 51 | InsecureSkipVerify: true, 52 | }, 53 | }, 54 | Timeout: time.Duration(30) * time.Second, 55 | } 56 | client := &Client{} 57 | 58 | var scrapers map[string]scraperFunc 59 | if cache { 60 | scrapers = cachedScrapers 61 | } else { 62 | scrapers = defaultScrapers 63 | } 64 | 65 | client.Data = make(map[string]struct{}) 66 | for _, scraper := range scrapers { 67 | cidrs, err := scraper(httpClient) 68 | if err != nil { 69 | return nil, err 70 | } 71 | for _, cidr := range cidrs { 72 | client.Data[cidr] = struct{}{} 73 | } 74 | } 75 | 76 | ranger := cidranger.NewPCTrieRanger() 77 | for cidr := range client.Data { 78 | _, network, err := net.ParseCIDR(cidr) 79 | if err != nil { 80 | continue 81 | } 82 | ranger.Insert(cidranger.NewBasicRangerEntry(*network)) 83 | } 84 | client.ranger = ranger 85 | 86 | return client, nil 87 | } 88 | 89 | // Check checks if an IP is contained in the blacklist 90 | func (c *Client) Check(ip net.IP) (bool, error) { 91 | return c.ranger.Contains(ip) 92 | } 93 | -------------------------------------------------------------------------------- /cdnfilter/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/cdnfilter 2 | 3 | go 1.16 4 | 5 | require ( 6 | github.com/stretchr/testify v1.7.0 7 | github.com/yl2chen/cidranger v1.0.2 8 | ) 9 | -------------------------------------------------------------------------------- /cdnfilter/go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= 2 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 3 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 4 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 5 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 6 | github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 7 | github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= 8 | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 9 | github.com/yl2chen/cidranger v1.0.2 h1:lbOWZVCG1tCRX4u24kuM1Tb4nHqWkDxwLdoS+SevawU= 10 | github.com/yl2chen/cidranger v1.0.2/go.mod h1:9U1yz7WPYDwf0vpNWFaeRh0bjwz5RVgRy/9UEQfHl0g= 11 | 
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 12 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 13 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 14 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= 15 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 16 | -------------------------------------------------------------------------------- /cdnfilter/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "log" 8 | "net" 9 | "os" 10 | "strings" 11 | ) 12 | 13 | // Most of the file literally copied from my friend @thebl4ckturtle code 14 | 15 | var ReservedCIDRs = []string{ 16 | "192.168.0.0/16", 17 | "172.16.0.0/12", 18 | "10.0.0.0/8", 19 | "127.0.0.0/8", 20 | "224.0.0.0/4", 21 | "240.0.0.0/4", 22 | "100.64.0.0/10", 23 | "198.18.0.0/15", 24 | "169.254.0.0/16", 25 | "192.88.99.0/24", 26 | "192.0.0.0/24", 27 | "192.0.2.0/24", 28 | "192.94.77.0/24", 29 | "192.94.78.0/24", 30 | "192.52.193.0/24", 31 | "192.12.109.0/24", 32 | "192.31.196.0/24", 33 | "192.0.0.0/29", 34 | } 35 | 36 | // The reserved network address ranges 37 | var reservedAddrRanges []*net.IPNet 38 | 39 | func init() { 40 | for _, cidr := range ReservedCIDRs { 41 | if _, ipnet, err := net.ParseCIDR(cidr); err == nil { 42 | reservedAddrRanges = append(reservedAddrRanges, ipnet) 43 | } 44 | } 45 | } 46 | 47 | func main() { 48 | var cdnOutputFile string 49 | var notCdnOutputFile string 50 | flag.StringVar(&cdnOutputFile, "c", "cdn.txt", "CDN output file") 51 | flag.StringVar(¬CdnOutputFile, "n", "non-cdn.txt", "None CDN output file") 52 | flag.Parse() 53 | if cdnOutputFile == "" || notCdnOutputFile == "" { 54 | fmt.Fprintf(os.Stderr, "Check your input again\n") 55 | os.Exit(1) 56 | } 57 | 58 | cdnOutput, err := os.OpenFile(cdnOutputFile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, os.ModePerm) 59 | if err != nil { 60 | fmt.Fprintf(os.Stderr, "Failed to create/open cdnOutput\n") 61 | os.Exit(1) 62 | } 63 | defer cdnOutput.Close() 64 | 65 | notCdnOutput, err := os.OpenFile(notCdnOutputFile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, os.ModePerm) 66 | if err != nil { 67 | fmt.Fprintf(os.Stderr, "Failed to create/open notCdnOutputFile\n") 68 | os.Exit(1) 69 | } 70 | defer notCdnOutput.Close() 71 | 72 | client, err := NewCDNCheck() 73 | if err != nil { 74 | log.Fatal(err) 75 | } 76 | sc := bufio.NewScanner(os.Stdin) 77 | for sc.Scan() { 78 | line := strings.TrimSpace(sc.Text()) 79 | if line == "" { 80 | continue 81 | } 82 | ip := net.ParseIP(line) 83 | if !isPrivateIP(ip) { 84 | continue 85 | } 86 | if ip == nil { 87 | continue 88 | } 89 | if line == "0.0.0.0" { 90 | continue 91 | } 92 | if localIP, _ := isReservedAddress(line); localIP { 93 | // fmt.Println("Reserved Address: ", localIP) 94 | continue 95 | } 96 | found, err := client.Check(ip) 97 | if err != nil { 98 | continue 99 | } 100 | if found { 101 | _, _ = cdnOutput.WriteString(line + ":80\n") 102 | _, _ = cdnOutput.WriteString(line + ":443\n") 103 | 104 | } else { 105 | fmt.Println(line) 106 | _, _ = notCdnOutput.WriteString(line + "\n") 107 | // print nonCDN ip out 108 | } 109 | } 110 | } 111 | 112 | // IsReservedAddress checks if the addr parameter is within one of the address ranges in the ReservedCIDRs slice. 
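// For example, isReservedAddress("10.1.2.3") returns (true, "10.0.0.0/8"), while a
// public address such as "8.8.8.8" returns (false, "").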
113 | func isReservedAddress(addr string) (bool, string) { 114 | ip := net.ParseIP(addr) 115 | if ip == nil { 116 | return false, "" 117 | } 118 | 119 | var cidr string 120 | for _, block := range reservedAddrRanges { 121 | if block.Contains(ip) { 122 | cidr = block.String() 123 | break 124 | } 125 | } 126 | 127 | if cidr != "" { 128 | return true, cidr 129 | } 130 | return false, "" 131 | } 132 | 133 | // Copying from https://github.com/audiolion/ipip 134 | // isPrivateIP check if IP is private or not 135 | func isPrivateIP(ip net.IP) bool { 136 | if ip4 := ip.To4(); ip4 != nil { 137 | return ip4[0] == 10 || (ip4[0] == 172 && ip4[1]&0xf0 == 16) || (ip4[0] == 192 && ip4[1] == 168) 138 | } 139 | return len(ip) == net.IPv6len && ip[0]&0xfe == 0xfc 140 | } 141 | -------------------------------------------------------------------------------- /cdnfilter/ranges.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "io/ioutil" 5 | "net/http" 6 | "regexp" 7 | "strings" 8 | ) 9 | 10 | var cidrRegex = regexp.MustCompile(`[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\/[0-9]{1,3}`) 11 | 12 | const lookupASNNumbersURL = "https://api.hackertarget.com/aslookup/?q=" 13 | 14 | type scraperFunc func(httpClient *http.Client) ([]string, error) 15 | 16 | // scrapeCloudflare scrapes cloudflare firewall's CIDR ranges from their API 17 | func scrapeCloudflare(httpClient *http.Client) ([]string, error) { 18 | resp, err := httpClient.Get("https://www.cloudflare.com/ips-v4") 19 | if err != nil { 20 | return nil, err 21 | } 22 | defer resp.Body.Close() 23 | 24 | data, err := ioutil.ReadAll(resp.Body) 25 | if err != nil { 26 | return nil, err 27 | } 28 | body := string(data) 29 | 30 | cidrs := cidrRegex.FindAllString(body, -1) 31 | return cidrs, nil 32 | } 33 | 34 | // scrapeIncapsula scrapes incapsula firewall's CIDR ranges from their API 35 | func scrapeIncapsula(httpClient *http.Client) ([]string, error) { 36 | req, err := http.NewRequest(http.MethodPost, "https://my.incapsula.com/api/integration/v1/ips", strings.NewReader("resp_format=text")) 37 | if err != nil { 38 | return nil, err 39 | } 40 | req.Header.Set("Content-Type", "application/x-www-form-urlencoded") 41 | 42 | resp, err := httpClient.Do(req) 43 | if err != nil { 44 | return nil, err 45 | } 46 | defer resp.Body.Close() 47 | 48 | data, err := ioutil.ReadAll(resp.Body) 49 | if err != nil { 50 | return nil, err 51 | } 52 | body := string(data) 53 | 54 | cidrs := cidrRegex.FindAllString(body, -1) 55 | return cidrs, nil 56 | } 57 | 58 | // scrapeAkamai scrapes akamai firewall's CIDR ranges from ipinfo 59 | func scrapeAkamai(httpClient *http.Client) ([]string, error) { 60 | resp, err := httpClient.Get(lookupASNNumbersURL + "AS12222") 61 | if err != nil { 62 | return nil, err 63 | } 64 | defer resp.Body.Close() 65 | 66 | data, err := ioutil.ReadAll(resp.Body) 67 | if err != nil { 68 | return nil, err 69 | } 70 | body := string(data) 71 | 72 | cidrs := cidrRegex.FindAllString(body, -1) 73 | return cidrs, nil 74 | } 75 | 76 | // scrapeSucuri scrapes sucuri firewall's CIDR ranges from ipinfo 77 | func scrapeSucuri(httpClient *http.Client) ([]string, error) { 78 | resp, err := httpClient.Get(lookupASNNumbersURL + "AS30148") 79 | if err != nil { 80 | return nil, err 81 | } 82 | defer resp.Body.Close() 83 | 84 | data, err := ioutil.ReadAll(resp.Body) 85 | if err != nil { 86 | return nil, err 87 | } 88 | body := string(data) 89 | 90 | cidrs := cidrRegex.FindAllString(body, -1) 91 | return cidrs, 
nil 92 | } 93 | 94 | func scrapeDDOSGuard(httpClient *http.Client) ([]string, error) { 95 | resp, err := httpClient.Get(lookupASNNumbersURL + "AS262254") 96 | if err != nil { 97 | return nil, err 98 | } 99 | defer resp.Body.Close() 100 | 101 | data, err := ioutil.ReadAll(resp.Body) 102 | if err != nil { 103 | return nil, err 104 | } 105 | body := string(data) 106 | 107 | cidrs := cidrRegex.FindAllString(body, -1) 108 | return cidrs, nil 109 | } 110 | 111 | func scrapeProjectDiscovery(httpClient *http.Client) ([]string, error) { 112 | resp, err := httpClient.Get("https://cdn.projectdiscovery.io/cdn/cdn-ips") 113 | if err != nil { 114 | return nil, err 115 | } 116 | defer resp.Body.Close() 117 | 118 | data, err := ioutil.ReadAll(resp.Body) 119 | if err != nil { 120 | return nil, err 121 | } 122 | body := string(data) 123 | 124 | cidrs := cidrRegex.FindAllString(body, -1) 125 | return cidrs, nil 126 | } 127 | 128 | // scrapeCloudfront scrapes Cloudfront 129 | func scrapeCloudfront(httpClient *http.Client) ([]string, error) { 130 | resp, err := httpClient.Get("http://d7uri8nf7uskq.cloudfront.net/tools/list-cloudfront-ips") 131 | if err != nil { 132 | return nil, err 133 | } 134 | defer resp.Body.Close() 135 | 136 | data, err := ioutil.ReadAll(resp.Body) 137 | if err != nil { 138 | return nil, err 139 | } 140 | body := string(data) 141 | 142 | cidrs := cidrRegex.FindAllString(body, -1) 143 | return cidrs, nil 144 | } 145 | 146 | // scrapeFastly scrapes Fastly 147 | func scrapeFastly(httpClient *http.Client) ([]string, error) { 148 | resp, err := httpClient.Get("https://api.fastly.com/public-ip-list") 149 | if err != nil { 150 | return nil, err 151 | } 152 | defer resp.Body.Close() 153 | 154 | data, err := ioutil.ReadAll(resp.Body) 155 | if err != nil { 156 | return nil, err 157 | } 158 | body := string(data) 159 | 160 | cidrs := cidrRegex.FindAllString(body, -1) 161 | return cidrs, nil 162 | } 163 | 164 | func scrapeMaxCDN(httpClient *http.Client) ([]string, error) { 165 | resp, err := httpClient.Get("https://support.maxcdn.com/hc/en-us/article_attachments/360051920551/maxcdn_ips.txt") 166 | if err != nil { 167 | return nil, err 168 | } 169 | defer resp.Body.Close() 170 | 171 | data, err := ioutil.ReadAll(resp.Body) 172 | if err != nil { 173 | return nil, err 174 | } 175 | body := string(data) 176 | 177 | cidrs := cidrRegex.FindAllString(body, -1) 178 | return cidrs, nil 179 | } 180 | -------------------------------------------------------------------------------- /cdnfilter/ranges_test.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "crypto/tls" 5 | "net/http" 6 | "testing" 7 | "time" 8 | 9 | "github.com/stretchr/testify/require" 10 | ) 11 | 12 | func TestScrapeRanges(t *testing.T) { 13 | httpClient := &http.Client{ 14 | Transport: &http.Transport{ 15 | MaxIdleConns: 100, 16 | MaxIdleConnsPerHost: 100, 17 | TLSClientConfig: &tls.Config{ 18 | Renegotiation: tls.RenegotiateOnceAsClient, 19 | InsecureSkipVerify: true, 20 | }, 21 | }, 22 | 23 | Timeout: time.Duration(30) * time.Second, 24 | } 25 | 26 | t.Run("cloudflare", func(t *testing.T) { 27 | out, err := scrapeCloudflare(httpClient) 28 | t.Log(out) 29 | require.Nil(t, err, "Could not scrape cloudflare") 30 | }) 31 | t.Run("incapsula", func(t *testing.T) { 32 | out, err := scrapeIncapsula(httpClient) 33 | t.Log(out) 34 | require.Nil(t, err, "Could not scrape incapsula") 35 | }) 36 | t.Run("akamai", func(t *testing.T) { 37 | out, err := scrapeAkamai(httpClient) 38 | 
t.Log(out) 39 | require.Nil(t, err, "Could not scrape akamai") 40 | }) 41 | t.Run("sucuri", func(t *testing.T) { 42 | out, err := scrapeSucuri(httpClient) 43 | t.Log(out) 44 | require.Nil(t, err, "Could not scrape sucuri") 45 | }) 46 | t.Run("projectdiscovery", func(t *testing.T) { 47 | out, err := scrapeProjectDiscovery(httpClient) 48 | t.Log(out) 49 | require.Nil(t, err, "Could not scrape projectdiscovery") 50 | }) 51 | t.Run("cloudfront", func(t *testing.T) { 52 | out, err := scrapeCloudfront(httpClient) 53 | t.Log(out) 54 | require.Nil(t, err, "Could not scrape cloudfront") 55 | }) 56 | t.Run("fastly", func(t *testing.T) { 57 | out, err := scrapeFastly(httpClient) 58 | t.Log(out) 59 | require.Nil(t, err, "Could not scrape fastly") 60 | }) 61 | t.Run("maxcdn", func(t *testing.T) { 62 | out, err := scrapeMaxCDN(httpClient) 63 | t.Log(out) 64 | require.Nil(t, err, "Could not scrape maxcdn") 65 | }) 66 | t.Run("ddosguard", func(t *testing.T) { 67 | out, err := scrapeDDOSGuard(httpClient) 68 | t.Log(out) 69 | require.Nil(t, err, "Could not scrape ddosguard") 70 | }) 71 | } 72 | -------------------------------------------------------------------------------- /chrunk/README.md: -------------------------------------------------------------------------------- 1 | chrunk 2 | ====== 3 | Run your command against really really big file. 4 | 5 | ## Installation 6 | 7 | ```shell 8 | go get -u github.com/j3ssie/go-auxs/chrunk 9 | 10 | ``` 11 | 12 | ## Examples 13 | 14 | ```shell 15 | 16 | chrunk -i /tmp/really_big_file.txt -s 20000 17 | 18 | chrunk -i /tmp/really_big_file.txt -cmd 'echo "--> {}"' 19 | 20 | chrunk -p 20 -i /tmp/really_big_file.txt -cmd 'echo "--> {}"' 21 | 22 | cat really_big_file.txt | chrunk -p 10 -cmd 'echo "--> {}"' 23 | 24 | ``` 25 | 26 | ## Usage 27 | 28 | ``` 29 | Usage of chrunk: 30 | -c int 31 | Set the concurrency level (default 1) 32 | -clean 33 | Clean junk file after done 34 | -cmd string 35 | Command to run after chunked content 36 | -i string 37 | Input file to split 38 | -o string 39 | Output foldeer contains list of filename 40 | -p int 41 | Number of parts to split 42 | -prefix string 43 | Prefix output filename 44 | -s int 45 | Number of lines to split file (default 10000) 46 | ``` -------------------------------------------------------------------------------- /chrunk/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "io" 8 | "math/rand" 9 | "os" 10 | "os/exec" 11 | "path" 12 | "strings" 13 | "sync" 14 | "time" 15 | 16 | "github.com/mitchellh/go-homedir" 17 | "github.com/sirupsen/logrus" 18 | prefixed "github.com/x-cray/logrus-prefixed-formatter" 19 | ) 20 | 21 | // Extend the IP range by CIDR 22 | // Usage: chrunk -i /tmp/really_big_file.txt -cmd 'echo "--> {}"' 23 | 24 | var logger = logrus.New() 25 | 26 | var ( 27 | clean bool 28 | concurrency int 29 | part int 30 | size int 31 | filename string 32 | command string 33 | output string 34 | prefix string 35 | outDir string 36 | ) 37 | 38 | func main() { 39 | logger = &logrus.Logger{ 40 | Out: os.Stderr, 41 | Level: logrus.InfoLevel, 42 | Formatter: &prefixed.TextFormatter{ 43 | ForceColors: true, 44 | ForceFormatting: true, 45 | }, 46 | } 47 | 48 | // cli arguments 49 | flag.BoolVar(&clean, "clean", false, "Clean junk file after done") 50 | flag.IntVar(&part, "p", 0, "Number of parts to split") 51 | flag.IntVar(&size, "s", 10000, "Number of lines to split file") 52 | flag.StringVar(&filename, "i", "", 
"Input file to split") 53 | flag.StringVar(&prefix, "prefix", "", "Prefix output filename") 54 | flag.StringVar(&outDir, "o", "", "Output foldeer contains list of filename") 55 | flag.IntVar(&concurrency, "c", 1, "Set the concurrency level") 56 | flag.StringVar(&command, "cmd", "", "Command to run after chunked content") 57 | flag.Parse() 58 | 59 | if outDir == "" { 60 | outDir = path.Join(os.TempDir(), "chrunk-data") 61 | os.MkdirAll(outDir, 0755) 62 | logger.Infof("Set output folder: %v", outDir) 63 | } 64 | 65 | // input as stdin 66 | if filename == "" { 67 | var rawInput []string 68 | stat, _ := os.Stdin.Stat() 69 | // detect if anything came from std 70 | if (stat.Mode() & os.ModeCharDevice) == 0 { 71 | sc := bufio.NewScanner(os.Stdin) 72 | for sc.Scan() { 73 | url := strings.TrimSpace(sc.Text()) 74 | if err := sc.Err(); err == nil && url != "" { 75 | rawInput = append(rawInput, url) 76 | } 77 | } 78 | } 79 | 80 | filename = path.Join(outDir, fmt.Sprintf("raw-%v", RandomString(8))) 81 | logger.Infof("Write stdin data to: %v", filename) 82 | WriteToFile(filename, strings.Join(rawInput, "\n")) 83 | } 84 | 85 | if filename == "" { 86 | logger.Panic("No input provided") 87 | os.Exit(-1) 88 | } 89 | 90 | if prefix == "" { 91 | prefix = strings.TrimSuffix(path.Base(filename), path.Ext(filename)) 92 | logger.Infof("Set prefix output: %v", prefix) 93 | } 94 | 95 | var divided [][]string 96 | // really split file here 97 | if part == 0 { 98 | divided = ChunkFileBySize(filename, size) 99 | } else { 100 | divided = ChunkFileByPart(filename, part) 101 | } 102 | 103 | var chunkFiles []string 104 | // write data 105 | logger.Infof("Split input to %v parts", len(divided)) 106 | for index, chunk := range divided { 107 | outName := path.Join(outDir, fmt.Sprintf("%v-%v", prefix, index)) 108 | if command == "" { 109 | fmt.Println(outName) 110 | } 111 | WriteToFile(outName, strings.Join(chunk, "\n")) 112 | chunkFiles = append(chunkFiles, outName) 113 | } 114 | 115 | var commands []string 116 | if command != "" { 117 | // run command here 118 | for index, chunkFile := range chunkFiles { 119 | cmd := command 120 | cmd = strings.Replace(cmd, "{}", chunkFile, -1) 121 | cmd = strings.Replace(cmd, "{#}", fmt.Sprintf("%d", index), -1) 122 | // Execution(cmd) 123 | commands = append(commands, cmd) 124 | } 125 | } 126 | 127 | var wg sync.WaitGroup 128 | jobs := make(chan string, concurrency) 129 | 130 | for i := 0; i < concurrency; i++ { 131 | wg.Add(1) 132 | go func() { 133 | defer wg.Done() 134 | for job := range jobs { 135 | Execution(job) 136 | } 137 | }() 138 | } 139 | 140 | for _, command := range commands { 141 | jobs <- command 142 | } 143 | close(jobs) 144 | wg.Wait() 145 | 146 | // cleanup tmp data 147 | if clean || command != "" { 148 | logger.Infof("Clean up tmp data") 149 | for _, chunkFile := range chunkFiles { 150 | os.RemoveAll(chunkFile) 151 | } 152 | } 153 | } 154 | 155 | // ChunkFileByPart chunk file to multiple part 156 | func ChunkFileByPart(source string, chunk int) [][]string { 157 | var divided [][]string 158 | data := ReadingLines(source) 159 | if len(data) <= 0 || chunk > len(data) { 160 | if len(data) > 0 { 161 | divided = append(divided, data) 162 | } 163 | return divided 164 | } 165 | 166 | chunkSize := (len(data) + chunk - 1) / chunk 167 | for i := 0; i < len(data); i += chunkSize { 168 | end := i + chunkSize 169 | if end > len(data) { 170 | end = len(data) 171 | } 172 | 173 | divided = append(divided, data[i:end]) 174 | } 175 | return divided 176 | } 177 | 178 | // ChunkFileBySize 
chunk file to multiple part 179 | func ChunkFileBySize(source string, chunk int) [][]string { 180 | var divided [][]string 181 | data := ReadingLines(source) 182 | if len(data) <= 0 || chunk > len(data) { 183 | if len(data) > 0 { 184 | divided = append(divided, data) 185 | } 186 | return divided 187 | } 188 | 189 | chunkSize := chunk 190 | for i := 0; i < len(data); i += chunkSize { 191 | end := i + chunkSize 192 | if end > len(data) { 193 | end = len(data) 194 | } 195 | 196 | divided = append(divided, data[i:end]) 197 | } 198 | return divided 199 | } 200 | 201 | // ReadingLines Reading file and return content as []string 202 | func ReadingLines(filename string) []string { 203 | var result []string 204 | if strings.HasPrefix(filename, "~") { 205 | filename, _ = homedir.Expand(filename) 206 | } 207 | file, err := os.Open(filename) 208 | defer file.Close() 209 | if err != nil { 210 | return result 211 | } 212 | 213 | scanner := bufio.NewScanner(file) 214 | for scanner.Scan() { 215 | val := scanner.Text() 216 | if val == "" { 217 | continue 218 | } 219 | result = append(result, val) 220 | } 221 | 222 | if err := scanner.Err(); err != nil { 223 | return result 224 | } 225 | return result 226 | } 227 | 228 | // WriteToFile write string to a file 229 | func WriteToFile(filename string, data string) (string, error) { 230 | file, err := os.Create(filename) 231 | if err != nil { 232 | return "", err 233 | } 234 | defer file.Close() 235 | 236 | _, err = io.WriteString(file, data+"\n") 237 | if err != nil { 238 | return "", err 239 | } 240 | return filename, file.Sync() 241 | } 242 | 243 | // Execution Run a command 244 | func Execution(cmd string) (string, error) { 245 | command := []string{ 246 | "bash", 247 | "-c", 248 | cmd, 249 | } 250 | var output string 251 | logger.Infof("Execute: %s", cmd) 252 | realCmd := exec.Command(command[0], command[1:]...) 
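// Note: stdout of the child command is streamed to the console line by line via the
// scanner below; as written, the returned `output` string stays empty (only the local
// `out` variable accumulates the captured text).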
253 | // output command output to std too 254 | cmdReader, _ := realCmd.StdoutPipe() 255 | scanner := bufio.NewScanner(cmdReader) 256 | var out string 257 | go func() { 258 | for scanner.Scan() { 259 | out += scanner.Text() 260 | fmt.Println(scanner.Text()) 261 | } 262 | }() 263 | if err := realCmd.Start(); err != nil { 264 | return "", err 265 | } 266 | if err := realCmd.Wait(); err != nil { 267 | return "", err 268 | } 269 | return output, nil 270 | } 271 | 272 | // RandomString return a random string with length 273 | func RandomString(n int) string { 274 | var seededRand = rand.New(rand.NewSource(time.Now().UnixNano())) 275 | var letter = []rune("abcdefghijklmnopqrstuvwxyz") 276 | b := make([]rune, n) 277 | for i := range b { 278 | b[i] = letter[seededRand.Intn(len(letter))] 279 | } 280 | return string(b) 281 | } 282 | -------------------------------------------------------------------------------- /cinfo/README.md: -------------------------------------------------------------------------------- 1 | ## Cinfo 2 | 3 | Extract domain from SSL info 4 | 5 | ## Install 6 | 7 | ```shell 8 | GO111MODULE=on go get -u github.com/j3ssie/go-auxs/cinfo 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```shell 14 | # Basic Usage 15 | echo '1.2.3.4:443' | cinfo 16 | echo '1.2.3.4:443' | cinfo -json 17 | 18 | 19 | # probe for common SSL ports like 443,8443 20 | echo '1.2.3.4' | cinfo -e 21 | 22 | # get alexa rank of domains 23 | echo '1.2.3.4' | cinfo -e -a 24 | ``` -------------------------------------------------------------------------------- /cinfo/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/cinfo 2 | 3 | go 1.16 4 | 5 | require ( 6 | github.com/genkiroid/cert v0.0.0-20191007122723-897560fbbe50 7 | github.com/json-iterator/go v1.1.12 8 | golang.org/x/net v0.0.0-20211118161319-6a13c67c3ce4 9 | ) 10 | -------------------------------------------------------------------------------- /cinfo/go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 2 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 3 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 4 | github.com/genkiroid/cert v0.0.0-20191007122723-897560fbbe50 h1:vLwmYBduhnWWqShoUGbVgDulhcLdanoYtCQxYMzwaqQ= 5 | github.com/genkiroid/cert v0.0.0-20191007122723-897560fbbe50/go.mod h1:Pb7nyGYAfDyE/IkU6AJeRshIFko0wJC9cOqeYzYQffk= 6 | github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 7 | github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= 8 | github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= 9 | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= 10 | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= 11 | github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= 12 | github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= 13 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 14 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 15 | github.com/stretchr/objx v0.1.0/go.mod 
h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 16 | github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= 17 | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= 18 | golang.org/x/net v0.0.0-20211118161319-6a13c67c3ce4 h1:DZshvxDdVoeKIbudAdFEKi+f70l51luSy/7b76ibTY0= 19 | golang.org/x/net v0.0.0-20211118161319-6a13c67c3ce4/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= 20 | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 21 | golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 22 | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= 23 | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 24 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 25 | -------------------------------------------------------------------------------- /cinfo/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "crypto/tls" 6 | "encoding/xml" 7 | "flag" 8 | "fmt" 9 | "golang.org/x/net/publicsuffix" 10 | "io/ioutil" 11 | "net/http" 12 | "net/url" 13 | "os" 14 | "sort" 15 | "strings" 16 | "sync" 17 | 18 | "github.com/genkiroid/cert" 19 | jsoniter "github.com/json-iterator/go" 20 | ) 21 | 22 | // Extract domain from SSL info 23 | // cat /tmp/list_of_IP | cinfo -c 100 24 | var ( 25 | verbose bool 26 | alexa bool 27 | extra bool 28 | jsonOutput bool 29 | ports string 30 | concurrency int 31 | ) 32 | 33 | func main() { 34 | // cli arguments 35 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 36 | flag.BoolVar(&jsonOutput, "json", false, "Show Output as Json format") 37 | flag.BoolVar(&alexa, "a", false, "Check Alexa Rank of domain") 38 | flag.BoolVar(&extra, "e", false, "Append common extra HTTPS port too") 39 | flag.StringVar(&ports, "p", "443,8443,9443", "Common extra HTTPS port too (default: 443,8443,9443)") 40 | flag.BoolVar(&verbose, "v", false, "Verbose output") 41 | flag.Parse() 42 | 43 | stat, _ := os.Stdin.Stat() 44 | if (stat.Mode() & os.ModeCharDevice) != 0 { 45 | args := os.Args[1:] 46 | sort.Strings(args) 47 | url := args[len(args)-1] 48 | 49 | hostname := getHostName(url, "") 50 | if !getCerts(hostname) { 51 | getCerts(getHostName(hostname, "443")) 52 | } 53 | os.Exit(0) 54 | } 55 | 56 | var wg sync.WaitGroup 57 | jobs := make(chan string, concurrency) 58 | 59 | for i := 0; i < concurrency; i++ { 60 | wg.Add(1) 61 | go func() { 62 | defer wg.Done() 63 | for job := range jobs { 64 | hostname := getHostName(job, "") 65 | if hostname != "" { 66 | if extra { 67 | if strings.Contains(hostname, ":") { 68 | hostname = strings.Split(hostname, ":")[0] 69 | } 70 | hostnames := moreHosts(hostname) 71 | for _, host := range hostnames { 72 | getCerts(host) 73 | } 74 | } 75 | if !getCerts(hostname) { 76 | getCerts(getHostName(job, "443")) 77 | } 78 | } 79 | } 80 | }() 81 | } 82 | 83 | sc := bufio.NewScanner(os.Stdin) 84 | go func() { 85 | for sc.Scan() { 86 | url := strings.TrimSpace(sc.Text()) 87 | if err := sc.Err(); err == nil && url != "" { 88 | jobs <- url 89 | } 90 | } 91 | close(jobs) 92 | }() 93 | wg.Wait() 94 | } 95 | 96 | func moreHosts(raw string) []string { 97 | var result []string 98 | mports := strings.Split(raw, ",") 99 | for _, mport := range mports { 100 | result = 
append(result, fmt.Sprintf("%s:%s", raw, mport)) 101 | } 102 | return result 103 | } 104 | 105 | func getHostName(raw string, port string) string { 106 | if !strings.HasPrefix(raw, "http") { 107 | raw = "https://" + raw 108 | } 109 | u, err := url.Parse(raw) 110 | if err != nil { 111 | fmt.Println(err) 112 | return "" 113 | } 114 | var hostname string 115 | if port != "" { 116 | return u.Hostname() + ":" + port 117 | } 118 | 119 | if u.Port() == "" { 120 | hostname = u.Hostname() 121 | } else { 122 | hostname = u.Hostname() + ":" + u.Port() 123 | } 124 | return hostname 125 | } 126 | 127 | type CertInfo struct { 128 | Input string `json:"input"` 129 | Domains []string `json:"domains"` 130 | Info string `json:"info"` 131 | } 132 | 133 | func getCerts(raw string) bool { 134 | var certs cert.Certs 135 | var err error 136 | var rank string 137 | 138 | cert.SkipVerify = true 139 | 140 | certs, err = cert.NewCerts([]string{raw}) 141 | if err != nil { 142 | return false 143 | } 144 | 145 | certInfo := CertInfo{ 146 | Input: raw, 147 | } 148 | 149 | for _, certItem := range certs { 150 | if verbose { 151 | info, err := GetCertificatesInfo(raw) 152 | certInfo.Info = info 153 | if err == nil { 154 | if !jsonOutput { 155 | fmt.Printf("%s - %s\n", raw, info) 156 | } 157 | } 158 | } 159 | 160 | for _, domain := range certItem.SANs { 161 | data := domain 162 | if alexa { 163 | rank, _ = getAlexaRank(domain) 164 | data = fmt.Sprintf("%v,%v,%s", raw, domain, rank) 165 | } else if !jsonOutput { 166 | data = fmt.Sprintf("%v,%v", raw, domain) 167 | } 168 | 169 | if jsonOutput { 170 | certInfo.Domains = append(certInfo.Domains, data) 171 | } else { 172 | fmt.Println(data) 173 | } 174 | } 175 | } 176 | 177 | if jsonOutput { 178 | if data, err := jsoniter.MarshalToString(certInfo); err == nil { 179 | fmt.Println(data) 180 | } 181 | } 182 | 183 | return true 184 | 185 | } 186 | 187 | func getAlexaRank(raw string) (string, error) { 188 | rank := "-1" 189 | 190 | if strings.Contains(raw, "*.") { 191 | raw = strings.ReplaceAll(raw, "*.", "") 192 | } 193 | 194 | // sub.example.com --> example.com 195 | suffix, ok := publicsuffix.PublicSuffix(raw) 196 | if ok { 197 | root := strings.ReplaceAll(raw, fmt.Sprintf(".%s", suffix), "") 198 | if strings.Contains(root, ".") { 199 | parts := strings.Split(root, ".") 200 | root = parts[len(parts)-1] 201 | raw = fmt.Sprintf("%s.%s", root, suffix) 202 | } 203 | } 204 | 205 | resp, err := http.Get("http://data.alexa.com/data?cli=10&dat=snbamz&url=" + raw) 206 | if err != nil { 207 | return rank, err 208 | } 209 | 210 | defer resp.Body.Close() 211 | alexaData, err := ioutil.ReadAll(resp.Body) 212 | if err != nil { 213 | return rank, err 214 | } 215 | 216 | decoder := xml.NewDecoder(strings.NewReader(string(alexaData))) 217 | for { 218 | token, _ := decoder.Token() 219 | if token == nil { 220 | break 221 | } 222 | 223 | switch startElement := token.(type) { 224 | case xml.StartElement: 225 | if startElement.Name.Local == "POPULARITY" { 226 | if len(startElement.Attr) >= 2 { 227 | rank = startElement.Attr[1].Value 228 | } 229 | } 230 | } 231 | } 232 | return rank, nil 233 | } 234 | 235 | func GetCertificatesInfo(address string) (string, error) { 236 | if !strings.Contains(address, ":") { 237 | address = fmt.Sprintf("%s:443", address) 238 | } 239 | conn, err := tls.Dial("tcp", address, &tls.Config{ 240 | InsecureSkipVerify: true, 241 | }) 242 | if err != nil { 243 | return "", err 244 | } 245 | defer conn.Close() 246 | return fmt.Sprintf("%v", 
conn.ConnectionState().PeerCertificates[0].Subject), nil 247 | } 248 | -------------------------------------------------------------------------------- /cleansub/README.md: -------------------------------------------------------------------------------- 1 | ## Clean Subdomains 2 | 3 | Clean your subdomain list 4 | 5 | ## Install 6 | 7 | ``` 8 | go get -u github.com/j3ssie/go-auxs/cleansub 9 | ``` 10 | 11 | ## Usage 12 | ```shell 13 | cat subdomains.txt 14 | target.com 15 | sub1.target.com 16 | aatarget.com 17 | sub2.target.com 18 | 19 | cat subdomains.txt | cleansub 20 | target.com 21 | sub1.target.com 22 | sub2.target.com 23 | ``` -------------------------------------------------------------------------------- /cleansub/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "os" 8 | "regexp" 9 | "strings" 10 | "sync" 11 | ) 12 | 13 | var nameStripRE = regexp.MustCompile(`^u[0-9a-f]{4}|20|22|25|2b|2f|3d|3a|40`) 14 | var subdomainRE = regexp.MustCompile(`(([a-zA-Z0-9]{1}|[_a-zA-Z0-9]{1}[_a-zA-Z0-9-]{0,61}[a-zA-Z0-9]{1})[.]{1})+[a-zA-Z]{2,61}`) 15 | var subwithIPv4 = regexp.MustCompile(`(?m)[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.`) 16 | var subwithIPv42 = regexp.MustCompile(`(?m)[0-9]{1,3}\-[0-9]{1,3}\-[0-9]{1,3}`) 17 | 18 | var ( 19 | target string 20 | 21 | concurrency int 22 | ) 23 | 24 | func main() { 25 | flag.StringVar(&target, "t", "", "Specify target to clean") 26 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 27 | 28 | flag.Parse() 29 | 30 | var wg sync.WaitGroup 31 | jobs := make(chan string, concurrency) 32 | 33 | for i := 0; i < concurrency; i++ { 34 | wg.Add(1) 35 | go func() { 36 | defer wg.Done() 37 | for job := range jobs { 38 | checkClean(job) 39 | } 40 | }() 41 | } 42 | 43 | sc := bufio.NewScanner(os.Stdin) 44 | go func() { 45 | for sc.Scan() { 46 | url := strings.TrimSpace(sc.Text()) 47 | if err := sc.Err(); err == nil && url != "" { 48 | jobs <- url 49 | } 50 | } 51 | close(jobs) 52 | }() 53 | wg.Wait() 54 | } 55 | 56 | func checkClean(line string) { 57 | name := subdomainRE.FindString(line) 58 | name = strings.ToLower(name) 59 | for { 60 | name = strings.Trim(name, "-.") 61 | if i := nameStripRE.FindStringIndex(name); i != nil { 62 | name = name[i[1]:] 63 | } else { 64 | break 65 | } 66 | } 67 | name = removeAsteriskLabel(name) 68 | 69 | if target != "" { 70 | // only accept sub.target.com and target.com 71 | if !strings.Contains(name, "."+target) && (name != target) { 72 | return 73 | } 74 | } 75 | 76 | isWildCard := removeWildcard(name) 77 | if isWildCard { 78 | return 79 | } 80 | 81 | fmt.Println(name) 82 | } 83 | 84 | func removeWildcard(s string) bool { 85 | matched := subwithIPv4.MatchString(s) 86 | if matched { 87 | return matched 88 | } 89 | 90 | matched = subwithIPv42.MatchString(s) 91 | if matched { 92 | return matched 93 | } 94 | return false 95 | } 96 | 97 | func removeAsteriskLabel(s string) string { 98 | startIndex := strings.LastIndex(s, "*.") 99 | 100 | if startIndex == -1 { 101 | return s 102 | } 103 | 104 | return s[startIndex+2:] 105 | } 106 | -------------------------------------------------------------------------------- /durl/README.md: -------------------------------------------------------------------------------- 1 | ## Diff URLs 2 | 3 | Strip out similar URLs by unique hostname-path-paramName 4 | 5 | ## Install 6 | ``` 7 | go get -u github.com/j3ssie/go-auxs/durl 8 | ``` 9 | 10 | ## Usage 11 | ``` 12 | cat wayback_urls.txt | 
durl | tee differ_urls.txt 13 | ``` -------------------------------------------------------------------------------- /durl/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "crypto/sha1" 6 | "flag" 7 | "fmt" 8 | "net/url" 9 | "os" 10 | "regexp" 11 | "sort" 12 | "strings" 13 | ) 14 | 15 | // Strip out similar URLs by unique hostname-path-paramName 16 | // cat urls.txt | durl 17 | // only grep url have parameter 18 | // cat urls.txt | durl -p 19 | 20 | var ( 21 | blacklist bool 22 | haveParam bool 23 | ext string 24 | ) 25 | 26 | func main() { 27 | // cli aguments 28 | flag.BoolVar(&blacklist, "b", true, "Enable blacklist") 29 | flag.BoolVar(&haveParam, "p", false, "Enable check if input have parameter") 30 | flag.StringVar(&ext, "e", "", "Blacklist regex string (default is static extentions)") 31 | flag.Parse() 32 | 33 | // default blacklist 34 | if ext == "" { 35 | ext = `(?i)\.(png|apng|bmp|gif|ico|cur|jpg|jpeg|jfif|pjp|pjpeg|svg|tif|tiff|webp|xbm|3gp|aac|flac|mpg|mpeg|mp3|mp4|m4a|m4v|m4p|oga|ogg|ogv|mov|wav|webm|eot|woff|woff2|ttf|otf|css)(?:\?|#|$)` 36 | } 37 | 38 | data := make(map[string]string) 39 | sc := bufio.NewScanner(os.Stdin) 40 | for sc.Scan() { 41 | raw := strings.TrimSpace(sc.Text()) 42 | if sc.Err() != nil && raw == "" { 43 | continue 44 | } 45 | 46 | if blacklist { 47 | if IsBlacklisted(raw) { 48 | continue 49 | } 50 | } 51 | 52 | hash := hashUrl(raw) 53 | if hash == "" { 54 | continue 55 | } 56 | _, exist := data[hash] 57 | if !exist { 58 | data[hash] = raw 59 | fmt.Println(raw) 60 | } 61 | } 62 | } 63 | 64 | // IsBlacklisted check if url is blacklisted or not 65 | func IsBlacklisted(raw string) bool { 66 | r, err := regexp.Compile(ext) 67 | if err != nil { 68 | return false 69 | } 70 | isBlacklisted := r.MatchString(raw) 71 | if isBlacklisted { 72 | return true 73 | } 74 | 75 | // check if have param 76 | if haveParam { 77 | p, _ := regexp.Compile(`\?.*\=`) 78 | return !p.MatchString(raw) 79 | } 80 | 81 | return false 82 | } 83 | 84 | // hashUrl gen unique hash base on url 85 | func hashUrl(raw string) string { 86 | u, err := url.Parse(raw) 87 | if err != nil { 88 | return "" 89 | } 90 | 91 | var queries []string 92 | for k := range u.Query() { 93 | queries = append(queries, k) 94 | } 95 | sort.Strings(queries) 96 | query := strings.Join(queries, "-") 97 | data := fmt.Sprintf("%v-%v-%v", u.Hostname(), u.Path, query) 98 | return genHash(data) 99 | } 100 | 101 | // genHash gen SHA1 hash from string 102 | func genHash(text string) string { 103 | h := sha1.New() 104 | h.Write([]byte(text)) 105 | hashed := h.Sum(nil) 106 | return fmt.Sprintf("%v", hashed) 107 | } 108 | -------------------------------------------------------------------------------- /eip/README.md: -------------------------------------------------------------------------------- 1 | ## Extend IP 2 | 3 | Extend the IP range by CIDR 4 | 5 | ## Install 6 | 7 | ``` 8 | go get -u github.com/j3ssie/go-auxs/eip 9 | ``` 10 | 11 | ## Usage 12 | ```shell 13 | # Basic usage 14 | echo '1.2.3.4/20' | eip -s 24 15 | 16 | # Append common port to ip 17 | 18 | echo '1.2.3.4/24' | eip -p s 19 | ``` -------------------------------------------------------------------------------- /eip/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/eip 2 | 3 | go 1.18 4 | 5 | require github.com/c-robinson/iplib v1.0.3 6 | 
-------------------------------------------------------------------------------- /eip/go.sum: -------------------------------------------------------------------------------- 1 | github.com/c-robinson/iplib v1.0.3 h1:NG0UF0GoEsrC1/vyfX1Lx2Ss7CySWl3KqqXh3q4DdPU= 2 | github.com/c-robinson/iplib v1.0.3/go.mod h1:i3LuuFL1hRT5gFpBRnEydzw8R6yhGkF4szNDIbF8pgo= 3 | -------------------------------------------------------------------------------- /eip/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "net" 8 | "net/url" 9 | "os" 10 | "strings" 11 | 12 | "github.com/c-robinson/iplib" 13 | ) 14 | 15 | // Extend the IP range by CIDR 16 | // Usage: echo '1.2.3.4/24' | eip -s 32 17 | // Usage: echo '1.2.3.4/24' | eip -p small 18 | 19 | var ( 20 | unique bool 21 | pURL bool 22 | sub int 23 | port string 24 | ports []string 25 | ) 26 | 27 | var ( 28 | pSmall []string 29 | pMedium []string 30 | pLarge []string 31 | ) 32 | 33 | func main() { 34 | pSmall = []string{"80", "443", "8000", "8080", "8081", "8443", "9000", "9200"} 35 | pMedium = append(pSmall, []string{"81", "3000", "6066", "6443", "8008", "8083", "8834", "8888", "9091", "9443"}...) 36 | pLarge = append(pMedium, []string{"591", "2082", "2087", "2095", "2096", "4444", "4040", "6066", "9092", "10250", "10251"}...) 37 | 38 | // cli arguments 39 | flag.BoolVar(&pURL, "U", true, "parse url pattern too (only affected with '-p' option)") 40 | flag.BoolVar(&unique, "u", true, "unique result") 41 | flag.IntVar(&sub, "s", 32, "CIDR subnet (e.g: 24, 22)") 42 | flag.StringVar(&port, "p", "", "Append port after each IP (some predefined value: full, xlarge, large,medium, small or f,x,l,m,s)") 43 | flag.Parse() 44 | 45 | if port != "" { 46 | ports = genPorts(port) 47 | } 48 | 49 | var result []string 50 | sc := bufio.NewScanner(os.Stdin) 51 | for sc.Scan() { 52 | job := strings.TrimSpace(sc.Text()) 53 | data := extendRange(job, sub) 54 | if len(data) > 0 { 55 | result = append(result, data...) 56 | } 57 | 58 | if pURL { 59 | data := extendURL(job) 60 | if len(data) > 0 { 61 | result = append(result, data...) 
62 | } 63 | } 64 | 65 | } 66 | 67 | if !unique { 68 | fmt.Println(strings.Join(result, "\n")) 69 | return 70 | } 71 | 72 | unique := make(map[string]bool) 73 | for _, v := range result { 74 | if !unique[v] { 75 | unique[v] = true 76 | fmt.Println(v) 77 | } 78 | } 79 | } 80 | 81 | func extendURL(raw string) []string { 82 | var result []string 83 | u, err := url.Parse(raw) 84 | 85 | if err != nil || u.Scheme == "" || strings.Contains(u.Scheme, ".") { 86 | raw = fmt.Sprintf("http://%v", raw) 87 | u, err = url.Parse(raw) 88 | if err != nil { 89 | return result 90 | } 91 | } 92 | 93 | for _, p := range ports { 94 | result = append(result, fmt.Sprintf("%s:%s", u.Hostname(), p)) 95 | } 96 | return result 97 | } 98 | 99 | func extendRange(rangeIP string, sub int) []string { 100 | var result []string 101 | _, ipna, err := iplib.ParseCIDR(rangeIP) 102 | 103 | if err != nil { 104 | ip := net.ParseIP(rangeIP) 105 | if ip != nil { 106 | if port == "" || sub != 32 { 107 | ipb := net.ParseIP(ip.String()) 108 | ipnb := iplib.NewNet(ipb, sub) 109 | result = append(result, ipnb.String()) 110 | } else { 111 | for _, p := range ports { 112 | fmt.Printf("%s:%s\n", ip, p) 113 | } 114 | } 115 | } 116 | return result 117 | } 118 | 119 | extendedIPs, err := ipna.Subnet(sub) 120 | 121 | if err != nil { 122 | // change the subnet mask 123 | if err.Error() == "illegal mask length provided" { 124 | rangeIP = fmt.Sprintf("%v/%v", ipna.IP.String(), sub) 125 | _, ipna, err := iplib.ParseCIDR(rangeIP) 126 | if err == nil { 127 | extendedIPs, err = ipna.Subnet(sub) 128 | if err != nil { 129 | fmt.Fprint(os.Stderr, rangeIP, " -- ", err, "\n") 130 | return result 131 | } 132 | } 133 | } else { 134 | fmt.Fprint(os.Stderr, rangeIP, " -- ", err, "\n") 135 | return result 136 | } 137 | } 138 | 139 | for _, item := range extendedIPs { 140 | ip := item.String() 141 | if sub == 32 { 142 | ip = item.IP.String() 143 | } 144 | if port == "" || sub != 32 { 145 | result = append(result, ip) 146 | } else { 147 | for _, p := range ports { 148 | ipWithPort := fmt.Sprintf("%v:%v", ip, p) 149 | result = append(result, ipWithPort) 150 | } 151 | } 152 | } 153 | return result 154 | } 155 | 156 | func genPorts(port string) []string { 157 | switch port { 158 | case "small", "s": 159 | return pSmall 160 | 161 | case "medium", "m": 162 | return pMedium 163 | 164 | case "large", "l": 165 | return pLarge 166 | 167 | case "xlarge", "x": 168 | return []string{"80", "443", "81", "300", "591", "593", "832", "981", "1010", "1311", "2082", "2087", "2095", "2096", "2480", "3000", "3128", "3333", "4243", "4567", "4711", "4712", "4993", "5000", "5104", "5108", "5800", "6543", "7000", "7396", "7474", "8000", "8001", "8008", "8014", "8042", "8069", "8080", "8081", "8083", "8088", "8090", "8091", "8118", "8123", "8172", "8222", "8243", "8280", "8281", "8333", "8443", "8500", "8834", "8880", "8888", "8983", "9000", "9043", "9060", "9080", "9090", "9091", "9200", "9443", "9800", "9981", "12443", "16080", "18091", "18092", "20720", "28017"} 169 | case "full": 170 | var ports []string 171 | for i := 1; i <= 65535; i++ { 172 | ports = append(ports, fmt.Sprintf("%v", i)) 173 | } 174 | return ports 175 | case "f": 176 | var ports []string 177 | for i := 1; i <= 65535; i++ { 178 | ports = append(ports, fmt.Sprintf("%v", i)) 179 | } 180 | return ports 181 | default: 182 | return []string{"80", "443", "8000", "8080", "8443"} 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /favinfo/go.mod: 
-------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/favinfo 2 | 3 | go 1.16 4 | 5 | require github.com/twmb/murmur3 v1.1.6 6 | -------------------------------------------------------------------------------- /favinfo/go.sum: -------------------------------------------------------------------------------- 1 | github.com/twmb/murmur3 v1.1.6 h1:mqrRot1BRxm+Yct+vavLMou2/iJt0tNVTTC0QoIjaZg= 2 | github.com/twmb/murmur3 v1.1.6/go.mod h1:Qq/R7NUyOfr65zD+6Q5IHKsJLwP7exErjN6lyyq3OSQ= 3 | -------------------------------------------------------------------------------- /favinfo/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "crypto/tls" 7 | "encoding/base64" 8 | "flag" 9 | "fmt" 10 | "io/ioutil" 11 | "net" 12 | "net/http" 13 | "net/url" 14 | "os" 15 | "strings" 16 | "sync" 17 | "time" 18 | 19 | "github.com/twmb/murmur3" 20 | ) 21 | 22 | var ( 23 | jsonOutput bool 24 | concurrency int 25 | ) 26 | 27 | func main() { 28 | // cli arguments 29 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 30 | flag.BoolVar(&jsonOutput, "json", false, "Show Output as Json format") 31 | flag.Parse() 32 | 33 | var wg sync.WaitGroup 34 | jobs := make(chan string, concurrency) 35 | 36 | for i := 0; i < concurrency; i++ { 37 | wg.Add(1) 38 | go func() { 39 | defer wg.Done() 40 | for job := range jobs { 41 | GetFavHash(job) 42 | } 43 | }() 44 | } 45 | 46 | sc := bufio.NewScanner(os.Stdin) 47 | go func() { 48 | for sc.Scan() { 49 | raw := strings.TrimSpace(sc.Text()) 50 | if err := sc.Err(); err == nil && raw != "" { 51 | jobs <- raw 52 | } 53 | } 54 | close(jobs) 55 | }() 56 | wg.Wait() 57 | } 58 | 59 | func GetFavHash(URL string) string { 60 | u, err := url.Parse(URL) 61 | if err != nil { 62 | return "" 63 | } 64 | hashURL := fmt.Sprintf("%v://%v/favicon.ico", u.Scheme, u.Host) 65 | err, data := BigResponseReq(hashURL) 66 | if err != nil { 67 | hashURL = URL 68 | err, data = BigResponseReq(hashURL) 69 | if data == "" { 70 | return "" 71 | } 72 | } 73 | 74 | hashedFav := Mmh3Hash32(StandBase64([]byte(data))) 75 | fmt.Printf("%s,%s\n", hashURL, hashedFav) 76 | return hashedFav 77 | } 78 | 79 | func Mmh3Hash32(raw []byte) string { 80 | h32 := murmur3.New32() 81 | _, err := h32.Write(raw) 82 | if err != nil { 83 | return "" 84 | } 85 | return fmt.Sprintf("%d", int32(h32.Sum32())) 86 | } 87 | 88 | // StandBase64 base64 from bytes 89 | func StandBase64(data []byte) []byte { 90 | raw := base64.StdEncoding.EncodeToString(data) 91 | var buffer bytes.Buffer 92 | for i := 0; i < len(raw); i++ { 93 | ch := raw[i] 94 | buffer.WriteByte(ch) 95 | if (i+1)%76 == 0 { 96 | buffer.WriteByte('\n') 97 | } 98 | } 99 | buffer.WriteByte('\n') 100 | return buffer.Bytes() 101 | } 102 | 103 | func BigResponseReq(baseUrl string) (error, string) { 104 | fmt.Fprintf(os.Stderr, "sending get %s\n", baseUrl) 105 | 106 | client := &http.Client{ 107 | Timeout: time.Duration(10*3) * time.Second, 108 | Transport: &http.Transport{ 109 | DialContext: (&net.Dialer{ 110 | Timeout: time.Second * 60, 111 | }).DialContext, 112 | MaxIdleConns: 1000, 113 | MaxIdleConnsPerHost: 500, 114 | MaxConnsPerHost: 500, 115 | TLSClientConfig: &tls.Config{InsecureSkipVerify: true, Renegotiation: tls.RenegotiateOnceAsClient}, 116 | }, 117 | CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, 118 | } 119 | 120 | req, _ := http.NewRequest("GET", 
baseUrl, nil) 121 | req.Header.Add("User-Agent", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36") 122 | resp, err := client.Do(req) 123 | if err != nil { 124 | fmt.Fprintf(os.Stderr, "sending get %v\n", err) 125 | return err, "" 126 | } 127 | defer resp.Body.Close() 128 | 129 | content, err := ioutil.ReadAll(resp.Body) 130 | if err != nil { 131 | fmt.Fprintf(os.Stderr, "sending get %v\n", err) 132 | return err, "" 133 | } 134 | 135 | if resp.StatusCode != 200 { 136 | fmt.Println(string(content)) 137 | return fmt.Errorf("no favicon"), string(content) 138 | } 139 | 140 | return nil, string(content) 141 | } 142 | -------------------------------------------------------------------------------- /ftld/README.md: -------------------------------------------------------------------------------- 1 | ## ftld 2 | 3 | Finding more TLD from Public Suffix 4 | 5 | ## Usage 6 | 7 | ```shell 8 | # simple usage 9 | echo 'target.com' | ftld -c 50 10 | 11 | target.es 12 | target.cn 13 | target.com.cn 14 | 15 | # input the org directly 16 | ftld -c 50 -o 'target' 17 | 18 | target.es 19 | target.cn 20 | www.target.com.cn 21 | 22 | # with prefix 23 | echo 'target.com' | ftld -c 50 -p 'www' -p 'dev' 24 | 25 | dev.target.es 26 | www.target.cn 27 | ``` 28 | -------------------------------------------------------------------------------- /ftld/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/ftld 2 | 3 | go 1.15 4 | 5 | require ( 6 | github.com/markbates/pkger v0.17.1 7 | github.com/panjf2000/ants v1.3.0 8 | github.com/thoas/go-funk v0.7.0 9 | golang.org/x/net v0.0.0-20210119194325-5f4716e94777 10 | ) 11 | -------------------------------------------------------------------------------- /ftld/go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 2 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 3 | github.com/gobuffalo/here v0.6.0 h1:hYrd0a6gDmWxBM4TnrGw8mQg24iSVoIkHEk7FodQcBI= 4 | github.com/gobuffalo/here v0.6.0/go.mod h1:wAG085dHOYqUpf+Ap+WOdrPTp5IYcDAs/x7PLa8Y5fM= 5 | github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= 6 | github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= 7 | github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= 8 | github.com/markbates/pkger v0.17.1 h1:/MKEtWqtc0mZvu9OinB9UzVN9iYCwLWuyUv4Bw+PCno= 9 | github.com/markbates/pkger v0.17.1/go.mod h1:0JoVlrol20BSywW79rN3kdFFsE5xYM+rSCQDXbLhiuI= 10 | github.com/panjf2000/ants v1.3.0 h1:8pQ+8leaLc9lys2viEEr8md0U4RN6uOSUCE9bOYjQ9M= 11 | github.com/panjf2000/ants v1.3.0/go.mod h1:AaACblRPzq35m1g3enqYcxspbbiOJJYaxU2wMpm1cXY= 12 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 13 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 14 | github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 15 | github.com/thoas/go-funk v0.7.0 h1:GmirKrs6j6zJbhJIficOsz2aAI7700KsU/5YrdHRM1Y= 16 | github.com/thoas/go-funk v0.7.0/go.mod h1:+IWnUfUmFO1+WVYQWQtIJHeRRdaIyyYglZN7xzUPe4Q= 17 | golang.org/x/net v0.0.0-20210119194325-5f4716e94777 h1:003p0dJM77cxMSyCPFphvZf/Y5/NXf5fzg6ufd1/Oew= 18 | golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod 
h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= 19 | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 20 | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= 21 | golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= 22 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= 23 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 24 | gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 25 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 26 | gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 27 | -------------------------------------------------------------------------------- /ftld/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "github.com/markbates/pkger" 8 | "github.com/panjf2000/ants" 9 | "github.com/thoas/go-funk" 10 | "golang.org/x/net/publicsuffix" 11 | "log" 12 | "net" 13 | 14 | "io/ioutil" 15 | "net/url" 16 | "os" 17 | "path/filepath" 18 | "sort" 19 | "strings" 20 | "sync" 21 | "unicode" 22 | ) 23 | 24 | // public suffix finder 25 | // cat /tmp/list_of_IP | psuff -c 100 26 | 27 | type arrayFlags []string 28 | 29 | func (i *arrayFlags) String() string { 30 | return "any string" 31 | } 32 | 33 | func (i *arrayFlags) Set(value string) error { 34 | *i = append(*i, value) 35 | return nil 36 | } 37 | 38 | var ( 39 | verbose bool 40 | extra bool 41 | org string 42 | concurrency int 43 | publicSuffixContents []string 44 | prefixes arrayFlags 45 | domains arrayFlags 46 | ) 47 | 48 | var logger *log.Logger 49 | 50 | func main() { 51 | // cli aguments 52 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 53 | flag.StringVar(&org, "org", "", "Specific Org") 54 | flag.Var(&prefixes, "p", "prefix domains") 55 | flag.Var(&domains, "d", "input domains") 56 | flag.BoolVar(&verbose, "v", false, "Verbose output") 57 | flag.Parse() 58 | 59 | // prepare suffix data 60 | // update: wget https://publicsuffix.org/list/public_suffix_list.dat 61 | // pkger will discover that we need example.txt and embed it 62 | f, err := pkger.Open("/public_suffix_list.dat") 63 | if err != nil { 64 | panic(err) 65 | } 66 | contents, err := ioutil.ReadAll(f) 67 | if err != nil { 68 | panic(err) 69 | } 70 | ParseSuffix(string(contents)) 71 | 72 | stat, _ := os.Stdin.Stat() 73 | if (stat.Mode() & os.ModeCharDevice) != 0 { 74 | args := os.Args[1:] 75 | sort.Strings(args) 76 | raw := args[len(args)-1] 77 | domains = append(domains, raw) 78 | } 79 | 80 | // really start to do something 81 | var wg sync.WaitGroup 82 | p, _ := ants.NewPoolWithFunc(concurrency, func(i interface{}) { 83 | defer wg.Done() 84 | job := i.(string) 85 | 86 | line := getOrg(job) 87 | log.Printf("Parsed org: %s\n", line) 88 | doResolving(line) 89 | }, ants.WithPreAlloc(true)) 90 | defer p.Release() 91 | 92 | // reading input 93 | 94 | // detect if anything came from std 95 | if (stat.Mode() & os.ModeCharDevice) == 0 { 96 | sc := bufio.NewScanner(os.Stdin) 97 | for sc.Scan() { 98 | raw := strings.TrimSpace(sc.Text()) 99 | if err := sc.Err(); err == nil && raw != "" { 100 | domains = append(domains, raw) 101 | } 102 | } 103 | } 104 | domains = funk.UniqString(domains) 
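// Note on the flow below: each queued input is reduced to its "org" (registrable label) via getOrg,
// then doResolving brute-forces <org>.<suffix> for every ASCII entry of the bundled public suffix list
// (plus <prefix>.<org>.<suffix> for each -p value) and prints only the candidates that resolve
// via net.LookupHost (with their IPs when -v is set).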
105 | if len(domains) > 0 { 106 | for _, domain := range domains { 107 | wg.Add(1) 108 | raw := strings.TrimSpace(domain) 109 | log.Printf("Parsed processing: %s\n", raw) 110 | p.Invoke(raw) 111 | } 112 | } 113 | wg.Wait() 114 | } 115 | 116 | func doResolving(org string) { 117 | var dg sync.WaitGroup 118 | p, _ := ants.NewPoolWithFunc(concurrency, func(i interface{}) { 119 | defer dg.Done() 120 | domain := i.(string) 121 | lookup(domain) 122 | 123 | }, ants.WithPreAlloc(true)) 124 | defer p.Release() 125 | 126 | for _, suffix := range publicSuffixContents { 127 | dg.Add(1) 128 | job := fmt.Sprintf("%s.%s", org, suffix) 129 | p.Invoke(job) 130 | 131 | if len(prefixes) > 0 { 132 | for _, prefix := range prefixes { 133 | dg.Add(1) 134 | job := fmt.Sprintf("%s.%s.%s", prefix, org, suffix) 135 | p.Invoke(job) 136 | } 137 | } 138 | } 139 | dg.Wait() 140 | } 141 | 142 | func lookup(domain string) { 143 | if resolved, err := net.LookupHost(domain); err == nil { 144 | if verbose { 145 | //resolved = funk.UniqString(resolved) 146 | for _, ip := range resolved { 147 | fmt.Printf("%s,%s\n", domain, ip) 148 | } 149 | return 150 | } 151 | fmt.Println(domain) 152 | } 153 | } 154 | 155 | func ParseSuffix(content string) { 156 | data := strings.Split(content, "\n") 157 | for _, line := range data { 158 | line = strings.TrimSpace(line) 159 | if strings.HasPrefix(line, "//") || strings.HasPrefix(line, "//") { 160 | continue 161 | } 162 | 163 | if isASCII(line) { 164 | publicSuffixContents = append(publicSuffixContents, line) 165 | } 166 | } 167 | log.Printf("Parsed %v suffixes\n", len(publicSuffixContents)) 168 | publicSuffixContents = funk.UniqString(publicSuffixContents) 169 | } 170 | 171 | func isASCII(s string) bool { 172 | for i := 0; i < len(s); i++ { 173 | if s[i] > unicode.MaxASCII { 174 | return false 175 | } 176 | } 177 | return true 178 | } 179 | 180 | func getOrg(raw string) string { 181 | data := ParseTarget(raw) 182 | if len(data) <= 1 { 183 | return raw 184 | } 185 | 186 | return data["Org"] 187 | } 188 | 189 | // ParseTarget parsing target and some variable for template 190 | func ParseTarget(raw string) map[string]string { 191 | target := make(map[string]string) 192 | if raw == "" { 193 | return target 194 | } 195 | target["Target"] = raw 196 | u, err := url.Parse(raw) 197 | 198 | // something wrong so parsing it again 199 | if err != nil || u.Scheme == "" || strings.Contains(u.Scheme, ".") { 200 | raw = fmt.Sprintf("https://%v", raw) 201 | u, err = url.Parse(raw) 202 | if err != nil { 203 | return target 204 | } 205 | // fmt.Println("parse again") 206 | } 207 | var hostname string 208 | var query string 209 | port := u.Port() 210 | // var domain string 211 | domain := u.Hostname() 212 | 213 | query = u.RawQuery 214 | if u.Port() == "" { 215 | if strings.Contains(u.Scheme, "https") { 216 | port = "443" 217 | } else { 218 | port = "80" 219 | } 220 | 221 | hostname = u.Hostname() 222 | } else { 223 | // ignore common port in Host 224 | if u.Port() == "443" || u.Port() == "80" { 225 | hostname = u.Hostname() 226 | } else { 227 | hostname = u.Hostname() + ":" + u.Port() 228 | } 229 | } 230 | 231 | target["Scheme"] = u.Scheme 232 | target["Path"] = u.Path 233 | target["Domain"] = domain 234 | 235 | target["Org"] = domain 236 | suffix, ok := publicsuffix.PublicSuffix(domain) 237 | if ok { 238 | target["Org"] = strings.Replace(domain, fmt.Sprintf(".%s", suffix), "", -1) 239 | } else { 240 | if strings.Contains(domain, ".") { 241 | parts := strings.Split(domain, ".") 242 | if len(parts) == 2 { 243 | 
target["Org"] = parts[0] 244 | } else { 245 | target["Org"] = parts[len(parts)-2] 246 | } 247 | } 248 | } 249 | 250 | target["Host"] = hostname 251 | target["Port"] = port 252 | target["RawQuery"] = query 253 | 254 | if (target["RawQuery"] != "") && (port == "80" || port == "443") { 255 | target["URL"] = fmt.Sprintf("%v://%v%v?%v", target["Scheme"], target["Host"], target["Path"], target["RawQuery"]) 256 | } else if port != "80" && port != "443" { 257 | target["URL"] = fmt.Sprintf("%v://%v:%v%v?%v", target["Scheme"], target["Domain"], target["Port"], target["Path"], target["RawQuery"]) 258 | } else { 259 | target["URL"] = fmt.Sprintf("%v://%v%v", target["Scheme"], target["Host"], target["Path"]) 260 | } 261 | 262 | uu, _ := url.Parse(raw) 263 | target["BaseURL"] = fmt.Sprintf("%v://%v", uu.Scheme, uu.Host) 264 | target["Extension"] = filepath.Ext(target["BaseURL"]) 265 | 266 | return target 267 | } 268 | -------------------------------------------------------------------------------- /ghd/README.md: -------------------------------------------------------------------------------- 1 | ## Github dorks 2 | 3 | ```bash 4 | $ cat dorks.txt 5 | 6 | https://github.com/search?q=%22{{.Raw}}%22+password&type=Code 7 | https://github.com/search?q=%22{{.Org}}%22+password&type=Code 8 | https://github.com/search?q=%22{{.Raw}}%22+npmrc%20_auth&type=Code 9 | https://github.com/search?q=%22{{.Org}}%22+npmrc%20_auth&type=Code 10 | 11 | $ ghd -d dorks.txt -u tesla.com 12 | 13 | https://github.com/search?q=%22tesla.com%22+password&type=Code 14 | https://github.com/search?q=%22tesla%22+password&type=Code 15 | https://github.com/search?q=%22tesla.com%22+npmrc%20_auth&type=Code 16 | https://github.com/search?q=%22tesla%22+npmrc%20_auth&type=Code 17 | 18 | ``` -------------------------------------------------------------------------------- /ghd/dorks.txt: -------------------------------------------------------------------------------- 1 | https://github.com/search?q=%22{{.Raw}}%22+password&type=Code 2 | https://github.com/search?q=%22{{.Org}}%22+password&type=Code 3 | https://github.com/search?q=%22{{.Raw}}%22+npmrc%20_auth&type=Code 4 | https://github.com/search?q=%22{{.Org}}%22+npmrc%20_auth&type=Code 5 | https://github.com/search?q=%22{{.Raw}}%22+dockercfg&type=Code 6 | https://github.com/search?q=%22{{.Org}}%22+dockercfg&type=Code 7 | https://github.com/search?q=%22{{.Raw}}%22+pem%20private&type=Code 8 | https://github.com/search?q=%22{{.Org}}%22+extension:pem%20private&type=Code 9 | https://github.com/search?q=%22{{.Raw}}%22+id_rsa&type=Code 10 | https://github.com/search?q=%22{{.Org}}%22+id_rsa&type=Code 11 | https://github.com/search?q=%22{{.Raw}}%22+aws_access_key_id&type=Code 12 | https://github.com/search?q=%22{{.Org}}%22+aws_access_key_id&type=Code 13 | https://github.com/search?q=%22{{.Raw}}%22+s3cfg&type=Code 14 | https://github.com/search?q=%22{{.Org}}%22+s3cfg&type=Code 15 | https://github.com/search?q=%22{{.Raw}}%22+htpasswd&type=Code 16 | https://github.com/search?q=%22{{.Org}}%22+htpasswd&type=Code 17 | https://github.com/search?q=%22{{.Raw}}%22+git-credentials&type=Code 18 | https://github.com/search?q=%22{{.Org}}%22+git-credentials&type=Code 19 | https://github.com/search?q=%22{{.Raw}}%22+bashrc%20password&type=Code 20 | https://github.com/search?q=%22{{.Org}}%22+bashrc%20password&type=Code 21 | https://github.com/search?q=%22{{.Raw}}%22+sshd_config&type=Code 22 | https://github.com/search?q=%22{{.Org}}%22+sshd_config&type=Code 23 | 
https://github.com/search?q=%22{{.Raw}}%22+xoxp%20OR%20xoxb%20OR%20xoxa&type=Code 24 | https://github.com/search?q=%22{{.Org}}%22+xoxp%20OR%20xoxb&type=Code 25 | https://github.com/search?q=%22{{.Raw}}%22+SECRET_KEY&type=Code 26 | https://github.com/search?q=%22{{.Org}}%22+SECRET_KEY&type=Code 27 | https://github.com/search?q=%22{{.Raw}}%22+client_secret&type=Code 28 | https://github.com/search?q=%22{{.Org}}%22+client_secret&type=Code 29 | https://github.com/search?q=%22{{.Raw}}%22+sshd_config&type=Code 30 | https://github.com/search?q=%22{{.Org}}%22+sshd_config&type=Code 31 | https://github.com/search?q=%22{{.Raw}}%22+github_token&type=Code 32 | https://github.com/search?q=%22{{.Org}}%22+github_token&type=Code 33 | https://github.com/search?q=%22{{.Raw}}%22+api_key&type=Code 34 | https://github.com/search?q=%22{{.Org}}%22+api_key&type=Code 35 | https://github.com/search?q=%22{{.Raw}}%22+FTP&type=Code 36 | https://github.com/search?q=%22{{.Org}}%22+FTP&type=Code 37 | https://github.com/search?q=%22{{.Raw}}%22+app_secret&type=Code 38 | https://github.com/search?q=%22{{.Org}}%22+app_secret&type=Code 39 | https://github.com/search?q=%22{{.Raw}}%22+passwd&type=Code 40 | https://github.com/search?q=%22{{.Org}}%22+passwd&type=Code 41 | https://github.com/search?q=%22{{.Raw}}%22+.env&type=Code 42 | https://github.com/search?q=%22{{.Org}}%22+.env&type=Code 43 | https://github.com/search?q=%22{{.Raw}}%22+.exs&type=Code 44 | https://github.com/search?q=%22{{.Org}}%22+.exs&type=Code 45 | https://github.com/search?q=%22{{.Raw}}%22+beanstalkd.yml&type=Code 46 | https://github.com/search?q=%22{{.Org}}%22+beanstalkd.yml&type=Code 47 | https://github.com/search?q=%22{{.Raw}}%22+deploy.rake&type=Code 48 | https://github.com/search?q=%22{{.Org}}%22+deploy.rake&type=Code 49 | https://github.com/search?q=%22{{.Raw}}%22+mysql&type=Code 50 | https://github.com/search?q=%22{{.Org}}%22+mysql&type=Code 51 | https://github.com/search?q=%22{{.Raw}}%22+credentials&type=Code 52 | https://github.com/search?q=%22{{.Org}}%22+credentials&type=Code 53 | https://github.com/search?q=%22{{.Raw}}%22+PWD&type=Code 54 | https://github.com/search?q=%22{{.Org}}%22+PWD&type=Code 55 | https://github.com/search?q=%22{{.Raw}}%22+deploy.rake&type=Code 56 | https://github.com/search?q=%22{{.Org}}%22+deploy.rake&type=Code 57 | https://github.com/search?q=%22{{.Raw}}%22+.bash_history&type=Code 58 | https://github.com/search?q=%22{{.Org}}%22+.bash_history&type=Code 59 | https://github.com/search?q=%22{{.Raw}}%22+.sls&type=Code 60 | https://github.com/search?q=%22{{.Org}}%22+PWD&type=Code 61 | https://github.com/search?q=%22{{.Raw}}%22+secrets&type=Code 62 | https://github.com/search?q=%22{{.Org}}%22+secrets&type=Code 63 | https://github.com/search?q=%22{{.Raw}}%22+composer.json&type=Code 64 | https://github.com/search?q=%22{{.Org}}%22+composer.json&type=Code -------------------------------------------------------------------------------- /ghd/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "flag" 7 | "fmt" 8 | "golang.org/x/net/publicsuffix" 9 | "net/url" 10 | "os" 11 | "path" 12 | "path/filepath" 13 | "strings" 14 | "text/template" 15 | ) 16 | 17 | var ( 18 | verbose bool 19 | concurrency int 20 | 21 | dorkFile string 22 | data string 23 | dataFile string 24 | ) 25 | 26 | func main() { 27 | // cli args 28 | flag.StringVar(&data, "u", "", "URL to open") 29 | flag.StringVar(&dataFile, "U", "", "URL file to open") 30 | 
flag.StringVar(&dorkFile, "d", "", "Dorks file") 31 | flag.BoolVar(&verbose, "v", false, "verbose mode") 32 | flag.IntVar(&concurrency, "c", 5, "number of tab at a time") 33 | flag.Parse() 34 | 35 | // detect if anything came from std 36 | var templateData, urls []string 37 | 38 | stat, _ := os.Stdin.Stat() 39 | if (stat.Mode() & os.ModeCharDevice) == 0 { 40 | sc := bufio.NewScanner(os.Stdin) 41 | for sc.Scan() { 42 | target := strings.TrimSpace(sc.Text()) 43 | if err := sc.Err(); err == nil && target != "" { 44 | urls = append(urls, target) 45 | } 46 | } 47 | } 48 | 49 | // get url input 50 | if data != "" { 51 | urls = append(urls, data) 52 | } 53 | if dataFile != "" { 54 | urls = append(urls, ReadingLines(dataFile)...) 55 | } 56 | 57 | // get dork data 58 | if dorkFile == "" { 59 | goPath, ok := os.LookupEnv("GOPATH") 60 | if ok { 61 | dorkFile = path.Join(goPath, "src/github.com/j3ssie/go-auxs/ghd/dorks.txt") 62 | } 63 | 64 | ghDork, ok := os.LookupEnv("GH_DORKS") 65 | if ok { 66 | dorkFile = ghDork 67 | } 68 | } 69 | 70 | if dorkFile == "" { 71 | fmt.Fprintf(os.Stderr, "Need to provide dork file via -d dorks.txt \n") 72 | os.Exit(-1) 73 | } 74 | templateData = ReadingLines(dorkFile) 75 | 76 | for _, u := range urls { 77 | data := ParseURL(u) 78 | for _, raw := range templateData { 79 | out := RenderTemplate(raw, data) 80 | fmt.Println(out) 81 | } 82 | } 83 | } 84 | 85 | func RenderTemplate(format string, data map[string]string) string { 86 | // ResolveData resolve template from signature file 87 | t := template.Must(template.New("").Parse(format)) 88 | buf := &bytes.Buffer{} 89 | err := t.Execute(buf, data) 90 | if err != nil { 91 | return format 92 | } 93 | return buf.String() 94 | } 95 | 96 | func ParseURL(raw string) map[string]string { 97 | target := make(map[string]string) 98 | if raw == "" { 99 | return target 100 | } 101 | target["Raw"] = raw 102 | u, err := url.Parse(raw) 103 | 104 | // something wrong so parsing it again 105 | if err != nil || u.Scheme == "" || strings.Contains(u.Scheme, ".") { 106 | raw = fmt.Sprintf("https://%v", raw) 107 | u, err = url.Parse(raw) 108 | if err != nil { 109 | return target 110 | } 111 | // fmt.Println("parse again") 112 | } 113 | var hostname string 114 | var query string 115 | port := u.Port() 116 | // var domain string 117 | domain := u.Hostname() 118 | 119 | query = u.RawQuery 120 | if u.Port() == "" { 121 | if strings.Contains(u.Scheme, "https") { 122 | port = "443" 123 | } else { 124 | port = "80" 125 | } 126 | 127 | hostname = u.Hostname() 128 | } else { 129 | // ignore common port in Host 130 | if u.Port() == "443" || u.Port() == "80" { 131 | hostname = u.Hostname() 132 | } else { 133 | hostname = u.Hostname() + ":" + u.Port() 134 | } 135 | } 136 | 137 | target["Scheme"] = u.Scheme 138 | target["Path"] = u.Path 139 | target["Domain"] = domain 140 | 141 | target["Org"] = domain 142 | suffix, ok := publicsuffix.PublicSuffix(domain) 143 | if ok { 144 | target["Org"] = strings.Replace(domain, fmt.Sprintf(".%s", suffix), "", -1) 145 | } else { 146 | if strings.Contains(domain, ".") { 147 | parts := strings.Split(domain, ".") 148 | if len(parts) == 2 { 149 | target["Org"] = parts[0] 150 | } else { 151 | target["Org"] = parts[len(parts)-2] 152 | } 153 | } 154 | } 155 | 156 | target["Host"] = hostname 157 | target["Port"] = port 158 | target["RawQuery"] = query 159 | 160 | if (target["RawQuery"] != "") && (port == "80" || port == "443") { 161 | target["URL"] = fmt.Sprintf("%v://%v%v?%v", target["Scheme"], target["Host"], target["Path"], 
target["RawQuery"]) 162 | } else if port != "80" && port != "443" { 163 | target["URL"] = fmt.Sprintf("%v://%v:%v%v?%v", target["Scheme"], target["Domain"], target["Port"], target["Path"], target["RawQuery"]) 164 | } else { 165 | target["URL"] = fmt.Sprintf("%v://%v%v", target["Scheme"], target["Host"], target["Path"]) 166 | } 167 | 168 | uu, _ := url.Parse(raw) 169 | target["BaseURL"] = fmt.Sprintf("%v://%v", uu.Scheme, uu.Host) 170 | target["Extension"] = filepath.Ext(target["BaseURL"]) 171 | 172 | return target 173 | } 174 | 175 | // ReadingLines Reading file and return content as []string 176 | func ReadingLines(filename string) []string { 177 | var result []string 178 | file, err := os.Open(filename) 179 | if err != nil { 180 | return result 181 | } 182 | defer file.Close() 183 | 184 | scanner := bufio.NewScanner(file) 185 | for scanner.Scan() { 186 | val := strings.TrimSpace(scanner.Text()) 187 | if val == "" { 188 | continue 189 | } 190 | result = append(result, val) 191 | } 192 | 193 | if err := scanner.Err(); err != nil { 194 | return result 195 | } 196 | return result 197 | } 198 | -------------------------------------------------------------------------------- /hparse/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "github.com/PuerkitoBio/goquery" 8 | "io/ioutil" 9 | "net/http" 10 | "os" 11 | "strings" 12 | "sync" 13 | ) 14 | 15 | // Finding attr in every HTML tag 16 | // cat /tmp/list_of_IP | hparse -t 'a' 17 | var ( 18 | tag string 19 | attr string 20 | ) 21 | 22 | func main() { 23 | // cli arguments 24 | var concurrency int 25 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 26 | flag.StringVar(&tag, "t", "a", "Tag name") 27 | flag.StringVar(&attr, "a", "href", "Attribute name") 28 | flag.Parse() 29 | 30 | var wg sync.WaitGroup 31 | jobs := make(chan string, concurrency) 32 | 33 | for i := 0; i < concurrency; i++ { 34 | wg.Add(1) 35 | go func() { 36 | defer wg.Done() 37 | for job := range jobs { 38 | // do something 39 | result, err := doParse(job, tag, attr) 40 | if err == nil { 41 | fmt.Print(result) 42 | } 43 | } 44 | }() 45 | } 46 | 47 | sc := bufio.NewScanner(os.Stdin) 48 | go func() { 49 | for sc.Scan() { 50 | u := strings.TrimSpace(sc.Text()) 51 | if err := sc.Err(); err == nil && u != "" { 52 | jobs <- u 53 | } 54 | } 55 | close(jobs) 56 | }() 57 | wg.Wait() 58 | } 59 | 60 | func doParse(url string, tag string, attr string) (string, error) { 61 | resp, err := http.Get(url) 62 | if err != nil { 63 | return "", err 64 | } 65 | defer resp.Body.Close() 66 | 67 | content, err := ioutil.ReadAll(resp.Body) 68 | if err != nil { 69 | return "", err 70 | } 71 | 72 | var result []string 73 | var data string 74 | doc, err := goquery.NewDocumentFromReader(strings.NewReader(string(content))) 75 | if err != nil { 76 | return "", err 77 | } 78 | doc.Find(tag).Each(func(i int, s *goquery.Selection) { 79 | if attr == "text" { 80 | result = append(result, s.Text()) 81 | return 82 | } 83 | href, ok := s.Attr(attr) 84 | if ok { 85 | result = append(result, href) 86 | } 87 | }) 88 | 89 | if len(result) > 0 { 90 | data = strings.Join(result, "\n") 91 | } 92 | return data, nil 93 | } 94 | -------------------------------------------------------------------------------- /junique/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/junique 2 | 3 | go 1.21.1 4 | 5 | require ( 6 | 
github.com/projectdiscovery/hmap v0.0.19 7 | github.com/tidwall/gjson v1.17.0 8 | ) 9 | 10 | require ( 11 | github.com/akrylysov/pogreb v0.10.1 // indirect 12 | github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect 13 | github.com/aymerick/douceur v0.2.0 // indirect 14 | github.com/golang/snappy v0.0.4 // indirect 15 | github.com/gorilla/css v1.0.0 // indirect 16 | github.com/microcosm-cc/bluemonday v1.0.25 // indirect 17 | github.com/nxadm/tail v1.4.8 // indirect 18 | github.com/pkg/errors v0.9.1 // indirect 19 | github.com/projectdiscovery/blackrock v0.0.1 // indirect 20 | github.com/projectdiscovery/utils v0.0.54 // indirect 21 | github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect 22 | github.com/syndtr/goleveldb v1.0.0 // indirect 23 | github.com/tidwall/btree v1.4.3 // indirect 24 | github.com/tidwall/buntdb v1.3.0 // indirect 25 | github.com/tidwall/grect v0.1.4 // indirect 26 | github.com/tidwall/match v1.1.1 // indirect 27 | github.com/tidwall/pretty v1.2.0 // indirect 28 | github.com/tidwall/rtred v0.1.2 // indirect 29 | github.com/tidwall/tinyqueue v0.1.1 // indirect 30 | go.etcd.io/bbolt v1.3.7 // indirect 31 | golang.org/x/net v0.14.0 // indirect 32 | golang.org/x/sys v0.11.0 // indirect 33 | gopkg.in/yaml.v3 v3.0.1 // indirect 34 | ) 35 | -------------------------------------------------------------------------------- /junique/go.sum: -------------------------------------------------------------------------------- 1 | github.com/akrylysov/pogreb v0.10.1 h1:FqlR8VR7uCbJdfUob916tPM+idpKgeESDXOA1K0DK4w= 2 | github.com/akrylysov/pogreb v0.10.1/go.mod h1:pNs6QmpQ1UlTJKDezuRWmaqkgUE2TuU0YTWyqJZ7+lI= 3 | github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ= 4 | github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= 5 | github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk= 6 | github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4= 7 | github.com/bits-and-blooms/bitset v1.8.0 h1:FD+XqgOZDUxxZ8hzoBFuV9+cGWY9CslN6d5MS5JVb4c= 8 | github.com/bits-and-blooms/bitset v1.8.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= 9 | github.com/bits-and-blooms/bloom/v3 v3.5.0 h1:AKDvi1V3xJCmSR6QhcBfHbCN4Vf8FfxeWkMNQfmAGhY= 10 | github.com/bits-and-blooms/bloom/v3 v3.5.0/go.mod h1:Y8vrn7nk1tPIlmLtW2ZPV+W7StdVMor6bC1xgpjMZFs= 11 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 12 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 13 | github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= 14 | github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= 15 | github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= 16 | github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= 17 | github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= 18 | github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= 19 | github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= 20 | github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= 21 | github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= 22 | 
github.com/hashicorp/golang-lru/v2 v2.0.6 h1:3xi/Cafd1NaoEnS/yDssIiuVeDVywU0QdFGl3aQaQHM= 23 | github.com/hashicorp/golang-lru/v2 v2.0.6/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= 24 | github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= 25 | github.com/microcosm-cc/bluemonday v1.0.25 h1:4NEwSfiJ+Wva0VxN5B8OwMicaJvD8r9tlJWm9rtloEg= 26 | github.com/microcosm-cc/bluemonday v1.0.25/go.mod h1:ZIOjCQp1OrzBBPIJmfX4qDYFuhU02nx4bn030ixfHLE= 27 | github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= 28 | github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= 29 | github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= 30 | github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= 31 | github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= 32 | github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= 33 | github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= 34 | github.com/onsi/gomega v1.16.0 h1:6gjqkI8iiRHMvdccRJM8rVKjCWk6ZIm6FTm3ddIe4/c= 35 | github.com/onsi/gomega v1.16.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= 36 | github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= 37 | github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 38 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 39 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 40 | github.com/projectdiscovery/blackrock v0.0.1 h1:lHQqhaaEFjgf5WkuItbpeCZv2DUIE45k0VbGJyft6LQ= 41 | github.com/projectdiscovery/blackrock v0.0.1/go.mod h1:ANUtjDfaVrqB453bzToU+YB4cUbvBRpLvEwoWIwlTss= 42 | github.com/projectdiscovery/hmap v0.0.19 h1:v6a2en6INbS9mGRSUXOyxKa89VstnAtmk5lpJqOWPfo= 43 | github.com/projectdiscovery/hmap v0.0.19/go.mod h1:IKeB4OE7e/SjuSI3yhNZBuRtwaHlHInSOEAc/GN7QGM= 44 | github.com/projectdiscovery/utils v0.0.54 h1:qwTIalrK8pKYaxFObdeSfCtwDmVCN9qswc8+7jIpnBM= 45 | github.com/projectdiscovery/utils v0.0.54/go.mod h1:WhzbWSyGkTDn4Jvw+7jM2yP675/RARegNjoA6S7zYcc= 46 | github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI= 47 | github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU= 48 | github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= 49 | github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= 50 | github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= 51 | github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= 52 | github.com/tidwall/assert v0.1.0 h1:aWcKyRBUAdLoVebxo95N7+YZVTFF/ASTr7BN4sLP6XI= 53 | github.com/tidwall/assert v0.1.0/go.mod h1:QLYtGyeqse53vuELQheYl9dngGCJQ+mTtlxcktb+Kj8= 54 | github.com/tidwall/btree v1.4.3 h1:Lf5U/66bk0ftNppOBjVoy/AIPBrLMkheBp4NnSNiYOo= 55 | github.com/tidwall/btree v1.4.3/go.mod h1:LGm8L/DZjPLmeWGjv5kFrY8dL4uVhMmzmmLYmsObdKE= 56 | github.com/tidwall/buntdb v1.3.0 h1:gdhWO+/YwoB2qZMeAU9JcWWsHSYU3OvcieYgFRS0zwA= 57 | github.com/tidwall/buntdb v1.3.0/go.mod h1:lZZrZUWzlyDJKlLQ6DKAy53LnG7m5kHyrEHvvcDmBpU= 58 | github.com/tidwall/gjson v1.12.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 59 | github.com/tidwall/gjson v1.17.0 h1:/Jocvlh98kcTfpN2+JzGQWQcqrPQwDrVEMApx/M5ZwM= 60 | 
github.com/tidwall/gjson v1.17.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= 61 | github.com/tidwall/grect v0.1.4 h1:dA3oIgNgWdSspFzn1kS4S/RDpZFLrIxAZOdJKjYapOg= 62 | github.com/tidwall/grect v0.1.4/go.mod h1:9FBsaYRaR0Tcy4UwefBX/UDcDcDy9V5jUcxHzv2jd5Q= 63 | github.com/tidwall/lotsa v1.0.2 h1:dNVBH5MErdaQ/xd9s769R31/n2dXavsQ0Yf4TMEHHw8= 64 | github.com/tidwall/lotsa v1.0.2/go.mod h1:X6NiU+4yHA3fE3Puvpnn1XMDrFZrE9JO2/w+UMuqgR8= 65 | github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= 66 | github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= 67 | github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= 68 | github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= 69 | github.com/tidwall/rtred v0.1.2 h1:exmoQtOLvDoO8ud++6LwVsAMTu0KPzLTUrMln8u1yu8= 70 | github.com/tidwall/rtred v0.1.2/go.mod h1:hd69WNXQ5RP9vHd7dqekAz+RIdtfBogmglkZSRxCHFQ= 71 | github.com/tidwall/tinyqueue v0.1.1 h1:SpNEvEggbpyN5DIReaJ2/1ndroY8iyEGxPYxoSaymYE= 72 | github.com/tidwall/tinyqueue v0.1.1/go.mod h1:O/QNHwrnjqr6IHItYrzoHAKYhBkLI67Q096fQP5zMYw= 73 | go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ= 74 | go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw= 75 | golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= 76 | golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= 77 | golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= 78 | golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 79 | golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 80 | golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 81 | golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= 82 | golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 83 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 84 | golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= 85 | golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= 86 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 87 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 88 | gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= 89 | gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= 90 | gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= 91 | gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE= 92 | gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 93 | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 94 | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 95 | -------------------------------------------------------------------------------- /junique/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "os" 8 | "strings" 9 | 10 | "github.com/projectdiscovery/hmap/store/hybrid" 11 | 
"github.com/tidwall/gjson" 12 | ) 13 | 14 | // cat raw.json | junique -k 'hash' | sort -u > unique-hosts.json 15 | 16 | func main() { 17 | var jKey string 18 | flag.StringVar(&jKey, "k", "", "Json key for unique ( https://github.com/tidwall/gjson )") 19 | flag.Parse() 20 | 21 | hm, err := hybrid.New(hybrid.DefaultDiskOptions) 22 | if err != nil { 23 | fmt.Fprintf(os.Stderr, "Failed to init map") 24 | os.Exit(1) 25 | } 26 | defer hm.Close() 27 | sc := bufio.NewScanner(os.Stdin) 28 | for sc.Scan() { 29 | line := strings.TrimSpace(sc.Text()) 30 | if line == "" { 31 | continue 32 | } 33 | jValue := gjson.Get(line, jKey).String() 34 | if jValue == "" { 35 | continue 36 | } 37 | if _, exist := hm.Get(jValue); !exist { 38 | hm.Set(jValue, []byte("0")) 39 | fmt.Println(line) 40 | } 41 | } 42 | 43 | } 44 | -------------------------------------------------------------------------------- /nin/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "log" 8 | "os" 9 | "sort" 10 | "strings" 11 | 12 | "github.com/mitchellh/go-homedir" 13 | "github.com/thoas/go-funk" 14 | ) 15 | 16 | // Exclude file from a file 17 | // cat file.txt | nin -e excldue.txt 18 | var ( 19 | verbose bool 20 | alexa bool 21 | extra bool 22 | exclude string 23 | ) 24 | var logger *log.Logger 25 | 26 | func main() { 27 | // cli aguments 28 | flag.StringVar(&exclude, "e", "", "Exclude File") 29 | flag.BoolVar(&verbose, "v", false, "Verbose output") 30 | flag.Parse() 31 | 32 | args := os.Args[1:] 33 | sort.Strings(args) 34 | exclude = args[len(args)-1] 35 | 36 | if !FileExists(exclude) { 37 | log.Printf("No input found: %s", exclude) 38 | os.Exit(-1) 39 | } 40 | 41 | var stdInputs, excludeInputs []string 42 | log.Printf("No input found: %s", exclude) 43 | excludeInputs = ReadingLines(exclude) 44 | 45 | // detect if anything came from std 46 | stat, _ := os.Stdin.Stat() 47 | if (stat.Mode() & os.ModeCharDevice) == 0 { 48 | sc := bufio.NewScanner(os.Stdin) 49 | for sc.Scan() { 50 | target := strings.TrimSpace(sc.Text()) 51 | if err := sc.Err(); err == nil && target != "" { 52 | stdInputs = append(stdInputs, target) 53 | } 54 | } 55 | } 56 | 57 | log.Printf("Excludes inputs length: %v", len(excludeInputs)) 58 | log.Printf("Inputs length: %v", len(stdInputs)) 59 | 60 | // really do something 61 | for _, input := range stdInputs { 62 | //for _, einput := range excludeInputs { 63 | if !funk.ContainsString(excludeInputs, input) { 64 | fmt.Println(input) 65 | } 66 | } 67 | 68 | } 69 | 70 | // FileExists check if file is exist or not 71 | func FileExists(filename string) bool { 72 | _, err := os.Stat(filename) 73 | if os.IsNotExist(err) { 74 | return false 75 | } 76 | return true 77 | } 78 | 79 | // ReadingLines Reading file and return content as []string 80 | func ReadingLines(filename string) []string { 81 | var result []string 82 | if strings.HasPrefix(filename, "~") { 83 | filename, _ = homedir.Expand(filename) 84 | } 85 | file, err := os.Open(filename) 86 | if err != nil { 87 | return result 88 | } 89 | defer file.Close() 90 | 91 | scanner := bufio.NewScanner(file) 92 | for scanner.Scan() { 93 | val := strings.TrimSpace(scanner.Text()) 94 | if val == "" { 95 | continue 96 | } 97 | result = append(result, val) 98 | } 99 | 100 | if err := scanner.Err(); err != nil { 101 | return result 102 | } 103 | return result 104 | } 105 | -------------------------------------------------------------------------------- /oic/README.md: 
-------------------------------------------------------------------------------- 1 | ## Open in Chrome 2 | Open URL with your real browser 3 | 4 | 5 | ## Install 6 | 7 | ```bash 8 | go get -u github.com/j3ssie/go-auxs/oic 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```bash 14 | cat urls.txt | oic 15 | cat urls.txt | oic -c 5 -proxy http://127.0.0.1:8080 16 | cat urls.txt | oic -c 5 -proxy http://127.0.0.1:8080 -q 17 | ``` -------------------------------------------------------------------------------- /oic/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/oic 2 | 3 | go 1.16 4 | 5 | require ( 6 | github.com/chromedp/chromedp v0.7.3 7 | github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 8 | ) 9 | -------------------------------------------------------------------------------- /oic/go.sum: -------------------------------------------------------------------------------- 1 | github.com/chromedp/cdproto v0.0.0-20210526005521-9e51b9051fd0 h1:aIcgRshD5I1MfJfB92KBDKpaXrYqj3fkqI8bHdtP3zA= 2 | github.com/chromedp/cdproto v0.0.0-20210526005521-9e51b9051fd0/go.mod h1:At5TxYYdxkbQL0TSefRjhLE3Q0lgvqKKMSFUglJ7i1U= 3 | github.com/chromedp/chromedp v0.7.3 h1:FvgJICfjvXtDX+miuMUY0NHuY8zQvjS/TcEQEG6Ldzs= 4 | github.com/chromedp/chromedp v0.7.3/go.mod h1:9gC521Yzgrk078Ulv6KIgG7hJ2x9aWrxMBBobTFk30A= 5 | github.com/chromedp/sysutil v1.0.0 h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic= 6 | github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= 7 | github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU= 8 | github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM= 9 | github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og= 10 | github.com/gobwas/pool v0.2.1/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= 11 | github.com/gobwas/ws v1.1.0-rc.5 h1:QOAag7FoBaBYYHRqzqkhhd8fq5RTubvI4v3Ft/gDVVQ= 12 | github.com/gobwas/ws v1.1.0-rc.5/go.mod h1:nzvNcVha5eUziGrbxFCo6qFIojQHjJV5cLYIbezhfL0= 13 | github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= 14 | github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= 15 | github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= 16 | github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= 17 | github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= 18 | github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= 19 | golang.org/x/sys v0.0.0-20201207223542-d4d67f95c62d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 20 | golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea h1:+WiDlPBBaO+h9vPNZi8uJ3k4BkKQB7Iow3aqwHVA5hI= 21 | golang.org/x/sys v0.0.0-20210525143221-35b2ab0089ea/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 22 | -------------------------------------------------------------------------------- /oic/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "context" 6 | "flag" 7 | "github.com/chromedp/chromedp" 8 | "log" 9 | "os" 10 | "path" 11 | "path/filepath" 12 | "sort" 13 | "strings" 14 | "sync" 15 | "time" 16 | ) 17 | 18 | // Open URL with your default browser 19 | // Usage: 20 | // cat urls.txt | oic 21 | // cat urls.txt | 
oic -c 5 -proxy http://127.0.0.1:8080 22 | 23 | var ( 24 | verbose bool 25 | headless bool 26 | concurrency int 27 | timeout int 28 | data string 29 | dataFile string 30 | proxy string 31 | ) 32 | 33 | func main() { 34 | // cli args 35 | flag.StringVar(&data, "u", "", "URL to open") 36 | flag.StringVar(&dataFile, "U", "", "URL to open") 37 | flag.IntVar(&concurrency, "c", 5, "number of tab at a time") 38 | flag.IntVar(&timeout, "t", 15, "timeout in second") 39 | flag.StringVar(&proxy, "proxy", "", "proxy to pass chrome to (eg: http://127.0.0.1:8080)") 40 | flag.BoolVar(&headless, "q", false, "enable headless") 41 | flag.Parse() 42 | 43 | // detect if anything came from std 44 | var inputs []string 45 | stat, _ := os.Stdin.Stat() 46 | if (stat.Mode() & os.ModeCharDevice) == 0 { 47 | sc := bufio.NewScanner(os.Stdin) 48 | for sc.Scan() { 49 | target := strings.TrimSpace(sc.Text()) 50 | if err := sc.Err(); err == nil && target != "" { 51 | inputs = append(inputs, target) 52 | } 53 | } 54 | } 55 | 56 | if data != "" { 57 | inputs = append(inputs, data) 58 | } 59 | if dataFile != "" { 60 | inputs = append(inputs, ReadingLines(dataFile)...) 61 | } 62 | 63 | if (stat.Mode()&os.ModeCharDevice) != 0 && len(inputs) == 0 { 64 | args := os.Args[1:] 65 | sort.Strings(args) 66 | raw := args[len(args)-1] 67 | RequestWithChrome(raw) 68 | os.Exit(0) 69 | } 70 | 71 | var wg sync.WaitGroup 72 | jobs := make(chan string, concurrency) 73 | 74 | for i := 0; i < concurrency; i++ { 75 | wg.Add(1) 76 | go func() { 77 | defer wg.Done() 78 | for job := range jobs { 79 | RequestWithChrome(job) 80 | } 81 | }() 82 | } 83 | 84 | go func() { 85 | for _, raw := range inputs { 86 | jobs <- raw 87 | } 88 | close(jobs) 89 | }() 90 | wg.Wait() 91 | } 92 | 93 | // RequestWithChrome Do request with real browser 94 | func RequestWithChrome(url string) string { 95 | // prepare the chrome options 96 | opts := append(chromedp.DefaultExecAllocatorOptions[:], 97 | chromedp.Flag("headless", headless), 98 | chromedp.Flag("ignore-certificate-errors", true), 99 | chromedp.Flag("disable-gpu", true), 100 | chromedp.Flag("enable-automation", true), 101 | chromedp.Flag("disable-extensions", true), 102 | chromedp.Flag("disable-setuid-sandbox", true), 103 | chromedp.Flag("disable-web-security", true), 104 | chromedp.Flag("no-first-run", true), 105 | chromedp.Flag("no-default-browser-check", true), 106 | //chromedp.UserDataDir(""), 107 | ) 108 | 109 | if proxy != "" { 110 | opts = append(opts, chromedp.ProxyServer(proxy)) 111 | } 112 | 113 | allocCtx, bcancel := chromedp.NewExecAllocator(context.Background(), opts...) 
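// A fresh Chrome exec allocator is spawned per URL with the flags above (optional headless mode,
// certificate errors ignored, optional -proxy). The browser context created below gets a -t second
// timeout, navigates to the target and captures the OuterHTML of the element with id "main",
// after which cleanUp() removes the temporary chromedp-runner* profile directories.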
114 | defer bcancel() 115 | 116 | ctx, cancel := chromedp.NewContext(allocCtx, chromedp.WithLogf(log.Printf)) 117 | ctx, cancel = context.WithTimeout(ctx, time.Duration(timeout)*time.Second) 118 | defer cancel() 119 | 120 | // run task list 121 | var data string 122 | contentID := "main" 123 | err := chromedp.Run(ctx, 124 | chromedp.Navigate(url), 125 | chromedp.OuterHTML(contentID, &data, chromedp.NodeVisible, chromedp.ByID), 126 | ) 127 | 128 | cleanUp() 129 | if err != nil { 130 | return "" 131 | } 132 | return data 133 | } 134 | 135 | func cleanUp() { 136 | tmpFolder := path.Join(os.TempDir(), "chromedp-runner*") 137 | if _, err := os.Stat("/tmp/"); !os.IsNotExist(err) { 138 | tmpFolder = path.Join("/tmp/", "chromedp-runner*") 139 | } 140 | junks, err := filepath.Glob(tmpFolder) 141 | if err != nil { 142 | return 143 | } 144 | for _, junk := range junks { 145 | os.RemoveAll(junk) 146 | } 147 | } 148 | 149 | // ReadingLines Reading file and return content as []string 150 | func ReadingLines(filename string) []string { 151 | var result []string 152 | file, err := os.Open(filename) 153 | if err != nil { 154 | return result 155 | } 156 | defer file.Close() 157 | 158 | scanner := bufio.NewScanner(file) 159 | for scanner.Scan() { 160 | val := strings.TrimSpace(scanner.Text()) 161 | if val == "" { 162 | continue 163 | } 164 | result = append(result, val) 165 | } 166 | 167 | if err := scanner.Err(); err != nil { 168 | return result 169 | } 170 | return result 171 | } 172 | -------------------------------------------------------------------------------- /ourl/README.md: -------------------------------------------------------------------------------- 1 | # Old Urls 2 | Fetch known URLs from AlienVault's [Open Threat Exchange](https://otx.alienvault.com), the Wayback Machine, and Common Crawl. Originally built as a microservice. 3 | 4 | ### Usage: 5 | ``` 6 | ▻ printf 'example.com' | ourl 7 | ``` 8 | 9 | or 10 | 11 | ``` 12 | ▻ ourl example.com 13 | ``` 14 | 15 | ### install: 16 | ``` 17 | ▻ go get -u github.com/theblackturtle/ourl 18 | ``` 19 | 20 | ## Credits: 21 | Thanks @tomnomom for [waybackurls](https://github.com/tomnomnom/waybackurls)! 22 | Thanks @lc for gau! 
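Other flags declared in `main.go` (a quick sketch, not separately documented upstream):
```
▻ ourl -subs -p -f 'filter=statuscode:200' example.com   # paged Wayback download, subdomains included, 200s only
▻ cat domains.txt | ourl -a                              # print unique hostnames only
▻ ourl -r example.com > urls.json                        # raw JSON records (url, status, mime, hash, ...)
```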
23 | 24 | -------------------------------------------------------------------------------- /ourl/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/ourl 2 | 3 | go 1.17 4 | 5 | require ( 6 | github.com/cornelk/hashmap v1.0.1 7 | github.com/json-iterator/go v1.1.12 8 | github.com/panjf2000/ants v1.3.0 9 | ) 10 | 11 | require ( 12 | github.com/dchest/siphash v1.1.0 // indirect 13 | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect 14 | github.com/modern-go/reflect2 v1.0.2 // indirect 15 | ) 16 | -------------------------------------------------------------------------------- /ourl/go.sum: -------------------------------------------------------------------------------- 1 | github.com/cornelk/hashmap v1.0.1 h1:RXGcy29hEdLLV8T6aK4s+BAd4tq4+3Hq50N2GoG0uIg= 2 | github.com/cornelk/hashmap v1.0.1/go.mod h1:8wbysTUDnwJGrPZ1Iwsou3m+An6sldFrJItjRhfegCw= 3 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 4 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 5 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 6 | github.com/dchest/siphash v1.1.0 h1:1Rs9eTUlZLPBEvV+2sTaM8O0NWn0ppbgqS7p11aWawI= 7 | github.com/dchest/siphash v1.1.0/go.mod h1:q+IRvb2gOSrUnYoPqHiyHXS0FOBBOdl6tONBlVnOnt4= 8 | github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 9 | github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= 10 | github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= 11 | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= 12 | github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= 13 | github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= 14 | github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= 15 | github.com/panjf2000/ants v1.3.0 h1:8pQ+8leaLc9lys2viEEr8md0U4RN6uOSUCE9bOYjQ9M= 16 | github.com/panjf2000/ants v1.3.0/go.mod h1:AaACblRPzq35m1g3enqYcxspbbiOJJYaxU2wMpm1cXY= 17 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 18 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 19 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 20 | github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= 21 | github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= 22 | -------------------------------------------------------------------------------- /ourl/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "errors" 6 | "flag" 7 | "fmt" 8 | "io/ioutil" 9 | "net/http" 10 | "net/url" 11 | "os" 12 | "strconv" 13 | "strings" 14 | "sync" 15 | "time" 16 | 17 | "github.com/cornelk/hashmap" 18 | jsoniter "github.com/json-iterator/go" 19 | "github.com/panjf2000/ants" 20 | ) 21 | 22 | type Raw struct { 23 | Url string `json:"url"` 24 | ArchiveUrl string `json:"archive_url"` 25 | Status string `json:"status"` 26 | Mime string `json:"mime"` 27 | Hash string `json:"hash"` 28 | Timestamp string `json:"time"` 29 | Length string `json:"length"` 30 | } 31 | 32 | var ( 33 | 
IncludeSubs bool 34 | PageCheck bool 35 | RawOutput bool 36 | Verbose bool 37 | GetDomainOnly bool 38 | FilterFlags string 39 | OutputSet = &hashmap.HashMap{} 40 | ) 41 | 42 | var client = &http.Client{ 43 | Timeout: 5 * time.Minute, // Some sources need long time to query 44 | } 45 | 46 | func main() { 47 | var domains []string 48 | flag.BoolVar(&IncludeSubs, "subs", false, "include subdomains of target domain") 49 | flag.BoolVar(&PageCheck, "p", false, "if the data is large, get by pages") 50 | flag.BoolVar(&RawOutput, "r", false, "print raw output (JSON format)") 51 | flag.BoolVar(&GetDomainOnly, "a", false, "print domain only") 52 | flag.BoolVar(&Verbose, "v", false, "enable verbose") 53 | flag.StringVar(&FilterFlags, "f", "", "Wayback Machine filter (filter=statuscode:200&filter=!mimetype:text/html)") 54 | flag.Parse() 55 | 56 | if flag.NArg() > 0 { 57 | domains = []string{flag.Arg(0)} 58 | } else { 59 | s := bufio.NewScanner(os.Stdin) 60 | for s.Scan() { 61 | line := strings.TrimSpace(s.Text()) 62 | if line != "" { 63 | domains = append(domains, line) 64 | } 65 | } 66 | } 67 | for _, domain := range domains { 68 | Run(domain) 69 | } 70 | } 71 | 72 | type fetch func(string) error 73 | 74 | func Run(domain string) { 75 | fetchers := []fetch{getWaybackUrls, getCommonCrawlURLs, getOtxUrls} 76 | for _, fn := range fetchers { 77 | err := fn(domain) 78 | if err != nil { 79 | fmt.Fprintf(os.Stderr, "Error: %s\n", err) 80 | continue 81 | } 82 | 83 | } 84 | } 85 | 86 | type OTXResult struct { 87 | URLList []struct { 88 | URL string `json:"url"` 89 | Date string `json:"date"` 90 | Domain string `json:"domain"` 91 | Hostname string `json:"hostname"` 92 | Result struct { 93 | Urlworker struct { 94 | IP string `json:"ip"` 95 | HTTPCode int `json:"http_code"` 96 | } `json:"urlworker"` 97 | Safebrowsing struct { 98 | Matches []interface{} `json:"matches"` 99 | } `json:"safebrowsing"` 100 | } `json:"result"` 101 | Httpcode int `json:"httpcode"` 102 | Gsb []interface{} `json:"gsb"` 103 | Encoded string `json:"encoded"` 104 | } `json:"url_list"` 105 | PageNum int `json:"page_num"` 106 | Limit int `json:"limit"` 107 | Paged bool `json:"paged"` 108 | HasNext bool `json:"has_next"` 109 | FullSize int `json:"full_size"` 110 | ActualSize int `json:"actual_size"` 111 | } 112 | 113 | func getOtxUrls(hostname string) error { 114 | page := 0 115 | for { 116 | currentURL := fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/url_list?limit=50&page=%d", hostname, page) 117 | 118 | r, err := client.Get(currentURL) 119 | if err != nil { 120 | return errors.New(fmt.Sprintf("http request to OTX failed: %s", err.Error())) 121 | } 122 | defer r.Body.Close() 123 | if r.StatusCode != 200 { 124 | return nil 125 | } 126 | if Verbose { 127 | printStderr(currentURL) 128 | } 129 | bytes, err := ioutil.ReadAll(r.Body) 130 | if err != nil { 131 | return errors.New(fmt.Sprintf("error reading body from alienvault: %s", err.Error())) 132 | } 133 | results := &OTXResult{} 134 | err = jsoniter.Unmarshal(bytes, results) 135 | if err != nil { 136 | return errors.New(fmt.Sprintf("could not decode json response from alienvault: %s", err.Error())) 137 | } 138 | for _, result := range results.URLList { 139 | if result.URL == "" { 140 | continue 141 | } 142 | 143 | if RawOutput { 144 | o := Raw{Url: result.URL, Status: strconv.Itoa(result.Httpcode)} 145 | if r, err := jsoniter.MarshalToString(o); err == nil { 146 | fmt.Println(r) 147 | } 148 | } else { 149 | if GetDomainOnly { 150 | if u, err := url.Parse(result.URL); err 
== nil { 151 | hostname := u.Hostname() 152 | if notExist := OutputSet.Insert(hostname, true); notExist { 153 | fmt.Println(hostname) 154 | } 155 | } 156 | } else { 157 | fmt.Println(result.URL) 158 | } 159 | } 160 | } 161 | if !results.HasNext { 162 | break 163 | } 164 | page++ 165 | } 166 | return nil 167 | } 168 | 169 | func getWaybackUrls(hostname string) error { 170 | wildcard := "*." 171 | if !IncludeSubs { 172 | wildcard = "" 173 | } 174 | 175 | // Remove `collapse` because we need to get as much as possible 176 | baseURL := fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=%s%s/*&output=json", wildcard, hostname) 177 | if FilterFlags != "" { 178 | baseURL += "&" + FilterFlags 179 | } 180 | printStderr("Base URL: " + baseURL) 181 | if PageCheck { 182 | // Check amount of page first 183 | pagesNumURL := baseURL + "&showNumPages=true" 184 | pagesNum := getWaybackPages(pagesNumURL) 185 | if pagesNum > 0 { 186 | printStderr(fmt.Sprintf("Total pages: %d", pagesNum)) 187 | 188 | var wg sync.WaitGroup 189 | pool, _ := ants.NewPoolWithFunc(15, func(i interface{}) { 190 | defer wg.Done() 191 | page := i.(int) 192 | pageDataURL := fmt.Sprintf("%s&page=%d", baseURL, page) 193 | if Verbose { 194 | printStderr(pageDataURL) 195 | } 196 | err := downloadWaybackResults(pageDataURL) 197 | if err != nil { 198 | printStderr(fmt.Sprintf("Failed to download url %s: %s", pageDataURL, err)) 199 | return 200 | } 201 | }) 202 | defer pool.Release() 203 | for i := 0; i <= pagesNum; i++ { 204 | printStderr(fmt.Sprintf("Downloading page: %d/%d", i, pagesNum)) 205 | wg.Add(1) 206 | pool.Invoke(i) 207 | } 208 | wg.Wait() 209 | } 210 | return nil 211 | } else { 212 | err := downloadWaybackResults(baseURL) 213 | if err != nil { 214 | return err 215 | } 216 | return nil 217 | } 218 | } 219 | 220 | func downloadWaybackResults(downloadURL string) error { 221 | retryTimes := 20 222 | for retryTimes > 0 { 223 | if retryTimes < 20 { 224 | printStderr(fmt.Sprintf("%s: retry %d", downloadURL, 60-retryTimes)) 225 | } 226 | time.Sleep(time.Second * 5) 227 | resp, err := client.Get(downloadURL) 228 | if err != nil { 229 | retryTimes-- 230 | continue 231 | } 232 | 233 | defer resp.Body.Close() 234 | switch resp.StatusCode { 235 | case 200: 236 | dec := jsoniter.NewDecoder(resp.Body) 237 | for dec.More() { 238 | first := true 239 | var results [][]string 240 | if err := dec.Decode(&results); err == nil { 241 | for _, result := range results { 242 | if first { 243 | // skip first result from wayback machine 244 | // always is "original" 245 | first = false 246 | continue 247 | } 248 | if result[3] != "" && result[3] == "warc/revisit" { 249 | continue 250 | } 251 | if result[2] == "" { 252 | continue 253 | } 254 | if GetDomainOnly { 255 | if u, err := url.Parse(result[2]); err == nil { 256 | hostname := u.Hostname() 257 | if notExist := OutputSet.Insert(hostname, true); notExist { 258 | fmt.Println(hostname) 259 | } 260 | } 261 | continue 262 | } 263 | 264 | if RawOutput { 265 | o := Raw{ 266 | Timestamp: result[1], 267 | Url: result[2], 268 | Mime: result[3], 269 | Status: result[4], 270 | Hash: result[5], 271 | Length: result[6], 272 | } 273 | o.ArchiveUrl = fmt.Sprintf("https://web.archive.org/web/%sid_/%s", o.Timestamp, o.Url) 274 | if r, err := jsoniter.MarshalToString(o); err == nil { 275 | fmt.Println(r) 276 | } 277 | } else { 278 | fmt.Println(result[2]) 279 | } 280 | } 281 | } 282 | } 283 | return nil 284 | case 400: 285 | return errors.New("Status code: 400") 286 | case 429: 287 | retryTimes-- 288 | default: 289 | 
retryTimes-- 290 | } 291 | } 292 | return errors.New("Max try times") 293 | } 294 | 295 | func getWaybackPages(url string) int { 296 | r, err := client.Get(url) 297 | if err != nil { 298 | printStderr("Request WayBack Pages error") 299 | return -1 300 | } 301 | defer r.Body.Close() 302 | resp, err := ioutil.ReadAll(r.Body) 303 | if err != nil { 304 | printStderr("Read WayBack body error") 305 | return -1 306 | } 307 | body := strings.TrimSpace(string(resp)) 308 | amount, err := strconv.Atoi(body) 309 | if err != nil { 310 | printStderr("Convert body to int error") 311 | return -1 312 | } 313 | return amount 314 | } 315 | 316 | func getCommonCrawlURLs(domain string) error { 317 | wildcard := "*." 318 | if !IncludeSubs { 319 | wildcard = "" 320 | } 321 | currentApis, err := getCurrentCC() 322 | if err != nil { 323 | return fmt.Errorf("error getting current commoncrawl url: %v", err) 324 | } 325 | var wg sync.WaitGroup 326 | pool, _ := ants.NewPoolWithFunc(5, func(i interface{}) { 327 | defer wg.Done() 328 | currentApi := i.(string) 329 | currentURL := fmt.Sprintf("%s?url=%s%s/*&output=json", currentApi, wildcard, domain) 330 | res, err := http.Get(currentURL) 331 | if err != nil { 332 | return 333 | } 334 | 335 | if Verbose { 336 | printStderr(currentURL) 337 | } 338 | defer res.Body.Close() 339 | 340 | sc := bufio.NewScanner(res.Body) 341 | 342 | for sc.Scan() { 343 | result := struct { 344 | Urlkey string `json:"urlkey"` 345 | Timestamp string `json:"timestamp"` 346 | URL string `json:"url"` 347 | Mime string `json:"mime"` 348 | MimeDetected string `json:"mime-detected"` 349 | Status string `json:"status"` 350 | Digest string `json:"digest"` 351 | Length string `json:"length"` 352 | Offset string `json:"offset"` 353 | Filename string `json:"filename"` 354 | Languages string `json:"languages"` 355 | Encoding string `json:"encoding"` 356 | }{} 357 | err = jsoniter.Unmarshal([]byte(sc.Text()), &result) 358 | 359 | if err != nil { 360 | continue 361 | } 362 | if result.URL == "" { 363 | continue 364 | } 365 | // ! 
We only need to unique in this case 366 | if GetDomainOnly { 367 | if u, err := url.Parse(result.URL); err == nil { 368 | hostname := u.Hostname() 369 | if notExist := OutputSet.Insert(hostname, true); notExist { 370 | fmt.Println(hostname) 371 | } 372 | } 373 | continue 374 | } 375 | 376 | if RawOutput { 377 | o := Raw{ 378 | Timestamp: result.Timestamp, 379 | Url: result.URL, 380 | Mime: result.Mime, 381 | Status: result.Status, 382 | Hash: result.Digest, 383 | Length: result.Length, 384 | } 385 | if r, err := jsoniter.MarshalToString(o); err == nil { 386 | fmt.Println(r) 387 | } 388 | } else { 389 | fmt.Println(result.URL) 390 | 391 | } 392 | } 393 | }) 394 | defer pool.Release() 395 | for _, currentApi := range currentApis { 396 | printStderr(fmt.Sprintf("Downloading page: %s", currentApi)) 397 | wg.Add(1) 398 | pool.Invoke(currentApi) 399 | } 400 | wg.Wait() 401 | 402 | return nil 403 | } 404 | 405 | type CommonCrawlInfo []struct { 406 | API string `json:"cdx-api"` 407 | } 408 | 409 | func getCurrentCC() ([]string, error) { 410 | r, err := client.Get("http://index.commoncrawl.org/collinfo.json") 411 | if err != nil { 412 | return []string{}, err 413 | } 414 | defer r.Body.Close() 415 | resp, err := ioutil.ReadAll(r.Body) 416 | if err != nil { 417 | return []string{}, err 418 | } 419 | var wrapper CommonCrawlInfo 420 | err = jsoniter.Unmarshal(resp, &wrapper) 421 | if err != nil { 422 | return []string{}, fmt.Errorf("could not unmarshal json from CC: %s", err.Error()) 423 | } 424 | if len(wrapper) < 1 { 425 | return []string{}, errors.New("unexpected response from commoncrawl.") 426 | } 427 | var CCes []string 428 | for _, CC := range wrapper { 429 | CCes = append(CCes, CC.API) 430 | } 431 | return CCes, nil 432 | } 433 | 434 | func printStderr(msg string) { 435 | fmt.Fprintf(os.Stderr, msg+"\n") 436 | } 437 | -------------------------------------------------------------------------------- /purl/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "flag" 7 | "fmt" 8 | "golang.org/x/net/publicsuffix" 9 | "net/url" 10 | "os" 11 | "strings" 12 | ) 13 | 14 | // Literally copied from: https://github.com/tomnomnom/unfurl 15 | // with some improvements 16 | var limit int 17 | 18 | func main() { 19 | var unique bool 20 | flag.BoolVar(&unique, "u", false, "") 21 | flag.BoolVar(&unique, "unique", false, "") 22 | 23 | var verbose bool 24 | flag.BoolVar(&verbose, "v", false, "") 25 | flag.BoolVar(&verbose, "verbose", false, "") 26 | flag.IntVar(&limit, "l", 100, "limit size") 27 | 28 | flag.Parse() 29 | 30 | mode := flag.Arg(0) 31 | fmtStr := flag.Arg(1) 32 | 33 | procFn, ok := map[string]urlProc{ 34 | "keys": keys, 35 | "values": values, 36 | "domains": domains, 37 | "paths": paths, 38 | "format": format, 39 | }[mode] 40 | 41 | if !ok { 42 | fmt.Fprintf(os.Stderr, "unknown mode: %s\n", mode) 43 | return 44 | } 45 | 46 | sc := bufio.NewScanner(os.Stdin) 47 | 48 | seen := make(map[string]bool) 49 | 50 | for sc.Scan() { 51 | u, err := url.Parse(sc.Text()) 52 | if err != nil { 53 | if verbose { 54 | fmt.Fprintf(os.Stderr, "parse failure: %s\n", err) 55 | } 56 | continue 57 | } 58 | 59 | // some urlProc functions return multiple things, 60 | // so it's just easier to always get a slice and 61 | // loop over it instead of having two kinds of 62 | // urlProc functions. 
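// (keys and values yield one entry per query parameter, while domains, paths and format wrap a single formatted string in a one-element slice.)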
63 | for _, val := range procFn(u, fmtStr) { 64 | 65 | // you do see empty values sometimes 66 | if val == "" { 67 | continue 68 | } 69 | 70 | if seen[val] && unique { 71 | continue 72 | } 73 | 74 | fmt.Println(val) 75 | 76 | // no point using up memory if we're outputting dupes 77 | if unique { 78 | seen[val] = true 79 | } 80 | } 81 | } 82 | 83 | if err := sc.Err(); err != nil { 84 | fmt.Fprintf(os.Stderr, "failed to read input: %s\n", err) 85 | } 86 | } 87 | 88 | type urlProc func(*url.URL, string) []string 89 | 90 | func keys(u *url.URL, _ string) []string { 91 | out := make([]string, 0) 92 | for key, _ := range u.Query() { 93 | out = append(out, key) 94 | } 95 | return out 96 | } 97 | 98 | func values(u *url.URL, _ string) []string { 99 | out := make([]string, 0) 100 | for _, vals := range u.Query() { 101 | for _, val := range vals { 102 | out = append(out, val) 103 | } 104 | } 105 | return out 106 | } 107 | 108 | func domains(u *url.URL, f string) []string { 109 | return format(u, "%d") 110 | } 111 | 112 | func paths(u *url.URL, f string) []string { 113 | return format(u, "%p") 114 | } 115 | 116 | func format(u *url.URL, f string) []string { 117 | 118 | out := &bytes.Buffer{} 119 | inFormat := false 120 | for _, r := range f { 121 | 122 | if r == '%' && !inFormat { 123 | inFormat = true 124 | continue 125 | } 126 | 127 | if !inFormat { 128 | out.WriteRune(r) 129 | continue 130 | } 131 | 132 | switch r { 133 | case '%': 134 | out.WriteRune('%') 135 | 136 | case 's': 137 | out.WriteString(u.Scheme) 138 | case 'd': 139 | out.WriteString(u.Hostname()) 140 | case 'P': 141 | out.WriteString(u.Port()) 142 | case 'p': 143 | out.WriteString(u.EscapedPath()) 144 | case 'q': 145 | out.WriteString(u.RawQuery) 146 | case 'f': 147 | out.WriteString(u.Fragment) 148 | case 'n': 149 | out.WriteRune('\n') 150 | case 'o': 151 | domain := u.Host 152 | org := u.Host 153 | suffix, ok := publicsuffix.PublicSuffix(domain) 154 | if ok { 155 | org = strings.Replace(domain, fmt.Sprintf(".%s", suffix), "", -1) 156 | } else { 157 | if strings.Contains(domain, ".") { 158 | parts := strings.Split(domain, ".") 159 | if len(parts) == 2 { 160 | org = parts[0] 161 | } else { 162 | org = parts[len(parts)-2] 163 | } 164 | } 165 | } 166 | out.WriteString(org) 167 | case 'D': 168 | var dots string 169 | if strings.Contains(u.Host, ".") { 170 | dots = strings.Join(strings.Split(u.Host, "."), "\n") 171 | dots = strings.Trim(dots, "\n") 172 | } 173 | out.WriteString(dots) 174 | // get paths but in lists 175 | case 'E': 176 | rPaths := u.EscapedPath() 177 | var paths string 178 | if strings.Contains(rPaths, "/") { 179 | for _, p := range strings.Split(rPaths, "/") { 180 | if len(p) < limit { 181 | paths += p + "\n" 182 | } 183 | } 184 | paths = strings.Trim(paths, "\n") 185 | } 186 | out.WriteString(paths) 187 | // get query but in lists 188 | case 'Q': 189 | rQueries := u.Query() 190 | var queries string 191 | if len(rQueries) > 0 { 192 | for k := range rQueries { 193 | queries += k + "\n" 194 | } 195 | } 196 | queries = strings.Trim(queries, "\n") 197 | out.WriteString(queries) 198 | default: 199 | // output untouched 200 | out.WriteRune('%') 201 | out.WriteRune(r) 202 | } 203 | 204 | inFormat = false 205 | } 206 | 207 | return []string{out.String()} 208 | } 209 | 210 | func init() { 211 | flag.Usage = func() { 212 | h := "Format URLs provided on stdin\n\n" 213 | 214 | h += "Usage:\n" 215 | h += " purl [OPTIONS] [MODE] [FORMATSTRING]\n\n" 216 | 217 | h += "Options:\n" 218 | h += " -u, --unique Only output unique values\n" 
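// -l (default 100) limits the length of each path segment printed by the %E directive, so list it with the other options
 h += "  -l               Length limit for path segments printed by %E (default 100)\n"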
219 | h += " -v, --verbose Verbose mode (output URL parse errors)\n\n" 220 | 221 | h += "Modes:\n" 222 | h += " keys Keys from the query string (one per line)\n" 223 | h += " values Values from the query string (one per line)\n" 224 | h += " domains The hostname (e.g. sub.example.com)\n" 225 | h += " paths The request path (e.g. /users)\n" 226 | h += " format Specify a custom format (see below)\n\n" 227 | 228 | h += "Format Directives:\n" 229 | h += " %% A literal percent character\n" 230 | h += " %n A new line character\n" 231 | h += " %s The request scheme (e.g. https)\n" 232 | h += " %d The domain (e.g. sub.example.com)\n" 233 | h += " %o The root domain (e.g. example)\n" 234 | h += " %P The port (e.g. 8080)\n" 235 | h += " %p The path (e.g. /users)\n" 236 | h += " %e The sinlge paths (e.g. user)\n" 237 | h += " %q The raw query string (e.g. a=1&b=2)\n" 238 | h += " %f The page fragment (e.g. page-section)\n\n" 239 | 240 | h += "Examples:\n" 241 | h += " cat urls.txt | purl keys\n" 242 | h += " cat urls.txt | purl format %s://%d%p?%q\n" 243 | h += " cat urls.txt | purl format %D%n%E%n%Q\n" 244 | 245 | fmt.Fprint(os.Stderr, h) 246 | } 247 | } 248 | -------------------------------------------------------------------------------- /qscreenshot/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "context" 6 | "flag" 7 | "fmt" 8 | "io/ioutil" 9 | "log" 10 | "math" 11 | "os" 12 | "path" 13 | "path/filepath" 14 | "strings" 15 | "sync" 16 | "time" 17 | 18 | "github.com/chromedp/cdproto/emulation" 19 | "github.com/chromedp/cdproto/page" 20 | "github.com/chromedp/chromedp" 21 | ) 22 | 23 | // Do screenshot from list of URLs 24 | // cat /tmp/list_of_urls.txt | qscreenshot -o ouput 25 | 26 | const ( 27 | QUALITY = 90 28 | ) 29 | 30 | var ( 31 | output string 32 | indexFile string 33 | concurrency int 34 | verbose bool 35 | absPath bool 36 | timeout int 37 | imgWidth int 38 | imgHeight int 39 | ) 40 | 41 | func main() { 42 | // cli arguments 43 | flag.IntVar(&concurrency, "c", 10, "Set the concurrency level") 44 | flag.StringVar(&output, "o", "screen", "Output Directory") 45 | flag.StringVar(&indexFile, "s", "", "Summary File") 46 | flag.BoolVar(&verbose, "v", false, "Verbose output") 47 | flag.BoolVar(&absPath, "a", false, "Use Absolute path in summary") 48 | flag.IntVar(&timeout, "timeout", 10, "screenshot timeout") 49 | flag.IntVar(&imgHeight, "height", 0, "height screenshot") 50 | flag.IntVar(&imgWidth, "width", 0, "width screenshot") 51 | flag.Parse() 52 | 53 | // prepare output 54 | if indexFile == "" { 55 | indexFile = path.Join(output, "screen-summary.txt") 56 | } 57 | err := os.MkdirAll(output, 0750) 58 | if err != nil { 59 | log.Fatal("Can't create output directory") 60 | } 61 | 62 | if absPath { 63 | output, _ = filepath.Abs(output) 64 | } 65 | 66 | var wg sync.WaitGroup 67 | jobs := make(chan string, concurrency) 68 | 69 | for i := 0; i < concurrency; i++ { 70 | wg.Add(1) 71 | go func() { 72 | defer wg.Done() 73 | for job := range jobs { 74 | log.Printf("[*] processing: %v", job) 75 | imgScreen := doScreenshot(job) 76 | if imgScreen != "" { 77 | log.Printf("[*] Store image: %v %v", job, imgScreen) 78 | sum := fmt.Sprintf("%v - %v", job, imgScreen) 79 | appendSummary(indexFile, sum) 80 | } 81 | } 82 | }() 83 | } 84 | 85 | sc := bufio.NewScanner(os.Stdin) 86 | go func() { 87 | for sc.Scan() { 88 | url := strings.TrimSpace(sc.Text()) 89 | if err := sc.Err(); err == nil && url != "" { 90 | jobs <- url 91 | 
} 92 | } 93 | close(jobs) 94 | }() 95 | wg.Wait() 96 | 97 | info, err := os.Stat(indexFile) 98 | if err == nil && !info.IsDir() { 99 | log.Printf("[+] Store summary in: %v", indexFile) 100 | } 101 | } 102 | 103 | func doScreenshot(raw string) string { 104 | imageName := strings.Replace(raw, "://", "___", -1) 105 | imageScreen := path.Join(output, fmt.Sprintf("%v.png", strings.Replace(imageName, "/", "_", -1))) 106 | 107 | opts := append(chromedp.DefaultExecAllocatorOptions[:], 108 | chromedp.Flag("headless", true), 109 | chromedp.Flag("ignore-certificate-errors", true), 110 | chromedp.Flag("disable-gpu", true), 111 | chromedp.Flag("enable-automation", true), 112 | chromedp.Flag("disable-extensions", true), 113 | chromedp.Flag("disable-setuid-sandbox", true), 114 | chromedp.Flag("disable-web-security", true), 115 | chromedp.Flag("no-first-run", true), 116 | chromedp.Flag("no-default-browser-check", true), 117 | ) 118 | // create context 119 | allocCtx, bcancel := chromedp.NewExecAllocator(context.Background(), opts...) 120 | defer bcancel() 121 | ctx, cancel := chromedp.NewContext(allocCtx, chromedp.WithLogf(log.Printf)) 122 | ctx, cancel = context.WithTimeout(ctx, time.Duration(timeout)*time.Second) // honor the -timeout flag instead of a hardcoded 10s 123 | defer cancel() 124 | 125 | // capture screenshot of an element 126 | var buf []byte 127 | err := chromedp.Run(ctx, fullScreenshot(raw, QUALITY, &buf)) 128 | // clean chromedp-runner folder 129 | cleanUp() 130 | if err != nil { 131 | log.Printf("[-] screen err: %v", raw) 132 | return "" 133 | } 134 | 135 | // write image 136 | if err := ioutil.WriteFile(imageScreen, buf, 0644); err != nil { 137 | log.Printf("[-] screen err: %v", raw) 138 | } 139 | if absPath { 140 | return imageScreen 141 | } 142 | return path.Base(imageScreen) 143 | } 144 | 145 | // fullScreenshot takes a screenshot of the entire browser viewport. 146 | // Liberally copied from puppeteer's source. 147 | // Note: this will override the viewport emulation settings. 148 | func fullScreenshot(urlstr string, quality int64, res *[]byte) chromedp.Tasks { 149 | return chromedp.Tasks{ 150 | chromedp.Navigate(urlstr), 151 | chromedp.ActionFunc(func(ctx context.Context) error { 152 | // get layout metrics 153 | _, _, contentSize, err := page.GetLayoutMetrics().Do(ctx) 154 | if err != nil { 155 | return err 156 | } 157 | 158 | width, height := int64(math.Ceil(contentSize.Width)), int64(math.Ceil(contentSize.Height)) 159 | // override the measured page size when both -width and -height flags are set 160 | 161 | if imgWidth != 0 && imgHeight != 0 { 162 | width = int64(imgWidth) 163 | height = int64(imgHeight) 164 | } 165 | 166 | // force viewport emulation 167 | err = emulation.SetDeviceMetricsOverride(width, height, 1, false). 168 | WithScreenOrientation(&emulation.ScreenOrientation{ 169 | Type: emulation.OrientationTypePortraitPrimary, 170 | Angle: 0, 171 | }). 172 | Do(ctx) 173 | if err != nil { 174 | return err 175 | } 176 | 177 | // capture screenshot 178 | *res, err = page.CaptureScreenshot(). 179 | WithQuality(quality).
180 | WithClip(&page.Viewport{ 181 | X: contentSize.X, 182 | Y: contentSize.Y, 183 | Width: float64(width), 184 | Height: float64(height), 185 | Scale: 1, 186 | }).Do(ctx) 187 | if err != nil { 188 | return err 189 | } 190 | return nil 191 | }), 192 | } 193 | } 194 | 195 | func cleanUp() { 196 | tmpFolder := path.Join(os.TempDir(), "chromedp-runner*") 197 | if _, err := os.Stat("/tmp/"); !os.IsNotExist(err) { 198 | tmpFolder = path.Join("/tmp/", "chromedp-runner*") 199 | } 200 | junks, err := filepath.Glob(tmpFolder) 201 | if err != nil { 202 | return 203 | } 204 | for _, junk := range junks { 205 | os.RemoveAll(junk) 206 | } 207 | } 208 | 209 | // appendSummary append string to a file 210 | func appendSummary(filename string, data string) (string, error) { 211 | // If the file doesn't exist, create it, or append to the file 212 | f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) 213 | if err != nil { 214 | return "", err 215 | } 216 | if _, err := f.Write([]byte(data + "\n")); err != nil { 217 | return "", err 218 | } 219 | if err := f.Close(); err != nil { 220 | return "", err 221 | } 222 | return filename, nil 223 | } 224 | -------------------------------------------------------------------------------- /rdns/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "context" 6 | "encoding/xml" 7 | "flag" 8 | "fmt" 9 | "io/ioutil" 10 | "net" 11 | "net/http" 12 | "net/url" 13 | "os" 14 | "sort" 15 | "strings" 16 | "sync" 17 | ) 18 | 19 | // Extract domain from IP Address info via reverse DNS 20 | // cat /tmp/list_of_IP | rdns -c 100 21 | var ( 22 | verbose bool 23 | alexa bool 24 | resolver string 25 | proto string 26 | ) 27 | 28 | func main() { 29 | // cli aguments 30 | var concurrency int 31 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 32 | flag.BoolVar(&alexa, "a", false, "Check Alexa Rank of domain") 33 | flag.BoolVar(&verbose, "v", false, "Verbose output") 34 | flag.StringVar(&resolver, "s", "8.8.8.8:53", "Resolver") 35 | flag.StringVar(&proto, "p", "tcp", "protocol to do reverse DNS") 36 | flag.Parse() 37 | 38 | stat, _ := os.Stdin.Stat() 39 | if (stat.Mode() & os.ModeCharDevice) != 0 { 40 | args := os.Args[1:] 41 | sort.Strings(args) 42 | raw := args[len(args)-1] 43 | getDomain(raw) 44 | os.Exit(0) 45 | } 46 | 47 | var wg sync.WaitGroup 48 | jobs := make(chan string, concurrency) 49 | 50 | for i := 0; i < concurrency; i++ { 51 | wg.Add(1) 52 | go func() { 53 | defer wg.Done() 54 | for job := range jobs { 55 | getDomain(job) 56 | } 57 | }() 58 | } 59 | 60 | sc := bufio.NewScanner(os.Stdin) 61 | go func() { 62 | for sc.Scan() { 63 | url := strings.TrimSpace(sc.Text()) 64 | if err := sc.Err(); err == nil && url != "" { 65 | jobs <- url 66 | } 67 | } 68 | close(jobs) 69 | }() 70 | wg.Wait() 71 | } 72 | 73 | func getDomain(raw string) { 74 | r := net.Resolver{ 75 | PreferGo: true, 76 | Dial: func(ctx context.Context, network, address string) (net.Conn, error) { 77 | d := net.Dialer{} 78 | return d.DialContext(ctx, proto, resolver) 79 | }, 80 | } 81 | 82 | domains, err := r.LookupAddr(context.Background(), raw) 83 | if err != nil { 84 | return 85 | } 86 | for _, d := range domains { 87 | domain := strings.TrimRight(d, ".") 88 | if !alexa { 89 | fmt.Printf("%s,%s\n", raw, domain) 90 | continue 91 | } 92 | rank, _ := getAlexaRank(domain) 93 | fmt.Printf("%s,%s,%s\n", raw, domain, rank) 94 | } 95 | } 96 | 97 | func getHostName(raw string, port string) string { 98 
| if !strings.HasPrefix(raw, "http") { 99 | raw = "https://" + raw 100 | } 101 | u, err := url.Parse(raw) 102 | if err != nil { 103 | fmt.Println(err) 104 | return "" 105 | } 106 | var hostname string 107 | if port != "" { 108 | return u.Hostname() + ":" + port 109 | } 110 | 111 | if u.Port() == "" { 112 | hostname = u.Hostname() 113 | } else { 114 | hostname = u.Hostname() + ":" + u.Port() 115 | } 116 | return hostname 117 | } 118 | 119 | func getAlexaRank(raw string) (string, error) { 120 | rank := "-1" 121 | 122 | resp, err := http.Get("http://data.alexa.com/data?cli=10&dat=snbamz&url=" + raw) 123 | if err != nil { 124 | return rank, err 125 | } 126 | 127 | defer resp.Body.Close() 128 | 129 | alexaData, err := ioutil.ReadAll(resp.Body) 130 | if err != nil { 131 | return rank, err 132 | } 133 | 134 | decoder := xml.NewDecoder(strings.NewReader(string(alexaData))) 135 | for { 136 | token, _ := decoder.Token() 137 | if token == nil { 138 | break 139 | } 140 | 141 | switch startElement := token.(type) { 142 | case xml.StartElement: 143 | if startElement.Name.Local == "POPULARITY" { 144 | if len(startElement.Attr) >= 2 { 145 | rank = startElement.Attr[1].Value 146 | } 147 | } 148 | } 149 | } 150 | return rank, nil 151 | } 152 | -------------------------------------------------------------------------------- /strr/README.md: -------------------------------------------------------------------------------- 1 | ## strr 2 | 3 | String replace 4 | 5 | ## Install 6 | 7 | ``` 8 | go get -u github.com/j3ssie/go-auxs/strr 9 | ``` 10 | 11 | ## Usage 12 | 13 | ```shell 14 | echo 'domain.com' | strr -t '{}.{{.Raw}}' -I wordlists.txt 15 | 16 | echo 'www-{}.domain.com' | strr -I wordlists.txt 17 | 18 | cat domains.txt | strr -t '{}.{{.Raw}}' -i 'dev' 19 | ``` -------------------------------------------------------------------------------- /strr/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "github.com/mitchellh/go-homedir" 8 | "os" 9 | "sort" 10 | "strings" 11 | "sync" 12 | ) 13 | 14 | // Examples 15 | // echo 'domain.com' | strr -t '{}.{{.Raw}}' -I wordlists.txt 16 | // echo 'www-{}.domain.com' | strr -I wordlists.txt 17 | // cat domains.txt | strr -t '{}.{{.Raw}}' -i 'dev' 18 | 19 | var ( 20 | verbose bool 21 | alexa bool 22 | extra bool 23 | input string 24 | inputList string 25 | replaceString string 26 | templateString string 27 | ) 28 | 29 | func main() { 30 | // cli arguments 31 | var inputs []string 32 | var concurrency int 33 | flag.IntVar(&concurrency, "c", 20, "Set the concurrency level") 34 | flag.StringVar(&inputList, "I", "", "inputList") 35 | flag.StringVar(&input, "i", "", "inputList") 36 | flag.StringVar(&replaceString, "s", "{}", "replaceString") 37 | flag.StringVar(&templateString, "t", "", "templateString") 38 | flag.Parse() 39 | 40 | stat, _ := os.Stdin.Stat() 41 | if (stat.Mode() & os.ModeCharDevice) != 0 { 42 | args := os.Args[1:] 43 | sort.Strings(args) 44 | target := args[len(args)-1] 45 | if FileExists(target) { 46 | inputs = append(inputs, ReadingLines(target)...) 47 | } else { 48 | inputs = append(inputs, target) 49 | } 50 | } 51 | if input != "" { 52 | inputs = append(inputs, input) 53 | } 54 | if inputList != "" { 55 | inputs = append(inputs, ReadingLines(inputList)...) 
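// at this point inputs holds every replacement word collected from the trailing argument, -i and -I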
56 | } 57 | 58 | var wg sync.WaitGroup 59 | jobs := make(chan string, concurrency) 60 | 61 | for i := 0; i < concurrency; i++ { 62 | wg.Add(1) 63 | go func() { 64 | defer wg.Done() 65 | for job := range jobs { 66 | for _, input := range inputs { 67 | doReplace(job, input) 68 | } 69 | } 70 | }() 71 | } 72 | 73 | sc := bufio.NewScanner(os.Stdin) 74 | go func() { 75 | for sc.Scan() { 76 | url := strings.TrimSpace(sc.Text()) 77 | if err := sc.Err(); err == nil && url != "" { 78 | jobs <- url 79 | } 80 | } 81 | close(jobs) 82 | }() 83 | wg.Wait() 84 | } 85 | 86 | func doReplace(raw string, replace string) { 87 | if templateString != "" { 88 | new := strings.ReplaceAll(templateString, "{{.Raw}}", raw) 89 | new = strings.ReplaceAll(new, replaceString, replace) 90 | fmt.Println(new) 91 | return 92 | } 93 | 94 | if strings.Contains(raw, replaceString) { 95 | new := strings.ReplaceAll(raw, replaceString, replace) 96 | fmt.Println(new) 97 | return 98 | } 99 | 100 | fmt.Printf("%s%s\n", raw, replace) 101 | } 102 | 103 | // FileExists check if file is exist or not 104 | func FileExists(filename string) bool { 105 | _, err := os.Stat(filename) 106 | if os.IsNotExist(err) { 107 | return false 108 | } 109 | return true 110 | } 111 | 112 | // ReadingLines Reading file and return content as []string 113 | func ReadingLines(filename string) []string { 114 | var result []string 115 | if strings.HasPrefix(filename, "~") { 116 | filename, _ = homedir.Expand(filename) 117 | } 118 | file, err := os.Open(filename) 119 | if err != nil { 120 | return result 121 | } 122 | defer file.Close() 123 | 124 | scanner := bufio.NewScanner(file) 125 | for scanner.Scan() { 126 | val := strings.TrimSpace(scanner.Text()) 127 | if val == "" { 128 | continue 129 | } 130 | result = append(result, val) 131 | } 132 | 133 | if err := scanner.Err(); err != nil { 134 | return result 135 | } 136 | return result 137 | } 138 | -------------------------------------------------------------------------------- /urp/README.md: -------------------------------------------------------------------------------- 1 | ## URLs Replace 2 | 3 | Parse URLs in fuzz format 4 | 5 | ## Install 6 | 7 | ``` 8 | go get -u github.com/j3ssie/go-auxs/urp 9 | ``` 10 | 11 | ## Usage 12 | 13 | > **NOTE:** always pass output to unique tools like `sort -u` 14 | 15 | ### For run dirbscan 16 | 17 | ```shell 18 | echo 'https://sub.target.com/foo/bar.php?order_id=0018200' | urp 19 | 20 | https://sub.target.com/foo/bar.php?order_id=FUZZ 21 | https://sub.target.com/FUZZ?order_id=0018200 22 | https://sub.target.com/foo/FUZZ?order_id=0018200 23 | 24 | echo 'https://sub.target.com/foo/bar.php?order_id=0018200' | urp -qq 25 | https://sub.target.com/FUZZ 26 | https://sub.target.com/foo/FUZZ 27 | 28 | ``` 29 | 30 | ### Get base Paths only (useful when using `jaeles --ba`) 31 | 32 | ```shell 33 | echo 'http://sub.target.com:80/34032/cegetel/fr-fr/index.php?q=123' | urp -I '' -qq 34 | 35 | http://sub.target.com 36 | http://sub.target.com/34032/ 37 | http://sub.target.com/34032/cegetel/ 38 | http://sub.target.com/34032/cegetel/fr-fr/ 39 | 40 | # Trim last '/' too 41 | echo 'http://sub.target.com:80/34032/cegetel/fr-fr/index.php?q=123' | urp -I '' -qq -ss 42 | 43 | http://sub.target.com 44 | http://sub.target.com/34032 45 | http://sub.target.com/34032/cegetel 46 | http://sub.target.com/34032/cegetel/fr-fr 47 | 48 | cat urls.txt | urp -I '' -qq | sort -u 49 | 50 | ``` 51 | -------------------------------------------------------------------------------- /urp/main.go: 
-------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "net/url" 8 | "os" 9 | path2 "path" 10 | "sort" 11 | "strconv" 12 | "strings" 13 | 14 | "github.com/jinzhu/copier" 15 | ) 16 | 17 | const ( 18 | ReplaceAll = "all" 19 | ReplaceOneByOne = "one-by-one" 20 | ) 21 | 22 | var ( 23 | IgnoreExtensions = []string{"png", "apng", "bmp", "gif", "ico", "cur", "jpg", "jpeg", "jfif", "pjp", "pjpeg", "svg", "tif", "tiff", "webp", "xbm", "3gp", "aac", "flac", "mpg", "mpeg", "mp3", "mp4", "m4a", "m4v", "m4p", "oga", "ogg", "ogv", "mov", "wav", "webm", "eot", "woff", "woff2", "ttf", "otf", "css"} 24 | ) 25 | 26 | // Literally copied from: https://github.com/theblackturtle/ureplace 27 | // with some improvements 28 | 29 | var ( 30 | appendMode bool 31 | query bool 32 | path bool 33 | removeMediaExt bool 34 | removeLastPath bool 35 | last bool 36 | paramName bool 37 | place string 38 | blacklistExt string 39 | toInjectList string 40 | injectWords string 41 | InjectAll bool 42 | RemoveQuery bool 43 | TrimLastSlash bool 44 | RemoveDummyPort bool 45 | payloadList []string 46 | ) 47 | 48 | func main() { 49 | flag.BoolVar(&appendMode, "a", false, "Append the value") 50 | flag.BoolVar(&removeMediaExt, "m", false, "Ignore media extensions") 51 | flag.BoolVar(&query, "n", false, "Inject payload to param name too") 52 | flag.BoolVar(¶mName, "l", false, "Append payload after the extension") 53 | flag.BoolVar(&path, "p", false, "Path only (default will replace both path and query)") 54 | flag.BoolVar(&last, "L", false, "Append payload after the extension") 55 | // remove some path 56 | flag.BoolVar(&removeLastPath, "pp", true, "Remove last path") 57 | flag.BoolVar(&RemoveDummyPort, "ppp", true, "Remove dummy port like :80") 58 | flag.BoolVar(&TrimLastSlash, "ss", false, "Trim Last Slash") 59 | flag.BoolVar(&RemoveQuery, "qq", false, "Remove Query String (useful when do dirbscan)") 60 | 61 | flag.StringVar(&blacklistExt, "b", "", "Additional blacklist extensions (Ex: js,html)") 62 | // new one 63 | flag.StringVar(&place, "i", "one-by-one", "Where to inject (when using with path or value)\n all: replace all\n one: replace one by one\n 2: replace the second path/param\n -2: replace the second path/param from the end") 64 | flag.BoolVar(&InjectAll, "A", true, "Inject All") 65 | flag.StringVar(&injectWords, "I", "FUZZ", "Inject Words to replace") 66 | flag.StringVar(&toInjectList, "iL", "", "Payload list") 67 | flag.Parse() 68 | 69 | // prepare words 70 | payloadList = append(payloadList, injectWords) 71 | if toInjectList != "" { 72 | pf, err := os.Open(toInjectList) 73 | if err != nil { 74 | panic(err) 75 | } 76 | defer pf.Close() 77 | payloadList = []string{} 78 | scPayload := bufio.NewScanner(pf) 79 | for scPayload.Scan() { 80 | line := strings.TrimSpace(scPayload.Text()) 81 | if line != "" { 82 | payloadList = append(payloadList, line) 83 | } 84 | } 85 | } 86 | 87 | if blacklistExt != "" { 88 | bl := strings.Split(blacklistExt, ",") 89 | for _, e := range bl { 90 | e = strings.TrimSpace(e) 91 | if e == "" { 92 | continue 93 | } 94 | IgnoreExtensions = append(IgnoreExtensions, e) 95 | } 96 | } 97 | sort.Strings(IgnoreExtensions) 98 | 99 | sc := bufio.NewScanner(os.Stdin) 100 | for sc.Scan() { 101 | raw := strings.TrimSpace(sc.Text()) 102 | 103 | 104 | if RemoveDummyPort { 105 | raw = strings.Replace(raw, ":80/", "/", -1) 106 | } 107 | 108 | u, err := url.Parse(raw) 109 | if err != nil { 110 | fmt.Fprintf(os.Stderr, "failed 
to parse url %s [%s]\n", sc.Text(), err) 111 | continue 112 | } 113 | 114 | if removeMediaExt { 115 | if BlacklistExt(u) { 116 | continue 117 | } 118 | } 119 | 120 | // really start to do something 121 | for _, payload := range payloadList { 122 | var finalUrls []string 123 | 124 | switch { 125 | case query: 126 | urls, err := QueryBuilder(u.String(), payload) 127 | finalUrls = append(finalUrls, urls...) 128 | if err != nil { 129 | fmt.Fprintf(os.Stderr, "[QUERY] Failed to generate %s with the payload %s\n", u.String(), payload) 130 | continue 131 | } 132 | case path: 133 | urls, err := PathBuilder(u.String(), payload) 134 | finalUrls = append(finalUrls, urls...) 135 | 136 | if err != nil { 137 | fmt.Fprintf(os.Stderr, "[PATH] Failed to generate %s with the payload %s\n", u.String(), payload) 138 | continue 139 | } 140 | default: 141 | // query 142 | if !RemoveQuery { 143 | urls, err := QueryBuilder(u.String(), payload) 144 | finalUrls = append(finalUrls, urls...) 145 | if err != nil { 146 | fmt.Fprintf(os.Stderr, "[QUERY] Failed to generate %s with the payload %s\n", u.String(), payload) 147 | continue 148 | } 149 | } 150 | 151 | // path 152 | urls, err := PathBuilder(u.String(), payload) 153 | finalUrls = append(finalUrls, urls...) 154 | 155 | if err != nil { 156 | fmt.Fprintf(os.Stderr, "[PATH] Failed to generate %s with the payload %s\n", u.String(), payload) 157 | continue 158 | } 159 | } 160 | 161 | for _, gU := range finalUrls { 162 | if TrimLastSlash { 163 | gU = strings.TrimRight(gU, "/") 164 | } 165 | fmt.Println(gU) 166 | } 167 | } 168 | } 169 | 170 | } 171 | 172 | func QueryBuilder(urlString string, payload string) ([]string, error) { 173 | pp := make([]string, 0) 174 | urlList := make([]string, 0) 175 | 176 | u, err := url.Parse(urlString) 177 | if err != nil { 178 | return urlList, err 179 | } 180 | 181 | if len(u.Query()) == 0 { 182 | return urlList, fmt.Errorf("no query") 183 | } 184 | 185 | for p := range u.Query() { 186 | pp = append(pp, p) 187 | } 188 | sort.Strings(pp) 189 | 190 | switch place { 191 | case ReplaceAll: 192 | qs := url.Values{} 193 | for param, vv := range u.Query() { 194 | if appendMode { 195 | qs.Set(param, vv[0]+payload) 196 | } else { 197 | qs.Set(param, payload) 198 | } 199 | } 200 | u.RawQuery = qs.Encode() 201 | uRawQuery, _ := url.QueryUnescape(u.String()) 202 | urlList = append(urlList, uRawQuery) 203 | case ReplaceOneByOne: 204 | for i := 0; i < len(pp); i++ { 205 | cloneURL := &url.URL{} 206 | err := copier.Copy(cloneURL, u) 207 | if err != nil { 208 | return []string{}, fmt.Errorf("Failed to clone url") 209 | } 210 | qs := cloneURL.Query() 211 | if appendMode { 212 | qs.Set(pp[i], qs.Get(pp[i])+payload) 213 | } else { 214 | qs.Set(pp[i], payload) 215 | } 216 | cloneURL.RawQuery = qs.Encode() 217 | cloneURLRawQuery, _ := url.QueryUnescape(cloneURL.String()) 218 | urlList = append(urlList, cloneURLRawQuery) 219 | } 220 | default: 221 | var toReplacePlace int 222 | 223 | if strings.HasPrefix(place, "-") { 224 | p, err := strconv.Atoi(place[1:]) 225 | if err != nil { 226 | p = 0 227 | } 228 | toReplacePlace = len(pp[:len(pp)-p]) 229 | } else { 230 | p, err := strconv.Atoi(place) 231 | if err != nil { 232 | // fmt.Fprintf(os.Stderr, "failed to convert \"place\" string to int\n") 233 | p = 0 234 | } 235 | toReplacePlace = p 236 | } 237 | 238 | if toReplacePlace >= len(pp) { 239 | toReplacePlace = len(pp) - 1 240 | } 241 | 242 | qs := u.Query() 243 | if appendMode { 244 | qs.Set(pp[toReplacePlace], qs.Get(pp[toReplacePlace])+payload) 245 | } else { 
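// replace mode: overwrite the value of the selected parameter with the payload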
246 | qs.Set(pp[toReplacePlace], payload) 247 | } 248 | u.RawQuery = qs.Encode() 249 | uRawQuery, _ := url.QueryUnescape(u.String()) 250 | urlList = append(urlList, uRawQuery) 251 | } 252 | return urlList, nil 253 | } 254 | 255 | func PathBuilder(urlString string, payload string) ([]string, error) { 256 | urlList := make([]string, 0) 257 | 258 | u, err := url.Parse(urlString) 259 | if err != nil { 260 | return urlList, err 261 | } 262 | 263 | if RemoveQuery { 264 | q := u.Query() 265 | for k, _ := range u.Query() { 266 | q.Del(k) 267 | } 268 | u.RawQuery = q.Encode() 269 | } 270 | 271 | path := strings.TrimPrefix(u.EscapedPath(), "/") 272 | paths := strings.Split(path, "/") 273 | 274 | if len(paths) == 0 { 275 | return urlList, fmt.Errorf("no paths") 276 | } 277 | 278 | switch place { 279 | case ReplaceAll: 280 | for i := range paths { 281 | if appendMode { 282 | paths[i] = paths[i] + payload 283 | } else { 284 | paths[i] = payload 285 | } 286 | } 287 | u.Path = strings.Join(paths, "/") 288 | uRawPath, _ := url.PathUnescape(u.String()) 289 | urlList = append(urlList, uRawPath) 290 | case ReplaceOneByOne: 291 | for i := 0; i < len(paths); i++ { 292 | 293 | cloneURL := &url.URL{} 294 | err := copier.Copy(cloneURL, u) 295 | if err != nil { 296 | return []string{}, fmt.Errorf("Failed to clone url") 297 | } 298 | pathClone := append(paths[:0:0], paths...) 299 | if appendMode { 300 | pathClone[i] = pathClone[i] + payload 301 | } else { 302 | pathClone[i] = payload 303 | // remove last paths after the payload 304 | if removeLastPath { 305 | pathClone = pathClone[:i+1] 306 | } 307 | } 308 | 309 | cloneURL.Path = strings.Join(pathClone, "/") 310 | cloneURLRawPath, _ := url.PathUnescape(cloneURL.String()) 311 | urlList = append(urlList, cloneURLRawPath) 312 | } 313 | default: 314 | var toReplacePlace int 315 | if strings.HasPrefix(place, "-") { 316 | p, err := strconv.Atoi(place[1:]) 317 | if err != nil { 318 | // fmt.Fprintf(os.Stderr, "Failed to convert \"place\" string to int\n") 319 | p = 0 320 | } 321 | toReplacePlace = len(paths[:len(paths)-p]) 322 | } else { 323 | p, err := strconv.Atoi(place) 324 | if err != nil { 325 | // fmt.Fprintf(os.Stderr, "Failed to convert \"place\" string to int\n") 326 | p = 0 327 | } 328 | toReplacePlace = p 329 | } 330 | if toReplacePlace >= len(paths) { 331 | toReplacePlace = len(paths) - 1 332 | } 333 | 334 | if appendMode { 335 | paths[toReplacePlace] = paths[toReplacePlace] + payload 336 | } else { 337 | paths[toReplacePlace] = payload 338 | } 339 | u.Path = strings.Join(paths, "/") 340 | uRawPath, _ := url.PathUnescape(u.String()) 341 | urlList = append(urlList, uRawPath) 342 | } 343 | 344 | if last { 345 | cloneURL := &url.URL{} 346 | err := copier.Copy(cloneURL, u) 347 | if err != nil { 348 | return []string{}, fmt.Errorf("Failed to clone url") 349 | } 350 | pathClone := append(paths[:0:0], paths...) 351 | cloneURL.Path = strings.Join(pathClone, "/") + payload 352 | cloneURLRawPath, _ := url.PathUnescape(cloneURL.String()) 353 | urlList = append(urlList, cloneURLRawPath) 354 | 355 | cloneURL.Path = strings.Join(pathClone, "/") + "?" 
+ payload 356 | cloneURLRawPath, _ = url.PathUnescape(cloneURL.String()) 357 | urlList = append(urlList, cloneURLRawPath) 358 | } 359 | 360 | return urlList, nil 361 | } 362 | 363 | // Return true if in blacklist 364 | func BlacklistExt(u *url.URL) bool { 365 | e := strings.TrimPrefix(path2.Ext(u.Path), ".") 366 | 367 | i := sort.Search(len(IgnoreExtensions), func(i int) bool { return e <= IgnoreExtensions[i] }) 368 | if i < len(IgnoreExtensions) && IgnoreExtensions[i] == e { 369 | return true 370 | } else { 371 | return false 372 | } 373 | } 374 | -------------------------------------------------------------------------------- /wlimit/go.mod: -------------------------------------------------------------------------------- 1 | module github.com/j3ssie/go-auxs/wlimit 2 | 3 | go 1.17 4 | -------------------------------------------------------------------------------- /wlimit/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "bufio" 5 | "flag" 6 | "fmt" 7 | "os" 8 | "strings" 9 | "sync" 10 | ) 11 | 12 | var ( 13 | onlyAscii bool 14 | stringCount int 15 | stringToCount string 16 | concurrency int 17 | limit int 18 | ) 19 | 20 | // only get word with 100 21 | // cat scope | wlimit -l 100 22 | // # get only the line with 1 '.' 23 | // cat list-of-domains | wlimit -sc 1 -s '.' 24 | 25 | func main() { 26 | flag.IntVar(&concurrency, "c", 50, "Set the concurrency level") 27 | flag.IntVar(&limit, "l", 100, "String length limit") 28 | flag.StringVar(&stringToCount, "s", "", "String to count") 29 | flag.IntVar(&stringCount, "sc", 1, "Number of string to count") 30 | 31 | flag.Parse() 32 | 33 | var wg sync.WaitGroup 34 | jobs := make(chan string, concurrency) 35 | 36 | for i := 0; i < concurrency; i++ { 37 | wg.Add(1) 38 | go func() { 39 | defer wg.Done() 40 | for job := range jobs { 41 | checkClean(job) 42 | } 43 | }() 44 | } 45 | 46 | sc := bufio.NewScanner(os.Stdin) 47 | go func() { 48 | for sc.Scan() { 49 | url := strings.TrimSpace(sc.Text()) 50 | if err := sc.Err(); err == nil && url != "" { 51 | jobs <- url 52 | } 53 | } 54 | close(jobs) 55 | }() 56 | wg.Wait() 57 | } 58 | 59 | func checkClean(line string) { 60 | if len(line) > limit { 61 | return 62 | } 63 | 64 | if stringToCount != "" { 65 | count := strings.Count(line, stringToCount) 66 | if count > stringCount { 67 | return 68 | } 69 | } 70 | 71 | fmt.Println(line) 72 | } 73 | --------------------------------------------------------------------------------