├── banner_image.png
├── README.md
├── GTM-subdomain-enum
├── README.md
└── main.go
└── URLFilter
├── README.md
└── main.go
/banner_image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blackbird-eu/community-scripts/HEAD/banner_image.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Community Scripts
2 | A list of open-source community scripts published to help you automate your work.
3 |
4 |
5 | ## Tools:
6 | - [Google Tag Manager Subdomain Enumeration](GTM-subdomain-enum)
7 | - [URLFilter](URLFilter)
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/GTM-subdomain-enum/README.md:
--------------------------------------------------------------------------------
1 | # Google Tag Manager Subdomain Enumeration script
2 | Enumerate subdomains using Google Tag Manager!
3 | ## Description
4 | Enumerate subdomains using Google Tag Manager! GTM Scripts can contain subdomains of your target domain and you can use these to include them into your recon process.
5 |
6 | **This is only if your target makes use of Google Tag Manager.**
7 |
8 | ## Usage
9 | ```
10 | $ go run main.go -target example.com
11 | ```
12 |
13 | ## Installation
14 | No third-party dependencies are required. Latest version of Golang is recommended.
15 |
16 | ```
17 | $ git clone https://github.com/blackbird-eu/community-scripts.git
18 | $ cd community-scripts/GTM-subdomain-enum
19 | ```
20 |
21 |
22 | **Start scanning for vulnerabilities today!**
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/URLFilter/README.md:
--------------------------------------------------------------------------------
1 | # URLFilter
2 | Filter URLs for duplicates
3 |
4 | ## Description
5 | URLFilter is a simple tool to filter a list of URLs for duplicates quickly and return unique results.
6 |
7 | Our approach involves looking for common patterns and filtering for unique results. More filters will be added over time. Feedback and contributions are welcome as always of course.
8 |
9 | ## Usage
10 | ```
11 | $ cat urls.txt | go run main.go
12 | ```
13 |
14 | ## Installation
15 | No third-party dependencies are required. Latest version of Golang is recommended.
16 |
17 | ```
18 | $ git clone https://github.com/blackbird-eu/community-scripts.git
19 | $ cd community-scripts/URLFilter
20 | ```
21 |
22 | You can additionally build and compile the Golang script and move/add it to your $PATH.
23 |
24 |
25 | **Start scanning your website for vulnerabilities today!**
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/URLFilter/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "os"
5 | "fmt"
6 | "bufio"
7 | "regexp"
8 | "net/url"
9 | "strings"
10 | )
11 |
// FilterURLs filters a list of URLs for near-duplicates and returns the
// unique results in input order.
//
// Two URLs are considered duplicates when they are identical after
// ignoring the query string and fragment and after normalizing volatile
// path parameters: UUIDs are replaced with a fixed placeholder UUID and
// purely numeric path segments with "1234". URLs that fail to parse are
// reported on stderr and skipped.
func FilterURLs(urls []string) []string {
	URLs := []string{}
	uniqueURLs := make(map[string]bool)

	// Compile the normalization patterns once, not once per URL
	// (the original recompiled both inside the loop).
	uuidRe := regexp.MustCompile(`[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}`)
	numRe := regexp.MustCompile(`/[0-9]+(/)?`)

	for _, u := range urls {
		parsedURL, err := url.Parse(u)
		if err != nil {
			// Diagnostics go to stderr so they don't pollute the
			// filtered URL list written to stdout.
			fmt.Fprintln(os.Stderr, "Error parsing URL:", err)
			continue
		}

		// Keep the original path and query so the emitted URL is the
		// caller's URL, not the normalized comparison key.
		originalPath := parsedURL.Path
		query := parsedURL.Query()

		// The fragment and query are ignored for comparison.
		parsedURL.Fragment = ""
		parsedURL.RawQuery = ""

		// Replace UUIDs in the path with a fixed placeholder.
		normalizedPath := uuidRe.ReplaceAllString(parsedURL.Path, "00000000-0000-0000-0000-000000000000")

		// Replace numeric path segments with the placeholder "1234",
		// preserving a trailing slash when present.
		normalizedPath = numRe.ReplaceAllStringFunc(normalizedPath, func(s string) string {
			if strings.HasSuffix(s, "/") {
				return "/1234/"
			}
			return "/1234"
		})

		parsedURL.Path = normalizedPath

		// The normalized URL (no query, no fragment) is the dedup key.
		key := parsedURL.String()

		if !uniqueURLs[key] {
			uniqueURLs[key] = true

			// Restore the original path and (re-encoded) query before
			// emitting. The previous implementation re-parsed the
			// normalized key here, which leaked the "1234"/zero-UUID
			// placeholders into the output for URLs carrying a query.
			parsedURL.Path = originalPath
			if len(query) > 0 {
				parsedURL.RawQuery = query.Encode()
			}

			URLs = append(URLs, parsedURL.String())
		}
	}

	return URLs
}
70 |
71 | func main() {
72 | URLs := []string{}
73 | scanner := bufio.NewScanner(os.Stdin)
74 |
75 | for scanner.Scan() {
76 | l := scanner.Text()
77 | URLs = append(URLs, fmt.Sprintf("%v", l))
78 | }
79 |
80 | if err := scanner.Err(); err != nil {
81 | fmt.Fprintln(os.Stderr, "Error: Failed reading input from stdin!", err)
82 | }
83 |
84 | URLs = FilterURLs(URLs)
85 |
86 | // Print unique URLs
87 | for _, u := range URLs {
88 | fmt.Println(u)
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/GTM-subdomain-enum/main.go:
--------------------------------------------------------------------------------
package main

import (
	"context"
	"flag"
	"fmt"
	"io/ioutil"
	"net/http"
	"os"
	"regexp"
	"strings"
	"time"
)
13 |
14 | func main() {
15 | targetFlag := flag.String("target", "", "Specify your target domain name")
16 | flag.Parse()
17 |
18 | if *targetFlag == "" {
19 | flag.Usage()
20 | return
21 | }
22 |
23 | var target string = *targetFlag
24 |
25 | tag := FetchGTMTag(target)
26 | subdomains := FetchDomains(target, tag)
27 |
28 | subdomains = RemoveDuplicates(subdomains)
29 |
30 | for _, s := range subdomains {
31 | fmt.Println(s)
32 | }
33 | }
34 |
// FetchGTMTag fetches https://<target> and returns the first Google Tag
// Manager container ID (e.g. "GTM-ABC1234") found in the response body,
// or the empty string if none is present or the request fails.
func FetchGTMTag(target string) string {
	ctx, cancel := context.WithTimeout(context.Background(), 7*time.Second)
	defer cancel()

	var tag string

	req, err := http.NewRequest("GET", fmt.Sprintf("https://%s", target), nil)
	if err != nil {
		// The original only logged here and then dereferenced the nil req.
		fmt.Fprintf(os.Stderr, "ERROR: Failed to build request for %s (%s)\n", target, err)
		return tag
	}

	// WithContext returns a copy; the original discarded the return value,
	// so the 7-second timeout was never actually applied to the request.
	req = req.WithContext(ctx)

	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36")

	client := &http.Client{}

	res, err := client.Do(req)
	if err != nil {
		fmt.Fprintf(os.Stderr, "ERROR: Failed to read response %v (%s)\n", res, err)
		return tag
	}
	defer res.Body.Close()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Fprintln(os.Stderr, "ERROR: Failed reading response body:", err)
		return tag
	}

	// Match and return a GTM container ID if present.
	// NOTE(review): container IDs observed in the wild vary between 6 and
	// 8 alphanumerics; this pattern matches exactly 7 — confirm coverage.
	re := regexp.MustCompile(`GTM-[A-Z0-9]{7}`)
	tag = re.FindString(string(body))

	return tag
}
73 |
74 | func FetchDomains(target string, tag string) []string {
75 | ctx, cancel := context.WithTimeout(context.Background(), 7 * time.Second)
76 | defer cancel()
77 |
78 | var subdomains []string
79 |
80 | req, err := http.NewRequest("GET", fmt.Sprintf("https://googletagmanager.com/gtm.js?id=%s", tag), nil)
81 | if err != nil {
82 | fmt.Printf("ERROR: Failed to send request %s (%s)\n", req, err)
83 | }
84 |
85 | req.WithContext(ctx)
86 |
87 | req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36")
88 |
89 | client := &http.Client{
90 | CheckRedirect: func(req *http.Request, via []*http.Request) error {
91 | return http.ErrUseLastResponse
92 | },
93 | }
94 |
95 | res, err := client.Do(req)
96 | if err != nil {
97 | fmt.Printf("ERROR: Failed to read response %v (%s)\n", res, err)
98 | }
99 |
100 | if res != nil {
101 | defer res.Body.Close()
102 |
103 | body, err := ioutil.ReadAll(res.Body)
104 | if err != nil {
105 | fmt.Println("ERROR: Failed reading response body:", err)
106 | return subdomains
107 | }
108 |
109 | // Root domain used to match domains
110 | rootDomain := ParseRootDomain(target)
111 |
112 | re := regexp.MustCompile(fmt.Sprintf(`(([a-zA-Z0-9-\.]+)?\.)?%s\.[a-zA-Z]{0,3}(\.[a-zA-Z]{0,3})?`, rootDomain))
113 | domains := re.FindAllString(string(body), -1)
114 |
115 | for _, domain := range domains {
116 | // Inscope regexp to match inscope domains
117 | inScope, _ := regexp.MatchString(fmt.Sprintf(`^(.*\.)?%s$`, target), domain)
118 |
119 | if inScope {
120 | subdomains = append(subdomains, domain)
121 | }
122 | }
123 | }
124 |
125 | return subdomains
126 | }
127 |
// ParseRootDomain extracts the registrable "root" label from a domain
// name, e.g. "example" from "example.com", "sub.example.com", or
// "example.co.uk".
//
// NOTE(review): without a public-suffix list this is a heuristic. A
// two-part TLD is assumed only when the name ends in "<short>.<cc>" —
// a two-letter final label preceded by a label of at most three
// characters (e.g. "co.uk"). The previous version also triggered on any
// three-letter TLD, so "sub.example.com" wrongly yielded "sub".
func ParseRootDomain(target string) string {
	parts := strings.Split(target, ".")
	n := len(parts)

	switch {
	case n >= 3 && len(parts[n-1]) == 2 && len(parts[n-2]) <= 3:
		// ccTLD with a second-level suffix, e.g. "example.co.uk".
		return parts[n-3]
	case n >= 2:
		return parts[n-2]
	default:
		// Single-label input has no registrable root.
		return ""
	}
}
144 |
// RemoveDuplicates returns domains with duplicates removed, preserving
// first-seen order.
func RemoveDuplicates(domains []string) []string {
	seen := make(map[string]bool, len(domains))
	uniqueDomains := []string{}

	for _, d := range domains {
		// The original compared with "!= true", wrapped the value in a
		// no-op Sprintf, and ended the loop body with a stray continue.
		if !seen[d] {
			seen[d] = true
			uniqueDomains = append(uniqueDomains, d)
		}
	}

	return uniqueDomains
}
160 |
--------------------------------------------------------------------------------