├── .gitignore
├── Dockerfile
├── LICENSE
├── NOTICE
├── README.md
├── buildBinariesLinux.sh
├── go.mod
├── go.sum
├── pkg
│   ├── config.go
│   ├── deception.go
│   ├── flags.go
│   ├── recon.go
│   ├── report.go
│   ├── request_smuggling.go
│   ├── requests.go
│   ├── techniques.go
│   ├── utils.go
│   └── wordlists.go
├── templates
│   ├── body_file
│   ├── config_file
│   ├── cookie_list
│   ├── header_list
│   ├── parameter_list
│   ├── recdomains_list
│   └── url_list
├── web-cache-vulnerability-scanner.go
└── wordlists
    ├── headers
    └── parameters
/.gitignore:
--------------------------------------------------------------------------------
1 | # ignore Log, Completed, Report
2 | WCVS_*
3 | # ignore the url list file
4 | urls.list
5 | # ignore test folder
6 | test/
7 | # ignore vscode
8 | .vscode/
9 | # ignore compiled binaries
10 | Web-Cache-Vulnerability-Scanner
11 | Web-Cache-Vulnerability-Scanner.exe
12 | # ignore build folder
13 | build/
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # BUILDER
2 | FROM golang:latest AS builder
3 | WORKDIR /go/src/app
4 | COPY . .
5 |
6 | RUN go get -d -v ./...
7 | RUN go build -o wcvs
8 |
9 | # RUNNING
10 | FROM debian:buster
11 | RUN mkdir /app
12 | COPY --from=builder /go/src/app/wcvs /wcvs
13 | WORKDIR /app/
14 | COPY wordlists/ wordlists/
15 | CMD ["/wcvs"]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Licensed under the Apache License, Version 2.0 (the "License");
2 | you may not use this file except in compliance with the License.
3 | You may obtain a copy of the License at
4 |
5 | http://www.apache.org/licenses/LICENSE-2.0
6 |
7 | Unless required by applicable law or agreed to in writing, software
8 | distributed under the License is distributed on an "AS IS" BASIS,
9 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | See the License for the specific language governing permissions and
11 | limitations under the License.
12 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | [](https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/releases/latest)
6 | [](https://goreportcard.com/report/github.com/Hackmanit/Web-Cache-Vulnerability-Scanner)
7 | [](https://golang.org/)
8 | [](https://www.apache.org/licenses/LICENSE-2.0)
9 |
10 | Web Cache Vulnerability Scanner (WCVS) is a fast and versatile CLI scanner for [web cache poisoning](#background-information) and web cache deception developed by [Hackmanit](https://hackmanit.de) and [Maximilian Hildebrand](https://www.github.com/m10x).
11 |
12 | The scanner supports many different web cache poisoning and web cache deception techniques, includes a crawler to identify further URLs to test,
13 | and can adapt to a specific web cache for more efficient testing. It is highly customizable and can be easily integrated into existing CI/CD pipelines.
14 |
15 | - [Features](#features)
16 | - [Installation](#installation)
17 | - [Option 1: Pre-built Binary](#option-1-pre-built-binary)
18 | - [Option 2: Kali Linux / BlackArch Repository](#option-2-kali-linux--blackarch-repository)
19 | - [Option 3: Install Using Go](#option-3-install-using-go)
20 | - [Option 4: Docker](#option-4-docker)
21 | - [Usage](#usage)
22 | - [Specify Headers, Parameters, Cookies, and More](#specify-headers-parameters-cookies-and-more)
23 | - [Generate a JSON Report](#generate-a-json-report)
24 | - [Crawl for URLs](#crawl-for-urls)
25 | - [Use a Proxy](#use-a-proxy)
26 | - [Throttle or Accelerate](#throttle-or-accelerate)
27 | - [Further Flags](#further-flags)
28 | - [Background Information](#background-information)
29 | - [License](#license)
30 |
31 | # Features
32 | - Support for 10 web cache poisoning techniques:
33 | 1. Unkeyed header poisoning
34 | 2. Unkeyed parameter poisoning
35 | 3. Parameter cloaking
36 | 4. Fat GET
37 | 5. HTTP response splitting
38 | 6. HTTP request smuggling
39 | 7. HTTP header oversize (HHO)
40 | 8. HTTP meta character (HMC)
41 | 9. HTTP method override (HMO)
42 | 10. Parameter pollution
43 | - Support for multiple web cache deception techniques:
44 | 1. Path Parameter
45 | 2. Path Traversal (.css file, /static directory and /robots.txt)
46 | 3. Appended special characters (both encoded and not encoded)
47 | - Analyzing a web cache before testing and adapting to it for more efficient testing
48 | - Generating a report in JSON format
49 | - Crawling websites for further URLs to scan
50 | - Routing traffic through a proxy (e.g., Burp Suite)
51 | - Limiting requests per second to bypass rate limiting
52 |
53 | # Installation
54 | ## Option 1: Pre-built Binary
55 | Prebuilt binaries of WCVS are provided on the [releases page](https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/releases).
56 | ## Option 2: Kali Linux / BlackArch Repository
57 | - Kali Linux: `apt install web-cache-vulnerability-scanner`
58 | - BlackArch: `pacman -S wcvs`
59 | ## Option 3: Install Using Go
60 | The repository can be installed using Go.
61 |
62 | go1.21 and higher
63 | ```
64 | go install -v github.com/Hackmanit/Web-Cache-Vulnerability-Scanner@latest
65 | ```
66 |
67 | ## Option 4: Docker
68 | ### 1. Clone repository or download the [latest source code release](https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/releases/latest)
69 | ### 2. Build image (the wordlists folder will also be copied)
70 | ```
71 | $ docker build .
72 | Sending build context to Docker daemon 29.54MB
73 | Step 1/10 : FROM golang:latest AS builder
74 | ---> 05c8f6d2538a
75 | Step 2/10 : WORKDIR /go/src/app
76 | ---> Using cache
77 | ---> f591f24be8cf
78 | Step 3/10 : COPY . .
79 | ---> 38b358dd3472
80 | Step 4/10 : RUN go get -d -v ./...
81 | ---> Running in 41f53de436c5
82 | ....
83 | Removing intermediate container 9e2e84d14ff3
84 | ---> 1668edcf6ee3
85 | Successfully built 1668edcf6ee3
86 | ```
87 |
88 | ### 3. Run wcvs
89 | ```
90 | $ docker run -it 1668edcf6ee3 /wcvs --help
91 | https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner
92 | version 1.0.0
93 | ```
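
To keep the generated report and log files after the container exits, one option is to mount a host directory and point `--generatepath/-gp` at it. This is a sketch using the image ID from the build example above; note that mounting over `/app` itself would shadow the copied `wordlists` folder:

```
$ docker run -it -v "$(pwd)/output":/app/output 1668edcf6ee3 /wcvs -u https://example.com -gr -gp /app/output
```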
94 |
95 | # Usage
96 | WCVS is highly customizable using its flags. Many of the flags can either contain a value directly or the path to a file.
97 |
98 | The only mandatory flag is `-u/--url` to provide the target URL which should be tested for web cache poisoning/deception. The target URL can be provided in different formats, as the examples below show.
99 |
100 | WCVS needs two wordlists in order to test for the first 5 techniques - one wordlist with header names and one with parameter names. The wordlists can either be present in the same directory WCVS is executed from or specified using the `--headerwordlist/-hw` and `--parameterwordlist/-pw` flags.
101 |
102 | ### Examples:
103 | ```
104 | wcvs -u 127.0.0.1
105 | wcvs -u http://127.0.0.1
106 | wcvs -u https://example.com
107 | wcvs -u file:path/to/url_list
108 |
109 | wcvs -u https://example.com -hw "file:/home/user/Documents/wordlist-header.txt"
110 | wcvs -u https://example.com -pw "file:/home/user/Documents/wordlist-parameter.txt"
111 | wcvs -u https://example.com -hw "file:/home/user/Documents/wordlist-header.txt" -pw "file:/home/user/Documents/wordlist-parameter.txt"
112 | ```
113 |
114 | ## Specify Headers, Parameters, Cookies, and More
115 | - `--cacheheader/-ch` specifies a custom cache header (case-insensitive) which will be checked for cache hits and misses
116 | - `--setcookies/-sc` specifies cookies which shall be added to the request
117 | - `--setheaders/-sh` specifies headers which shall be added to the request
118 | - `--setparameters/-sp` specifies parameters which shall be added to the request. While it is also possible to simply add them to the URL, it might be more useful in some cases to add them via this flag.
119 | - `--post/-post` changes the HTTP method from GET to POST
120 | - `--setbody/-sb` specifies the body which shall be added to the request
121 | - `--contenttype/-ct` specifies the value of the Content-Type header
122 | - `--useragentchrome/-uac` changes the User-Agent from `WebCacheVulnerabilityScanner v{Version-Number}` to `Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36`. While the same can be achieved with e.g. `-sh "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) ..."`, this flag provides a quicker way.
124 |
125 | #### If you want to specify more than one cookie, parameter, or header, you need to specify a file which contains them. Take a look at the [available templates](https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/tree/master/templates).
126 |
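A list file contains one entry per line; judging by the body-file parsing in `pkg/flags.go`, empty lines and lines starting with `//` are skipped, so a minimal header list could plausibly look like this:

```
// headers to add to every request
Referer: localhost
X-Forwarded-For: 127.0.0.1
```
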
127 | ### Examples:
128 | ```
129 | wcvs -u https://example.com -ch "X-Custom-Header-ABC"
130 |
131 | wcvs -u https://example.com -sc "PHPSESSID=123"
132 | wcvs -u https://example.com -sc "file:/home/user/Documents/cookies.txt"
133 |
134 | wcvs -u https://example.com -sh "Referer: localhost"
135 | wcvs -u https://example.com -sh "file:/home/user/Documents/headers.txt"
136 |
137 | wcvs -u https://example.com -sp "admin=true"
138 | wcvs -u https://example.com -sp "file:/home/user/Documents/parameters.txt"
139 |
140 | wcvs -u https://example.com -post -sb "admin=true"
141 | wcvs -u https://example.com -post -sb "file:/home/user/Documents/body.txt"
142 |
143 | wcvs -u https://example.com -post -sb "{}" -ct "application/json"
144 |
145 | wcvs -u https://example.com -uac
148 | ```
149 |
150 | ## Generate a JSON Report
151 | A JSON report is generated and updated after each scanned URL if the flag `--generatereport/-gr` is set.
152 | The report is written, just like a log file, to the same directory WCVS is executed from. To change the directory for all output files, use `--generatepath/-gp`.
153 | If HTML special characters shall be encoded in the report, use `--escapejson/-ej`.
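Judging by the repository's `.gitignore` entry `WCVS_*`, these output files (log, completed, report) are written with a `WCVS_` filename prefix.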
154 |
155 | ### Examples:
156 | ```
157 | wcvs -u https://example.com -gr
158 | wcvs -u https://example.com -gr -ej
159 | wcvs -u https://example.com -gr -gp /home/user/Documents
160 | wcvs -u https://example.com -gr -gp /home/user/Documents -ej
161 | ```
162 |
163 | ## Crawl for URLs
164 | In order to crawl for URLs, `--recursivity/-r` needs to be set. It specifies the maximum recursion depth for the crawler.
165 | By default WCVS only crawls for URLs of the same domain. To also crawl for other domains, `--recdomains/-red` can be used.
166 | To only crawl URLs which contain a specific string, `--recinclude/-rin` can be used.
167 | `--reclimit/-rl` limits how many URLs are crawled for each recursion depth.
168 | Also, a list with URLs which shall not be crawled can be specified with `--recexclude/-rex`. `--generatecompleted/-gc` can, for example, be used to generate a list with all already tested URLs. If a scan is repeated but WCVS shall not crawl and test the same URLs again, this list can be used for `--recexclude/-rex`.
169 | ### Examples:
170 | ```
171 | wcvs -u https://example.com -r 5
172 | wcvs -u https://example.com -r 5 -red /home/user/Documents/mydomains.txt
173 | wcvs -u https://example.com -r 5 -rl 2
174 | wcvs -u https://example.com -r 5 -rex /home/user/Documents/donttest.txt
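# illustrative: include only certain links while crawling, and generate a completed-URLs list
wcvs -u https://example.com -r 5 -rin ".js .css"
wcvs -u https://example.com -r 5 -gc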
175 | ```
176 |
177 | ## Use a Proxy
178 | To use a proxy, specify `--useproxy/-up`. If you are using Burp, make sure to uncheck *"Settings > Network > HTTP > HTTP/2 > Default to HTTP/2 if the server supports it"*. Otherwise some techniques, which rely on non-RFC-compliant headers, will not work.
179 | The default URL for the proxy is `http://127.0.0.1:8080`. In order to change it, use `--proxyurl/-purl`.
180 |
181 | ### Examples:
182 | ```
183 | wcvs -u https://example.com -up
184 | wcvs -u https://example.com -up -purl http://127.0.0.1:8081
185 | ```
186 |
187 | ## Throttle or Accelerate
188 | The maximum number of allowed requests per second can be set with `--reqrate/-rr`. By default, this number is unrestricted.
189 | Conversely, the number of requests per second can potentially be increased by using `--threads/-t` to raise the number of concurrent threads WCVS utilizes. The default value is 20.
190 |
191 | ### Examples:
192 | ```
193 | wcvs -u https://example.com -rr 10
194 | wcvs -u https://example.com -rr 1
195 | wcvs -u https://example.com -rr 0.5
196 | wcvs -u https://example.com -t 50
197 | ```
198 |
199 | ## Further Flags
200 | WCVS provides even more flags and options than the ones mentioned above. `--help/-h` provides a list of each flag, its meaning, and how to use it.
201 |
202 | ### Example:
203 | ```
204 | wcvs -h
205 | ```
206 |
207 | ## Background Information
208 | A short series of blog posts giving more information about web cache poisoning and WCVS can be found here:
209 |
210 | 1. [Is Your Application Vulnerable to Web Cache Poisoning?](https://www.hackmanit.de/en/blog-en/142-is-your-application-vulnerable-to-web-cache-poisoning)
211 | 2. [Web Cache Vulnerability Scanner (WCVS) - Free, Customizable, Easy-To-Use](https://www.hackmanit.de/en/blog-en/145-web-cache-vulnerability-scanner-wcvs-free-customizable-easy-to-use)
212 |
213 | The first version of Web Cache Vulnerability Scanner (WCVS) was developed as a part of a [bachelor's thesis by Maximilian Hildebrand](https://hackmanit.de/images/download/thesis/Automated-Scanning-for-Web-Cache-Poisoning-Vulnerabilities.pdf).
214 | ## License
215 | WCVS is developed by [Hackmanit](https://hackmanit.de) and [Maximilian Hildebrand](https://www.github.com/m10x) and licensed under the [Apache License, Version 2.0](LICENSE).
216 |
217 |
218 |
--------------------------------------------------------------------------------
/buildBinariesLinux.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | version=1.4.2
3 |
4 | rm build/*
5 |
6 | # Windows amd64
7 | goos=windows
8 | goarch=amd64
9 | GOOS=$goos GOARCH=$goarch go build -o wcvs.exe
10 | zip build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".zip wcvs.exe
11 |
12 | # Linux amd64
13 | goos=linux
14 | goarch=amd64
15 | GOOS=$goos GOARCH=$goarch go build -o wcvs
16 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
17 |
18 | # Linux arm64
19 | goos=linux
20 | goarch=arm64
21 | GOOS=$goos GOARCH=$goarch go build -o wcvs
22 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
23 |
24 | # Darwin/MacOS amd64
25 | goos=darwin
26 | goarch=amd64
27 | GOOS=$goos GOARCH=$goarch go build -o wcvs
28 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
29 |
30 | # Darwin/MacOS arm64
31 | goos=darwin
32 | goarch=arm64
33 | GOOS=$goos GOARCH=$goarch go build -o wcvs
34 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
35 |
36 | # FreeBSD amd64
37 | goos=freebsd
38 | goarch=amd64
39 | GOOS=$goos GOARCH=$goarch go build -o wcvs
40 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
41 |
42 | # OpenBSD amd64
43 | goos=openbsd
44 | goarch=amd64
45 | GOOS=$goos GOARCH=$goarch go build -o wcvs
46 | tar cfvz build/web-cache-vulnerability-scanner_"$version"_"$goos"_"$goarch".tar.gz wcvs
47 |
48 | # reset GOOS and GOARCH (they were only set per-command above; `set VAR=` is cmd.exe syntax, not bash)
49 | unset GOOS
50 | unset GOARCH
51 |
52 | # remove wcvs
53 | rm wcvs
54 | rm wcvs.exe
55 |
56 | # generate checksum file
57 | find build/ -type f \( -iname "*.tar.gz" -or -iname "*.zip" \) -exec sha256sum {} + > build/web-cache-vulnerability-scanner_"$version"_checksums_sha256.txt
58 |
--------------------------------------------------------------------------------
/go.mod:
--------------------------------------------------------------------------------
1 | module github.com/Hackmanit/Web-Cache-Vulnerability-Scanner
2 |
3 | go 1.23.0
4 |
5 | toolchain go1.23.4
6 |
7 | require (
8 | github.com/fatih/color v1.18.0
9 | github.com/xplorfin/fasthttp2curl v0.28.0
10 | golang.org/x/net v0.43.0
11 | golang.org/x/time v0.12.0
12 | )
13 |
14 | require (
15 | github.com/andybalholm/brotli v1.2.0 // indirect
16 | github.com/klauspost/compress v1.18.0 // indirect
17 | github.com/valyala/bytebufferpool v1.0.0 // indirect
18 | )
19 |
20 | require (
21 | github.com/mattn/go-colorable v0.1.14 // indirect
22 | github.com/mattn/go-isatty v0.0.20 // indirect
23 | github.com/valyala/fasthttp v1.65.0
24 | golang.org/x/sys v0.35.0 // indirect
25 | golang.org/x/text v0.28.0 // indirect
26 | )
27 |
--------------------------------------------------------------------------------
/go.sum:
--------------------------------------------------------------------------------
1 | github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
2 | github.com/andybalholm/brotli v1.2.0 h1:ukwgCxwYrmACq68yiUqwIWnGY0cTPox/M94sVwToPjQ=
3 | github.com/andybalholm/brotli v1.2.0/go.mod h1:rzTDkvFWvIrjDXZHkuS16NPggd91W3kUSvPlQ1pLaKY=
4 | github.com/brianvoe/gofakeit/v5 v5.11.2 h1:Ny5Nsf4z2023ZvYP8ujW8p5B1t5sxhdFaQ/0IYXbeSA=
5 | github.com/brianvoe/gofakeit/v5 v5.11.2/go.mod h1:/ZENnKqX+XrN8SORLe/fu5lZDIo1tuPncWuRD+eyhSI=
6 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
7 | github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
8 | github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
9 | github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a h1:v6zMvHuY9yue4+QkG/HQ/W67wvtQmWJ4SDo9aK/GIno=
10 | github.com/go-http-utils/headers v0.0.0-20181008091004-fed159eddc2a/go.mod h1:I79BieaU4fxrw4LMXby6q5OS9XnoR9UIKLOzDFjUmuw=
11 | github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
12 | github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
13 | github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
14 | github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
15 | github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ=
16 | github.com/mattn/go-colorable v0.1.14 h1:9A9LHSqF/7dyVVX6g0U9cwm9pG3kP9gSzcuIPHPsaIE=
17 | github.com/mattn/go-colorable v0.1.14/go.mod h1:6LmQG8QLFO4G5z1gPvYEzlUgJ2wF+stgPZH1UqBm1s8=
18 | github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
19 | github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
20 | github.com/moul/http2curl v1.0.0 h1:dRMWoAtb+ePxMlLkrCbAqh4TlPHXvoGUSQ323/9Zahs=
21 | github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ=
22 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
23 | github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
24 | github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
25 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
26 | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
27 | github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
28 | github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
29 | github.com/valyala/fasthttp v1.37.0/go.mod h1:t/G+3rLek+CyY9bnIE+YlMRddxVAAGjhxndDB4i4C0I=
30 | github.com/valyala/fasthttp v1.65.0 h1:j/u3uzFEGFfRxw79iYzJN+TteTJwbYkru9uDp3d0Yf8=
31 | github.com/valyala/fasthttp v1.65.0/go.mod h1:P/93/YkKPMsKSnATEeELUCkG8a7Y+k99uxNHVbKINr4=
32 | github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
33 | github.com/xplorfin/fasthttp2curl v0.28.0 h1:wfjx5DoIYfuJ1APP3pd3UV/y4KCDP4tdueBAK51niTw=
34 | github.com/xplorfin/fasthttp2curl v0.28.0/go.mod h1:w10nqy3kfq+yEqGKrpnZz2eGtbq68vkr1BFZyX0a/zM=
35 | github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
36 | github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
37 | golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
38 | golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
39 | golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
40 | golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
41 | golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
42 | golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE=
43 | golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg=
44 | golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
45 | golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
46 | golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
47 | golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
48 | golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
49 | golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
50 | golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
51 | golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI=
52 | golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
53 | golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
54 | golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
55 | golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
56 | golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
57 | golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
58 | golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng=
59 | golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU=
60 | golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
61 | golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
62 | golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
63 | golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
64 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
65 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
66 |
--------------------------------------------------------------------------------
/pkg/config.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "net/url"
5 |
6 | "golang.org/x/time/rate"
7 | )
8 |
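// Config is the global scanner configuration; its fields are populated from the command-line flags in pkg/flags.go.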
9 | var Config ConfigStruct
10 |
11 | type (
12 | ConfigStruct struct {
13 | Threads int
14 | ReqRate float64
15 | Verbosity int
16 | DoPost bool
17 | ContentType string
18 | QuerySeparator string
19 | CacheBuster string
20 | TimeOut int
21 | DeclineCookies bool
22 | Force bool
23 | UseHTTP bool
24 | ReasonTypes string
25 | CLDiff int
26 | HMDiff int
27 | SkipTimebased bool
28 | SkipWordlistCachebuster bool
29 | CacheHeader string
30 | DisableColor bool
31 | DisableStatusLine bool
32 | IgnoreStatus []int
33 |
34 | Recursivity int
35 | RecInclude string
36 | RecExclude []string
37 | RecDomains []string
38 | RecLimit int
39 |
40 | Urls []string
41 | Cookies []string
42 | Headers []string
43 | Parameters []string
44 | Body string
45 |
46 | OnlyTest string
47 | SkipTest string
48 |
49 | GeneratePath string
50 | GenerateReport bool
51 | EscapeJSON bool
52 | GenerateCompleted bool
53 | GenerateLog bool
54 |
55 | UseProxy bool
56 | ProxyURL string
57 |
58 | HeaderWordlist string
59 | ParameterWordlist string
60 |
61 | Intitialized bool
62 | Limiter *rate.Limiter `json:"-"`
63 | Website WebsiteStruct `json:"-"`
64 | }
65 |
66 | WebsiteStruct struct {
67 | Headers map[string][]string
68 | Body string
69 | Cookies map[string]string
70 | Url *url.URL
71 | UrlWOQueries string
72 | Queries map[string]string
73 | StatusCode int
74 | Cache CacheStruct
75 | Domain string
76 | }
77 |
78 | CacheStruct struct {
79 | CBwasFound bool
80 | CBisParameter bool
81 | CBisHeader bool
82 | CBisCookie bool
83 | CBisHTTPMethod bool
84 | CBName string
85 |
86 | //HitMissVerbose bool
87 | //HitMissTime bool
88 |
89 | NoCache bool
90 | Indicator string
91 | TimeIndicator bool
92 | }
93 | )
94 |
95 | func init() {
96 | }
97 |
98 | func ReadConfigFile() ConfigStruct {
99 | config := ConfigStruct{
100 | Threads: 0,
101 | Recursivity: 0,
102 | Verbosity: 0,
103 | DoPost: false,
104 | ContentType: "",
105 | QuerySeparator: "",
106 | CacheBuster: "",
107 | TimeOut: 0,
108 | DeclineCookies: false,
109 | Urls: nil,
110 | Cookies: nil,
111 | Headers: nil,
112 | Parameters: nil,
113 | Body: "",
114 | OnlyTest: "",
115 | SkipTest: "",
116 | ProxyURL: "",
117 | HeaderWordlist: "",
118 | ParameterWordlist: "",
119 | Website: WebsiteStruct{
120 | Body: "",
121 | Cookies: nil,
122 | Url: nil,
123 | UrlWOQueries: "",
124 | Queries: nil,
125 | StatusCode: 0,
126 | },
127 | }
128 |
129 | return config
130 | }
131 |
--------------------------------------------------------------------------------
/pkg/deception.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "strings"
7 | "time"
8 |
9 | "github.com/valyala/fasthttp"
10 | "github.com/xplorfin/fasthttp2curl"
11 | )
12 |
13 | func TestWebCacheDeception() reportResult {
14 | var repResult reportResult
15 | repResult.Technique = "Cache Deception"
16 |
17 | // cacheable extensions: class, css, jar, js, jpg, jpeg, gif, ico, png, bmp, pict, csv, doc, docx, xls, xlsx, ps, pdf, pls, ppt, pptx, tif, tiff, ttf, otf, webp, woff, woff2, svg, svgz, eot, eps, ejs, swf, torrent, midi, mid
18 |
19 | appendings := []string{
20 | "/.css", // Path parameter
21 | "/nonexistent1.css", // Path parameter
22 | "/../nonexistent2.css", // Path traversal
23 | "/%2e%2e/nonexistent3.css", // Encoded path traversal
24 | "%0Anonexistent4.css", // Encoded Newline
25 | "%00nonexistent5.css", // Encoded Null Byte
26 | "%09nonexistent6.css", // Encoded Tab
27 | "%3Bnonexistent7.css", // Encoded Semicolon
28 | "%23nonexistent8.css", // Encoded Pound
29 | "%3Fname=valnonexistent9.css", // Encoded Question Mark
30 | "%26name=valnonexistent10.css", // Encoded Ampersand
31 | ";nonexistent11.css", // Semicolon
32 | "?nonexistent12.css", // Question Mark
33 | "&nonexistent13.css", // Ampersand
34 | "%0A%2f%2e%2e%2fresources%2fnonexistent1.css", // Encoded Path Traversal to static directory using Encoded Newline
35 | "%00%2f%2e%2e%2fresources%2fnonexistent2.css", // Encoded Path Traversal to static directory using Encoded Null Byte
36 | "%09%2f%2e%2e%2fresources%2fnonexistent3.css", // Encoded Path Traversal to static directory using Encoded Tab
37 | "%3B%2f%2e%2e%2fresources%2fnonexistent4.css", // Encoded Path Traversal to static directoryEncoded using Semicolon
38 | "%23%2f%2e%2e%2fresources%2fnonexistent5.css", // Encoded Path Traversal to static directory using Encoded Pound
39 | "%3F%2f%2e%2e%2fresources%2fnonexistent6.css", // Encoded Path Traversal to static directory using Encoded Question Mark
40 | "%26%2f%2e%2e%2fresources%2fnonexistent7.css", // Encoded Path Traversal to static directory using Encoded Ampersand
41 | ";%2f%2e%2e%2fresources%2fnonexistent8.css", // Encoded Path Traversal to static directory using Semicolon
42 | "?%2f%2e%2e%2fresources%2fnonexistent9.css", // Encoded Path Traversal to static directoy using Question Mark
43 | "&%2f%2e%2e%2fresources%2fnonexistent10.css", // Encoded Path Traversal to static directory using Ampersand
44 | "%0A%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt using Encoded Newline
45 | "%00%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Encoded Null Byte
46 | "%09%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Encoded Tab
47 | "%3B%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directoryEncoded using Semicolon
48 | "%23%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Encoded Pound
49 | "%3F%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Encoded Question Mark
50 | "%26%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Encoded Ampersand
51 | ";%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Semicolon
52 | "?%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directoy using Question Mark
53 | "&%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2f%2e%2e%2frobots.txt", // Encoded Path Traversal to robots.txt directory using Ampersand
54 | }
55 | // TODO add "Exploiting normalization by the origin server" cache deception which needs to prepend something before the url path
56 |
57 | if Config.Website.StatusCode != 200 || Config.Website.Body == "" {
58 | msg := "Skipping Web Cache Deception test, as it requires a valid website configuration with a status code of 200 and a non-empty body.\n"
59 | Print(msg, Yellow)
60 | repResult.HasError = true
61 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
62 | return repResult
63 | }
64 | PrintVerbose("Testing for Web Cache Deception\n", NoColor, 1)
65 |
66 | // test each appending one after another
67 | for _, appendStr := range appendings {
68 | err := webCacheDeceptionTemplate(&repResult, appendStr)
69 | if err != nil {
70 | repResult.HasError = true
71 | repResult.ErrorMessages = append(repResult.ErrorMessages, err.Error())
72 | }
73 | }
74 |
75 | return repResult
76 | }
77 |
78 | func webCacheDeceptionTemplate(repResult *reportResult, appendStr string) error {
79 | var msg string
80 | var repCheck reportCheck
81 | req := fasthttp.AcquireRequest()
82 | resp := fasthttp.AcquireResponse()
83 | defer fasthttp.ReleaseRequest(req)
84 | defer fasthttp.ReleaseResponse(resp)
85 | var err error
86 |
87 | rUrl := Config.Website.Url.String()
88 | // check whether the URL contains exactly two slashes (i.e., only the ones in the scheme)
89 | if strings.Count(rUrl, "/") == 2 && !strings.HasPrefix(appendStr, "/") {
90 | // append `/`, so e.g. https://example%0A does not throw an error when building the request
91 | rUrl += "/"
92 | }
93 |
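// First request: fetch the manipulated URL; a vulnerable cache may store this response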
94 | req.Header.SetMethod("GET")
95 | req.SetRequestURI(rUrl + appendStr)
96 | setRequest(req, false, "", nil, false)
97 |
98 | err = client.Do(req, resp)
99 | if err != nil {
100 | msg = fmt.Sprintf("webCacheDeceptionTemplate: %s: client.Do: %s\n", appendStr, err.Error())
101 | Print(msg, Red)
102 | return errors.New(msg)
103 | }
104 |
105 | waitLimiter("Web Cache Deception")
106 |
107 | if resp.StatusCode() != Config.Website.StatusCode || string(resp.Body()) != Config.Website.Body {
108 | return nil // no cache deception, as the response is not the same as the original one
109 | }
110 |
111 | if Config.Website.Cache.NoCache || Config.Website.Cache.Indicator == "age" {
112 | time.Sleep(1 * time.Second) // wait a second to ensure that age header is not set to 0
113 | }
114 |
115 | waitLimiter("Web Cache Deception")
116 |
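// Second request: if it results in a cache hit and the body is unchanged, the first response was cached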
117 | err = client.Do(req, resp)
118 | if err != nil {
119 | msg = fmt.Sprintf("webCacheDeceptionTemplate: %s: client.Do: %s\n", appendStr, err.Error())
120 | Print(msg, Red)
121 | return errors.New(msg)
122 | }
123 | respHeader := headerToMultiMap(&resp.Header)
124 |
125 | // Add the request as curl command to the report
126 | command, err := fasthttp2curl.GetCurlCommandFastHttp(req)
127 | if err != nil {
128 | PrintVerbose("Error: fasthttp2curl: "+err.Error()+"\n", Yellow, 1)
129 | }
130 |
131 | repCheck.Request.CurlCommand = command.String()
132 | PrintVerbose("Curl command: "+repCheck.Request.CurlCommand+"\n", NoColor, 2)
133 |
134 | var cacheIndicators []string
135 | if Config.Website.Cache.Indicator == "" { // check if now a cache indicator exists
136 | cacheIndicators = analyzeCacheIndicator(respHeader)
137 | } else {
138 | cacheIndicators = []string{Config.Website.Cache.Indicator}
139 | }
140 |
141 | hit := false
142 | for _, indicator := range cacheIndicators {
143 | for _, v := range respHeader[indicator] {
144 | indicValue := strings.TrimSpace(strings.ToLower(v))
145 | if checkCacheHit(indicValue, Config.Website.Cache.Indicator) {
146 | hit = true
147 | Config.Website.Cache.Indicator = indicator
148 | }
149 | }
150 | }
151 |
152 | // check if there's a cache hit and if the body didn't change (otherwise it could be a cached error page, for example)
153 | if hit && string(resp.Body()) == Config.Website.Body && resp.StatusCode() == Config.Website.StatusCode {
154 | repResult.Vulnerable = true
155 | repCheck.Reason = "The response got cached due to Web Cache Deception"
156 | msg = fmt.Sprintf("%s was successfully deceived! appended: %s\n", rUrl, appendStr)
157 | Print(msg, Green)
158 | msg = "Curl: " + repCheck.Request.CurlCommand + "\n\n"
159 | Print(msg, Green)
160 |
161 | repCheck.Identifier = appendStr
162 | repCheck.URL = req.URI().String()
163 | // Dump the request
164 | repCheck.Request.Request = req.String()
165 | // Dump the response without the body
166 | resp.SkipBody = true
167 | repCheck.Request.Response = resp.String()
168 |
169 | repResult.Checks = append(repResult.Checks, repCheck)
170 | } else {
171 | PrintVerbose("Curl command: "+repCheck.Request.CurlCommand+"\n", NoColor, 2)
172 | }
173 |
174 | return nil
175 | }
176 |
--------------------------------------------------------------------------------
/pkg/flags.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "flag"
5 | "fmt"
6 | "log"
7 | "net/url"
8 | "os"
9 | "runtime"
10 | "slices"
11 | "strconv"
12 | "strings"
13 | "text/tabwriter"
14 |
15 | "github.com/fatih/color"
16 | "golang.org/x/time/rate"
17 | )
18 |
19 | var (
20 | version string
21 | useragent string
22 |
23 | generalOptions []FlagStruct
24 | generateOptions []FlagStruct
25 | requestOptions []FlagStruct
26 | crawlOptions []FlagStruct
27 | wordlistOptions []FlagStruct
28 | )
29 |
30 | type FlagStruct struct {
31 | LongFlag string
32 | ShortFlag string
33 | Description string
34 | }
35 |
36 | func init() {
37 |
38 | }
39 |
40 | func ParseFlags(vers string) {
41 | /* Getting Command-line flags */
42 | version = vers
43 | useragent = "WebCacheVulnerabilityScanner v" + version
44 | pathPrefix := ""
45 | if runtime.GOOS == "windows" {
46 | pathPrefix = "C:"
47 | }
48 |
49 | // General Options
50 | techniqueNames := "deception,cookies,css,forwarding,smuggling,dos,headers,parameters,fatget,cloaking,splitting,pollution,encoding"
51 |
52 | var ignoreStatus string
53 |
54 | appendInt(&generalOptions, &Config.Verbosity,
55 | "verbosity", "v", 1, "Set verbosity. 0 = quiet, 1 = normal, 2 = verbose")
56 | appendFloat(&generalOptions, &Config.ReqRate,
57 | "reqrate", "rr", float64(rate.Inf), "Requests per second. Float value. Has to be greater than 0. Default value is infinite")
58 | appendInt(&generalOptions, &Config.Threads,
59 | "threads", "t", 20, "Threads to use. Default value is 20")
60 | appendInt(&generalOptions, &Config.TimeOut,
61 | "timeout", "to", 15, "Seconds until timeout. Default value is 15")
62 | appendString(&generalOptions, &Config.OnlyTest,
63 | "onlytest", "ot", "", "Choose which tests to run. Use the , separator to specify multiple ones. Example: -onlytest '"+techniqueNames+"'")
64 | appendString(&generalOptions, &Config.SkipTest,
65 | "skiptest", "st", "", "Choose which tests to not run. Use the , separator to specify multiple ones. Example: -skiptest '"+techniqueNames+"'")
66 | appendBoolean(&generalOptions, &Config.UseProxy,
67 | "useproxy", "up", false, "Do you want to use a proxy?")
68 | appendString(&generalOptions, &Config.ProxyURL,
69 | "proxyurl", "purl", "http://127.0.0.1:8080", "Url for the proxy. Default value is http://127.0.0.1:8080")
70 | appendBoolean(&generalOptions, &Config.Force,
71 | "force", "f", false, "Perform the tests no matter if there is a cache or even the cachebuster works or not")
72 | appendString(&generalOptions, &ignoreStatus,
73 | "ignorestatus", "is", "", "Ignore a specific status code for cache poisoning")
74 | appendString(&generalOptions, &Config.ReasonTypes,
75 | "reasontypes", "rt", "body,header,status,length", "Choose which reason types to use for cache poisoning. Choose from: body (reflection in body),header (reflection in header), status (change of status code), length (change of body length). Default is 'body,header,status,length'")
76 | appendInt(&generalOptions, &Config.CLDiff,
77 | "contentlengthdifference", "cldiff", 5000, "Threshold for reporting possible Finding, when 'poisoned' response differs more from the original length. Default is 5000. 0 = don't check. May be prone to false positives!")
78 | appendInt(&generalOptions, &Config.HMDiff,
79 | "hitmissdifference", "hmdiff", 30, "Threshold for time difference (milliseconds) between cache hit and cache miss responses. Default is 30")
80 | appendBoolean(&generalOptions, &Config.SkipTimebased,
81 | "skiptimebased", "stime", false, "Skip checking if a repsonse gets cached by measuring time differences (may be prone to false positives, or increase hitmissdifference)")
82 | appendBoolean(&generalOptions, &Config.SkipWordlistCachebuster,
83 | "skipwordlistcachbuster", "swordlistcb", false, "Skip using wordlists to check for cachebusters (may be time intensive)")
84 | appendString(&generalOptions, &Config.CacheHeader,
85 | "cacheheader", "ch", "", "Specify a custom cache header")
86 | appendBoolean(&generalOptions, &Config.DisableColor,
87 | "nocolor", "nc", false, "Disable colored output")
88 | appendBoolean(&generalOptions, &Config.DisableStatusLine,
89 | "nostatusline", "ns", false, "Disable status line output")
90 |
91 | // Generate Options
92 | appendString(&generateOptions, &Config.GeneratePath,
93 | "generatepath", "gp", "./", "Path all files (log, report, completed) will be written to. Example: -gp '"+pathPrefix+"/p/a/t/h/'. Default is './'")
94 | appendBoolean(&generateOptions, &Config.GenerateReport,
95 | "generatereport", "gr", false, "Do you want a report to be generated?")
96 | appendBoolean(&generateOptions, &Config.EscapeJSON,
97 | "escapejson", "ej", false, "Do you want HTML special chars to be encoded in the report?")
98 | appendBoolean(&generateOptions, &Config.GenerateCompleted,
99 | "generatecompleted", "gc", false, "Do you want a list with completed URLs to be generated?")
100 | appendBoolean(&generateOptions, &Config.GenerateLog,
101 | "generatelog", "gl", false, "Do you want a log file to be created?")
102 |
103 | // Request Options
104 | var (
105 | urlStr string
106 | setCookiesStr string
107 | setHeadersStr string
108 | setParametersStr string
109 | setBodyStr string
110 | userAgentChrome bool
111 | )
112 |
113 | appendString(&requestOptions, &urlStr,
114 | "url", "u", "", "Url to scan. Has to start with http:// or https://. Otherwise use file: to specify a file with (multiple) urls. E.g. -u https://www.example.com or -u file:templates/url_list")
115 | appendBoolean(&requestOptions, &Config.UseHTTP,
116 | "usehttp", "http", false, "Use http instead of https for URLs, which doesn't specify either one")
117 | appendBoolean(&requestOptions, &Config.DeclineCookies,
118 | "declineCookies", "dc", false, "Do you don't want to use cookies, which are received in the response of the first request?")
119 | appendString(&requestOptions, &Config.CacheBuster,
120 | "cachebuster", "cb", "cbwcvs", "Specify the cachebuster to use. The default value is cbwcvs")
121 | appendString(&requestOptions, &setCookiesStr,
122 | "setcookies", "sc", "", "Set a Cookie. Otherwise use file: to specify a file with urls. E.g. -sc uid=123 or -sc file:templates/cookie_list")
123 | appendString(&requestOptions, &setHeadersStr,
124 | "setheaders", "sh", "", "Set a Header. Otherwise use file: to specify a file with urls. E.g. -sh 'User-Agent: Safari/1.1' or -sh file:templates/header_list")
125 | appendString(&requestOptions, &setParametersStr,
126 | "setparameters", "sp", "", "Set a Query Parameter. Otherwise use file: to specify a file with urls. E.g. -sp user=admin or -sp file:templates/parameter_list")
127 | appendString(&requestOptions, &setBodyStr,
128 | "setbody", "sb", "", "Set the requests' body. Otherwise use file: to specify a file with urls. E.g. -sb 'admin=true' or -sh file:templates/body_file")
129 | appendBoolean(&requestOptions, &Config.DoPost,
130 | "post", "post", false, "Do a POST request instead of a GET request")
131 | appendString(&requestOptions, &Config.ContentType,
132 | "contenttype", "ct", "application/x-www-form-urlencoded", "Set the contenttype for a POST Request. Default is application/x-www-form-urlencoded. If you don't want a content-type to be used at all use -ct ''")
133 | appendString(&requestOptions, &Config.QuerySeparator,
134 | "parameterseparator", "ps", "&", "Specify the separator for parameters. The default value is &")
135 | appendBoolean(&requestOptions, &userAgentChrome,
136 | "useragentchrome", "uac", false, "Set chrome as User-Agent. Default is "+useragent)
137 |
138 | // Crawl Options
139 | var (
140 | recExcludeStr string
141 | recDomainsStr string
142 | )
143 |
144 | appendInt(&crawlOptions, &Config.Recursivity,
145 | "recursivity", "r", 0, "Put (via href or src specified) urls at the end of the queue if the domain is the same. Specify how deep the recursivity shall go. Default value is 0 (no recursivity)")
146 | appendInt(&crawlOptions, &Config.RecLimit,
147 | "reclimit", "rl", 0, "Define a limit, how many files shall be checked recursively. Default is 0 (unlimited)")
148 | appendString(&crawlOptions, &Config.RecInclude,
149 | "recinclude", "rin", "", "Choose which links should be included. Separate with a space. E.g: -rin '.js .css'")
150 | appendString(&crawlOptions, &recExcludeStr,
151 | "recexclude", "rex", "", "Use -cp (-completedpath) or -gc (-generatecompleted) to generate a list of already completed URLs. Use -rex path/to/file so the already completed URLs won't be tested again recursively.")
152 | appendString(&crawlOptions, &recDomainsStr,
153 | "recdomains", "red", "", "Define an additional domain which is allowed to be added recursively. Otherwise use file: to specify a file with urls. E.g. -sh 'api.example.com' or -sh file:templates/recdomains_list")
154 |
155 | // Wordlist Options
156 | appendString(&wordlistOptions, &Config.HeaderWordlist,
157 | "headerwordlist", "hw", "", "Wordlist for headers to test.")
158 | appendString(&wordlistOptions, &Config.ParameterWordlist,
159 | "parameterwordlist", "pw", "", "Wordlist for query parameters to test.")
160 |
161 | flag.CommandLine.Usage = help
162 |
163 | // flags need to be parsed, before they are used
164 | flag.Parse()
165 |
166 | // Check if color should be disabled
167 | if Config.DisableColor {
168 | color.NoColor = true
169 | }
170 | fmt.Printf(getLogo()+"\nWCVS - the Web Cache Vulnerability Scanner. (v%s)"+"\n\n", version)
171 |
172 | /* Checking values of Flags */
173 | if len(flag.Args()) > 0 {
174 | msg := fmt.Sprintf("%s: Args are not supported! Use flags. Use -h or --help to get a list of all supported flags\n", flag.Args())
175 | PrintFatal(msg)
176 | }
177 | if urlStr == "" {
178 | msg := "No url specified. Use -url or -u. Use -h or --help to get a list of all supported flags\n"
179 | PrintFatal(msg)
180 | }
181 |
182 | // Change User Agent
183 | if userAgentChrome {
184 | useragent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36"
185 | }
186 |
187 | // IgnoreStatus string to int slice
188 | if ignoreStatus != "" {
189 | statusSlice := strings.Split(ignoreStatus, ",")
190 | for _, status := range statusSlice {
191 | statusInt, err := strconv.Atoi(strings.TrimSpace(status))
192 | if err != nil {
193 | 				// the logo and banner were already printed above
194 | 				PrintFatal("Error converting to int: " + err.Error())
195 | }
196 | Config.IgnoreStatus = append(Config.IgnoreStatus, statusInt)
197 | }
198 | }
199 |
200 | // Read RecExcludeURL(s)
201 | if recExcludeStr != "" {
202 | Config.RecExclude = ReadLocalFile(recExcludeStr, "RecExclude")
203 | }
204 |
205 | // Read URL(s)
206 | Config.Urls = readFile(urlStr, Config.Urls, "URL")
207 |
208 | // Read RecDomain(s)
209 | Config.RecDomains = readFile(recDomainsStr, Config.RecDomains, "RecDomain")
210 | for _, u := range Config.Urls { // add all domains from urls to recdomains
211 | urlParsed, err := url.Parse(strings.TrimSpace(u))
212 | if err != nil {
213 | log.Fatal("Error parsing recdomains: ", err)
214 | }
215 | domain := urlParsed.Hostname()
216 | if !slices.Contains(Config.RecDomains, domain) {
217 | Config.RecDomains = append(Config.RecDomains, domain)
218 | }
219 | }
220 |
221 | // Read Cookie(s)
222 | Config.Cookies = readFile(setCookiesStr, Config.Cookies, "Cookie")
223 |
224 | // Read Header(s)
225 | Config.Headers = readFile(setHeadersStr, Config.Headers, "Headers")
226 |
227 | // Read Parameter(s)
228 | Config.Parameters = readFile(setParametersStr, Config.Parameters, "Parameter")
229 |
230 | /* Read Body */
231 | 	if strings.HasPrefix(setBodyStr, "file:") { // use the same file: prefix the -sb help text documents
232 | bodySlice := ReadLocalFile(setBodyStr, "Body")
233 | for _, l := range bodySlice {
234 | l = strings.TrimSuffix(l, "\r")
235 | l = strings.TrimSpace(l)
236 | if strings.HasPrefix(l, "//") || l == "" {
237 | continue
238 | }
239 | Config.Body += l
240 | }
241 | } else {
242 | Config.Body = setBodyStr
243 | }
244 |
245 | // Set Limiter
246 | Config.Limiter = rate.NewLimiter(rate.Limit(Config.ReqRate), 1)
247 |
248 | Config.OnlyTest = strings.ToLower(Config.OnlyTest)
249 | Config.SkipTest = strings.ToLower(Config.SkipTest)
250 | }
251 |
252 | func readFile(str string, field []string, name string) []string {
253 | if strings.HasPrefix(str, "file:") {
254 | return ReadLocalFile(str, name)
255 | } else {
256 | return append(field, str)
257 | }
258 | }
259 |
260 | func help() {
261 | w := new(tabwriter.Writer)
262 | w.Init(os.Stdout, 8, 8, 0, '\t', 0)
263 |
264 | fmt.Printf(getLogo()+"\nWCVS - the Web Cache Vulnerability Scanner. (v%s)"+"\n\n", version)
265 |
266 | fmt.Println("Published by Hackmanit under http://www.apache.org/licenses/LICENSE-2.0")
267 | fmt.Println("Author: Maximilian Hildebrand")
268 | fmt.Println("Repository: https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner")
269 | fmt.Println("Blog Post: https://hackmanit.de/en/blog-en/145-web-cache-vulnerability-scanner-wcvs-free-customizable-easy-to-use")
270 | fmt.Print("Usage: Web-Cache-Vulnerability-Scanner(.exe) [options]\n\n")
271 |
272 | fmt.Println("General Options:")
273 | fmt.Fprintf(w, "%s\t%s\t%s\n", "--help", "-h", "Show this help and quit")
274 | writeToWriter(w, generalOptions)
275 |
276 | fmt.Println("\nGenerate Options:")
277 | writeToWriter(w, generateOptions)
278 |
279 | fmt.Println("\nRequest Options:")
280 | writeToWriter(w, requestOptions)
281 |
282 | fmt.Println("\nCrawl Options:")
283 | writeToWriter(w, crawlOptions)
284 |
285 | fmt.Println("\nWordlist Options:")
286 | writeToWriter(w, wordlistOptions)
287 |
288 | os.Exit(0)
289 | }
290 |
291 | func getLogo() string {
292 | // source: https://patorjk.com/software/taag/#p=display&f=Slant%20Relief&t=wcvs
293 | logo := `
294 | __/\\____/\\___/\\_____/\\\\\\\\__/\\\____/\\\__/\\\\\\\\\\_
295 | _\/\\\__/\\\\_/\\\___/\\\//////__\//\\\__/\\\__\/\\\//////__
296 | _\//\\\/\\\\\/\\\___/\\\__________\//\\\/\\\___\/\\\\\\\\\\_
297 | __\//\\\\\/\\\\\___\//\\\__________\//\\\\\____\////////\\\_
298 | ___\//\\\\//\\\_____\///\\\\\\\\____\//\\\______/\\\\\\\\\\_
299 | ____\///__\///________\////////______\///______\//////////__`
300 |
301 | logo = strings.ReplaceAll(logo, "_", color.HiRedString("_"))
302 | return logo
303 | }
304 |
305 | func writeToWriter(w *tabwriter.Writer, flagStruct []FlagStruct) {
306 | for _, ts := range flagStruct {
307 | fmt.Fprintf(w, "--%s\t-%s\t%s\n", ts.LongFlag, ts.ShortFlag, ts.Description)
308 | }
309 | w.Flush()
310 | }
311 |
312 | func appendString(options *[]FlagStruct, varString *string, longFlag string, shortFlag string, defaultValue string, description string) {
313 | flag.StringVar(varString, longFlag, defaultValue, "")
314 | if shortFlag != longFlag {
315 | flag.StringVar(varString, shortFlag, defaultValue, "")
316 | }
317 | *options = append(*options, FlagStruct{
318 | LongFlag: longFlag,
319 | ShortFlag: shortFlag,
320 | Description: description})
321 | }
322 |
323 | func appendInt(options *[]FlagStruct, varInt *int, longFlag string, shortFlag string, defaultValue int, description string) {
324 | flag.IntVar(varInt, longFlag, defaultValue, "")
325 | if shortFlag != longFlag {
326 | flag.IntVar(varInt, shortFlag, defaultValue, "")
327 | }
328 | *options = append(*options, FlagStruct{
329 | LongFlag: longFlag,
330 | ShortFlag: shortFlag,
331 | Description: description})
332 | }
333 |
334 | func appendFloat(options *[]FlagStruct, varFloat *float64, longFlag string, shortFlag string, defaultValue float64, description string) {
335 | flag.Float64Var(varFloat, longFlag, defaultValue, "")
336 | if shortFlag != longFlag {
337 | flag.Float64Var(varFloat, shortFlag, defaultValue, "")
338 | }
339 | *options = append(*options, FlagStruct{
340 | LongFlag: longFlag,
341 | ShortFlag: shortFlag,
342 | Description: description})
343 | }
344 |
345 | func appendBoolean(options *[]FlagStruct, varBoolean *bool, longFlag string, shortFlag string, defaultValue bool, description string) {
346 | flag.BoolVar(varBoolean, longFlag, defaultValue, "")
347 | if shortFlag != longFlag {
348 | flag.BoolVar(varBoolean, shortFlag, defaultValue, "")
349 | }
350 | *options = append(*options, FlagStruct{
351 | LongFlag: longFlag,
352 | ShortFlag: shortFlag,
353 | Description: description})
354 | }
355 |
--------------------------------------------------------------------------------
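
The append* helpers above bind a single variable to both a long and a short flag name using Go's standard flag package; whichever form the user passes last wins, and the FlagStruct slice exists only so help() can render both names next to one description. A minimal, self-contained sketch of that dual-registration pattern (the package name and the example flag are illustrative, not part of the scanner):

    package main

    import (
    	"flag"
    	"fmt"
    )

    // stringVarDual registers the same string variable under a long and a
    // short flag name, so --url and -u both write to the same place.
    func stringVarDual(p *string, long, short, def string) {
    	flag.StringVar(p, long, def, "")
    	if short != long {
    		flag.StringVar(p, short, def, "")
    	}
    }

    func main() {
    	var url string
    	stringVarDual(&url, "url", "u", "")
    	flag.Parse() // e.g. -u https://example.com or --url https://example.com
    	fmt.Println("url:", url)
    }
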
/pkg/recon.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "io"
7 | "net/url"
8 | "path"
9 | "strings"
10 | "time"
11 |
12 | "github.com/valyala/fasthttp"
13 | "golang.org/x/net/html"
14 | )
15 |
16 | func cbFoundDifference(times []int64, identifier string) { // TODO: Remove this function as it is only used for statistics
17 | if len(times)%2 == 0 {
18 | for i := 0; i < len(times); i += 2 {
19 | dif := times[i] - times[i+1]
20 | if dif < int64(Config.HMDiff) {
21 | msg := fmt.Sprintf("The time difference (%d) was smaller than the threshold (%d)\n", dif, Config.HMDiff)
22 | PrintVerbose(msg, NoColor, 2)
23 | return
24 | }
25 | }
26 | } else {
27 | msg := fmt.Sprintf("%s: len(times) mod 2 != 0\n", identifier)
28 | Print(msg, Yellow)
29 | }
30 | }
31 |
32 | func cbNotFoundDifference(times []int64, identifier string) {
33 | if len(times)%2 == 0 {
34 | for i := 0; i < len(times); i += 2 {
35 | dif := times[i] - times[i+1]
36 | if dif >= int64(Config.HMDiff) {
37 | 				msg := fmt.Sprintf("The time difference (%d) was equal to or higher than the threshold (%d)", dif, Config.HMDiff)
38 | Print(msg, Yellow)
39 | return
40 | }
41 | }
42 | } else {
43 | msg := fmt.Sprintf("%s: len(times) mod 2 != 0", identifier)
44 | Print(msg, Yellow)
45 | }
46 | }
47 |
48 | /* Check if the parameter "cb" (or any other defined by flag -cb), the headers "accept-encoding, accept, cookie, origin" or any cookie can be used as cachebuster */
49 | func CheckCache(parameterList []string, headerList []string) (CacheStruct, bool, []error) {
50 | var cache CacheStruct
51 | var errSlice []error
52 |
53 | // analyze the website headers for cache indicators
54 | cacheIndicators := analyzeCacheIndicator(Config.Website.Headers)
55 |
56 | alwaysMiss := true
57 | if len(cacheIndicators) == 0 {
58 | 		msg := "No x-cache (or other cache hit/miss) header was found\nThe response time will be measured as cache hit/miss indicator\n"
59 | Print(msg, Yellow)
60 | } else {
61 | for _, cacheIndicator := range cacheIndicators {
62 | miss, err := checkIfAlwaysMiss(cacheIndicator)
63 | if err != nil {
64 | errSlice = append(errSlice, err)
65 | }
66 | if !miss {
67 | alwaysMiss = false
68 | msg := fmt.Sprintf("The following cache indicator indicated a hit: %s\n", cacheIndicator)
69 | PrintVerbose(msg, Cyan, 1)
70 | cache.Indicator = cacheIndicator
71 | }
72 | }
73 |
74 | if cache.Indicator == "" && !cache.TimeIndicator {
75 | msg := "Time measurement as indicator is deactivated, skipping cachebuster tests\n"
76 | Print(msg, Yellow)
77 | } else {
78 | 			// test for cachebuster, if the cache doesn't always return a miss
79 | if !alwaysMiss {
80 | // Check first if a parameter can be used as cachebuster
81 | if !cache.CBwasFound {
82 | errs := cachebusterParameter(&cache, nil)
83 | if len(errs) > 0 {
84 | errSlice = append(errSlice, errs...)
85 | }
86 | }
87 |
88 | // Check second if a header can be used as cachebuster
89 | if !cache.CBwasFound {
90 | errs := cachebusterHeader(&cache, nil)
91 | if len(errs) > 0 {
92 | errSlice = append(errSlice, errs...)
93 | }
94 | }
95 |
96 | // Check third if a cookie can be used as cachebuster
97 | if !cache.CBwasFound {
98 | errs := cachebusterCookie(&cache)
99 | if len(errs) > 0 {
100 | errSlice = append(errSlice, errs...)
101 | }
102 | }
103 |
104 | if Config.SkipWordlistCachebuster {
105 | msg := "Skipping wordlist cachebuster tests\n"
106 | PrintVerbose(msg, Yellow, 1)
107 | } else {
108 | // Check fourth if a parameter from the wordlist can be used as cachebuster
109 | if !cache.CBwasFound {
110 | errs := cachebusterParameter(&cache, parameterList)
111 | if len(errs) > 0 {
112 | errSlice = append(errSlice, errs...)
113 | }
114 | }
115 |
116 | 				// Check fifth if a header from the wordlist can be used as cachebuster
117 | if !cache.CBwasFound {
118 | errs := cachebusterHeader(&cache, headerList)
119 | if len(errs) > 0 {
120 | errSlice = append(errSlice, errs...)
121 | }
122 | }
123 | }
124 |
125 | 			// Check last if an HTTP method can be used as cachebuster. Can't do multithreading if an HTTP method is used
126 | if !cache.CBwasFound {
127 | errs := cachebusterHTTPMethod(&cache)
128 | if len(errs) > 0 {
129 | errSlice = append(errSlice, errs...)
130 | }
131 | }
132 | }
133 | }
134 |
135 | if cache.Indicator == "" && !cache.TimeIndicator {
136 | msg := "No cache indicator could be found"
137 | Print(msg+"\n", Yellow)
138 | errSlice = append(errSlice, errors.New(strings.ToLower(msg)))
139 | } else {
140 | if !cache.CBwasFound {
141 | msg := "No cachebuster could be found"
142 | Print(msg+"\n", Yellow)
143 | errSlice = append(errSlice, errors.New(strings.ToLower(msg)))
144 | }
145 | }
146 |
147 | if (!cache.CBwasFound || (cache.Indicator == "" && !cache.TimeIndicator)) && !Config.Force {
148 | msg := "Use -f/-force to force the test\n"
149 | Print(msg, Yellow)
150 | }
151 |
152 | }
153 | return cache, alwaysMiss, errSlice
154 | }
155 |
156 | func checkIfAlwaysMiss(cacheIndicator string) (bool, error) {
157 | errorString := "checkIfAlwaysMiss"
158 |
159 | req := fasthttp.AcquireRequest()
160 | resp := fasthttp.AcquireResponse()
161 | defer fasthttp.ReleaseRequest(req)
162 | defer fasthttp.ReleaseResponse(resp)
163 | var err error
164 |
165 | weburl := Config.Website.Url.String()
166 | if Config.DoPost {
167 | req.Header.SetMethod("POST")
168 | req.SetBodyString(Config.Body)
169 | } else {
170 | req.Header.SetMethod("GET")
171 | if Config.Body != "" {
172 | req.SetBodyString(Config.Body)
173 |
174 | }
175 | }
176 | req.SetRequestURI(weburl)
177 |
178 | setRequest(req, Config.DoPost, "", nil, false)
179 |
180 | waitLimiter(errorString)
181 | err = client.Do(req, resp)
182 | if err != nil {
183 | msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
184 | Print(msg+"\n", Red)
185 | return false, errors.New(msg)
186 | }
187 |
188 | firstUnix := time.Now().Unix()
189 |
190 | if resp.StatusCode() != Config.Website.StatusCode {
191 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
192 | Print(msg, Yellow)
193 | }
194 |
195 | setRequest(req, Config.DoPost, "", nil, false)
196 |
197 | waitLimiter(errorString)
198 |
199 | secondUnix := time.Now().Unix()
200 | timeDiff := secondUnix - firstUnix
201 | // make sure that there is at least 2 sec difference.
202 | // So that first req has Age=0 and second req has Age>=2
203 | if timeDiff <= 1 && strings.EqualFold("age", cacheIndicator) {
204 | time.Sleep(2 * time.Second)
205 | }
206 |
207 | err = client.Do(req, resp)
208 | if err != nil {
209 | msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
210 | Print(msg+"\n", Red)
211 | return false, errors.New(msg)
212 | }
213 |
214 | if resp.StatusCode() != Config.Website.StatusCode {
215 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
216 | Print(msg, Yellow)
217 | }
218 |
219 | respHeader := headerToMultiMap(&resp.Header)
220 | hit := false
221 | for _, v := range respHeader[cacheIndicator] {
222 | indicValue := strings.TrimSpace(strings.ToLower(v))
223 | hit = hit || checkCacheHit(indicValue, cacheIndicator)
224 | }
225 |
226 | if !hit {
227 | 		msg := "cache always returns a miss"
228 | Print(msg+"\n", Yellow)
229 | return true, errors.New(msg)
230 | }
231 |
232 | return false, nil
233 | }
234 |
235 | func cachebusterCookie(cache *CacheStruct) []error {
236 | var errSlice []error
237 | 	for k := range Config.Website.Cookies {
238 | errorString := "cachebusterCookie " + k
239 | identifier := "Cookie " + k + " as Cachebuster"
240 |
241 | req := fasthttp.AcquireRequest()
242 | resp := fasthttp.AcquireResponse()
243 | defer fasthttp.ReleaseRequest(req)
244 | defer fasthttp.ReleaseResponse(resp)
245 | var err error
246 | var times []int64
247 | newCookie := map[string]string{}
248 |
249 | if cache.Indicator == "" {
250 | // No Cache Indicator was found. So time will be used as Indicator
251 |
252 | var cb string
253 | for ii := range 5 * 2 {
254 | weburl := Config.Website.Url.String()
255 |
256 | if Config.DoPost {
257 | req.Header.SetMethod("POST")
258 | req.SetBodyString(Config.Body)
259 | } else {
260 | req.Header.SetMethod("GET")
261 | if Config.Body != "" {
262 | req.SetBodyString(Config.Body)
263 |
264 | }
265 | }
266 | req.SetRequestURI(weburl)
267 |
268 | if ii%2 == 0 {
269 | cb = "cb" + randInt()
270 | newCookie["key"] = k
271 | newCookie["value"] = cb
272 | }
273 | setRequest(req, Config.DoPost, "", newCookie, false)
274 |
275 | waitLimiter(errorString)
276 | start := time.Now()
277 |
278 | err = client.Do(req, resp)
279 | if err != nil {
280 | msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
281 | Print(msg+"\n", Red)
282 | errSlice = append(errSlice, errors.New(msg))
283 | continue
284 | }
285 | elapsed := time.Since(start).Milliseconds()
286 | times = append(times, elapsed)
287 |
288 | if resp.StatusCode() != Config.Website.StatusCode {
289 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
290 | Print(msg, Yellow)
291 | }
292 | }
293 | msg := fmt.Sprintf("measured times: %d\n", times)
294 | PrintVerbose(msg, NoColor, 2)
295 |
296 | skip := false
297 | for ii := range times {
298 | 				// Cache miss has to take at least Config.HMDiff ms (misshitdif) longer than the cache hit
299 | if ii%2 == 1 && times[ii-1]-times[ii] < int64(Config.HMDiff) {
300 | msg := fmt.Sprintf("%s was not successful (Cookie)\n", identifier)
301 | PrintVerbose(msg, NoColor, 2)
302 | skip = true
303 | break
304 | }
305 | }
306 | if skip {
307 | continue
308 | }
309 | cache.TimeIndicator = true
310 | cache.CBwasFound = true
311 | cache.CBisCookie = true
312 | cache.CBisHTTPMethod = false
313 | cache.CBisHeader = false
314 | cache.CBisParameter = false
315 | cache.CBName = k
316 |
317 | msg = fmt.Sprintf("%s was successful (Cookie, time was used as indicator)\n", identifier)
318 | Print(msg, Cyan)
319 |
320 | return errSlice
321 | } else {
322 | 			// A hit/miss indicator was found. Send 2 requests, each with a new cachebuster, expecting 2 misses
323 | weburl := Config.Website.Url.String()
324 | if Config.DoPost {
325 | req.Header.SetMethod("POST")
326 | req.SetBodyString(Config.Body)
327 | } else {
328 | req.Header.SetMethod("GET")
329 | if Config.Body != "" {
330 | req.SetBodyString(Config.Body)
331 |
332 | }
333 | }
334 | req.SetRequestURI(weburl)
335 |
336 | cb := "cb" + randInt()
337 | 			newCookie["key"], newCookie["value"] = k, cb // set the cookie name as well, so setRequest sends a complete cookie
338 | setRequest(req, Config.DoPost, "", newCookie, false)
339 |
340 | waitLimiter(errorString)
341 |
342 | start := time.Now()
343 |
344 | err = client.Do(req, resp)
345 | if err != nil {
346 | msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
347 | Print(msg+"\n", Red)
348 | errSlice = append(errSlice, errors.New(msg))
349 | continue
350 | }
351 |
352 | elapsed := time.Since(start).Milliseconds()
353 | times = append(times, elapsed)
354 |
355 | firstUnix := time.Now().Unix()
356 |
357 | if resp.StatusCode() != Config.Website.StatusCode {
358 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
359 | Print(msg, Yellow)
360 | }
361 |
362 | respHeader := headerToMultiMap(&resp.Header)
363 | hit := false
364 | for _, v := range respHeader[cache.Indicator] {
365 | indicValue := strings.TrimSpace(strings.ToLower(v))
366 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
367 | }
368 |
369 | if hit {
370 | // If there is a hit, the cachebuster didn't work
371 | msg := fmt.Sprintf("%s was not successful (Cookie)\n", identifier)
372 | PrintVerbose(msg, NoColor, 2)
373 | continue
374 | } else {
375 | if Config.DoPost {
376 | req.Header.SetMethod("POST")
377 | req.SetBodyString(Config.Body)
378 | } else {
379 | req.Header.SetMethod("GET")
380 | if Config.Body != "" {
381 | req.SetBodyString(Config.Body)
382 |
383 | }
384 | }
385 | req.SetRequestURI(weburl)
386 |
387 | cb := "cb" + randInt()
388 | 				newCookie["key"], newCookie["value"] = k, cb
389 |
390 | setRequest(req, Config.DoPost, "", newCookie, false)
391 |
392 | waitLimiter(errorString)
393 |
394 | secondUnix := time.Now().Unix()
395 | timeDiff := secondUnix - firstUnix
396 | // make sure that there is at least 2 sec difference.
397 | // So that first req has Age=0 and second req has Age>=2
398 | if timeDiff <= 1 && strings.EqualFold("age", cache.Indicator) {
399 | time.Sleep(2 * time.Second)
400 | }
401 |
402 | start := time.Now()
403 | err = client.Do(req, resp)
404 | elapsed := time.Since(start).Milliseconds()
405 | times = append(times, elapsed)
406 | if err != nil {
407 | msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
408 | Print(msg+"\n", Red)
409 | errSlice = append(errSlice, errors.New(msg))
410 | continue
411 | }
412 |
413 | if resp.StatusCode() != Config.Website.StatusCode {
414 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
415 | Print(msg, Yellow)
416 | }
417 |
418 | respHeader := headerToMultiMap(&resp.Header)
419 | hit := false
420 | for _, v := range respHeader[cache.Indicator] {
421 | indicValue := strings.TrimSpace(strings.ToLower(v))
422 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
423 | }
424 | if hit {
425 | // If there is a hit, the cachebuster didn't work
426 | msg := fmt.Sprintf("%s was not successful (Cookie)\n", identifier)
427 | PrintVerbose(msg, NoColor, 2)
428 | cbNotFoundDifference(times, identifier)
429 | } else {
430 | cache.CBwasFound = true
431 | cache.CBisCookie = true
432 | cache.CBisHTTPMethod = false
433 | cache.CBisHeader = false
434 | cache.CBisParameter = false
435 | cache.CBName = k
436 |
437 | msg := fmt.Sprintf("%s was successful (Cookie)\n", identifier)
438 | Print(msg, Cyan)
439 |
440 | cbFoundDifference(times, identifier)
441 |
442 | return errSlice
443 | }
444 | }
445 | }
446 | }
447 |
448 | return errSlice
449 | }
450 |
451 | func cachebusterHeader(cache *CacheStruct, headerList []string) []error {
452 | headers := []string{}
453 | values := []string{}
454 | if len(headerList) > 0 {
455 | headers = append(headers, headerList...)
456 | } else {
457 | headers = append(headers, "Accept-Encoding", "Accept", "Cookie", "Origin")
458 | values = append(values, "gzip, deflate, ", "*/*, text/", "wcvs_cookie=")
459 | for _, header := range Config.Headers {
460 | headers = append(headers, strings.TrimSpace(strings.Split(header, ":")[0])) // Only add headername
461 | }
462 | }
463 |
464 | var errSlice []error
465 |
466 | for i, header := range headers {
467 | errorString := "cachebusterHeader " + header
468 | identifier := "Header " + header + " as Cachebuster"
469 |
470 | if len(values) < i+1 { // prevent index out of range
471 | values = append(values, "")
472 | }
473 |
474 | if header == "" { // skip empty headers
475 | continue
476 | }
477 |
478 | req := fasthttp.AcquireRequest()
479 | resp := fasthttp.AcquireResponse()
480 | defer fasthttp.ReleaseRequest(req)
481 | defer fasthttp.ReleaseResponse(resp)
482 | var err error
483 | var times []int64
484 |
485 | if cache.Indicator == "" {
486 | // No Cache Indicator was found. So time will be used as Indicator
487 |
488 | for ii := range 5 * 2 {
489 | weburl := Config.Website.Url.String()
490 |
491 | if Config.DoPost {
492 | req.Header.SetMethod("POST")
493 | req.SetBodyString(Config.Body)
494 | } else {
495 | req.Header.SetMethod("GET")
496 | if Config.Body != "" {
497 | req.SetBodyString(Config.Body)
498 |
499 | }
500 | }
501 | req.SetRequestURI(weburl)
502 |
503 | setRequest(req, Config.DoPost, "", nil, false)
504 | if ii%2 == 0 {
505 | cbvalue := values[i] + "cb" + randInt()
506 | if h := req.Header.Peek(header); h != nil {
507 | msg := fmt.Sprintf("Overwriting %s:%s with %s:%s\n", header, h, header, cbvalue)
508 | Print(msg, NoColor)
509 | }
510 | req.Header.Set(header, cbvalue)
511 | }
512 |
513 | waitLimiter(errorString)
514 | start := time.Now()
515 | err = client.Do(req, resp)
516 | elapsed := time.Since(start).Milliseconds()
517 | times = append(times, elapsed)
518 | if err != nil {
519 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
520 | Print(msg+"\n", Red)
521 | errSlice = append(errSlice, errors.New(msg))
522 | continue
523 | }
524 |
525 | if resp.StatusCode() != Config.Website.StatusCode {
526 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
527 | Print(msg, Yellow)
528 | }
529 | }
530 | msg := fmt.Sprintf("measured times: %d\n", times)
531 | PrintVerbose(msg, NoColor, 2)
532 |
533 | skip := false
534 | for ii := range times {
535 | 				// Cache miss has to take at least Config.HMDiff ms (misshitdif) longer than the cache hit
536 | if ii%2 == 1 && times[ii-1]-times[ii] < int64(Config.HMDiff) {
537 | msg := fmt.Sprintf("%s was not successful (Header)\n", identifier)
538 | PrintVerbose(msg, NoColor, 2)
539 | skip = true
540 | break
541 | }
542 | }
543 | if skip {
544 | continue
545 | }
546 |
547 | cache.TimeIndicator = true
548 | cache.CBwasFound = true
549 | cache.CBisHeader = true
550 | cache.CBisCookie = false
551 | cache.CBisHTTPMethod = false
552 | cache.CBisParameter = false
553 | cache.CBName = header
554 |
555 | msg = fmt.Sprintf("%s was successful (Header, time was used as indicator)\n", identifier)
556 | Print(msg, Cyan)
557 |
558 | return errSlice
559 | } else {
560 | 			// A hit/miss indicator was found. Send 2 requests, each with a new cachebuster, expecting 2 misses
561 | weburl := Config.Website.Url.String()
562 |
563 | if Config.DoPost {
564 | req.Header.SetMethod("POST")
565 | req.SetBodyString(Config.Body)
566 | } else {
567 | req.Header.SetMethod("GET")
568 | if Config.Body != "" {
569 | req.SetBodyString(Config.Body)
570 |
571 | }
572 | }
573 | req.SetRequestURI(weburl)
574 |
575 | setRequest(req, Config.DoPost, "", nil, false)
576 | cbvalue := values[i] + "cb" + randInt()
577 | if h := req.Header.Peek(header); h != nil {
578 | msg := fmt.Sprintf("Overwriting %s:%s with %s:%s\n", header, h, header, cbvalue)
579 | Print(msg, NoColor)
580 | }
581 | req.Header.Set(header, cbvalue)
582 |
583 | waitLimiter(errorString)
584 | start := time.Now()
585 | err = client.Do(req, resp)
586 | elapsed := time.Since(start).Milliseconds()
587 | times = append(times, elapsed)
588 | if err != nil {
589 | 				msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
590 | Print(msg+"\n", Red)
591 | errSlice = append(errSlice, errors.New(msg))
592 | continue
593 | }
594 |
595 | firstUnix := time.Now().Unix()
596 |
597 | if resp.StatusCode() != Config.Website.StatusCode {
598 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
599 | Print(msg, Yellow)
600 | }
601 |
602 | respHeader := headerToMultiMap(&resp.Header)
603 | hit := false
604 | for _, v := range respHeader[cache.Indicator] {
605 | indicValue := strings.TrimSpace(strings.ToLower(v))
606 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
607 | }
608 | if hit {
609 | // If there is a hit, the cachebuster didn't work
610 | msg := fmt.Sprintf("%s was not successful (Header)\n", identifier)
611 | PrintVerbose(msg, NoColor, 2)
612 | continue
613 | } else {
614 |
615 | if Config.DoPost {
616 | req.Header.SetMethod("POST")
617 | req.SetBodyString(Config.Body)
618 | } else {
619 | req.Header.SetMethod("GET")
620 | if Config.Body != "" {
621 | req.SetBodyString(Config.Body)
622 |
623 | }
624 | }
625 | req.SetRequestURI(weburl)
626 |
627 | setRequest(req, Config.DoPost, "", nil, false)
628 | cbvalue := values[i] + "cb" + randInt()
629 | if h := req.Header.Peek(header); h != nil {
630 | msg := fmt.Sprintf("Overwriting %s:%s with %s:%s\n", header, h, header, cbvalue)
631 | Print(msg, NoColor)
632 | }
633 | req.Header.Set(header, cbvalue)
634 |
635 | waitLimiter(errorString)
636 |
637 | secondUnix := time.Now().Unix()
638 | timeDiff := secondUnix - firstUnix
639 | // make sure that there is at least 2 sec difference.
640 | // So that first req has Age=0 and second req has Age>=2
641 | if timeDiff <= 1 && strings.EqualFold("age", cache.Indicator) {
642 | time.Sleep(2 * time.Second)
643 | }
644 |
645 | start := time.Now()
646 | err = client.Do(req, resp)
647 | elapsed := time.Since(start).Milliseconds()
648 | times = append(times, elapsed)
649 | if err != nil {
650 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
651 | Print(msg+"\n", Red)
652 | errSlice = append(errSlice, errors.New(msg))
653 | continue
654 | }
655 |
656 | if resp.StatusCode() != Config.Website.StatusCode {
657 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
658 | Print(msg, Yellow)
659 | }
660 |
661 | respHeader := headerToMultiMap(&resp.Header)
662 | hit := false
663 | for _, v := range respHeader[cache.Indicator] {
664 | indicValue := strings.TrimSpace(strings.ToLower(v))
665 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
666 | }
667 | if hit {
668 | // If there is a hit, the cachebuster didn't work
669 | msg := fmt.Sprintf("%s was not successful (Header)\n", identifier)
670 | PrintVerbose(msg, NoColor, 2)
671 |
672 | cbNotFoundDifference(times, identifier)
673 | } else {
674 | cache.CBwasFound = true
675 | cache.CBisHeader = true
676 | cache.CBisCookie = false
677 | cache.CBisHTTPMethod = false
678 | cache.CBisParameter = false
679 | cache.CBName = header
680 |
681 | msg := fmt.Sprintf("%s was successful (Header)\n", identifier)
682 | Print(msg, Cyan)
683 |
684 | cbFoundDifference(times, identifier)
685 | return errSlice
686 | }
687 | }
688 | }
689 | }
690 | return errSlice
691 | }
692 |
693 | func cachebusterParameter(cache *CacheStruct, parameterList []string) []error {
694 | parameters := []string{}
695 | values := []string{}
696 |
697 | if len(parameterList) > 0 {
698 | parameters = parameterList
699 | } else {
700 | parameters = append(parameters, Config.CacheBuster)
701 | values = append(values, "")
702 | for k, v := range Config.Website.Queries {
703 | parameters = append(parameters, k)
704 | values = append(values, v)
705 | }
706 | }
707 |
708 | var errSlice []error
709 |
710 | for i, parameter := range parameters {
711 | errorString := "cachebusterParameter"
712 | identifier := "Parameter " + parameter + " as Cachebuster"
713 |
714 | if len(values) < i+1 { // prevent index out of range
715 | values = append(values, "")
716 | }
717 |
718 | if parameter == "" { // skip empty parameter
719 | continue
720 | }
721 |
722 | req := fasthttp.AcquireRequest()
723 | resp := fasthttp.AcquireResponse()
724 | defer fasthttp.ReleaseRequest(req)
725 | defer fasthttp.ReleaseResponse(resp)
726 | var err error
727 | var times []int64
728 |
729 | if cache.Indicator == "" {
730 | // No Cache Indicator was found. So time will be used as Indicator
731 |
732 | var urlCb string
733 | for ii := range 5 * 2 {
734 | if ii%2 == 0 {
735 | urlCb, _ = addCachebusterParameter(Config.Website.Url.String(), values[i], parameter, false)
736 | }
737 | if Config.DoPost {
738 | req.Header.SetMethod("POST")
739 | req.SetBodyString(Config.Body)
740 | } else {
741 | req.Header.SetMethod("GET")
742 | if Config.Body != "" {
743 | req.SetBodyString(Config.Body)
744 |
745 | }
746 | }
747 | req.SetRequestURI(urlCb)
748 |
749 | setRequest(req, Config.DoPost, "", nil, false)
750 |
751 | waitLimiter(errorString)
752 | start := time.Now()
753 | err = client.Do(req, resp)
754 | elapsed := time.Since(start).Milliseconds()
755 | times = append(times, elapsed)
756 | if err != nil {
757 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
758 | Print(msg+"\n", Red)
759 | errSlice = append(errSlice, errors.New(msg))
760 | continue
761 | }
762 |
763 | if resp.StatusCode() != Config.Website.StatusCode {
764 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
765 | Print(msg, Yellow)
766 | }
767 | }
768 | msg := fmt.Sprintf("measured times: %d\n", times)
769 | PrintVerbose(msg, NoColor, 2)
770 |
771 | skip := false
772 | for ii := range times {
773 | 				// Cache miss has to take at least Config.HMDiff ms (misshitdif) longer than the cache hit
774 | if ii%2 == 1 && times[ii-1]-times[ii] < int64(Config.HMDiff) {
775 | msg := fmt.Sprintf("%s was not successful (Parameter)\n", identifier)
776 | PrintVerbose(msg, NoColor, 2)
777 | skip = true
778 | break
779 | }
780 | }
781 | if skip {
782 | continue
783 | }
784 |
785 | cache.TimeIndicator = true
786 | cache.CBwasFound = true
787 | cache.CBisParameter = true
788 | cache.CBisHeader = false
789 | cache.CBisCookie = false
790 | cache.CBisHTTPMethod = false
791 | cache.CBName = parameter
792 |
793 | msg = fmt.Sprintf("%s was successful (Parameter, time was used as indicator)\n", identifier)
794 | Print(msg, Cyan)
795 | return errSlice
796 | } else {
797 | 			// A hit/miss indicator was found. Send 2 requests, each with a new cachebuster, expecting 2 misses
798 | urlCb, _ := addCachebusterParameter(Config.Website.Url.String(), values[i], parameter, false)
799 |
800 | if Config.DoPost {
801 | req.Header.SetMethod("POST")
802 | req.SetBodyString(Config.Body)
803 | } else {
804 | req.Header.SetMethod("GET")
805 | if Config.Body != "" {
806 | req.SetBodyString(Config.Body)
807 |
808 | }
809 | }
810 | req.SetRequestURI(urlCb)
811 |
812 | setRequest(req, Config.DoPost, "", nil, false)
813 | waitLimiter(errorString)
814 | start := time.Now()
815 | err = client.Do(req, resp)
816 | elapsed := time.Since(start).Milliseconds()
817 | times = append(times, elapsed)
818 | if err != nil {
819 | 				msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
820 | Print(msg+"\n", Red)
821 | errSlice = append(errSlice, errors.New(msg))
822 | continue
823 | }
824 |
825 | firstUnix := time.Now().Unix()
826 |
827 | if resp.StatusCode() != Config.Website.StatusCode {
828 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
829 | Print(msg, Yellow)
830 | }
831 |
832 | respHeader := headerToMultiMap(&resp.Header)
833 | hit := false
834 | for _, v := range respHeader[cache.Indicator] {
835 | indicValue := strings.TrimSpace(strings.ToLower(v))
836 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
837 | }
838 | if hit {
839 | // If there is a hit, the cachebuster didn't work
840 | msg := fmt.Sprintf("%s was not successful (Parameter)\n", identifier)
841 | PrintVerbose(msg, NoColor, 2)
842 | } else {
843 | urlCb, _ := addCachebusterParameter(Config.Website.Url.String(), values[i], parameter, false)
844 |
845 | if Config.DoPost {
846 | req.Header.SetMethod("POST")
847 | req.SetBodyString(Config.Body)
848 | } else {
849 | req.Header.SetMethod("GET")
850 | if Config.Body != "" {
851 | req.SetBodyString(Config.Body)
852 |
853 | }
854 | }
855 | req.SetRequestURI(urlCb)
856 |
857 | setRequest(req, Config.DoPost, "", nil, false)
858 | waitLimiter(errorString)
859 |
860 | secondUnix := time.Now().Unix()
861 | timeDiff := secondUnix - firstUnix
862 | // make sure that there is at least 2 sec difference.
863 | // So that first req has Age=0 and second req has Age>=2
864 | if timeDiff <= 1 && strings.EqualFold("age", cache.Indicator) {
865 | time.Sleep(2 * time.Second)
866 | }
867 |
868 | start := time.Now()
869 | err = client.Do(req, resp)
870 | elapsed := time.Since(start).Milliseconds()
871 | times = append(times, elapsed)
872 | if err != nil {
873 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
874 | Print(msg+"\n", Red)
875 | errSlice = append(errSlice, errors.New(msg))
876 | continue
877 | }
878 |
879 | if resp.StatusCode() != Config.Website.StatusCode {
880 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
881 | Print(msg, Yellow)
882 | }
883 |
884 | respHeader := headerToMultiMap(&resp.Header)
885 | hit := false
886 | for _, v := range respHeader[cache.Indicator] {
887 | indicValue := strings.TrimSpace(strings.ToLower(v))
888 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
889 | }
890 | if hit {
891 | // If there is a hit, the cachebuster didn't work
892 | msg := fmt.Sprintf("%s was not successful (Parameter)\n", identifier)
893 | PrintVerbose(msg, NoColor, 2)
894 |
895 | cbNotFoundDifference(times, identifier)
896 | } else {
897 | cache.CBwasFound = true
898 | cache.CBisParameter = true
899 | cache.CBisHeader = false
900 | cache.CBisCookie = false
901 | cache.CBisHTTPMethod = false
902 | cache.CBName = parameter
903 |
904 | msg := fmt.Sprintf("%s was successful (Parameter)\n", identifier)
905 | Print(msg, Cyan)
906 |
907 | cbFoundDifference(times, identifier)
908 |
909 | return errSlice
910 | }
911 | }
912 | }
913 | }
914 |
915 | return errSlice
916 | }
917 |
918 | func cachebusterHTTPMethod(cache *CacheStruct) []error {
919 | 	httpMethods := []string{"PURGE", "FASTLYPURGE"}
920 |
921 | var errSlice []error
922 |
923 | 	for _, method := range httpMethods {
924 | errorString := "cachebusterHTTPMethod " + method
925 | identifier := "HTTP Method " + method + " as Cachebuster"
926 |
927 | req := fasthttp.AcquireRequest()
928 | resp := fasthttp.AcquireResponse()
929 | defer fasthttp.ReleaseRequest(req)
930 | defer fasthttp.ReleaseResponse(resp)
931 | var err error
932 | var times []int64
933 |
934 | if cache.Indicator == "" {
935 | // No Cache Indicator was found. So time will be used as Indicator
936 |
937 | skip := false
938 | for ii := range 5 * 2 {
939 | weburl := Config.Website.Url.String()
940 | if ii%2 == 0 {
941 | req.Header.SetMethod(method)
942 | if Config.Body != "" {
943 | req.SetBodyString(Config.Body)
944 | }
945 | } else {
946 | if Config.DoPost {
947 | req.Header.SetMethod("POST")
948 | req.SetBodyString(Config.Body)
949 | } else {
950 | req.Header.SetMethod("GET")
951 | if Config.Body != "" {
952 | req.SetBodyString(Config.Body)
953 |
954 | }
955 | }
956 |
957 | }
958 | req.SetRequestURI(weburl)
959 |
960 | setRequest(req, Config.DoPost, "", nil, false)
961 |
962 | waitLimiter(errorString)
963 | start := time.Now()
964 | err = client.Do(req, resp)
965 | elapsed := time.Since(start).Milliseconds()
966 | times = append(times, elapsed)
967 | if err != nil {
968 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
969 | Print(msg+"\n", Red)
970 | errSlice = append(errSlice, errors.New(msg))
971 | continue
972 | }
973 |
974 | if resp.StatusCode() != Config.Website.StatusCode {
975 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
976 | Print(msg, Yellow)
977 | }
978 | if resp.StatusCode() >= 400 {
979 | skip = true
980 | break
981 | }
982 | }
983 | if skip {
984 | continue
985 | }
986 | msg := fmt.Sprintf("measured times: %d\n", times)
987 | PrintVerbose(msg, NoColor, 2)
988 |
989 | skip = false
990 | for ii := range times {
991 | 				// Cache miss has to take at least Config.HMDiff ms (misshitdif) longer than the cache hit
992 | if ii%2 == 1 && times[ii-1]-times[ii] < int64(Config.HMDiff) {
993 | msg := fmt.Sprintf("%s was not successful (HTTP Method)\n", identifier)
994 | PrintVerbose(msg, NoColor, 2)
995 | skip = true
996 | break
997 | }
998 | }
999 | if skip {
1000 | continue
1001 | }
1002 |
1003 | cache.TimeIndicator = true
1004 | cache.CBwasFound = true
1005 | cache.CBisHTTPMethod = true
1006 | cache.CBisParameter = false
1007 | cache.CBisHeader = false
1008 | cache.CBisCookie = false
1009 | cache.CBName = method
1010 |
1011 | msg = fmt.Sprintf("%s was successful (HTTP Method, time was used as indicator)\n", identifier)
1012 | Print(msg, Cyan)
1013 |
1014 | return errSlice
1015 | } else {
1016 | 			// A hit/miss indicator was found. Send 2 requests, each with a new cachebuster, expecting 2 misses
1017 | weburl := Config.Website.Url.String()
1018 |
1019 | req.Header.SetMethod(method)
1020 | if Config.Body != "" {
1021 | req.SetBodyString(Config.Body)
1022 |
1023 | }
1024 |
1025 | req.SetRequestURI(weburl)
1026 |
1027 | setRequest(req, Config.DoPost, "", nil, false)
1028 |
1029 | waitLimiter(errorString)
1030 | start := time.Now()
1031 | err = client.Do(req, resp)
1032 | elapsed := time.Since(start).Milliseconds()
1033 | times = append(times, elapsed)
1034 | if err != nil {
1035 | 				msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
1036 | Print(msg+"\n", Red)
1037 | errSlice = append(errSlice, errors.New(msg))
1038 | continue
1039 | }
1040 |
1041 | firstUnix := time.Now().Unix()
1042 |
1043 | if resp.StatusCode() != Config.Website.StatusCode {
1044 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
1045 | Print(msg, Yellow)
1046 | }
1047 | if resp.StatusCode() >= 400 {
1048 | continue
1049 | }
1050 |
1051 | respHeader := headerToMultiMap(&resp.Header)
1052 | hit := false
1053 | for _, v := range respHeader[cache.Indicator] {
1054 | indicValue := strings.TrimSpace(strings.ToLower(v))
1055 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
1056 | }
1057 | if hit {
1058 | // If there is a hit, the cachebuster didn't work
1059 | msg := fmt.Sprintf("%s was not successful (HTTP Method)\n", identifier)
1060 | PrintVerbose(msg, NoColor, 2)
1061 | } else {
1062 | req.Header.SetMethod(method)
1063 | if Config.Body != "" {
1064 | req.SetBodyString(Config.Body)
1065 |
1066 | }
1067 |
1068 | req.SetRequestURI(weburl)
1069 |
1070 | setRequest(req, Config.DoPost, "", nil, false)
1071 | waitLimiter(errorString)
1072 |
1073 | secondUnix := time.Now().Unix()
1074 | timeDiff := secondUnix - firstUnix
1075 | // make sure that there is at least 2 sec difference.
1076 | // So that first req has Age=0 and second req has Age>=2
1077 | if timeDiff <= 1 && strings.EqualFold("age", cache.Indicator) {
1078 | time.Sleep(2 * time.Second)
1079 | }
1080 |
1081 | start := time.Now()
1082 | err = client.Do(req, resp)
1083 | elapsed := time.Since(start).Milliseconds()
1084 | times = append(times, elapsed)
1085 | if err != nil {
1086 | 					msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error())
1087 | Print(msg+"\n", Red)
1088 | errSlice = append(errSlice, errors.New(msg))
1089 | continue
1090 | }
1091 |
1092 | if resp.StatusCode() != Config.Website.StatusCode {
1093 | msg := fmt.Sprintf("%s: Unexpected Status Code: %d\n", errorString, resp.StatusCode())
1094 | Print(msg, Yellow)
1095 | }
1096 | if resp.StatusCode() >= 400 {
1097 | continue
1098 | }
1099 |
1100 | respHeader := headerToMultiMap(&resp.Header)
1101 | hit := false
1102 | for _, v := range respHeader[cache.Indicator] {
1103 | indicValue := strings.TrimSpace(strings.ToLower(v))
1104 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
1105 | }
1106 | if hit {
1107 | // If there is a hit, the cachebuster didn't work
1108 | msg := fmt.Sprintf("%s was not successful (HTTP Method)\n", identifier)
1109 | PrintVerbose(msg, NoColor, 2)
1110 |
1111 | cbNotFoundDifference(times, identifier)
1112 | } else {
1113 | cache.CBwasFound = true
1114 | cache.CBisHTTPMethod = true
1115 | cache.CBisParameter = false
1116 | cache.CBisHeader = false
1117 | cache.CBisCookie = false
1118 | cache.CBName = method
1119 |
1120 | msg := fmt.Sprintf("%s was successful (HTTP Method)\n", identifier)
1121 | Print(msg, Cyan)
1122 |
1123 | cbFoundDifference(times, identifier)
1124 |
1125 | return errSlice
1126 | }
1127 | }
1128 | }
1129 | }
1130 | return errSlice
1131 | }
1132 |
1133 | /* Simple get request to get the body of a normal response and the cookies */
1134 | func GetWebsite(requrl string, setStatusCode bool, cacheBuster bool) (WebsiteStruct, error) {
1135 | errorString := "GetWebsite"
1136 |
1137 | var web WebsiteStruct
1138 | cache := Config.Website.Cache
1139 | queryParameterMap := make(map[string]string)
1140 |
1141 | // get domain
1142 | domainParts := strings.SplitN(requrl, "/", 4)
1143 | domain := domainParts[0] + "//" + domainParts[2]
1144 |
1145 | // splitting url like {https://www.m10x.de/}?{name=max&role=admin}
1146 | urlSlice := strings.SplitN(requrl, "?", 2)
1147 |
1148 | // splitting queries like {name=max}&{role=admin}
1149 | var parameterSlice []string
1150 | if strings.Contains(requrl, "?") {
1151 | parameterSlice = strings.Split(urlSlice[1], Config.QuerySeparator)
1152 | }
1153 |
1154 | if len(parameterSlice) > 0 {
1155 | queryParameterMap = setQueryParameterMap(queryParameterMap, parameterSlice)
1156 | }
1157 |
1158 | if len(Config.Parameters) > 0 {
1159 | queryParameterMap = setQueryParameterMap(queryParameterMap, Config.Parameters)
1160 | }
1161 |
1162 | requrl = urlSlice[0]
1163 | urlNoQueries := urlSlice[0]
1164 |
1165 | // adding query parameter
1166 | for key, val := range queryParameterMap {
1167 | if !strings.Contains(requrl, "?") {
1168 | requrl += "?"
1169 | } else {
1170 | requrl += Config.QuerySeparator
1171 | }
1172 | requrl += key + "=" + val
1173 | }
1174 |
1175 | cb := ""
1176 | if cacheBuster {
1177 | cb = "cb" + randInt()
1178 | }
1179 |
1180 | req := fasthttp.AcquireRequest()
1181 | resp := fasthttp.AcquireResponse()
1182 | defer fasthttp.ReleaseRequest(req)
1183 | defer fasthttp.ReleaseResponse(resp)
1184 | var err error
1185 | if Config.Website.Cache.CBisHTTPMethod {
1186 | req.Header.SetMethod(Config.Website.Cache.CBName)
1187 | } else if Config.DoPost {
1188 | req.Header.SetMethod("POST")
1189 | } else {
1190 | req.Header.SetMethod("GET")
1191 | }
1192 | if Config.Body != "" {
1193 | req.SetBodyString(Config.Body)
1194 | }
1195 | req.SetRequestURI(requrl)
1196 |
1197 | setRequest(req, Config.DoPost, cb, nil, false)
1198 | waitLimiter(errorString)
1199 |
1200 | err = client.Do(req, resp)
1201 | if err != nil {
1202 | 		msg := fmt.Sprintf("%s: client.Do: %s", errorString, err.Error()) // Error: context deadline exceeded -> panic; runtime error
1203 |
1204 | Print(msg+"\n", Red)
1205 | return web, errors.New(msg)
1206 | }
1207 |
1208 | weburl, err := url.Parse(requrl)
1209 | if err != nil {
1210 | msg := fmt.Sprintf("%s: url.Parse: %s", errorString, err.Error())
1211 | Print(msg+"\n", Red)
1212 | return web, errors.New(msg)
1213 | }
1214 |
1215 | tempStatusCode := Config.Website.StatusCode
1216 | 	// Only overwrite the status code if 1. it wasn't set via flag and 2. it's the first and only request or the second of two requests
1217 | if setStatusCode && tempStatusCode != resp.StatusCode() {
1218 | tempStatusCode = resp.StatusCode()
1219 |
1220 | cache = Config.Website.Cache
1221 |
1222 | msg := fmt.Sprintf("The default status code was set to %d\n", tempStatusCode)
1223 | Print(msg, Cyan)
1224 | }
1225 |
1226 | 	// if DeclineCookies is set, only the specified cookies will be used
1227 | 	// otherwise the cookies given by the server AND the specified cookies will be used
1228 | cookiesWebsite := Config.Website.Cookies
1229 | if !Config.DeclineCookies {
1230 | cookiesWebsite = responseCookiesToMap(resp, cookiesWebsite)
1231 | }
1232 |
1233 | /*
1234 | weburl.Host: www.example.com
1235 | weburl.Path: /
1236 | weburl.Hostname():www.example.com
1237 | weburl.String(): https://www.example.com/?test=12
1238 | domain: https://www.example.com
1239 | urlNoQueries: https://www.example.com/
1240 | */
1241 |
1242 | web = WebsiteStruct{
1243 | Headers: headerToMultiMap(&resp.Header),
1244 | Body: string(resp.Body()),
1245 | Cookies: cookiesWebsite,
1246 | StatusCode: tempStatusCode,
1247 | Url: weburl,
1248 | UrlWOQueries: urlNoQueries,
1249 | Queries: queryParameterMap,
1250 | Cache: cache,
1251 | Domain: domain,
1252 | 		// make(map) doesn't work here; it is initialized in the main method
1253 | //Added: make(map[string]bool),
1254 | }
1255 |
1256 | return web, nil
1257 | }
1258 |
1259 | func setQueryParameterMap(queryParameterMap map[string]string, querySlice []string) map[string]string {
1260 | for _, q := range querySlice {
1261 | q = strings.TrimSuffix(q, "\r")
1262 | q = strings.TrimSpace(q)
1263 | if q == "" {
1264 | continue
1265 | } else if !strings.Contains(q, "=") {
1266 | msg := fmt.Sprintf("Specified parameter %s doesn't contain a = and will be skipped\n", q)
1267 | Print(msg, Yellow)
1268 | continue
1269 | } else {
1270 | query := strings.SplitN(q, "=", 2)
1271 | // ok is true, if a query already is set
1272 | val, ok := queryParameterMap[query[0]]
1273 | if ok {
1274 | msg := fmt.Sprintf("Overwriting %s=%s with %s=%s\n", query[0], val, query[0], query[1])
1275 | Print(msg, NoColor)
1276 | }
1277 | queryParameterMap[query[0]] = query[1]
1278 | }
1279 | }
1280 |
1281 | return queryParameterMap
1282 | }
1283 |
1284 | func addDomain(x string, domain string) string {
1285 | if strings.HasPrefix(x, "#") || strings.HasPrefix(x, "mailto:") || strings.HasPrefix(x, "tel:") || strings.HasPrefix(x, "data:") || strings.HasPrefix(x, "javascript:") || (strings.Contains(x, "://") && !strings.HasPrefix(x, "http")) { // we only want http,https,// or relative
1286 | return ""
1287 | }
1288 | x = strings.SplitN(x, "#", 2)[0] // remove everything after # (anchor)
1289 | if strings.HasPrefix(x, "https://"+domain) || strings.HasPrefix(x, "http://"+domain) {
1290 | return x
1291 | } else if strings.HasPrefix(x, "//"+domain) {
1292 | return Config.Website.Url.Scheme + ":" + x // add the scheme to the url
1293 | } else if !strings.HasPrefix(x, "http://") && !strings.HasPrefix(x, "https://") && !strings.HasPrefix(x, "//") {
1294 | if strings.HasPrefix(x, "/") {
1295 | return Config.Website.Domain + x
1296 | } else { // we need to add the basepath to the relative path
1297 | basePath := path.Dir(Config.Website.Url.Path)
1298 | return Config.Website.Domain + strings.TrimSuffix(basePath, "/") + "/" + strings.TrimPrefix(x, "/")
1299 | }
1300 | } else {
1301 | for i, d := range Config.RecDomains {
1302 | if Config.RecDomains[i] == "" {
1303 | continue
1304 | }
1305 | if strings.HasPrefix(x, "https://"+d) || strings.HasPrefix(x, "http://"+d) {
1306 | return x
1307 | }
1308 | if strings.HasPrefix(x, "//"+d) {
1309 | return Config.Website.Url.Scheme + ":" + x // add the scheme to the url
1310 | }
1311 | }
1312 |
1313 | msg := fmt.Sprintf("%s doesn't have %s as domain\n", x, domain)
1314 | PrintVerbose(msg, NoColor, 2)
1315 |
1316 | return ""
1317 | }
1318 | }
1319 |
1320 | func checkRecInclude(x string, recInclude string) bool {
1321 | for _, inc := range strings.Split(recInclude, " ") {
1322 | // remove spaces and skip if someone used multiple spaces instead of one
1323 | if inc == "" {
1324 | continue
1325 | }
1326 | if strings.Contains(x, inc) {
1327 | return true
1328 | }
1329 | }
1330 | return false
1331 | }
1332 |
1333 | func addUrl(urls []string, url string, added map[string]bool, excluded map[string]bool) []string {
1334 | url = addDomain(url, Config.Website.Url.Hostname())
1335 |
1336 | if url != "" {
1337 | 		// Check if the url wasn't added yet and if it satisfies RecInclude (i.e. contains it)
1338 | if excluded[url] {
1339 | 			msg := fmt.Sprintf("Skipped adding %s to the queue because it is on the exclude list\n", url)
1340 | PrintVerbose(msg, NoColor, 2)
1341 | } else if added[url] {
1342 | 			msg := fmt.Sprintf("Skipped adding %s to the queue because it was already added\n", url)
1343 | PrintVerbose(msg, NoColor, 2)
1344 | } else if Config.RecInclude == "" || checkRecInclude(url, Config.RecInclude) {
1345 | urls = append(urls, url)
1346 | added[url] = true
1347 | } else {
1348 | 			msg := fmt.Sprintf("Skipped adding %s to the queue because it doesn't satisfy RecInclude\n", url)
1349 | PrintVerbose(msg, NoColor, 2)
1350 | }
1351 | }
1352 |
1353 | return urls
1354 | }
1355 |
1356 | func CrawlUrls(u string, added map[string]bool, excluded map[string]bool) []string {
1357 | webStruct, err := GetWebsite(u, false, false) // get body without cachebuster. TODO use response w/o cachebuster from recon, so it doesn't have to be fetched again
1358 | if err != nil {
1359 | msg := fmt.Sprintf("Error while crawling %s: %s\n", u, err.Error())
1360 | Print(msg, Red)
1361 | return []string{}
1362 | }
1363 | bodyReader := strings.NewReader(webStruct.Body)
1364 | tokenizer := html.NewTokenizer(bodyReader)
1365 |
1366 | var urls []string
1367 |
1368 | eof := false
1369 | for !eof {
1370 | tokentype := tokenizer.Next()
1371 |
1372 | switch tokentype {
1373 | case html.StartTagToken, html.SelfClosingTagToken:
1374 |
1375 | token := tokenizer.Token()
1376 |
1377 | if token.Data == "a" || token.Data == "link" {
1378 | for _, a := range token.Attr {
1379 | if a.Key == "href" {
1380 | //if strings.HasSuffix(a.Val, ".css") && strings.Contains(a.Val, ".css?") { // TODO: Flag to exclude css files from thorough scanning
1381 | // break
1382 | //}
1383 | urls = addUrl(urls, a.Val, added, excluded)
1384 | break
1385 | }
1386 | }
1387 | } else if token.Data == "script" {
1388 | for _, a := range token.Attr {
1389 | if a.Key == "src" {
1390 | urls = addUrl(urls, a.Val, added, excluded)
1391 | break
1392 | }
1393 | }
1394 | }
1395 |
1396 | 		// When EOF is reached an html.ErrorToken appears
1397 | case html.ErrorToken:
1398 | err := tokenizer.Err()
1399 | if err == io.EOF {
1400 | eof = true
1401 | break
1402 | }
1403 | msg := fmt.Sprintf("error tokenizing HTML: %+v", tokenizer.Err())
1404 | Print(msg, Yellow)
1405 | }
1406 | }
1407 |
1408 | // redirect
1409 | if webStruct.Headers["Location"] != nil {
1410 | if h := webStruct.Headers["Location"][0]; h != "" {
1411 | urls = addUrl(urls, h, added, excluded)
1412 | }
1413 | }
1414 |
1415 | return urls
1416 | }
1417 |
--------------------------------------------------------------------------------
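
The cachebuster checks in pkg/recon.go share one timing fallback: when no hit/miss header exists, requests are sent in pairs, where the even-indexed request carries a fresh cachebuster (expected miss) and the odd-indexed one repeats it (expected hit); the cachebuster only counts as working if every miss took at least Config.HMDiff milliseconds longer than the hit that followed it. A condensed sketch of that decision rule (the function name is illustrative, not part of the scanner):

    // timeIndicatesCachebuster reports whether durations measured in
    // miss/hit pairs (milliseconds) show every miss taking at least
    // hmDiff ms longer than the hit that followed it.
    func timeIndicatesCachebuster(times []int64, hmDiff int64) bool {
    	if len(times) == 0 || len(times)%2 != 0 {
    		return false // incomplete pairs, nothing to conclude
    	}
    	for i := 1; i < len(times); i += 2 {
    		if times[i-1]-times[i] < hmDiff {
    			return false // pair difference below the hit/miss threshold
    		}
    	}
    	return true
    }
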
/pkg/report.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "bytes"
5 | "encoding/json"
6 | "fmt"
7 | "os"
8 | )
9 |
10 | type (
11 | reportResult struct {
12 | Technique string `json:"technique"`
13 | HasError bool `json:"hasError"`
14 | ErrorMessages []string `json:"errorMessages"`
15 | Vulnerable bool `json:"isVulnerable"`
16 | Checks []reportCheck `json:"checks"`
17 | }
18 |
19 | reportCheck struct {
20 | URL string `json:"-"`
21 | Identifier string `json:"identifier"`
22 | Reason string `json:"reason"`
23 | Reflections []string `json:"reflections,omitempty"`
24 | Request reportRequest `json:"request"`
25 | SecondRequest *reportRequest `json:"secondRequest,omitempty"`
26 | }
27 |
28 | reportRequest struct {
29 | CurlCommand string `json:"curlCommand"`
30 | Request string `json:"request"`
31 | Response string `json:"response"`
32 | }
33 |
34 | reportSettings struct {
35 | ReportPath string `json:"-"`
36 | IndentPrefix string `json:"-"`
37 | IndentSuffix string `json:"-"`
38 | }
39 |
40 | ReportWebsite struct {
41 | URL string `json:"url"`
42 | Vulnerable bool `json:"isVulnerable"`
43 | HasError bool `json:"hasError"`
44 | CacheIndicator string `json:"cacheIndicator"`
45 | CBwasFound bool `json:"cacheBusterFound"`
46 | CBName string `json:"cacheBuster"`
47 | ErrorMessages []string `json:"errorMessages"`
48 | Results []reportResult `json:"results"`
49 | }
50 |
51 | Report struct {
52 | Settings reportSettings `json:"-"`
53 | Name string `json:"name"`
54 | Version string `json:"version"`
55 | Vulnerable bool `json:"foundVulnerabilities"`
56 | HasError bool `json:"hasError"`
57 | ErrorMessages []string `json:"errorMessages"`
58 | Date string `json:"date"`
59 | Duration string `json:"duration"`
60 | Command string `json:"command"`
61 |
62 | Config *ConfigStruct `json:"config,omitempty"`
63 |
64 | Websites []ReportWebsite `json:"websites"`
65 | }
66 | )
67 |
68 | func init() {
69 |
70 | }
71 |
72 | func GenerateReport(report Report, filePath string) {
73 | reportPath := filePath + "_Report.json"
74 |
75 | 	file, err := os.OpenFile(reportPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0666)
76 | 	if err != nil {
77 | 		msg := fmt.Sprintf("GenerateReport: os.OpenFile: %s\n", err.Error())
78 | 		PrintFatal(msg)
79 | 	}
80 | 	// defer the close only after the file was opened successfully
81 | 	defer file.Close()
82 | 
83 |
84 | report.Settings.IndentPrefix = ""
85 | report.Settings.IndentSuffix = " "
86 | if Config.EscapeJSON {
87 | j, err := json.MarshalIndent(report, report.Settings.IndentPrefix, report.Settings.IndentSuffix)
88 | if err != nil {
89 | 			msg := fmt.Sprintf("GenerateReport: json.MarshalIndent: %s\n", err.Error())
90 | PrintFatal(msg)
91 | }
92 |
93 | file.WriteString(string(j))
94 | } else {
95 | bf := bytes.NewBuffer([]byte{})
96 | jsonEncoder := json.NewEncoder(bf)
97 | jsonEncoder.SetEscapeHTML(false)
98 | jsonEncoder.SetIndent(report.Settings.IndentPrefix, report.Settings.IndentSuffix)
99 | jsonEncoder.Encode(report)
100 |
101 | file.WriteString(bf.String())
102 | }
103 | msg := fmt.Sprintf("Exported report %s\n", reportPath)
104 | PrintVerbose(msg, NoColor, 1)
105 |
106 | report.Settings.ReportPath = reportPath
107 | }
108 |
--------------------------------------------------------------------------------
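
GenerateReport in pkg/report.go takes two serialization paths because they escape HTML differently: json.MarshalIndent always encodes <, > and & as \u003c, \u003e and \u0026, while a json.Encoder with SetEscapeHTML(false) writes them verbatim, which keeps the HTTP requests and responses embedded in the report readable. A small standalone illustration of the difference:

    package main

    import (
    	"bytes"
    	"encoding/json"
    	"fmt"
    )

    func main() {
    	payload := map[string]string{"body": "<script>alert(1)</script>"}

    	escaped, _ := json.Marshal(payload) // escapes < and > to \u003c and \u003e

    	var buf bytes.Buffer
    	enc := json.NewEncoder(&buf)
    	enc.SetEscapeHTML(false)
    	enc.Encode(payload) // writes <script> verbatim

    	fmt.Println(string(escaped))
    	fmt.Print(buf.String()) // Encode already appends a newline
    }
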
/pkg/request_smuggling.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "bufio"
5 | "crypto/tls"
6 | "fmt"
7 | "net"
8 | "net/url"
9 | "strings"
10 | "time"
11 |
12 | "golang.org/x/net/proxy"
13 | )
14 |
15 | func init() {
16 |
17 | }
18 |
19 | func GenerateHeaderString() string {
20 | headers := ""
21 | userAgent := useragent
22 | cache := Config.Website.Cache
23 | i := 0
24 | for k, v := range Config.Website.Cookies {
25 | if i == 0 {
26 | headers += "Cookie: "
27 | } else {
28 | headers += "; "
29 | }
30 | i++
31 | valToAdd := v
32 | if cache.CBisCookie && k == cache.CBName {
33 | valToAdd = "cb" + randInt()
34 | }
35 | headers += k + "=" + valToAdd
36 | }
37 |
38 | if headers != "" {
39 | headers += "\r\n"
40 | }
41 |
42 | headerSlice := Config.Headers
43 | if cache.CBisHeader {
44 | headerSlice = append(headerSlice, cache.CBName+": cb"+randInt())
45 | }
46 |
47 | for i, h := range headerSlice {
48 | h = strings.TrimSuffix(h, "\r")
49 | h = strings.TrimSpace(h)
50 | if h == "" {
51 | continue
52 | } else if !strings.Contains(h, ":") {
53 | 			msg := "Specified header " + h + " doesn't contain a : and will be skipped"
54 | Print(msg, NoColor)
55 | continue
56 | } else {
57 | hSplitted := strings.SplitN(h, ":", 2)
58 | hSplitted[0] = strings.TrimSpace(hSplitted[0])
59 | hSplitted[1] = strings.TrimSpace(hSplitted[1])
60 |
61 | // Only add header that has the same name as the cachebuster header, if it is the last header of the slice
62 | if cache.CBisHeader && strings.EqualFold(hSplitted[0], cache.CBName) && i+1 != len(headerSlice) {
63 | continue
64 | }
65 |
66 | headers += h + "\r\n"
67 |
68 | if strings.EqualFold(hSplitted[0], "User-Agent") {
69 | userAgent = hSplitted[1]
70 | }
71 | }
72 | }
73 | 	// if it's the same, the user agent wasn't added yet
74 | if userAgent == useragent {
75 | headers += "User-Agent: " + useragent + "\r\n"
76 | }
77 |
78 | return headers
79 | }
80 |
81 | /* Run clte before tecl. Don't test for tecl if clte already works! */
82 | func clte(path string, headers string) string {
83 | httpMethod := "POST"
84 | if Config.Website.Cache.CBisHTTPMethod {
85 | httpMethod = Config.Website.Cache.CBName
86 | }
87 | payload := fmt.Sprintf(""+
88 | "%s %s HTTP/1.1\r\n"+ // POST /about HTTP/1.1
89 | "Host: %s\r\n"+ // Host: example.com
90 | "%s"+ // *Additional Headers generated*
91 | "Transfer-Encoding: chunked\r\n"+ // Transfer-Encoding: chunked
92 | "Content-Length: 4\r\n"+ // Content-Length: 4
93 | "\r\n"+ //
94 | "1\r\n"+ // 1
95 | "Z\r\n"+ // Z
96 | "Q"+ // Q
97 | "", httpMethod, path, Config.Website.Url.Host, headers)
98 |
99 | return payload
100 | }
101 |
102 | func tecl(path string, headers string) string {
103 | httpMethod := "POST"
104 | if Config.Website.Cache.CBisHTTPMethod {
105 | httpMethod = Config.Website.Cache.CBName
106 | }
107 | payload := fmt.Sprintf(""+
108 | "%s %s HTTP/1.1\r\n"+ // POST /about HTTP/1.1
109 | "Host: %s\r\n"+ // Host: example.com
110 | "%s"+ // *Additional Headers generated*
111 | "Transfer-Encoding: chunked\r\n"+ // Transfer-Encoding: chunked
112 | "Content-Length: 6\r\n"+ // Content-Length: 6
113 | "\r\n"+ //
114 | "0\r\n"+ // 0
115 | "\r\n"+ //
116 | "X"+ // X
117 | "", httpMethod, path, Config.Website.Url.Host, headers)
118 | return payload
119 | }
120 |
121 | func clcl(path string, headers string) string {
122 | httpMethod := "POST"
123 | if Config.Website.Cache.CBisHTTPMethod {
124 | httpMethod = Config.Website.Cache.CBName
125 | }
126 | payload := fmt.Sprintf(""+
127 | "%s %s HTTP/1.1\r\n"+ // POST /about HTTP/1.1
128 | "Host: %s\r\n"+ // Host: example.com
129 | "%s"+ // *Additional Headers generated*
130 | "Content-Length: 10\r\n"+ // Content-Length: 10
131 | "Content-Length: 11\r\n"+ // Content-Length: 11
132 | "\r\n"+ //
133 | "M\r\n"+ // M
134 | "1\r\n"+ // 1
135 | "0\r\n"+ // 0
136 | "X"+ // X
137 | "", httpMethod, path, Config.Website.Url.Host, headers)
138 |
139 | return payload
140 | }
141 |
142 | func clcl2(path string, headers string) string {
143 | httpMethod := "POST"
144 | if Config.Website.Cache.CBisHTTPMethod {
145 | httpMethod = Config.Website.Cache.CBName
146 | }
147 | payload := fmt.Sprintf(""+
148 | "%s %s HTTP/1.1\r\n"+ // POST /about HTTP/1.1
149 | "Host: %s\r\n"+ // Host: example.com
150 | "%s"+ // *Additional Headers generated*
151 | "Content-Length: 6\r\n"+ // Content-Length: 11
152 | "Content-Length: 4\r\n"+ // Content-Length: 10
153 | "\r\n"+ //
154 | "M\r\n"+ // M
155 | "1\r\n"+ // 1
156 | "0\r\n"+ // 0
157 | "X"+ // X
158 | "", httpMethod, path, Config.Website.Url.Host, headers)
159 |
160 | return payload
161 | }
162 |
163 | func httpRequestSmuggling(req string, result *reportResult, proxyUrl *url.URL) {
164 | /*
165 | dialer, err := proxy.SOCKS5("tcp", strings.TrimPrefix(Config.ProxyURL, "http://"), nil, nil)
166 | if err != nil {
167 | PrintFatal(err.Error())
168 | }
169 | Print("ads", NoColor)*/
170 | errorString := "httpRequestSmuggling"
171 |
172 | httpsUsed := false
173 | proxyUsed := false
174 | address := Config.Website.Domain
175 | address = strings.TrimSuffix(address, "/")
176 |
177 | if strings.HasPrefix(address, "https://") {
178 | httpsUsed = true
179 | address = strings.TrimPrefix(address, "https://")
180 | if !strings.Contains(address, ":") {
181 | address += ":443"
182 | }
183 | } else if strings.HasPrefix(address, "http://") {
184 | address = strings.TrimPrefix(address, "http://")
185 | if !strings.Contains(address, ":") {
186 | address += ":80"
187 | }
188 | } else {
189 | msg := "Request Smuggling: " + address + " doesn't has http:// or https:// as prefix\n"
190 | Print(msg, Yellow)
191 | result.HasError = true
192 | result.ErrorMessages = append(result.ErrorMessages, msg)
193 | return
194 | }
195 |
196 | if Config.UseProxy {
197 | //proxyUsed = true // proxy support for the raw smuggling requests is currently disabled
198 | proxyUsed = false
199 | }
200 |
201 | timeOutCount := 0
202 | for i := 0; i < 3; i++ {
203 | var err error
204 | var connS *tls.Conn
205 | var conn net.Conn
206 |
207 | var resp string
208 | var msg string
209 |
210 | waitLimiter(fmt.Sprintf("%s %d", errorString, i))
211 | if proxyUsed {
212 | dialerP, err := proxy.FromURL(proxyUrl, proxy.Direct)
213 | /*dialerP, err := proxy.SOCKS5("tcp", proxyUrl.Host, nil, &net.Dialer{
214 | Timeout: 15 * time.Second,
215 | KeepAlive: 15 * time.Second,
216 | })*/
217 | if err != nil {
218 | msg = fmt.Sprintf("%s: proxy.FromURL: %s", errorString, err.Error())
219 | Print(msg+"\n", Red)
220 | result.HasError = true
221 | result.ErrorMessages = append(result.ErrorMessages, msg)
222 | return
223 | }
224 | conn, err = dialerP.Dial("tcp", address)
225 |
226 | if err != nil {
227 | msg = fmt.Sprintf("%s: dialerP.Dial: %s", errorString, err.Error())
228 | Print(msg+"\n", Red)
229 | result.HasError = true
230 | result.ErrorMessages = append(result.ErrorMessages, msg)
231 | }
232 | } else if httpsUsed {
233 | /* This sometimes led to errors when InsecureSkipVerify was not set
234 | if tlsConfig == nil {
235 | tlsConfig = new(tls.Config)
236 | }
237 | */
238 | tlsConfig := &tls.Config{
239 | InsecureSkipVerify: true,
240 | }
241 | connS, err = tls.Dial("tcp", address, tlsConfig)
242 |
243 | if err != nil {
244 | msg = fmt.Sprintf("%s: tls.Dial: %s", errorString, err.Error())
245 | Print(msg+"\n", Red)
246 | result.HasError = true
247 | result.ErrorMessages = append(result.ErrorMessages, msg)
248 | return
249 | }
250 | } else {
251 | dialer := net.Dialer{Timeout: time.Duration(Config.TimeOut) * time.Second}
252 | conn, err = dialer.Dial("tcp", address)
253 |
254 | if err != nil {
255 | msg = fmt.Sprintf("%s: dialerP.Dial: %s", errorString, err.Error())
256 | Print(msg+"\n", Red)
257 | result.HasError = true
258 | result.ErrorMessages = append(result.ErrorMessages, msg)
259 | return
260 | }
261 | }
262 |
263 | err = nil
264 | if proxyUsed {
265 | defer conn.Close()
266 |
267 | fmt.Fprint(conn, req)
268 | conn.SetReadDeadline(time.Now().Add(time.Duration(Config.TimeOut) * time.Second))
269 | resp, err = bufio.NewReader(conn).ReadString('\n')
270 | } else if httpsUsed {
271 | defer connS.Close()
272 |
273 | fmt.Fprint(connS, req)
274 | connS.SetReadDeadline(time.Now().Add(time.Duration(Config.TimeOut) * time.Second))
275 | resp, err = bufio.NewReader(connS).ReadString('\n')
276 | } else {
277 | defer conn.Close()
278 |
279 | fmt.Fprint(conn, req)
280 | conn.SetReadDeadline(time.Now().Add(time.Duration(Config.TimeOut) * time.Second))
281 | resp, err = bufio.NewReader(conn).ReadString('\n')
282 | }
283 |
284 | if err != nil {
285 | msg = fmt.Sprintf("%s: bufio.NewReader.ReadString: %s", errorString, err.Error())
286 | Print(msg+"\n", Yellow)
287 |
288 | // The timeout error is the same for TLS and plain connections; both implement net.Error.Timeout
289 | nerr, _ := err.(net.Error)
290 | if nerr != nil && nerr.Timeout() {
291 | timeOutCount++
292 | msg = fmt.Sprintf("(%d/3) timeouts to confirm Request Smuggling\n", i+1)
293 | Print(msg, Yellow)
294 | //TODO: On a timeout, also send a normal request, which must not time out as well?
295 | } else {
296 | msg = "Aborting test because of: " + err.Error() + "\n"
297 | Print(msg, Yellow)
298 | result.HasError = true
299 | result.ErrorMessages = append(result.ErrorMessages, msg)
300 | return
301 | }
302 | } else {
303 | // If there is no timeout, the Request Smuggling technique wasn't successful!
304 | // TODO: remove this Print!
305 | if strings.Contains(resp, "500") {
306 | reason := "Server returned 500 Internal Server Error. It *may* be vulnerable to this Request Smuggling technique."
307 | fillRequest(result, reason, req, Config.Website.Url.String())
308 | msg = "Response:" + resp + reason + "\n"
309 | Print(msg, Green)
310 | } else {
311 | msg = "Response:" + resp + "Request didn't time out and therefore *likely* isn't vulnerable to this Request Smuggling technique.\n"
312 | PrintVerbose(msg, NoColor, 2)
313 | }
314 | return
315 | }
316 | }
317 |
318 | if timeOutCount == 3 {
319 | msg := "The request timed out 3 times in a row. It *may* be vulnerable to this Request Smuggling technique."
320 | fillRequest(result, msg, req, Config.Website.Url.String())
321 | Print(msg+"\n", Green)
322 | }
323 | }
324 |
325 | func fillRequest(result *reportResult, reason string, req string, reqURL string) {
326 | result.Vulnerable = true
327 | var repCheck reportCheck
328 | repCheck.Reason = reason
329 | repCheck.Request.Request = req
330 | repCheck.Request.Response = "n/a"
331 | repCheck.URL = reqURL
332 | result.Checks = append(result.Checks, repCheck)
333 | }
334 |
--------------------------------------------------------------------------------
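
The detection primitive behind httpRequestSmuggling above can be reduced to: write a raw payload over a TCP (or TLS) connection, arm a read deadline, and treat a net.Error timeout as the *may be vulnerable* signal, while any readable response line counts against the technique. A minimal sketch, assuming a hypothetical probe helper and placeholder host; the payload mirrors the shape clte builds:

```go
package main

import (
	"bufio"
	"fmt"
	"net"
	"time"
)

// probe writes a raw smuggling payload and reports whether the read timed out.
func probe(address, payload string, timeout time.Duration) (timedOut bool, err error) {
	conn, err := net.DialTimeout("tcp", address, timeout)
	if err != nil {
		return false, err
	}
	defer conn.Close()

	fmt.Fprint(conn, payload)
	conn.SetReadDeadline(time.Now().Add(timeout))
	_, err = bufio.NewReader(conn).ReadString('\n')
	if nerr, ok := err.(net.Error); ok && nerr.Timeout() {
		return true, nil // timeout: the back end may have consumed the smuggled bytes
	}
	return false, err // a response line (or another error) means no timeout signal
}

func main() {
	payload := "POST / HTTP/1.1\r\nHost: example.com\r\n" +
		"Transfer-Encoding: chunked\r\nContent-Length: 4\r\n\r\n1\r\nZ\r\nQ"
	timedOut, err := probe("example.com:80", payload, 10*time.Second)
	fmt.Println(timedOut, err)
}
```

As in the scanner, a single timeout is not conclusive; httpRequestSmuggling requires three timeouts in a row before flagging the technique.
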
/pkg/requests.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "net/http"
7 | "slices"
8 | "strconv"
9 | "strings"
10 | "sync"
11 | "time"
12 |
13 | "github.com/valyala/fasthttp"
14 | "github.com/xplorfin/fasthttp2curl"
15 | )
16 |
17 | const (
18 | RESP_SPLIT_HEADER = "Web_Cache"
19 | RESP_SPLIT_VALUE = "Vulnerability_Scanner"
20 | NO_DUPE_HEADER = 0
21 | DUPE_HEADER_BEFORE = 1
22 | DUPE_HEADER_AFTER = 2
23 | )
24 |
25 | type requestParams struct {
26 | repResult *reportResult
27 | headers []string
28 | values []string
29 | parameters []string
30 | technique string
31 | name string
32 | identifier string
33 | poison string
34 | ogParam string
35 | url string
36 | cb string
37 | success string
38 | bodyString string
39 | prependCB bool
40 | forcePost bool
41 | duplicateHeaders int
42 | newCookie map[string]string
43 | m *sync.Mutex
44 | }
45 |
46 | func getRespSplit() string {
47 | return "\\r\\n" + RESP_SPLIT_HEADER + ": " + RESP_SPLIT_VALUE
48 | }
49 |
50 | func getHeaderReflections(header http.Header, headersWithPoison []string) []string {
51 | var parts []string
52 | for _, name := range headersWithPoison {
53 | if val, ok := header[name]; ok {
54 | // strings.Join if a header has multiple values
55 | parts = append(parts, fmt.Sprintf("%s: %s", name, strings.Join(val, ",")))
56 | }
57 | }
58 | return parts
59 | }
60 |
61 | func checkPoisoningIndicators(repResult *reportResult, repCheck reportCheck, success string, body string, poison string, statusCode1 int, statusCode2 int, sameBodyLength bool, header http.Header, recursive bool) []string {
62 | headersWithPoison := []string{}
63 | // Response splitting check
64 | if strings.Contains(repCheck.Identifier, "response splitting") {
65 | for x := range header {
66 | if x == RESP_SPLIT_HEADER && header.Get(x) == RESP_SPLIT_VALUE {
67 | repCheck.Reason = "HTTP Response Splitting"
68 | break
69 | }
70 | }
71 | if repCheck.Reason == "" {
72 | return headersWithPoison // no response splitting header found, return empty slice
73 | }
74 | // Other checks
75 | } else {
76 |
77 | if strings.Contains(Config.ReasonTypes, "header") && header != nil && poison != "" && poison != "http" && poison != "https" && poison != "nothttps" && poison != "1" { // dont check for reflection of http/https/nothttps (used by forwarded headers), 1 (used by DOS) or empty poison
78 | for x := range header {
79 | if x == RESP_SPLIT_HEADER && header.Get(x) == RESP_SPLIT_VALUE {
80 | repCheck.Reason = "HTTP Response Splitting"
81 | }
82 | if strings.Contains(header.Get(x), poison) {
83 | headersWithPoison = append(headersWithPoison, x)
84 | }
85 | }
86 | }
87 |
88 | if repCheck.Reason == "" {
89 | // check for reflection in body
90 | if strings.Contains(Config.ReasonTypes, "body") && poison != "" && poison != "http" && poison != "https" && poison != "nothttps" && poison != "1" && strings.Contains(body, poison) { // dont check for reflection of http/https/nothttps (used by forwarded headers), 1 (used by DOS) or empty poison
91 | if len(headersWithPoison) > 0 {
92 | repCheck.Reason = fmt.Sprintf("Reflection Body and Header: Response Body contained poison value %s %d times and Response Header(s) %s contained poison value %s", poison, strings.Count(body, poison), strings.Join(headersWithPoison, ", "), poison)
93 | } else {
94 | repCheck.Reason = fmt.Sprintf("Reflection Body: Response Body contained poison value %s %d times", poison, strings.Count(body, poison))
95 | }
96 | repCheck.Reflections = findOccurrencesWithContext(body, poison, 25)
97 | repCheck.Reflections = append(repCheck.Reflections, getHeaderReflections(header, headersWithPoison)...)
98 | // check for reflection in headers
99 | } else if len(headersWithPoison) > 0 {
100 | repCheck.Reason = fmt.Sprintf("Reflection Header: Response Header(s) %s contained poison value %s", strings.Join(headersWithPoison, ", "), poison)
101 | repCheck.Reflections = getHeaderReflections(header, headersWithPoison)
102 | // check for different status code
103 | } else if strings.Contains(Config.ReasonTypes, "status") && statusCode1 >= 0 && statusCode1 != Config.Website.StatusCode && statusCode1 == statusCode2 {
104 | // check if status code should be ignored
105 | for _, status := range Config.IgnoreStatus {
106 | if statusCode1 == status || Config.Website.StatusCode == status {
107 | PrintVerbose("Skipped Status Code "+strconv.Itoa(status)+"\n", Cyan, 1) // TODO is it necessary to check if default status code changed?
108 | return headersWithPoison
109 | }
110 | }
111 |
112 | if !recursive {
113 | var tmpWebsite WebsiteStruct
114 | var err error
115 |
116 | // try up to 3 times
117 | count := 3
118 | for i := range count {
119 | Print(fmt.Sprintln("Status Code", statusCode1, "differed from the default", Config.Website.StatusCode, ", sending verification request", i+1, "from up to 3"), Yellow)
120 | tmpWebsite, err = GetWebsite(Config.Website.Url.String(), true, true)
121 | if err == nil {
122 | Print(fmt.Sprintln("The verification request returned the Status Code", tmpWebsite.StatusCode), Yellow)
123 | break
124 | }
125 | }
126 | if err != nil {
127 | repResult.HasError = true
128 | msg := fmt.Sprintf("%s: couldn't verify if status code %d is the new default status code, because the verification encountered the following error %d times: %s", repCheck.URL, statusCode1, count, err.Error())
129 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
130 | } else {
131 | Config.Website = tmpWebsite
132 | }
133 | return checkPoisoningIndicators(repResult, repCheck, success, body, poison, statusCode1, statusCode2, sameBodyLength, header, true)
134 | } else {
135 | repCheck.Reason = fmt.Sprintf("Changed Status Code: Status Code %d differed from %d", statusCode1, Config.Website.StatusCode)
136 | }
137 | // check for different body length
138 | } else if strings.Contains(Config.ReasonTypes, "length") && Config.CLDiff != 0 && success != "" && sameBodyLength && len(body) > 0 && compareLengths(len(body), len(Config.Website.Body), Config.CLDiff) {
139 | if !recursive {
140 | var tmpWebsite WebsiteStruct
141 | var err error
142 |
143 | // try up to 3 times
144 | count := 3
145 | for range count {
146 | tmpWebsite, err = GetWebsite(Config.Website.Url.String(), true, true)
147 | if err == nil {
148 | break
149 | }
150 | }
151 | if err != nil {
152 | repResult.HasError = true
153 | msg := fmt.Sprintf("%s: couldn't verify if body length %d is the new default body length, because the verification request encountered the following error %d times: %s", repCheck.URL, statusCode1, count, err.Error())
154 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
155 | } else {
156 | Config.Website = tmpWebsite
157 | }
158 | return checkPoisoningIndicators(repResult, repCheck, success, body, poison, statusCode1, statusCode2, sameBodyLength, header, true)
159 | } else {
160 | repCheck.Reason = fmt.Sprintf("Changed Content Length: Length %d differed more than %d bytes from normal length %d", len(body), Config.CLDiff, len(Config.Website.Body))
161 | }
162 | } else {
163 | return headersWithPoison
164 | }
165 | }
166 | }
167 |
168 | PrintNewLine()
169 | Print(success, Green)
170 | msg := "URL: " + repCheck.URL + "\n"
171 | Print(msg, Green)
172 | msg = "Reason: " + repCheck.Reason + "\n"
173 | Print(msg, Green)
174 | if len(repCheck.Reflections) > 0 {
175 | msg = "Reflections: " + strings.Join(repCheck.Reflections, " ... ") + "\n"
176 | Print(msg, Green)
177 | }
178 | msg = "Curl 1st Request: " + repCheck.Request.CurlCommand + "\n"
179 | Print(msg, Green)
180 | msg = "Curl 2nd Request: " + repCheck.SecondRequest.CurlCommand + "\n\n"
181 | Print(msg, Green)
182 | repResult.Vulnerable = true
183 | repResult.Checks = append(repResult.Checks, repCheck)
184 | return headersWithPoison
185 | }
186 |
187 | func compareLengths(len1 int, len2 int, limit int) bool {
188 |
189 | var diff int
190 | if len1 >= len2 {
191 | diff = len1 - len2
192 | } else {
193 | diff = len2 - len1
194 | }
195 |
196 | return diff > limit
197 | }
198 |
199 | /* Check whether the response makes sense or the continuation should be stopped. Stop if body, status code, and headers are equal to the default response */
200 | func stopContinuation(body []byte, statusCode int, headers map[string][]string) bool {
201 | if string(body) != Config.Website.Body {
202 | return false
203 | } else if statusCode != Config.Website.StatusCode {
204 | return false
205 | } else if len(headers) != len(Config.Website.Headers) {
206 | return false
207 | }
208 | for k, v := range headers {
209 | v2 := Config.Website.Headers[k]
210 |
211 | if !slices.Equal(v, v2) {
212 | return false
213 | }
214 | }
215 | return true
216 | }
217 |
218 | func addParameters(urlStr *string, parameters []string) {
219 | for _, p := range parameters {
220 | if p == "" {
221 | continue
222 | }
223 | if !strings.Contains(*urlStr, "?") {
224 | *urlStr += "?"
225 | } else {
226 | *urlStr += Config.QuerySeparator
227 | }
228 | *urlStr += p
229 | }
230 | }
231 |
232 | func firstRequest(rp requestParams) (body []byte, respStatusCode int, repRequest reportRequest, respHeaders map[string][]string, err error) {
233 | req := fasthttp.AcquireRequest()
234 | resp := fasthttp.AcquireResponse()
235 | defer fasthttp.ReleaseRequest(req)
236 | defer fasthttp.ReleaseResponse(resp)
237 | req.Header.DisableNormalizing()
238 |
239 | var msg string
240 |
241 | if rp.headers == nil {
242 | rp.headers = []string{""}
243 | }
244 | if rp.values == nil {
245 | rp.values = []string{""}
246 | }
247 | if rp.parameters == nil {
248 | rp.parameters = []string{""}
249 | }
250 |
251 | if rp.values[0] == "2ndrequest" {
252 | rp.identifier = fmt.Sprintf("2nd request of %s", rp.identifier)
253 | } else {
254 | rp.identifier = fmt.Sprintf("1st request of %s", rp.identifier)
255 | }
256 |
257 | // check if headers and values have the same length
258 | if len(rp.headers) != len(rp.values) && rp.values[0] != "2ndrequest" {
259 | msg = fmt.Sprintf("%s: len(header) %s %d != len(value) %s %d\n", rp.identifier, rp.headers, len(rp.headers), rp.values, len(rp.values))
260 | Print(msg, Red)
261 | return body, -1, repRequest, nil, errors.New(msg)
262 | }
263 |
264 | addParameters(&rp.url, rp.parameters)
265 |
266 | if !rp.forcePost && Config.Website.Cache.CBisHTTPMethod && rp.values[0] != "2ndrequest" {
267 | req.Header.SetMethod(Config.Website.Cache.CBName)
268 | } else if Config.DoPost || rp.forcePost {
269 | if rp.bodyString == "" {
270 | rp.bodyString = Config.Body
271 | }
272 | req.Header.SetMethod("POST")
273 | req.SetBodyString(rp.bodyString)
274 | } else {
275 | req.Header.SetMethod("GET")
276 | if rp.bodyString != "" {
277 | req.SetBodyString(rp.bodyString)
278 | }
279 | }
280 | req.SetRequestURI(rp.url)
281 |
282 | setRequest(req, Config.DoPost, rp.cb, rp.newCookie, rp.prependCB)
283 | repRequest.Request = req.String()
284 |
285 | for i := range rp.headers {
286 | if rp.headers[i] == "" {
287 | continue
288 | }
289 | if rp.values[0] == "2ndrequest" {
290 | msg = rp.identifier + ": 2nd request doesn't allow headers to be set\n"
291 | Print(msg, Red)
292 | break
293 | }
294 | if strings.EqualFold(rp.headers[i], "Host") {
295 | switch rp.duplicateHeaders {
296 | case NO_DUPE_HEADER:
297 | msg := fmt.Sprintf("Overwriting Host:%s with Host:%s\n", req.Host(), rp.values[i])
298 | PrintVerbose(msg, NoColor, 2)
299 | req.Header.SetHost(rp.values[i])
300 | case DUPE_HEADER_BEFORE:
301 | req.Header.SetProtocol("HTTP/1.1\r\n" + rp.headers[i] + ": " + rp.values[i])
302 | case DUPE_HEADER_AFTER:
303 | req.Header.SetHost(string(req.Host()) + "\r\n" + rp.headers[i] + ": " + rp.values[i])
304 | }
305 | } else if rp.headers[i] != "" {
306 | if h := req.Header.Peek(rp.headers[i]); h != nil {
307 | if rp.duplicateHeaders != NO_DUPE_HEADER { // TODO differentiate between before and after
308 | msg := fmt.Sprintf("Overwriting %s:%s with %s:%s\n", rp.headers[i], h, rp.headers[i], rp.values[i])
309 | PrintVerbose(msg, NoColor, 2)
310 | req.Header.Set(rp.headers[i], rp.values[i])
311 | } else {
312 | req.Header.Add(rp.headers[i], rp.values[i])
313 | }
314 | } else {
315 | req.Header.Set(rp.headers[i], rp.values[i])
316 | }
317 | }
318 | }
319 | waitLimiter(rp.identifier)
320 |
321 | // Do request
322 | err = client.Do(req, resp)
323 | if err != nil {
324 | msg = fmt.Sprintf("%s: client.Do: %s\n", rp.identifier, err.Error())
325 | Print(msg, Red)
326 | return body, -1, repRequest, nil, errors.New(msg)
327 | }
328 | body = resp.Body()
329 | respHeaders = headerToMultiMap(&resp.Header)
330 |
331 | if resp.StatusCode() != Config.Website.StatusCode {
332 | msg = fmt.Sprintf("Unexpected Status Code %d for %s\n", resp.StatusCode(), rp.identifier)
333 | Print(msg, Yellow)
334 | }
335 |
336 | if stopContinuation(body, resp.StatusCode(), respHeaders) {
337 | msg := "stop"
338 | return body, resp.StatusCode(), repRequest, respHeaders, errors.New(msg)
339 | }
340 |
341 | // Add the request as curl command to the report
342 | command, err := fasthttp2curl.GetCurlCommandFastHttp(req)
343 | if err != nil {
344 | PrintVerbose("Error: fasthttp2curl: "+err.Error()+"\n", Yellow, 1)
345 | }
346 |
347 | repRequest.CurlCommand = command.String()
348 | PrintVerbose("Curl command: "+repRequest.CurlCommand+"\n", NoColor, 2)
349 |
350 | // Add response without body to report
351 | resp.SkipBody = true
352 | repRequest.Response = resp.String()
353 |
354 | return body, resp.StatusCode(), repRequest, respHeaders, nil
355 | }
356 |
357 | func secondRequest(rpFirst requestParams) ([]byte, int, reportRequest, map[string][]string, error) {
358 | var parameter []string
359 | if !strings.Contains(rpFirst.ogParam, NOOGPARAM) { // Only add original parameter if it existed
360 | parameter = append(parameter, rpFirst.ogParam)
361 | }
362 |
363 | rp := requestParams{
364 | parameters: parameter,
365 | values: []string{"2ndrequest"},
366 | identifier: rpFirst.identifier,
367 | url: rpFirst.url,
368 | cb: rpFirst.cb,
369 | }
370 |
371 | body, statusCode, repRequest, header, err := firstRequest(rp)
372 |
373 | return body, statusCode, repRequest, header, err
374 | }
375 |
376 | /* Return values: the string slice is needed for response splitting, the bools impact and unkeyed are only needed for ScanParameters */
377 | func issueRequests(rp requestParams) (respsplit []string, impact bool, unkeyed bool) {
378 | var repCheck reportCheck
379 | repCheck.Identifier = rp.identifier
380 | repCheck.URL = rp.url
381 |
382 | body1, statusCode1, repRequest, header1, err := firstRequest(rp)
383 | if err != nil {
384 | if err.Error() != "stop" {
385 | if rp.m != nil {
386 | rp.m.Lock()
387 | defer rp.m.Unlock()
388 | }
389 | rp.repResult.HasError = true
390 | rp.repResult.ErrorMessages = append(rp.repResult.ErrorMessages, err.Error())
391 | }
392 |
393 | return nil, false, false
394 | }
395 | repCheck.Request = repRequest
396 |
397 | impactful := firstRequestPoisoningIndicator(rp.identifier, body1, rp.poison, header1, Config.Website.Cache.CBName == rp.name, rp.cb, statusCode1)
398 |
399 | if Config.Website.Cache.NoCache || Config.Website.Cache.Indicator == "age" {
400 | time.Sleep(1 * time.Second) // wait a second to ensure that age header is not set to 0
401 | }
402 |
403 | body2, statusCode2, repRequest, respHeader, err := secondRequest(rp)
404 | if err != nil {
405 | if err.Error() != "stop" {
406 | if rp.m != nil {
407 | rp.m.Lock()
408 | defer rp.m.Unlock()
409 | }
410 | rp.repResult.HasError = true
411 | rp.repResult.ErrorMessages = append(rp.repResult.ErrorMessages, err.Error())
412 | }
413 | return nil, impactful, false
414 | }
415 | repCheck.SecondRequest = &repRequest
416 | sameBodyLength := len(body1) == len(body2)
417 |
418 | // Check for cache hit
419 | hit := false
420 | for _, v := range respHeader[Config.Website.Cache.Indicator] {
421 | indicValue := strings.TrimSpace(strings.ToLower(v))
422 | hit = hit || checkCacheHit(indicValue, Config.Website.Cache.Indicator)
423 | }
424 |
425 | // Lock here, to prevent false positives and too many GetWebsite requests
426 | if rp.m != nil {
427 | rp.m.Lock()
428 | defer rp.m.Unlock()
429 | }
430 | responseSplittingHeaders := checkPoisoningIndicators(rp.repResult, repCheck, rp.success, string(body2), rp.poison, statusCode1, statusCode2, sameBodyLength, respHeader, false)
431 |
432 | return responseSplittingHeaders, impactful, hit
433 | }
434 |
435 | func firstRequestPoisoningIndicator(identifier string, body []byte, poison string, header map[string][]string, identifierIsCB bool, cb string, statusCode int) bool {
436 | var reason string
437 | if poison != "" && poison != "http" && poison != "https" && poison != "nothttps" && poison != "1" { // dont check for reflection of http/https/nothttps (used by forwarded headers), 1 (used by DOS) or empty poison
438 | if strings.Contains(string(body), poison) || (identifierIsCB && strings.Contains(string(body), cb)) {
439 | reason = "Response Body contained " + poison
440 | }
441 | var reflections []string
442 | for x := range header {
443 | for _, v := range header[x] {
444 | if strings.Contains(v, poison) || (identifierIsCB && strings.Contains(v, cb)) {
445 | reflections = append(reflections, x)
446 | }
447 | }
448 | }
449 | if len(reflections) > 0 {
450 | reason = "Response Header(s) " + strings.Join(reflections, ", ") + " contained " + poison
451 | }
452 | }
453 | if Config.Website.StatusCode != statusCode && reason == "" {
454 | reason = fmt.Sprintf("Status Code %d differed from %d", statusCode, Config.Website.StatusCode)
455 | }
456 | if Config.CLDiff != 0 && reason == "" && len(body) > 0 && compareLengths(len(body), len(Config.Website.Body), Config.CLDiff) {
457 | reason = fmt.Sprintf("Length %d differed more than %d bytes from normal length %d", len(body), Config.CLDiff, len(Config.Website.Body))
458 | }
459 |
460 | if reason != "" {
461 | msg := identifier + ": " + reason + "\n"
462 | Print(msg, Cyan)
463 | return true
464 | } else {
465 | return false
466 | }
467 |
468 | }
469 |
--------------------------------------------------------------------------------
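
The firstRequest/secondRequest pair above implements the classic two-request poisoning check: request 1 sends the poison with a fresh cachebuster, request 2 repeats the same cachebuster without the poison, and anything poisoned in response 2 must have come from the cache. Stripped of the scanner's fasthttp plumbing and report bookkeeping, the idea fits in a few lines; the target URL, the cb parameter, and the X-Forwarded-Host header here are illustrative placeholders, not the scanner's actual request logic:

```go
package main

import (
	"fmt"
	"io"
	"math/rand"
	"net/http"
	"strings"
)

func main() {
	// Fresh cachebuster so both requests hit the same, previously uncached key
	target := fmt.Sprintf("https://example.com/?cb=cb%d", rand.Int())
	poison := fmt.Sprintf("p%d", rand.Int())

	// 1st request: send the poison via a potentially unkeyed header
	req, _ := http.NewRequest("GET", target, nil)
	req.Header.Set("X-Forwarded-Host", poison)
	if resp, err := http.DefaultClient.Do(req); err == nil {
		resp.Body.Close()
	}

	// 2nd request: same cachebuster, no poison header
	resp, err := http.Get(target)
	if err != nil {
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)

	// Poison in the second response can only have been served from the cache
	if strings.Contains(string(body), poison) {
		fmt.Println("poison reflected from cache: likely vulnerable")
	}
}
```

The real checkPoisoningIndicators additionally weighs status-code and body-length changes and re-verifies them with extra requests before reporting.
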
/pkg/techniques.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "fmt"
5 | "io"
6 | "net/url"
7 | "slices"
8 | "strconv"
9 | "strings"
10 | "sync"
11 |
12 | "golang.org/x/net/html"
13 | )
14 |
15 | var impactfulQueries []string
16 | var unkeyedQueries []string
17 |
18 | const NOOGPARAM = "NoOGParameter"
19 |
20 | func init() {
21 | }
22 |
23 | /* Scan cookies for poisoning */
24 | func ScanCookies() reportResult {
25 | var repResult reportResult
26 | repResult.Technique = "Cookies"
27 | i := 0
28 | for k, v := range Config.Website.Cookies {
29 | poison := "p" + randInt()
30 | msg := fmt.Sprintf("Checking cookie %s (%d/%d)\n", k, i+1, len(Config.Website.Cookies))
31 | Print(msg, NoColor)
32 | i++
33 |
34 | rUrl := Config.Website.Url.String()
35 | cb := "cb" + randInt()
36 | success := fmt.Sprintf("Cookie %s was successfully poisoned! cachebuster %s: %s poison: %s\n", k, Config.Website.Cache.CBName, cb, poison)
37 | identifier := k + "=" + v
38 | msg = fmt.Sprintf("Overwriting %s=%s with %s=%s\n", k, v, k, poison)
39 | Print(msg, NoColor)
40 |
41 | newCookie := map[string]string{}
42 | newCookie["key"] = k
43 | newCookie["value"] = poison
44 |
45 | rp := requestParams{
46 | repResult: &repResult,
47 | headers: []string{""},
48 | values: []string{""},
49 | name: k,
50 | identifier: identifier,
51 | poison: poison,
52 | url: rUrl,
53 | cb: cb,
54 | success: success,
55 | bodyString: "",
56 | forcePost: false,
57 | m: nil,
58 | newCookie: newCookie,
59 | }
60 | responseSplittingHeaders, _, _ := issueRequests(rp)
61 |
62 | // check for response splitting, if poison was reflected in a header
63 | for _, responseSplittingHeader := range responseSplittingHeaders {
64 | msg := fmt.Sprintf("Checking cookie %s for Response Splitting, because it was reflected in the header %s\n", k, responseSplittingHeader)
65 | PrintVerbose(msg, Cyan, 1)
66 |
67 | rp.poison += getRespSplit()
68 | rp.url = rUrl
69 | rp.cb = "cb" + randInt()
70 | rp.success = fmt.Sprintf("Cookie %s successfully poisoned the header %s with Response Splitting! cachebuster %s: %s poison: %s\n", k, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.poison)
71 | rp.identifier += " response splitting"
72 |
73 | msg = fmt.Sprintf("Overwriting %s=%s with %s=%s\n", k, v, k, rp.poison)
74 | Print(msg, NoColor)
75 |
76 | issueRequests(rp)
77 | }
78 | }
79 | return repResult
80 | }
81 |
82 | func ScanForwardingHeaders() reportResult {
83 | var repResult reportResult
84 | repResult.Technique = "Forward/Host Headers"
85 |
86 | // Host header
87 | header := "Host"
88 |
89 | portInt := 31337
90 | // Check if port is already contained in default response
91 | for searchBodyHeadersForString(strconv.Itoa(portInt), Config.Website.Body, Config.Website.Headers) {
92 | PrintVerbose("Port number "+strconv.Itoa(portInt)+" was already present in the website's response. Adding 1 to it.\n", NoColor, 2)
93 | portInt++
94 | }
95 | port := strconv.Itoa(portInt)
96 |
97 | values := []string{Config.Website.Url.Host + ":" + port, Config.Website.Url.Host + ":@" + port, Config.Website.Url.Host + " " + port, Config.Website.Url.Host + ".p" + randInt(), "p" + randInt() + "." + Config.Website.Url.Host}
98 | for _, value := range values {
99 | ForwardHeadersTemplate(&repResult, []string{header}, []string{value}, header, value, NO_DUPE_HEADER)
100 | }
101 |
102 | // Duplicate Host header
103 | headers := []string{"Host", "hOsT"}
104 | dupes := []int{DUPE_HEADER_BEFORE, DUPE_HEADER_AFTER}
105 | for _, header := range headers {
106 | for _, dupe := range dupes {
107 | poison := "p" + randInt()
108 | ForwardHeadersTemplate(&repResult, []string{header}, []string{poison}, header, poison, dupe)
109 | }
110 | }
111 |
112 | // X-Forwarded Headers
113 | headers = []string{"X-Forwarded-Host", "X-Forwarded-Scheme"}
114 | poison := "p" + randInt()
115 | values = []string{poison, "nothttps"}
116 | identifier := "X-Forwarded-Host and X-Forwarded-Scheme"
117 | ForwardHeadersTemplate(&repResult, headers, values, identifier, poison, NO_DUPE_HEADER)
118 |
119 | values = []string{poison, "https"}
120 | ForwardHeadersTemplate(&repResult, headers, values, identifier, poison, NO_DUPE_HEADER)
121 |
122 | // Forwarded Header
123 | header = "Forwarded"
124 | poison = "p" + randInt()
125 | value := "host=" + poison
126 | ForwardHeadersTemplate(&repResult, []string{header}, []string{value}, header, poison, NO_DUPE_HEADER)
127 |
128 | // X-Forwarded-Port Header
129 | header = "X-Forwarded-Port"
130 | value = port
131 | ForwardHeadersTemplate(&repResult, []string{header}, []string{value}, header, value, NO_DUPE_HEADER)
132 |
133 | // X-Forwarded-Port Scheme
134 | header = "X-Forwarded-Scheme"
135 | values = []string{"http", "https"}
136 | for _, value := range values {
137 | ForwardHeadersTemplate(&repResult, []string{header}, []string{value}, header, value, NO_DUPE_HEADER)
138 | }
139 |
140 | // X-Original-URL Header (https://samcurry.net/abusing-http-path-normalization-and-cache-poisoning-to-steal-rocket-league-accounts)
141 | header = "X-Original-URL"
142 | poison = "p" + randInt()
143 | value = "http:\\\\" + poison + "/foo"
144 | ForwardHeadersTemplate(&repResult, []string{header}, []string{value}, header, poison, NO_DUPE_HEADER)
145 |
146 | return repResult
147 | }
148 |
149 | func ForwardHeadersTemplate(repResult *reportResult, headers []string, values []string, identifier string, poison string, duplicateHeaders int) {
150 | rUrl := Config.Website.Url.String()
151 | cb := "cb" + randInt()
152 | success := fmt.Sprintf("%s was successfully poisoned! cachebuster %s: %s poison: %s\n", headers, Config.Website.Cache.CBName, cb, values)
153 |
154 | rp := requestParams{
155 | repResult: repResult,
156 | headers: headers,
157 | values: values,
158 | identifier: identifier,
159 | poison: poison,
160 | url: rUrl,
161 | cb: cb,
162 | success: success,
163 | bodyString: "",
164 | forcePost: false,
165 | duplicateHeaders: duplicateHeaders,
166 | m: nil,
167 | }
168 | responseSplittingHeaders, _, _ := issueRequests(rp)
169 |
170 | // check for response splitting, if poison was reflected in a header
171 | for _, responseSplittingHeader := range responseSplittingHeaders {
172 | rp.values[0] += getRespSplit()
173 | msg := fmt.Sprintf("Checking header(s) %s with value(s) %s for Response Splitting, because it was reflected in the header %s\n", rp.headers, rp.values, responseSplittingHeader)
174 | PrintVerbose(msg, Cyan, 1)
175 |
176 | rp.poison += getRespSplit()
177 | rp.url = rUrl
178 | rp.cb = "cb" + randInt()
179 | rp.success = fmt.Sprintf("%s successfully poisoned the header %s with Response Splitting! cachebuster %s: %s poison: %s\n", headers, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.values)
180 | rp.identifier += " response splitting"
181 |
182 | issueRequests(rp)
183 | }
184 | }
185 |
186 | func ScanHTTPRequestSmuggling(proxyURL *url.URL) reportResult {
187 | var repResult reportResult
188 | identifier := "HTTP Request Smuggling"
189 | repResult.Technique = identifier
190 |
191 | path := Config.Website.Url.Path
192 | if Config.Website.Cache.CBisParameter {
193 | path, _ = addCachebusterParameter(path, "", Config.Website.Cache.CBName, false)
194 | }
195 | if path == "" {
196 | path = "/"
197 | }
198 | headers := GenerateHeaderString()
199 |
200 | PrintVerbose("Trying CLTE Request Smuggling\n", NoColor, 1)
201 | req := clte(path, headers)
202 | httpRequestSmuggling(req, &repResult, proxyURL)
203 |
204 | if !repResult.Vulnerable {
205 | PrintVerbose("Trying TECL Request Smuggling\n", NoColor, 1)
206 | req = tecl(path, headers)
207 | httpRequestSmuggling(req, &repResult, proxyURL)
208 | }
209 |
210 | if !repResult.Vulnerable {
211 | PrintVerbose("Trying CLCL Request Smuggling\n", NoColor, 1)
212 | req = clcl(path, headers)
213 | httpRequestSmuggling(req, &repResult, proxyURL)
214 | }
215 |
216 | if !repResult.Vulnerable {
217 | PrintVerbose("Trying CLCL2 Request Smuggling\n", NoColor, 1)
218 | req = clcl2(path, headers)
219 | httpRequestSmuggling(req, &repResult, proxyURL)
220 | }
221 |
222 | return repResult
223 | }
224 |
225 | /* Scan headers for poisoning */
226 | func ScanHeaders(headerList []string) reportResult {
227 | var repResult reportResult
228 | repResult.Technique = "Headers"
229 | for _, header := range Config.Headers { // add custom headers to list
230 | headerVal := strings.Split(header, ":")[0]
231 | headerList = append(headerList, headerVal)
232 | }
233 |
234 | threads := Config.Threads
235 | if Config.Website.Cache.CBisHTTPMethod {
236 | threads = 1 // No multithreading if an HTTP method is used as cachebuster; otherwise there will be a lot of false negatives/positives
237 | PrintVerbose("Can only scan single-threaded because an HTTP method is used as cachebuster...\n", Yellow, 1)
238 | }
239 | sem := make(chan int, threads)
240 | var wg sync.WaitGroup
241 | wg.Add(len(headerList))
242 | var m sync.Mutex
243 |
244 | msg := fmt.Sprintf("Testing %d headers\n", len(headerList))
245 | PrintVerbose(msg, NoColor, 1)
246 |
247 | for i, header := range headerList {
248 | header = strings.Trim(header, "\r")
249 | if header == "" {
250 | msg := fmt.Sprintf("Skipping empty header (%d/%d)\n", i+1, len(headerList))
251 | PrintVerbose(msg, NoColor, 2)
252 |
253 | wg.Done()
254 | continue
255 | }
256 |
257 | go func(i int, header string) {
258 | defer wg.Done()
259 | sem <- 1
260 | defer func() { <-sem }() // Release the semaphore no matter what happens; this prevents deadlocks
261 |
262 | msg := fmt.Sprintf("Testing now (%d/%d) %s\n", i+1, len(headerList), header)
263 | PrintVerbose(msg, NoColor, 2)
264 | rUrl := Config.Website.Url.String()
265 | poison := "p" + randInt()
266 | cb := "cb" + randInt()
267 | success := fmt.Sprintf("Header %s was successfully poisoned! cachebuster %s: %s poison: %s\n", header, Config.Website.Cache.CBName, cb, poison)
268 | identifier := fmt.Sprintf("header %s", header)
269 |
270 | rp := requestParams{
271 | repResult: &repResult,
272 | headers: []string{header},
273 | values: []string{poison},
274 | name: header,
275 | identifier: identifier,
276 | poison: poison,
277 | url: rUrl,
278 | cb: cb,
279 | success: success,
280 | bodyString: "",
281 | forcePost: false,
282 | m: &m,
283 | }
284 | responseSplittingHeaders, _, _ := issueRequests(rp)
285 |
286 | // check for response splitting, if poison was reflected in a header
287 | for _, responseSplittingHeader := range responseSplittingHeaders {
288 | msg := fmt.Sprintf("Testing now (%d/%d) %s for Response Splitting, because it was reflected in the header %s\n", i+1, len(headerList), header, responseSplittingHeader)
289 | PrintVerbose(msg, Cyan, 1)
290 |
291 | rp.url = rUrl
292 | rp.cb = "cb" + randInt()
293 | rp.poison += getRespSplit()
294 | rp.success = fmt.Sprintf("Header %s successfully poisoned the header %s with Response Splitting! cachebuster %s: %s poison: %s\n", header, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.poison)
295 | rp.identifier += " response splitting"
296 |
297 | issueRequests(rp)
298 | }
299 | }(i, header)
300 |
301 | }
302 | wg.Wait()
303 |
304 | return repResult
305 | }
306 |
307 | /* Scan query parameters for poisoning */
308 | func ScanParameters(parameterList []string) reportResult {
309 | var repResult reportResult
310 | repResult.Technique = "Parameters"
311 |
312 | parametersToTest := parameterList
313 | for k := range Config.Website.Queries {
314 | if !slices.Contains(parameterList, k) { // only add parameters which are not already in the list
315 | parametersToTest = append(parametersToTest, k) // add custom parameters to list
316 | }
317 | }
318 |
319 | threads := Config.Threads
320 | if Config.Website.Cache.CBisHTTPMethod {
321 | threads = 1 // No multithreading if an HTTP method is used as cachebuster; otherwise there will be a lot of false negatives/positives
322 | PrintVerbose("Can only scan single-threaded because an HTTP method is used as cachebuster...\n", Yellow, 1)
323 | }
324 | sem := make(chan int, threads)
325 | var wg sync.WaitGroup
326 | wg.Add(len(parametersToTest))
327 | var m sync.Mutex
328 |
329 | impactfulQueries = []string{}
330 | unkeyedQueries = []string{}
331 |
332 | msg := fmt.Sprintf("Testing %d parameters\n", len(parametersToTest))
333 | PrintVerbose(msg, NoColor, 1)
334 |
335 | for i, parameter := range parametersToTest {
336 | if parameter == "" {
337 | msg := fmt.Sprintf("Skipping empty query (%d/%d) %s\n", i+1, len(parametersToTest), parameter)
338 | PrintVerbose(msg, NoColor, 2)
339 | wg.Done()
340 | continue
341 | }
342 |
343 | go func(i int, parameter string) {
344 | defer wg.Done()
345 | sem <- 1
346 | defer func() { <-sem }() // Release the semaphore no matter what happens; this prevents deadlocks
347 |
348 | parameter = strings.Trim(parameter, "\r")
349 | msg := fmt.Sprintf("Testing now Parameter (%d/%d) %s\n", i+1, len(parametersToTest), parameter)
350 | PrintVerbose(msg, NoColor, 2)
351 |
352 | rUrl := Config.Website.Url.String()
353 | poison := "p" + randInt()
354 | cb := "cb" + randInt()
355 | success := fmt.Sprintf("Query Parameter %s was successfully poisoned! cachebuster %s: %s poison: %s\n", parameter, Config.Website.Cache.CBName, cb, poison)
356 | identifier := fmt.Sprintf("parameter %s", parameter)
357 |
358 | ogValue := NOOGPARAM
359 | if strings.Contains(strings.ToLower(rUrl), "?"+parameter+"=") || strings.Contains(strings.ToLower(rUrl), "&"+parameter+"=") { // remove the param if it already existed, so that it is only set once, and with the poison value
360 | rUrl, ogValue, _ = removeParam(rUrl, parameter)
361 | }
362 |
363 | rp := requestParams{
364 | repResult: &repResult,
365 | headers: []string{""},
366 | values: []string{poison},
367 | parameters: []string{parameter + "=" + poison},
368 | name: parameter,
369 | identifier: identifier,
370 | poison: poison,
371 | ogParam: parameter + "=" + ogValue,
372 | url: rUrl,
373 | cb: cb,
374 | technique: "parameter",
375 | success: success,
376 | bodyString: "",
377 | forcePost: false,
378 | m: &m,
379 | }
380 | responseSplittingHeaders, impactful, unkeyed := issueRequests(rp)
381 |
382 | if impactful && !unkeyed {
383 | impactfulQueries = append(impactfulQueries, parameter)
384 | } else if unkeyed {
385 | unkeyedQueries = append(unkeyedQueries, parameter)
386 | }
387 |
388 | // check for response splitting, if poison was reflected in a header
389 | for _, responseSplittingHeader := range responseSplittingHeaders {
390 | msg := fmt.Sprintf("Testing now Parameter (%d/%d) %s for Response Splitting, because it was reflected in the header %s\n", i+1, len(parametersToTest), parameter, responseSplittingHeader)
391 | PrintVerbose(msg, Cyan, 1)
392 |
393 | rp.poison += getRespSplit()
394 | rp.parameters = []string{parameter + "=" + rp.poison}
395 | rp.url = rUrl
396 | rp.cb = "cb" + randInt()
397 | rp.success = fmt.Sprintf("Query Parameter %s successfully poisoned the header %s with Response Splitting! cachebuster %s: %s poison: %s\n", parameter, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.poison)
398 | rp.identifier += " response splitting"
399 | issueRequests(rp)
400 | }
401 | }(i, parameter)
402 |
403 | }
404 | wg.Wait()
405 |
406 | return repResult
407 | }
408 |
409 | /* Check for fat GET */
410 | func ScanFatGET() reportResult {
411 | var repResult reportResult
412 | repResult.Technique = "Fat GET"
413 |
414 | if len(impactfulQueries) == 0 {
415 | msg := "No impactful query parameters were found beforehand. Run the query parameter scan (maybe with a different wordlist)."
416 | Print(msg+"\n", Yellow)
417 | repResult.HasError = true
418 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
419 | return repResult
420 | } else {
421 | msg := fmt.Sprintf("The following parameters were found to be impactful and will be tested for parameter cloaking: %s\n", impactfulQueries)
422 | Print(msg, Cyan)
423 | }
424 |
425 | threads := Config.Threads
426 | if Config.Website.Cache.CBisHTTPMethod {
427 | threads = 1 // No multithreading if an HTTP method is used as cachebuster; otherwise there will be a lot of false negatives/positives
428 | PrintVerbose("Can only scan single-threaded because an HTTP method is used as cachebuster...\n", Yellow, 1)
429 | }
430 | sem := make(chan int, threads)
431 | var wg sync.WaitGroup
432 | wg.Add(len(impactfulQueries))
433 | var m sync.Mutex
434 |
435 | headers := []string{"", "", "X-HTTP-Method-Override", "X-HTTP-Method", "X-Method-Override"}
436 | values := []string{"", "", "POST", "POST", "POST"}
437 |
438 | for method := 0; method < 5; method++ {
439 | var identifier string
440 | forcePost := false
441 | if method == 0 {
442 | identifier = "simple Fat GET"
443 | } else if method == 1 {
444 | identifier = "POST Fat GET"
445 | forcePost = true
446 | } else {
447 | identifier = fmt.Sprintf("%s Fat GET", headers[method])
448 | }
449 | msg := "Testing now " + identifier + "\n"
450 | Print(msg, NoColor)
451 |
452 | for i, s := range impactfulQueries {
453 | // Parameter Limit
454 | if i >= 500 {
455 | if i == 500 {
456 | Print("Parameter Limit at 500\n", Red)
457 | }
458 | wg.Done()
459 | continue
460 | }
461 | poison := "p" + randInt()
462 |
463 | go func(i int, s string, poison string) {
464 | defer wg.Done()
465 | sem <- 1
466 | defer func() { <-sem }() // Release the semaphore no matter what happens; this prevents deadlocks
467 |
468 | msg := fmt.Sprintf("(%d/%d) %s\n", i+1, len(impactfulQueries), s)
469 | PrintVerbose(msg, NoColor, 2)
470 | rUrl := Config.Website.Url.String()
471 | cb := "cb" + randInt()
472 | bodyString := s + "=" + poison
473 | success := fmt.Sprintf("Query Parameter %s was successfully poisoned via %s! cachebuster %s: %s poison:%s\n", s, identifier, Config.Website.Cache.CBName, cb, poison)
474 |
475 | rp := requestParams{
476 | repResult: &repResult,
477 | headers: []string{headers[method]},
478 | values: []string{values[method]},
479 | identifier: identifier,
480 | poison: poison,
481 | url: rUrl,
482 | cb: cb,
483 | success: success,
484 | bodyString: bodyString,
485 | forcePost: forcePost,
486 | m: &m,
487 | }
488 | responseSplittingHeaders, _, _ := issueRequests(rp)
489 |
490 | // check for response splitting, if poison was reflected in a header
491 | for _, responseSplittingHeader := range responseSplittingHeaders {
492 | msg := fmt.Sprintf("Testing now (%d/%d) %s for Response Splitting, because it was reflected in the header %s\n", i+1, len(impactfulQueries), s, responseSplittingHeader)
493 | PrintVerbose(msg, Cyan, 1)
494 |
495 | rp.url = rUrl
496 | rp.cb = "cb" + randInt()
497 | rp.poison += getRespSplit()
498 | rp.bodyString += getRespSplit()
499 | rp.identifier += " response splitting"
500 | rp.success = fmt.Sprintf("Query Parameter %s successfully poisoned the header %s via %s with Response Splitting! cachebuster %s: %s poison:%s\n", s, responseSplittingHeader, identifier, Config.Website.Cache.CBName, rp.cb, rp.poison)
501 |
502 | issueRequests(rp)
503 | }
504 | }(i, s, poison)
505 | }
506 | wg.Wait()
507 | if method < 4 { wg.Add(len(impactfulQueries)) } // re-arm the WaitGroup only between method iterations
508 | }
509 |
510 | return repResult
511 | }
512 |
513 | /* Check for Parameter Cloaking */
514 | func ScanParameterCloaking() reportResult {
515 | var repResult reportResult
516 | repResult.Technique = "Parameter Cloaking"
517 |
518 | if len(impactfulQueries) == 0 {
519 | msg := "No impactful query parameters were found beforehand. Run the query parameter scan (maybe with a different wordlist)."
520 | Print(msg+"\n", Yellow)
521 | repResult.HasError = true
522 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
523 | return repResult
524 | } else {
525 | msg := fmt.Sprintf("The following parameters were found to be impactful and will be tested for parameter cloaking: %s\n", impactfulQueries)
526 | Print(msg, Cyan)
527 | }
528 |
529 | utm_parameters := []string{"utm_source", "utm_medium", "utm_campaign", "utm_content", "utm_term", "gad_campaignid"}
530 | parameters_to_test := utm_parameters
531 | for _, k := range unkeyedQueries {
532 | if !slices.Contains(parameters_to_test, k) { // only add parameters which are not already in the list
533 | parameters_to_test = append(parameters_to_test, k)
534 | }
535 | }
536 |
537 | /*********** TODO: Check if urlWithCb already contains a utm parameter.
538 | Check whether ? or the query separator is needed
539 | ****************/
540 |
541 | // The first request is made to force a cache miss; the following responses will only
542 | // be cache hits if the tested parameters are unkeyed
543 | rUrl := Config.Website.Url.String()
544 | cb := "cb" + randInt()
545 | rp := requestParams{
546 | identifier: "first request %s",
547 | url: rUrl,
548 | cb: cb,
549 | }
550 | firstRequest(rp)
551 |
552 | threads := Config.Threads
553 | if Config.Website.Cache.CBisHTTPMethod {
554 | threads = 1 // No multithreading if an HTTP method is used as cachebuster; otherwise there will be a lot of false negatives/positives
555 | PrintVerbose("Can only scan single-threaded because an HTTP method is used as cachebuster...\n", Yellow, 1)
556 | }
557 | sem := make(chan int, threads)
558 | var wg sync.WaitGroup
559 | var m sync.Mutex
560 |
561 | unkeyed_parameters := []string{}
562 | cache := Config.Website.Cache
563 | if cache.Indicator == "" || cache.TimeIndicator {
564 | msg := "hit/miss isn't verbose. Can't check which parameters unkeyed, so all utm_parameters and query parameters will be used\n"
565 | Print(msg, Yellow)
566 | unkeyed_parameters = parameters_to_test
567 |
568 | } else {
569 | //Test which parameters are unkeyed
570 | wg.Add(len(parameters_to_test))
571 |
572 | for i, s := range parameters_to_test {
573 | go func(i int, s string) {
574 | defer wg.Done()
575 | sem <- 1
576 | defer func() { <-sem }() // Prevent Deadlocks
577 |
578 | msg := fmt.Sprintf("Testing now for unkeyed parameters (%d/%d) %s\n", i+1, len(parameters_to_test), s)
579 | PrintVerbose(msg, NoColor, 2)
580 |
581 | identifier := fmt.Sprintf("unkeyed parameter %s", s)
582 | //TODO: handle timeouts!!!
583 | rp := requestParams{
584 | identifier: identifier,
585 | url: rUrl,
586 | cb: cb,
587 | parameters: []string{s + "=foobar"}, // parameter with nonsense value
588 | }
589 | _, _, _, respHeader, err := firstRequest(rp)
590 | if err != nil && err.Error() != "stop" { // stop is expected for successful unkeyed parameters, because the first request should be the same as the default/cached request!
591 | m.Lock()
592 | repResult.HasError = true
593 | repResult.ErrorMessages = append(repResult.ErrorMessages, err.Error())
594 | m.Unlock()
595 | return
596 | }
597 | hit := false
598 | for _, v := range respHeader[cache.Indicator] {
599 | indicValue := strings.TrimSpace(strings.ToLower(v))
600 | hit = hit || checkCacheHit(indicValue, cache.Indicator)
601 | } // TODO: add a check for a time-based cache indicator! Then remove cache.TimeIndicator from the if above
602 | if hit {
603 | m.Lock()
604 | unkeyed_parameters = append(unkeyed_parameters, s)
605 | m.Unlock()
606 | }
607 | }(i, s)
608 | }
609 | wg.Wait()
610 | }
611 |
612 | if len(unkeyed_parameters) == 0 {
613 | msg := "No unkeyed parameters could be found. Parameter Cloaking is not possible.\n"
614 | Print(msg, Yellow)
615 | } else {
616 | msg := fmt.Sprintf("The following parameters were found to be unkeyed and will be used for parameter cloaking: %s\n", unkeyed_parameters)
617 | Print(msg, Cyan)
618 | }
619 |
620 | cloak := ";"
621 | if Config.QuerySeparator == ";" {
622 | cloak = "&"
623 | }
624 |
625 | if slices.Contains(impactfulQueries, cache.CBName) { // If the cachebuster is impactful, it shall be tested twice: once appended and once prepended.
626 | impactfulQueries = append(impactfulQueries, cache.CBName)
627 | }
628 |
629 | var cbNameCount int
630 | var cbNameCountMutex sync.Mutex
631 |
632 | for iu, u := range unkeyed_parameters {
633 |
634 | // It's sufficient to test only one unkeyed parameter, as it should behave the same way as the others. However, if there is no cache indicator, test all parameters
635 | if iu > 0 && cache.Indicator != "" {
636 | break
637 | }
638 | wg.Add(len(impactfulQueries))
639 |
640 | for is, s := range impactfulQueries {
641 | // Parameter Limit
642 | if is >= 500 {
643 | if is == 500 {
644 | Print("Parameter Limit at 500\n", Red)
645 | }
646 | wg.Done()
647 | continue
648 | }
649 |
650 | poison := "p" + randInt()
651 |
652 | go func(iu int, u string, is int, s string, poison string) {
653 | defer wg.Done()
654 | sem <- 1
655 | defer func() { <-sem }() // Release the semaphore no matter what happens; this prevents deadlocks
656 |
657 | msg := fmt.Sprintf("Testing now Parameter Cloaking (%d/%d) %s%s%s\n", iu+is+1, len(impactfulQueries)*len(unkeyed_parameters), u, cloak, s)
658 | PrintVerbose(msg, NoColor, 2)
659 | cb := "cb" + randInt()
660 | success := fmt.Sprintf("Query Parameter %s was successfully poisoned via Parameter Cloaking using %s! cachebuster %s:%s poison:%s\n", s, u, Config.Website.Cache.CBName, cb, poison)
661 | identifier := fmt.Sprintf("parameter cloaking %s %s", u, s)
662 |
663 | prependCB := false
664 | if s == cache.CBName { // shall be true for the second test of the cachebuster and false in all other cases
665 | cbNameCountMutex.Lock()
666 | cbNameCount++
667 | if cbNameCount == 2 {
668 | prependCB = true
669 | }
670 | cbNameCountMutex.Unlock()
671 | }
672 |
673 | rp := requestParams{
674 | repResult: &repResult,
675 | headers: []string{""},
676 | values: []string{poison},
677 | parameters: []string{u + "=foobar" + cloak + s + "=" + poison},
678 | identifier: identifier,
679 | poison: poison,
680 | url: rUrl,
681 | cb: cb,
682 | prependCB: prependCB,
683 | success: success,
684 | bodyString: "",
685 | forcePost: false,
686 | m: &m,
687 | newCookie: nil,
688 | }
689 | responseSplittingHeaders, _, _ := issueRequests(rp)
690 |
691 | // check for response splitting, if poison was reflected in a header
692 | for _, responseSplittingHeader := range responseSplittingHeaders {
693 | msg := fmt.Sprintf("Testing now Parameter Cloaking (%d/%d) %s%s%s for Response Splitting, because it was reflected in the header %s\n", iu+is+1, len(impactfulQueries)*len(unkeyed_parameters), u, cloak, s, responseSplittingHeader)
694 | PrintVerbose(msg, Cyan, 1)
695 |
696 | rp.url = rUrl
697 | rp.cb = "cb" + randInt()
698 | rp.poison += getRespSplit()
699 | rp.parameters = []string{u + "=foobar" + cloak + s + "=" + rp.poison}
700 | rp.success = fmt.Sprintf("Query Parameter %s successfully poisoned the header %s with Response Splitting using %s with Parameter Cloaking! cachebuster %s:%s poison:%s\n", s, responseSplittingHeader, u, Config.Website.Cache.CBName, rp.cb, rp.poison)
701 | rp.identifier += " response splitting"
702 |
703 | issueRequests(rp)
704 | }
705 | }(iu, u, is, s, poison)
706 | }
707 | wg.Wait()
708 | }
709 |
710 | return repResult
711 | }
712 |
713 | /* Check for Parameter Pollution */
714 | func ScanParameterPollution() reportResult {
715 | var repResult reportResult
716 | repResult.Technique = "Parameter Pollution"
717 |
718 | if len(impactfulQueries) == 0 {
719 | msg := "No impactful query parameters were found beforehand. Run the query parameter scan (maybe with a different wordlist)."
720 | Print(msg+"\n", Yellow)
721 | repResult.HasError = true
722 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
723 | return repResult
724 | } else {
725 | msg := fmt.Sprintf("The following parameters were found to be impactful and will be tested for parameter pollution: %s\n", impactfulQueries)
726 | Print(msg, Cyan)
727 | }
728 |
729 | threads := Config.Threads
730 | if Config.Website.Cache.CBisHTTPMethod {
731 | threads = 1 // No multithreading if an HTTP method is used as cachebuster; otherwise there will be a lot of false negatives/positives
732 | PrintVerbose("Can only scan single-threaded because an HTTP method is used as cachebuster...\n", Yellow, 1)
733 | }
734 | sem := make(chan int, threads)
735 | var wg sync.WaitGroup
736 | var m sync.Mutex
737 |
738 | if len(impactfulQueries) > 500 { // only test the first 500 impactful queries. TODO: decrease this amount, as such a high number most likely means they are false positives anyway
739 | shortenedQueries := []string{}
740 | for i := range 500 {
741 | shortenedQueries = append(shortenedQueries, impactfulQueries[i])
742 | }
743 | impactfulQueries = shortenedQueries
744 | }
745 | impactfulQueries = append(impactfulQueries, impactfulQueries...) // test each impactful query parameter twice: once with the poison in the first appearance and once with it in the second
746 |
747 | wg.Add(len(impactfulQueries))
748 |
749 | for is, s := range impactfulQueries {
750 | poison := "p" + randInt()
751 | prependCB := false // shall be true for the second test of the cachebuster
752 |
753 | go func(is int, s string, poison string, secondHalf bool) {
754 | defer wg.Done()
755 | sem <- 1
756 | defer func() { <-sem }() // Release the semaphore no matter what happens; this prevents deadlocks
757 |
758 | url := Config.Website.Url.String()
759 | ogValue := "foobar"
760 | if strings.Contains(strings.ToLower(url), "?"+s+"=") || strings.Contains(strings.ToLower(url), "&"+s+"=") {
761 | url, ogValue, _ = removeParam(url, s)
762 | }
763 |
764 | var parameters []string
765 | if secondHalf { // second half of the doubled list: poison in the first appearance
766 | parameters = []string{s + "=" + poison, s + "=" + ogValue}
767 | } else {
768 | parameters = []string{s + "=" + ogValue, s + "=" + poison}
769 | }
770 |
771 | msg := fmt.Sprintf("Testing now Parameter Pollution (%d/%d) %s\n", is+1, len(impactfulQueries)*2, s)
772 | PrintVerbose(msg, NoColor, 2)
773 | cb := "cb" + randInt()
774 | success := fmt.Sprintf("Query Parameter %s was successfully poisoned via Parameter Pollution! cachebuster %s:%s poison:%s\n", s, Config.Website.Cache.CBName, cb, poison)
775 | identifier := fmt.Sprintf("parameter Pollution %s", s)
776 |
777 | rp := requestParams{
778 | repResult: &repResult,
779 | headers: []string{""},
780 | values: []string{poison},
781 | parameters: parameters,
782 | technique: "pollution",
783 | identifier: identifier,
784 | poison: poison,
785 | url: url,
786 | cb: cb,
787 | ogParam: s + "=" + ogValue,
788 | prependCB: prependCB,
789 | success: success,
790 | bodyString: "",
791 | forcePost: false,
792 | m: &m,
793 | newCookie: nil,
794 | }
795 | responseSplittingHeaders, _, _ := issueRequests(rp)
796 |
797 | // check for response splitting, if poison was reflected in a header
798 | for _, responseSplittingHeader := range responseSplittingHeaders {
799 | 				msg := fmt.Sprintf("Testing now Parameter Pollution (%d/%d) %s for Response Splitting, because it was reflected in the header %s\n", is+1, len(impactfulQueries), s, responseSplittingHeader)
800 | PrintVerbose(msg, Cyan, 1)
801 |
802 | rp.url = url
803 | rp.cb = "cb" + randInt()
804 | rp.poison += getRespSplit()
805 |
806 | 				if secondHalf {
807 | parameters = []string{s + "=" + rp.poison, s + "=foobar"}
808 | } else {
809 | parameters = []string{s + "=foobar", s + "=" + rp.poison}
810 | }
811 |
812 | rp.parameters = parameters
813 | 				rp.success = fmt.Sprintf("Query Parameter %s successfully poisoned the header %s with Response Splitting via Parameter Pollution! cachebuster %s:%s poison:%s\n", s, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.poison)
814 | rp.identifier += " response splitting"
815 |
816 | issueRequests(rp)
817 | }
818 | }(is, s, poison, is >= len(impactfulQueries)/2)
819 | }
820 | wg.Wait()
821 |
822 | return repResult
823 | }
824 |
825 | /* Check for Parameter Encoding */
826 | func ScanParameterEncoding() reportResult {
827 | var repResult reportResult
828 | repResult.Technique = "Parameter Encoding"
829 |
830 | if len(impactfulQueries) == 0 {
831 | msg := "No impactful query parameters were found beforehand. Run the query parameter scan (maybe with a different wordlist)."
832 | Print(msg+"\n", Yellow)
833 | repResult.HasError = true
834 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
835 | return repResult
836 | } else {
837 | msg := fmt.Sprintf("The following parameters were found to be impactful and will be tested for parameter encoding: %s\n", impactfulQueries)
838 | Print(msg, Cyan)
839 | }
840 |
841 | threads := Config.Threads
842 | if Config.Website.Cache.CBisHTTPMethod {
843 | 		threads = 1 // No multithreading if an HTTP Method is used... Otherwise there will be a lot of false negatives/positives
844 | 		PrintVerbose("Can only scan single-threaded because an HTTP Method is used as Cachebuster...\n", Yellow, 1)
845 | }
846 | sem := make(chan int, threads)
847 | var wg sync.WaitGroup
848 | var m sync.Mutex
849 |
850 | 	if len(impactfulQueries) > 500 { // only test the first 500 impactful queries. TODO: decrease the amount, as such a high amount most likely means they are false positives anyway
851 | shortenedQueries := []string{}
852 | for i := range 500 {
853 | shortenedQueries = append(shortenedQueries, impactfulQueries[i])
854 | }
855 | impactfulQueries = shortenedQueries
856 | }
857 | 	impactfulQueries = append(impactfulQueries, impactfulQueries...) // we want to test each impactful query parameter twice: once with the poison in the first occurrence and once with it in the second
858 |
859 | wg.Add(len(impactfulQueries))
860 |
861 | for is, s := range impactfulQueries {
862 | poison := "p" + randInt()
863 | prependCB := false // shall be true for the second test of the cachebuster
864 |
865 | go func(is int, s string, poison string, secondHalf bool) {
866 | defer wg.Done()
867 | sem <- 1
868 | 			defer func() { <-sem }() // release the semaphore no matter what happens; this prevents deadlocks
869 |
870 | url := Config.Website.Url.String()
871 | ogValue := "foobar"
872 | if strings.Contains(strings.ToLower(url), "?"+s+"=") || strings.Contains(strings.ToLower(url), "&"+s+"=") {
873 | url, ogValue, _ = removeParam(url, s)
874 | }
875 |
876 | var parameters []string
877 | 			if secondHalf {
878 | parameters = []string{urlEncodeAll(s) + "=" + poison, s + "=" + ogValue}
879 | } else {
880 | parameters = []string{s + "=" + ogValue, urlEncodeAll(s) + "=" + poison}
881 | }
882 |
883 | 			msg := fmt.Sprintf("Testing now Parameter Encoding (%d/%d) %s\n", is+1, len(impactfulQueries), s)
884 | PrintVerbose(msg, NoColor, 2)
885 | cb := "cb" + randInt()
886 | success := fmt.Sprintf("Query Parameter %s was successfully poisoned via Parameter Encoding! cachebuster %s:%s poison:%s\n", s, Config.Website.Cache.CBName, cb, poison)
887 | 			identifier := fmt.Sprintf("Parameter Encoding %s", s)
888 |
889 | rp := requestParams{
890 | repResult: &repResult,
891 | headers: []string{""},
892 | values: []string{poison},
893 | parameters: parameters,
894 | technique: "encoding",
895 | identifier: identifier,
896 | poison: poison,
897 | url: url,
898 | cb: cb,
899 | ogParam: s + "=" + ogValue,
900 | prependCB: prependCB,
901 | success: success,
902 | bodyString: "",
903 | forcePost: false,
904 | m: &m,
905 | newCookie: nil,
906 | }
907 | responseSplittingHeaders, _, _ := issueRequests(rp)
908 |
909 | // check for response splitting, if poison was reflected in a header
910 | for _, responseSplittingHeader := range responseSplittingHeaders {
911 | 				msg := fmt.Sprintf("Testing now Parameter Encoding (%d/%d) %s for Response Splitting, because it was reflected in the header %s\n", is+1, len(impactfulQueries), s, responseSplittingHeader)
912 | PrintVerbose(msg, Cyan, 1)
913 |
914 | rp.url = url
915 | rp.cb = "cb" + randInt()
916 | rp.poison += getRespSplit()
917 |
918 | 				if secondHalf {
919 | parameters = []string{urlEncodeAll(s) + "=" + rp.poison, s + "=foobar"}
920 | } else {
921 | parameters = []string{s + "=foobar", urlEncodeAll(s) + "=" + rp.poison}
922 | }
923 |
924 | rp.parameters = parameters
925 | 				rp.success = fmt.Sprintf("Query Parameter %s successfully poisoned the header %s with Response Splitting via Parameter Encoding! cachebuster %s:%s poison:%s\n", s, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.poison)
926 | rp.identifier += " response splitting"
927 |
928 | issueRequests(rp)
929 | }
930 | }(is, s, poison, is >= len(impactfulQueries)/2)
931 | }
932 | wg.Wait()
933 |
934 | return repResult
935 | }
936 |
937 | /* Check for different DOS techniques */
938 | func DOS() reportResult {
939 | var repResult reportResult
940 | repResult.Technique = "DOS"
941 |
942 | hho(&repResult)
943 |
944 | 	// HMC (Header Meta Character)
945 | headers := []string{"X-Metachar-Header"}
946 | 	values := []string{"n\nn", "r\rr", "a\aa", "x00\x00x00", "b\bb", "x1b\x1bx1b", "v\vv", "f\ff", "u0000\u0000u0000"} // TODO: meta chars which are not functional in header values due to fasthttp restrictions could be put into the value by using \r\n in the header name
947 |
948 | for _, header := range headers {
949 | headerDOSTemplate(&repResult, values, header, "HMC ", true)
950 | }
951 |
952 | headers = []string{"X-Meta\nchar-Header", "X-Meta\rchar-Header", "X-Meta\achar-Header", "X-Meta\x00char-Header", "X-Meta\bchar-Header", "X-Meta\x1bchar-Header", "X-Meta\vchar-Header", "X-Meta\fchar-Header", "X-Meta\u0000char-Header"}
953 | 	values = []string{"n", "r", "a", "x00", "b", "x1b", "v", "f", "u0000"}
954 |
955 | for i, header := range headers {
956 | headerDOSTemplate(&repResult, []string{values[i]}, header, "HMC ", true)
957 | }
958 |
959 | // HMO (HTTP Method Override)
960 | values = []string{"GET", "POST", "DELETE", "HEAD", "OPTIONS", "CONNECT", "PATCH", "PUT", "TRACE", "NONSENSE"}
961 | headers = []string{"X-HTTP-Method-Override", "X-HTTP-Method", "X-Method-Override"}
962 | for _, header := range headers {
963 | headerDOSTemplate(&repResult, values, header, "HMO ", true)
964 | }
965 |
966 | 	// DOS via not implemented Transfer-Encoding
967 | 	values = []string{"asdf"}
968 | 	headerDOSTemplate(&repResult, values, "zTRANSFER-ENCODING", "Unsupported Transfer-Encoding ", true)
969 |
970 | // DOS via incompatible/outdated browser agent
971 | values = []string{"Mozilla/5.0 (Windows; U; MSIE 9.0; WIndows NT 9.0; en-US))"}
972 | headerDOSTemplate(&repResult, values, "User-Agent", "incompatible browser ", true)
973 |
974 | // DOS via blacklisted security scanner user agent // TODO: Also add bots? Or will the IP be blocked too fast
975 | values = []string{"WebCacheVulnerabilityScanner v" + version, "Fuzz Faster U Fool", "Nuclei - Open-source project (github.com/projectdiscovery/nuclei)", "sqlmap/1.3.11#stable (http://sqlmap.org)", "gobuster/3.1.0", "Wfuzz/2.2", "Mozilla/5.0 (compatible; Nmap Scripting Engine; https://nmap.org/book/nse.html)", "masscan/1.3", "blekkobot"}
976 | headerDOSTemplate(&repResult, values, "User-Agent", "blacklisted security scanners ", true)
977 |
978 | // DOS via illegal header name
979 | 	/* Currently disabled because net/http throws an error due to the illegal character. TODO: workaround, see https://stackoverflow.com/questions/70678016/how-to-bypass-golangs-http-request-net-http-rfc-compliance
980 | values = []string{"foobar"}
981 | headerDOSTemplate(&repResult, values, "Ill\\egal", "illegal header name ", true)
982 | */
983 |
984 | // DOS via Max-Forwards (Webserver/Cache returns request)
985 | values = []string{"0", "1", "2"}
986 | headerDOSTemplate(&repResult, values, "Max-Forwards", "", true)
987 |
988 | 	// DOS via WAF blocking because of a blacklisted word
989 | 	// TODO: change header to a probably whitelisted header; more blacklist words?
990 | values = []string{".burpcollaborator.net", ""}
991 | headerDOSTemplate(&repResult, values, "Any-Header", "blacklist ", true)
992 |
993 | // DOS via Range
994 | values = []string{"bytes=m10x", "bytes=9-4", "bytes=-1024,0", "bytes=0-,0-,0-,0-"}
995 | headerDOSTemplate(&repResult, values, "Range", "", true)
996 |
997 | // DOS via X-Forwarded-Protocol
998 | values = []string{"http", "https", "ssl", "m10x"}
999 | headerDOSTemplate(&repResult, values, "X-Forwarded-Protocol", "", true)
1000 |
1001 | // DOS via X-Forwarded-Scheme
1002 | values = []string{"http", "https", "nothttps", "m10x"}
1003 | headerDOSTemplate(&repResult, values, "X-Forwarded-Scheme", "", true)
1004 |
1005 | 	// DOS via X-Forwarded-SSL
1006 | values = []string{"on", "off", "m10x"}
1007 | headerDOSTemplate(&repResult, values, "X-Forwarded-SSL", "", true)
1008 |
1009 | // DOS via Upgrade
1010 | values = []string{"HTTP/0.9", "Websocket, RTA/x11", "HTTP/2.0, SHTTP/1.3, IRC/6.9", "m10x"}
1011 | headerDOSTemplate(&repResult, values, "Upgrade", "", true)
1012 |
1013 | // DOS via invalid Content-Type
1014 | values = []string{"m10x"}
1015 | headerDOSTemplate(&repResult, values, "Content-Type", "", true)
1016 |
1017 | // DOS via middleware prefetch (next.js specific, CVE-2023-46298) TODO check for {} in response
1018 | values = []string{"1"}
1019 | headerDOSTemplate(&repResult, values, "X-Middleware-Prefetch", "", true)
1020 |
1021 | // DOS via Rsc (next.js specific) TODO check for format in response
1022 | values = []string{"1"}
1023 | headerDOSTemplate(&repResult, values, "Rsc", "", true)
1024 |
1025 | return repResult
1026 | }
1027 |
1028 | /* HTTP Header Oversize */
1029 | func hho(repResult *reportResult) {
1030 | repetitions := []int{50, 100, 200} //4k, 8k, 16k
1031 |
1032 | msg := fmt.Sprintf("Testing now HHO with Size Limits of ~80*%d bytes\n", repetitions)
1033 | PrintVerbose(msg, NoColor, 2)
1034 |
1035 | threads := Config.Threads
1036 | if Config.Website.Cache.CBisHTTPMethod {
1037 | 		threads = 1 // No multithreading if an HTTP Method is used... Otherwise there will be a lot of false negatives/positives
1038 | 		PrintVerbose("Can only scan single-threaded because an HTTP Method is used as Cachebuster...\n", Yellow, 1)
1039 | }
1040 | sem := make(chan int, threads)
1041 | var wg sync.WaitGroup
1042 | wg.Add(len(repetitions))
1043 | var m sync.Mutex
1044 |
1045 | for _, repetition := range repetitions {
1046 | go func(repetition int) {
1047 | defer wg.Done()
1048 | sem <- 1
1049 | 			defer func() { <-sem }() // release the semaphore no matter what happens; this prevents deadlocks
1050 |
1051 | limit := repetition * 8 / 100
1052 | //msg := fmt.Sprintf("Testing now HHO with Size Limit %dk bytes\n", limit)
1053 | //Print(msg, NoColor)
1054 |
1055 | headers := []string{}
1056 | values := []string{}
1057 |
1058 | for i := range repetition {
1059 | headername := fmt.Sprintf("X-Oversized-Header-%d", i+1)
1060 | value := "Big-Value-000000000000000000000000000000000000000000000000000000000000000000000000000000"
1061 | headers = append(headers, headername)
1062 | values = append(values, value)
1063 | }
1064 |
1065 | rUrl := Config.Website.Url.String()
1066 | cb := "cb" + randInt()
1067 | success := fmt.Sprintf("HHO DOS was successfully poisoned! cachebuster %s: %s \n%s\n", Config.Website.Cache.CBName, cb, rUrl)
1068 | identifier := fmt.Sprintf("HHO with limit of %dk bytes", limit)
1069 | rp := requestParams{
1070 | repResult: repResult,
1071 | headers: headers,
1072 | values: values,
1073 | identifier: identifier,
1074 | url: rUrl,
1075 | cb: cb,
1076 | success: success,
1077 | m: &m,
1078 | }
1079 |
1080 | _, _, _ = issueRequests(rp)
1081 | }(repetition)
1082 | }
1083 |
1084 | wg.Wait()
1085 | }
1086 |
1087 | func headerDOSTemplate(repResult *reportResult, values []string, header string, msgextra string, httpConform bool) {
1088 | msg := fmt.Sprintf("Testing now %sDOS with header %s and values %s\n", msgextra, header, values)
1089 | PrintVerbose(msg, NoColor, 2)
1090 |
1091 | threads := Config.Threads
1092 | if Config.Website.Cache.CBisHTTPMethod {
1093 | 		threads = 1 // No multithreading if an HTTP Method is used... Otherwise there will be a lot of false negatives/positives
1094 | 		PrintVerbose("Can only scan single-threaded because an HTTP Method is used as Cachebuster...\n", Yellow, 1)
1095 | }
1096 | sem := make(chan int, threads)
1097 | var wg sync.WaitGroup
1098 | wg.Add(len(values))
1099 | var m sync.Mutex
1100 |
1101 | for _, value := range values {
1102 |
1103 | go func(value string, httpConform bool) {
1104 | defer wg.Done()
1105 | sem <- 1
1106 | 			defer func() { <-sem }() // release the semaphore no matter what happens; this prevents deadlocks
1107 |
1108 | msg := fmt.Sprintf("Testing now %q Header DOS with %q\n", header, value) // %q for raw printing of control characters
1109 | PrintVerbose(msg, NoColor, 2)
1110 | rUrl := Config.Website.Url.String()
1111 | cb := "cb" + randInt()
1112 | success := fmt.Sprintf("%sDOS with header %q was successfully poisoned! cachebuster %s: %s poison: %q\n", msgextra, header, Config.Website.Cache.CBName, cb, value)
1113 | identifier := fmt.Sprintf("%s%q with %q", msgextra, header, value)
1114 |
1115 | rp := requestParams{
1116 | repResult: repResult,
1117 | headers: []string{header},
1118 | values: []string{value},
1119 | identifier: identifier,
1120 | poison: "",
1121 | url: rUrl,
1122 | cb: cb,
1123 | success: success,
1124 | bodyString: "",
1125 | forcePost: false,
1126 | m: &m,
1127 | newCookie: nil,
1128 | }
1129 | responseSplittingHeaders, _, _ := issueRequests(rp)
1130 |
1131 | // check for response splitting, if poison was reflected in a header
1132 | for _, responseSplittingHeader := range responseSplittingHeaders {
1133 | 				msg := fmt.Sprintf("Testing now %q Header DOS with %q for Response Splitting, because it was reflected in the header %s\n", header, value, responseSplittingHeader)
1134 | PrintVerbose(msg, Cyan, 1)
1135 |
1136 | rp.values[0] += getRespSplit()
1137 | rp.url = rUrl
1138 | rp.cb = "cb" + randInt()
1139 | 				rp.success = fmt.Sprintf("%sDOS with header %q successfully poisoned the header %s with Response Splitting! cachebuster %s: %s poison: %q\n", msgextra, header, responseSplittingHeader, Config.Website.Cache.CBName, rp.cb, rp.values[0])
1140 | 				rp.identifier += " with response splitting"
1141 |
1142 | issueRequests(rp)
1143 | }
1144 | }(value, httpConform)
1145 | }
1146 | wg.Wait()
1147 | }
1148 |
1149 | func ScanCSS() reportResult {
1150 | var repResult reportResult
1151 | repResult.Technique = "CSS poisoning"
1152 |
1153 | webStruct, err := GetWebsite(Config.Website.Url.String(), false, false) // get body without cachebuster. TODO use response w/o cachebuster from recon, so it doesn't have to be fetched again
1154 | if err != nil {
1155 | msg := fmt.Sprintf("Error while fetching css files %s: %s\n", Config.Website.Url.String(), err.Error())
1156 | Print(msg, Red)
1157 | repResult.ErrorMessages = append(repResult.ErrorMessages, msg)
1158 | return repResult
1159 | }
1160 | bodyReader := strings.NewReader(webStruct.Body) // use body without cachebuster, so the css files can be found
1161 | tokenizer := html.NewTokenizer(bodyReader)
1162 |
1163 | var urls []string
1164 |
1165 | eof := false
1166 | for !eof {
1167 | tokentype := tokenizer.Next()
1168 |
1169 | switch tokentype {
1170 | case html.StartTagToken, html.SelfClosingTagToken:
1171 |
1172 | token := tokenizer.Token()
1173 |
1174 | if token.Data == "link" {
1175 | for _, a := range token.Attr {
1176 | if a.Key == "href" {
1177 | if !strings.HasSuffix(a.Val, ".css") && !strings.Contains(a.Val, ".css?") {
1178 | break
1179 | }
1180 | tempURL := addDomain(a.Val, Config.Website.Domain)
1181 | if tempURL != "" {
1182 | urls = append(urls, tempURL)
1183 | }
1184 | break
1185 | }
1186 | }
1187 | }
1188 | 		// When EOF is reached, an html.ErrorToken appears
1189 | case html.ErrorToken:
1190 | err := tokenizer.Err()
1191 | if err == io.EOF {
1192 | eof = true
1193 | break
1194 | }
1195 | msg := fmt.Sprintf("error tokenizing HTML: %+v", tokenizer.Err())
1196 | Print(msg, Yellow)
1197 | }
1198 | }
1199 |
1200 | if len(urls) == 0 {
1201 | msg := "No CSS files were found.\n"
1202 | PrintVerbose(msg, Yellow, 1)
1203 |
1204 | return repResult
1205 | }
1206 | msg := fmt.Sprintf("Testing the following CSS files for poisoning\n%s\n", urls)
1207 | PrintVerbose(msg, NoColor, 1)
1208 |
1209 | threads := Config.Threads
1210 | if Config.Website.Cache.CBisHTTPMethod {
1211 | threads = 1 // No multithreading if HTTP Method is used... Otherwise there will be a lot of false negatives/positives
1212 | PrintVerbose("Can only scan single threaded because a HTTP Method is used as Cachebuster...\n", Yellow, 1)
1213 | }
1214 | sem := make(chan int, threads)
1215 | var wg sync.WaitGroup
1216 | wg.Add(len(urls))
1217 | var m sync.Mutex
1218 |
1219 | for _, url := range urls {
1220 |
1221 | go func(url string) {
1222 | defer wg.Done()
1223 | sem <- 1
1224 | 			defer func() { <-sem }() // release the semaphore no matter what happens; this prevents deadlocks
1225 |
1226 | //msg := fmt.Sprintf("Testing now %s Header DOS with %s\n", header, value)
1227 | //Print(msg, NoColor)
1228 |
1229 | urlWithCb, cb := addCachebusterParameter(url, "", Config.Website.Cache.CBName, false)
1230 | var repCheck reportCheck
1231 | repCheck.URL = url
1232 | repCheck.Identifier = "n/a"
1233 |
1234 | identifier := url
1235 | rp := requestParams{
1236 | identifier: identifier,
1237 | url: urlWithCb,
1238 | cb: "cb" + randInt(),
1239 | }
1240 | body, status, repRequest, _, err := firstRequest(rp)
1241 | if err != nil {
1242 | if err.Error() != "stop" {
1243 | m.Lock()
1244 | repResult.HasError = true
1245 | repResult.ErrorMessages = append(repResult.ErrorMessages, err.Error())
1246 | m.Unlock()
1247 | }
1248 | return
1249 | }
1250 | repCheck.Request = repRequest
1251 | if status != 200 {
1252 | return
1253 | }
1254 |
1255 | if strings.Contains(string(body), cb) {
1256 | 				msg = fmt.Sprintf("The following CSS file reflects the URL with the cachebuster %s\n%s\n", cb, url)
1257 | Print(msg, Cyan)
1258 | }
1259 |
1260 | body, _, repRequest, _, err = secondRequest(rp)
1261 | if err != nil {
1262 | if err.Error() != "stop" {
1263 | m.Lock()
1264 | repResult.HasError = true
1265 | repResult.ErrorMessages = append(repResult.ErrorMessages, err.Error())
1266 | m.Unlock()
1267 | }
1268 | return
1269 | }
1270 | repCheck.SecondRequest = &repRequest
1271 |
1272 | if strings.Contains(string(body), cb) {
1273 | PrintNewLine()
1274 | msg = fmt.Sprintf("A CSS file was successfully poisoned! cachebuster %s: %s\nURL: %s\n", Config.Website.Cache.CBName, cb, url)
1275 | Print(msg, Green)
1276 | 				reason := "CSS reflects URL"
1277 | 				msg = fmt.Sprintf("Reason: %s\n", reason)
1278 | 				Print(msg, Green)
1279 | 				repCheck.Reason = reason
1280 |
1281 | m.Lock()
1282 | repResult.Vulnerable = true
1283 | repResult.Checks = append(repResult.Checks, repCheck)
1284 | m.Unlock()
1285 | }
1286 | }(url)
1287 |
1288 | }
1289 | wg.Wait()
1290 |
1291 | return repResult
1292 | }
1293 |
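1294 | /*
1295 | A minimal sketch of the idea behind ScanParameterPollution and ScanParameterEncoding:
1296 | each impactful parameter is sent twice, once with its original value and once with the
1297 | poison. If cache and origin pick different occurrences (e.g. the cache keys on one
1298 | value while the backend reflects the other), the poisoned response is stored under the
1299 | benign cache key. The helper below is hypothetical and only illustrates how the two
1300 | orderings are built, mirroring the parameters slices above:
1301 |
1302 | 	func pollutionPairs(name, ogValue, poison string) [][]string {
1303 | 		return [][]string{
1304 | 			{name + "=" + poison, name + "=" + ogValue}, // poison in the first occurrence
1305 | 			{name + "=" + ogValue, name + "=" + poison}, // poison in the second occurrence
1306 | 		}
1307 | 	}
1308 | */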
--------------------------------------------------------------------------------
/pkg/utils.go:
--------------------------------------------------------------------------------
1 | package pkg
2 |
3 | import (
4 | "context"
5 | "crypto/rand"
6 | "crypto/tls"
7 | "fmt"
8 | "log"
9 | "math/big"
10 | "net/http"
11 | "net/url"
12 | "os"
13 | "strconv"
14 | "strings"
15 | "time"
16 |
17 | "github.com/fatih/color"
18 | "github.com/valyala/fasthttp"
19 | "github.com/valyala/fasthttp/fasthttpproxy"
20 | )
21 |
22 | var (
23 | NoColor = 0
24 | Red = 1
25 | Yellow = 2
26 | Green = 3
27 | Cyan = 4
28 | )
29 |
30 | var client *fasthttp.Client
31 |
32 | func InitClient() {
33 | var dialer fasthttp.DialFunc
34 | if Config.UseProxy {
35 | dialer = fasthttpproxy.FasthttpHTTPDialer(Config.ProxyURL)
36 | }
37 |
38 | client = &fasthttp.Client{
39 | Dial: dialer,
40 | DisablePathNormalizing: true, // needed for path traversal (cache deception)
41 | DisableHeaderNamesNormalizing: true, // needed for request smuggling and other techniques using non-normalized headers
42 | ReadTimeout: time.Duration(Config.TimeOut) * time.Second,
43 | WriteTimeout: time.Duration(Config.TimeOut) * time.Second,
44 | TLSConfig: &tls.Config{InsecureSkipVerify: true},
45 | ReadBufferSize: 8 * 1024}
46 | }
47 |
48 | func PrintNewLine() {
49 | Print("\n", NoColor)
50 | }
51 |
52 | func PrintLog(msg string) {
53 | if Config.GenerateLog && Config.Intitialized {
54 | log.Print(msg)
55 | }
56 | }
57 |
58 | func PrintVerbose(msg string, c int, threshold int) {
59 | switch c {
60 | case Red:
61 | PrintLog("[ERR] " + msg)
62 | msg = color.RedString("[ERR] ") + msg
63 | case Yellow:
64 | PrintLog("[!] " + msg)
65 | msg = color.YellowString("[!] ") + msg
66 | case Green:
67 | PrintLog("[+] " + msg)
68 | msg = color.GreenString("[+] ") + msg
69 | case Cyan:
70 | PrintLog("[*] " + msg)
71 | msg = color.CyanString("[*] ") + msg
72 | default:
73 | PrintLog(msg)
74 | }
75 |
76 | if Config.Verbosity >= threshold || !Config.Intitialized {
77 | fmt.Print(msg)
78 | }
79 | }
80 |
81 | func Print(msg string, c int) {
82 | PrintVerbose(msg, c, 0)
83 | }
84 |
85 | func PrintFatal(msg string) {
86 | Print(msg, Red)
87 | os.Exit(1)
88 | }
89 |
90 | func ReadLocalFile(path string, name string) []string {
91 | path = strings.TrimPrefix(path, "file:")
92 |
93 | if strings.HasPrefix(strings.ToLower(path), "file:") {
94 | 		PrintFatal("Please make sure that the file: prefix is lowercase")
95 | }
96 |
97 | w, err := os.ReadFile(path)
98 | if err != nil {
99 | additional := ""
100 | if name == "header" {
101 | additional = "Use the flag \"-hw path/to/wordlist\" to specify the path to a header wordlist\n"
102 | } else if name == "parameter" {
103 | additional = "Use the flag \"-pw path/to/wordlist\" to specify the path to a parameter wordlist\n"
104 | }
105 | PrintFatal("The specified " + name + " file path " + path + " couldn't be found: " + err.Error() + "\n" + additional)
106 | }
107 |
108 | return strings.Split(string(w), "\n")
109 | }
110 |
111 | func setRequest(req *fasthttp.Request, doPost bool, cb string, cookie map[string]string, prependCB bool) {
112 |
113 | cache := Config.Website.Cache
114 | if cb != "" && cache.CBisParameter {
115 | var newUrl string
116 | newUrl, _ = addCachebusterParameter(req.URI().String(), cb, cache.CBName, prependCB)
117 |
118 | newURL, err := url.Parse(newUrl)
119 | if err != nil {
120 | 		msg := "Error converting " + newUrl + " to URL: " + err.Error() + "\n"
121 | Print(msg, Red)
122 | }
123 | req.SetRequestURI(newURL.String())
124 | req.UseHostHeader = true
125 | }
126 |
127 | setRequestHeaders(req, cb)
128 | setRequestCookies(req, cb, cookie)
129 |
130 | // Overwrite the content type if specified
131 | if doPost {
132 | if Config.ContentType != "" {
133 | req.Header.SetContentType(Config.ContentType)
134 | }
135 | }
136 | }
137 |
138 | func responseCookiesToMap(resp *fasthttp.Response, cookieMap map[string]string) map[string]string {
139 | resp.Header.VisitAllCookie(func(key, value []byte) {
140 | 		c := &fasthttp.Cookie{}
141 | 		// parse the Set-Cookie value once; skip the cookie if parsing fails
142 | 		if err := c.ParseBytes(value); err == nil {
143 | cookieMap[string(key)] = string(c.Value())
144 | } else {
145 | msg := fmt.Sprintf("Error parsing cookie %s: %s\n", string(value), err.Error())
146 | Print(msg, Red)
147 | }
148 | })
149 |
150 | return cookieMap
151 | }
152 |
153 | func urlEncodeAll(input string) string {
154 | encoded := ""
155 | for i := 0; i < len(input); i++ {
156 | encoded += fmt.Sprintf("%%%02X", input[i])
157 | }
158 | return encoded
159 | }
160 |
161 | /* TODO: as with requestCookies, only take the first occurrence of a header */
162 | func setRequestHeaders(req *fasthttp.Request, cb string) {
163 | cache := Config.Website.Cache
164 |
165 | req.Header.Set("User-Agent", useragent)
166 | for _, h := range Config.Headers {
167 | h = strings.TrimSuffix(h, "\r")
168 | h = strings.TrimSpace(h)
169 | if h == "" {
170 | continue
171 | } else if !strings.Contains(h, ":") {
172 | 			msg := "Specified header " + h + " doesn't contain a : and will be skipped\n"
173 | Print(msg, Yellow)
174 | continue
175 | } else {
176 | hSplitted := strings.SplitN(h, ":", 2)
177 |
178 | 			// is this header the cachebuster? If so, use the cachebuster value instead of the configured one
179 | 			if cb != "" && cache.CBisHeader && strings.EqualFold(hSplitted[0], cache.CBName) {
180 | 				req.Header.Set(cache.CBName, cb)
181 | 			} else {
182 | 				req.Header.Set(strings.TrimSpace(hSplitted[0]), strings.TrimSpace(hSplitted[1]))
183 | 			}
184 | 		}
185 | }
186 | }
187 |
188 | func setRequestCookies(req *fasthttp.Request, cb string, cookie map[string]string) {
189 | cache := Config.Website.Cache
190 |
191 | for k, v := range Config.Website.Cookies {
192 | if cb != "" && cache.CBisCookie && k == cache.CBName {
193 | if k == cookie["key"] {
194 | msg := "Can't test cookie " + k + " for Web Cache Poisoning, as it is used as Cachebuster\n"
195 | Print(msg, Yellow)
196 | continue
197 | }
198 | k = cb
199 | } else if k == cookie["key"] {
200 | v = cookie["value"]
201 | }
202 | req.Header.SetCookie(k, v)
203 | }
204 | }
205 |
206 | func addCachebusterParameter(strUrl string, cbvalue string, cb string, prepend bool) (string, string) {
207 | if cbvalue == "" {
208 | cbvalue = "cb" + randInt()
209 | }
210 | if cb == "" {
211 | cb = Config.Website.Cache.CBName
212 | }
213 | if !strings.Contains(strUrl, "?") {
214 | strUrl += "?" + cb + "=" + cbvalue
215 | } else {
216 | if prepend {
217 | parts := strings.SplitN(strUrl, "?", 2)
218 | strUrl = parts[0] + "?" + cb + "=" + cbvalue + Config.QuerySeparator + parts[1]
219 | } else {
220 | strUrl += Config.QuerySeparator + cb + "=" + cbvalue
221 | }
222 | }
223 |
224 | return strUrl, cbvalue
225 | }
226 |
227 | // RandomString generates a random string of the specified length
228 | func RandomString(length int) string {
229 | const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
230 | result := make([]byte, length)
231 | for i := range result {
232 | n, err := rand.Int(rand.Reader, big.NewInt(int64(len(charset))))
233 | if err != nil {
234 | Print(err.Error(), Red)
235 | return "99999999"
236 | }
237 | result[i] = charset[n.Int64()]
238 | }
239 | return string(result)
240 | }
241 |
242 | func removeParam(rawURL string, paramToRemove string) (string, string, error) {
243 | // Parse the URL
244 | parsedURL, err := url.Parse(rawURL)
245 | if err != nil {
246 | return "", "", err
247 | }
248 |
249 | // Get current query parameters
250 | query := parsedURL.Query()
251 | ogValue := query.Get(paramToRemove)
252 |
253 | // Check if the parameter exists and remove it
254 | if _, exists := query[paramToRemove]; exists {
255 | query.Del(paramToRemove)
256 | parsedURL.RawQuery = query.Encode()
257 | }
258 |
259 | return parsedURL.String(), ogValue, nil
260 | }
261 |
262 | /* Create a random long integer */
263 | func randInt() string {
264 | min := int64(100000000000)
265 | max := int64(999999999999)
266 | // Range size
267 | rangeSize := max - min + 1
268 |
269 | n, err := rand.Int(rand.Reader, big.NewInt(rangeSize))
270 | if err != nil {
271 | Print(err.Error(), Red)
272 | return "999999999999"
273 | }
274 |
275 | result := n.Int64() + min
276 | return strconv.FormatInt(result, 10)
277 | }
278 |
279 | func waitLimiter(identifier string) {
280 | err := Config.Limiter.Wait(context.Background())
281 | if err != nil {
282 | msg := identifier + " rate Wait: " + err.Error()
283 | Print(msg, Red)
284 | }
285 | }
286 |
287 | func searchBodyHeadersForString(cb string, body string, headers http.Header) bool {
288 | if strings.Contains(body, cb) {
289 | return true
290 | }
291 | for _, h := range headers {
292 | for _, v := range h {
293 | if strings.Contains(v, cb) {
294 | return true
295 | }
296 | }
297 | }
298 | return false
299 | }
300 |
301 | // check if cache was hit
302 | func checkCacheHit(value string, indicator string) bool {
303 | if indicator == "" {
304 | indicator = Config.Website.Cache.Indicator
305 | }
306 | if strings.EqualFold("age", indicator) {
307 | value = strings.TrimSpace(value)
308 | if value != "0" && value != "" {
309 | return true
310 | }
311 | } else if strings.EqualFold("x-iinfo", indicator) {
312 | 		// split the string by spaces
313 | parts := strings.Split(value, " ")
314 |
315 | 		// check whether the second part exists
316 | if len(parts) > 1 {
317 | secondPart := parts[1]
318 |
319 | 			// make sure the second part is at least two characters long
320 | if len(secondPart) > 1 {
321 | secondChar := strings.ToUpper(string(secondPart[1]))
322 | if secondChar == "C" || secondChar == "V" {
323 | return true
324 | }
325 | }
326 | }
327 | 		// x-cache-hits may have "0,>0" or ">0,0" as value. Both responses are cached
328 | 	} else if strings.EqualFold("x-cache-hits", indicator) {
329 | 		for _, x := range strings.Split(value, ",") {
330 | x = strings.TrimSpace(x)
331 | if x != "0" {
332 | return true
333 | }
334 | }
335 | } else if strings.EqualFold("x-cc-via", indicator) {
336 | 		if strings.Contains(value, "[H,") {
337 | return true
338 | }
339 | // Some Headers may have "miss,hit" or "hit,miss" as value. But both are cached responses.
340 | } else if strings.Contains(strings.ToLower(value), "hit") || strings.Contains(strings.ToLower(value), "cached") {
341 | return true
342 | }
343 | return false
344 | }
345 |
346 | // like grep -C
347 | func findOccurrencesWithContext(body, search string, context int) []string {
348 | var results []string
349 | inputLen := len(body)
350 | searchLen := len(search)
351 |
352 | for i := 0; i <= inputLen-searchLen; {
353 | if body[i:i+searchLen] == search {
354 | start := i - context
355 | if start < 0 {
356 | start = 0
357 | }
358 | end := i + searchLen + context
359 | if end > inputLen {
360 | end = inputLen
361 | }
362 | results = append(results, body[start:end])
363 | i += searchLen // skip past this match
364 | } else {
365 | i++
366 | }
367 | }
368 |
369 | return results
370 | }
371 |
372 | func headerToMultiMap(h *fasthttp.ResponseHeader) map[string][]string {
373 | m := make(map[string][]string)
374 | h.VisitAll(func(key, value []byte) {
375 | k := string(key)
376 | v := string(value)
377 | m[k] = append(m[k], v)
378 | })
379 | return m
380 | }
381 |
382 | func analyzeCacheIndicator(headers http.Header) (indicators []string) {
383 | customCacheHeader := strings.ToLower(Config.CacheHeader)
384 | for key, val := range headers {
385 | switch strings.ToLower(key) {
386 | case "cache-control", "pragma", "vary", "expires":
387 | msg := fmt.Sprintf("%s header was found: %s \n", key, val)
388 | PrintVerbose(msg, Cyan, 1)
389 | case "x-cache", "cf-cache-status", "x-drupal-cache", "x-varnish-cache", "akamai-cache-status", "server-timing", "x-iinfo", "x-nc", "x-hs-cf-cache-status", "x-proxy-cache", "x-cache-hits", "x-cache-status", "x-cache-info", "x-rack-cache", "cdn_cache_status", "cache_status", "x-akamai-cache", "x-akamai-cache-remote", "x-cache-remote", "x-litespeed-cache", "x-kinsta-cache", "x-ac", "cache-status", "ki-cf-cache-status", "eo-cache-status", "x-77-cache", "x-cache-lookup", "x-cc-via", customCacheHeader:
390 | // CacheHeader flag might not be set (=> ""). Continue in this case
391 | if key == "" {
392 | continue
393 | }
394 | indicators = append(indicators, key)
395 | msg := fmt.Sprintf("%s header was found: %s \n", key, val)
396 | PrintVerbose(msg, Cyan, 1)
397 | case "age":
398 | 			// only set if it wasn't set to x-cache or sth. similar beforehand
399 | indicators = append(indicators, key)
400 | msg := fmt.Sprintf("%s header was found: %s\n", key, val)
401 | PrintVerbose(msg, Cyan, 1)
402 | }
403 | }
404 | return indicators
405 | }
406 |
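407 | /*
408 | Usage sketches for some of the helpers above. The concrete values are illustrative
409 | assumptions: randInt() output varies at runtime and Config.QuerySeparator is assumed
410 | to be "&".
411 |
412 | 	urlEncodeAll("cb")
413 | 	// -> "%63%62" (every byte is percent-encoded, even unreserved characters)
414 |
415 | 	addCachebusterParameter("https://example.com/?a=1", "cb123", "cb", false)
416 | 	// -> ("https://example.com/?a=1&cb=cb123", "cb123")
417 |
418 | 	addCachebusterParameter("https://example.com/?a=1", "cb123", "cb", true)
419 | 	// -> ("https://example.com/?cb=cb123&a=1", "cb123")
420 |
421 | 	checkCacheHit("3", "age")
422 | 	// -> true (a non-zero, non-empty Age value indicates a cached response)
423 | */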
--------------------------------------------------------------------------------
/templates/body_file:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example:
4 | //admin=true
--------------------------------------------------------------------------------
/templates/config_file:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example Config:
4 | //
5 | //
--------------------------------------------------------------------------------
/templates/cookie_list:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example Cookies:
4 | //uid=123456
5 | //session=ABCDEF
--------------------------------------------------------------------------------
/templates/header_list:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 | // Cookies specified here won't work. Use --setcookies (-sc) instead
3 |
4 | // Example Headers:
5 | //X-Forwarded-Host:example.com
6 | //User-Agent: Safari/1.1
--------------------------------------------------------------------------------
/templates/parameter_list:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example Parameters:
4 | //admin=true
5 | //verbosity=2
6 | //username=max
--------------------------------------------------------------------------------
/templates/recdomains_list:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example Domains:
4 | //www.example.com
5 | //api.example.com
6 | //blog.example.com
--------------------------------------------------------------------------------
/templates/url_list:
--------------------------------------------------------------------------------
1 | // lines which start with // or are empty will be ignored
2 |
3 | // Example URLs:
4 | //https://www.example.com
5 | //http://api.example.com/?admin=true&verbosity=2
--------------------------------------------------------------------------------
/web-cache-vulnerability-scanner.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "fmt"
5 | "log"
6 | "net/http"
7 | "net/url"
8 | "os"
9 | "strings"
10 | "time"
11 |
12 | "github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/pkg"
13 | )
14 |
15 | const version = "2.0.0"
16 |
17 | var (
18 | currentDate string
19 | filePath string
20 | report pkg.Report
21 | completedFile *os.File
22 | proxyURL *url.URL
23 | headerList []string
24 | parameterList []string
25 | noTestPreference bool
26 | excluded map[string]bool
27 | added map[string]bool
28 | )
29 |
30 | func main() {
31 | //pkg.ReadConfigFile()
32 | pkg.ParseFlags(version)
33 | /*****************************/
34 |
35 | /**** SET EXPORT STRUCT ****/
36 | report.Name = "Web_Cache_Vulnerability_Scanner"
37 | report.Version = version
38 |
39 | report.Config = &pkg.Config
40 |
41 | currentDate = time.Now().Format("2006-01-02_15-04-05")
42 | report.Date = currentDate
43 | report.Duration = "Not finished yet"
44 | /***************************/
45 | //Create generatePath directory
46 | if pkg.Config.GeneratePath != "./" {
47 | if !strings.HasSuffix(pkg.Config.GeneratePath, "/") {
48 | pkg.Config.GeneratePath += "/"
49 | }
50 | if _, err := os.Stat(pkg.Config.GeneratePath); err != nil {
51 | if os.IsNotExist(err) {
52 | err := os.Mkdir(pkg.Config.GeneratePath, 0755)
53 | if err != nil {
54 | msg := fmt.Sprintf("Error while creating Directory: %s\n", err.Error())
55 | pkg.PrintFatal(msg)
56 | }
57 | }
58 | }
59 | }
60 |
61 | filePath = fmt.Sprintf("%sWCVS_%s_%s", pkg.Config.GeneratePath, currentDate, pkg.RandomString(8))
62 |
63 | /* Setting Logoutput to Log file */
64 | if pkg.Config.GenerateLog {
65 | f, err := os.OpenFile(filePath+"_Log.txt", os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
66 | if err != nil {
67 | msg := fmt.Sprintf("Error while creating/opening Log File: %s\n", err.Error())
68 | pkg.PrintFatal(msg)
69 | }
70 | defer f.Close()
71 | log.SetOutput(f)
72 | }
73 |
74 | report.Config.Intitialized = true
75 |
76 | report.Command = fmt.Sprint(os.Args)
77 | report.Command = strings.TrimPrefix(report.Command, "[")
78 | report.Command = strings.TrimSuffix(report.Command, "]")
79 | pkg.PrintVerbose(report.Command+"\n\n", pkg.Cyan, 2)
80 | /******************************************/
81 | if pkg.Config.Verbosity < 0 || pkg.Config.Verbosity > 2 {
82 | msg := fmt.Sprintf("%d is not a valid verbosity between 0 and 2!\n", pkg.Config.Verbosity)
83 | pkg.PrintFatal(msg)
84 | }
85 |
86 | /* print copyright etc */
87 | pkg.PrintVerbose("Published by Hackmanit under http://www.apache.org/licenses/LICENSE-2.0\n", pkg.NoColor, 1)
88 | pkg.PrintVerbose("Author: Maximilian Hildebrand\n", pkg.NoColor, 1)
89 | pkg.PrintVerbose("Repository: https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner\n\n", pkg.NoColor, 1)
90 |
91 | // Print starting time
92 | msg := fmt.Sprintf("WCVS v%s started at %s\n", version, currentDate)
93 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
94 |
95 | start := time.Now()
96 |
97 | noTestPreference = true
98 | if pkg.Config.OnlyTest != "" && pkg.Config.SkipTest != "" {
99 | msg = "You can't set both doTest and dontTest\n"
100 | pkg.PrintFatal(msg)
101 | } else if pkg.Config.OnlyTest != "" {
102 | noTestPreference = false
103 | } else if pkg.Config.SkipTest != "" {
104 | noTestPreference = false
105 | }
106 |
107 | if pkg.Config.GenerateReport {
108 | pkg.GenerateReport(report, filePath)
109 | }
110 | if pkg.Config.GenerateCompleted {
111 | completedFile = createCompletedURLs()
112 | }
113 | pkg.InitClient()
114 | /***************************/
115 |
116 | // Reading header wordlist, only if it is needed
117 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "header") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "header")) {
118 | if pkg.Config.HeaderWordlist != "" {
119 | headerList = pkg.ReadLocalFile(pkg.Config.HeaderWordlist, "header")
120 | } else {
121 | headerList = pkg.DefaultHeaders
122 | }
123 | }
124 |
125 | // Reading parameter wordlist, only if it is needed
126 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "parameter") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "parameter")) {
127 | if pkg.Config.ParameterWordlist != "" {
128 | parameterList = pkg.ReadLocalFile(pkg.Config.ParameterWordlist, "parameter")
129 | } else {
130 | parameterList = pkg.DefaultParameters
131 | }
132 |
133 | }
134 |
135 | /*******************************************************/
136 |
137 | excluded = make(map[string]bool)
138 |
139 | for _, u := range pkg.Config.RecExclude {
140 | u = strings.TrimSuffix(u, "\r")
141 | u = strings.TrimSpace(u)
142 |
143 | // check if empty or is a comment
144 | if u == "" || strings.HasPrefix(u, "//") {
145 | continue
146 | }
147 | if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
148 | msg = fmt.Sprintf("URL %s doesn't begin with http:// or https:// and gets skipped\n", u)
149 | pkg.Print(msg, pkg.Yellow)
150 | continue
151 | }
152 |
153 | excluded[u] = true
154 | }
155 |
156 | added = make(map[string]bool)
157 |
158 | var testUrls []string
159 | for _, u := range pkg.Config.Urls {
160 | u = strings.TrimSuffix(u, "\r")
161 | u = strings.TrimSpace(u)
162 |
163 | // check if empty or is a comment
164 | if u == "" || strings.HasPrefix(u, "//") {
165 | continue
166 | }
167 | if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
168 | prefix := "https"
169 | if pkg.Config.UseHTTP {
170 | prefix = "http"
171 | }
172 | msg = fmt.Sprintf("URL %s gets the prefix %s\n", u, prefix)
173 | pkg.PrintVerbose(msg, pkg.Yellow, 2)
174 |
175 | u = prefix + "://" + u
176 | }
177 |
178 | added[u] = true
179 | testUrls = append(testUrls, u)
180 | }
181 |
182 | for i, u := range testUrls {
183 | var recUrls []string
184 | var progress string
185 |
186 | progress = fmt.Sprintf("(%d/%d)", i+1, len(testUrls))
187 | runTests(0, u, progress, &recUrls, "sub_")
188 |
189 | for rec := 1; rec <= pkg.Config.Recursivity; rec++ {
190 | var urlsToAdd []string
191 |
192 | for ii, uu := range recUrls {
193 | if ii != 0 && ii == pkg.Config.RecLimit {
194 | msg = "RecLimit was reached. The next URL - if available - will be tested\n"
195 | pkg.Print(msg, pkg.NoColor)
196 | break
197 | }
198 | progress = fmt.Sprintf("(%d/%d):(%d/%d)", i+1, len(testUrls), ii+1, len(recUrls))
199 | runTests(rec, uu, progress, &urlsToAdd, "crawl_")
200 | }
201 |
202 | recUrls = urlsToAdd
203 | }
204 | }
205 |
206 | /* Scan finished */
207 | msg = "Successfully finished the scan\n"
208 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
209 |
210 | duration := time.Since(start)
211 | msg = fmt.Sprintf("Duration: %s\n\n", duration)
212 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
213 | /****************/
214 |
215 | if pkg.Config.GenerateReport {
216 | report.Duration = duration.String()
217 | pkg.GenerateReport(report, filePath)
218 | }
219 | }
220 |
221 | func runTests(rec int, u string, progress string, foundUrls *[]string, stat string) {
222 | var repWebsite pkg.ReportWebsite
223 | var err error
224 |
225 | 	msg := fmt.Sprintf("\nTesting website %s: %s\n", progress, u)
226 | pkg.Print(msg, pkg.NoColor)
227 | pkg.Print("===============================================================\n", pkg.NoColor)
228 |
229 | if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
230 | if pkg.Config.UseHTTP {
231 | u = "http://" + u
232 | } else {
233 | u = "https://" + u
234 | }
235 | }
236 | repWebsite.URL = u
237 |
238 | /* Setting up client: cookies and noredirect */
239 | msg = "Setting up client\n"
240 | pkg.PrintVerbose(msg, pkg.NoColor, 2)
241 |
242 | // Setting cookies, specified by setcookies
243 | pkg.Config.Website.Cookies = map[string]string{}
244 | for _, c := range pkg.Config.Cookies {
245 | c = strings.TrimSuffix(c, "\r")
246 | c = strings.TrimSpace(c)
247 | if c == "" {
248 | continue
249 | } else if !strings.Contains(c, "=") {
250 | 			msg = fmt.Sprintf("Specified cookie %s doesn't contain a = and will be skipped\n", c)
251 | pkg.PrintVerbose(msg, pkg.NoColor, 2)
252 | continue
253 | } else {
254 | cSlice := strings.SplitAfterN(c, "=", 2)
255 | cSlice[0] = strings.TrimSuffix(cSlice[0], "=")
256 |
257 | pkg.Config.Website.Cookies[cSlice[0]] = cSlice[1]
258 | }
259 | }
260 |
261 | 	timeOutDuration := time.Duration(pkg.Config.TimeOut) * time.Second
262 | clientNoRedir := &http.Client{
263 | CheckRedirect: func(redirRequest *http.Request, via []*http.Request) error {
264 | /* Commented out, because it unnecessary bloats up logs, especially for 301/302 links
265 | msg := fmt.Sprintf("Redirect Request denied: %s\n", redirRequest.Header)
266 | pkg.PrintVerbose(msg, pkg.Yellow, 2)
267 | */
268 | return http.ErrUseLastResponse
269 | },
270 | Timeout: timeOutDuration,
271 | }
272 |
273 | http.DefaultClient = clientNoRedir
274 |
275 | // retrieve cookies, headers etc. Only setStatusCode if no cookies shall be accepted. Otherwise the next request with set Cookies sets the status code
276 | if !pkg.Config.DeclineCookies {
277 | pkg.Config.Website, err = pkg.GetWebsite(u, false, false)
278 | } else {
279 | pkg.Config.Website, err = pkg.GetWebsite(u, true, false)
280 | }
281 | if err != nil {
282 | repWebsite.HasError = true
283 | repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
284 | report.Websites = append(report.Websites, repWebsite)
285 |
286 | msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
287 | pkg.Print(msg, pkg.Red)
288 | return
289 | }
290 |
291 | if strings.HasPrefix(pkg.Config.Website.Body, "Burp Suite") {
292 | msg := fmt.Sprintf("Couldn't connect to given url: \n%s\n", pkg.Config.Website.Body)
293 | pkg.Print(msg, pkg.Red)
294 | return
295 | }
296 |
297 | if !pkg.Config.DeclineCookies {
298 | // retrieve response with all cookies set
299 | pkg.Config.Website, err = pkg.GetWebsite(u, true, false)
300 | if err != nil {
301 | repWebsite.HasError = true
302 | repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
303 | report.Websites = append(report.Websites, repWebsite)
304 |
305 | msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
306 | pkg.Print(msg, pkg.Red)
307 | return
308 | }
309 | }
310 |
311 | // check if there's a cache and the cachebuster works
312 | var errSlice []error
313 | var alwaysMiss bool
314 | pkg.Config.Website.Cache, alwaysMiss, errSlice = pkg.CheckCache(parameterList, headerList)
315 | for _, err := range errSlice {
316 | if err != nil {
317 | repWebsite.HasError = true
318 | repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
319 | }
320 | }
321 | 	/* don't return when there's an error, because the crawler shall run anyway. Also, a cachebuster might have been found alongside an error for another cachebuster, which doesn't matter
322 | if len(errSlice) > 0 && !pkg.Config.Force {
323 | return
324 | }
325 | */
326 |
327 | // retrieve response with cachebuster if cachebuster was found
328 | if pkg.Config.Website.Cache.CBwasFound {
329 | pkg.Config.Website, err = pkg.GetWebsite(u, true, true)
330 | if err != nil {
331 | repWebsite.HasError = true
332 | repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
333 | report.Websites = append(report.Websites, repWebsite)
334 |
335 | msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
336 | pkg.Print(msg, pkg.Red)
337 | return
338 | }
339 | }
340 |
341 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "decep") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "decep")) {
342 | msg = addSeparator("Web Cache Deception")
343 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
344 |
345 | if alwaysMiss || pkg.Config.Website.Cache.Indicator == "" {
346 | repWebsite.Results = append(repWebsite.Results, pkg.TestWebCacheDeception())
347 | } else {
348 | 			msg = "The response is already being cached!"
349 | pkg.Print(msg+"\n", pkg.Yellow)
350 | }
351 | } else {
352 | msg = addSeparator("Skipping Web Cache Deception")
353 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
354 | }
355 |
356 | /*******************************************/
357 | if pkg.Config.Website.Cache.CBwasFound || pkg.Config.Force {
358 | repWebsite.CacheIndicator = pkg.Config.Website.Cache.Indicator
359 | repWebsite.CBName = pkg.Config.Website.Cache.CBName
360 | repWebsite.CBwasFound = pkg.Config.Website.Cache.CBwasFound
361 |
362 | if !pkg.Config.Website.Cache.CBwasFound && pkg.Config.Force {
363 | fmt.Println()
364 | pkg.Print("No Cachebuster was found. Forcing the parameter "+pkg.Config.CacheBuster+" as Cachebuster.\n", pkg.Cyan)
365 | pkg.Config.Website.Cache.CBwasFound = true
366 | pkg.Config.Website.Cache.CBisParameter = true
367 | pkg.Config.Website.Cache.CBName = pkg.Config.CacheBuster
368 | }
369 |
370 | /* Testing for cookie poisoning */
371 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "cookie") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "cookie")) {
372 | msg = addSeparator("Cookie Poisoning")
373 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
374 |
375 | repWebsite.Results = append(repWebsite.Results, pkg.ScanCookies())
376 |
377 | if len(pkg.Config.Website.Cookies) == 0 {
378 | msg = "There were no cookies to test!"
379 | pkg.Print(msg+"\n", pkg.Yellow)
380 |
381 | repWebsite.Results[len(repWebsite.Results)-1].ErrorMessages = append(repWebsite.Results[len(repWebsite.Results)-1].ErrorMessages, msg)
382 | repWebsite.Results[len(repWebsite.Results)-1].HasError = true
383 | }
384 | } else {
385 | msg = addSeparator("Skipping Cookie Poisoning")
386 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
387 | }
388 | /*****************************/
389 |
390 | /* Testing for css poisoning */
391 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "css") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "css")) {
392 | msg = addSeparator("CSS Poisoning")
393 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
394 |
395 | repWebsite.Results = append(repWebsite.Results, pkg.ScanCSS())
396 | } else {
397 | msg = addSeparator("Skipping CSS Poisoning")
398 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
399 | }
400 | /*****************************/
401 |
402 | /* Testing for multiple forwarding headers for poisoning */
403 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "forward") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "forward")) {
404 | msg = addSeparator("Multiple Forwarding Headers Poisoning")
405 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
406 |
407 | repWebsite.Results = append(repWebsite.Results, pkg.ScanForwardingHeaders())
408 | } else {
409 | msg = addSeparator("Skipping Multiple Forwarding Headers Poisoning")
410 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
411 | }
412 | /*************************************************************/
413 |
414 | /* Testing for HTTP request smuggling poisoning */
415 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "smuggl") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "smuggl")) {
416 | msg = addSeparator("HTTP Request Smuggling Poisoning")
417 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
418 |
419 | repWebsite.Results = append(repWebsite.Results, pkg.ScanHTTPRequestSmuggling(proxyURL))
420 | } else {
421 | msg = addSeparator("Skipping HTTP Request Smuggling Poisoning")
422 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
423 | }
424 | /*************************************************************/
425 |
426 | /* Testing for multiple Cache Poisoned Denial Of Service Techniques */
427 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "dos") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "dos")) {
428 | msg = addSeparator("Cache Poisoned Denial Of Service")
429 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
430 |
431 | repWebsite.Results = append(repWebsite.Results, pkg.DOS())
432 | } else {
433 | msg = addSeparator("Skipping Cache Poisoned Denial Of Service")
434 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
435 | }
436 | /***********************************************************/
437 |
438 | /* Testing for header poisoning */
439 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "header") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "header")) {
440 | msg = addSeparator("Header Poisoning")
441 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
442 |
443 | repWebsite.Results = append(repWebsite.Results, pkg.ScanHeaders(headerList))
444 | } else {
445 | msg = addSeparator("Skipping Header Poisoning")
446 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
447 | }
448 | /********************************/
449 |
450 | /* Testing for query parameter poisoning */
451 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "parameter") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "parameter")) {
452 | msg = addSeparator("Query Parameter Poisoning")
453 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
454 | repWebsite.Results = append(repWebsite.Results, pkg.ScanParameters(parameterList))
455 | } else {
456 | msg = addSeparator("Skipping Query Parameter Poisoning")
457 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
458 | }
459 | /*****************************************/
460 |
461 | /* Testing for Fat GET */
462 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "fat") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "fat")) {
463 | msg = addSeparator("Fat GET Poisoning")
464 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
465 |
466 | if pkg.Config.DoPost {
467 | msg = "Can't check for Fat GET Poisoning, because POST was specified\n"
468 | pkg.PrintVerbose(msg, pkg.Yellow, 1)
469 | } else {
470 | repWebsite.Results = append(repWebsite.Results, pkg.ScanFatGET())
471 | }
472 | } else {
473 | msg = addSeparator("Skipping Fat GET Poisoning")
474 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
475 | }
476 | /**********************/
477 |
478 | /* Testing for Parameter Cloaking */
479 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "cloaking") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "cloaking")) {
480 | msg = addSeparator("Parameter Cloaking Poisoning")
481 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
482 |
483 | if pkg.Config.DoPost {
484 | msg = "Can't check for Parameter Cloaking Poisoning, because POST was specified\n"
485 | pkg.PrintVerbose(msg, pkg.Yellow, 1)
486 | } else {
487 | repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterCloaking())
488 | }
489 | } else {
490 | msg = addSeparator("Skipping Parameter Cloaking Poisoning")
491 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
492 | }
493 | /**********************************/
494 |
495 | /* Testing for Parameter Pollution */
496 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "pollution") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "pollution")) {
497 | msg = addSeparator("Parameter Pollution Poisoning")
498 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
499 | repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterPollution())
500 | } else {
501 | msg = addSeparator("Skipping Parameter Pollution Poisoning")
502 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
503 | }
504 | /**********************************/
505 |
506 | /* Testing for Parameter Encoding */
507 | if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "encoding") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "encoding")) {
508 | msg = addSeparator("Parameter Encoding Poisoning")
509 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
510 | repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterEncoding())
511 | } else {
512 | msg = addSeparator("Skipping Parameter Encoding Poisoning")
513 | pkg.PrintVerbose(msg, pkg.NoColor, 1)
514 | }
515 | /**********************************/
516 | }
517 |
518 | /* Check for linked files */
519 | if pkg.Config.Recursivity > rec {
520 | msg = fmt.Sprintf("\nChecking recursively for urls (%d/%d)\n", rec+1, pkg.Config.Recursivity)
521 | pkg.Print(msg, pkg.NoColor)
522 |
523 | tempUrls := pkg.CrawlUrls(u, added, excluded)
524 |
525 | if len(tempUrls) > 0 {
526 | msg = fmt.Sprintf("Found %d url(s)\n", len(tempUrls))
527 | pkg.Print(msg, pkg.NoColor)
528 |
529 | msg = "Adding the following urls to the Queue:"
530 | pkg.PrintVerbose(msg+"\n", pkg.NoColor, 1)
531 | for _, u := range tempUrls {
532 | pkg.PrintVerbose(u+"\n", pkg.NoColor, 1)
533 | }
534 |
535 | *foundUrls = append(*foundUrls, tempUrls...)
536 | } else {
537 | msg = "No urls were found to add to the queue\n"
538 | pkg.Print(msg, pkg.NoColor)
539 | }
540 | }
541 | /**************************/
542 |
543 | if pkg.Config.GenerateCompleted {
544 | _, err = completedFile.WriteString(u + "\n")
545 | if err != nil {
546 | 			pkg.Print("Couldn't write to WCVS_Completed file: "+err.Error()+"\n", pkg.Red)
547 | }
548 | }
549 |
550 | if pkg.Config.GenerateReport {
551 | for _, r := range repWebsite.Results {
552 | if r.Vulnerable {
553 | repWebsite.Vulnerable = true
554 | break
555 | }
556 | }
557 | report.Websites = append(report.Websites, repWebsite)
558 |
559 | report.Vulnerable = report.Vulnerable || repWebsite.Vulnerable
560 | pkg.PrintNewLine()
561 | pkg.GenerateReport(report, filePath)
562 | }
563 |
564 | pkg.Print("===============================================================\n\n", pkg.NoColor)
565 | }
566 |
567 | func addSeparator(msg string) string {
568 | separator := " --------------------------------------------------------------"
569 | return fmt.Sprintf("\n%s\n| %s\n%s\n", separator, msg, separator)
570 | }
571 |
572 | func createCompletedURLs() *os.File {
573 | completedPath := filePath + "_Completed.txt"
574 |
575 | _, err := os.Stat(completedPath)
576 |
577 | 	var file *os.File
578 | 	// the file is returned to the caller and written to later, so it must not be closed here
579 |
580 | if !os.IsNotExist(err) {
581 | msg := fmt.Sprintf("The file %s will be overwritten, as it already exists\n", completedPath)
582 | pkg.PrintVerbose(msg, pkg.Yellow, 1)
583 | file, err = os.OpenFile(completedPath, os.O_WRONLY, 0666)
584 | } else {
585 | file, err = os.Create(completedPath)
586 | }
587 | if err != nil {
588 | msg := "Couldn't create WCVS_Completed file: " + err.Error() + "\n"
589 | pkg.PrintFatal(msg)
590 | }
591 |
592 | return file
593 | }
594 |
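595 | /*
596 | Each test phase in runTests is gated by the same predicate. Written out as a
597 | hypothetical helper (not part of the codebase):
598 |
599 | 	func shouldRun(test string) bool {
600 | 		return noTestPreference ||
601 | 			strings.Contains(pkg.Config.OnlyTest, test) ||
602 | 			(pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, test))
603 | 	}
604 |
605 | shouldRun("header") is true when neither OnlyTest nor SkipTest is set, when "header"
606 | appears in OnlyTest, or when SkipTest is set but doesn't mention "header".
607 | */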
--------------------------------------------------------------------------------