├── LICENSE
├── README.md
├── compile.txt
├── corprint
    └── main.go
├── libfuzzer
    └── fuzzapi.go
├── pcap2corp
    └── main.go
├── stringdict
    └── main.go
└── webfuzz
    └── webfuzz.go
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2020 Catena cyber

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# webfuzz

This is a fuzzer for web applications.
It uses the request URI and the response to infer some coverage to guide fuzzing.
It uses libFuzzer extra counters.

Utils
------

* pcap2corp: takes a pcap as input and extracts a seed corpus out of it (i.e. the HTTP requests); seeds can also be hand-crafted, see the sketch in the next section
* stringdict: parses a Go file and extracts its constant strings to generate a libFuzzer dictionary
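
Seed corpus
------

Corpus entries are plain serialized HTTP requests (see `SerializeRequest` in `webfuzz/webfuzz.go`). Each entry is replayed against the target host, which defaults to `http://localhost:8065` and can be overridden by putting a base URL in a `host.txt` file in the working directory. Besides pcap2corp, a seed can presumably be written by hand; the program below is only a hypothetical sketch (it is not part of this repository, and the login URI and the `corpus/` output directory are placeholders to adapt to the target application) showing how a single request could be serialized with the same file-naming scheme as pcap2corp.

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/catenacyber/webfuzz/webfuzz"
)

func main() {
	// Any request the target application accepts makes a useful seed.
	req, err := http.NewRequest("POST", "/api/v4/users/login",
		strings.NewReader(`{"login_id":"user","password":"pass"}`))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Serialize it the same way pcap2corp does.
	data, err := webfuzz.SerializeRequest(req)
	if err != nil {
		panic(err)
	}
	// Name the file after the sha1 of its content, like pcap2corp.
	fname := fmt.Sprintf("corpus/%x", sha1.Sum(data))
	if err := ioutil.WriteFile(fname, data, 0644); err != nil {
		panic(err)
	}
}
```

The `fuzzweb` binary built as described in `compile.txt` is a regular libFuzzer target, so the corpus directory and a `-dict=` file produced by stringdict are passed to it on the command line.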

TODOs
------

* use the replies to infer URIs and parameters (i.e. add them to the dictionary): crawling capabilities
* have a flexible way to get more valid requests (i.e. less fuzzing of the HTTP protocol and of the JSON parser...)
* find duplicate coverage (i.e. detect when URIs such as /foo/bar, /foo/baz and /foo/whatever are aliases for the same code)
* create an authenticated session (without needing to reuse the cookie from the seed corpus)

--------------------------------------------------------------------------------
/compile.txt:
--------------------------------------------------------------------------------
go build -v -o webfuzz.a -buildmode=c-archive libfuzzer/fuzzapi.go
clang++ -O1 -fno-omit-frame-pointer -gline-tables-only -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=address -fsanitize-address-use-after-scope -fsanitize=fuzzer-no-link -stdlib=libc++ -fsanitize=fuzzer webfuzz.a -o fuzzweb

--------------------------------------------------------------------------------
/corprint/main.go:
--------------------------------------------------------------------------------
package main

import (
	"fmt"
	"io/ioutil"
	"net/http/httputil"
	"os"
	"path/filepath"

	"github.com/catenacyber/webfuzz/webfuzz"
)

func main() {
	if len(os.Args) != 2 {
		fmt.Printf("Expects only a directory as argument\n")
		os.Exit(1)
	}
	var files []string

	//collect every file of the corpus directory
	err := filepath.Walk(os.Args[1], func(path string, info os.FileInfo, err error) error {
		files = append(files, path)
		return nil
	})
	if err != nil {
		fmt.Printf("Error reading directory %s\n", err)
		panic(err)
	}
	for _, filename := range files {
		data, err := ioutil.ReadFile(filename)
		fmt.Printf("File %s\n", filename)
		if err != nil {
			fmt.Printf("Failed to read corpus file %s\n", err)
			continue
		}
		req, err := webfuzz.UnserializeRequest(data)
		if err != nil {
			fmt.Printf("Error unserializing request %s\n", err)
			continue
		}
		rData, err := httputil.DumpRequest(req, true)
		if err != nil {
			fmt.Printf("Error printing request %s\n", err)
			continue
		}
		fmt.Printf("Request:\n %q\n\n\n", rData)
	}
}

--------------------------------------------------------------------------------
/libfuzzer/fuzzapi.go:
--------------------------------------------------------------------------------
package main

import (
	"reflect"
	"unsafe"

	"github.com/catenacyber/webfuzz/webfuzz"
)

// #cgo CFLAGS: -Wall -Werror
// #ifdef __linux__
// __attribute__((weak, section("__libfuzzer_extra_counters")))
// #else
// #error Currently only Linux is supported
// #endif
// unsigned char LibfuzzerExtraCounters[0x10000];
import "C"

//export LLVMFuzzerInitialize
func LLVMFuzzerInitialize(argc uintptr, argv uintptr) int {
	webfuzz.WebfuzzInitialize(unsafe.Pointer(&C.LibfuzzerExtraCounters[0]), 0x10000)
	return 0
}

//export LLVMFuzzerTestOneInput
func LLVMFuzzerTestOneInput(data uintptr, size uint64) int {
	// view libFuzzer's input buffer as a Go byte slice without copying
	sh := &reflect.SliceHeader{
		Data: data,
		Len:  int(size),
		Cap:  int(size),
	}
	input := *(*[]byte)(unsafe.Pointer(sh))
	webfuzz.WebfuzzProcess(input)
	// always return 0 as it is expected by libFuzzer
	return 0
}

func main() {}

--------------------------------------------------------------------------------
/pcap2corp/main.go:
--------------------------------------------------------------------------------
package main

import (
	"bufio"
	"crypto/sha1"
	"fmt"
	"io/ioutil"
	"net/http"
	"os"
	"strings"

"github.com/catenacyber/webfuzz/webfuzz" 13 | 14 | "github.com/google/gopacket" 15 | "github.com/google/gopacket/layers" 16 | "github.com/google/gopacket/pcap" 17 | ) 18 | 19 | func main() { 20 | if len(os.Args) != 3 { 21 | fmt.Printf("Expects a pcap file, and output directory as arguments\n") 22 | } 23 | if handle, err := pcap.OpenOffline(os.Args[1]); err != nil { 24 | fmt.Printf("Unable to open pcap file\n") 25 | panic(err) 26 | } else { 27 | packetSource := gopacket.NewPacketSource(handle, handle.LinkType()) 28 | //loop over packets 29 | for packet := range packetSource.Packets() { 30 | tcp := packet.Layer(layers.LayerTypeTCP).(*layers.TCP) 31 | if tcp != nil { 32 | //filter interesting packets 33 | if tcp.DstPort == 8065 && len(tcp.Payload) > 0 { 34 | req, err := http.ReadRequest(bufio.NewReader(strings.NewReader(string(tcp.Payload)))) 35 | if err != nil { 36 | fmt.Printf("Error reading request %s\n", err) 37 | continue 38 | } 39 | data, err := webfuzz.SerializeRequest(req) 40 | if err != nil { 41 | fmt.Printf("Failed serializing request\n") 42 | continue 43 | } 44 | fname := os.Args[2] + "/" + fmt.Sprintf("%x", sha1.Sum(data)) 45 | err = ioutil.WriteFile(fname, data, 0644) 46 | if err != nil { 47 | fmt.Printf("Failed writing output file %s\n", fname) 48 | panic(err) 49 | } 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /stringdict/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "go/ast" 6 | "go/parser" 7 | "go/token" 8 | "os" 9 | "strings" 10 | "unicode" 11 | ) 12 | 13 | func main() { 14 | if len(os.Args) != 2 { 15 | fmt.Printf("Expects a go file as arguments\n") 16 | } 17 | 18 | // Create the AST by parsing src. 19 | fset := token.NewFileSet() // positions are relative to fset 20 | f, err := parser.ParseFile(fset, os.Args[1], nil, 0) 21 | if err != nil { 22 | panic(err) 23 | } 24 | 25 | // Inspect the AST and print all identifiers and literals. 
	ast.Inspect(f, func(n ast.Node) bool {
		var s string
		switch x := n.(type) {
		case *ast.BasicLit:
			if x.Kind == token.STRING {
				s = x.Value
			}
		case *ast.GenDecl:
			if x.Tok == token.IMPORT {
				//exclude imported packages
				return false
			}
		}
		if len(s) > 4 {
			name := fset.Position(n.Pos()).String()
			name = strings.Replace(name, ".go", "", 1)
			name = strings.ReplaceAll(name, ":", "_")
			if s[0] != '"' || s[len(s)-1] != '"' {
				return true
			}
			if strings.Contains(s, "\\") {
				return true
			}
			for i := 0; i < len(s); i++ {
				if s[i] > unicode.MaxASCII {
					return false
				}
			}
			fmt.Printf("%s=%s\n", name, s)
		}
		return true
	})
}

--------------------------------------------------------------------------------
/webfuzz/webfuzz.go:
--------------------------------------------------------------------------------
package webfuzz

import (
	"bufio"
	"bytes"
	"fmt"
	"hash/crc32"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/httputil"
	"strconv"
	"strings"
	"time"
	"unsafe"
)

const CoverSize = 0x10000

var CoverTab = new([CoverSize]byte)

var host = "http://localhost:8065"

var client *http.Client

var debug = false

func WebfuzzInitialize(coverTabPtr unsafe.Pointer, coverTabSize uint64) {
	if coverTabSize != CoverSize {
		panic("Incorrect cover tab size")
	}
	CoverTab = (*[CoverSize]byte)(coverTabPtr)
	client = &http.Client{
		Transport: &http.Transport{
			MaxIdleConnsPerHost: 4,
		},
		Timeout: time.Duration(3) * time.Second,
	}
	content, err := ioutil.ReadFile("host.txt")
	if err == nil {
		host = strings.TrimRight(string(content), "\r\n")
	}
}

func SerializeRequest(req *http.Request) ([]byte, error) {
	req.Host = ""
	rData, err := httputil.DumpRequest(req, true)
	return rData, err
}

func uriNorm(u string) string {
	r := strings.ReplaceAll(u, "/./", "/")
	l := len(r)
	for {
		r = strings.ReplaceAll(r, "//", "/")
		if len(r) >= l {
			break
		}
		l = len(r)
	}
	return r
}

func UnserializeRequest(input []byte) (*http.Request, error) {
	req, err := http.ReadRequest(bufio.NewReader(strings.NewReader(string(input))))
	if err != nil {
		return nil, err
	}
	req2, err := http.NewRequest(req.Method, host+uriNorm(req.RequestURI), req.Body)
	if err != nil {
		return nil, err
	}
	for name := range req.Header {
		if name != "Content-Length" {
			req2.Header.Add(name, req.Header.Get(name))
		}
	}
	return req2, err
}

func WebfuzzProcess(input []byte) int {
	//show we have some coverage
	CoverTab[0]++
	req, err := UnserializeRequest(input)
	if err != nil {
		return -1
	}
	if debug {
		rData, err := httputil.DumpRequest(req, true)
		if err == nil {
			fmt.Printf("request %q\n", rData)
		}
	}
	var rdr2 io.ReadCloser
	if req.Body != nil {
		buf, err := ioutil.ReadAll(req.Body)
		if err != nil {
			req.Body = nil
		} else {
			rdr1 := ioutil.NopCloser(bytes.NewBuffer(buf))
			rdr2 = ioutil.NopCloser(bytes.NewBuffer(buf))
			req.Body = rdr1
		}
	}
	resp, err := client.Do(req)
	if err != nil {
		//can happen with net/http: invalid header field name "\x00\x00\x00"
		return -2
	}
	CoverTab[0xFFFF]++

	computeCoverage(req, resp, client, rdr2)
	io.Copy(ioutil.Discard, resp.Body)
	resp.Body.Close()
	return 0
}

var seenCodes = new([512]bool)
var reproducibleHeaders = map[string]bool{}
var alreadyCovered = new([65536]bool)
var validUris = map[string]bool{}

func computeCoverage(req *http.Request, resp *http.Response, client *http.Client, rdr io.ReadCloser) {
	//specific bypass
	if resp.ContentLength == 3556 || resp.ContentLength < 0 {
		return
	}
	//512 counters for status code coverage
	if resp.StatusCode >= 100 && resp.StatusCode < 611 {
		CoverTab[resp.StatusCode-99]++
		if !seenCodes[resp.StatusCode-99] {
			seenCodes[resp.StatusCode-99] = true
			fmt.Printf("NEW webfuzz status code %d\n", resp.StatusCode)
		}
	} else {
		fmt.Printf("Unknown response code %d\n", resp.StatusCode)
		panic(fmt.Sprintf("Unknown response code %d", resp.StatusCode))
	}

	//resp headers names coverage
	reproduce := false
	for name := range resp.Header {
		h := crc32.ChecksumIEEE([]byte(name))
		CoverTab[h&0xFFFF]++
		_, seen := reproducibleHeaders[name]
		if !seen {
			fmt.Printf("NEW webfuzz header %s\n", name)
			reproduce = true
		}
	}

	if reproduce {
		req.Body = rdr
		resp2, err := client.Do(req)
		if err != nil {
			//resp2 is nil on error, so check before draining its body
			fmt.Printf("Cannot reproduce request : %s\n", err)
			for name := range resp.Header {
				reproducibleHeaders[name] = false
			}
			return
		}
		io.Copy(ioutil.Discard, resp2.Body)
		resp2.Body.Close()
		for name := range resp.Header {
			_, seen := reproducibleHeaders[name]
			v1 := resp.Header.Get(name)
			v2 := resp2.Header.Get(name)
			if !seen {
				switch name {
				case "Date", "Content-Length":
					reproducibleHeaders[name] = false
				default:
					reproducibleHeaders[name] = (v1 == v2)
				}
			}
		}
	}

	//resp headers values coverage
	for name := range resp.Header {
		repro, seen := reproducibleHeaders[name]
		if seen && repro {
			value := resp.Header.Get(name)
			h := crc32.ChecksumIEEE([]byte(name + ":" + value))
			CoverTab[h&0xFFFF]++
			cov := alreadyCovered[h&0xFFFF]
			if !cov {
				alreadyCovered[h&0xFFFF] = true
				fmt.Printf("Adding for header %s value %s\n", name, value)
			}
		}
	}

	_, validu := validUris[req.URL.Path]
	if resp.StatusCode < 300 || validu {
		if resp.StatusCode == 200 && !validu {
			fmt.Printf("New valid uri %s\n", req.URL.Path)
			validUris[req.URL.Path] = true
		}
		//uri coverage
		uri := strings.Split(req.URL.Path, "/")
		i := 0
		for _, p := range uri {
			if len(p) == 0 {
				break
			}
			h := crc32.ChecksumIEEE([]byte(strconv.Itoa(i) + p))
			i++
			CoverTab[h&0xFFFF]++
			cov := alreadyCovered[h&0xFFFF]
			if !cov {
				alreadyCovered[h&0xFFFF] = true
				fmt.Printf("Adding for uri %s (%s) status %d\n", req.URL.Path, p, resp.StatusCode)
			}

		}
		hu := crc32.ChecksumIEEE([]byte(req.URL.Path))
		hr := crc32.ChecksumIEEE([]byte(resp.Status))
		CoverTab[(hu^hr)&0xFFFF]++
		for name := range resp.Header {
			hh := crc32.ChecksumIEEE([]byte(name))
			CoverTab[(hu^hh)&0xFFFF]++
		}
		/*if req.Method != "HEAD" && resp.StatusCode < 300 {
			//resp body coverage
			body, err := ioutil.ReadAll(resp.Body)
			if err != nil {
				fmt.Printf("Cannot read response body : %s", err)
				panic(fmt.Sprintf("Cannot read response body : %s", err))
			}
			hb := crc32.ChecksumIEEE(body)
			CoverTab[(hu^hb)&0xFFFF]++
		}*/
	} else if resp.StatusCode == 500 {
		fmt.Printf("Server crashed with %d\n", resp.StatusCode)
		panic(fmt.Sprintf("Server crashed with %d", resp.StatusCode))
	}
}

--------------------------------------------------------------------------------
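
As a possible complement to corprint, here is a sketch (not part of the repository) of how the webfuzz package could be driven without the libFuzzer C harness, for example to replay a few corpus files against a running target; a local array stands in for libFuzzer's extra-counters section, and the file names on the command line are arbitrary.

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"unsafe"

	"github.com/catenacyber/webfuzz/webfuzz"
)

func main() {
	//stand-in for libFuzzer's extra-counters section
	var counters [webfuzz.CoverSize]byte
	webfuzz.WebfuzzInitialize(unsafe.Pointer(&counters[0]), webfuzz.CoverSize)
	for _, fname := range os.Args[1:] {
		data, err := ioutil.ReadFile(fname)
		if err != nil {
			fmt.Printf("Failed to read %s: %s\n", fname, err)
			continue
		}
		//WebfuzzProcess sends the request to the target from host.txt (default http://localhost:8065)
		fmt.Printf("Replayed %s -> %d\n", fname, webfuzz.WebfuzzProcess(data))
	}
}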