├── .gitignore ├── .github ├── FUNDING.yml ├── pull_request_template.md └── workflows │ ├── golangci-lint.yml │ └── codeql-analysis.yml ├── _img ├── offsec-logo.png ├── ffuf_mascot_600.png ├── ffuf_waving_250.png ├── ffuf_juggling_250.png ├── ffuf_run_logo_600.png └── ffuf_running_250.png ├── pkg ├── input │ ├── const.go │ ├── const_windows.go │ ├── wordlist_test.go │ ├── command.go │ ├── wordlist.go │ └── input.go ├── interactive │ ├── posix.go │ ├── windows.go │ └── termhandler.go ├── ffuf │ ├── progress.go │ ├── constants.go │ ├── multierror.go │ ├── util_test.go │ ├── valuerange.go │ ├── optrange.go │ ├── response.go │ ├── rate.go │ ├── history.go │ ├── util.go │ ├── optionsparser_test.go │ ├── interfaces.go │ ├── configmarshaller.go │ ├── config.go │ ├── request.go │ ├── request_test.go │ ├── autocalibration.go │ └── job.go ├── output │ ├── const_windows.go │ ├── output.go │ ├── const.go │ ├── file_csv_test.go │ ├── file_md.go │ ├── file_csv.go │ ├── file_json.go │ ├── file_html.go │ └── stdout.go ├── runner │ ├── runner.go │ └── simple.go └── filter │ ├── size_test.go │ ├── status_test.go │ ├── time_test.go │ ├── regexp_test.go │ ├── lines_test.go │ ├── words_test.go │ ├── filter_test.go │ ├── regex.go │ ├── time.go │ ├── size.go │ ├── lines.go │ ├── words.go │ ├── status.go │ └── filter.go ├── go.mod ├── .goreleaser.yml ├── LICENSE ├── go.sum ├── CONTRIBUTORS.md ├── ffufrc.example ├── help.go ├── CHANGELOG.md ├── README.md └── main.go /.gitignore: -------------------------------------------------------------------------------- 1 | /ffuf 2 | .idea 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: [joohoi] 2 | -------------------------------------------------------------------------------- /_img/offsec-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/offsec-logo.png -------------------------------------------------------------------------------- /_img/ffuf_mascot_600.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/ffuf_mascot_600.png -------------------------------------------------------------------------------- /_img/ffuf_waving_250.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/ffuf_waving_250.png -------------------------------------------------------------------------------- /_img/ffuf_juggling_250.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/ffuf_juggling_250.png -------------------------------------------------------------------------------- /_img/ffuf_run_logo_600.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/ffuf_run_logo_600.png -------------------------------------------------------------------------------- /_img/ffuf_running_250.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tomnomnom/ffuf/HEAD/_img/ffuf_running_250.png -------------------------------------------------------------------------------- /pkg/input/const.go: 
-------------------------------------------------------------------------------- 1 | // +build !windows 2 | 3 | package input 4 | 5 | const ( 6 | SHELL_CMD = "/bin/sh" 7 | SHELL_ARG = "-c" 8 | ) 9 | -------------------------------------------------------------------------------- /pkg/input/const_windows.go: -------------------------------------------------------------------------------- 1 | // +build windows 2 | 3 | package input 4 | 5 | const ( 6 | SHELL_CMD = "cmd.exe" 7 | SHELL_ARG = "/C" 8 | ) 9 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/ffuf/ffuf 2 | 3 | go 1.13 4 | 5 | require ( 6 | github.com/adrg/xdg v0.4.0 7 | github.com/pelletier/go-toml v1.8.1 8 | ) 9 | -------------------------------------------------------------------------------- /pkg/interactive/posix.go: -------------------------------------------------------------------------------- 1 | // +build !windows 2 | 3 | package interactive 4 | 5 | import "os" 6 | 7 | func termHandle() (*os.File, error) { 8 | return os.Open("/dev/tty") 9 | } 10 | -------------------------------------------------------------------------------- /pkg/ffuf/progress.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | type Progress struct { 8 | StartedAt time.Time 9 | ReqCount int 10 | ReqTotal int 11 | ReqSec int64 12 | QueuePos int 13 | QueueTotal int 14 | ErrorCount int 15 | } 16 | -------------------------------------------------------------------------------- /pkg/output/const_windows.go: -------------------------------------------------------------------------------- 1 | // +build windows 2 | 3 | package output 4 | 5 | const ( 6 | TERMINAL_CLEAR_LINE = "\r\r" 7 | ANSI_CLEAR = "" 8 | ANSI_RED = "" 9 | ANSI_GREEN = "" 10 | ANSI_BLUE = "" 11 | ANSI_YELLOW = "" 12 | ) 13 | -------------------------------------------------------------------------------- /pkg/output/output.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "github.com/ffuf/ffuf/pkg/ffuf" 5 | ) 6 | 7 | func NewOutputProviderByName(name string, conf *ffuf.Config) ffuf.OutputProvider { 8 | //We have only one outputprovider at the moment 9 | return NewStdoutput(conf) 10 | } 11 | -------------------------------------------------------------------------------- /pkg/runner/runner.go: -------------------------------------------------------------------------------- 1 | package runner 2 | 3 | import ( 4 | "github.com/ffuf/ffuf/pkg/ffuf" 5 | ) 6 | 7 | func NewRunnerByName(name string, conf *ffuf.Config, replay bool) ffuf.RunnerProvider { 8 | // We have only one Runner at the moment 9 | return NewSimpleRunner(conf, replay) 10 | } 11 | -------------------------------------------------------------------------------- /pkg/output/const.go: -------------------------------------------------------------------------------- 1 | // +build !windows 2 | 3 | package output 4 | 5 | const ( 6 | TERMINAL_CLEAR_LINE = "\r\x1b[2K" 7 | ANSI_CLEAR = "\x1b[0m" 8 | ANSI_RED = "\x1b[31m" 9 | ANSI_GREEN = "\x1b[32m" 10 | ANSI_BLUE = "\x1b[34m" 11 | ANSI_YELLOW = "\x1b[33m" 12 | ) 13 | -------------------------------------------------------------------------------- /pkg/ffuf/constants.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | 
"github.com/adrg/xdg" 5 | "path/filepath" 6 | ) 7 | 8 | var ( 9 | //VERSION holds the current version number 10 | VERSION = "1.5.0" 11 | //VERSION_APPENDIX holds additional version definition 12 | VERSION_APPENDIX = "-dev" 13 | CONFIGDIR = filepath.Join(xdg.ConfigHome, "ffuf") 14 | HISTORYDIR = filepath.Join(CONFIGDIR, "history") 15 | ) 16 | -------------------------------------------------------------------------------- /pkg/interactive/windows.go: -------------------------------------------------------------------------------- 1 | // +build windows 2 | 3 | package interactive 4 | 5 | import ( 6 | "os" 7 | "syscall" 8 | ) 9 | 10 | func termHandle() (*os.File, error) { 11 | var tty *os.File 12 | _, err := syscall.Open("CONIN$", syscall.O_RDWR, 0) 13 | if err != nil { 14 | return tty, err 15 | } 16 | tty, err = os.Open("CONIN$") 17 | if err != nil { 18 | return tty, err 19 | } 20 | return tty, nil 21 | } 22 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | # Description 2 | 3 | Please add a short description of pull request contents. 4 | If this PR addresses an existing issue, please add the issue number below. 5 | 6 | Fixes: #(issue number) 7 | 8 | ## Additonally 9 | 10 | - [ ] If this is the first time you are contributing to ffuf, add your name to `CONTRIBUTORS.md`. 11 | The file should be alphabetically ordered. 12 | - [ ] Add a short description of the fix to `CHANGELOG.md` 13 | 14 | Thanks for contributing to ffuf :) 15 | -------------------------------------------------------------------------------- /pkg/input/wordlist_test.go: -------------------------------------------------------------------------------- 1 | package input 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func TestStripCommentsIgnoresCommentLines(t *testing.T) { 8 | text, _ := stripComments("# text") 9 | 10 | if text != "" { 11 | t.Errorf("Returned text was not a blank string") 12 | } 13 | } 14 | 15 | func TestStripCommentsStripsCommentAfterText(t *testing.T) { 16 | text, _ := stripComments("text # comment") 17 | 18 | if text != "text" { 19 | t.Errorf("Comment was not stripped or pre-comment text was not returned") 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /pkg/ffuf/multierror.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "fmt" 5 | ) 6 | 7 | type Multierror struct { 8 | errors []error 9 | } 10 | 11 | //NewMultierror returns a new Multierror 12 | func NewMultierror() Multierror { 13 | return Multierror{} 14 | } 15 | 16 | func (m *Multierror) Add(err error) { 17 | m.errors = append(m.errors, err) 18 | } 19 | 20 | func (m *Multierror) ErrorOrNil() error { 21 | var errString string 22 | if len(m.errors) > 0 { 23 | errString += fmt.Sprintf("%d errors occured.\n", len(m.errors)) 24 | for _, e := range m.errors { 25 | errString += fmt.Sprintf("\t* %s\n", e) 26 | } 27 | return fmt.Errorf("%s", errString) 28 | } 29 | return nil 30 | } 31 | -------------------------------------------------------------------------------- /pkg/ffuf/util_test.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "math/rand" 5 | "testing" 6 | ) 7 | 8 | func TestRandomString(t *testing.T) { 9 | length := 1 + rand.Intn(65535) 10 | str := RandomString(length) 11 | 12 | if len(str) != length { 13 | 
t.Errorf("Length of generated string was %d, was expecting %d", len(str), length) 14 | } 15 | } 16 | 17 | func TestUniqStringSlice(t *testing.T) { 18 | slice := []string{"foo", "foo", "bar", "baz", "baz", "foo", "baz", "baz", "foo"} 19 | expectedLength := 3 20 | 21 | uniqSlice := UniqStringSlice(slice) 22 | 23 | if len(uniqSlice) != expectedLength { 24 | t.Errorf("Length of slice was %d, was expecting %d", len(uniqSlice), expectedLength) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /.goreleaser.yml: -------------------------------------------------------------------------------- 1 | builds: 2 | - id: ffuf 3 | binary: ffuf 4 | flags: 5 | - -trimpath 6 | env: 7 | - CGO_ENABLED=0 8 | asmflags: 9 | - all=-trimpath={{.Env.GOPATH}} 10 | gcflags: 11 | - all=-trimpath={{.Env.GOPATH}} 12 | ldflags: | 13 | -s -w -X github.com/ffuf/ffuf/pkg/ffuf.VERSION_APPENDIX= -extldflags '-static' 14 | goos: 15 | - linux 16 | - windows 17 | - freebsd 18 | - openbsd 19 | - darwin 20 | goarch: 21 | - amd64 22 | - 386 23 | - arm 24 | - arm64 25 | ignore: 26 | - goos: freebsd 27 | goarch: arm64 28 | 29 | archives: 30 | - id: tgz 31 | format: tar.gz 32 | replacements: 33 | darwin: macOS 34 | format_overrides: 35 | - goos: windows 36 | format: zip 37 | 38 | signs: 39 | - artifacts: checksum 40 | -------------------------------------------------------------------------------- /pkg/output/file_csv_test.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "reflect" 5 | "testing" 6 | "time" 7 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" 9 | ) 10 | 11 | func TestToCSV(t *testing.T) { 12 | result := ffuf.Result{ 13 | Input: map[string][]byte{"x": {66}}, 14 | Position: 1, 15 | StatusCode: 200, 16 | ContentLength: 3, 17 | ContentWords: 4, 18 | ContentLines: 5, 19 | ContentType: "application/json", 20 | RedirectLocation: "http://no.pe", 21 | Url: "http://as.df", 22 | Duration: time.Duration(123), 23 | ResultFile: "resultfile", 24 | Host: "host", 25 | } 26 | 27 | csv := toCSV(result) 28 | 29 | if !reflect.DeepEqual(csv, []string{ 30 | "B", 31 | "http://as.df", 32 | "http://no.pe", 33 | "1", 34 | "200", 35 | "3", 36 | "4", 37 | "5", 38 | "application/json", 39 | "123ns", 40 | "resultfile"}) { 41 | 42 | t.Errorf("CSV was not generated in expected format") 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /.github/workflows/golangci-lint.yml: -------------------------------------------------------------------------------- 1 | name: golangci-lint 2 | on: 3 | push: 4 | tags: 5 | - v* 6 | branches: 7 | - master 8 | pull_request: 9 | jobs: 10 | golangci: 11 | name: lint 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/setup-go@v3 15 | with: 16 | go-version: 1.17 17 | - uses: actions/checkout@v3 18 | - name: golangci-lint 19 | uses: golangci/golangci-lint-action@v3 20 | with: 21 | # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version. 22 | version: v1.29 23 | 24 | # Optional: working directory, useful for monorepos 25 | # working-directory: somedir 26 | 27 | # Optional: golangci-lint command line arguments. 28 | # args: --issues-exit-code=0 29 | 30 | # Optional: show only new issues if it's a pull request. The default value is `false`. 
31 | # only-new-issues: true -------------------------------------------------------------------------------- /pkg/ffuf/valuerange.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "fmt" 5 | "regexp" 6 | "strconv" 7 | ) 8 | 9 | type ValueRange struct { 10 | Min, Max int64 11 | } 12 | 13 | func ValueRangeFromString(instr string) (ValueRange, error) { 14 | // is the value a range 15 | minmax := regexp.MustCompile(`^(\d+)-(\d+)$`).FindAllStringSubmatch(instr, -1) 16 | if minmax != nil { 17 | // yes 18 | minval, err := strconv.ParseInt(minmax[0][1], 10, 0) 19 | if err != nil { 20 | return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][1]) 21 | } 22 | maxval, err := strconv.ParseInt(minmax[0][2], 10, 0) 23 | if err != nil { 24 | return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][2]) 25 | } 26 | if minval >= maxval { 27 | return ValueRange{}, fmt.Errorf("Minimum has to be smaller than maximum") 28 | } 29 | return ValueRange{minval, maxval}, nil 30 | } else { 31 | // no, a single value or something else 32 | intval, err := strconv.ParseInt(instr, 10, 0) 33 | if err != nil { 34 | return ValueRange{}, fmt.Errorf("Invalid value: %s", instr) 35 | } 36 | return ValueRange{intval, intval}, nil 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Joona Hoikkala 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /pkg/filter/size_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewSizeFilter(t *testing.T) { 11 | f, _ := NewSizeFilter("1,2,3,444,5-90") 12 | sizeRepr := f.Repr() 13 | if !strings.Contains(sizeRepr, "1,2,3,444,5-90") { 14 | t.Errorf("Size filter was expected to have 5 values") 15 | } 16 | } 17 | 18 | func TestNewSizeFilterError(t *testing.T) { 19 | _, err := NewSizeFilter("invalid") 20 | if err == nil { 21 | t.Errorf("Was expecting an error from errenous input data") 22 | } 23 | } 24 | 25 | func TestFiltering(t *testing.T) { 26 | f, _ := NewSizeFilter("1,2,3,5-90,444") 27 | for i, test := range []struct { 28 | input int64 29 | output bool 30 | }{ 31 | {1, true}, 32 | {2, true}, 33 | {3, true}, 34 | {4, false}, 35 | {5, true}, 36 | {70, true}, 37 | {90, true}, 38 | {91, false}, 39 | {444, true}, 40 | } { 41 | resp := ffuf.Response{ContentLength: test.input} 42 | filterReturn, _ := f.Filter(&resp) 43 | if filterReturn != test.output { 44 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /pkg/filter/status_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewStatusFilter(t *testing.T) { 11 | f, _ := NewStatusFilter("200,301,400-410,500") 12 | statusRepr := f.Repr() 13 | if !strings.Contains(statusRepr, "200,301,400-410,500") { 14 | t.Errorf("Status filter was expected to have 4 values") 15 | } 16 | } 17 | 18 | func TestNewStatusFilterError(t *testing.T) { 19 | _, err := NewStatusFilter("invalid") 20 | if err == nil { 21 | t.Errorf("Was expecting an error from errenous input data") 22 | } 23 | } 24 | 25 | func TestStatusFiltering(t *testing.T) { 26 | f, _ := NewStatusFilter("200,301,400-498,500") 27 | for i, test := range []struct { 28 | input int64 29 | output bool 30 | }{ 31 | {200, true}, 32 | {301, true}, 33 | {500, true}, 34 | {4, false}, 35 | {399, false}, 36 | {400, true}, 37 | {444, true}, 38 | {498, true}, 39 | {499, false}, 40 | {302, false}, 41 | } { 42 | resp := ffuf.Response{StatusCode: test.input} 43 | filterReturn, _ := f.Filter(&resp) 44 | if filterReturn != test.output { 45 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /pkg/filter/time_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "testing" 5 | "time" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewTimeFilter(t *testing.T) { 11 | fp, _ := NewTimeFilter(">100") 12 | 13 | f := fp.(*TimeFilter) 14 | 15 | if !f.gt || f.lt { 16 | t.Errorf("Time filter was expected to have greater-than") 17 | } 18 | 19 | if f.ms != 100 { 20 | t.Errorf("Time filter was expected to have ms == 100") 21 | } 22 | } 23 | 24 | func TestNewTimeFilterError(t *testing.T) { 25 | _, err := NewTimeFilter("100>") 26 | if err == nil { 27 | t.Errorf("Was expecting an error from errenous input data") 28 | } 29 | } 
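// Illustrative sketch, not part of the original time_test.go: as these tests and
// pkg/filter/time.go later in this tree suggest, the value passed to NewTimeFilter
// must start with a single ">" or "<" followed by an integer number of milliseconds,
// which is why ">100" parses above while "100>" is rejected. A hedged usage example
// under that assumption:
//
//	f, _ := NewTimeFilter(">100")               // match responses slower than 100 ms
//	slow := ffuf.Response{Time: 250 * time.Millisecond}
//	matched, _ := f.Filter(&slow)               // true: 250 ms > 100 ms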
30 | 31 | func TestTimeFiltering(t *testing.T) { 32 | f, _ := NewTimeFilter(">100") 33 | 34 | for i, test := range []struct { 35 | input int64 36 | output bool 37 | }{ 38 | {1342, true}, 39 | {2000, true}, 40 | {35000, true}, 41 | {1458700, true}, 42 | {99, false}, 43 | {2, false}, 44 | } { 45 | resp := ffuf.Response{ 46 | Data: []byte("dahhhhhtaaaaa"), 47 | Time: time.Duration(test.input * int64(time.Millisecond)), 48 | } 49 | filterReturn, _ := f.Filter(&resp) 50 | if filterReturn != test.output { 51 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /pkg/filter/regexp_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewRegexpFilter(t *testing.T) { 11 | f, _ := NewRegexpFilter("s([a-z]+)arch") 12 | statusRepr := f.Repr() 13 | if !strings.Contains(statusRepr, "s([a-z]+)arch") { 14 | t.Errorf("Status filter was expected to have a regexp value") 15 | } 16 | } 17 | 18 | func TestNewRegexpFilterError(t *testing.T) { 19 | _, err := NewRegexpFilter("r((") 20 | if err == nil { 21 | t.Errorf("Was expecting an error from errenous input data") 22 | } 23 | } 24 | 25 | func TestRegexpFiltering(t *testing.T) { 26 | f, _ := NewRegexpFilter("s([a-z]+)arch") 27 | for i, test := range []struct { 28 | input string 29 | output bool 30 | }{ 31 | {"search", true}, 32 | {"text and search", true}, 33 | {"sbarch in beginning", true}, 34 | {"midd scarch le", true}, 35 | {"s1arch", false}, 36 | {"invalid", false}, 37 | } { 38 | inp := make(map[string][]byte) 39 | resp := ffuf.Response{ 40 | Data: []byte(test.input), 41 | Request: &ffuf.Request{ 42 | Input: inp, 43 | }, 44 | } 45 | filterReturn, _ := f.Filter(&resp) 46 | if filterReturn != test.output { 47 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) 48 | } 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /pkg/filter/lines_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewLineFilter(t *testing.T) { 11 | f, _ := NewLineFilter("200,301,400-410,500") 12 | linesRepr := f.Repr() 13 | if !strings.Contains(linesRepr, "200,301,400-410,500") { 14 | t.Errorf("Word filter was expected to have 4 values") 15 | } 16 | } 17 | 18 | func TestNewLineFilterError(t *testing.T) { 19 | _, err := NewLineFilter("invalid") 20 | if err == nil { 21 | t.Errorf("Was expecting an error from errenous input data") 22 | } 23 | } 24 | 25 | func TestLineFiltering(t *testing.T) { 26 | f, _ := NewLineFilter("200,301,402-450,500") 27 | for i, test := range []struct { 28 | input int64 29 | output bool 30 | }{ 31 | {200, true}, 32 | {301, true}, 33 | {500, true}, 34 | {4, false}, 35 | {444, true}, 36 | {302, false}, 37 | {401, false}, 38 | {402, true}, 39 | {450, true}, 40 | {451, false}, 41 | } { 42 | var data []string 43 | for i := int64(0); i < test.input; i++ { 44 | data = append(data, "A") 45 | } 46 | resp := ffuf.Response{Data: []byte(strings.Join(data, "\n"))} 47 | filterReturn, _ := f.Filter(&resp) 48 | if filterReturn != test.output { 49 | t.Errorf("Filter test %d: Was expecing filter 
return value of %t but got %t", i, test.output, filterReturn) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /pkg/filter/words_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | ) 9 | 10 | func TestNewWordFilter(t *testing.T) { 11 | f, _ := NewWordFilter("200,301,400-410,500") 12 | wordsRepr := f.Repr() 13 | if !strings.Contains(wordsRepr, "200,301,400-410,500") { 14 | t.Errorf("Word filter was expected to have 4 values") 15 | } 16 | } 17 | 18 | func TestNewWordFilterError(t *testing.T) { 19 | _, err := NewWordFilter("invalid") 20 | if err == nil { 21 | t.Errorf("Was expecting an error from errenous input data") 22 | } 23 | } 24 | 25 | func TestWordFiltering(t *testing.T) { 26 | f, _ := NewWordFilter("200,301,402-450,500") 27 | for i, test := range []struct { 28 | input int64 29 | output bool 30 | }{ 31 | {200, true}, 32 | {301, true}, 33 | {500, true}, 34 | {4, false}, 35 | {444, true}, 36 | {302, false}, 37 | {401, false}, 38 | {402, true}, 39 | {450, true}, 40 | {451, false}, 41 | } { 42 | var data []string 43 | for i := int64(0); i < test.input; i++ { 44 | data = append(data, "A") 45 | } 46 | resp := ffuf.Response{Data: []byte(strings.Join(data, " "))} 47 | filterReturn, _ := f.Filter(&resp) 48 | if filterReturn != test.output { 49 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /pkg/filter/filter_test.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "testing" 5 | ) 6 | 7 | func TestNewFilterByName(t *testing.T) { 8 | scf, _ := NewFilterByName("status", "200") 9 | if _, ok := scf.(*StatusFilter); !ok { 10 | t.Errorf("Was expecting statusfilter") 11 | } 12 | 13 | szf, _ := NewFilterByName("size", "200") 14 | if _, ok := szf.(*SizeFilter); !ok { 15 | t.Errorf("Was expecting sizefilter") 16 | } 17 | 18 | wf, _ := NewFilterByName("word", "200") 19 | if _, ok := wf.(*WordFilter); !ok { 20 | t.Errorf("Was expecting wordfilter") 21 | } 22 | 23 | lf, _ := NewFilterByName("line", "200") 24 | if _, ok := lf.(*LineFilter); !ok { 25 | t.Errorf("Was expecting linefilter") 26 | } 27 | 28 | ref, _ := NewFilterByName("regexp", "200") 29 | if _, ok := ref.(*RegexpFilter); !ok { 30 | t.Errorf("Was expecting regexpfilter") 31 | } 32 | 33 | tf, _ := NewFilterByName("time", "200") 34 | if _, ok := tf.(*TimeFilter); !ok { 35 | t.Errorf("Was expecting timefilter") 36 | } 37 | } 38 | 39 | func TestNewFilterByNameError(t *testing.T) { 40 | _, err := NewFilterByName("status", "invalid") 41 | if err == nil { 42 | t.Errorf("Was expecing an error") 43 | } 44 | } 45 | 46 | func TestNewFilterByNameNotFound(t *testing.T) { 47 | _, err := NewFilterByName("nonexistent", "invalid") 48 | if err == nil { 49 | t.Errorf("Was expecing an error with invalid filter name") 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /pkg/filter/regex.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "regexp" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type RegexpFilter struct { 13 | Value *regexp.Regexp 14 | valueRaw 
string 15 | } 16 | 17 | func NewRegexpFilter(value string) (ffuf.FilterProvider, error) { 18 | re, err := regexp.Compile(value) 19 | if err != nil { 20 | return &RegexpFilter{}, fmt.Errorf("Regexp filter or matcher (-fr / -mr): invalid value: %s", value) 21 | } 22 | return &RegexpFilter{Value: re, valueRaw: value}, nil 23 | } 24 | 25 | func (f *RegexpFilter) MarshalJSON() ([]byte, error) { 26 | return json.Marshal(&struct { 27 | Value string `json:"value"` 28 | }{ 29 | Value: f.valueRaw, 30 | }) 31 | } 32 | 33 | func (f *RegexpFilter) Filter(response *ffuf.Response) (bool, error) { 34 | matchheaders := "" 35 | for k, v := range response.Headers { 36 | for _, iv := range v { 37 | matchheaders += k + ": " + iv + "\r\n" 38 | } 39 | } 40 | matchdata := []byte(matchheaders) 41 | matchdata = append(matchdata, response.Data...) 42 | pattern := f.valueRaw 43 | for keyword, inputitem := range response.Request.Input { 44 | pattern = strings.ReplaceAll(pattern, keyword, regexp.QuoteMeta(string(inputitem))) 45 | } 46 | matched, err := regexp.Match(pattern, matchdata) 47 | if err != nil { 48 | return false, nil 49 | } 50 | return matched, nil 51 | } 52 | 53 | func (f *RegexpFilter) Repr() string { 54 | return f.valueRaw 55 | } 56 | 57 | func (f *RegexpFilter) ReprVerbose() string { 58 | return fmt.Sprintf("Regexp: %s", f.valueRaw) 59 | } 60 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/adrg/xdg v0.4.0 h1:RzRqFcjH4nE5C6oTAxhBtoE2IRyjBSa62SCbyPidvls= 2 | github.com/adrg/xdg v0.4.0/go.mod h1:N6ag73EX4wyxeaoeHctc1mas01KZgsj5tYiAIwqJE/E= 3 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 4 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 5 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 6 | github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM= 7 | github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= 8 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 9 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 10 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 11 | github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= 12 | github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 13 | golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359 h1:2B5p2L5IfGiD7+b9BOoRMC6DgObAVZV+Fsp050NqXik= 14 | golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 15 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 16 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 17 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= 18 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 19 | -------------------------------------------------------------------------------- /pkg/filter/time.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | 
"github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type TimeFilter struct { 13 | ms int64 // milliseconds since first response byte 14 | gt bool // filter if response time is greater than 15 | lt bool // filter if response time is less than 16 | valueRaw string 17 | } 18 | 19 | func NewTimeFilter(value string) (ffuf.FilterProvider, error) { 20 | var milliseconds int64 21 | gt, lt := false, false 22 | 23 | gt = strings.HasPrefix(value, ">") 24 | lt = strings.HasPrefix(value, "<") 25 | 26 | if (!lt && !gt) || (lt && gt) { 27 | return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) 28 | } 29 | 30 | milliseconds, err := strconv.ParseInt(value[1:], 10, 64) 31 | if err != nil { 32 | return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) 33 | } 34 | return &TimeFilter{ms: milliseconds, gt: gt, lt: lt, valueRaw: value}, nil 35 | } 36 | 37 | func (f *TimeFilter) MarshalJSON() ([]byte, error) { 38 | return json.Marshal(&struct { 39 | Value string `json:"value"` 40 | }{ 41 | Value: f.valueRaw, 42 | }) 43 | } 44 | 45 | func (f *TimeFilter) Filter(response *ffuf.Response) (bool, error) { 46 | if f.gt { 47 | if response.Time.Milliseconds() > f.ms { 48 | return true, nil 49 | } 50 | 51 | } else if f.lt { 52 | if response.Time.Milliseconds() < f.ms { 53 | return true, nil 54 | } 55 | } 56 | 57 | return false, nil 58 | } 59 | 60 | func (f *TimeFilter) Repr() string { 61 | return f.valueRaw 62 | } 63 | 64 | func (f *TimeFilter) ReprVerbose() string { 65 | return fmt.Sprintf("Response time: %s", f.Repr()) 66 | } 67 | -------------------------------------------------------------------------------- /pkg/output/file_md.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "html/template" 5 | "os" 6 | "time" 7 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" 9 | ) 10 | 11 | const ( 12 | markdownTemplate = `# FFUF Report 13 | 14 | Command line : ` + "`{{.CommandLine}}`" + ` 15 | Time: ` + "{{ .Time }}" + ` 16 | 17 | {{ range .Keys }}| {{ . 
}} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile | 18 | {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | 19 | {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} | 20 | {{end}}` // The template format is not pretty but follows the markdown guide 21 | ) 22 | 23 | func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error { 24 | ti := time.Now() 25 | 26 | keywords := make([]string, 0) 27 | for _, inputprovider := range config.InputProviders { 28 | keywords = append(keywords, inputprovider.Keyword) 29 | } 30 | 31 | outMD := htmlFileOutput{ 32 | CommandLine: config.CommandLine, 33 | Time: ti.Format(time.RFC3339), 34 | Results: res, 35 | Keys: keywords, 36 | } 37 | 38 | f, err := os.Create(filename) 39 | if err != nil { 40 | return err 41 | } 42 | defer f.Close() 43 | 44 | templateName := "output.md" 45 | t := template.New(templateName).Delims("{{", "}}") 46 | _, err = t.Parse(markdownTemplate) 47 | if err != nil { 48 | return err 49 | } 50 | err = t.Execute(f, outMD) 51 | return err 52 | } 53 | -------------------------------------------------------------------------------- /pkg/filter/size.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type SizeFilter struct { 13 | Value []ffuf.ValueRange 14 | } 15 | 16 | func NewSizeFilter(value string) (ffuf.FilterProvider, error) { 17 | var intranges []ffuf.ValueRange 18 | for _, sv := range strings.Split(value, ",") { 19 | vr, err := ffuf.ValueRangeFromString(sv) 20 | if err != nil { 21 | return &SizeFilter{}, fmt.Errorf("Size filter or matcher (-fs / -ms): invalid value: %s", sv) 22 | } 23 | 24 | intranges = append(intranges, vr) 25 | } 26 | return &SizeFilter{Value: intranges}, nil 27 | } 28 | 29 | func (f *SizeFilter) MarshalJSON() ([]byte, error) { 30 | value := make([]string, 0) 31 | for _, v := range f.Value { 32 | if v.Min == v.Max { 33 | value = append(value, strconv.FormatInt(v.Min, 10)) 34 | } else { 35 | value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max)) 36 | } 37 | } 38 | return json.Marshal(&struct { 39 | Value string `json:"value"` 40 | }{ 41 | Value: strings.Join(value, ","), 42 | }) 43 | } 44 | 45 | func (f *SizeFilter) Filter(response *ffuf.Response) (bool, error) { 46 | for _, iv := range f.Value { 47 | if iv.Min <= response.ContentLength && response.ContentLength <= iv.Max { 48 | return true, nil 49 | } 50 | } 51 | return false, nil 52 | } 53 | 54 | func (f *SizeFilter) Repr() string { 55 | var strval []string 56 | for _, iv := range f.Value { 57 | if iv.Min == iv.Max { 58 | strval = append(strval, strconv.Itoa(int(iv.Min))) 59 | } else { 60 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) 61 | } 62 | } 63 | return strings.Join(strval, ",") 64 | } 65 | 66 | func (f *SizeFilter) ReprVerbose() string { 67 | return fmt.Sprintf("Response size: %s", f.Repr()) 68 | } 69 | -------------------------------------------------------------------------------- 
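An illustrative usage sketch, not part of the ffuf source tree: the size filter above matches on Response.ContentLength against comma-separated values and dash-delimited ranges, as exercised in pkg/filter/size_test.go. A minimal example, assuming the constructor and Filter signature shown above:

package main

import (
	"fmt"

	"github.com/ffuf/ffuf/pkg/ffuf"
	"github.com/ffuf/ffuf/pkg/filter"
)

func main() {
	// "1,2,3,444,5-90" mixes single sizes with the range 5-90, as in size_test.go
	f, err := filter.NewSizeFilter("1,2,3,444,5-90")
	if err != nil {
		panic(err)
	}
	resp := ffuf.Response{ContentLength: 70}
	matched, _ := f.Filter(&resp)
	fmt.Println(matched) // true: 70 falls inside the 5-90 range
}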
/pkg/filter/lines.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type LineFilter struct { 13 | Value []ffuf.ValueRange 14 | } 15 | 16 | func NewLineFilter(value string) (ffuf.FilterProvider, error) { 17 | var intranges []ffuf.ValueRange 18 | for _, sv := range strings.Split(value, ",") { 19 | vr, err := ffuf.ValueRangeFromString(sv) 20 | if err != nil { 21 | return &LineFilter{}, fmt.Errorf("Line filter or matcher (-fl / -ml): invalid value: %s", sv) 22 | } 23 | intranges = append(intranges, vr) 24 | } 25 | return &LineFilter{Value: intranges}, nil 26 | } 27 | 28 | func (f *LineFilter) MarshalJSON() ([]byte, error) { 29 | value := make([]string, 0) 30 | for _, v := range f.Value { 31 | if v.Min == v.Max { 32 | value = append(value, strconv.FormatInt(v.Min, 10)) 33 | } else { 34 | value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max)) 35 | } 36 | } 37 | return json.Marshal(&struct { 38 | Value string `json:"value"` 39 | }{ 40 | Value: strings.Join(value, ","), 41 | }) 42 | } 43 | 44 | func (f *LineFilter) Filter(response *ffuf.Response) (bool, error) { 45 | linesSize := len(strings.Split(string(response.Data), "\n")) 46 | for _, iv := range f.Value { 47 | if iv.Min <= int64(linesSize) && int64(linesSize) <= iv.Max { 48 | return true, nil 49 | } 50 | } 51 | return false, nil 52 | } 53 | 54 | func (f *LineFilter) Repr() string { 55 | var strval []string 56 | for _, iv := range f.Value { 57 | if iv.Min == iv.Max { 58 | strval = append(strval, strconv.Itoa(int(iv.Min))) 59 | } else { 60 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) 61 | } 62 | } 63 | return strings.Join(strval, ",") 64 | } 65 | 66 | func (f *LineFilter) ReprVerbose() string { 67 | return fmt.Sprintf("Response lines: %s", f.Repr()) 68 | } 69 | -------------------------------------------------------------------------------- /pkg/filter/words.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type WordFilter struct { 13 | Value []ffuf.ValueRange 14 | } 15 | 16 | func NewWordFilter(value string) (ffuf.FilterProvider, error) { 17 | var intranges []ffuf.ValueRange 18 | for _, sv := range strings.Split(value, ",") { 19 | vr, err := ffuf.ValueRangeFromString(sv) 20 | if err != nil { 21 | return &WordFilter{}, fmt.Errorf("Word filter or matcher (-fw / -mw): invalid value: %s", sv) 22 | } 23 | intranges = append(intranges, vr) 24 | } 25 | return &WordFilter{Value: intranges}, nil 26 | } 27 | 28 | func (f *WordFilter) MarshalJSON() ([]byte, error) { 29 | value := make([]string, 0) 30 | for _, v := range f.Value { 31 | if v.Min == v.Max { 32 | value = append(value, strconv.FormatInt(v.Min, 10)) 33 | } else { 34 | value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max)) 35 | } 36 | } 37 | return json.Marshal(&struct { 38 | Value string `json:"value"` 39 | }{ 40 | Value: strings.Join(value, ","), 41 | }) 42 | } 43 | 44 | func (f *WordFilter) Filter(response *ffuf.Response) (bool, error) { 45 | wordsSize := len(strings.Split(string(response.Data), " ")) 46 | for _, iv := range f.Value { 47 | if iv.Min <= int64(wordsSize) && int64(wordsSize) <= iv.Max { 48 | return true, nil 49 | } 50 | } 51 | return false, nil 52 | } 53 | 54 | func (f 
*WordFilter) Repr() string { 55 | var strval []string 56 | for _, iv := range f.Value { 57 | if iv.Min == iv.Max { 58 | strval = append(strval, strconv.Itoa(int(iv.Min))) 59 | } else { 60 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) 61 | } 62 | } 63 | return strings.Join(strval, ",") 64 | } 65 | 66 | func (f *WordFilter) ReprVerbose() string { 67 | return fmt.Sprintf("Response words: %s", f.Repr()) 68 | } 69 | -------------------------------------------------------------------------------- /pkg/ffuf/optrange.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | ) 9 | 10 | //optRange stores either a single float, in which case the value is stored in min and IsRange is false, 11 | //or a range of floats, in which case IsRange is true 12 | type optRange struct { 13 | Min float64 14 | Max float64 15 | IsRange bool 16 | HasDelay bool 17 | } 18 | 19 | type optRangeJSON struct { 20 | Value string `json:"value"` 21 | } 22 | 23 | func (o *optRange) MarshalJSON() ([]byte, error) { 24 | value := "" 25 | if o.Min == o.Max { 26 | value = fmt.Sprintf("%.2f", o.Min) 27 | } else { 28 | value = fmt.Sprintf("%.2f-%.2f", o.Min, o.Max) 29 | } 30 | return json.Marshal(&optRangeJSON{ 31 | Value: value, 32 | }) 33 | } 34 | 35 | func (o *optRange) UnmarshalJSON(b []byte) error { 36 | var inc optRangeJSON 37 | err := json.Unmarshal(b, &inc) 38 | if err != nil { 39 | return err 40 | } 41 | return o.Initialize(inc.Value) 42 | } 43 | 44 | //Initialize sets up the optRange from string value 45 | func (o *optRange) Initialize(value string) error { 46 | var err, err2 error 47 | d := strings.Split(value, "-") 48 | if len(d) > 2 { 49 | return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"") 50 | } else if len(d) == 2 { 51 | o.IsRange = true 52 | o.HasDelay = true 53 | o.Min, err = strconv.ParseFloat(d[0], 64) 54 | o.Max, err2 = strconv.ParseFloat(d[1], 64) 55 | if err != nil || err2 != nil { 56 | return fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5") 57 | } 58 | } else if len(value) > 0 { 59 | o.IsRange = false 60 | o.HasDelay = true 61 | o.Min, err = strconv.ParseFloat(value, 64) 62 | if err != nil { 63 | return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"") 64 | } 65 | } 66 | return nil 67 | } 68 | -------------------------------------------------------------------------------- /pkg/output/file_csv.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "encoding/base64" 5 | "encoding/csv" 6 | "os" 7 | "strconv" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "duration", "resultfile"} 13 | 14 | func writeCSV(filename string, config *ffuf.Config, res []ffuf.Result, encode bool) error { 15 | header := make([]string, 0) 16 | f, err := os.Create(filename) 17 | if err != nil { 18 | return err 19 | } 20 | defer f.Close() 21 | 22 | w := csv.NewWriter(f) 23 | defer w.Flush() 24 | 25 | for _, inputprovider := range config.InputProviders { 26 | header = append(header, inputprovider.Keyword) 27 | } 28 | header = append(header, staticheaders...) 
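// Clarifying comment, not in the original file: at this point the header row written
// below has one column per configured input keyword (for example FUZZ), followed by
// the staticheaders defined above: url, redirectlocation, position, status_code,
// content_length, content_words, content_lines, content_type, duration, resultfile.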
29 | 30 | if err := w.Write(header); err != nil { 31 | return err 32 | } 33 | for _, r := range res { 34 | if encode { 35 | inputs := make(map[string][]byte, len(r.Input)) 36 | for k, v := range r.Input { 37 | inputs[k] = []byte(base64encode(v)) 38 | } 39 | r.Input = inputs 40 | } 41 | 42 | err := w.Write(toCSV(r)) 43 | if err != nil { 44 | return err 45 | } 46 | } 47 | return nil 48 | } 49 | 50 | func base64encode(in []byte) string { 51 | return base64.StdEncoding.EncodeToString(in) 52 | } 53 | 54 | func toCSV(r ffuf.Result) []string { 55 | res := make([]string, 0) 56 | for _, v := range r.Input { 57 | res = append(res, string(v)) 58 | } 59 | res = append(res, r.Url) 60 | res = append(res, r.RedirectLocation) 61 | res = append(res, strconv.Itoa(r.Position)) 62 | res = append(res, strconv.FormatInt(r.StatusCode, 10)) 63 | res = append(res, strconv.FormatInt(r.ContentLength, 10)) 64 | res = append(res, strconv.FormatInt(r.ContentWords, 10)) 65 | res = append(res, strconv.FormatInt(r.ContentLines, 10)) 66 | res = append(res, r.ContentType) 67 | res = append(res, r.Duration.String()) 68 | res = append(res, r.ResultFile) 69 | return res 70 | } 71 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | # Contributors 2 | 3 | * [adamtlangley](https://github.com/adamtlangley) 4 | * [adilsoybali](https://github.com/adilsoybali) 5 | * [AverageSecurityGuy](https://github.com/averagesecurityguy) 6 | * [bp0](https://github.com/bp0lr) 7 | * [bjhulst](https://github.com/bjhulst) 8 | * [bsysop](https://twitter.com/bsysop) 9 | * [ccsplit](https://github.com/ccsplit) 10 | * [choket](https://github.com/choket) 11 | * [codingo](https://github.com/codingo) 12 | * [c_sto](https://github.com/c-sto) 13 | * [Damian89](https://github.com/Damian89) 14 | * [Daviey](https://github.com/Daviey) 15 | * [delic](https://github.com/delic) 16 | * [denandz](https://github.com/denandz) 17 | * [erbbysam](https://github.com/erbbysam) 18 | * [eur0pa](https://github.com/eur0pa) 19 | * [fabiobauer](https://github.com/fabiobauer) 20 | * [fang0654](https://github.com/fang0654) 21 | * [Hazegard](https://github.com/Hazegard) 22 | * [helpermika](https://github.com/helpermika) 23 | * [h1x](https://github.com/h1x-lnx) 24 | * [Ice3man543](https://github.com/Ice3man543) 25 | * [JamTookTheBait](https://github.com/JamTookTheBait) 26 | * [jimen0](https://github.com/jimen0) 27 | * [joohoi](https://github.com/joohoi) 28 | * [JoshuaMulliken](https://github.com/JoshuaMulliken) 29 | * [jsgv](https://github.com/jsgv) 30 | * [justinsteven](https://github.com/justinsteven) 31 | * [jvesiluoma](https://github.com/jvesiluoma) 32 | * [Kiblyn11](https://github.com/Kiblyn11) 33 | * [l4yton](https://github.com/l4yton) 34 | * [lc](https://github.com/lc) 35 | * [mprencipe](https://github.com/mprencipe) 36 | * [nnwakelam](https://twitter.com/nnwakelam) 37 | * [noraj](https://pwn.by/noraj) 38 | * [oh6hay](https://github.com/oh6hay) 39 | * [penguinxoxo](https://github.com/penguinxoxo) 40 | * [putsi](https://github.com/putsi) 41 | * [SakiiR](https://github.com/SakiiR) 42 | * [seblw](https://github.com/seblw) 43 | * [Shaked](https://github.com/Shaked) 44 | * [Skyehopper](https://github.com/Skyehopper) 45 | * [SolomonSklash](https://github.com/SolomonSklash) 46 | * [TomNomNom](https://github.com/tomnomnom) 47 | * [xfgusta](https://github.com/xfgusta) 48 | 49 | 
-------------------------------------------------------------------------------- /ffufrc.example: -------------------------------------------------------------------------------- 1 | # This is an example of a ffuf configuration file. 2 | # https://github.com/ffuf/ffuf 3 | 4 | [http] 5 | cookies = [ 6 | "cookiename=cookievalue" 7 | ] 8 | data = "post=data&key=value" 9 | followredirects = false 10 | headers = [ 11 | "X-Header-Name: value", 12 | "X-Another-Header: value" 13 | ] 14 | ignorebody = false 15 | method = "GET" 16 | proxyurl = "http://127.0.0.1:8080" 17 | recursion = false 18 | recursion_depth = 0 19 | recursion_strategy = "default" 20 | replayproxyurl = "http://127.0.0.1:8080" 21 | timeout = 10 22 | url = "https://example.org/FUZZ" 23 | 24 | [general] 25 | autocalibration = false 26 | autocalibrationstrings = [ 27 | "randomtest", 28 | "admin" 29 | ] 30 | autocalibration_strategy = "basic" 31 | autocalibration_keyword = "FUZZ" 32 | autocalibration_perhost = false 33 | colors = false 34 | delay = "" 35 | maxtime = 0 36 | maxtimejob = 0 37 | noninteractive = false 38 | quiet = false 39 | rate = 0 40 | stopon403 = false 41 | stoponall = false 42 | stoponerrors = false 43 | threads = 40 44 | verbose = false 45 | json = false 46 | 47 | [input] 48 | dirsearchcompat = false 49 | extensions = "" 50 | ignorewordlistcomments = false 51 | inputmode = "clusterbomb" 52 | inputnum = 100 53 | inputcommands = [ 54 | "seq 1 100:CUSTOMKEYWORD" 55 | ] 56 | request = "requestfile.txt" 57 | requestproto = "https" 58 | wordlists = [ 59 | "/path/to/wordlist:FUZZ", 60 | "/path/to/hostlist:HOST" 61 | ] 62 | 63 | 64 | [output] 65 | debuglog = "debug.log" 66 | outputdirectory = "/tmp/rawoutputdir" 67 | outputfile = "output.json" 68 | outputformat = "json" 69 | outputcreateemptyfile = false 70 | 71 | [filter] 72 | mode = "or" 73 | lines = "" 74 | regexp = "" 75 | size = "" 76 | status = "" 77 | time = "" 78 | words = "" 79 | 80 | [matcher] 81 | mode = "or" 82 | lines = "" 83 | regexp = "" 84 | size = "" 85 | status = "200,204,301,302,307,401,403,405,500" 86 | time = "" 87 | words = "" 88 | -------------------------------------------------------------------------------- /pkg/filter/status.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | const AllStatuses = 0 13 | 14 | type StatusFilter struct { 15 | Value []ffuf.ValueRange 16 | } 17 | 18 | func NewStatusFilter(value string) (ffuf.FilterProvider, error) { 19 | var intranges []ffuf.ValueRange 20 | for _, sv := range strings.Split(value, ",") { 21 | if sv == "all" { 22 | intranges = append(intranges, ffuf.ValueRange{Min: AllStatuses, Max: AllStatuses}) 23 | } else { 24 | vr, err := ffuf.ValueRangeFromString(sv) 25 | if err != nil { 26 | return &StatusFilter{}, fmt.Errorf("Status filter or matcher (-fc / -mc): invalid value %s", sv) 27 | } 28 | intranges = append(intranges, vr) 29 | } 30 | } 31 | return &StatusFilter{Value: intranges}, nil 32 | } 33 | 34 | func (f *StatusFilter) MarshalJSON() ([]byte, error) { 35 | value := make([]string, 0) 36 | for _, v := range f.Value { 37 | if v.Min == 0 && v.Max == 0 { 38 | value = append(value, "all") 39 | } else { 40 | if v.Min == v.Max { 41 | value = append(value, strconv.FormatInt(v.Min, 10)) 42 | } else { 43 | value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max)) 44 | } 45 | } 46 | } 47 | return json.Marshal(&struct { 48 | Value 
string `json:"value"` 49 | }{ 50 | Value: strings.Join(value, ","), 51 | }) 52 | } 53 | 54 | func (f *StatusFilter) Filter(response *ffuf.Response) (bool, error) { 55 | for _, iv := range f.Value { 56 | if iv.Min == AllStatuses && iv.Max == AllStatuses { 57 | // Handle the "all" case 58 | return true, nil 59 | } 60 | if iv.Min <= response.StatusCode && response.StatusCode <= iv.Max { 61 | return true, nil 62 | } 63 | } 64 | return false, nil 65 | } 66 | 67 | func (f *StatusFilter) Repr() string { 68 | var strval []string 69 | for _, iv := range f.Value { 70 | if iv.Min == AllStatuses && iv.Max == AllStatuses { 71 | strval = append(strval, "all") 72 | } else if iv.Min == iv.Max { 73 | strval = append(strval, strconv.Itoa(int(iv.Min))) 74 | } else { 75 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) 76 | } 77 | } 78 | return strings.Join(strval, ",") 79 | } 80 | 81 | func (f *StatusFilter) ReprVerbose() string { 82 | return fmt.Sprintf("Response status: %s", f.Repr()) 83 | } 84 | -------------------------------------------------------------------------------- /pkg/ffuf/response.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "net/http" 5 | "net/url" 6 | "time" 7 | ) 8 | 9 | // Response struct holds the meaningful data returned from request and is meant for passing to filters 10 | type Response struct { 11 | StatusCode int64 12 | Headers map[string][]string 13 | Data []byte 14 | ContentLength int64 15 | ContentWords int64 16 | ContentLines int64 17 | ContentType string 18 | Cancelled bool 19 | Request *Request 20 | Raw string 21 | ResultFile string 22 | Time time.Duration 23 | } 24 | 25 | // GetRedirectLocation returns the redirect location for a 3xx redirect HTTP response 26 | func (resp *Response) GetRedirectLocation(absolute bool) string { 27 | 28 | redirectLocation := "" 29 | if resp.StatusCode >= 300 && resp.StatusCode <= 399 { 30 | if loc, ok := resp.Headers["Location"]; ok { 31 | if len(loc) > 0 { 32 | redirectLocation = loc[0] 33 | } 34 | } 35 | } 36 | 37 | if absolute { 38 | redirectUrl, err := url.Parse(redirectLocation) 39 | if err != nil { 40 | return redirectLocation 41 | } 42 | baseUrl, err := url.Parse(resp.Request.Url) 43 | if err != nil { 44 | return redirectLocation 45 | } 46 | if redirectUrl.IsAbs() && UrlEqual(redirectUrl, baseUrl) { 47 | redirectLocation = redirectUrl.Scheme + "://" + 48 | baseUrl.Host + redirectUrl.Path 49 | } else { 50 | redirectLocation = baseUrl.ResolveReference(redirectUrl).String() 51 | } 52 | } 53 | 54 | return redirectLocation 55 | } 56 | 57 | func UrlEqual(url1, url2 *url.URL) bool { 58 | if url1.Hostname() != url2.Hostname() { 59 | return false 60 | } 61 | if url1.Scheme != url2.Scheme { 62 | return false 63 | } 64 | p1, p2 := getUrlPort(url1), getUrlPort(url2) 65 | return p1 == p2 66 | } 67 | 68 | func getUrlPort(url *url.URL) string { 69 | var portMap = map[string]string{ 70 | "http": "80", 71 | "https": "443", 72 | } 73 | p := url.Port() 74 | if p == "" { 75 | p = portMap[url.Scheme] 76 | } 77 | return p 78 | } 79 | 80 | func NewResponse(httpresp *http.Response, req *Request) Response { 81 | var resp Response 82 | resp.Request = req 83 | resp.StatusCode = int64(httpresp.StatusCode) 84 | resp.ContentType = httpresp.Header.Get("Content-Type") 85 | resp.Headers = httpresp.Header 86 | resp.Cancelled = false 87 | resp.Raw = "" 88 | resp.ResultFile = "" 89 | return resp 90 | } 91 | 
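An illustrative usage sketch, not part of the ffuf source tree: GetRedirectLocation above only inspects 3xx responses, and with absolute set to true it resolves a relative Location header against the originating request URL. A minimal example, assuming the Response and Request fields shown in this package:

package main

import (
	"fmt"

	"github.com/ffuf/ffuf/pkg/ffuf"
)

func main() {
	resp := ffuf.Response{
		StatusCode: 302,
		Headers:    map[string][]string{"Location": {"/admin/login"}},
		Request:    &ffuf.Request{Url: "http://as.df/admin"},
	}
	fmt.Println(resp.GetRedirectLocation(false)) // "/admin/login"
	fmt.Println(resp.GetRedirectLocation(true))  // "http://as.df/admin/login"
}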
-------------------------------------------------------------------------------- /pkg/input/command.go: -------------------------------------------------------------------------------- 1 | package input 2 | 3 | import ( 4 | "bytes" 5 | "os" 6 | "os/exec" 7 | "strconv" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type CommandInput struct { 13 | config *ffuf.Config 14 | count int 15 | active bool 16 | keyword string 17 | command string 18 | shell string 19 | } 20 | 21 | func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) { 22 | var cmd CommandInput 23 | cmd.active = true 24 | cmd.keyword = keyword 25 | cmd.config = conf 26 | cmd.count = 0 27 | cmd.command = value 28 | cmd.shell = SHELL_CMD 29 | 30 | if cmd.config.InputShell != "" { 31 | cmd.shell = cmd.config.InputShell 32 | } 33 | 34 | return &cmd, nil 35 | } 36 | 37 | // Keyword returns the keyword assigned to this InternalInputProvider 38 | func (c *CommandInput) Keyword() string { 39 | return c.keyword 40 | } 41 | 42 | // Position will return the current position in the input list 43 | func (c *CommandInput) Position() int { 44 | return c.count 45 | } 46 | 47 | // SetPosition will set the current position of the inputprovider 48 | func (c *CommandInput) SetPosition(pos int) { 49 | c.count = pos 50 | } 51 | 52 | // ResetPosition will reset the current position of the InternalInputProvider 53 | func (c *CommandInput) ResetPosition() { 54 | c.count = 0 55 | } 56 | 57 | // IncrementPosition increments the current position in the inputprovider 58 | func (c *CommandInput) IncrementPosition() { 59 | c.count += 1 60 | } 61 | 62 | // Next will increment the cursor position, and return a boolean telling if there's iterations left 63 | func (c *CommandInput) Next() bool { 64 | return c.count < c.config.InputNum 65 | } 66 | 67 | // Value returns the input from command stdoutput 68 | func (c *CommandInput) Value() []byte { 69 | var stdout bytes.Buffer 70 | os.Setenv("FFUF_NUM", strconv.Itoa(c.count)) 71 | cmd := exec.Command(c.shell, SHELL_ARG, c.command) 72 | cmd.Stdout = &stdout 73 | err := cmd.Run() 74 | if err != nil { 75 | return []byte("") 76 | } 77 | return stdout.Bytes() 78 | } 79 | 80 | // Total returns the size of wordlist 81 | func (c *CommandInput) Total() int { 82 | return c.config.InputNum 83 | } 84 | 85 | func (c *CommandInput) Active() bool { 86 | return c.active 87 | } 88 | 89 | func (c *CommandInput) Enable() { 90 | c.active = true 91 | } 92 | 93 | func (c *CommandInput) Disable() { 94 | c.active = false 95 | } 96 | -------------------------------------------------------------------------------- /pkg/ffuf/rate.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "container/ring" 5 | "sync" 6 | "time" 7 | ) 8 | 9 | type RateThrottle struct { 10 | rateCounter *ring.Ring 11 | Config *Config 12 | RateMutex sync.Mutex 13 | RateLimiter *time.Ticker 14 | lastAdjustment time.Time 15 | } 16 | 17 | func NewRateThrottle(conf *Config) *RateThrottle { 18 | r := &RateThrottle{ 19 | Config: conf, 20 | lastAdjustment: time.Now(), 21 | } 22 | if conf.Rate > 0 { 23 | r.rateCounter = ring.New(int(conf.Rate * 5)) 24 | } else { 25 | r.rateCounter = ring.New(conf.Threads * 5) 26 | } 27 | if conf.Rate > 0 { 28 | ratemicros := 1000000 / conf.Rate 29 | r.RateLimiter = time.NewTicker(time.Microsecond * time.Duration(ratemicros)) 30 | } else { 31 | //Million rps is probably a decent hardcoded upper speedlimit 32 | r.RateLimiter = 
time.NewTicker(time.Microsecond * 1) 33 | } 34 | return r 35 | } 36 | 37 | // CurrentRate calculates requests/second value from circular list of rate 38 | func (r *RateThrottle) CurrentRate() int64 { 39 | n := r.rateCounter.Len() 40 | lowest := int64(0) 41 | highest := int64(0) 42 | r.rateCounter.Do(func(r interface{}) { 43 | switch val := r.(type) { 44 | case int64: 45 | if lowest == 0 || val < lowest { 46 | lowest = val 47 | } 48 | if val > highest { 49 | highest = val 50 | } 51 | default: 52 | // circular list entry was nil, happens when < number_of_threads * 5 responses have been recorded. 53 | // the total number of entries is less than length of the list 54 | n -= 1 55 | } 56 | }) 57 | 58 | earliest := time.UnixMicro(lowest) 59 | latest := time.UnixMicro(highest) 60 | elapsed := latest.Sub(earliest) 61 | if n > 0 && elapsed.Milliseconds() > 1 { 62 | return int64(1000 * int64(n) / elapsed.Milliseconds()) 63 | } 64 | return 0 65 | } 66 | 67 | func (r *RateThrottle) ChangeRate(rate int) { 68 | ratemicros := 1000000 / rate 69 | r.RateLimiter.Stop() 70 | r.RateLimiter = time.NewTicker(time.Microsecond * time.Duration(ratemicros)) 71 | r.Config.Rate = int64(rate) 72 | // reset the rate counter 73 | r.rateCounter = ring.New(rate * 5) 74 | } 75 | 76 | // rateTick adds a new duration measurement tick to rate counter 77 | func (r *RateThrottle) Tick(start, end time.Time) { 78 | r.RateMutex.Lock() 79 | defer r.RateMutex.Unlock() 80 | r.rateCounter = r.rateCounter.Next() 81 | r.rateCounter.Value = end.UnixMicro() 82 | } 83 | -------------------------------------------------------------------------------- /pkg/ffuf/history.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "crypto/sha256" 5 | "encoding/json" 6 | "errors" 7 | "fmt" 8 | "os" 9 | "path/filepath" 10 | "strconv" 11 | "strings" 12 | "time" 13 | ) 14 | 15 | type ConfigOptionsHistory struct { 16 | ConfigOptions 17 | Time time.Time `json:"time"` 18 | } 19 | 20 | func WriteHistoryEntry(conf *Config) (string, error) { 21 | options := ConfigOptionsHistory{ 22 | ConfigOptions: conf.ToOptions(), 23 | Time: time.Now(), 24 | } 25 | jsonoptions, err := json.Marshal(options) 26 | if err != nil { 27 | return "", err 28 | } 29 | hashstr := calculateHistoryHash(jsonoptions) 30 | err = createConfigDir(filepath.Join(HISTORYDIR, hashstr)) 31 | if err != nil { 32 | return "", err 33 | } 34 | err = os.WriteFile(filepath.Join(HISTORYDIR, hashstr, "options"), jsonoptions, 0640) 35 | return hashstr, err 36 | } 37 | 38 | func calculateHistoryHash(options []byte) string { 39 | return fmt.Sprintf("%x", sha256.Sum256(options)) 40 | } 41 | 42 | func SearchHash(hash string) ([]ConfigOptionsHistory, int, error) { 43 | coptions := make([]ConfigOptionsHistory, 0) 44 | if len(hash) < 6 { 45 | return coptions, 0, errors.New("bad FFUFHASH value") 46 | } 47 | historypart := hash[0:5] 48 | position, err := strconv.ParseInt(hash[5:], 16, 32) 49 | if err != nil { 50 | return coptions, 0, errors.New("bad positional value in FFUFHASH") 51 | } 52 | all_dirs, err := os.ReadDir(HISTORYDIR) 53 | if err != nil { 54 | return coptions, 0, err 55 | } 56 | matched_dirs := make([]string, 0) 57 | for _, filename := range all_dirs { 58 | if filename.IsDir() { 59 | if strings.HasPrefix(strings.ToLower(filename.Name()), strings.ToLower(historypart)) { 60 | matched_dirs = append(matched_dirs, filename.Name()) 61 | } 62 | } 63 | } 64 | for _, dirname := range matched_dirs { 65 | copts, err := 
configFromHistory(filepath.Join(HISTORYDIR, dirname)) 66 | if err != nil { 67 | continue 68 | } 69 | coptions = append(coptions, copts) 70 | 71 | } 72 | return coptions, int(position), err 73 | } 74 | 75 | func configFromHistory(dirname string) (ConfigOptionsHistory, error) { 76 | jsonOptions, err := os.ReadFile(filepath.Join(dirname, "options")) 77 | if err != nil { 78 | return ConfigOptionsHistory{}, err 79 | } 80 | tmpOptions := ConfigOptionsHistory{} 81 | err = json.Unmarshal(jsonOptions, &tmpOptions) 82 | return tmpOptions, err 83 | /* 84 | // These are dummy values for this use case 85 | ctx, cancel := context.WithCancel(context.Background()) 86 | conf, err := ConfigFromOptions(&tmpOptions.ConfigOptions, ctx, cancel) 87 | job.Input, errs = input.NewInputProvider(conf) 88 | return conf, tmpOptions.Time, err 89 | */ 90 | } 91 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | name: "CodeQL" 7 | 8 | on: 9 | push: 10 | branches: [master] 11 | pull_request: 12 | # The branches below must be a subset of the branches above 13 | branches: [master] 14 | schedule: 15 | - cron: '0 9 * * 3' 16 | 17 | jobs: 18 | analyze: 19 | name: Analyze 20 | runs-on: ubuntu-latest 21 | 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | # Override automatic language detection by changing the below list 26 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] 27 | language: ['go'] 28 | # Learn more... 29 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection 30 | 31 | steps: 32 | - name: Checkout repository 33 | uses: actions/checkout@v2 34 | with: 35 | # We must fetch at least the immediate parents so that if this is 36 | # a pull request then we can checkout the head. 37 | fetch-depth: 2 38 | 39 | # If this run was triggered by a pull request event, then checkout 40 | # the head of the pull request instead of the merge commit. 41 | - run: git checkout HEAD^2 42 | if: ${{ github.event_name == 'pull_request' }} 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 
60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /pkg/ffuf/util.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "errors" 5 | "fmt" 6 | "math/rand" 7 | "net/url" 8 | "os" 9 | "strings" 10 | ) 11 | 12 | // used for random string generation in calibration function 13 | var chars = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ") 14 | 15 | // RandomString returns a random string of length of parameter n 16 | func RandomString(n int) string { 17 | s := make([]rune, n) 18 | for i := range s { 19 | s[i] = chars[rand.Intn(len(chars))] 20 | } 21 | return string(s) 22 | } 23 | 24 | // UniqStringSlice returns an unordered slice of unique strings. The duplicates are dropped 25 | func UniqStringSlice(inslice []string) []string { 26 | found := map[string]bool{} 27 | 28 | for _, v := range inslice { 29 | found[v] = true 30 | } 31 | ret := []string{} 32 | for k := range found { 33 | ret = append(ret, k) 34 | } 35 | return ret 36 | } 37 | 38 | // FileExists checks if the filepath exists and is not a directory. 39 | // Returns false in case it's not possible to describe the named file. 40 | func FileExists(path string) bool { 41 | md, err := os.Stat(path) 42 | if err != nil { 43 | return false 44 | } 45 | 46 | return !md.IsDir() 47 | } 48 | 49 | // RequestContainsKeyword checks if a keyword is present in any field of a request 50 | func RequestContainsKeyword(req Request, kw string) bool { 51 | if strings.Contains(req.Host, kw) { 52 | return true 53 | } 54 | if strings.Contains(req.Url, kw) { 55 | return true 56 | } 57 | if strings.Contains(req.Method, kw) { 58 | return true 59 | } 60 | if strings.Contains(string(req.Data), kw) { 61 | return true 62 | } 63 | for k, v := range req.Headers { 64 | if strings.Contains(k, kw) || strings.Contains(v, kw) { 65 | return true 66 | } 67 | } 68 | return false 69 | } 70 | 71 | // HostURLFromRequest gets a host + path without the filename or last part of the URL path 72 | func HostURLFromRequest(req Request) string { 73 | u, _ := url.Parse(req.Url) 74 | u.Host = req.Host 75 | pathparts := strings.Split(u.Path, "/") 76 | trimpath := strings.TrimSpace(strings.Join(pathparts[:len(pathparts)-1], "/")) 77 | return u.Host + trimpath 78 | } 79 | 80 | // Version returns the ffuf version string 81 | func Version() string { 82 | return fmt.Sprintf("%s%s", VERSION, VERSION_APPENDIX) 83 | } 84 | 85 | func CheckOrCreateConfigDir() error { 86 | var err error 87 | err = createConfigDir(CONFIGDIR) 88 | if err != nil { 89 | return err 90 | } 91 | err = createConfigDir(HISTORYDIR) 92 | return err 93 | } 94 | 95 | func createConfigDir(path string) error { 96 | _, err := os.Stat(path) 97 | if err != nil { 98 | var pError *os.PathError 99 | if errors.As(err, &pError) { 100 | return os.MkdirAll(path, 0750) 101 | } 102 | return err 103 | } 104 | return nil 105 | } 106 | -------------------------------------------------------------------------------- /pkg/ffuf/optionsparser_test.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | 
"testing" 5 | ) 6 | 7 | func TestTemplatePresent(t *testing.T) { 8 | template := "§" 9 | 10 | headers := make(map[string]string) 11 | headers["foo"] = "§bar§" 12 | headers["omg"] = "bbq" 13 | headers["§world§"] = "Ooo" 14 | 15 | goodConf := Config{ 16 | Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", 17 | Method: "PO§ST§", 18 | Headers: headers, 19 | Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", 20 | } 21 | 22 | if !templatePresent(template, &goodConf) { 23 | t.Errorf("Expected-good config failed validation") 24 | } 25 | 26 | badConfMethod := Config{ 27 | Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", 28 | Method: "POST§", 29 | Headers: headers, 30 | Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§", 31 | } 32 | 33 | if templatePresent(template, &badConfMethod) { 34 | t.Errorf("Expected-bad config (Method) failed validation") 35 | } 36 | 37 | badConfURL := Config{ 38 | Url: "https://example.com/fooo/bar?test=§value§&order[0§]=§foo§", 39 | Method: "§POST§", 40 | Headers: headers, 41 | Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§", 42 | } 43 | 44 | if templatePresent(template, &badConfURL) { 45 | t.Errorf("Expected-bad config (URL) failed validation") 46 | } 47 | 48 | badConfData := Config{ 49 | Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", 50 | Method: "§POST§", 51 | Headers: headers, 52 | Data: "line=Can we pull back the §veil of §static§ and reach in to the source of §all§ being?&commit=§true§", 53 | } 54 | 55 | if templatePresent(template, &badConfData) { 56 | t.Errorf("Expected-bad config (Data) failed validation") 57 | } 58 | 59 | headers["kingdom"] = "§candy" 60 | 61 | badConfHeaderValue := Config{ 62 | Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", 63 | Method: "PO§ST§", 64 | Headers: headers, 65 | Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", 66 | } 67 | 68 | if templatePresent(template, &badConfHeaderValue) { 69 | t.Errorf("Expected-bad config (Header value) failed validation") 70 | } 71 | 72 | headers["kingdom"] = "candy" 73 | headers["§kingdom"] = "candy" 74 | 75 | badConfHeaderKey := Config{ 76 | Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", 77 | Method: "PO§ST§", 78 | Headers: headers, 79 | Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", 80 | } 81 | 82 | if templatePresent(template, &badConfHeaderKey) { 83 | t.Errorf("Expected-bad config (Header key) failed validation") 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /pkg/output/file_json.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "encoding/json" 5 | "os" 6 | "time" 7 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" 9 | ) 10 | 11 | type ejsonFileOutput struct { 12 | CommandLine string `json:"commandline"` 13 | Time string `json:"time"` 14 | Results []ffuf.Result `json:"results"` 15 | Config *ffuf.Config `json:"config"` 16 | } 17 | 18 | type JsonResult struct { 19 | Input map[string]string `json:"input"` 20 | Position int `json:"position"` 21 | StatusCode int64 `json:"status"` 22 | ContentLength int64 `json:"length"` 23 | ContentWords int64 `json:"words"` 24 | ContentLines int64 `json:"lines"` 25 | 
ContentType string `json:"content-type"` 26 | RedirectLocation string `json:"redirectlocation"` 27 | Duration time.Duration `json:"duration"` 28 | ResultFile string `json:"resultfile"` 29 | Url string `json:"url"` 30 | Host string `json:"host"` 31 | } 32 | 33 | type jsonFileOutput struct { 34 | CommandLine string `json:"commandline"` 35 | Time string `json:"time"` 36 | Results []JsonResult `json:"results"` 37 | Config *ffuf.Config `json:"config"` 38 | } 39 | 40 | func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { 41 | t := time.Now() 42 | outJSON := ejsonFileOutput{ 43 | CommandLine: config.CommandLine, 44 | Time: t.Format(time.RFC3339), 45 | Results: res, 46 | } 47 | 48 | outBytes, err := json.Marshal(outJSON) 49 | if err != nil { 50 | return err 51 | } 52 | err = os.WriteFile(filename, outBytes, 0644) 53 | if err != nil { 54 | return err 55 | } 56 | return nil 57 | } 58 | 59 | func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { 60 | t := time.Now() 61 | jsonRes := make([]JsonResult, 0) 62 | for _, r := range res { 63 | strinput := make(map[string]string) 64 | for k, v := range r.Input { 65 | strinput[k] = string(v) 66 | } 67 | jsonRes = append(jsonRes, JsonResult{ 68 | Input: strinput, 69 | Position: r.Position, 70 | StatusCode: r.StatusCode, 71 | ContentLength: r.ContentLength, 72 | ContentWords: r.ContentWords, 73 | ContentLines: r.ContentLines, 74 | ContentType: r.ContentType, 75 | RedirectLocation: r.RedirectLocation, 76 | Duration: r.Duration, 77 | ResultFile: r.ResultFile, 78 | Url: r.Url, 79 | Host: r.Host, 80 | }) 81 | } 82 | outJSON := jsonFileOutput{ 83 | CommandLine: config.CommandLine, 84 | Time: t.Format(time.RFC3339), 85 | Results: jsonRes, 86 | Config: config, 87 | } 88 | outBytes, err := json.Marshal(outJSON) 89 | if err != nil { 90 | return err 91 | } 92 | err = os.WriteFile(filename, outBytes, 0644) 93 | if err != nil { 94 | return err 95 | } 96 | return nil 97 | } 98 | -------------------------------------------------------------------------------- /pkg/ffuf/interfaces.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "time" 5 | ) 6 | 7 | // MatcherManager provides functions for managing matchers and filters 8 | type MatcherManager interface { 9 | SetCalibrated(calibrated bool) 10 | SetCalibratedForHost(host string, calibrated bool) 11 | AddFilter(name string, option string, replace bool) error 12 | AddPerDomainFilter(domain string, name string, option string) error 13 | RemoveFilter(name string) 14 | AddMatcher(name string, option string) error 15 | GetFilters() map[string]FilterProvider 16 | GetMatchers() map[string]FilterProvider 17 | FiltersForDomain(domain string) map[string]FilterProvider 18 | CalibratedForDomain(domain string) bool 19 | Calibrated() bool 20 | } 21 | 22 | // FilterProvider is a generic interface for both Matchers and Filters 23 | type FilterProvider interface { 24 | Filter(response *Response) (bool, error) 25 | Repr() string 26 | ReprVerbose() string 27 | } 28 | 29 | // RunnerProvider is an interface for request executors 30 | type RunnerProvider interface { 31 | Prepare(input map[string][]byte, basereq *Request) (Request, error) 32 | Execute(req *Request) (Response, error) 33 | Dump(req *Request) ([]byte, error) 34 | } 35 | 36 | // InputProvider interface handles the input data for RunnerProvider 37 | type InputProvider interface { 38 | ActivateKeywords([]string) 39 | AddProvider(InputProviderConfig) error 40 | 
Keywords() []string 41 | Next() bool 42 | Position() int 43 | SetPosition(int) 44 | Reset() 45 | Value() map[string][]byte 46 | Total() int 47 | } 48 | 49 | // InternalInputProvider interface handles providing input data to InputProvider 50 | type InternalInputProvider interface { 51 | Keyword() string 52 | Next() bool 53 | Position() int 54 | SetPosition(int) 55 | ResetPosition() 56 | IncrementPosition() 57 | Value() []byte 58 | Total() int 59 | Active() bool 60 | Enable() 61 | Disable() 62 | } 63 | 64 | // OutputProvider is responsible of providing output from the RunnerProvider 65 | type OutputProvider interface { 66 | Banner() 67 | Finalize() error 68 | Progress(status Progress) 69 | Info(infostring string) 70 | Error(errstring string) 71 | Raw(output string) 72 | Warning(warnstring string) 73 | Result(resp Response) 74 | PrintResult(res Result) 75 | SaveFile(filename, format string) error 76 | GetCurrentResults() []Result 77 | SetCurrentResults(results []Result) 78 | Reset() 79 | Cycle() 80 | } 81 | 82 | type Result struct { 83 | Input map[string][]byte `json:"input"` 84 | Position int `json:"position"` 85 | StatusCode int64 `json:"status"` 86 | ContentLength int64 `json:"length"` 87 | ContentWords int64 `json:"words"` 88 | ContentLines int64 `json:"lines"` 89 | ContentType string `json:"content-type"` 90 | RedirectLocation string `json:"redirectlocation"` 91 | Url string `json:"url"` 92 | Duration time.Duration `json:"duration"` 93 | ResultFile string `json:"resultfile"` 94 | Host string `json:"host"` 95 | HTMLColor string `json:"-"` 96 | } 97 | -------------------------------------------------------------------------------- /pkg/ffuf/configmarshaller.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "fmt" 5 | "strings" 6 | ) 7 | 8 | func (c *Config) ToOptions() ConfigOptions { 9 | o := ConfigOptions{} 10 | // HTTP options 11 | o.HTTP.Cookies = []string{} 12 | o.HTTP.Data = c.Data 13 | o.HTTP.FollowRedirects = c.FollowRedirects 14 | o.HTTP.Headers = make([]string, 0) 15 | for k, v := range c.Headers { 16 | o.HTTP.Headers = append(o.HTTP.Headers, fmt.Sprintf("%s: %s", k, v)) 17 | } 18 | o.HTTP.IgnoreBody = c.IgnoreBody 19 | o.HTTP.Method = c.Method 20 | o.HTTP.ProxyURL = c.ProxyURL 21 | o.HTTP.Recursion = c.Recursion 22 | o.HTTP.RecursionDepth = c.RecursionDepth 23 | o.HTTP.RecursionStrategy = c.RecursionStrategy 24 | o.HTTP.ReplayProxyURL = c.ReplayProxyURL 25 | o.HTTP.SNI = c.SNI 26 | o.HTTP.Timeout = c.Timeout 27 | o.HTTP.URL = c.Url 28 | o.HTTP.Http2 = c.Http2 29 | 30 | o.General.AutoCalibration = c.AutoCalibration 31 | o.General.AutoCalibrationKeyword = c.AutoCalibrationKeyword 32 | o.General.AutoCalibrationPerHost = c.AutoCalibrationPerHost 33 | o.General.AutoCalibrationStrategy = c.AutoCalibrationStrategy 34 | o.General.AutoCalibrationStrings = c.AutoCalibrationStrings 35 | o.General.Colors = c.Colors 36 | o.General.ConfigFile = "" 37 | if c.Delay.HasDelay { 38 | if c.Delay.IsRange { 39 | o.General.Delay = fmt.Sprintf("%.2f-%.2f", c.Delay.Min, c.Delay.Max) 40 | } else { 41 | o.General.Delay = fmt.Sprintf("%.2f", c.Delay.Min) 42 | } 43 | } else { 44 | o.General.Delay = "" 45 | } 46 | o.General.Json = c.Json 47 | o.General.MaxTime = c.MaxTime 48 | o.General.MaxTimeJob = c.MaxTimeJob 49 | o.General.Noninteractive = c.Noninteractive 50 | o.General.Quiet = c.Quiet 51 | o.General.Rate = int(c.Rate) 52 | o.General.StopOn403 = c.StopOn403 53 | o.General.StopOnAll = c.StopOnAll 54 | o.General.StopOnErrors = 
c.StopOnErrors 55 | o.General.Threads = c.Threads 56 | o.General.Verbose = c.Verbose 57 | 58 | o.Input.DirSearchCompat = c.DirSearchCompat 59 | o.Input.Extensions = strings.Join(c.Extensions, ",") 60 | o.Input.IgnoreWordlistComments = c.IgnoreWordlistComments 61 | o.Input.InputMode = c.InputMode 62 | o.Input.InputNum = c.InputNum 63 | o.Input.InputShell = c.InputShell 64 | o.Input.Inputcommands = []string{} 65 | for _, v := range c.InputProviders { 66 | if v.Name == "command" { 67 | o.Input.Inputcommands = append(o.Input.Inputcommands, fmt.Sprintf("%s:%s", v.Value, v.Keyword)) 68 | } 69 | } 70 | o.Input.Request = c.RequestFile 71 | o.Input.RequestProto = c.RequestProto 72 | o.Input.Wordlists = c.Wordlists 73 | 74 | o.Output.DebugLog = c.Debuglog 75 | o.Output.OutputDirectory = c.OutputDirectory 76 | o.Output.OutputFile = c.OutputFile 77 | o.Output.OutputFormat = c.OutputFormat 78 | o.Output.OutputSkipEmptyFile = c.OutputSkipEmptyFile 79 | 80 | o.Filter.Mode = c.FilterMode 81 | o.Filter.Lines = "" 82 | o.Filter.Regexp = "" 83 | o.Filter.Size = "" 84 | o.Filter.Status = "" 85 | o.Filter.Time = "" 86 | o.Filter.Words = "" 87 | for name, filter := range c.MatcherManager.GetFilters() { 88 | switch name { 89 | case "line": 90 | o.Filter.Lines = filter.Repr() 91 | case "regexp": 92 | o.Filter.Regexp = filter.Repr() 93 | case "size": 94 | o.Filter.Size = filter.Repr() 95 | case "status": 96 | o.Filter.Status = filter.Repr() 97 | case "time": 98 | o.Filter.Time = filter.Repr() 99 | case "words": 100 | o.Filter.Words = filter.Repr() 101 | } 102 | } 103 | o.Matcher.Mode = c.MatcherMode 104 | o.Matcher.Lines = "" 105 | o.Matcher.Regexp = "" 106 | o.Matcher.Size = "" 107 | o.Matcher.Status = "" 108 | o.Matcher.Time = "" 109 | o.Matcher.Words = "" 110 | for name, filter := range c.MatcherManager.GetMatchers() { 111 | switch name { 112 | case "line": 113 | o.Matcher.Lines = filter.Repr() 114 | case "regexp": 115 | o.Matcher.Regexp = filter.Repr() 116 | case "size": 117 | o.Matcher.Size = filter.Repr() 118 | case "status": 119 | o.Matcher.Status = filter.Repr() 120 | case "time": 121 | o.Matcher.Time = filter.Repr() 122 | case "words": 123 | o.Matcher.Words = filter.Repr() 124 | } 125 | } 126 | return o 127 | } 128 | -------------------------------------------------------------------------------- /pkg/input/wordlist.go: -------------------------------------------------------------------------------- 1 | package input 2 | 3 | import ( 4 | "bufio" 5 | "os" 6 | "regexp" 7 | "strings" 8 | 9 | "github.com/ffuf/ffuf/pkg/ffuf" 10 | ) 11 | 12 | type WordlistInput struct { 13 | active bool 14 | config *ffuf.Config 15 | data [][]byte 16 | position int 17 | keyword string 18 | } 19 | 20 | func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*WordlistInput, error) { 21 | var wl WordlistInput 22 | wl.active = true 23 | wl.keyword = keyword 24 | wl.config = conf 25 | wl.position = 0 26 | var valid bool 27 | var err error 28 | // stdin? 
29 | if value == "-" { 30 | // yes 31 | valid = true 32 | } else { 33 | // no 34 | valid, err = wl.validFile(value) 35 | } 36 | if err != nil { 37 | return &wl, err 38 | } 39 | if valid { 40 | err = wl.readFile(value) 41 | } 42 | return &wl, err 43 | } 44 | 45 | // Position will return the current position in the input list 46 | func (w *WordlistInput) Position() int { 47 | return w.position 48 | } 49 | 50 | // SetPosition sets the current position of the inputprovider 51 | func (w *WordlistInput) SetPosition(pos int) { 52 | w.position = pos 53 | } 54 | 55 | // ResetPosition resets the position back to beginning of the wordlist. 56 | func (w *WordlistInput) ResetPosition() { 57 | w.position = 0 58 | } 59 | 60 | // Keyword returns the keyword assigned to this InternalInputProvider 61 | func (w *WordlistInput) Keyword() string { 62 | return w.keyword 63 | } 64 | 65 | // Next will return a boolean telling if there's words left in the list 66 | func (w *WordlistInput) Next() bool { 67 | return w.position < len(w.data) 68 | } 69 | 70 | // IncrementPosition will increment the current position in the inputprovider data slice 71 | func (w *WordlistInput) IncrementPosition() { 72 | w.position += 1 73 | } 74 | 75 | // Value returns the value from wordlist at current cursor position 76 | func (w *WordlistInput) Value() []byte { 77 | return w.data[w.position] 78 | } 79 | 80 | // Total returns the size of wordlist 81 | func (w *WordlistInput) Total() int { 82 | return len(w.data) 83 | } 84 | 85 | // Active returns boolean if the inputprovider is active 86 | func (w *WordlistInput) Active() bool { 87 | return w.active 88 | } 89 | 90 | // Enable sets the inputprovider as active 91 | func (w *WordlistInput) Enable() { 92 | w.active = true 93 | } 94 | 95 | // Disable disables the inputprovider 96 | func (w *WordlistInput) Disable() { 97 | w.active = false 98 | } 99 | 100 | // validFile checks that the wordlist file exists and can be read 101 | func (w *WordlistInput) validFile(path string) (bool, error) { 102 | _, err := os.Stat(path) 103 | if err != nil { 104 | return false, err 105 | } 106 | f, err := os.Open(path) 107 | if err != nil { 108 | return false, err 109 | } 110 | f.Close() 111 | return true, nil 112 | } 113 | 114 | // readFile reads the file line by line to a byte slice 115 | func (w *WordlistInput) readFile(path string) error { 116 | var file *os.File 117 | var err error 118 | if path == "-" { 119 | file = os.Stdin 120 | } else { 121 | file, err = os.Open(path) 122 | if err != nil { 123 | return err 124 | } 125 | } 126 | defer file.Close() 127 | 128 | var data [][]byte 129 | var ok bool 130 | reader := bufio.NewScanner(file) 131 | re := regexp.MustCompile(`(?i)%ext%`) 132 | for reader.Scan() { 133 | if w.config.DirSearchCompat && len(w.config.Extensions) > 0 { 134 | text := []byte(reader.Text()) 135 | if re.Match(text) { 136 | for _, ext := range w.config.Extensions { 137 | contnt := re.ReplaceAll(text, []byte(ext)) 138 | data = append(data, []byte(contnt)) 139 | } 140 | } else { 141 | text := reader.Text() 142 | 143 | if w.config.IgnoreWordlistComments { 144 | text, ok = stripComments(text) 145 | if !ok { 146 | continue 147 | } 148 | } 149 | data = append(data, []byte(text)) 150 | } 151 | } else { 152 | text := reader.Text() 153 | 154 | if w.config.IgnoreWordlistComments { 155 | text, ok = stripComments(text) 156 | if !ok { 157 | continue 158 | } 159 | } 160 | data = append(data, []byte(text)) 161 | if w.keyword == "FUZZ" && len(w.config.Extensions) > 0 { 162 | for _, ext := range 
w.config.Extensions { 163 | data = append(data, []byte(text+ext)) 164 | } 165 | } 166 | } 167 | } 168 | w.data = data 169 | return reader.Err() 170 | } 171 | 172 | // stripComments removes all kind of comments from the word 173 | func stripComments(text string) (string, bool) { 174 | // If the line starts with a # ignoring any space on the left, 175 | // return blank. 176 | if strings.HasPrefix(strings.TrimLeft(text, " "), "#") { 177 | return "", false 178 | } 179 | 180 | // If the line has # later after a space, that's a comment. 181 | // Only send the word upto space to the routine. 182 | index := strings.Index(text, " #") 183 | if index == -1 { 184 | return text, true 185 | } 186 | return text[:index], true 187 | } 188 | -------------------------------------------------------------------------------- /pkg/filter/filter.go: -------------------------------------------------------------------------------- 1 | package filter 2 | 3 | import ( 4 | "fmt" 5 | "github.com/ffuf/ffuf/pkg/ffuf" 6 | "sync" 7 | ) 8 | 9 | // MatcherManager handles both filters and matchers. 10 | type MatcherManager struct { 11 | IsCalibrated bool 12 | Mutex sync.Mutex 13 | Matchers map[string]ffuf.FilterProvider 14 | Filters map[string]ffuf.FilterProvider 15 | PerDomainFilters map[string]*PerDomainFilter 16 | } 17 | 18 | type PerDomainFilter struct { 19 | IsCalibrated bool 20 | Filters map[string]ffuf.FilterProvider 21 | } 22 | 23 | func NewPerDomainFilter(globfilters map[string]ffuf.FilterProvider) *PerDomainFilter { 24 | return &PerDomainFilter{IsCalibrated: false, Filters: globfilters} 25 | } 26 | 27 | func (p *PerDomainFilter) SetCalibrated(value bool) { 28 | p.IsCalibrated = value 29 | } 30 | 31 | func NewMatcherManager() ffuf.MatcherManager { 32 | return &MatcherManager{ 33 | IsCalibrated: false, 34 | Matchers: make(map[string]ffuf.FilterProvider), 35 | Filters: make(map[string]ffuf.FilterProvider), 36 | PerDomainFilters: make(map[string]*PerDomainFilter), 37 | } 38 | } 39 | 40 | func (f *MatcherManager) SetCalibrated(value bool) { 41 | f.IsCalibrated = value 42 | } 43 | 44 | func (f *MatcherManager) SetCalibratedForHost(host string, value bool) { 45 | if f.PerDomainFilters[host] != nil { 46 | f.PerDomainFilters[host].IsCalibrated = value 47 | } else { 48 | newFilter := NewPerDomainFilter(f.Filters) 49 | newFilter.IsCalibrated = true 50 | f.PerDomainFilters[host] = newFilter 51 | } 52 | } 53 | 54 | func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) { 55 | if name == "status" { 56 | return NewStatusFilter(value) 57 | } 58 | if name == "size" { 59 | return NewSizeFilter(value) 60 | } 61 | if name == "word" { 62 | return NewWordFilter(value) 63 | } 64 | if name == "line" { 65 | return NewLineFilter(value) 66 | } 67 | if name == "regexp" { 68 | return NewRegexpFilter(value) 69 | } 70 | if name == "time" { 71 | return NewTimeFilter(value) 72 | } 73 | return nil, fmt.Errorf("Could not create filter with name %s", name) 74 | } 75 | 76 | //AddFilter adds a new filter to MatcherManager 77 | func (f *MatcherManager) AddFilter(name string, option string, replace bool) error { 78 | f.Mutex.Lock() 79 | defer f.Mutex.Unlock() 80 | newf, err := NewFilterByName(name, option) 81 | if err == nil { 82 | // valid filter create or append 83 | if f.Filters[name] == nil || replace { 84 | f.Filters[name] = newf 85 | } else { 86 | newoption := f.Filters[name].Repr() + "," + option 87 | newerf, err := NewFilterByName(name, newoption) 88 | if err == nil { 89 | f.Filters[name] = newerf 90 | } 91 | } 92 | } 93 | 
return err 94 | } 95 | 96 | //AddPerDomainFilter adds a new filter to PerDomainFilter configuration 97 | func (f *MatcherManager) AddPerDomainFilter(domain string, name string, option string) error { 98 | f.Mutex.Lock() 99 | defer f.Mutex.Unlock() 100 | var pdFilters *PerDomainFilter 101 | if filter, ok := f.PerDomainFilters[domain]; ok { 102 | pdFilters = filter 103 | } else { 104 | pdFilters = NewPerDomainFilter(f.Filters) 105 | } 106 | newf, err := NewFilterByName(name, option) 107 | if err == nil { 108 | // valid filter create or append 109 | if pdFilters.Filters[name] == nil { 110 | pdFilters.Filters[name] = newf 111 | } else { 112 | newoption := pdFilters.Filters[name].Repr() + "," + option 113 | newerf, err := NewFilterByName(name, newoption) 114 | if err == nil { 115 | pdFilters.Filters[name] = newerf 116 | } 117 | } 118 | } 119 | f.PerDomainFilters[domain] = pdFilters 120 | return err 121 | } 122 | 123 | //RemoveFilter removes a filter of a given type 124 | func (f *MatcherManager) RemoveFilter(name string) { 125 | f.Mutex.Lock() 126 | defer f.Mutex.Unlock() 127 | delete(f.Filters, name) 128 | } 129 | 130 | //AddMatcher adds a new matcher to Config 131 | func (f *MatcherManager) AddMatcher(name string, option string) error { 132 | f.Mutex.Lock() 133 | defer f.Mutex.Unlock() 134 | newf, err := NewFilterByName(name, option) 135 | if err == nil { 136 | // valid filter create or append 137 | if f.Matchers[name] == nil { 138 | f.Matchers[name] = newf 139 | } else { 140 | newoption := f.Matchers[name].Repr() + "," + option 141 | newerf, err := NewFilterByName(name, newoption) 142 | if err == nil { 143 | f.Matchers[name] = newerf 144 | } 145 | } 146 | } 147 | return err 148 | } 149 | 150 | func (f *MatcherManager) GetFilters() map[string]ffuf.FilterProvider { 151 | return f.Filters 152 | } 153 | 154 | func (f *MatcherManager) GetMatchers() map[string]ffuf.FilterProvider { 155 | return f.Matchers 156 | } 157 | 158 | func (f *MatcherManager) FiltersForDomain(domain string) map[string]ffuf.FilterProvider { 159 | if f.PerDomainFilters[domain] == nil { 160 | return f.Filters 161 | } 162 | return f.PerDomainFilters[domain].Filters 163 | } 164 | 165 | func (f *MatcherManager) CalibratedForDomain(domain string) bool { 166 | if f.PerDomainFilters[domain] != nil { 167 | return f.PerDomainFilters[domain].IsCalibrated 168 | } 169 | return false 170 | } 171 | 172 | func (f *MatcherManager) Calibrated() bool { 173 | return f.IsCalibrated 174 | } 175 | -------------------------------------------------------------------------------- /pkg/ffuf/config.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "context" 5 | ) 6 | 7 | type Config struct { 8 | AutoCalibration bool `json:"autocalibration"` 9 | AutoCalibrationKeyword string `json:"autocalibration_keyword"` 10 | AutoCalibrationPerHost bool `json:"autocalibration_perhost"` 11 | AutoCalibrationStrategy string `json:"autocalibration_strategy"` 12 | AutoCalibrationStrings []string `json:"autocalibration_strings"` 13 | Cancel context.CancelFunc `json:"-"` 14 | Colors bool `json:"colors"` 15 | CommandKeywords []string `json:"-"` 16 | CommandLine string `json:"cmdline"` 17 | ConfigFile string `json:"configfile"` 18 | Context context.Context `json:"-"` 19 | Data string `json:"postdata"` 20 | Debuglog string `json:"debuglog"` 21 | Delay optRange `json:"delay"` 22 | DirSearchCompat bool `json:"dirsearch_compatibility"` 23 | Extensions []string `json:"extensions"` 24 | FilterMode string 
`json:"fmode"` 25 | FollowRedirects bool `json:"follow_redirects"` 26 | Headers map[string]string `json:"headers"` 27 | IgnoreBody bool `json:"ignorebody"` 28 | IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` 29 | InputMode string `json:"inputmode"` 30 | InputNum int `json:"cmd_inputnum"` 31 | InputProviders []InputProviderConfig `json:"inputproviders"` 32 | InputShell string `json:"inputshell"` 33 | Json bool `json:"json"` 34 | MatcherManager MatcherManager `json:"matchers"` 35 | MatcherMode string `json:"mmode"` 36 | MaxTime int `json:"maxtime"` 37 | MaxTimeJob int `json:"maxtime_job"` 38 | Method string `json:"method"` 39 | Noninteractive bool `json:"noninteractive"` 40 | OutputDirectory string `json:"outputdirectory"` 41 | OutputFile string `json:"outputfile"` 42 | OutputFormat string `json:"outputformat"` 43 | OutputSkipEmptyFile bool `json:"OutputSkipEmptyFile"` 44 | ProgressFrequency int `json:"-"` 45 | ProxyURL string `json:"proxyurl"` 46 | Quiet bool `json:"quiet"` 47 | Rate int64 `json:"rate"` 48 | Recursion bool `json:"recursion"` 49 | RecursionDepth int `json:"recursion_depth"` 50 | RecursionStrategy string `json:"recursion_strategy"` 51 | ReplayProxyURL string `json:"replayproxyurl"` 52 | RequestFile string `json:"requestfile"` 53 | RequestProto string `json:"requestproto"` 54 | SNI string `json:"sni"` 55 | StopOn403 bool `json:"stop_403"` 56 | StopOnAll bool `json:"stop_all"` 57 | StopOnErrors bool `json:"stop_errors"` 58 | Threads int `json:"threads"` 59 | Timeout int `json:"timeout"` 60 | Url string `json:"url"` 61 | Verbose bool `json:"verbose"` 62 | Wordlists []string `json:"wordlists"` 63 | Http2 bool `json:"http2"` 64 | } 65 | 66 | type InputProviderConfig struct { 67 | Name string `json:"name"` 68 | Keyword string `json:"keyword"` 69 | Value string `json:"value"` 70 | Template string `json:"template"` // the templating string used for sniper mode (usually "§") 71 | } 72 | 73 | func NewConfig(ctx context.Context, cancel context.CancelFunc) Config { 74 | var conf Config 75 | conf.AutoCalibrationKeyword = "FUZZ" 76 | conf.AutoCalibrationStrategy = "basic" 77 | conf.AutoCalibrationStrings = make([]string, 0) 78 | conf.CommandKeywords = make([]string, 0) 79 | conf.Context = ctx 80 | conf.Cancel = cancel 81 | conf.Data = "" 82 | conf.Debuglog = "" 83 | conf.Delay = optRange{0, 0, false, false} 84 | conf.DirSearchCompat = false 85 | conf.Extensions = make([]string, 0) 86 | conf.FilterMode = "or" 87 | conf.FollowRedirects = false 88 | conf.Headers = make(map[string]string) 89 | conf.IgnoreWordlistComments = false 90 | conf.InputMode = "clusterbomb" 91 | conf.InputNum = 0 92 | conf.InputShell = "" 93 | conf.InputProviders = make([]InputProviderConfig, 0) 94 | conf.Json = false 95 | conf.MatcherMode = "or" 96 | conf.MaxTime = 0 97 | conf.MaxTimeJob = 0 98 | conf.Method = "GET" 99 | conf.Noninteractive = false 100 | conf.ProgressFrequency = 125 101 | conf.ProxyURL = "" 102 | conf.Quiet = false 103 | conf.Rate = 0 104 | conf.Recursion = false 105 | conf.RecursionDepth = 0 106 | conf.RecursionStrategy = "default" 107 | conf.RequestFile = "" 108 | conf.RequestProto = "https" 109 | conf.SNI = "" 110 | conf.StopOn403 = false 111 | conf.StopOnAll = false 112 | conf.StopOnErrors = false 113 | conf.Timeout = 10 114 | conf.Url = "" 115 | conf.Verbose = false 116 | conf.Wordlists = []string{} 117 | conf.Http2 = false 118 | return conf 119 | } 120 | 121 | func (c *Config) SetContext(ctx context.Context, cancel context.CancelFunc) { 122 | c.Context = ctx 123 | c.Cancel = cancel 
124 | } 125 | -------------------------------------------------------------------------------- /help.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "flag" 5 | "fmt" 6 | "os" 7 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" 9 | ) 10 | 11 | type UsageSection struct { 12 | Name string 13 | Description string 14 | Flags []UsageFlag 15 | Hidden bool 16 | ExpectedFlags []string 17 | } 18 | 19 | // PrintSection prints out the section name, description and each of the flags 20 | func (u *UsageSection) PrintSection(max_length int, extended bool) { 21 | // Do not print if extended usage not requested and section marked as hidden 22 | if !extended && u.Hidden { 23 | return 24 | } 25 | fmt.Printf("%s:\n", u.Name) 26 | for _, f := range u.Flags { 27 | f.PrintFlag(max_length) 28 | } 29 | fmt.Printf("\n") 30 | } 31 | 32 | type UsageFlag struct { 33 | Name string 34 | Description string 35 | Default string 36 | } 37 | 38 | // PrintFlag prints out the flag name, usage string and default value 39 | func (f *UsageFlag) PrintFlag(max_length int) { 40 | // Create format string, used for padding 41 | format := fmt.Sprintf(" -%%-%ds %%s", max_length) 42 | if f.Default != "" { 43 | format = format + " (default: %s)\n" 44 | fmt.Printf(format, f.Name, f.Description, f.Default) 45 | } else { 46 | format = format + "\n" 47 | fmt.Printf(format, f.Name, f.Description) 48 | } 49 | } 50 | 51 | func Usage() { 52 | u_http := UsageSection{ 53 | Name: "HTTP OPTIONS", 54 | Description: "Options controlling the HTTP request and its parts.", 55 | Flags: make([]UsageFlag, 0), 56 | Hidden: false, 57 | ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x", "sni", "http2"}, 58 | } 59 | u_general := UsageSection{ 60 | Name: "GENERAL OPTIONS", 61 | Description: "", 62 | Flags: make([]UsageFlag, 0), 63 | Hidden: false, 64 | ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "search", "s", "sa", "se", "sf", "t", "v", "V"}, 65 | } 66 | u_compat := UsageSection{ 67 | Name: "COMPATIBILITY OPTIONS", 68 | Description: "Options to ensure compatibility with other pieces of software.", 69 | Flags: make([]UsageFlag, 0), 70 | Hidden: true, 71 | ExpectedFlags: []string{"compressed", "cookie", "data", "data-ascii", "data-binary", "i", "k"}, 72 | } 73 | u_matcher := UsageSection{ 74 | Name: "MATCHER OPTIONS", 75 | Description: "Matchers for the response filtering.", 76 | Flags: make([]UsageFlag, 0), 77 | Hidden: false, 78 | ExpectedFlags: []string{"mmode", "mc", "ml", "mr", "ms", "mt", "mw"}, 79 | } 80 | u_filter := UsageSection{ 81 | Name: "FILTER OPTIONS", 82 | Description: "Filters for the response filtering.", 83 | Flags: make([]UsageFlag, 0), 84 | Hidden: false, 85 | ExpectedFlags: []string{"fmode", "fc", "fl", "fr", "fs", "ft", "fw"}, 86 | } 87 | u_input := UsageSection{ 88 | Name: "INPUT OPTIONS", 89 | Description: "Options for input data for fuzzing. Wordlists and input generators.", 90 | Flags: make([]UsageFlag, 0), 91 | Hidden: false, 92 | ExpectedFlags: []string{"D", "ic", "input-cmd", "input-num", "input-shell", "mode", "request", "request-proto", "e", "w"}, 93 | } 94 | u_output := UsageSection{ 95 | Name: "OUTPUT OPTIONS", 96 | Description: "Options for output. 
Output file formats, file names and debug file locations.", 97 | Flags: make([]UsageFlag, 0), 98 | Hidden: false, 99 | ExpectedFlags: []string{"debug-log", "o", "of", "od", "or"}, 100 | } 101 | sections := []UsageSection{u_http, u_general, u_compat, u_matcher, u_filter, u_input, u_output} 102 | 103 | // Populate the flag sections 104 | max_length := 0 105 | flag.VisitAll(func(f *flag.Flag) { 106 | found := false 107 | for i, section := range sections { 108 | if strInSlice(f.Name, section.ExpectedFlags) { 109 | sections[i].Flags = append(sections[i].Flags, UsageFlag{ 110 | Name: f.Name, 111 | Description: f.Usage, 112 | Default: f.DefValue, 113 | }) 114 | found = true 115 | } 116 | } 117 | if !found { 118 | fmt.Printf("DEBUG: Flag %s was found but not defined in help.go.\n", f.Name) 119 | os.Exit(1) 120 | } 121 | if len(f.Name) > max_length { 122 | max_length = len(f.Name) 123 | } 124 | }) 125 | 126 | fmt.Printf("Fuzz Faster U Fool - v%s\n\n", ffuf.Version()) 127 | 128 | // Print out the sections 129 | for _, section := range sections { 130 | section.PrintSection(max_length, false) 131 | } 132 | 133 | // Usage examples. 134 | fmt.Printf("EXAMPLE USAGE:\n") 135 | 136 | fmt.Printf(" Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42.\n") 137 | fmt.Printf(" Colored, verbose output.\n") 138 | fmt.Printf(" ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v\n\n") 139 | 140 | fmt.Printf(" Fuzz Host-header, match HTTP 200 responses.\n") 141 | fmt.Printf(" ffuf -w hosts.txt -u https://example.org/ -H \"Host: FUZZ\" -mc 200\n\n") 142 | 143 | fmt.Printf(" Fuzz POST JSON data. Match all responses not containing text \"error\".\n") 144 | fmt.Printf(" ffuf -w entries.txt -u https://example.org/ -X POST -H \"Content-Type: application/json\" \\\n") 145 | fmt.Printf(" -d '{\"name\": \"FUZZ\", \"anotherkey\": \"anothervalue\"}' -fr \"error\"\n\n") 146 | 147 | fmt.Printf(" Fuzz multiple locations. Match only responses reflecting the value of \"VAL\" keyword. 
Colored.\n") 148 | fmt.Printf(" ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr \"VAL\" -c\n\n") 149 | 150 | fmt.Printf(" More information and examples: https://github.com/ffuf/ffuf\n\n") 151 | } 152 | 153 | func strInSlice(val string, slice []string) bool { 154 | for _, v := range slice { 155 | if v == val { 156 | return true 157 | } 158 | } 159 | return false 160 | } 161 | -------------------------------------------------------------------------------- /pkg/runner/simple.go: -------------------------------------------------------------------------------- 1 | package runner 2 | 3 | import ( 4 | "bytes" 5 | "crypto/tls" 6 | "fmt" 7 | "io" 8 | "net" 9 | "net/http" 10 | "net/http/httptrace" 11 | "net/http/httputil" 12 | "net/textproto" 13 | "net/url" 14 | "strconv" 15 | "strings" 16 | "time" 17 | 18 | "github.com/ffuf/ffuf/pkg/ffuf" 19 | ) 20 | 21 | // Download results < 5MB 22 | const MAX_DOWNLOAD_SIZE = 5242880 23 | 24 | type SimpleRunner struct { 25 | config *ffuf.Config 26 | client *http.Client 27 | } 28 | 29 | func NewSimpleRunner(conf *ffuf.Config, replay bool) ffuf.RunnerProvider { 30 | var simplerunner SimpleRunner 31 | proxyURL := http.ProxyFromEnvironment 32 | customProxy := "" 33 | 34 | if replay { 35 | customProxy = conf.ReplayProxyURL 36 | } else { 37 | customProxy = conf.ProxyURL 38 | } 39 | if len(customProxy) > 0 { 40 | pu, err := url.Parse(customProxy) 41 | if err == nil { 42 | proxyURL = http.ProxyURL(pu) 43 | } 44 | } 45 | simplerunner.config = conf 46 | simplerunner.client = &http.Client{ 47 | CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, 48 | Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second), 49 | Transport: &http.Transport{ 50 | ForceAttemptHTTP2: conf.Http2, 51 | Proxy: proxyURL, 52 | MaxIdleConns: 1000, 53 | MaxIdleConnsPerHost: 500, 54 | MaxConnsPerHost: 500, 55 | DialContext: (&net.Dialer{ 56 | Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second), 57 | }).DialContext, 58 | TLSHandshakeTimeout: time.Duration(time.Duration(conf.Timeout) * time.Second), 59 | TLSClientConfig: &tls.Config{ 60 | InsecureSkipVerify: true, 61 | Renegotiation: tls.RenegotiateOnceAsClient, 62 | ServerName: conf.SNI, 63 | }, 64 | }} 65 | 66 | if conf.FollowRedirects { 67 | simplerunner.client.CheckRedirect = nil 68 | } 69 | return &simplerunner 70 | } 71 | 72 | func (r *SimpleRunner) Prepare(input map[string][]byte, basereq *ffuf.Request) (ffuf.Request, error) { 73 | req := ffuf.CopyRequest(basereq) 74 | 75 | for keyword, inputitem := range input { 76 | req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem)) 77 | headers := make(map[string]string, len(req.Headers)) 78 | for h, v := range req.Headers { 79 | var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.ReplaceAll(h, keyword, string(inputitem))) 80 | headers[CanonicalHeader] = strings.ReplaceAll(v, keyword, string(inputitem)) 81 | } 82 | req.Headers = headers 83 | req.Url = strings.ReplaceAll(req.Url, keyword, string(inputitem)) 84 | req.Data = []byte(strings.ReplaceAll(string(req.Data), keyword, string(inputitem))) 85 | } 86 | 87 | req.Input = input 88 | return req, nil 89 | } 90 | 91 | func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) { 92 | var httpreq *http.Request 93 | var err error 94 | var rawreq []byte 95 | data := bytes.NewReader(req.Data) 96 | 97 | var start time.Time 98 | var firstByteTime time.Duration 99 | 100 | trace := &httptrace.ClientTrace{ 101 | 
WroteRequest: func(wri httptrace.WroteRequestInfo) { 102 | start = time.Now() // begin the timer after the request is fully written 103 | }, 104 | GotFirstResponseByte: func() { 105 | firstByteTime = time.Since(start) // record when the first byte of the response was received 106 | }, 107 | } 108 | 109 | httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data) 110 | 111 | if err != nil { 112 | return ffuf.Response{}, err 113 | } 114 | 115 | // set default User-Agent header if not present 116 | if _, ok := req.Headers["User-Agent"]; !ok { 117 | req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.Version()) 118 | } 119 | 120 | // Handle Go http.Request special cases 121 | if _, ok := req.Headers["Host"]; ok { 122 | httpreq.Host = req.Headers["Host"] 123 | } 124 | 125 | req.Host = httpreq.Host 126 | httpreq = httpreq.WithContext(httptrace.WithClientTrace(r.config.Context, trace)) 127 | for k, v := range req.Headers { 128 | httpreq.Header.Set(k, v) 129 | } 130 | 131 | if len(r.config.OutputDirectory) > 0 { 132 | rawreq, _ = httputil.DumpRequestOut(httpreq, true) 133 | } 134 | httpresp, err := r.client.Do(httpreq) 135 | if err != nil { 136 | return ffuf.Response{}, err 137 | } 138 | 139 | resp := ffuf.NewResponse(httpresp, req) 140 | defer httpresp.Body.Close() 141 | 142 | // Check if we should download the resource or not 143 | size, err := strconv.Atoi(httpresp.Header.Get("Content-Length")) 144 | if err == nil { 145 | resp.ContentLength = int64(size) 146 | if (r.config.IgnoreBody) || (size > MAX_DOWNLOAD_SIZE) { 147 | resp.Cancelled = true 148 | return resp, nil 149 | } 150 | } 151 | 152 | if len(r.config.OutputDirectory) > 0 { 153 | rawresp, _ := httputil.DumpResponse(httpresp, true) 154 | resp.Request.Raw = string(rawreq) 155 | resp.Raw = string(rawresp) 156 | } 157 | 158 | if respbody, err := io.ReadAll(httpresp.Body); err == nil { 159 | resp.ContentLength = int64(len(string(respbody))) 160 | resp.Data = respbody 161 | } 162 | 163 | wordsSize := len(strings.Split(string(resp.Data), " ")) 164 | linesSize := len(strings.Split(string(resp.Data), "\n")) 165 | resp.ContentWords = int64(wordsSize) 166 | resp.ContentLines = int64(linesSize) 167 | resp.Time = firstByteTime 168 | 169 | return resp, nil 170 | } 171 | 172 | func (r *SimpleRunner) Dump(req *ffuf.Request) ([]byte, error) { 173 | var httpreq *http.Request 174 | var err error 175 | data := bytes.NewReader(req.Data) 176 | httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data) 177 | if err != nil { 178 | return []byte{}, err 179 | } 180 | 181 | // set default User-Agent header if not present 182 | if _, ok := req.Headers["User-Agent"]; !ok { 183 | req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.Version()) 184 | } 185 | 186 | // Handle Go http.Request special cases 187 | if _, ok := req.Headers["Host"]; ok { 188 | httpreq.Host = req.Headers["Host"] 189 | } 190 | 191 | req.Host = httpreq.Host 192 | for k, v := range req.Headers { 193 | httpreq.Header.Set(k, v) 194 | } 195 | return httputil.DumpRequestOut(httpreq, true) 196 | } 197 | -------------------------------------------------------------------------------- /pkg/ffuf/request.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "strings" 5 | ) 6 | 7 | // Request holds the meaningful data that is passed for runner for making the query 8 | type Request struct { 9 | Method string 10 | Host string 11 | Url 
string 12 | Headers map[string]string 13 | Data []byte 14 | Input map[string][]byte 15 | Position int 16 | Raw string 17 | } 18 | 19 | func NewRequest(conf *Config) Request { 20 | var req Request 21 | req.Method = conf.Method 22 | req.Url = conf.Url 23 | req.Headers = make(map[string]string) 24 | return req 25 | } 26 | 27 | // BaseRequest returns a base request struct populated from the main config 28 | func BaseRequest(conf *Config) Request { 29 | req := NewRequest(conf) 30 | req.Headers = conf.Headers 31 | req.Data = []byte(conf.Data) 32 | return req 33 | } 34 | 35 | // RecursionRequest returns a base request for a recursion target 36 | func RecursionRequest(conf *Config, path string) Request { 37 | r := BaseRequest(conf) 38 | r.Url = path 39 | return r 40 | } 41 | 42 | // CopyRequest performs a deep copy of a request and returns a new struct 43 | func CopyRequest(basereq *Request) Request { 44 | var req Request 45 | req.Method = basereq.Method 46 | req.Host = basereq.Host 47 | req.Url = basereq.Url 48 | 49 | req.Headers = make(map[string]string, len(basereq.Headers)) 50 | for h, v := range basereq.Headers { 51 | req.Headers[h] = v 52 | } 53 | 54 | req.Data = make([]byte, len(basereq.Data)) 55 | copy(req.Data, basereq.Data) 56 | 57 | if len(basereq.Input) > 0 { 58 | req.Input = make(map[string][]byte, len(basereq.Input)) 59 | for k, v := range basereq.Input { 60 | req.Input[k] = v 61 | } 62 | } 63 | 64 | req.Position = basereq.Position 65 | req.Raw = basereq.Raw 66 | 67 | return req 68 | } 69 | 70 | // SniperRequests returns an array of requests, each with one of the templated locations replaced by a keyword 71 | func SniperRequests(basereq *Request, template string) []Request { 72 | var reqs []Request 73 | keyword := "FUZZ" 74 | 75 | // Search for input location identifiers, these must exist in pairs 76 | if c := strings.Count(basereq.Method, template); c > 0 { 77 | if c%2 == 0 { 78 | tokens := templateLocations(template, basereq.Method) 79 | 80 | for i := 0; i < len(tokens); i = i + 2 { 81 | newreq := CopyRequest(basereq) 82 | newreq.Method = injectKeyword(basereq.Method, keyword, tokens[i], tokens[i+1]) 83 | scrubTemplates(&newreq, template) 84 | reqs = append(reqs, newreq) 85 | } 86 | } 87 | } 88 | 89 | if c := strings.Count(basereq.Url, template); c > 0 { 90 | if c%2 == 0 { 91 | tokens := templateLocations(template, basereq.Url) 92 | 93 | for i := 0; i < len(tokens); i = i + 2 { 94 | newreq := CopyRequest(basereq) 95 | newreq.Url = injectKeyword(basereq.Url, keyword, tokens[i], tokens[i+1]) 96 | scrubTemplates(&newreq, template) 97 | reqs = append(reqs, newreq) 98 | } 99 | } 100 | } 101 | 102 | data := string(basereq.Data) 103 | if c := strings.Count(data, template); c > 0 { 104 | if c%2 == 0 { 105 | tokens := templateLocations(template, data) 106 | 107 | for i := 0; i < len(tokens); i = i + 2 { 108 | newreq := CopyRequest(basereq) 109 | newreq.Data = []byte(injectKeyword(data, keyword, tokens[i], tokens[i+1])) 110 | scrubTemplates(&newreq, template) 111 | reqs = append(reqs, newreq) 112 | } 113 | } 114 | } 115 | 116 | for k, v := range basereq.Headers { 117 | if c := strings.Count(k, template); c > 0 { 118 | if c%2 == 0 { 119 | tokens := templateLocations(template, k) 120 | 121 | for i := 0; i < len(tokens); i = i + 2 { 122 | newreq := CopyRequest(basereq) 123 | newreq.Headers[injectKeyword(k, keyword, tokens[i], tokens[i+1])] = v 124 | delete(newreq.Headers, k) 125 | scrubTemplates(&newreq, template) 126 | reqs = append(reqs, newreq) 127 | } 128 | } 129 | } 130 | if c := 
strings.Count(v, template); c > 0 { 131 | if c%2 == 0 { 132 | tokens := templateLocations(template, v) 133 | 134 | for i := 0; i < len(tokens); i = i + 2 { 135 | newreq := CopyRequest(basereq) 136 | newreq.Headers[k] = injectKeyword(v, keyword, tokens[i], tokens[i+1]) 137 | scrubTemplates(&newreq, template) 138 | reqs = append(reqs, newreq) 139 | } 140 | } 141 | } 142 | } 143 | 144 | return reqs 145 | } 146 | 147 | // templateLocations returns an array of template character locations in input 148 | func templateLocations(template string, input string) []int { 149 | var tokens []int 150 | 151 | for k, i := range []rune(input) { 152 | if i == []rune(template)[0] { 153 | tokens = append(tokens, k) 154 | } 155 | } 156 | 157 | return tokens 158 | } 159 | 160 | // injectKeyword takes a string, a keyword, and a start/end offset. The data between 161 | // the start/end offset in string is removed, and replaced by keyword 162 | func injectKeyword(input string, keyword string, startOffset int, endOffset int) string { 163 | 164 | // some basic sanity checking, return the original string unchanged if offsets didnt make sense 165 | if startOffset > len(input) || endOffset > len(input) || startOffset > endOffset { 166 | return input 167 | } 168 | 169 | inputslice := []rune(input) 170 | keywordslice := []rune(keyword) 171 | 172 | prefix := inputslice[:startOffset] 173 | suffix := inputslice[endOffset+1:] 174 | 175 | var outputslice []rune 176 | outputslice = append(outputslice, prefix...) 177 | outputslice = append(outputslice, keywordslice...) 178 | outputslice = append(outputslice, suffix...) 179 | 180 | return string(outputslice) 181 | } 182 | 183 | // scrubTemplates removes all template (§) strings from the request struct 184 | func scrubTemplates(req *Request, template string) { 185 | req.Method = strings.Join(strings.Split(req.Method, template), "") 186 | req.Url = strings.Join(strings.Split(req.Url, template), "") 187 | req.Data = []byte(strings.Join(strings.Split(string(req.Data), template), "")) 188 | 189 | for k, v := range req.Headers { 190 | if c := strings.Count(k, template); c > 0 { 191 | if c%2 == 0 { 192 | delete(req.Headers, k) 193 | req.Headers[strings.Join(strings.Split(k, template), "")] = v 194 | } 195 | } 196 | if c := strings.Count(v, template); c > 0 { 197 | if c%2 == 0 { 198 | req.Headers[k] = strings.Join(strings.Split(v, template), "") 199 | } 200 | } 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /pkg/output/file_html.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "html/template" 5 | "os" 6 | "time" 7 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" 9 | ) 10 | 11 | type htmlFileOutput struct { 12 | CommandLine string 13 | Time string 14 | Keys []string 15 | Results []ffuf.Result 16 | } 17 | 18 | const ( 19 | htmlTemplate = ` 20 | 21 | 22 | 23 | 24 | 28 | FFUF Report - 29 | 30 | 31 | 35 | 39 | 44 | 45 | 46 | 47 | 48 | 55 | 56 |
57 |
58 |

59 |

FFUF Report

60 |
61 | 62 |
{{ .CommandLine }}
63 |
{{ .Time }}
64 | 65 | 66 | 67 |
68 | |result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines| 69 |
70 | 71 | 72 | {{ range .Keys }} 73 | {{ end }} 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | {{range $result := .Results}} 88 |
89 | |result_raw|{{ $result.StatusCode }}{{ range $keyword, $value := $result.Input }}|{{ $value | printf "%s" }}{{ end }}|{{ $result.Url }}|{{ $result.RedirectLocation }}|{{ $result.Position }}|{{ $result.ContentLength }}|{{ $result.ContentWords }}|{{ $result.ContentLines }}|{{ $result.ContentType }}| 90 |
91 | 92 | 93 | {{ range $keyword, $value := $result.Input }} 94 | 95 | {{ end }} 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | {{ end }} 107 | 108 |
Status{{ . }}URLRedirect locationPositionLengthWordsLinesTypeDurationResultfile
{{ $result.StatusCode }}{{ $value | printf "%s" }}{{ $result.Url }}{{ $result.RedirectLocation }}{{ $result.Position }}{{ $result.ContentLength }}{{ $result.ContentWords }}{{ $result.ContentLines }}{{ $result.ContentType }}{{ $result.Duration }}{{ $result.ResultFile }}
109 | 110 |
111 |

112 |
113 |
114 | 115 | 116 | 117 | 118 | 119 | 132 | 143 | 144 | 145 | 146 | ` 147 | ) 148 | 149 | // colorizeResults returns a new slice with HTMLColor attribute 150 | func colorizeResults(results []ffuf.Result) []ffuf.Result { 151 | newResults := make([]ffuf.Result, 0) 152 | 153 | for _, r := range results { 154 | result := r 155 | result.HTMLColor = "black" 156 | 157 | s := result.StatusCode 158 | 159 | if s >= 200 && s <= 299 { 160 | result.HTMLColor = "#adea9e" 161 | } 162 | 163 | if s >= 300 && s <= 399 { 164 | result.HTMLColor = "#bbbbe6" 165 | } 166 | 167 | if s >= 400 && s <= 499 { 168 | result.HTMLColor = "#d2cb7e" 169 | } 170 | 171 | if s >= 500 && s <= 599 { 172 | result.HTMLColor = "#de8dc1" 173 | } 174 | 175 | newResults = append(newResults, result) 176 | } 177 | 178 | return newResults 179 | } 180 | 181 | func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) error { 182 | results = colorizeResults(results) 183 | 184 | ti := time.Now() 185 | 186 | keywords := make([]string, 0) 187 | for _, inputprovider := range config.InputProviders { 188 | keywords = append(keywords, inputprovider.Keyword) 189 | } 190 | 191 | outHTML := htmlFileOutput{ 192 | CommandLine: config.CommandLine, 193 | Time: ti.Format(time.RFC3339), 194 | Results: results, 195 | Keys: keywords, 196 | } 197 | 198 | f, err := os.Create(filename) 199 | if err != nil { 200 | return err 201 | } 202 | defer f.Close() 203 | 204 | templateName := "output.html" 205 | t := template.New(templateName).Delims("{{", "}}") 206 | _, err = t.Parse(htmlTemplate) 207 | if err != nil { 208 | return err 209 | } 210 | err = t.Execute(f, outHTML) 211 | return err 212 | } 213 | -------------------------------------------------------------------------------- /pkg/input/input.go: -------------------------------------------------------------------------------- 1 | package input 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/ffuf/ffuf/pkg/ffuf" 7 | ) 8 | 9 | type MainInputProvider struct { 10 | Providers []ffuf.InternalInputProvider 11 | Config *ffuf.Config 12 | position int 13 | msbIterator int 14 | } 15 | 16 | func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, ffuf.Multierror) { 17 | validmode := false 18 | errs := ffuf.NewMultierror() 19 | for _, mode := range []string{"clusterbomb", "pitchfork", "sniper"} { 20 | if conf.InputMode == mode { 21 | validmode = true 22 | } 23 | } 24 | if !validmode { 25 | errs.Add(fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)) 26 | return &MainInputProvider{}, errs 27 | } 28 | mainip := MainInputProvider{Config: conf, msbIterator: 0} 29 | // Initialize the correct inputprovider 30 | for _, v := range conf.InputProviders { 31 | err := mainip.AddProvider(v) 32 | if err != nil { 33 | errs.Add(err) 34 | } 35 | } 36 | return &mainip, errs 37 | } 38 | 39 | func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error { 40 | if provider.Name == "command" { 41 | newcomm, _ := NewCommandInput(provider.Keyword, provider.Value, i.Config) 42 | i.Providers = append(i.Providers, newcomm) 43 | } else { 44 | // Default to wordlist 45 | newwl, err := NewWordlistInput(provider.Keyword, provider.Value, i.Config) 46 | if err != nil { 47 | return err 48 | } 49 | i.Providers = append(i.Providers, newwl) 50 | } 51 | return nil 52 | } 53 | 54 | // ActivateKeywords enables / disables wordlists based on list of active keywords 55 | func (i *MainInputProvider) ActivateKeywords(kws []string) { 56 | for _, p := range i.Providers { 57 | if sliceContains(kws, p.Keyword()) { 58 
| p.Active() 59 | } else { 60 | p.Disable() 61 | } 62 | } 63 | } 64 | 65 | // Position will return the current position of progress 66 | func (i *MainInputProvider) Position() int { 67 | return i.position 68 | } 69 | 70 | // SetPosition will reset the MainInputProvider to a specific position 71 | func (i *MainInputProvider) SetPosition(pos int) { 72 | if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { 73 | i.setclusterbombPosition(pos) 74 | } else { 75 | i.setpitchforkPosition(pos) 76 | } 77 | } 78 | 79 | // Keywords returns a slice of all keywords in the inputprovider 80 | func (i *MainInputProvider) Keywords() []string { 81 | kws := make([]string, 0) 82 | for _, p := range i.Providers { 83 | kws = append(kws, p.Keyword()) 84 | } 85 | return kws 86 | } 87 | 88 | // Next will increment the cursor position, and return a boolean telling if there's inputs left 89 | func (i *MainInputProvider) Next() bool { 90 | if i.position >= i.Total() { 91 | return false 92 | } 93 | i.position++ 94 | return true 95 | } 96 | 97 | // Value returns a map of inputs for keywords 98 | func (i *MainInputProvider) Value() map[string][]byte { 99 | retval := make(map[string][]byte) 100 | if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { 101 | retval = i.clusterbombValue() 102 | } 103 | if i.Config.InputMode == "pitchfork" { 104 | retval = i.pitchforkValue() 105 | } 106 | return retval 107 | } 108 | 109 | // Reset resets all the inputproviders and counters 110 | func (i *MainInputProvider) Reset() { 111 | for _, p := range i.Providers { 112 | p.ResetPosition() 113 | } 114 | i.position = 0 115 | i.msbIterator = 0 116 | } 117 | 118 | // pitchforkValue returns a map of keyword:value pairs including all inputs. 119 | // This mode will iterate through wordlists in lockstep. 120 | func (i *MainInputProvider) pitchforkValue() map[string][]byte { 121 | values := make(map[string][]byte) 122 | for _, p := range i.Providers { 123 | if !p.Active() { 124 | // The inputprovider is disabled 125 | continue 126 | } 127 | if !p.Next() { 128 | // Loop to beginning if the inputprovider has been exhausted 129 | p.ResetPosition() 130 | } 131 | values[p.Keyword()] = p.Value() 132 | p.IncrementPosition() 133 | } 134 | return values 135 | } 136 | 137 | func (i *MainInputProvider) setpitchforkPosition(pos int) { 138 | for _, p := range i.Providers { 139 | p.SetPosition(pos) 140 | } 141 | } 142 | 143 | // clusterbombValue returns map of keyword:value pairs including all inputs. 144 | // this mode will iterate through all possible combinations. 
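// The iteration below behaves roughly like an odometer: the first active provider advances on
// every call, and msbIterator tracks which provider is currently treated as the most significant
// "digit"; when a provider runs out of values, the providers before it are reset and the next one
// is incremented. Rough example (derived from this logic, not from upstream docs): with wordlists
// ["a", "b"] for FUZZ and ["1", "2"] for FUZ2Z, values are produced as (a,1), (b,1), (a,2), (b,2).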
145 | func (i *MainInputProvider) clusterbombValue() map[string][]byte { 146 | values := make(map[string][]byte) 147 | // Should we signal the next InputProvider in the slice to increment 148 | signalNext := false 149 | first := true 150 | index := 0 151 | for _, p := range i.Providers { 152 | if !p.Active() { 153 | continue 154 | } 155 | if signalNext { 156 | p.IncrementPosition() 157 | signalNext = false 158 | } 159 | if !p.Next() { 160 | // No more inputs in this inputprovider 161 | if index == i.msbIterator { 162 | // Reset all previous wordlists and increment the msb counter 163 | i.msbIterator += 1 164 | i.clusterbombIteratorReset() 165 | // Start again 166 | return i.clusterbombValue() 167 | } 168 | p.ResetPosition() 169 | signalNext = true 170 | } 171 | values[p.Keyword()] = p.Value() 172 | if first { 173 | p.IncrementPosition() 174 | first = false 175 | } 176 | index += 1 177 | } 178 | return values 179 | } 180 | 181 | func (i *MainInputProvider) setclusterbombPosition(pos int) { 182 | i.Reset() 183 | if pos > i.Total() { 184 | // noop 185 | return 186 | } 187 | for i.position < pos-1 { 188 | i.Next() 189 | i.Value() 190 | } 191 | } 192 | 193 | func (i *MainInputProvider) clusterbombIteratorReset() { 194 | index := 0 195 | for _, p := range i.Providers { 196 | if !p.Active() { 197 | continue 198 | } 199 | if index < i.msbIterator { 200 | p.ResetPosition() 201 | } 202 | if index == i.msbIterator { 203 | p.IncrementPosition() 204 | } 205 | index += 1 206 | } 207 | } 208 | 209 | // Total returns the amount of input combinations available 210 | func (i *MainInputProvider) Total() int { 211 | count := 0 212 | if i.Config.InputMode == "pitchfork" { 213 | for _, p := range i.Providers { 214 | if !p.Active() { 215 | continue 216 | } 217 | if p.Total() > count { 218 | count = p.Total() 219 | } 220 | } 221 | } 222 | if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { 223 | count = 1 224 | for _, p := range i.Providers { 225 | if !p.Active() { 226 | continue 227 | } 228 | count = count * p.Total() 229 | } 230 | } 231 | return count 232 | } 233 | 234 | // sliceContains is a helper function that returns true if a string is included in a string slice 235 | func sliceContains(sslice []string, str string) bool { 236 | for _, v := range sslice { 237 | if v == str { 238 | return true 239 | } 240 | } 241 | return false 242 | } 243 | -------------------------------------------------------------------------------- /pkg/ffuf/request_test.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "reflect" 5 | "testing" 6 | ) 7 | 8 | func TestBaseRequest(t *testing.T) { 9 | headers := make(map[string]string) 10 | headers["foo"] = "bar" 11 | headers["baz"] = "wibble" 12 | headers["Content-Type"] = "application/json" 13 | 14 | data := "{\"quote\":\"I'll still be here tomorrow to high five you yesterday, my friend. 
Peace.\"}" 15 | 16 | expectedreq := Request{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: []byte(data)} 17 | config := Config{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: data} 18 | basereq := BaseRequest(&config) 19 | 20 | if !reflect.DeepEqual(basereq, expectedreq) { 21 | t.Errorf("BaseRequest does not return a struct with expected values") 22 | } 23 | 24 | } 25 | 26 | func TestCopyRequest(t *testing.T) { 27 | headers := make(map[string]string) 28 | headers["foo"] = "bar" 29 | headers["omg"] = "bbq" 30 | 31 | data := "line=Is+that+where+creativity+comes+from?+From+sad+biz?" 32 | 33 | input := make(map[string][]byte) 34 | input["matthew"] = []byte("If you are the head that floats atop the §ziggurat§, then the stairs that lead to you must be infinite.") 35 | 36 | basereq := Request{Method: "POST", 37 | Host: "testhost.local", 38 | Url: "http://example.com/aaaa", 39 | Headers: headers, 40 | Data: []byte(data), 41 | Input: input, 42 | Position: 2, 43 | Raw: "We're not oil and water, we're oil and vinegar! It's good. It's yummy.", 44 | } 45 | 46 | copiedreq := CopyRequest(&basereq) 47 | 48 | if !reflect.DeepEqual(basereq, copiedreq) { 49 | t.Errorf("CopyRequest does not return an equal struct") 50 | } 51 | } 52 | 53 | func TestSniperRequests(t *testing.T) { 54 | headers := make(map[string]string) 55 | headers["foo"] = "§bar§" 56 | headers["§omg§"] = "bbq" 57 | 58 | testreq := Request{ 59 | Method: "§POST§", 60 | Url: "http://example.com/aaaa?param=§lemony§", 61 | Headers: headers, 62 | Data: []byte("line=§yo yo, it's grease§"), 63 | } 64 | 65 | requests := SniperRequests(&testreq, "§") 66 | 67 | if len(requests) != 5 { 68 | t.Errorf("SniperRequests returned an incorrect number of requests") 69 | } 70 | 71 | headers = make(map[string]string) 72 | headers["foo"] = "bar" 73 | headers["omg"] = "bbq" 74 | 75 | var expected Request 76 | expected = Request{ // Method 77 | Method: "FUZZ", 78 | Url: "http://example.com/aaaa?param=lemony", 79 | Headers: headers, 80 | Data: []byte("line=yo yo, it's grease"), 81 | } 82 | 83 | pass := false 84 | for _, req := range requests { 85 | if reflect.DeepEqual(req, expected) { 86 | pass = true 87 | } 88 | } 89 | 90 | if !pass { 91 | t.Errorf("SniperRequests does not return expected values (Method)") 92 | } 93 | 94 | expected = Request{ // URL 95 | Method: "POST", 96 | Url: "http://example.com/aaaa?param=FUZZ", 97 | Headers: headers, 98 | Data: []byte("line=yo yo, it's grease"), 99 | } 100 | 101 | pass = false 102 | for _, req := range requests { 103 | if reflect.DeepEqual(req, expected) { 104 | pass = true 105 | } 106 | } 107 | 108 | if !pass { 109 | t.Errorf("SniperRequests does not return expected values (Url)") 110 | } 111 | 112 | expected = Request{ // Data 113 | Method: "POST", 114 | Url: "http://example.com/aaaa?param=lemony", 115 | Headers: headers, 116 | Data: []byte("line=FUZZ"), 117 | } 118 | 119 | pass = false 120 | for _, req := range requests { 121 | if reflect.DeepEqual(req, expected) { 122 | pass = true 123 | } 124 | } 125 | 126 | if !pass { 127 | t.Errorf("SniperRequests does not return expected values (Data)") 128 | } 129 | 130 | headers = make(map[string]string) 131 | headers["foo"] = "FUZZ" 132 | headers["omg"] = "bbq" 133 | 134 | expected = Request{ // Header value 135 | Method: "POST", 136 | Url: "http://example.com/aaaa?param=lemony", 137 | Headers: headers, 138 | Data: []byte("line=yo yo, it's grease"), 139 | } 140 | 141 | pass = false 142 | for _, req := range requests { 143 | if 
reflect.DeepEqual(req, expected) { 144 | pass = true 145 | } 146 | } 147 | 148 | if !pass { 149 | t.Errorf("SniperRequests does not return expected values (Header value)") 150 | } 151 | 152 | headers = make(map[string]string) 153 | headers["foo"] = "bar" 154 | headers["FUZZ"] = "bbq" 155 | 156 | expected = Request{ // Header key 157 | Method: "POST", 158 | Url: "http://example.com/aaaa?param=lemony", 159 | Headers: headers, 160 | Data: []byte("line=yo yo, it's grease"), 161 | } 162 | 163 | pass = false 164 | for _, req := range requests { 165 | if reflect.DeepEqual(req, expected) { 166 | pass = true 167 | } 168 | } 169 | 170 | if !pass { 171 | t.Errorf("SniperRequests does not return expected values (Header key)") 172 | } 173 | 174 | } 175 | 176 | func TestTemplateLocations(t *testing.T) { 177 | test := "this is my 1§template locator§ test" 178 | arr := templateLocations("§", test) 179 | expected := []int{12, 29} 180 | if !reflect.DeepEqual(arr, expected) { 181 | t.Errorf("templateLocations does not return expected values") 182 | } 183 | 184 | test2 := "§template locator§" 185 | arr = templateLocations("§", test2) 186 | expected = []int{0, 17} 187 | if !reflect.DeepEqual(arr, expected) { 188 | t.Errorf("templateLocations does not return expected values") 189 | } 190 | 191 | if len(templateLocations("§", "te§st2")) != 1 { 192 | t.Errorf("templateLocations does not return expected values") 193 | } 194 | } 195 | 196 | func TestInjectKeyword(t *testing.T) { 197 | input := "§Greetings, creator§" 198 | offsetTuple := templateLocations("§", input) 199 | expected := "FUZZ" 200 | 201 | result := injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1]) 202 | if result != expected { 203 | t.Errorf("injectKeyword returned unexpected result: " + result) 204 | } 205 | 206 | if injectKeyword(input, "FUZZ", -32, 44) != input { 207 | t.Errorf("injectKeyword offset validation failed") 208 | } 209 | 210 | if injectKeyword(input, "FUZZ", 12, 2) != input { 211 | t.Errorf("injectKeyword offset validation failed") 212 | } 213 | 214 | if injectKeyword(input, "FUZZ", 0, 25) != input { 215 | t.Errorf("injectKeyword offset validation failed") 216 | } 217 | 218 | input = "id=§a§&sort=desc" 219 | offsetTuple = templateLocations("§", input) 220 | expected = "id=FUZZ&sort=desc" 221 | 222 | result = injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1]) 223 | if result != expected { 224 | t.Errorf("injectKeyword returned unexpected result: " + result) 225 | } 226 | 227 | input = "feature=aaa&thingie=bbb&array[§0§]=baz" 228 | offsetTuple = templateLocations("§", input) 229 | expected = "feature=aaa&thingie=bbb&array[FUZZ]=baz" 230 | 231 | result = injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1]) 232 | if result != expected { 233 | t.Errorf("injectKeyword returned unexpected result: " + result) 234 | } 235 | } 236 | 237 | func TestScrubTemplates(t *testing.T) { 238 | headers := make(map[string]string) 239 | headers["foo"] = "§bar§" 240 | headers["§omg§"] = "bbq" 241 | 242 | testreq := Request{Method: "§POST§", 243 | Url: "http://example.com/aaaa?param=§lemony§", 244 | Headers: headers, 245 | Data: []byte("line=§yo yo, it's grease§"), 246 | } 247 | 248 | headers = make(map[string]string) 249 | headers["foo"] = "bar" 250 | headers["omg"] = "bbq" 251 | 252 | expectedreq := Request{Method: "POST", 253 | Url: "http://example.com/aaaa?param=lemony", 254 | Headers: headers, 255 | Data: []byte("line=yo yo, it's grease"), 256 | } 257 | 258 | scrubTemplates(&testreq, "§") 259 | 260 | if 
!reflect.DeepEqual(testreq, expectedreq) { 261 | t.Errorf("scrubTemplates does not return expected values") 262 | } 263 | } 264 | -------------------------------------------------------------------------------- /pkg/ffuf/autocalibration.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | "math/rand" 7 | "strconv" 8 | "time" 9 | ) 10 | 11 | func (j *Job) autoCalibrationStrings() map[string][]string { 12 | rand.Seed(time.Now().UnixNano()) 13 | cInputs := make(map[string][]string) 14 | if len(j.Config.AutoCalibrationStrings) < 1 { 15 | cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(16)) 16 | cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(8)) 17 | cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(16)) 18 | cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(8)) 19 | cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(16)) 20 | cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(8)) 21 | if j.Config.AutoCalibrationStrategy == "advanced" { 22 | // Add directory tests and .htaccess too 23 | cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(16)+"/") 24 | cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(8)+"/") 25 | cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(16)+"/") 26 | cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(8)+"/") 27 | } 28 | } else { 29 | cInputs["custom"] = append(cInputs["custom"], j.Config.AutoCalibrationStrings...) 30 | } 31 | return cInputs 32 | } 33 | 34 | func (j *Job) calibrationRequest(inputs map[string][]byte) (Response, error) { 35 | basereq := BaseRequest(j.Config) 36 | req, err := j.Runner.Prepare(inputs, &basereq) 37 | if err != nil { 38 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing autocalibration request: %s\n", err)) 39 | j.incError() 40 | log.Printf("%s", err) 41 | return Response{}, err 42 | } 43 | resp, err := j.Runner.Execute(&req) 44 | if err != nil { 45 | j.Output.Error(fmt.Sprintf("Encountered an error while executing autocalibration request: %s\n", err)) 46 | j.incError() 47 | log.Printf("%s", err) 48 | return Response{}, err 49 | } 50 | // Only calibrate on responses that would be matched otherwise 51 | if j.isMatch(resp) { 52 | return resp, nil 53 | } 54 | return resp, fmt.Errorf("Response wouldn't be matched") 55 | } 56 | 57 | // CalibrateForHost runs autocalibration for a specific host 58 | func (j *Job) CalibrateForHost(host string, baseinput map[string][]byte) error { 59 | if j.Config.MatcherManager.CalibratedForDomain(host) { 60 | return nil 61 | } 62 | if baseinput[j.Config.AutoCalibrationKeyword] == nil { 63 | return fmt.Errorf("Autocalibration keyword \"%s\" not found in the request.", j.Config.AutoCalibrationKeyword) 64 | } 65 | cStrings := j.autoCalibrationStrings() 66 | input := make(map[string][]byte) 67 | for k, v := range baseinput { 68 | input[k] = v 69 | } 70 | for _, v := range cStrings { 71 | responses := make([]Response, 0) 72 | for _, cs := range v { 73 | input[j.Config.AutoCalibrationKeyword] = []byte(cs) 74 | resp, err := j.calibrationRequest(input) 75 | if err != nil { 76 | continue 77 | } 78 | responses = append(responses, resp) 79 | err = j.calibrateFilters(responses, true) 80 | if err != nil { 81 | j.Output.Error(fmt.Sprintf("%s", err)) 82 | } 83 | } 84 | } 85 | 
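// All calibration inputs for this host have been processed; mark the host as calibrated so that
// CalibrateForHost returns early (and sends no further calibration requests) on subsequent calls.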
j.Config.MatcherManager.SetCalibratedForHost(host, true) 86 | return nil 87 | } 88 | 89 | // CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests 90 | func (j *Job) Calibrate(input map[string][]byte) error { 91 | if j.Config.MatcherManager.Calibrated() { 92 | return nil 93 | } 94 | cInputs := j.autoCalibrationStrings() 95 | 96 | for _, v := range cInputs { 97 | responses := make([]Response, 0) 98 | for _, cs := range v { 99 | input[j.Config.AutoCalibrationKeyword] = []byte(cs) 100 | resp, err := j.calibrationRequest(input) 101 | if err != nil { 102 | continue 103 | } 104 | responses = append(responses, resp) 105 | } 106 | _ = j.calibrateFilters(responses, false) 107 | } 108 | j.Config.MatcherManager.SetCalibrated(true) 109 | return nil 110 | } 111 | 112 | // CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and 113 | // 114 | // configuring the filters accordingly 115 | func (j *Job) CalibrateIfNeeded(host string, input map[string][]byte) error { 116 | j.calibMutex.Lock() 117 | defer j.calibMutex.Unlock() 118 | if !j.Config.AutoCalibration { 119 | return nil 120 | } 121 | if j.Config.AutoCalibrationPerHost { 122 | return j.CalibrateForHost(host, input) 123 | } 124 | return j.Calibrate(input) 125 | } 126 | 127 | func (j *Job) calibrateFilters(responses []Response, perHost bool) error { 128 | // Work down from the most specific common denominator 129 | if len(responses) > 0 { 130 | // Content length 131 | baselineSize := responses[0].ContentLength 132 | sizeMatch := true 133 | for _, r := range responses { 134 | if baselineSize != r.ContentLength { 135 | sizeMatch = false 136 | } 137 | } 138 | if sizeMatch { 139 | if perHost { 140 | // Check if already filtered 141 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { 142 | match, _ := f.Filter(&responses[0]) 143 | if match { 144 | // Already filtered 145 | return nil 146 | } 147 | } 148 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "size", strconv.FormatInt(baselineSize, 10)) 149 | return nil 150 | } else { 151 | // Check if already filtered 152 | for _, f := range j.Config.MatcherManager.GetFilters() { 153 | match, _ := f.Filter(&responses[0]) 154 | if match { 155 | // Already filtered 156 | return nil 157 | } 158 | } 159 | _ = j.Config.MatcherManager.AddFilter("size", strconv.FormatInt(baselineSize, 10), false) 160 | return nil 161 | } 162 | } 163 | 164 | // Content words 165 | baselineWords := responses[0].ContentWords 166 | wordsMatch := true 167 | for _, r := range responses { 168 | if baselineWords != r.ContentWords { 169 | wordsMatch = false 170 | } 171 | } 172 | if wordsMatch { 173 | if perHost { 174 | // Check if already filtered 175 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { 176 | match, _ := f.Filter(&responses[0]) 177 | if match { 178 | // Already filtered 179 | return nil 180 | } 181 | } 182 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "word", strconv.FormatInt(baselineWords, 10)) 183 | return nil 184 | } else { 185 | // Check if already filtered 186 | for _, f := range j.Config.MatcherManager.GetFilters() { 187 | match, _ := f.Filter(&responses[0]) 188 | if match { 189 | // Already filtered 190 | return nil 191 | } 192 | } 193 | _ = j.Config.MatcherManager.AddFilter("word", strconv.FormatInt(baselineWords, 10), 
false) 194 | return nil 195 | } 196 | } 197 | 198 | // Content lines 199 | baselineLines := responses[0].ContentLines 200 | linesMatch := true 201 | for _, r := range responses { 202 | if baselineLines != r.ContentLines { 203 | linesMatch = false 204 | } 205 | } 206 | if linesMatch { 207 | if perHost { 208 | // Check if already filtered 209 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { 210 | match, _ := f.Filter(&responses[0]) 211 | if match { 212 | // Already filtered 213 | return nil 214 | } 215 | } 216 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "line", strconv.FormatInt(baselineLines, 10)) 217 | return nil 218 | } else { 219 | // Check if already filtered 220 | for _, f := range j.Config.MatcherManager.GetFilters() { 221 | match, _ := f.Filter(&responses[0]) 222 | if match { 223 | // Already filtered 224 | return nil 225 | } 226 | } 227 | _ = j.Config.MatcherManager.AddFilter("line", strconv.FormatInt(baselineLines, 10), false) 228 | return nil 229 | } 230 | } 231 | } 232 | return fmt.Errorf("No common filtering values found") 233 | } 234 | -------------------------------------------------------------------------------- /pkg/interactive/termhandler.go: -------------------------------------------------------------------------------- 1 | package interactive 2 | 3 | import ( 4 | "bufio" 5 | "fmt" 6 | "strconv" 7 | "strings" 8 | "time" 9 | 10 | "github.com/ffuf/ffuf/pkg/ffuf" 11 | ) 12 | 13 | type interactive struct { 14 | Job *ffuf.Job 15 | paused bool 16 | } 17 | 18 | func Handle(job *ffuf.Job) error { 19 | i := interactive{job, false} 20 | tty, err := termHandle() 21 | if err != nil { 22 | return err 23 | } 24 | defer tty.Close() 25 | inreader := bufio.NewScanner(tty) 26 | inreader.Split(bufio.ScanLines) 27 | for inreader.Scan() { 28 | i.handleInput(inreader.Bytes()) 29 | } 30 | return nil 31 | } 32 | 33 | func (i *interactive) handleInput(in []byte) { 34 | instr := string(in) 35 | args := strings.Split(strings.TrimSpace(instr), " ") 36 | if len(args) == 1 && args[0] == "" { 37 | // Enter pressed - toggle interactive state 38 | i.paused = !i.paused 39 | if i.paused { 40 | i.Job.Pause() 41 | time.Sleep(500 * time.Millisecond) 42 | i.printBanner() 43 | } else { 44 | i.Job.Resume() 45 | } 46 | } else { 47 | switch args[0] { 48 | case "?": 49 | i.printHelp() 50 | case "help": 51 | i.printHelp() 52 | case "resume": 53 | i.paused = false 54 | i.Job.Resume() 55 | case "restart": 56 | i.Job.Reset(false) 57 | i.paused = false 58 | i.Job.Output.Info("Restarting the current ffuf job!") 59 | i.Job.Resume() 60 | case "show": 61 | for _, r := range i.Job.Output.GetCurrentResults() { 62 | i.Job.Output.PrintResult(r) 63 | } 64 | case "savejson": 65 | if len(args) < 2 { 66 | i.Job.Output.Error("Please define the filename") 67 | } else if len(args) > 2 { 68 | i.Job.Output.Error("Too many arguments for \"savejson\"") 69 | } else { 70 | err := i.Job.Output.SaveFile(args[1], "json") 71 | if err != nil { 72 | i.Job.Output.Error(fmt.Sprintf("%s", err)) 73 | } else { 74 | i.Job.Output.Info("Output file successfully saved!") 75 | } 76 | } 77 | case "fc": 78 | if len(args) < 2 { 79 | i.Job.Output.Error("Please define a value for status code filter, or \"none\" for removing it") 80 | } else if len(args) > 2 { 81 | i.Job.Output.Error("Too many arguments for \"fc\"") 82 | } else { 83 | i.updateFilter("status", args[1], true) 84 | i.Job.Output.Info("New status code filter value set") 85 | } 86 | case "afc": 87 
| if len(args) < 2 { 88 | i.Job.Output.Error("Please define a value to append to status code filter") 89 | } else if len(args) > 2 { 90 | i.Job.Output.Error("Too many arguments for \"afc\"") 91 | } else { 92 | i.appendFilter("status", args[1]) 93 | i.Job.Output.Info("New status code filter value set") 94 | } 95 | case "fl": 96 | if len(args) < 2 { 97 | i.Job.Output.Error("Please define a value for line count filter, or \"none\" for removing it") 98 | } else if len(args) > 2 { 99 | i.Job.Output.Error("Too many arguments for \"fl\"") 100 | } else { 101 | i.updateFilter("line", args[1], true) 102 | i.Job.Output.Info("New line count filter value set") 103 | } 104 | case "afl": 105 | if len(args) < 2 { 106 | i.Job.Output.Error("Please define a value to append to line count filter") 107 | } else if len(args) > 2 { 108 | i.Job.Output.Error("Too many arguments for \"afl\"") 109 | } else { 110 | i.appendFilter("line", args[1]) 111 | i.Job.Output.Info("New line count filter value set") 112 | } 113 | case "fw": 114 | if len(args) < 2 { 115 | i.Job.Output.Error("Please define a value for word count filter, or \"none\" for removing it") 116 | } else if len(args) > 2 { 117 | i.Job.Output.Error("Too many arguments for \"fw\"") 118 | } else { 119 | i.updateFilter("word", args[1], true) 120 | i.Job.Output.Info("New word count filter value set") 121 | } 122 | case "afw": 123 | if len(args) < 2 { 124 | i.Job.Output.Error("Please define a value to append to word count filter") 125 | } else if len(args) > 2 { 126 | i.Job.Output.Error("Too many arguments for \"afw\"") 127 | } else { 128 | i.appendFilter("word", args[1]) 129 | i.Job.Output.Info("New word count filter value set") 130 | } 131 | case "fs": 132 | if len(args) < 2 { 133 | i.Job.Output.Error("Please define a value for response size filter, or \"none\" for removing it") 134 | } else if len(args) > 2 { 135 | i.Job.Output.Error("Too many arguments for \"fs\"") 136 | } else { 137 | i.updateFilter("size", args[1], true) 138 | i.Job.Output.Info("New response size filter value set") 139 | } 140 | case "afs": 141 | if len(args) < 2 { 142 | i.Job.Output.Error("Please define a value to append to size filter") 143 | } else if len(args) > 2 { 144 | i.Job.Output.Error("Too many arguments for \"afs\"") 145 | } else { 146 | i.appendFilter("size", args[1]) 147 | i.Job.Output.Info("New response size filter value set") 148 | } 149 | case "ft": 150 | if len(args) < 2 { 151 | i.Job.Output.Error("Please define a value for response time filter, or \"none\" for removing it") 152 | } else if len(args) > 2 { 153 | i.Job.Output.Error("Too many arguments for \"ft\"") 154 | } else { 155 | i.updateFilter("time", args[1], true) 156 | i.Job.Output.Info("New response time filter value set") 157 | } 158 | case "aft": 159 | if len(args) < 2 { 160 | i.Job.Output.Error("Please define a value to append to response time filter") 161 | } else if len(args) > 2 { 162 | i.Job.Output.Error("Too many arguments for \"aft\"") 163 | } else { 164 | i.appendFilter("time", args[1]) 165 | i.Job.Output.Info("New response time filter value set") 166 | } 167 | case "queueshow": 168 | i.printQueue() 169 | case "queuedel": 170 | if len(args) < 2 { 171 | i.Job.Output.Error("Please define the index of a queued job to remove. 
Use \"queueshow\" for listing of jobs.") 172 | } else if len(args) > 2 { 173 | i.Job.Output.Error("Too many arguments for \"queuedel\"") 174 | } else { 175 | i.deleteQueue(args[1]) 176 | } 177 | case "queueskip": 178 | i.Job.SkipQueue() 179 | i.Job.Output.Info("Skipping to the next queued job") 180 | case "rate": 181 | if len(args) < 2 { 182 | i.Job.Output.Error("Please define the new rate") 183 | } else if len(args) > 2 { 184 | i.Job.Output.Error("Too many arguments for \"rate\"") 185 | } else { 186 | newrate, err := strconv.Atoi(args[1]) 187 | if err != nil { 188 | i.Job.Output.Error(fmt.Sprintf("Could not adjust rate: %s", err)) 189 | } else { 190 | i.Job.Rate.ChangeRate(newrate) 191 | } 192 | } 193 | 194 | default: 195 | if i.paused { 196 | i.Job.Output.Warning(fmt.Sprintf("Unknown command: \"%s\". Enter \"help\" for a list of available commands", args[0])) 197 | } else { 198 | i.Job.Output.Error("NOPE") 199 | } 200 | } 201 | } 202 | 203 | if i.paused { 204 | i.printPrompt() 205 | } 206 | } 207 | 208 | func (i *interactive) refreshResults() { 209 | results := make([]ffuf.Result, 0) 210 | filters := i.Job.Config.MatcherManager.GetFilters() 211 | for _, filter := range filters { 212 | for _, res := range i.Job.Output.GetCurrentResults() { 213 | fakeResp := &ffuf.Response{ 214 | StatusCode: res.StatusCode, 215 | ContentLines: res.ContentLength, 216 | ContentWords: res.ContentWords, 217 | ContentLength: res.ContentLength, 218 | } 219 | filterOut, _ := filter.Filter(fakeResp) 220 | if !filterOut { 221 | results = append(results, res) 222 | } 223 | } 224 | } 225 | i.Job.Output.SetCurrentResults(results) 226 | } 227 | 228 | func (i *interactive) updateFilter(name, value string, replace bool) { 229 | if value == "none" { 230 | i.Job.Config.MatcherManager.RemoveFilter(name) 231 | } else { 232 | _ = i.Job.Config.MatcherManager.AddFilter(name, value, replace) 233 | } 234 | i.refreshResults() 235 | } 236 | 237 | func (i *interactive) appendFilter(name, value string) { 238 | i.updateFilter(name, value, false) 239 | } 240 | 241 | func (i *interactive) printQueue() { 242 | if len(i.Job.QueuedJobs()) > 0 { 243 | i.Job.Output.Raw("Queued jobs:\n") 244 | for index, job := range i.Job.QueuedJobs() { 245 | postfix := "" 246 | if index == 0 { 247 | postfix = " (active job)" 248 | } 249 | i.Job.Output.Raw(fmt.Sprintf(" [%d] : %s%s\n", index, job.Url, postfix)) 250 | } 251 | } else { 252 | i.Job.Output.Info("Job queue is empty") 253 | } 254 | } 255 | 256 | func (i *interactive) deleteQueue(in string) { 257 | index, err := strconv.Atoi(in) 258 | if err != nil { 259 | i.Job.Output.Warning(fmt.Sprintf("Not a number: %s", in)) 260 | } else { 261 | if index < 0 || index > len(i.Job.QueuedJobs())-1 { 262 | i.Job.Output.Warning("No such queued job. Use \"queueshow\" to list the jobs in queue") 263 | } else if index == 0 { 264 | i.Job.Output.Warning("Cannot delete the currently running job. 
Use \"queueskip\" to advance to the next one") 265 | } else { 266 | i.Job.DeleteQueueItem(index) 267 | i.Job.Output.Info("Job successfully deleted!") 268 | } 269 | } 270 | } 271 | func (i *interactive) printBanner() { 272 | i.Job.Output.Raw("entering interactive mode\ntype \"help\" for a list of commands, or ENTER to resume.\n") 273 | } 274 | 275 | func (i *interactive) printPrompt() { 276 | i.Job.Output.Raw("> ") 277 | } 278 | 279 | func (i *interactive) printHelp() { 280 | var fc, fl, fs, ft, fw string 281 | for name, filter := range i.Job.Config.MatcherManager.GetFilters() { 282 | switch name { 283 | case "status": 284 | fc = "(active: " + filter.Repr() + ")" 285 | case "line": 286 | fl = "(active: " + filter.Repr() + ")" 287 | case "word": 288 | fw = "(active: " + filter.Repr() + ")" 289 | case "size": 290 | fs = "(active: " + filter.Repr() + ")" 291 | case "time": 292 | ft = "(active: " + filter.Repr() + ")" 293 | } 294 | } 295 | rate := fmt.Sprintf("(active: %d)", i.Job.Config.Rate) 296 | help := ` 297 | available commands: 298 | afc [value] - append to status code filter %s 299 | fc [value] - (re)configure status code filter %s 300 | afl [value] - append to line count filter %s 301 | fl [value] - (re)configure line count filter %s 302 | afw [value] - append to word count filter %s 303 | fw [value] - (re)configure word count filter %s 304 | afs [value] - append to size filter %s 305 | fs [value] - (re)configure size filter %s 306 | aft [value] - append to time filter %s 307 | ft [value] - (re)configure time filter %s 308 | rate [value] - adjust rate of requests per second %s 309 | queueshow - show job queue 310 | queuedel [number] - delete a job in the queue 311 | queueskip - advance to the next queued job 312 | restart - restart and resume the current ffuf job 313 | resume - resume current ffuf job (or: ENTER) 314 | show - show results for the current job 315 | savejson [filename] - save current matches to a file 316 | help - you are looking at it 317 | ` 318 | i.Job.Output.Raw(fmt.Sprintf(help, fc, fc, fl, fl, fw, fw, fs, fs, ft, ft, rate)) 319 | } 320 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## Changelog 2 | - master 3 | - New 4 | - Added a new, dynamic keyword `FFUFHASH` that generates hash from job configuration and wordlist position to map blind payloads back to the initial request. 5 | - New command line parameter for searching a hash: `-search FFUFHASH` 6 | - Changed 7 | - Multiline output prints out alphabetically sorted by keyword 8 | - Default configuration directories now follow `XDG_CONFIG_HOME` variable (less spam in your home directory) 9 | - Fixed issue with autocalibration of line & words filter 10 | - Made JSON (`-json`) output format take precedence over quiet output mode, to allow JSON output without the banner etc 11 | 12 | 13 | - v1.5.0 14 | - New 15 | - New autocalibration options: `-ach`, `-ack` and `-acs`. 
Revamped the whole autocalibration process
16 | - Configurable modes for matchers and filters (CLI flags: `fmode` and `mmode`): "and" and "or"
17 | - Changed
18 |
19 | - v1.4.1
20 | - New
21 | - Changed
22 | - Fixed a bug with recursion, introduced in the 1.4.0 release
23 | - Recursion now works better with multiple wordlists, disabling unnecessary wordlists for queued jobs where needed
24 |
25 | - v1.4.0
26 | - New
27 | - Added response time logging and filtering
28 | - Added a CLI flag to specify the TLS SNI value
29 | - Added full line colors
30 | - Added `-json` to emit newline delimited JSON output
31 | - Added 500 Internal Server Error to the list of status codes matched by default
32 | - Changed
33 | - Fixed an issue where the output file was created regardless of `-or`
34 | - Fixed an issue where output (often a lot of it) would be printed after entering interactive mode
35 | - Fixed an issue when reading wordlist files from ffufrc
36 | - Fixed an issue where the `-of all` option only created one output file (instead of all formats)
37 | - Fixed an issue where redirection to the same domain in recursive mode dropped port info from the URL
38 | - Added HTTP2 support
39 |
40 | - v1.3.1
41 | - New
42 | - Added a CLI flag to disable the interactive mode
43 | - Changed
44 | - Do not read the last newline at the end of the raw request file when using -request
45 | - Fixed an issue with storing the matches for recursion jobs
46 | - Fixed the way the "size" is calculated; it should match Content-Length now
47 | - Fixed an issue with header canonicalization when a keyword was just a part of the header name
48 | - Fixed output writing so it doesn't silently fail if it needs to create directories recursively
49 |
50 | - v1.3.0
51 | - New
52 | - All output file formats now include the `Content-Type`.
53 | - New CLI flag `-recursion-strategy` that allows adding new queued recursion jobs for non-redirect responses.
54 | - Ability to enter interactive mode by pressing `ENTER` during the ffuf execution. The interactive mode allows
55 | the user to change filters, manage the recursion queue, save a snapshot of matches to a file, etc.
56 | - Changed
57 | - Fixed a bad character in the progress output
58 |
59 | - v1.2.1
60 | - Changed
61 | - Fixed a build-breaking bug in the `input-shell` parameter
62 |
63 | - v1.2.0
64 | - New
65 | - Added 405 Method Not Allowed to the list of status codes matched by default.
66 | - New CLI flag `-rate` to set the maximum rate of requests per second. The adjustment is dynamic.
67 | - New CLI flag `-config` to define a configuration file with preconfigured settings for the job.
68 | - Ffuf now reads a default configuration file `$HOME/.ffufrc` upon startup. Options set in this file
69 | are overwritten by the ones provided on the CLI.
70 | - Changed banner logging to stderr instead of stdout.
71 | - New CLI flag `-or` to avoid creating result files if we didn't get any.
72 | - New CLI flag `-input-shell` to set the shell to be used by `input-cmd`
73 |
74 | - Changed
75 | - Pre-flight errors are now also displayed after the usage text, to prevent the need to scroll through the backlog.
76 | - Cancelling via SIGINT (Ctrl-C) is now more responsive
77 | - Fixed an issue where a thread would hang due to TCP errors
78 | - Fixed the issue where the option -ac was overwriting existing filters. Now auto-calibration will add them where needed.
79 | - The `-w` flag now accepts comma delimited values in the form of `file1:W1,file2:W2`.
80 | - Links in the HTML report are now clickable
81 | - Fixed a panic during wordlist flag parsing on Windows systems.
82 |
83 | - v1.1.0
84 | - New
85 | - New CLI flag `-maxtime-job` to set the max. execution time per job.
86 | - Changed the behaviour of `-maxtime`; it can now be used for the entire process.
87 | - A new flag `-ignore-body` so ffuf does not fetch the response content. Default value=false.
88 | - Added the wordlists to the header information.
89 | - Added support to output "all" formats (specify the path/filename sans file extension and ffuf will add the appropriate suffix for the filetype)
90 |
91 | - Changed
92 | - Fixed a bug related to the autocalibration feature by making the random seed initialization also take place before autocalibration needs it.
93 | - Added a TLS renegotiation flag to http.Client to fix #193
94 | - Fixed the HTML report to display a select/combo-box for rows per page (and increased the default from 10 to 250 rows).
95 | - Added Host information to the JSON output file
96 | - Fixed the request method when supplying a request file
97 | - Fixed a crash with 3XX responses that weren't redirects (304 Not Modified, 300 Multiple Choices etc.)
98 |
99 | - v1.0.2
100 | - Changed
101 | - Write POST request data properly to file when run with `-od`.
102 | - Fixed a bug related to HTTP headers being case insensitive by using header canonicalization.
103 | - Properly handle relative redirect URLs with `-recursion`
104 | - Calculate req/sec correctly when using recursion
105 | - When `-request` is used, allow the user to override the URL using `-u`
106 |
107 | - v1.0.1
108 | - Changed
109 | - Fixed a bug where regex matchers and filters would fail if `-od` was used to store the request & response contents.
110 |
111 | - v1.0
112 | - New
113 | - New CLI flag `-ic` to ignore comments in the wordlist.
114 | - New CLI flags `-request` to specify the raw request file to build the actual request from and `-request-proto` to define the new request format.
115 | - New CLI flag `-od` (output directory) to enable writing requests and responses for matched results to a file for postprocessing or debugging purposes.
116 | - New CLI flag `-maxtime` to limit the running time of ffuf
117 | - New CLI flags `-recursion` and `-recursion-depth` to control recursive ffuf jobs if directories are found. This requires the `-u` value to end with the FUZZ keyword.
118 | - New CLI flag `-replay-proxy` to replay matched requests using a custom proxy.
119 | - Changed
120 | - Limit the use of `-e` (extensions) to a single keyword: FUZZ
121 | - Regexp matching and filtering (-mr/-fr) allow using keywords in patterns
122 | - Take 429 responses into account when -sa (stop on all error cases) is used
123 | - Removed -k flag support, converted it to a dummy flag #134
124 | - Write configuration to output JSON
125 | - Better help text.
126 | - If any matcher is set, ignore the -mc default value.
127 |
128 | - v0.12
129 | - New
130 | - Added a new flag to select a multi wordlist operation mode: `--mode`, possible values: `clusterbomb` and `pitchfork`.
131 | - Added a new output file format, eJSON, for always base64 encoding the input data.
132 | - Redirect location is always shown in the output files (when using `-o`)
133 | - Full URL is always shown in the output files (when using `-o`)
134 | - HTML output format got [DataTables](https://datatables.net/) support allowing realtime searches, sorting by column etc.
135 | - New CLI flag `-v` for verbose output, including full URL and redirect location.
136 | - SIGTERM monitoring, in order to catch keyboard interrupts and such, to be able to write `-o` files before exiting.
137 | - Changed
138 | - Fixed a bug in the default multi wordlist mode
139 | - Fixed a JSON output regression, where all the input data was always encoded in base64
140 | - `--debug-log` now correctly logs connection errors
141 | - Removed the `-l` flag in favor of `-v`
142 | - More verbose information in the banner shown at startup.
143 |
144 | - v0.11
145 | - New
146 |
147 | - New CLI flag: -l, shows target location of redirect responses
148 | - New CLI flag: -acc, custom auto-calibration strings
149 | - New CLI flag: -debug-log, writes the debug logging to the specified file.
150 | - New CLI flags -ml and -fl, filter/match line count in the response
151 | - Ability to use multiple wordlists / keywords by defining multiple -w command line flags. If no keyword is defined, the default is FUZZ, to keep backwards compatibility. Example: `-w "wordlists/custom.txt:CUSTOM" -H "RandomHeader: CUSTOM"`.
152 |
153 | - Changed
154 | - New CLI flag: -i, a dummy flag that does nothing, for compatibility with copy as curl.
155 | - New CLI flag: -b/--cookie, cookie data for compatibility with copy as curl.
156 | - New output formats are available: HTML and Markdown table.
157 | - New CLI flag: -l, shows target location of redirect responses
158 | - Filtering and matching by status code, response size or word count now allow using ranges in addition to single values
159 | - The internal logging information is now discarded, and can be written to a file with the new `-debug-log` flag.
160 |
161 | - v0.10
162 | - New
163 | - New CLI flag: -ac to autocalibrate response size and word filters based on a few preset URLs.
164 | - New CLI flag: -timeout to specify custom timeouts for all HTTP requests.
165 | - New CLI flag: --data for compatibility with the copy as curl functionality of browsers.
166 | - New CLI flag: --compressed, a dummy flag that does nothing, for compatibility with copy as curl.
167 | - New CLI flags: --input-cmd and --input-num to handle input generation using external commands (mutators, for example). The environment variable FFUF_NUM will be updated on every call of the command.
168 | - When --input-cmd is used, display the position instead of the payload in results. The output file (of all formats) will include the payload in addition to the position, however.
169 |
170 | - Changed
171 | - Wordlists can also be read from standard input
172 | - Defining -d or --data implies the POST method if -X doesn't set it to something other than GET
173 |
174 | - v0.9
175 | - New
176 | - New output file formats: CSV and eCSV (CSV with a base64 encoded input field to avoid CSV breakage with payloads containing a comma)
177 | - New CLI flag to follow redirects
178 | - Erroring connections will be retried once
179 | - Error counter in the status bar
180 | - New CLI flags: -se (stop on spurious errors) and -sa (stop on all errors, implies -se and -sf)
181 | - New CLI flags: -e to provide a list of extensions to add to wordlist entries, and -D to provide DirSearch wordlist format compatibility.
182 | - Wildcard option for the response status code matcher.
183 | - v0.8 184 | - New 185 | - New CLI flag to write output to a file in JSON format 186 | - New CLI flag to stop on spurious 403 responses 187 | - Changed 188 | - Regex matching / filtering now matches the headers alongside of the response body 189 | -------------------------------------------------------------------------------- /pkg/output/stdout.go: -------------------------------------------------------------------------------- 1 | package output 2 | 3 | import ( 4 | "crypto/md5" 5 | "encoding/json" 6 | "fmt" 7 | "os" 8 | "path" 9 | "sort" 10 | "strconv" 11 | "strings" 12 | "time" 13 | 14 | "github.com/ffuf/ffuf/pkg/ffuf" 15 | ) 16 | 17 | const ( 18 | BANNER_HEADER = ` 19 | /'___\ /'___\ /'___\ 20 | /\ \__/ /\ \__/ __ __ /\ \__/ 21 | \ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\ 22 | \ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/ 23 | \ \_\ \ \_\ \ \____/ \ \_\ 24 | \/_/ \/_/ \/___/ \/_/ 25 | ` 26 | BANNER_SEP = "________________________________________________" 27 | ) 28 | 29 | type Stdoutput struct { 30 | config *ffuf.Config 31 | fuzzkeywords []string 32 | Results []ffuf.Result 33 | CurrentResults []ffuf.Result 34 | } 35 | 36 | func NewStdoutput(conf *ffuf.Config) *Stdoutput { 37 | var outp Stdoutput 38 | outp.config = conf 39 | outp.Results = make([]ffuf.Result, 0) 40 | outp.CurrentResults = make([]ffuf.Result, 0) 41 | outp.fuzzkeywords = make([]string, 0) 42 | for _, ip := range conf.InputProviders { 43 | outp.fuzzkeywords = append(outp.fuzzkeywords, ip.Keyword) 44 | } 45 | sort.Strings(outp.fuzzkeywords) 46 | return &outp 47 | } 48 | 49 | func (s *Stdoutput) Banner() { 50 | version := strings.ReplaceAll(ffuf.Version(), "<3", fmt.Sprintf("%s<3%s", ANSI_RED, ANSI_CLEAR)) 51 | fmt.Fprintf(os.Stderr, "%s\n v%s\n%s\n\n", BANNER_HEADER, version, BANNER_SEP) 52 | printOption([]byte("Method"), []byte(s.config.Method)) 53 | printOption([]byte("URL"), []byte(s.config.Url)) 54 | 55 | // Print wordlists 56 | for _, provider := range s.config.InputProviders { 57 | if provider.Name == "wordlist" { 58 | printOption([]byte("Wordlist"), []byte(provider.Keyword+": "+provider.Value)) 59 | } 60 | } 61 | 62 | // Print headers 63 | if len(s.config.Headers) > 0 { 64 | for k, v := range s.config.Headers { 65 | printOption([]byte("Header"), []byte(fmt.Sprintf("%s: %s", k, v))) 66 | } 67 | } 68 | // Print POST data 69 | if len(s.config.Data) > 0 { 70 | printOption([]byte("Data"), []byte(s.config.Data)) 71 | } 72 | 73 | // Print extensions 74 | if len(s.config.Extensions) > 0 { 75 | exts := "" 76 | for _, ext := range s.config.Extensions { 77 | exts = fmt.Sprintf("%s%s ", exts, ext) 78 | } 79 | printOption([]byte("Extensions"), []byte(exts)) 80 | } 81 | 82 | // Output file info 83 | if len(s.config.OutputFile) > 0 { 84 | 85 | // Use filename as specified by user 86 | OutputFile := s.config.OutputFile 87 | 88 | if s.config.OutputFormat == "all" { 89 | // Actually... append all extensions 90 | OutputFile += ".{json,ejson,html,md,csv,ecsv}" 91 | } 92 | 93 | printOption([]byte("Output file"), []byte(OutputFile)) 94 | printOption([]byte("File format"), []byte(s.config.OutputFormat)) 95 | } 96 | 97 | // Follow redirects? 
98 | follow := fmt.Sprintf("%t", s.config.FollowRedirects) 99 | printOption([]byte("Follow redirects"), []byte(follow)) 100 | 101 | // Autocalibration 102 | autocalib := fmt.Sprintf("%t", s.config.AutoCalibration) 103 | printOption([]byte("Calibration"), []byte(autocalib)) 104 | 105 | // Proxies 106 | if len(s.config.ProxyURL) > 0 { 107 | printOption([]byte("Proxy"), []byte(s.config.ProxyURL)) 108 | } 109 | if len(s.config.ReplayProxyURL) > 0 { 110 | printOption([]byte("ReplayProxy"), []byte(s.config.ReplayProxyURL)) 111 | } 112 | 113 | // Timeout 114 | timeout := fmt.Sprintf("%d", s.config.Timeout) 115 | printOption([]byte("Timeout"), []byte(timeout)) 116 | 117 | // Threads 118 | threads := fmt.Sprintf("%d", s.config.Threads) 119 | printOption([]byte("Threads"), []byte(threads)) 120 | 121 | // Delay? 122 | if s.config.Delay.HasDelay { 123 | delay := "" 124 | if s.config.Delay.IsRange { 125 | delay = fmt.Sprintf("%.2f - %.2f seconds", s.config.Delay.Min, s.config.Delay.Max) 126 | } else { 127 | delay = fmt.Sprintf("%.2f seconds", s.config.Delay.Min) 128 | } 129 | printOption([]byte("Delay"), []byte(delay)) 130 | } 131 | 132 | // Print matchers 133 | for _, f := range s.config.MatcherManager.GetMatchers() { 134 | printOption([]byte("Matcher"), []byte(f.ReprVerbose())) 135 | } 136 | // Print filters 137 | for _, f := range s.config.MatcherManager.GetFilters() { 138 | printOption([]byte("Filter"), []byte(f.ReprVerbose())) 139 | } 140 | fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP) 141 | } 142 | 143 | // Reset resets the result slice 144 | func (s *Stdoutput) Reset() { 145 | s.CurrentResults = make([]ffuf.Result, 0) 146 | } 147 | 148 | // Cycle moves the CurrentResults to Results and resets the results slice 149 | func (s *Stdoutput) Cycle() { 150 | s.Results = append(s.Results, s.CurrentResults...) 
151 | s.Reset() 152 | } 153 | 154 | // GetResults returns the result slice 155 | func (s *Stdoutput) GetCurrentResults() []ffuf.Result { 156 | return s.CurrentResults 157 | } 158 | 159 | // SetResults sets the result slice 160 | func (s *Stdoutput) SetCurrentResults(results []ffuf.Result) { 161 | s.CurrentResults = results 162 | } 163 | 164 | func (s *Stdoutput) Progress(status ffuf.Progress) { 165 | if s.config.Quiet { 166 | // No progress for quiet mode 167 | return 168 | } 169 | 170 | dur := time.Since(status.StartedAt) 171 | runningSecs := int(dur / time.Second) 172 | var reqRate int64 173 | if runningSecs > 0 { 174 | reqRate = status.ReqSec 175 | } else { 176 | reqRate = 0 177 | } 178 | 179 | hours := dur / time.Hour 180 | dur -= hours * time.Hour 181 | mins := dur / time.Minute 182 | dur -= mins * time.Minute 183 | secs := dur / time.Second 184 | 185 | fmt.Fprintf(os.Stderr, "%s:: Progress: [%d/%d] :: Job [%d/%d] :: %d req/sec :: Duration: [%d:%02d:%02d] :: Errors: %d ::", TERMINAL_CLEAR_LINE, status.ReqCount, status.ReqTotal, status.QueuePos, status.QueueTotal, reqRate, hours, mins, secs, status.ErrorCount) 186 | } 187 | 188 | func (s *Stdoutput) Info(infostring string) { 189 | if s.config.Quiet { 190 | fmt.Fprintf(os.Stderr, "%s", infostring) 191 | } else { 192 | if !s.config.Colors { 193 | fmt.Fprintf(os.Stderr, "%s[INFO] %s\n\n", TERMINAL_CLEAR_LINE, infostring) 194 | } else { 195 | fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring) 196 | } 197 | } 198 | } 199 | 200 | func (s *Stdoutput) Error(errstring string) { 201 | if s.config.Quiet { 202 | fmt.Fprintf(os.Stderr, "%s", errstring) 203 | } else { 204 | if !s.config.Colors { 205 | fmt.Fprintf(os.Stderr, "%s[ERR] %s\n", TERMINAL_CLEAR_LINE, errstring) 206 | } else { 207 | fmt.Fprintf(os.Stderr, "%s[%sERR%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, errstring) 208 | } 209 | } 210 | } 211 | 212 | func (s *Stdoutput) Warning(warnstring string) { 213 | if s.config.Quiet { 214 | fmt.Fprintf(os.Stderr, "%s", warnstring) 215 | } else { 216 | if !s.config.Colors { 217 | fmt.Fprintf(os.Stderr, "%s[WARN] %s\n", TERMINAL_CLEAR_LINE, warnstring) 218 | } else { 219 | fmt.Fprintf(os.Stderr, "%s[%sWARN%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, warnstring) 220 | } 221 | } 222 | } 223 | 224 | func (s *Stdoutput) Raw(output string) { 225 | fmt.Fprintf(os.Stderr, "%s%s", TERMINAL_CLEAR_LINE, output) 226 | } 227 | 228 | func (s *Stdoutput) writeToAll(filename string, config *ffuf.Config, res []ffuf.Result) error { 229 | var err error 230 | var BaseFilename string = s.config.OutputFile 231 | 232 | // Go through each type of write, adding 233 | // the suffix to each output file. 
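// Note: s.config.OutputFile is temporarily repointed at each per-format filename below; a failure
// in one format is reported via s.Error but does not stop the remaining formats from being written.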
234 | 235 | s.config.OutputFile = BaseFilename + ".json" 236 | err = writeJSON(s.config.OutputFile, s.config, res) 237 | if err != nil { 238 | s.Error(err.Error()) 239 | } 240 | 241 | s.config.OutputFile = BaseFilename + ".ejson" 242 | err = writeEJSON(s.config.OutputFile, s.config, res) 243 | if err != nil { 244 | s.Error(err.Error()) 245 | } 246 | 247 | s.config.OutputFile = BaseFilename + ".html" 248 | err = writeHTML(s.config.OutputFile, s.config, res) 249 | if err != nil { 250 | s.Error(err.Error()) 251 | } 252 | 253 | s.config.OutputFile = BaseFilename + ".md" 254 | err = writeMarkdown(s.config.OutputFile, s.config, res) 255 | if err != nil { 256 | s.Error(err.Error()) 257 | } 258 | 259 | s.config.OutputFile = BaseFilename + ".csv" 260 | err = writeCSV(s.config.OutputFile, s.config, res, false) 261 | if err != nil { 262 | s.Error(err.Error()) 263 | } 264 | 265 | s.config.OutputFile = BaseFilename + ".ecsv" 266 | err = writeCSV(s.config.OutputFile, s.config, res, true) 267 | if err != nil { 268 | s.Error(err.Error()) 269 | } 270 | 271 | return nil 272 | 273 | } 274 | 275 | // SaveFile saves the current results to a file of a given type 276 | func (s *Stdoutput) SaveFile(filename, format string) error { 277 | var err error 278 | if s.config.OutputSkipEmptyFile && len(s.Results) == 0 { 279 | s.Info("No results and -or defined, output file not written.") 280 | return err 281 | } 282 | switch format { 283 | case "all": 284 | err = s.writeToAll(filename, s.config, append(s.Results, s.CurrentResults...)) 285 | case "json": 286 | err = writeJSON(filename, s.config, append(s.Results, s.CurrentResults...)) 287 | case "ejson": 288 | err = writeEJSON(filename, s.config, append(s.Results, s.CurrentResults...)) 289 | case "html": 290 | err = writeHTML(filename, s.config, append(s.Results, s.CurrentResults...)) 291 | case "md": 292 | err = writeMarkdown(filename, s.config, append(s.Results, s.CurrentResults...)) 293 | case "csv": 294 | err = writeCSV(filename, s.config, append(s.Results, s.CurrentResults...), false) 295 | case "ecsv": 296 | err = writeCSV(filename, s.config, append(s.Results, s.CurrentResults...), true) 297 | } 298 | return err 299 | } 300 | 301 | // Finalize gets run after all the ffuf jobs are completed 302 | func (s *Stdoutput) Finalize() error { 303 | var err error 304 | if s.config.OutputFile != "" { 305 | err = s.SaveFile(s.config.OutputFile, s.config.OutputFormat) 306 | if err != nil { 307 | s.Error(err.Error()) 308 | } 309 | } 310 | fmt.Fprintf(os.Stderr, "\n") 311 | return nil 312 | } 313 | 314 | func (s *Stdoutput) Result(resp ffuf.Response) { 315 | // Do we want to write request and response to a file 316 | if len(s.config.OutputDirectory) > 0 { 317 | resp.ResultFile = s.writeResultToFile(resp) 318 | } 319 | 320 | inputs := make(map[string][]byte, len(resp.Request.Input)) 321 | for k, v := range resp.Request.Input { 322 | inputs[k] = v 323 | } 324 | sResult := ffuf.Result{ 325 | Input: inputs, 326 | Position: resp.Request.Position, 327 | StatusCode: resp.StatusCode, 328 | ContentLength: resp.ContentLength, 329 | ContentWords: resp.ContentWords, 330 | ContentLines: resp.ContentLines, 331 | ContentType: resp.ContentType, 332 | RedirectLocation: resp.GetRedirectLocation(false), 333 | Url: resp.Request.Url, 334 | Duration: resp.Time, 335 | ResultFile: resp.ResultFile, 336 | Host: resp.Request.Host, 337 | } 338 | s.CurrentResults = append(s.CurrentResults, sResult) 339 | // Output the result 340 | s.PrintResult(sResult) 341 | } 342 | 343 | func (s *Stdoutput) 
writeResultToFile(resp ffuf.Response) string { 344 | var fileContent, fileName, filePath string 345 | // Create directory if needed 346 | if s.config.OutputDirectory != "" { 347 | err := os.MkdirAll(s.config.OutputDirectory, 0750) 348 | if err != nil { 349 | if !os.IsExist(err) { 350 | s.Error(err.Error()) 351 | return "" 352 | } 353 | } 354 | } 355 | fileContent = fmt.Sprintf("%s\n---- ↑ Request ---- Response ↓ ----\n\n%s", resp.Request.Raw, resp.Raw) 356 | 357 | // Create file name 358 | fileName = fmt.Sprintf("%x", md5.Sum([]byte(fileContent))) 359 | 360 | filePath = path.Join(s.config.OutputDirectory, fileName) 361 | err := os.WriteFile(filePath, []byte(fileContent), 0640) 362 | if err != nil { 363 | s.Error(err.Error()) 364 | } 365 | return fileName 366 | } 367 | 368 | func (s *Stdoutput) PrintResult(res ffuf.Result) { 369 | switch { 370 | case s.config.Json: 371 | s.resultJson(res) 372 | case s.config.Quiet: 373 | s.resultQuiet(res) 374 | case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0: 375 | // Print a multi-line result (when using multiple input keywords and wordlists) 376 | s.resultMultiline(res) 377 | default: 378 | s.resultNormal(res) 379 | } 380 | } 381 | 382 | func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string { 383 | inputs := "" 384 | if len(res.Input) > 1 { 385 | for k, v := range res.Input { 386 | if inSlice(k, s.config.CommandKeywords) { 387 | // If we're using external command for input, display the position instead of input 388 | inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(res.Position)) 389 | } else { 390 | inputs = fmt.Sprintf("%s%s : %s ", inputs, k, v) 391 | } 392 | } 393 | } else { 394 | for k, v := range res.Input { 395 | if inSlice(k, s.config.CommandKeywords) { 396 | // If we're using external command for input, display the position instead of input 397 | inputs = strconv.Itoa(res.Position) 398 | } else { 399 | inputs = string(v) 400 | } 401 | } 402 | } 403 | return inputs 404 | } 405 | 406 | func (s *Stdoutput) resultQuiet(res ffuf.Result) { 407 | fmt.Println(s.prepareInputsOneLine(res)) 408 | } 409 | 410 | func (s *Stdoutput) resultMultiline(res ffuf.Result) { 411 | var res_hdr, res_str string 412 | res_str = "%s%s * %s: %s\n" 413 | res_hdr = fmt.Sprintf("%s%s[Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) 414 | reslines := "" 415 | if s.config.Verbose { 416 | reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, res.Url) 417 | redirectLocation := res.RedirectLocation 418 | if redirectLocation != "" { 419 | reslines = fmt.Sprintf("%s%s| --> | %s\n", reslines, TERMINAL_CLEAR_LINE, redirectLocation) 420 | } 421 | } 422 | if res.ResultFile != "" { 423 | reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, res.ResultFile) 424 | } 425 | for _, k := range s.fuzzkeywords { 426 | if inSlice(k, s.config.CommandKeywords) { 427 | // If we're using external command for input, display the position instead of input 428 | reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(res.Position)) 429 | } else { 430 | // Wordlist input 431 | reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, res.Input[k]) 432 | } 433 | } 434 | fmt.Printf("%s\n%s\n", res_hdr, reslines) 435 | } 436 | 437 | func (s *Stdoutput) resultNormal(res ffuf.Result) { 438 | resnormal := 
fmt.Sprintf("%s%s%-23s [Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), s.prepareInputsOneLine(res), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) 439 | fmt.Println(resnormal) 440 | } 441 | 442 | func (s *Stdoutput) resultJson(res ffuf.Result) { 443 | resBytes, err := json.Marshal(res) 444 | if err != nil { 445 | s.Error(err.Error()) 446 | } else { 447 | fmt.Fprint(os.Stderr, TERMINAL_CLEAR_LINE) 448 | fmt.Println(string(resBytes)) 449 | } 450 | } 451 | 452 | func (s *Stdoutput) colorize(status int64) string { 453 | if !s.config.Colors { 454 | return "" 455 | } 456 | colorCode := ANSI_CLEAR 457 | if status >= 200 && status < 300 { 458 | colorCode = ANSI_GREEN 459 | } 460 | if status >= 300 && status < 400 { 461 | colorCode = ANSI_BLUE 462 | } 463 | if status >= 400 && status < 500 { 464 | colorCode = ANSI_YELLOW 465 | } 466 | if status >= 500 && status < 600 { 467 | colorCode = ANSI_RED 468 | } 469 | return colorCode 470 | } 471 | 472 | func printOption(name []byte, value []byte) { 473 | fmt.Fprintf(os.Stderr, " :: %-16s : %s\n", name, value) 474 | } 475 | 476 | func inSlice(key string, slice []string) bool { 477 | for _, v := range slice { 478 | if v == key { 479 | return true 480 | } 481 | } 482 | return false 483 | } 484 | -------------------------------------------------------------------------------- /pkg/ffuf/job.go: -------------------------------------------------------------------------------- 1 | package ffuf 2 | 3 | import ( 4 | "fmt" 5 | "log" 6 | "math/rand" 7 | "os" 8 | "os/signal" 9 | "sync" 10 | "syscall" 11 | "time" 12 | ) 13 | 14 | // Job ties together Config, Runner, Input and Output 15 | type Job struct { 16 | Config *Config 17 | ErrorMutex sync.Mutex 18 | Input InputProvider 19 | Runner RunnerProvider 20 | ReplayRunner RunnerProvider 21 | Output OutputProvider 22 | Jobhash string 23 | Counter int 24 | ErrorCounter int 25 | SpuriousErrorCounter int 26 | Total int 27 | Running bool 28 | RunningJob bool 29 | Paused bool 30 | Count403 int 31 | Count429 int 32 | Error string 33 | Rate *RateThrottle 34 | startTime time.Time 35 | startTimeJob time.Time 36 | queuejobs []QueueJob 37 | queuepos int 38 | skipQueue bool 39 | currentDepth int 40 | calibMutex sync.Mutex 41 | pauseWg sync.WaitGroup 42 | } 43 | 44 | type QueueJob struct { 45 | Url string 46 | depth int 47 | req Request 48 | } 49 | 50 | func NewJob(conf *Config) *Job { 51 | var j Job 52 | j.Config = conf 53 | j.Counter = 0 54 | j.ErrorCounter = 0 55 | j.SpuriousErrorCounter = 0 56 | j.Running = false 57 | j.RunningJob = false 58 | j.Paused = false 59 | j.queuepos = 0 60 | j.queuejobs = make([]QueueJob, 0) 61 | j.currentDepth = 0 62 | j.Rate = NewRateThrottle(conf) 63 | j.skipQueue = false 64 | return &j 65 | } 66 | 67 | // incError increments the error counter 68 | func (j *Job) incError() { 69 | j.ErrorMutex.Lock() 70 | defer j.ErrorMutex.Unlock() 71 | j.ErrorCounter++ 72 | j.SpuriousErrorCounter++ 73 | } 74 | 75 | // inc403 increments the 403 response counter 76 | func (j *Job) inc403() { 77 | j.ErrorMutex.Lock() 78 | defer j.ErrorMutex.Unlock() 79 | j.Count403++ 80 | } 81 | 82 | // inc429 increments the 429 response counter 83 | func (j *Job) inc429() { 84 | j.ErrorMutex.Lock() 85 | defer j.ErrorMutex.Unlock() 86 | j.Count429++ 87 | } 88 | 89 | // resetSpuriousErrors resets the spurious error counter 90 | func (j *Job) resetSpuriousErrors() { 91 | j.ErrorMutex.Lock() 92 | defer 
j.ErrorMutex.Unlock() 93 | j.SpuriousErrorCounter = 0 94 | } 95 | 96 | // DeleteQueueItem deletes a recursion job from the queue by its index in the slice 97 | func (j *Job) DeleteQueueItem(index int) { 98 | index = j.queuepos + index - 1 99 | j.queuejobs = append(j.queuejobs[:index], j.queuejobs[index+1:]...) 100 | } 101 | 102 | // QueuedJobs returns the slice of queued recursive jobs 103 | func (j *Job) QueuedJobs() []QueueJob { 104 | return j.queuejobs[j.queuepos-1:] 105 | } 106 | 107 | // Start the execution of the Job 108 | func (j *Job) Start() { 109 | if j.startTime.IsZero() { 110 | j.startTime = time.Now() 111 | } 112 | 113 | basereq := BaseRequest(j.Config) 114 | 115 | if j.Config.InputMode == "sniper" { 116 | // process multiple payload locations and create a queue job for each location 117 | reqs := SniperRequests(&basereq, j.Config.InputProviders[0].Template) 118 | for _, r := range reqs { 119 | j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: r}) 120 | } 121 | j.Total = j.Input.Total() * len(reqs) 122 | } else { 123 | // Add the default job to job queue 124 | j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: BaseRequest(j.Config)}) 125 | j.Total = j.Input.Total() 126 | } 127 | 128 | rand.Seed(time.Now().UnixNano()) 129 | defer j.Stop() 130 | 131 | j.Running = true 132 | j.RunningJob = true 133 | //Show banner if not running in silent mode 134 | if !j.Config.Quiet { 135 | j.Output.Banner() 136 | } 137 | // Monitor for SIGTERM and do cleanup properly (writing the output files etc) 138 | j.interruptMonitor() 139 | for j.jobsInQueue() { 140 | j.prepareQueueJob() 141 | j.Reset(true) 142 | j.RunningJob = true 143 | j.startExecution() 144 | } 145 | 146 | err := j.Output.Finalize() 147 | if err != nil { 148 | j.Output.Error(err.Error()) 149 | } 150 | } 151 | 152 | // Reset resets the counters and wordlist position for a job 153 | func (j *Job) Reset(cycle bool) { 154 | j.Input.Reset() 155 | j.Counter = 0 156 | j.skipQueue = false 157 | j.startTimeJob = time.Now() 158 | if cycle { 159 | j.Output.Cycle() 160 | } else { 161 | j.Output.Reset() 162 | } 163 | } 164 | 165 | func (j *Job) jobsInQueue() bool { 166 | return j.queuepos < len(j.queuejobs) 167 | } 168 | 169 | func (j *Job) prepareQueueJob() { 170 | j.Config.Url = j.queuejobs[j.queuepos].Url 171 | j.currentDepth = j.queuejobs[j.queuepos].depth 172 | 173 | //Find all keywords present in new queued job 174 | kws := j.Input.Keywords() 175 | found_kws := make([]string, 0) 176 | for _, k := range kws { 177 | if RequestContainsKeyword(j.queuejobs[j.queuepos].req, k) { 178 | found_kws = append(found_kws, k) 179 | } 180 | } 181 | //And activate / disable inputproviders as needed 182 | j.Input.ActivateKeywords(found_kws) 183 | j.queuepos += 1 184 | j.Jobhash, _ = WriteHistoryEntry(j.Config) 185 | } 186 | 187 | // SkipQueue allows to skip the current job and advance to the next queued recursion job 188 | func (j *Job) SkipQueue() { 189 | j.skipQueue = true 190 | } 191 | 192 | func (j *Job) sleepIfNeeded() { 193 | var sleepDuration time.Duration 194 | if j.Config.Delay.HasDelay { 195 | if j.Config.Delay.IsRange { 196 | sTime := j.Config.Delay.Min + rand.Float64()*(j.Config.Delay.Max-j.Config.Delay.Min) 197 | sleepDuration = time.Duration(sTime * 1000) 198 | } else { 199 | sleepDuration = time.Duration(j.Config.Delay.Min * 1000) 200 | } 201 | sleepDuration = sleepDuration * time.Millisecond 202 | } 203 | // makes the sleep cancellable by context 204 | select { 205 | case 
<-j.Config.Context.Done(): // cancelled 206 | case <-time.After(sleepDuration): // sleep 207 | } 208 | } 209 | 210 | // Pause pauses the job process 211 | func (j *Job) Pause() { 212 | if !j.Paused { 213 | j.Paused = true 214 | j.pauseWg.Add(1) 215 | j.Output.Info("------ PAUSING ------") 216 | } 217 | } 218 | 219 | // Resume resumes the job process 220 | func (j *Job) Resume() { 221 | if j.Paused { 222 | j.Paused = false 223 | j.Output.Info("------ RESUMING -----") 224 | j.pauseWg.Done() 225 | } 226 | } 227 | 228 | func (j *Job) startExecution() { 229 | var wg sync.WaitGroup 230 | wg.Add(1) 231 | go j.runBackgroundTasks(&wg) 232 | 233 | // Print the base URL when starting a new recursion or sniper queue job 234 | if j.queuepos > 1 { 235 | if j.Config.InputMode == "sniper" { 236 | j.Output.Info(fmt.Sprintf("Starting queued sniper job (%d of %d) on target: %s", j.queuepos, len(j.queuejobs), j.Config.Url)) 237 | } else { 238 | j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) 239 | } 240 | } 241 | 242 | //Limiter blocks after reaching the buffer, ensuring limited concurrency 243 | threadlimiter := make(chan bool, j.Config.Threads) 244 | 245 | for j.Input.Next() && !j.skipQueue { 246 | // Check if we should stop the process 247 | j.CheckStop() 248 | 249 | if !j.Running { 250 | defer j.Output.Warning(j.Error) 251 | break 252 | } 253 | j.pauseWg.Wait() 254 | // Handle the rate & thread limiting 255 | threadlimiter <- true 256 | // Ratelimiter handles the rate ticker 257 | <-j.Rate.RateLimiter.C 258 | nextInput := j.Input.Value() 259 | nextPosition := j.Input.Position() 260 | // Add FFUFHASH and its value 261 | nextInput["FFUFHASH"] = j.ffufHash(nextPosition) 262 | 263 | wg.Add(1) 264 | j.Counter++ 265 | 266 | go func() { 267 | defer func() { <-threadlimiter }() 268 | defer wg.Done() 269 | threadStart := time.Now() 270 | j.runTask(nextInput, nextPosition, false) 271 | j.sleepIfNeeded() 272 | threadEnd := time.Now() 273 | j.Rate.Tick(threadStart, threadEnd) 274 | }() 275 | if !j.RunningJob { 276 | defer j.Output.Warning(j.Error) 277 | return 278 | } 279 | } 280 | wg.Wait() 281 | j.updateProgress() 282 | } 283 | 284 | func (j *Job) interruptMonitor() { 285 | sigChan := make(chan os.Signal, 2) 286 | signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM) 287 | go func() { 288 | for range sigChan { 289 | j.Error = "Caught keyboard interrupt (Ctrl-C)\n" 290 | // resume if paused 291 | if j.Paused { 292 | j.pauseWg.Done() 293 | } 294 | // Stop the job 295 | j.Stop() 296 | } 297 | }() 298 | } 299 | 300 | func (j *Job) runBackgroundTasks(wg *sync.WaitGroup) { 301 | defer wg.Done() 302 | totalProgress := j.Input.Total() 303 | for j.Counter <= totalProgress && !j.skipQueue { 304 | j.pauseWg.Wait() 305 | if !j.Running { 306 | break 307 | } 308 | j.updateProgress() 309 | if j.Counter == totalProgress { 310 | return 311 | } 312 | if !j.RunningJob { 313 | return 314 | } 315 | time.Sleep(time.Millisecond * time.Duration(j.Config.ProgressFrequency)) 316 | } 317 | } 318 | 319 | func (j *Job) updateProgress() { 320 | prog := Progress{ 321 | StartedAt: j.startTimeJob, 322 | ReqCount: j.Counter, 323 | ReqTotal: j.Input.Total(), 324 | ReqSec: j.Rate.CurrentRate(), 325 | QueuePos: j.queuepos, 326 | QueueTotal: len(j.queuejobs), 327 | ErrorCount: j.ErrorCounter, 328 | } 329 | j.Output.Progress(prog) 330 | } 331 | 332 | func (j *Job) isMatch(resp Response) bool { 333 | matched := false 334 | var matchers map[string]FilterProvider 335 | var filters map[string]FilterProvider 336 | if 
j.Config.AutoCalibrationPerHost { 337 | filters = j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*resp.Request)) 338 | } else { 339 | filters = j.Config.MatcherManager.GetFilters() 340 | } 341 | matchers = j.Config.MatcherManager.GetMatchers() 342 | for _, m := range matchers { 343 | match, err := m.Filter(&resp) 344 | if err != nil { 345 | continue 346 | } 347 | if match { 348 | matched = true 349 | } else if j.Config.MatcherMode == "and" { 350 | // we already know this isn't "and" match 351 | return false 352 | 353 | } 354 | } 355 | // The response was not matched, return before running filters 356 | if !matched { 357 | return false 358 | } 359 | for _, f := range filters { 360 | fv, err := f.Filter(&resp) 361 | if err != nil { 362 | continue 363 | } 364 | if fv { 365 | // return false 366 | if j.Config.FilterMode == "or" { 367 | // return early, as filter matched 368 | return false 369 | } 370 | } else { 371 | if j.Config.FilterMode == "and" { 372 | // return early as not all filters matched in "and" mode 373 | return true 374 | } 375 | } 376 | } 377 | if len(filters) > 0 && j.Config.FilterMode == "and" { 378 | // we did not return early, so all filters were matched 379 | return false 380 | } 381 | return true 382 | } 383 | 384 | func (j *Job) ffufHash(pos int) []byte { 385 | hashstring := "" 386 | r := []rune(j.Jobhash) 387 | if len(r) > 5 { 388 | hashstring = string(r[:5]) 389 | } 390 | hashstring += fmt.Sprintf("%x", pos) 391 | return []byte(hashstring) 392 | } 393 | 394 | func (j *Job) runTask(input map[string][]byte, position int, retried bool) { 395 | basereq := j.queuejobs[j.queuepos-1].req 396 | req, err := j.Runner.Prepare(input, &basereq) 397 | req.Position = position 398 | if err != nil { 399 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err)) 400 | j.incError() 401 | log.Printf("%s", err) 402 | return 403 | } 404 | 405 | resp, err := j.Runner.Execute(&req) 406 | if err != nil { 407 | if retried { 408 | j.incError() 409 | log.Printf("%s", err) 410 | } else { 411 | j.runTask(input, position, true) 412 | } 413 | return 414 | } 415 | if j.SpuriousErrorCounter > 0 { 416 | j.resetSpuriousErrors() 417 | } 418 | if j.Config.StopOn403 || j.Config.StopOnAll { 419 | // Increment Forbidden counter if we encountered one 420 | if resp.StatusCode == 403 { 421 | j.inc403() 422 | } 423 | } 424 | if j.Config.StopOnAll { 425 | // increment 429 counter if the response code is 429 426 | if resp.StatusCode == 429 { 427 | j.inc429() 428 | } 429 | } 430 | j.pauseWg.Wait() 431 | 432 | // Handle autocalibration, must be done after the actual request to ensure sane value in req.Host 433 | _ = j.CalibrateIfNeeded(HostURLFromRequest(req), input) 434 | 435 | if j.isMatch(resp) { 436 | // Re-send request through replay-proxy if needed 437 | if j.ReplayRunner != nil { 438 | replayreq, err := j.ReplayRunner.Prepare(input, &basereq) 439 | replayreq.Position = position 440 | if err != nil { 441 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing replayproxy request: %s\n", err)) 442 | j.incError() 443 | log.Printf("%s", err) 444 | } else { 445 | _, _ = j.ReplayRunner.Execute(&replayreq) 446 | } 447 | } 448 | j.Output.Result(resp) 449 | 450 | // Refresh the progress indicator as we printed something out 451 | j.updateProgress() 452 | if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" { 453 | j.handleGreedyRecursionJob(resp) 454 | } 455 | } 456 | 457 | if j.Config.Recursion && j.Config.RecursionStrategy == "default" && 
len(resp.GetRedirectLocation(false)) > 0 { 458 | j.handleDefaultRecursionJob(resp) 459 | } 460 | } 461 | 462 | // handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached 463 | func (j *Job) handleGreedyRecursionJob(resp Response) { 464 | // Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob 465 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { 466 | recUrl := resp.Request.Url + "/" + "FUZZ" 467 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} 468 | j.queuejobs = append(j.queuejobs, newJob) 469 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) 470 | } else { 471 | j.Output.Warning(fmt.Sprintf("Maximum recursion depth reached. Ignoring: %s", resp.Request.Url)) 472 | } 473 | } 474 | 475 | // handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has 476 | // not been reached 477 | func (j *Job) handleDefaultRecursionJob(resp Response) { 478 | recUrl := resp.Request.Url + "/" + "FUZZ" 479 | if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) { 480 | // Not a directory, return early 481 | return 482 | } 483 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { 484 | // We have yet to reach the maximum recursion depth 485 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} 486 | j.queuejobs = append(j.queuejobs, newJob) 487 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) 488 | } else { 489 | j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true))) 490 | } 491 | } 492 | 493 | // CheckStop stops the job if stopping conditions are met 494 | func (j *Job) CheckStop() { 495 | if j.Counter > 50 { 496 | // We have enough samples 497 | if j.Config.StopOn403 || j.Config.StopOnAll { 498 | if float64(j.Count403)/float64(j.Counter) > 0.95 { 499 | // Over 95% of requests are 403 500 | j.Error = "Getting an unusual amount of 403 responses, exiting." 501 | j.Stop() 502 | } 503 | } 504 | if j.Config.StopOnErrors || j.Config.StopOnAll { 505 | if j.SpuriousErrorCounter > j.Config.Threads*2 { 506 | // Most of the requests are erroring 507 | j.Error = "Receiving spurious errors, exiting." 508 | j.Stop() 509 | } 510 | 511 | } 512 | if j.Config.StopOnAll && (float64(j.Count429)/float64(j.Counter) > 0.2) { 513 | // Over 20% of responses are 429 514 | j.Error = "Getting an unusual amount of 429 responses, exiting." 515 | j.Stop() 516 | } 517 | } 518 | 519 | // Check for runtime of entire process 520 | if j.Config.MaxTime > 0 { 521 | dur := time.Since(j.startTime) 522 | runningSecs := int(dur / time.Second) 523 | if runningSecs >= j.Config.MaxTime { 524 | j.Error = "Maximum running time for entire process reached, exiting." 525 | j.Stop() 526 | } 527 | } 528 | 529 | // Check for runtime of current job 530 | if j.Config.MaxTimeJob > 0 { 531 | dur := time.Since(j.startTimeJob) 532 | runningSecs := int(dur / time.Second) 533 | if runningSecs >= j.Config.MaxTimeJob { 534 | j.Error = "Maximum running time for this job reached, continuing with next job if one exists." 
535 | j.Next() 536 | 537 | } 538 | } 539 | } 540 | 541 | // Stop the execution of the Job 542 | func (j *Job) Stop() { 543 | j.Running = false 544 | j.Config.Cancel() 545 | } 546 | 547 | // Next stops the current job and continues with the next queued one 548 | func (j *Job) Next() { 549 | j.RunningJob = false 550 | } 551 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![ffuf mascot](_img/ffuf_run_logo_600.png) 2 | # ffuf - Fuzz Faster U Fool 3 | 4 | A fast web fuzzer written in Go. 5 | 6 | - [Installation](https://github.com/ffuf/ffuf#installation) 7 | - [Example usage](https://github.com/ffuf/ffuf#example-usage) 8 | - [Content discovery](https://github.com/ffuf/ffuf#typical-directory-discovery) 9 | - [Vhost discovery](https://github.com/ffuf/ffuf#virtual-host-discovery-without-dns-records) 10 | - [Parameter fuzzing](https://github.com/ffuf/ffuf#get-parameter-fuzzing) 11 | - [POST data fuzzing](https://github.com/ffuf/ffuf#post-data-fuzzing) 12 | - [Using external mutator](https://github.com/ffuf/ffuf#using-external-mutator-to-produce-test-cases) 13 | - [Configuration files](https://github.com/ffuf/ffuf#configuration-files) 14 | - [Help](https://github.com/ffuf/ffuf#usage) 15 | - [Interactive mode](https://github.com/ffuf/ffuf#interactive-mode) 16 | - [Sponsorware?](https://github.com/ffuf/ffuf#sponsorware) 17 | 18 | ## Sponsors 19 | [![Offensive Security](_img/offsec-logo.png)](https://www.offensive-security.com/) 20 | 21 | ## Official Discord Channel 22 | 23 | ffuf has a channel on the Porchetta Industries Discord server, alongside channels for many other tools. 24 | 25 | Come hang out & discuss ffuf, its usage and development! 26 | 27 | [![Porchetta Industries](https://discordapp.com/api/guilds/736724457258745996/widget.png?style=banner2)](https://discord.gg/VWcdZCUsQP) 28 | 29 | ## Installation 30 | 31 | - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from the [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run! 32 | 33 | _or_ 34 | - If you are on macOS with [homebrew](https://brew.sh) installed: `brew install ffuf` 35 | 36 | _or_ 37 | - If you have a recent Go compiler installed: `go install github.com/ffuf/ffuf@latest` (the same command works for updating) 38 | 39 | _or_ 40 | - `git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build` 41 | 42 | Ffuf depends on Go 1.16 or greater. 43 | 44 | ## Example usage 45 | 46 | The usage examples below show just the simplest tasks you can accomplish using `ffuf`. 47 | 48 | For more extensive documentation, with real life usage examples and tips, be sure to check out the awesome guide: 49 | "[Everything you need to know about FFUF](https://codingo.io/tools/ffuf/bounty/2020/09/17/everything-you-need-to-know-about-ffuf.html)" by 50 | Michael Skelton ([@codingo](https://github.com/codingo)). 51 | 52 | You can also practise your ffuf scans against a live host with different lessons and use cases, either locally by using the Docker container https://github.com/adamtlangley/ffufme or against the live hosted version at http://ffuf.me, created by Adam Langley [@adamtlangley](https://twitter.com/adamtlangley).
53 | 54 | ### Typical directory discovery 55 | 56 | [![asciicast](https://asciinema.org/a/211350.png)](https://asciinema.org/a/211350) 57 | 58 | By using the FUZZ keyword at the end of the URL (`-u`): 59 | 60 | ``` 61 | ffuf -w /path/to/wordlist -u https://target/FUZZ 62 | ``` 63 | 64 | ### Virtual host discovery (without DNS records) 65 | 66 | [![asciicast](https://asciinema.org/a/211360.png)](https://asciinema.org/a/211360) 67 | 68 | Assuming that the default virtualhost response size is 4242 bytes, we can filter out all the responses of that size (`-fs 4242`) while fuzzing the Host header: 69 | 70 | ``` 71 | ffuf -w /path/to/vhost/wordlist -u https://target -H "Host: FUZZ" -fs 4242 72 | ``` 73 | 74 | ### GET parameter fuzzing 75 | 76 | GET parameter name fuzzing is very similar to directory discovery, and works by defining the `FUZZ` keyword as a part of the URL. This example also assumes a response size of 4242 bytes for an invalid GET parameter name. 77 | 78 | ``` 79 | ffuf -w /path/to/paramnames.txt -u https://target/script.php?FUZZ=test_value -fs 4242 80 | ``` 81 | 82 | If the parameter name is known, the values can be fuzzed the same way. This example assumes that a wrong parameter value returns HTTP response code 401. 83 | 84 | ``` 85 | ffuf -w /path/to/values.txt -u https://target/script.php?valid_name=FUZZ -fc 401 86 | ``` 87 | 88 | ### POST data fuzzing 89 | 90 | This is a very straightforward operation, again using the `FUZZ` keyword. This example fuzzes only part of the POST request. We're again filtering out the 401 responses. 91 | 92 | ``` 93 | ffuf -w /path/to/postdata.txt -X POST -d "username=admin\&password=FUZZ" -u https://target/login.php -fc 401 94 | ``` 95 | 96 | ### Maximum execution time 97 | 98 | If you don't want ffuf to run indefinitely, you can use the `-maxtime` flag. This stops __the entire__ process after a given time (in seconds). 99 | 100 | ``` 101 | ffuf -w /path/to/wordlist -u https://target/FUZZ -maxtime 60 102 | ``` 103 | 104 | When working with recursion, you can control the maximum time __per job__ using `-maxtime-job`. This will stop the current job after a given time (in seconds) and continue with the next one. New jobs are created when the recursion functionality detects a subdirectory. 105 | 106 | ``` 107 | ffuf -w /path/to/wordlist -u https://target/FUZZ -maxtime-job 60 -recursion -recursion-depth 2 108 | ``` 109 | 110 | It is also possible to combine both flags, limiting the per-job maximum execution time as well as the overall execution time. If you do not use recursion, the two flags behave identically. 111 | 112 | ### Using external mutator to produce test cases 113 | 114 | For this example, we'll fuzz JSON data that's sent over POST. [Radamsa](https://gitlab.com/akihe/radamsa) is used as the mutator. 115 | 116 | When `--input-cmd` is used, ffuf will display matches by their position. The same position value is available to the callee as the environment variable `$FFUF_NUM`. We'll use this position value as the seed for the mutator. Files example1.txt and example2.txt contain valid JSON payloads.
We are matching all the responses, but filtering out response code `400 - Bad request`: 117 | 118 | ``` 119 | ffuf --input-cmd 'radamsa --seed $FFUF_NUM example1.txt example2.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/FUZZ -mc all -fc 400 120 | ``` 121 | 122 | Of course, it isn't very efficient to call the mutator for each payload, so we can also pre-generate the payloads, still using [Radamsa](https://gitlab.com/akihe/radamsa) as an example: 123 | 124 | ``` 125 | # Generate 1000 example payloads 126 | radamsa -n 1000 -o %n.txt example1.txt example2.txt 127 | 128 | # This results in files 1.txt ... 1000.txt 129 | # Now we can just read the payload data from these files in a loop for ffuf 130 | 131 | ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400 132 | ``` 133 | 134 | ### Configuration files 135 | 136 | When running ffuf, it first checks if a default configuration file exists. The file path for it is `~/.ffufrc` / `$HOME/.ffufrc` 137 | for most *nixes (for example `/home/joohoi/.ffufrc`) and `%USERPROFILE%\.ffufrc` for Windows. You can configure one or 138 | multiple options in this file, and they will be applied on every subsequent ffuf job. An example `.ffufrc` file can be 139 | found [here](https://github.com/ffuf/ffuf/blob/master/ffufrc.example). 140 | 141 | The configuration options provided on the command line override the ones loaded from `~/.ffufrc`. 142 | Note: this does not apply to CLI flags that can be provided more than once, such as the `-H` (header) flag. 143 | In this case, the `-H` values provided on the command line will be _appended_ to the ones from the config file instead. 144 | 145 | Additionally, if you wish to use a set of configuration files for different use cases, you can do so by defining 146 | the configuration file path with the `-config` command line flag, which takes the file path to the configuration file as its 147 | parameter. 148 | 149 |
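As an illustration of the `-config` flag, a project-specific configuration file can be loaded explicitly at run time. The file name and target below are hypothetical; the options that such a file can contain are the ones shown in the linked ffufrc.example:

```
ffuf -config ./myproject.ffufrc -w /path/to/wordlist -u https://target/FUZZ
```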
152 | 153 | ## Usage 154 | 155 | To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`). 156 | 157 | ``` 158 | Fuzz Faster U Fool - v1.3.0-dev 159 | 160 | HTTP OPTIONS: 161 | -H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted. 162 | -X HTTP method to use 163 | -b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality. 164 | -d POST data 165 | -ignore-body Do not fetch the response content. (default: false) 166 | -r Follow redirects (default: false) 167 | -recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false) 168 | -recursion-depth Maximum recursion depth. (default: 0) 169 | -recursion-strategy Recursion strategy: "default" for a redirect based, and "greedy" to recurse on all matches (default: default) 170 | -replay-proxy Replay matched requests using this proxy. 171 | -sni Target TLS SNI, does not support FUZZ keyword 172 | -timeout HTTP request timeout in seconds. (default: 10) 173 | -u Target URL 174 | -x Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080 175 | 176 | GENERAL OPTIONS: 177 | -V Show version information. (default: false) 178 | -ac Automatically calibrate filtering options (default: false) 179 | -acc Custom auto-calibration string. Can be used multiple times. Implies -ac 180 | -c Colorize output. (default: false) 181 | -config Load configuration from a file 182 | -maxtime Maximum running time in seconds for entire process. (default: 0) 183 | -maxtime-job Maximum running time in seconds per job. (default: 0) 184 | -noninteractive Disable the interactive console functionality (default: false) 185 | -p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0" 186 | -rate Rate of requests per second (default: 0) 187 | -s Do not print additional information (silent mode) (default: false) 188 | -sa Stop on all error cases. Implies -sf and -se. (default: false) 189 | -se Stop on spurious errors (default: false) 190 | -sf Stop when > 95% of responses return 403 Forbidden (default: false) 191 | -t Number of concurrent threads. (default: 40) 192 | -v Verbose output, printing full URL and redirect location (if any) with the results. (default: false) 193 | 194 | MATCHER OPTIONS: 195 | -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405,500) 196 | -ml Match amount of lines in response 197 | -mr Match regexp 198 | -ms Match HTTP response size 199 | -mt Match how many milliseconds to the first response byte, either greater or less than. EG: ">100" or "<100" 200 | -mw Match amount of words in response 201 | 202 | FILTER OPTIONS: 203 | -fc Filter HTTP status codes from response. Comma separated list of codes and ranges 204 | -fl Filter by amount of lines in response. Comma separated list of line counts and ranges 205 | -fr Filter regexp 206 | -fs Filter HTTP response size. Comma separated list of sizes and ranges 207 | -ft Filter by number of milliseconds to the first response byte, either greater or less than. EG: ">100" or "<100" 208 | -fw Filter by amount of words in response. Comma separated list of word counts and ranges 209 | 210 | INPUT OPTIONS: 211 | -D DirSearch wordlist compatibility mode. Used in conjunction with -e flag. (default: false) 212 | -e Comma separated list of extensions. Extends FUZZ keyword. 
213 | -ic Ignore wordlist comments (default: false) 214 | -input-cmd Command producing the input. --input-num is required when using this input method. Overrides -w. 215 | -input-num Number of inputs to test. Used in conjunction with --input-cmd. (default: 100) 216 | -input-shell Shell to be used for running command 217 | -mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork, sniper (default: clusterbomb) 218 | -request File containing the raw http request 219 | -request-proto Protocol to use along with raw request (default: https) 220 | -w Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD' 221 | 222 | OUTPUT OPTIONS: 223 | -debug-log Write all of the internal logging to the specified file. 224 | -o Write output to file 225 | -od Directory path to store matched results to. 226 | -of Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats) (default: json) 227 | -or Don't create the output file if we don't have results (default: false) 228 | 229 | EXAMPLE USAGE: 230 | Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42. 231 | Colored, verbose output. 232 | ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v 233 | 234 | Fuzz Host-header, match HTTP 200 responses. 235 | ffuf -w hosts.txt -u https://example.org/ -H "Host: FUZZ" -mc 200 236 | 237 | Fuzz POST JSON data. Match all responses not containing text "error". 238 | ffuf -w entries.txt -u https://example.org/ -X POST -H "Content-Type: application/json" \ 239 | -d '{"name": "FUZZ", "anotherkey": "anothervalue"}' -fr "error" 240 | 241 | Fuzz multiple locations. Match only responses reflecting the value of "VAL" keyword. Colored. 242 | ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr "VAL" -c 243 | 244 | More information and examples: https://github.com/ffuf/ffuf 245 | 246 | ``` 247 | 248 | ### Interactive mode 249 | 250 | By pressing `ENTER` during ffuf execution, the process is paused and the user is dropped into a shell-like interactive mode: 251 | ``` 252 | entering interactive mode 253 | type "help" for a list of commands, or ENTER to resume. 254 | > help 255 | 256 | available commands: 257 | fc [value] - (re)configure status code filter 258 | fl [value] - (re)configure line count filter 259 | fw [value] - (re)configure word count filter 260 | fs [value] - (re)configure size filter 261 | queueshow - show recursive job queue 262 | queuedel [number] - delete a recursion job in the queue 263 | queueskip - advance to the next queued recursion job 264 | restart - restart and resume the current ffuf job 265 | resume - resume current ffuf job (or: ENTER) 266 | show - show results for the current job 267 | savejson [filename] - save current matches to a file 268 | help - you are looking at it 269 | > 270 | ``` 271 | 272 | In this mode, filters can be reconfigured, the queue managed, and the current state saved to disk. 273 | 274 | When (re)configuring the filters, they are applied retroactively, and all the false positive matches already in memory that 275 | would have been filtered out by the newly added filters get deleted. 276 | 277 | The new state of matches can be printed out with the command `show`, which prints all the matches as if they 278 | had been found by `ffuf`. 279 | 280 | As "negative" matches are not stored in memory, relaxing the filters unfortunately cannot bring back the lost matches.
281 | For this kind of scenario, the user can use the command `restart`, which resets the state and starts the current 282 | job from the beginning. 283 | 284 |
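For instance, a hypothetical session that replaces the size filter, reviews the remaining matches with `show`, and then uses `restart` to apply the new filter set to a fresh run could look like this (the filter value is illustrative, and the output is omitted):

```
> fs 4242
> show
> restart
```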
287 | 288 | 289 | ## Sponsorware 290 | 291 | `ffuf` employs a sponsorware model. This means that all new features developed by its author are initially available 292 | exclusively to the author's sponsors. 30 days after the exclusive release, all the new features will be released in the freely 293 | available open source repository at https://github.com/ffuf/ffuf. 294 | 295 | This model enables me to provide concrete benefits for the generous individuals and companies that enable me to work on 296 | `ffuf`. The different sponsorship tiers can be seen [here](https://github.com/sponsors/joohoi). 297 | 298 | All the community contributions are and will be available directly in the freely available open source repository. The 299 | benefits of the exclusive version only include new features created by [@joohoi](https://github.com/joohoi). 300 | 301 | ### Access the sponsorware through code contributions 302 | 303 | People who make significant contributions to the `ffuf` project itself should and will have access to the sponsorware 304 | as well. If you are planning to make such a contribution, please contact [@joohoi](https://github.com/joohoi) 305 | first to ensure that there aren't other people working on the same feature. 306 | 307 | ## Helper scripts and advanced payloads 308 | 309 | See the [ffuf-scripts](https://github.com/ffuf/ffuf-scripts) repository for helper scripts and payload generators 310 | for different workflows and usage scenarios. 311 | 312 | ## License 313 | 314 | ffuf is released under the MIT license. See [LICENSE](https://github.com/ffuf/ffuf/blob/master/LICENSE). 315 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "context" 5 | "flag" 6 | "fmt" 7 | "github.com/ffuf/ffuf/pkg/ffuf" 8 | "github.com/ffuf/ffuf/pkg/filter" 9 | "github.com/ffuf/ffuf/pkg/input" 10 | "github.com/ffuf/ffuf/pkg/interactive" 11 | "github.com/ffuf/ffuf/pkg/output" 12 | "github.com/ffuf/ffuf/pkg/runner" 13 | "io" 14 | "log" 15 | "os" 16 | "strings" 17 | "time" 18 | ) 19 | 20 | type multiStringFlag []string 21 | type wordlistFlag []string 22 | 23 | func (m *multiStringFlag) String() string { 24 | return "" 25 | } 26 | 27 | func (m *wordlistFlag) String() string { 28 | return "" 29 | } 30 | 31 | func (m *multiStringFlag) Set(value string) error { 32 | *m = append(*m, value) 33 | return nil 34 | } 35 | 36 | func (m *wordlistFlag) Set(value string) error { 37 | delimited := strings.Split(value, ",") 38 | 39 | if len(delimited) > 1 { 40 | *m = append(*m, delimited...)
41 | } else { 42 | *m = append(*m, value) 43 | } 44 | 45 | return nil 46 | } 47 | 48 | // ParseFlags parses the command line flags and (re)populates the ConfigOptions struct 49 | func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions { 50 | var ignored bool 51 | var cookies, autocalibrationstrings, headers, inputcommands multiStringFlag 52 | var wordlists wordlistFlag 53 | 54 | cookies = opts.HTTP.Cookies 55 | autocalibrationstrings = opts.General.AutoCalibrationStrings 56 | headers = opts.HTTP.Headers 57 | inputcommands = opts.Input.Inputcommands 58 | wordlists = opts.Input.Wordlists 59 | 60 | flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)") 61 | flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)") 62 | flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility") 63 | flag.BoolVar(&opts.Output.OutputSkipEmptyFile, "or", opts.Output.OutputSkipEmptyFile, "Don't create the output file if we don't have results") 64 | flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options") 65 | flag.BoolVar(&opts.General.AutoCalibrationPerHost, "ach", opts.General.AutoCalibration, "Per host autocalibration") 66 | flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.") 67 | flag.BoolVar(&opts.General.Json, "json", opts.General.Json, "JSON output, printing newline-delimited JSON records") 68 | flag.BoolVar(&opts.General.Noninteractive, "noninteractive", opts.General.Noninteractive, "Disable the interactive console functionality") 69 | flag.BoolVar(&opts.General.Quiet, "s", opts.General.Quiet, "Do not print additional information (silent mode)") 70 | flag.BoolVar(&opts.General.ShowVersion, "V", opts.General.ShowVersion, "Show version information.") 71 | flag.BoolVar(&opts.General.StopOn403, "sf", opts.General.StopOn403, "Stop when > 95% of responses return 403 Forbidden") 72 | flag.BoolVar(&opts.General.StopOnAll, "sa", opts.General.StopOnAll, "Stop on all error cases. Implies -sf and -se.") 73 | flag.BoolVar(&opts.General.StopOnErrors, "se", opts.General.StopOnErrors, "Stop on spurious errors") 74 | flag.BoolVar(&opts.General.Verbose, "v", opts.General.Verbose, "Verbose output, printing full URL and redirect location (if any) with the results.") 75 | flag.BoolVar(&opts.HTTP.FollowRedirects, "r", opts.HTTP.FollowRedirects, "Follow redirects") 76 | flag.BoolVar(&opts.HTTP.IgnoreBody, "ignore-body", opts.HTTP.IgnoreBody, "Do not fetch the response content.") 77 | flag.BoolVar(&opts.HTTP.Recursion, "recursion", opts.HTTP.Recursion, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.") 78 | flag.BoolVar(&opts.HTTP.Http2, "http2", opts.HTTP.Http2, "Use HTTP2 protocol") 79 | flag.BoolVar(&opts.Input.DirSearchCompat, "D", opts.Input.DirSearchCompat, "DirSearch wordlist compatibility mode. 
Used in conjunction with -e flag.") 80 | flag.BoolVar(&opts.Input.IgnoreWordlistComments, "ic", opts.Input.IgnoreWordlistComments, "Ignore wordlist comments") 81 | flag.IntVar(&opts.General.MaxTime, "maxtime", opts.General.MaxTime, "Maximum running time in seconds for entire process.") 82 | flag.IntVar(&opts.General.MaxTimeJob, "maxtime-job", opts.General.MaxTimeJob, "Maximum running time in seconds per job.") 83 | flag.IntVar(&opts.General.Rate, "rate", opts.General.Rate, "Rate of requests per second") 84 | flag.IntVar(&opts.General.Threads, "t", opts.General.Threads, "Number of concurrent threads.") 85 | flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.") 86 | flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.") 87 | flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.") 88 | flag.StringVar(&opts.General.AutoCalibrationKeyword, "ack", opts.General.AutoCalibrationKeyword, "Autocalibration keyword") 89 | flag.StringVar(&opts.General.AutoCalibrationStrategy, "acs", opts.General.AutoCalibrationStrategy, "Autocalibration strategy: \"basic\" or \"advanced\"") 90 | flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file") 91 | flag.StringVar(&opts.Filter.Mode, "fmode", opts.Filter.Mode, "Filter set operator. Either of: and, or") 92 | flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges") 93 | flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp") 94 | flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges") 95 | flag.StringVar(&opts.Filter.Status, "fc", opts.Filter.Status, "Filter HTTP status codes from response. Comma separated list of codes and ranges") 96 | flag.StringVar(&opts.Filter.Time, "ft", opts.Filter.Time, "Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100") 97 | flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. Comma separated list of word counts and ranges") 98 | flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"") 99 | flag.StringVar(&opts.General.Searchhash, "search", opts.General.Searchhash, "Search for a FFUFHASH payload from ffuf history") 100 | flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data") 101 | flag.StringVar(&opts.HTTP.Data, "data", opts.HTTP.Data, "POST data (alias of -d)") 102 | flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)") 103 | flag.StringVar(&opts.HTTP.Data, "data-binary", opts.HTTP.Data, "POST data (alias of -d)") 104 | flag.StringVar(&opts.HTTP.Method, "X", opts.HTTP.Method, "HTTP method to use") 105 | flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "Proxy URL (SOCKS5 or HTTP). 
For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080") 106 | flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.") 107 | flag.StringVar(&opts.HTTP.RecursionStrategy, "recursion-strategy", opts.HTTP.RecursionStrategy, "Recursion strategy: \"default\" for a redirect based, and \"greedy\" to recurse on all matches") 108 | flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL") 109 | flag.StringVar(&opts.HTTP.SNI, "sni", opts.HTTP.SNI, "Target TLS SNI, does not support FUZZ keyword") 110 | flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.") 111 | flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork, sniper") 112 | flag.StringVar(&opts.Input.InputShell, "input-shell", opts.Input.InputShell, "Shell to be used for running command") 113 | flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request") 114 | flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request") 115 | flag.StringVar(&opts.Matcher.Mode, "mmode", opts.Matcher.Mode, "Matcher set operator. Either of: and, or") 116 | flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response") 117 | flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp") 118 | flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size") 119 | flag.StringVar(&opts.Matcher.Status, "mc", opts.Matcher.Status, "Match HTTP status codes, or \"all\" for everything.") 120 | flag.StringVar(&opts.Matcher.Time, "mt", opts.Matcher.Time, "Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100") 121 | flag.StringVar(&opts.Matcher.Words, "mw", opts.Matcher.Words, "Match amount of words in response") 122 | flag.StringVar(&opts.Output.DebugLog, "debug-log", opts.Output.DebugLog, "Write all of the internal logging to the specified file.") 123 | flag.StringVar(&opts.Output.OutputDirectory, "od", opts.Output.OutputDirectory, "Directory path to store matched results to.") 124 | flag.StringVar(&opts.Output.OutputFile, "o", opts.Output.OutputFile, "Write output to file") 125 | flag.StringVar(&opts.Output.OutputFormat, "of", opts.Output.OutputFormat, "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)") 126 | flag.Var(&autocalibrationstrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac") 127 | flag.Var(&cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.") 128 | flag.Var(&cookies, "cookie", "Cookie data (alias of -b)") 129 | flag.Var(&headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.") 130 | flag.Var(&inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.") 131 | flag.Var(&wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. 
'/path/to/wordlist:KEYWORD'") 132 | flag.Usage = Usage 133 | flag.Parse() 134 | 135 | opts.General.AutoCalibrationStrings = autocalibrationstrings 136 | opts.HTTP.Cookies = cookies 137 | opts.HTTP.Headers = headers 138 | opts.Input.Inputcommands = inputcommands 139 | opts.Input.Wordlists = wordlists 140 | return opts 141 | } 142 | 143 | func main() { 144 | 145 | var err, optserr error 146 | ctx, cancel := context.WithCancel(context.Background()) 147 | defer cancel() 148 | // prepare the default config options from default config file 149 | var opts *ffuf.ConfigOptions 150 | opts, optserr = ffuf.ReadDefaultConfig() 151 | 152 | opts = ParseFlags(opts) 153 | 154 | // Handle searchhash functionality and exit 155 | if opts.General.Searchhash != "" { 156 | coptions, pos, err := ffuf.SearchHash(opts.General.Searchhash) 157 | if err != nil { 158 | fmt.Printf("[ERR] %s\n", err) 159 | os.Exit(1) 160 | } 161 | if len(coptions) > 0 { 162 | fmt.Printf("Request candidate(s) for hash %s\n", opts.General.Searchhash) 163 | } 164 | for _, copt := range coptions { 165 | conf, err := ffuf.ConfigFromOptions(&copt.ConfigOptions, ctx, cancel) 166 | if err != nil { 167 | continue 168 | } 169 | printSearchResults(conf, pos, copt.Time, opts.General.Searchhash) 170 | } 171 | if err != nil { 172 | fmt.Printf("[ERR] %s\n", err) 173 | } 174 | os.Exit(0) 175 | } 176 | 177 | if opts.General.ShowVersion { 178 | fmt.Printf("ffuf version: %s\n", ffuf.Version()) 179 | os.Exit(0) 180 | } 181 | if len(opts.Output.DebugLog) != 0 { 182 | f, err := os.OpenFile(opts.Output.DebugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) 183 | if err != nil { 184 | fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err) 185 | log.SetOutput(io.Discard) 186 | } else { 187 | log.SetOutput(f) 188 | defer f.Close() 189 | } 190 | } else { 191 | log.SetOutput(io.Discard) 192 | } 193 | if optserr != nil { 194 | log.Printf("Error while opening default config file: %s", optserr) 195 | } 196 | 197 | if opts.General.ConfigFile != "" { 198 | opts, err = ffuf.ReadConfig(opts.General.ConfigFile) 199 | if err != nil { 200 | fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err) 201 | Usage() 202 | fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err) 203 | os.Exit(1) 204 | } 205 | // Reset the flag package state 206 | flag.CommandLine = flag.NewFlagSet(os.Args[0], flag.ExitOnError) 207 | // Re-parse the cli options 208 | opts = ParseFlags(opts) 209 | } 210 | 211 | // Set up Config struct 212 | conf, err := ffuf.ConfigFromOptions(opts, ctx, cancel) 213 | if err != nil { 214 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 215 | Usage() 216 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 217 | os.Exit(1) 218 | } 219 | 220 | job, err := prepareJob(conf) 221 | if err != nil { 222 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 223 | Usage() 224 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 225 | os.Exit(1) 226 | } 227 | if err := SetupFilters(opts, conf); err != nil { 228 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 229 | Usage() 230 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) 231 | os.Exit(1) 232 | } 233 | 234 | if !conf.Noninteractive { 235 | go func() { 236 | err := interactive.Handle(job) 237 | if err != nil { 238 | log.Printf("Error while trying to initialize interactive session: %s", err) 239 | } 240 | }() 241 | } 242 | 243 | // Job handles waiting for goroutines to complete itself 244 | job.Start() 245 | } 246 | 247 | func prepareJob(conf *ffuf.Config) 
(*ffuf.Job, error) { 248 | job := ffuf.NewJob(conf) 249 | var errs ffuf.Multierror 250 | job.Input, errs = input.NewInputProvider(conf) 251 | // TODO: implement error handling for runnerprovider and outputprovider 252 | // We only have http runner right now 253 | job.Runner = runner.NewRunnerByName("http", conf, false) 254 | if len(conf.ReplayProxyURL) > 0 { 255 | job.ReplayRunner = runner.NewRunnerByName("http", conf, true) 256 | } 257 | // We only have stdout outputprovider right now 258 | job.Output = output.NewOutputProviderByName("stdout", conf) 259 | return job, errs.ErrorOrNil() 260 | } 261 | 262 | func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { 263 | errs := ffuf.NewMultierror() 264 | conf.MatcherManager = filter.NewMatcherManager() 265 | // If any other matcher is set, ignore -mc default value 266 | matcherSet := false 267 | statusSet := false 268 | warningIgnoreBody := false 269 | flag.Visit(func(f *flag.Flag) { 270 | if f.Name == "mc" { 271 | statusSet = true 272 | } 273 | if f.Name == "ms" { 274 | matcherSet = true 275 | warningIgnoreBody = true 276 | } 277 | if f.Name == "ml" { 278 | matcherSet = true 279 | warningIgnoreBody = true 280 | } 281 | if f.Name == "mr" { 282 | matcherSet = true 283 | } 284 | if f.Name == "mt" { 285 | matcherSet = true 286 | } 287 | if f.Name == "mw" { 288 | matcherSet = true 289 | warningIgnoreBody = true 290 | } 291 | }) 292 | // Only set default matchers if no 293 | if statusSet || !matcherSet { 294 | if err := conf.MatcherManager.AddMatcher("status", parseOpts.Matcher.Status); err != nil { 295 | errs.Add(err) 296 | } 297 | } 298 | 299 | if parseOpts.Filter.Status != "" { 300 | if err := conf.MatcherManager.AddFilter("status", parseOpts.Filter.Status, false); err != nil { 301 | errs.Add(err) 302 | } 303 | } 304 | if parseOpts.Filter.Size != "" { 305 | warningIgnoreBody = true 306 | if err := conf.MatcherManager.AddFilter("size", parseOpts.Filter.Size, false); err != nil { 307 | errs.Add(err) 308 | } 309 | } 310 | if parseOpts.Filter.Regexp != "" { 311 | if err := conf.MatcherManager.AddFilter("regexp", parseOpts.Filter.Regexp, false); err != nil { 312 | errs.Add(err) 313 | } 314 | } 315 | if parseOpts.Filter.Words != "" { 316 | warningIgnoreBody = true 317 | if err := conf.MatcherManager.AddFilter("word", parseOpts.Filter.Words, false); err != nil { 318 | errs.Add(err) 319 | } 320 | } 321 | if parseOpts.Filter.Lines != "" { 322 | warningIgnoreBody = true 323 | if err := conf.MatcherManager.AddFilter("line", parseOpts.Filter.Lines, false); err != nil { 324 | errs.Add(err) 325 | } 326 | } 327 | if parseOpts.Filter.Time != "" { 328 | if err := conf.MatcherManager.AddFilter("time", parseOpts.Filter.Time, false); err != nil { 329 | errs.Add(err) 330 | } 331 | } 332 | if parseOpts.Matcher.Size != "" { 333 | if err := conf.MatcherManager.AddMatcher("size", parseOpts.Matcher.Size); err != nil { 334 | errs.Add(err) 335 | } 336 | } 337 | if parseOpts.Matcher.Regexp != "" { 338 | if err := conf.MatcherManager.AddMatcher("regexp", parseOpts.Matcher.Regexp); err != nil { 339 | errs.Add(err) 340 | } 341 | } 342 | if parseOpts.Matcher.Words != "" { 343 | if err := conf.MatcherManager.AddMatcher("word", parseOpts.Matcher.Words); err != nil { 344 | errs.Add(err) 345 | } 346 | } 347 | if parseOpts.Matcher.Lines != "" { 348 | if err := conf.MatcherManager.AddMatcher("line", parseOpts.Matcher.Lines); err != nil { 349 | errs.Add(err) 350 | } 351 | } 352 | if parseOpts.Matcher.Time != "" { 353 | if err := 
conf.MatcherManager.AddMatcher("time", parseOpts.Matcher.Time); err != nil { 354 | errs.Add(err) 355 | } 356 | } 357 | if conf.IgnoreBody && warningIgnoreBody { 358 | fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") 359 | } 360 | return errs.ErrorOrNil() 361 | } 362 | 363 | func printSearchResults(conf *ffuf.Config, pos int, exectime time.Time, hash string) { 364 | inp, err := input.NewInputProvider(conf) 365 | if err.ErrorOrNil() != nil { 366 | fmt.Printf("-------------------------------------------\n") 367 | fmt.Println("Encountered error that prevents reproduction of the request:") 368 | fmt.Println(err.ErrorOrNil()) 369 | return 370 | } 371 | inp.SetPosition(pos) 372 | inputdata := inp.Value() 373 | inputdata["FFUFHASH"] = []byte(hash) 374 | basereq := ffuf.BaseRequest(conf) 375 | dummyrunner := runner.NewRunnerByName("simple", conf, false) 376 | ffufreq, _ := dummyrunner.Prepare(inputdata, &basereq) 377 | rawreq, _ := dummyrunner.Dump(&ffufreq) 378 | fmt.Printf("-------------------------------------------\n") 379 | fmt.Printf("ffuf job started at: %s\n\n", exectime.Format(time.RFC3339)) 380 | fmt.Printf("%s\n", string(rawreq)) 381 | } 382 | --------------------------------------------------------------------------------