diff --git a/CHANGELOG.md b/CHANGELOG.md index cf97877..89d9a41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,33 @@ - master - New - Changed + +- v1.5.0 + - New + - New autocalibration options: `-ach`, `-ack` and `-acs`. Revamped the whole autocalibration process + - Configurable modes for matchers and filters (CLI flags: `fmode` and `mmode`): "and" and "or" + - Changed + +- v1.4.1 + - New + - Changed + - Fixed a bug with recursion, introduced in the 1.4.0 release + - Recursion now works better with multiple wordlists, disabling unnecessary wordlists for queued jobs where needed + +- v1.4.0 + - New + - Added response time logging and filtering + - Added a CLI flag to specify TLS SNI value + - Added full line colors + - Added `-json` to emit newline delimited JSON output + - Added 500 Internal Server Error to list of status codes matched by default + - Changed + - Fixed an issue where output file was created regardless of `-or` + - Fixed an issue where output (often a lot of it) would be printed after entering interactive mode + - Fixed an issue when reading wordlist files from ffufrc + - Fixed an issue where `-of all` option only creates one output file (instead of all formats) + - Fixed an issue where redirection to the same domain in recursive mode dropped port info from URL + - Added HTTP2 support - v1.3.1 - New diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index 641db38..6a15f06 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -1,4 +1,7 @@ # Contributors + +* [adamtlangley](https://github.com/adamtlangley) +* [adilsoybali](https://github.com/adilsoybali) * [AverageSecurityGuy](https://github.com/averagesecurityguy) * [bp0](https://github.com/bp0lr) * [bjhulst](https://github.com/bjhulst) @@ -10,15 +13,20 @@ * [Damian89](https://github.com/Damian89) * [Daviey](https://github.com/Daviey) * [delic](https://github.com/delic) +* [denandz](https://github.com/denandz) +* [erbbysam](https://github.com/erbbysam) * [eur0pa](https://github.com/eur0pa) * 
[fabiobauer](https://github.com/fabiobauer) * [fang0654](https://github.com/fang0654) +* [Hazegard](https://github.com/Hazegard) * [helpermika](https://github.com/helpermika) +* [h1x](https://github.com/h1x-lnx) * [Ice3man543](https://github.com/Ice3man543) * [JamTookTheBait](https://github.com/JamTookTheBait) * [jimen0](https://github.com/jimen0) * [joohoi](https://github.com/joohoi) * [jsgv](https://github.com/jsgv) +* [justinsteven](https://github.com/justinsteven) * [jvesiluoma](https://github.com/jvesiluoma) * [Kiblyn11](https://github.com/Kiblyn11) * [lc](https://github.com/lc) @@ -26,9 +34,12 @@ * [nnwakelam](https://twitter.com/nnwakelam) * [noraj](https://pwn.by/noraj) * [oh6hay](https://github.com/oh6hay) +* [penguinxoxo](https://github.com/penguinxoxo) * [putsi](https://github.com/putsi) * [SakiiR](https://github.com/SakiiR) * [seblw](https://github.com/seblw) * [Shaked](https://github.com/Shaked) +* [Skyehopper](https://github.com/Skyehopper) * [SolomonSklash](https://github.com/SolomonSklash) * [l4yton](https://github.com/l4yton) +* [xfgusta](https://github.com/xfgusta) diff --git a/README.md b/README.md index 69c6b3f..262ce0d 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,4 @@ -``` - /'___\ /'___\ /'___\ - /\ \__/ /\ \__/ __ __ /\ \__/ - \ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\ - \ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/ - \ \_\ \ \_\ \ \____/ \ \_\ - \/_/ \/_/ \/___/ \/_/ -``` - +![ffuf mascot](_img/ffuf_run_logo_600.png) # ffuf - Fuzz Faster U Fool A fast web fuzzer written in Go. @@ -34,20 +26,17 @@ [![Porchetta Industries](https://discordapp.com/api/guilds/736724457258745996/widget.png?style=banner2)](https://discord.gg/VWcdZCUsQP) - - - ## Installation - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run! 
_or_ -- If you have recent go compiler installed: `go get -u github.com/ffuf/ffuf` (the same command works for updating) +- If you have recent go compiler installed: `go install github.com/ffuf/ffuf@latest` (the same command works for updating) _or_ -- git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build - -Ffuf depends on Go 1.13 or greater. +- `git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build` + +Ffuf depends on Go 1.16 or greater. ## Example usage @@ -57,6 +46,7 @@ "[Everything you need to know about FFUF](https://codingo.io/tools/ffuf/bounty/2020/09/17/everything-you-need-to-know-about-ffuf.html)" by Michael Skelton ([@codingo](https://github.com/codingo)). +You can also practise your ffuf scans against a live host with different lessons and use cases either locally by using the docker container https://github.com/adamtlangley/ffufme or against the live hosted version at http://ffuf.me created by Adam Langley [@adamtlangley](https://twitter.com/adamtlangley). ### Typical directory discovery @@ -152,6 +142,10 @@ Additionally, in case you wish to use bunch of configuration files for different use cases, you can do this by defining the configuration file path using `-config` command line flag that takes the file path to the configuration file as its parameter. + +

+ +

## Usage @@ -171,6 +165,7 @@ -recursion-depth Maximum recursion depth. (default: 0) -recursion-strategy Recursion strategy: "default" for a redirect based, and "greedy" to recurse on all matches (default: default) -replay-proxy Replay matched requests using this proxy. + -sni Target TLS SNI, does not support FUZZ keyword -timeout HTTP request timeout in seconds. (default: 10) -u Target URL -x Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080 @@ -194,10 +189,11 @@ -v Verbose output, printing full URL and redirect location (if any) with the results. (default: false) MATCHER OPTIONS: - -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405) + -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405,500) -ml Match amount of lines in response -mr Match regexp -ms Match HTTP response size + -mt Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100 -mw Match amount of words in response FILTER OPTIONS: @@ -205,6 +201,7 @@ -fl Filter by amount of lines in response. Comma separated list of line counts and ranges -fr Filter regexp -fs Filter HTTP response size. Comma separated list of sizes and ranges + -ft Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100 -fw Filter by amount of words in response. Comma separated list of word counts and ranges INPUT OPTIONS: @@ -214,7 +211,7 @@ -input-cmd Command producing the input. --input-num is required when using this input method. Overrides -w. -input-num Number of inputs to test. Used in conjunction with --input-cmd. (default: 100) -input-shell Shell to be used for running command - -mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork (default: clusterbomb) + -mode Multi-wordlist operation mode. 
Available modes: clusterbomb, pitchfork, sniper (default: clusterbomb) -request File containing the raw http request -request-proto Protocol to use along with raw request (default: https) -w Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD' @@ -281,6 +278,10 @@ For this kind of scenario, the user is able to use the command `restart`, which resets the state and starts the current job from the beginning. +

+ +

+ ## Sponsorware diff --git a/_img/ffuf_juggling_250.png b/_img/ffuf_juggling_250.png new file mode 100644 index 0000000..e24e1ed Binary files /dev/null and b/_img/ffuf_juggling_250.png differ diff --git a/_img/ffuf_mascot_600.png b/_img/ffuf_mascot_600.png new file mode 100644 index 0000000..72b6305 Binary files /dev/null and b/_img/ffuf_mascot_600.png differ diff --git a/_img/ffuf_run_logo_600.png b/_img/ffuf_run_logo_600.png new file mode 100644 index 0000000..d95621a Binary files /dev/null and b/_img/ffuf_run_logo_600.png differ diff --git a/_img/ffuf_running_250.png b/_img/ffuf_running_250.png new file mode 100644 index 0000000..0e30653 Binary files /dev/null and b/_img/ffuf_running_250.png differ diff --git a/_img/ffuf_waving_250.png b/_img/ffuf_waving_250.png new file mode 100644 index 0000000..a04d976 Binary files /dev/null and b/_img/ffuf_waving_250.png differ diff --git a/ffufrc.example b/ffufrc.example index 964fabb..6d6b1ec 100644 --- a/ffufrc.example +++ b/ffufrc.example @@ -27,6 +27,9 @@ "randomtest", "admin" ] + autocalibration_strategy = "basic" + autocalibration_keyword = "FUZZ" + autocalibration_perhost = false colors = false delay = "" maxtime = 0 @@ -39,6 +42,7 @@ stoponerrors = false threads = 40 verbose = false + json = false [input] dirsearchcompat = false @@ -65,15 +69,19 @@ outputcreateemptyfile = false [filter] + mode = "or" lines = "" regexp = "" size = "" status = "" + time = "" words = "" [matcher] + mode = "or" lines = "" regexp = "" size = "" - status = "200,204,301,302,307,401,403,405" + status = "200,204,301,302,307,401,403,405,500" + time = "" words = "" diff --git a/help.go b/help.go index 74f2d4d..4d429c4 100644 --- a/help.go +++ b/help.go @@ -54,14 +54,14 @@ Description: "Options controlling the HTTP request and its parts.", Flags: make([]UsageFlag, 0), Hidden: false, - ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x"}, + 
ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x", "sni", "http2"}, } u_general := UsageSection{ Name: "GENERAL OPTIONS", Description: "", Flags: make([]UsageFlag, 0), Hidden: false, - ExpectedFlags: []string{"ac", "acc", "c", "config", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"}, + ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"}, } u_compat := UsageSection{ Name: "COMPATIBILITY OPTIONS", @@ -75,14 +75,14 @@ Description: "Matchers for the response filtering.", Flags: make([]UsageFlag, 0), Hidden: false, - ExpectedFlags: []string{"mc", "ml", "mr", "ms", "mw"}, + ExpectedFlags: []string{"mmode", "mc", "ml", "mr", "ms", "mt", "mw"}, } u_filter := UsageSection{ Name: "FILTER OPTIONS", Description: "Filters for the response filtering.", Flags: make([]UsageFlag, 0), Hidden: false, - ExpectedFlags: []string{"fc", "fl", "fr", "fs", "fw"}, + ExpectedFlags: []string{"fmode", "fc", "fl", "fr", "fs", "ft", "fw"}, } u_input := UsageSection{ Name: "INPUT OPTIONS", diff --git a/main.go b/main.go index 7da9b8c..f51663f 100644 --- a/main.go +++ b/main.go @@ -4,16 +4,17 @@ "context" "flag" "fmt" + "github.com/ffuf/ffuf/pkg/filter" + "io/ioutil" + "log" + "os" + "strings" + "github.com/ffuf/ffuf/pkg/ffuf" - "github.com/ffuf/ffuf/pkg/filter" "github.com/ffuf/ffuf/pkg/input" "github.com/ffuf/ffuf/pkg/interactive" "github.com/ffuf/ffuf/pkg/output" "github.com/ffuf/ffuf/pkg/runner" - "io/ioutil" - "log" - "os" - "strings" ) type multiStringFlag []string @@ -54,13 +55,16 @@ autocalibrationstrings = opts.General.AutoCalibrationStrings headers = opts.HTTP.Headers inputcommands = opts.Input.Inputcommands + wordlists = opts.Input.Wordlists flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl 
functionality (ignored)") flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)") flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility") - flag.BoolVar(&opts.Output.OutputCreateEmptyFile, "or", opts.Output.OutputCreateEmptyFile, "Don't create the output file if we don't have results") + flag.BoolVar(&opts.Output.OutputSkipEmptyFile, "or", opts.Output.OutputSkipEmptyFile, "Don't create the output file if we don't have results") flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options") + flag.BoolVar(&opts.General.AutoCalibrationPerHost, "ach", opts.General.AutoCalibration, "Per host autocalibration") flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.") + flag.BoolVar(&opts.General.Json, "json", opts.General.Json, "JSON output, printing newline-delimited JSON records") flag.BoolVar(&opts.General.Noninteractive, "noninteractive", opts.General.Noninteractive, "Disable the interactive console functionality") flag.BoolVar(&opts.General.Quiet, "s", opts.General.Quiet, "Do not print additional information (silent mode)") flag.BoolVar(&opts.General.ShowVersion, "V", opts.General.ShowVersion, "Show version information.") @@ -71,6 +75,7 @@ flag.BoolVar(&opts.HTTP.FollowRedirects, "r", opts.HTTP.FollowRedirects, "Follow redirects") flag.BoolVar(&opts.HTTP.IgnoreBody, "ignore-body", opts.HTTP.IgnoreBody, "Do not fetch the response content.") flag.BoolVar(&opts.HTTP.Recursion, "recursion", opts.HTTP.Recursion, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.") + flag.BoolVar(&opts.HTTP.Http2, "http2", opts.HTTP.Http2, "Use HTTP2 protocol") flag.BoolVar(&opts.Input.DirSearchCompat, "D", opts.Input.DirSearchCompat, "DirSearch wordlist compatibility mode. 
Used in conjunction with -e flag.") flag.BoolVar(&opts.Input.IgnoreWordlistComments, "ic", opts.Input.IgnoreWordlistComments, "Ignore wordlist comments") flag.IntVar(&opts.General.MaxTime, "maxtime", opts.General.MaxTime, "Maximum running time in seconds for entire process.") @@ -80,11 +85,15 @@ flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.") flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.") flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.") + flag.StringVar(&opts.General.AutoCalibrationKeyword, "ack", opts.General.AutoCalibrationKeyword, "Autocalibration keyword") + flag.StringVar(&opts.General.AutoCalibrationStrategy, "acs", opts.General.AutoCalibrationStrategy, "Autocalibration strategy: \"basic\" or \"advanced\"") flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file") + flag.StringVar(&opts.Filter.Mode, "fmode", opts.Filter.Mode, "Filter set operator. Either of: and, or") flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges") flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp") flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges") flag.StringVar(&opts.Filter.Status, "fc", opts.Filter.Status, "Filter HTTP status codes from response. Comma separated list of codes and ranges") + flag.StringVar(&opts.Filter.Time, "ft", opts.Filter.Time, "Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100") flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. 
Comma separated list of word counts and ranges") flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"") flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data") @@ -96,15 +105,18 @@ flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.") flag.StringVar(&opts.HTTP.RecursionStrategy, "recursion-strategy", opts.HTTP.RecursionStrategy, "Recursion strategy: \"default\" for a redirect based, and \"greedy\" to recurse on all matches") flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL") + flag.StringVar(&opts.HTTP.SNI, "sni", opts.HTTP.SNI, "Target TLS SNI, does not support FUZZ keyword") flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.") - flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork") + flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork, sniper") flag.StringVar(&opts.Input.InputShell, "input-shell", opts.Input.InputShell, "Shell to be used for running command") flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request") flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request") + flag.StringVar(&opts.Matcher.Mode, "mmode", opts.Matcher.Mode, "Matcher set operator. 
Either of: and, or") flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response") flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp") flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size") flag.StringVar(&opts.Matcher.Status, "mc", opts.Matcher.Status, "Match HTTP status codes, or \"all\" for everything.") + flag.StringVar(&opts.Matcher.Time, "mt", opts.Matcher.Time, "Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100") flag.StringVar(&opts.Matcher.Words, "mw", opts.Matcher.Words, "Match amount of words in response") flag.StringVar(&opts.Output.DebugLog, "debug-log", opts.Output.DebugLog, "Write all of the internal logging to the specified file.") flag.StringVar(&opts.Output.OutputDirectory, "od", opts.Output.OutputDirectory, "Directory path to store matched results to.") @@ -188,17 +200,13 @@ fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) os.Exit(1) } - if err := filter.SetupFilters(opts, conf); err != nil { + if err := SetupFilters(opts, conf); err != nil { fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) Usage() fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) os.Exit(1) } - if err := filter.CalibrateIfNeeded(job); err != nil { - fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err) - os.Exit(1) - } if !conf.Noninteractive { go func() { err := interactive.Handle(job) @@ -226,3 +234,104 @@ job.Output = output.NewOutputProviderByName("stdout", conf) return job, errs.ErrorOrNil() } + +func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { + errs := ffuf.NewMultierror() + conf.MatcherManager = filter.NewMatcherManager() + // If any other matcher is set, ignore -mc default value + matcherSet := false + statusSet := false + warningIgnoreBody := false + flag.Visit(func(f *flag.Flag) { + if f.Name == "mc" { + statusSet = true + } + if f.Name == "ms" { + 
matcherSet = true + warningIgnoreBody = true + } + if f.Name == "ml" { + matcherSet = true + warningIgnoreBody = true + } + if f.Name == "mr" { + matcherSet = true + } + if f.Name == "mt" { + matcherSet = true + } + if f.Name == "mw" { + matcherSet = true + warningIgnoreBody = true + } + }) + // Only set default matchers if no + if statusSet || !matcherSet { + if err := conf.MatcherManager.AddMatcher("status", parseOpts.Matcher.Status); err != nil { + errs.Add(err) + } + } + + if parseOpts.Filter.Status != "" { + if err := conf.MatcherManager.AddFilter("status", parseOpts.Filter.Status, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Filter.Size != "" { + warningIgnoreBody = true + if err := conf.MatcherManager.AddFilter("size", parseOpts.Filter.Size, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Filter.Regexp != "" { + if err := conf.MatcherManager.AddFilter("regexp", parseOpts.Filter.Regexp, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Filter.Words != "" { + warningIgnoreBody = true + if err := conf.MatcherManager.AddFilter("word", parseOpts.Filter.Words, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Filter.Lines != "" { + warningIgnoreBody = true + if err := conf.MatcherManager.AddFilter("line", parseOpts.Filter.Lines, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Filter.Time != "" { + if err := conf.MatcherManager.AddFilter("time", parseOpts.Filter.Time, false); err != nil { + errs.Add(err) + } + } + if parseOpts.Matcher.Size != "" { + if err := conf.MatcherManager.AddMatcher("size", parseOpts.Matcher.Size); err != nil { + errs.Add(err) + } + } + if parseOpts.Matcher.Regexp != "" { + if err := conf.MatcherManager.AddMatcher("regexp", parseOpts.Matcher.Regexp); err != nil { + errs.Add(err) + } + } + if parseOpts.Matcher.Words != "" { + if err := conf.MatcherManager.AddMatcher("word", parseOpts.Matcher.Words); err != nil { + errs.Add(err) + } + } + if parseOpts.Matcher.Lines != "" { + if err := 
conf.MatcherManager.AddMatcher("line", parseOpts.Matcher.Lines); err != nil { + errs.Add(err) + } + } + if parseOpts.Matcher.Time != "" { + if err := conf.MatcherManager.AddFilter("time", parseOpts.Matcher.Time, false); err != nil { + errs.Add(err) + } + } + if conf.IgnoreBody && warningIgnoreBody { + fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") + } + return errs.ErrorOrNil() +} diff --git a/pkg/ffuf/autocalibration.go b/pkg/ffuf/autocalibration.go new file mode 100644 index 0000000..d6d02e5 --- /dev/null +++ b/pkg/ffuf/autocalibration.go @@ -0,0 +1,235 @@ +package ffuf + +import ( + "fmt" + "log" + "math/rand" + "strconv" + "time" +) + +func (j *Job) autoCalibrationStrings() map[string][]string { + rand.Seed(time.Now().UnixNano()) + cInputs := make(map[string][]string) + if len(j.Config.AutoCalibrationStrings) < 1 { + cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(16)) + cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(8)) + cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(16)) + cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(8)) + cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(16)) + cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(8)) + if j.Config.AutoCalibrationStrategy == "advanced" { + // Add directory tests and .htaccess too + cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(16)+"/") + cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(8)+"/") + cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(16)+"/") + cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(8)+"/") + } + } else { + cInputs["custom"] = append(cInputs["custom"], j.Config.AutoCalibrationStrings...) 
+ } + return cInputs +} + +func (j *Job) calibrationRequest(inputs map[string][]byte) (Response, error) { + basereq := BaseRequest(j.Config) + req, err := j.Runner.Prepare(inputs, &basereq) + if err != nil { + j.Output.Error(fmt.Sprintf("Encountered an error while preparing autocalibration request: %s\n", err)) + j.incError() + log.Printf("%s", err) + return Response{}, err + } + resp, err := j.Runner.Execute(&req) + if err != nil { + j.Output.Error(fmt.Sprintf("Encountered an error while executing autocalibration request: %s\n", err)) + j.incError() + log.Printf("%s", err) + return Response{}, err + } + // Only calibrate on responses that would be matched otherwise + if j.isMatch(resp) { + return resp, nil + } + return resp, fmt.Errorf("Response wouldn't be matched") +} + +//CalibrateForHost runs autocalibration for a specific host +func (j *Job) CalibrateForHost(host string, baseinput map[string][]byte) error { + if j.Config.MatcherManager.CalibratedForDomain(host) { + return nil + } + if baseinput[j.Config.AutoCalibrationKeyword] == nil { + return fmt.Errorf("Autocalibration keyword \"%s\" not found in the request.", j.Config.AutoCalibrationKeyword) + } + cStrings := j.autoCalibrationStrings() + input := make(map[string][]byte) + for k, v := range baseinput { + input[k] = v + } + for _, v := range cStrings { + responses := make([]Response, 0) + for _, cs := range v { + input[j.Config.AutoCalibrationKeyword] = []byte(cs) + resp, err := j.calibrationRequest(input) + if err != nil { + continue + } + responses = append(responses, resp) + err = j.calibrateFilters(responses, true) + if err != nil { + j.Output.Error(fmt.Sprintf("%s", err)) + } + } + } + j.Config.MatcherManager.SetCalibratedForHost(host, true) + return nil +} + +//CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests +func (j *Job) Calibrate(input map[string][]byte) error { + if j.Config.MatcherManager.Calibrated() { + return nil + } + cInputs := 
j.autoCalibrationStrings() + + for _, v := range cInputs { + responses := make([]Response, 0) + for _, cs := range v { + input[j.Config.AutoCalibrationKeyword] = []byte(cs) + resp, err := j.calibrationRequest(input) + if err != nil { + continue + } + responses = append(responses, resp) + err = j.calibrateFilters(responses, false) + if err != nil { + j.Output.Error(fmt.Sprintf("%s", err)) + } + } + } + j.Config.MatcherManager.SetCalibrated(true) + return nil +} + +//CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and +// configuring the filters accordingly +func (j *Job) CalibrateIfNeeded(host string, input map[string][]byte) error { + j.calibMutex.Lock() + defer j.calibMutex.Unlock() + if !j.Config.AutoCalibration { + return nil + } + if j.Config.AutoCalibrationPerHost { + return j.CalibrateForHost(host, input) + } + return j.Calibrate(input) +} + +func (j *Job) calibrateFilters(responses []Response, perHost bool) error { + // Work down from the most specific common denominator + if len(responses) > 0 { + // Content length + baselineSize := responses[0].ContentLength + sizeMatch := true + for _, r := range responses { + if baselineSize != r.ContentLength { + sizeMatch = false + } + } + if sizeMatch { + if perHost { + // Check if already filtered + for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "size", strconv.FormatInt(baselineSize, 10)) + return nil + } else { + // Check if already filtered + for _, f := range j.Config.MatcherManager.GetFilters() { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddFilter("size", strconv.FormatInt(baselineSize, 10), false) + return nil + } + } + + // 
Content words + baselineWords := responses[0].ContentWords + wordsMatch := true + for _, r := range responses { + if baselineWords != r.ContentWords { + wordsMatch = false + } + } + if wordsMatch { + if perHost { + // Check if already filtered + for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "word", strconv.FormatInt(baselineWords, 10)) + return nil + } else { + // Check if already filtered + for _, f := range j.Config.MatcherManager.GetFilters() { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddFilter("word", strconv.FormatInt(baselineSize, 10), false) + return nil + } + } + + // Content lines + baselineLines := responses[0].ContentLines + linesMatch := true + for _, r := range responses { + if baselineLines != r.ContentLines { + linesMatch = false + } + } + if linesMatch { + if perHost { + // Check if already filtered + for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "line", strconv.FormatInt(baselineLines, 10)) + return nil + } else { + // Check if already filtered + for _, f := range j.Config.MatcherManager.GetFilters() { + match, _ := f.Filter(&responses[0]) + if match { + // Already filtered + return nil + } + } + _ = j.Config.MatcherManager.AddFilter("line", strconv.FormatInt(baselineSize, 10), false) + return nil + } + } + } + return fmt.Errorf("No common filtering values found") +} diff --git a/pkg/ffuf/config.go b/pkg/ffuf/config.go index 1b7fe58..e2f21e3 100644 --- a/pkg/ffuf/config.go +++ 
b/pkg/ffuf/config.go @@ -5,61 +5,71 @@ ) type Config struct { - AutoCalibration bool `json:"autocalibration"` - AutoCalibrationStrings []string `json:"autocalibration_strings"` - Cancel context.CancelFunc `json:"-"` - Colors bool `json:"colors"` - CommandKeywords []string `json:"-"` - CommandLine string `json:"cmdline"` - ConfigFile string `json:"configfile"` - Context context.Context `json:"-"` - Data string `json:"postdata"` - Delay optRange `json:"delay"` - DirSearchCompat bool `json:"dirsearch_compatibility"` - Extensions []string `json:"extensions"` - Filters map[string]FilterProvider `json:"filters"` - FollowRedirects bool `json:"follow_redirects"` - Headers map[string]string `json:"headers"` - IgnoreBody bool `json:"ignorebody"` - IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` - InputMode string `json:"inputmode"` - InputNum int `json:"cmd_inputnum"` - InputProviders []InputProviderConfig `json:"inputproviders"` - InputShell string `json:"inputshell"` - Matchers map[string]FilterProvider `json:"matchers"` - MaxTime int `json:"maxtime"` - MaxTimeJob int `json:"maxtime_job"` - Method string `json:"method"` - Noninteractive bool `json:"noninteractive"` - OutputDirectory string `json:"outputdirectory"` - OutputFile string `json:"outputfile"` - OutputFormat string `json:"outputformat"` - OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"` - ProgressFrequency int `json:"-"` - ProxyURL string `json:"proxyurl"` - Quiet bool `json:"quiet"` - Rate int64 `json:"rate"` - Recursion bool `json:"recursion"` - RecursionDepth int `json:"recursion_depth"` - RecursionStrategy string `json:"recursion_strategy"` - ReplayProxyURL string `json:"replayproxyurl"` - StopOn403 bool `json:"stop_403"` - StopOnAll bool `json:"stop_all"` - StopOnErrors bool `json:"stop_errors"` - Threads int `json:"threads"` - Timeout int `json:"timeout"` - Url string `json:"url"` - Verbose bool `json:"verbose"` + AutoCalibration bool `json:"autocalibration"` + AutoCalibrationKeyword 
string `json:"autocalibration_keyword"` + AutoCalibrationPerHost bool `json:"autocalibration_perhost"` + AutoCalibrationStrategy string `json:"autocalibration_strategy"` + AutoCalibrationStrings []string `json:"autocalibration_strings"` + Cancel context.CancelFunc `json:"-"` + Colors bool `json:"colors"` + CommandKeywords []string `json:"-"` + CommandLine string `json:"cmdline"` + ConfigFile string `json:"configfile"` + Context context.Context `json:"-"` + Data string `json:"postdata"` + Delay optRange `json:"delay"` + DirSearchCompat bool `json:"dirsearch_compatibility"` + Extensions []string `json:"extensions"` + FilterMode string `json:"fmode"` + FollowRedirects bool `json:"follow_redirects"` + Headers map[string]string `json:"headers"` + IgnoreBody bool `json:"ignorebody"` + IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` + InputMode string `json:"inputmode"` + InputNum int `json:"cmd_inputnum"` + InputProviders []InputProviderConfig `json:"inputproviders"` + InputShell string `json:"inputshell"` + Json bool `json:"json"` + MatcherManager MatcherManager `json:"matchers"` + MatcherMode string `json:"mmode"` + MaxTime int `json:"maxtime"` + MaxTimeJob int `json:"maxtime_job"` + Method string `json:"method"` + Noninteractive bool `json:"noninteractive"` + OutputDirectory string `json:"outputdirectory"` + OutputFile string `json:"outputfile"` + OutputFormat string `json:"outputformat"` + OutputSkipEmptyFile bool `json:"OutputSkipEmptyFile"` + ProgressFrequency int `json:"-"` + ProxyURL string `json:"proxyurl"` + Quiet bool `json:"quiet"` + Rate int64 `json:"rate"` + Recursion bool `json:"recursion"` + RecursionDepth int `json:"recursion_depth"` + RecursionStrategy string `json:"recursion_strategy"` + ReplayProxyURL string `json:"replayproxyurl"` + SNI string `json:"sni"` + StopOn403 bool `json:"stop_403"` + StopOnAll bool `json:"stop_all"` + StopOnErrors bool `json:"stop_errors"` + Threads int `json:"threads"` + Timeout int `json:"timeout"` + Url 
string `json:"url"` + Verbose bool `json:"verbose"` + Http2 bool `json:"http2"` } type InputProviderConfig struct { - Name string `json:"name"` - Keyword string `json:"keyword"` - Value string `json:"value"` + Name string `json:"name"` + Keyword string `json:"keyword"` + Value string `json:"value"` + Template string `json:"template"` // the templating string used for sniper mode (usually "§") } func NewConfig(ctx context.Context, cancel context.CancelFunc) Config { var conf Config + conf.AutoCalibrationKeyword = "FUZZ" + conf.AutoCalibrationStrategy = "basic" conf.AutoCalibrationStrings = make([]string, 0) conf.CommandKeywords = make([]string, 0) conf.Context = ctx @@ -68,7 +78,7 @@ conf.Delay = optRange{0, 0, false, false} conf.DirSearchCompat = false conf.Extensions = make([]string, 0) - conf.Filters = make(map[string]FilterProvider) + conf.FilterMode = "or" conf.FollowRedirects = false conf.Headers = make(map[string]string) conf.IgnoreWordlistComments = false @@ -76,7 +86,8 @@ conf.InputNum = 0 conf.InputShell = "" conf.InputProviders = make([]InputProviderConfig, 0) - conf.Matchers = make(map[string]FilterProvider) + conf.Json = false + conf.MatcherMode = "or" conf.MaxTime = 0 conf.MaxTimeJob = 0 conf.Method = "GET" @@ -88,12 +99,14 @@ conf.Recursion = false conf.RecursionDepth = 0 conf.RecursionStrategy = "default" + conf.SNI = "" conf.StopOn403 = false conf.StopOnAll = false conf.StopOnErrors = false conf.Timeout = 10 conf.Url = "" conf.Verbose = false + conf.Http2 = false return conf } diff --git a/pkg/ffuf/interfaces.go b/pkg/ffuf/interfaces.go index 048b8c3..6879992 100644 --- a/pkg/ffuf/interfaces.go +++ b/pkg/ffuf/interfaces.go @@ -1,4 +1,21 @@ package ffuf + +import "time" + +//MatcherManager provides functions for managing matchers and filters +type MatcherManager interface { + SetCalibrated(calibrated bool) + SetCalibratedForHost(host string, calibrated bool) + AddFilter(name string, option string, replace bool) error + AddPerDomainFilter(domain 
string, name string, option string) error + RemoveFilter(name string) + AddMatcher(name string, option string) error + GetFilters() map[string]FilterProvider + GetMatchers() map[string]FilterProvider + FiltersForDomain(domain string) map[string]FilterProvider + CalibratedForDomain(domain string) bool + Calibrated() bool +} //FilterProvider is a generic interface for both Matchers and Filters type FilterProvider interface { @@ -9,13 +26,15 @@ //RunnerProvider is an interface for request executors type RunnerProvider interface { - Prepare(input map[string][]byte) (Request, error) + Prepare(input map[string][]byte, basereq *Request) (Request, error) Execute(req *Request) (Response, error) } //InputProvider interface handles the input data for RunnerProvider type InputProvider interface { + ActivateKeywords([]string) AddProvider(InputProviderConfig) error + Keywords() []string Next() bool Position() int Reset() @@ -32,6 +51,9 @@ IncrementPosition() Value() []byte Total() int + Active() bool + Enable() + Disable() } //OutputProvider is responsible of providing output from the RunnerProvider @@ -62,6 +84,7 @@ ContentType string `json:"content-type"` RedirectLocation string `json:"redirectlocation"` Url string `json:"url"` + Duration time.Duration `json:"duration"` ResultFile string `json:"resultfile"` Host string `json:"host"` HTMLColor string `json:"-"` diff --git a/pkg/ffuf/job.go b/pkg/ffuf/job.go index 00ee641..5bea32c 100644 --- a/pkg/ffuf/job.go +++ b/pkg/ffuf/job.go @@ -36,12 +36,14 @@ queuepos int skipQueue bool currentDepth int + calibMutex sync.Mutex pauseWg sync.WaitGroup } type QueueJob struct { Url string depth int + req Request } func NewJob(conf *Config) *Job { @@ -107,10 +109,22 @@ j.startTime = time.Now() } - // Add the default job to job queue - j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0}) + basereq := BaseRequest(j.Config) + + if j.Config.InputMode == "sniper" { + // process multiple payload locations and create a queue job 
for each location + reqs := SniperRequests(&basereq, j.Config.InputProviders[0].Template) + for _, r := range reqs { + j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: r}) + } + j.Total = j.Input.Total() * len(reqs) + } else { + // Add the default job to job queue + j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: BaseRequest(j.Config)}) + j.Total = j.Input.Total() + } + rand.Seed(time.Now().UnixNano()) - j.Total = j.Input.Total() defer j.Stop() j.Running = true @@ -154,6 +168,17 @@ func (j *Job) prepareQueueJob() { j.Config.Url = j.queuejobs[j.queuepos].Url j.currentDepth = j.queuejobs[j.queuepos].depth + + //Find all keywords present in new queued job + kws := j.Input.Keywords() + found_kws := make([]string, 0) + for _, k := range kws { + if RequestContainsKeyword(j.queuejobs[j.queuepos].req, k) { + found_kws = append(found_kws, k) + } + } + //And activate / disable inputproviders as needed + j.Input.ActivateKeywords(found_kws) j.queuepos += 1 } @@ -203,9 +228,13 @@ wg.Add(1) go j.runBackgroundTasks(&wg) - // Print the base URL when starting a new recursion queue job + // Print the base URL when starting a new recursion or sniper queue job if j.queuepos > 1 { - j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) + if j.Config.InputMode == "sniper" { + j.Output.Info(fmt.Sprintf("Starting queued sniper job (%d of %d) on target: %s", j.queuepos, len(j.queuejobs), j.Config.Url)) + } else { + j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) + } } //Limiter blocks after reaching the buffer, ensuring limited concurrency @@ -297,33 +326,59 @@ func (j *Job) isMatch(resp Response) bool { matched := false - for _, m := range j.Config.Matchers { + var matchers map[string]FilterProvider + var filters map[string]FilterProvider + if j.Config.AutoCalibrationPerHost { + filters = j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*resp.Request)) + } else { + 
filters = j.Config.MatcherManager.GetFilters() + } + matchers = j.Config.MatcherManager.GetMatchers() + for _, m := range matchers { match, err := m.Filter(&resp) if err != nil { continue } if match { matched = true + } else if j.Config.MatcherMode == "and" { + // we already know this isn't "and" match + return false + } } // The response was not matched, return before running filters if !matched { return false } - for _, f := range j.Config.Filters { + for _, f := range filters { fv, err := f.Filter(&resp) if err != nil { continue } if fv { - return false - } + // return false + if j.Config.FilterMode == "or" { + // return early, as filter matched + return false + } + } else { + if j.Config.FilterMode == "and" { + // return early as not all filters matched in "and" mode + return true + } + } + } + if len(filters) > 0 && j.Config.FilterMode == "and" { + // we did not return early, so all filters were matched + return false } return true } func (j *Job) runTask(input map[string][]byte, position int, retried bool) { - req, err := j.Runner.Prepare(input) + basereq := j.queuejobs[j.queuepos-1].req + req, err := j.Runner.Prepare(input, &basereq) req.Position = position if err != nil { j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err)) @@ -331,6 +386,7 @@ log.Printf("%s", err) return } + resp, err := j.Runner.Execute(&req) if err != nil { if retried { @@ -356,10 +412,15 @@ j.inc429() } } + j.pauseWg.Wait() + + // Handle autocalibration, must be done after the actual request to ensure sane value in req.Host + _ = j.CalibrateIfNeeded(HostURLFromRequest(req), input) + if j.isMatch(resp) { // Re-send request through replay-proxy if needed if j.ReplayRunner != nil { - replayreq, err := j.ReplayRunner.Prepare(input) + replayreq, err := j.ReplayRunner.Prepare(input, &basereq) replayreq.Position = position if err != nil { j.Output.Error(fmt.Sprintf("Encountered an error while preparing replayproxy request: %s\n", err)) @@ -370,6 +431,7 @@ } } 
j.Output.Result(resp) + // Refresh the progress indicator as we printed something out j.updateProgress() if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" { @@ -387,7 +449,7 @@ // Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { recUrl := resp.Request.Url + "/" + "FUZZ" - newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} + newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} j.queuejobs = append(j.queuejobs, newJob) j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) } else { @@ -405,52 +467,12 @@ } if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { // We have yet to reach the maximum recursion depth - newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} + newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} j.queuejobs = append(j.queuejobs, newJob) j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) } else { j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true))) } -} - -//CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests -func (j *Job) CalibrateResponses() ([]Response, error) { - cInputs := make([]string, 0) - rand.Seed(time.Now().UnixNano()) - if len(j.Config.AutoCalibrationStrings) < 1 { - cInputs = append(cInputs, "admin"+RandomString(16)+"/") - cInputs = append(cInputs, ".htaccess"+RandomString(16)) - cInputs = append(cInputs, RandomString(16)+"/") - cInputs = append(cInputs, RandomString(16)) - } else { - cInputs = append(cInputs, j.Config.AutoCalibrationStrings...) 
- } - - results := make([]Response, 0) - for _, input := range cInputs { - inputs := make(map[string][]byte, len(j.Config.InputProviders)) - for _, v := range j.Config.InputProviders { - inputs[v.Keyword] = []byte(input) - } - - req, err := j.Runner.Prepare(inputs) - if err != nil { - j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err)) - j.incError() - log.Printf("%s", err) - return results, err - } - resp, err := j.Runner.Execute(&req) - if err != nil { - return results, err - } - - // Only calibrate on responses that would be matched otherwise - if j.isMatch(resp) { - results = append(results, resp) - } - } - return results, nil } // CheckStop stops the job if stopping conditions are met diff --git a/pkg/ffuf/optionsparser.go b/pkg/ffuf/optionsparser.go index e04aa48..6d2dc98 100644 --- a/pkg/ffuf/optionsparser.go +++ b/pkg/ffuf/optionsparser.go @@ -37,27 +37,33 @@ RecursionDepth int RecursionStrategy string ReplayProxyURL string + SNI string Timeout int URL string + Http2 bool } type GeneralOptions struct { - AutoCalibration bool - AutoCalibrationStrings []string - Colors bool - ConfigFile string `toml:"-"` - Delay string - MaxTime int - MaxTimeJob int - Noninteractive bool - Quiet bool - Rate int - ShowVersion bool `toml:"-"` - StopOn403 bool - StopOnAll bool - StopOnErrors bool - Threads int - Verbose bool + AutoCalibration bool + AutoCalibrationKeyword string + AutoCalibrationPerHost bool + AutoCalibrationStrategy string + AutoCalibrationStrings []string + Colors bool + ConfigFile string `toml:"-"` + Delay string + Json bool + MaxTime int + MaxTimeJob int + Noninteractive bool + Quiet bool + Rate int + ShowVersion bool `toml:"-"` + StopOn403 bool + StopOnAll bool + StopOnErrors bool + Threads int + Verbose bool } type InputOptions struct { @@ -74,40 +80,49 @@ } type OutputOptions struct { - DebugLog string - OutputDirectory string - OutputFile string - OutputFormat string - OutputCreateEmptyFile bool + DebugLog string + 
OutputDirectory string + OutputFile string + OutputFormat string + OutputSkipEmptyFile bool } type FilterOptions struct { + Mode string Lines string Regexp string Size string Status string + Time string Words string } type MatcherOptions struct { + Mode string Lines string Regexp string Size string Status string + Time string Words string } //NewConfigOptions returns a newly created ConfigOptions struct with default values func NewConfigOptions() *ConfigOptions { c := &ConfigOptions{} + c.Filter.Mode = "or" c.Filter.Lines = "" c.Filter.Regexp = "" c.Filter.Size = "" c.Filter.Status = "" + c.Filter.Time = "" c.Filter.Words = "" c.General.AutoCalibration = false + c.General.AutoCalibrationKeyword = "FUZZ" + c.General.AutoCalibrationStrategy = "basic" c.General.Colors = false c.General.Delay = "" + c.General.Json = false c.General.MaxTime = 0 c.General.MaxTimeJob = 0 c.General.Noninteractive = false @@ -129,7 +144,9 @@ c.HTTP.RecursionStrategy = "default" c.HTTP.ReplayProxyURL = "" c.HTTP.Timeout = 10 + c.HTTP.SNI = "" c.HTTP.URL = "" + c.HTTP.Http2 = false c.Input.DirSearchCompat = false c.Input.Extensions = "" c.Input.IgnoreWordlistComments = false @@ -137,16 +154,18 @@ c.Input.InputNum = 100 c.Input.Request = "" c.Input.RequestProto = "https" + c.Matcher.Mode = "or" c.Matcher.Lines = "" c.Matcher.Regexp = "" c.Matcher.Size = "" - c.Matcher.Status = "200,204,301,302,307,401,403,405" + c.Matcher.Status = "200,204,301,302,307,401,403,405,500" + c.Matcher.Time = "" c.Matcher.Words = "" c.Output.DebugLog = "" c.Output.OutputDirectory = "" c.Output.OutputFile = "" c.Output.OutputFormat = "json" - c.Output.OutputCreateEmptyFile = false + c.Output.OutputSkipEmptyFile = false return c } @@ -175,6 +194,32 @@ } //Prepare inputproviders + conf.InputMode = parseOpts.Input.InputMode + + validmode := false + for _, mode := range []string{"clusterbomb", "pitchfork", "sniper"} { + if conf.InputMode == mode { + validmode = true + } + } + if !validmode { + errs.Add(fmt.Errorf("Input 
mode (-mode) %s not recognized", conf.InputMode)) + } + + template := "" + // sniper mode needs some additional checking + if conf.InputMode == "sniper" { + template = "§" + + if len(parseOpts.Input.Wordlists) > 1 { + errs.Add(fmt.Errorf("sniper mode only supports one wordlist")) + } + + if len(parseOpts.Input.Inputcommands) > 1 { + errs.Add(fmt.Errorf("sniper mode only supports one input command")) + } + } + for _, v := range parseOpts.Input.Wordlists { var wl []string if runtime.GOOS == "windows" { @@ -199,33 +244,44 @@ wl = strings.SplitN(v, ":", 2) } if len(wl) == 2 { + if conf.InputMode == "sniper" { + errs.Add(fmt.Errorf("sniper mode does not support wordlist keywords")) + } else { + conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ + Name: "wordlist", + Value: wl[0], + Keyword: wl[1], + }) + } + } else { conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ - Name: "wordlist", - Value: wl[0], - Keyword: wl[1], + Name: "wordlist", + Value: wl[0], + Keyword: "FUZZ", + Template: template, }) - } else { - conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ - Name: "wordlist", - Value: wl[0], - Keyword: "FUZZ", - }) - } - } + } + } + for _, v := range parseOpts.Input.Inputcommands { ic := strings.SplitN(v, ":", 2) if len(ic) == 2 { + if conf.InputMode == "sniper" { + errs.Add(fmt.Errorf("sniper mode does not support command keywords")) + } else { + conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ + Name: "command", + Value: ic[0], + Keyword: ic[1], + }) + conf.CommandKeywords = append(conf.CommandKeywords, ic[0]) + } + } else { conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ - Name: "command", - Value: ic[0], - Keyword: ic[1], - }) - conf.CommandKeywords = append(conf.CommandKeywords, ic[0]) - } else { - conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ - Name: "command", - Value: ic[0], - Keyword: "FUZZ", + Name: "command", + Value: ic[0], + 
Keyword: "FUZZ", + Template: template, }) conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ") } @@ -247,6 +303,11 @@ //Prepare URL if parseOpts.HTTP.URL != "" { conf.Url = parseOpts.HTTP.URL + } + + // Prepare SNI + if parseOpts.HTTP.SNI != "" { + conf.SNI = parseOpts.HTTP.SNI } //Prepare headers and make canonical @@ -378,11 +439,11 @@ conf.DirSearchCompat = parseOpts.Input.DirSearchCompat conf.Colors = parseOpts.General.Colors conf.InputNum = parseOpts.Input.InputNum - conf.InputMode = parseOpts.Input.InputMode + conf.InputShell = parseOpts.Input.InputShell conf.OutputFile = parseOpts.Output.OutputFile conf.OutputDirectory = parseOpts.Output.OutputDirectory - conf.OutputCreateEmptyFile = parseOpts.Output.OutputCreateEmptyFile + conf.OutputSkipEmptyFile = parseOpts.Output.OutputSkipEmptyFile conf.IgnoreBody = parseOpts.HTTP.IgnoreBody conf.Quiet = parseOpts.General.Quiet conf.StopOn403 = parseOpts.General.StopOn403 @@ -393,12 +454,44 @@ conf.RecursionDepth = parseOpts.HTTP.RecursionDepth conf.RecursionStrategy = parseOpts.HTTP.RecursionStrategy conf.AutoCalibration = parseOpts.General.AutoCalibration + conf.AutoCalibrationPerHost = parseOpts.General.AutoCalibrationPerHost + conf.AutoCalibrationStrategy = parseOpts.General.AutoCalibrationStrategy conf.Threads = parseOpts.General.Threads conf.Timeout = parseOpts.HTTP.Timeout conf.MaxTime = parseOpts.General.MaxTime conf.MaxTimeJob = parseOpts.General.MaxTimeJob conf.Noninteractive = parseOpts.General.Noninteractive conf.Verbose = parseOpts.General.Verbose + conf.Json = parseOpts.General.Json + conf.Http2 = parseOpts.HTTP.Http2 + + // Check that fmode and mmode have sane values + valid_opmodes := []string{"and", "or"} + fmode_found := false + mmode_found := false + for _, v := range valid_opmodes { + if v == parseOpts.Filter.Mode { + fmode_found = true + } + if v == parseOpts.Matcher.Mode { + mmode_found = true + } + } + if !fmode_found { + errmsg := fmt.Sprintf("Unrecognized value for parameter fmode: %s, 
valid values are: and, or", parseOpts.Filter.Mode) + errs.Add(fmt.Errorf(errmsg)) + } + if !mmode_found { + errmsg := fmt.Sprintf("Unrecognized value for parameter mmode: %s, valid values are: and, or", parseOpts.Matcher.Mode) + errs.Add(fmt.Errorf(errmsg)) + } + conf.FilterMode = parseOpts.Filter.Mode + conf.MatcherMode = parseOpts.Matcher.Mode + + if conf.AutoCalibrationPerHost { + // AutoCalibrationPerHost implies AutoCalibration + conf.AutoCalibration = true + } // Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP if len(conf.Data) > 0 && @@ -412,9 +505,16 @@ conf.CommandLine = strings.Join(os.Args, " ") for _, provider := range conf.InputProviders { - if !keywordPresent(provider.Keyword, &conf) { - errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword) - errs.Add(fmt.Errorf(errmsg)) + if provider.Template != "" { + if !templatePresent(provider.Template, &conf) { + errmsg := fmt.Sprintf("Template %s defined, but not found in pairs in headers, method, URL or POST data.", provider.Template) + errs.Add(fmt.Errorf(errmsg)) + } + } else { + if !keywordPresent(provider.Keyword, &conf) { + errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword) + errs.Add(fmt.Errorf(errmsg)) + } } } @@ -425,6 +525,12 @@ errs.Add(fmt.Errorf(errmsg)) } } + + // Make verbose mutually exclusive with json + if parseOpts.General.Verbose && parseOpts.General.Json { + errs.Add(fmt.Errorf("Cannot have -json and -v")) + } + return &conf, errs.ErrorOrNil() } @@ -490,6 +596,7 @@ conf.Data = string(b) // Remove newline (typically added by the editor) at the end of the file + //nolint:gosimple // we specifically want to remove just a single newline, not all of them if strings.HasSuffix(conf.Data, "\r\n") { conf.Data = conf.Data[:len(conf.Data)-2] } else if strings.HasSuffix(conf.Data, "\n") { @@ -518,6 +625,46 @@ } 
} return false +} + +func templatePresent(template string, conf *Config) bool { + // Search for input location identifiers, these must exist in pairs + sane := false + + if c := strings.Count(conf.Method, template); c > 0 { + if c%2 != 0 { + return false + } + sane = true + } + if c := strings.Count(conf.Url, template); c > 0 { + if c%2 != 0 { + return false + } + sane = true + } + if c := strings.Count(conf.Data, template); c > 0 { + if c%2 != 0 { + return false + } + sane = true + } + for k, v := range conf.Headers { + if c := strings.Count(k, template); c > 0 { + if c%2 != 0 { + return false + } + sane = true + } + if c := strings.Count(v, template); c > 0 { + if c%2 != 0 { + return false + } + sane = true + } + } + + return sane } func ReadConfig(configFile string) (*ConfigOptions, error) { diff --git a/pkg/ffuf/optionsparser_test.go b/pkg/ffuf/optionsparser_test.go new file mode 100644 index 0000000..2e9913b --- /dev/null +++ b/pkg/ffuf/optionsparser_test.go @@ -0,0 +1,85 @@ +package ffuf + +import ( + "testing" +) + +func TestTemplatePresent(t *testing.T) { + template := "§" + + headers := make(map[string]string) + headers["foo"] = "§bar§" + headers["omg"] = "bbq" + headers["§world§"] = "Ooo" + + goodConf := Config{ + Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", + Method: "PO§ST§", + Headers: headers, + Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", + } + + if !templatePresent(template, &goodConf) { + t.Errorf("Expected-good config failed validation") + } + + badConfMethod := Config{ + Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", + Method: "POST§", + Headers: headers, + Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§", + } + + if templatePresent(template, &badConfMethod) { + t.Errorf("Expected-bad config (Method) failed validation") + } + + badConfURL := Config{ + Url: 
"https://example.com/fooo/bar?test=§value§&order[0§]=§foo§", + Method: "§POST§", + Headers: headers, + Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§", + } + + if templatePresent(template, &badConfURL) { + t.Errorf("Expected-bad config (URL) failed validation") + } + + badConfData := Config{ + Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", + Method: "§POST§", + Headers: headers, + Data: "line=Can we pull back the §veil of §static§ and reach in to the source of §all§ being?&commit=§true§", + } + + if templatePresent(template, &badConfData) { + t.Errorf("Expected-bad config (Data) failed validation") + } + + headers["kingdom"] = "§candy" + + badConfHeaderValue := Config{ + Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", + Method: "PO§ST§", + Headers: headers, + Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", + } + + if templatePresent(template, &badConfHeaderValue) { + t.Errorf("Expected-bad config (Header value) failed validation") + } + + headers["kingdom"] = "candy" + headers["§kingdom"] = "candy" + + badConfHeaderKey := Config{ + Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§", + Method: "PO§ST§", + Headers: headers, + Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true", + } + + if templatePresent(template, &badConfHeaderKey) { + t.Errorf("Expected-bad config (Header key) failed validation") + } +} diff --git a/pkg/ffuf/request.go b/pkg/ffuf/request.go index ed3d3c4..8cce661 100644 --- a/pkg/ffuf/request.go +++ b/pkg/ffuf/request.go @@ -1,4 +1,8 @@ package ffuf + +import ( + "strings" +) // Request holds the meaningful data that is passed for runner for making the query type Request struct { @@ -19,3 +23,178 @@ req.Headers = make(map[string]string) return req } + +// BaseRequest returns a base request struct populated from the main 
config +func BaseRequest(conf *Config) Request { + req := NewRequest(conf) + req.Headers = conf.Headers + req.Data = []byte(conf.Data) + return req +} + +// RecursionRequest returns a base request for a recursion target +func RecursionRequest(conf *Config, path string) Request { + r := BaseRequest(conf) + r.Url = path + return r +} + +// CopyRequest performs a deep copy of a request and returns a new struct +func CopyRequest(basereq *Request) Request { + var req Request + req.Method = basereq.Method + req.Host = basereq.Host + req.Url = basereq.Url + + req.Headers = make(map[string]string, len(basereq.Headers)) + for h, v := range basereq.Headers { + req.Headers[h] = v + } + + req.Data = make([]byte, len(basereq.Data)) + copy(req.Data, basereq.Data) + + if len(basereq.Input) > 0 { + req.Input = make(map[string][]byte, len(basereq.Input)) + for k, v := range basereq.Input { + req.Input[k] = v + } + } + + req.Position = basereq.Position + req.Raw = basereq.Raw + + return req +} + +// SniperRequests returns an array of requests, each with one of the templated locations replaced by a keyword +func SniperRequests(basereq *Request, template string) []Request { + var reqs []Request + keyword := "FUZZ" + + // Search for input location identifiers, these must exist in pairs + if c := strings.Count(basereq.Method, template); c > 0 { + if c%2 == 0 { + tokens := templateLocations(template, basereq.Method) + + for i := 0; i < len(tokens); i = i + 2 { + newreq := CopyRequest(basereq) + newreq.Method = injectKeyword(basereq.Method, keyword, tokens[i], tokens[i+1]) + scrubTemplates(&newreq, template) + reqs = append(reqs, newreq) + } + } + } + + if c := strings.Count(basereq.Url, template); c > 0 { + if c%2 == 0 { + tokens := templateLocations(template, basereq.Url) + + for i := 0; i < len(tokens); i = i + 2 { + newreq := CopyRequest(basereq) + newreq.Url = injectKeyword(basereq.Url, keyword, tokens[i], tokens[i+1]) + scrubTemplates(&newreq, template) + reqs = append(reqs, newreq) 
+ } + } + } + + data := string(basereq.Data) + if c := strings.Count(data, template); c > 0 { + if c%2 == 0 { + tokens := templateLocations(template, data) + + for i := 0; i < len(tokens); i = i + 2 { + newreq := CopyRequest(basereq) + newreq.Data = []byte(injectKeyword(data, keyword, tokens[i], tokens[i+1])) + scrubTemplates(&newreq, template) + reqs = append(reqs, newreq) + } + } + } + + for k, v := range basereq.Headers { + if c := strings.Count(k, template); c > 0 { + if c%2 == 0 { + tokens := templateLocations(template, k) + + for i := 0; i < len(tokens); i = i + 2 { + newreq := CopyRequest(basereq) + newreq.Headers[injectKeyword(k, keyword, tokens[i], tokens[i+1])] = v + delete(newreq.Headers, k) + scrubTemplates(&newreq, template) + reqs = append(reqs, newreq) + } + } + } + if c := strings.Count(v, template); c > 0 { + if c%2 == 0 { + tokens := templateLocations(template, v) + + for i := 0; i < len(tokens); i = i + 2 { + newreq := CopyRequest(basereq) + newreq.Headers[k] = injectKeyword(v, keyword, tokens[i], tokens[i+1]) + scrubTemplates(&newreq, template) + reqs = append(reqs, newreq) + } + } + } + } + + return reqs +} + +// templateLocations returns an array of template character locations in input +func templateLocations(template string, input string) []int { + var tokens []int + + for k, i := range []rune(input) { + if i == []rune(template)[0] { + tokens = append(tokens, k) + } + } + + return tokens +} + +// injectKeyword takes a string, a keyword, and a start/end offset. 
The data between +// the start/end offset in string is removed, and replaced by keyword +func injectKeyword(input string, keyword string, startOffset int, endOffset int) string { + + // some basic sanity checking, return the original string unchanged if offsets didnt make sense + if startOffset > len(input) || endOffset > len(input) || startOffset > endOffset { + return input + } + + inputslice := []rune(input) + keywordslice := []rune(keyword) + + prefix := inputslice[:startOffset] + suffix := inputslice[endOffset+1:] + + inputslice = append(prefix, keywordslice...) + inputslice = append(inputslice, suffix...) + + return string(inputslice) +} + +// scrubTemplates removes all template (§) strings from the request struct +func scrubTemplates(req *Request, template string) { + req.Method = strings.Join(strings.Split(req.Method, template), "") + req.Url = strings.Join(strings.Split(req.Url, template), "") + req.Data = []byte(strings.Join(strings.Split(string(req.Data), template), "")) + + for k, v := range req.Headers { + if c := strings.Count(k, template); c > 0 { + if c%2 == 0 { + delete(req.Headers, k) + req.Headers[strings.Join(strings.Split(k, template), "")] = v + } + } + if c := strings.Count(v, template); c > 0 { + if c%2 == 0 { + req.Headers[k] = strings.Join(strings.Split(v, template), "") + } + } + } +} diff --git a/pkg/ffuf/request_test.go b/pkg/ffuf/request_test.go new file mode 100644 index 0000000..7c55f78 --- /dev/null +++ b/pkg/ffuf/request_test.go @@ -0,0 +1,246 @@ +package ffuf + +import ( + "reflect" + "testing" +) + +func TestBaseRequest(t *testing.T) { + headers := make(map[string]string) + headers["foo"] = "bar" + headers["baz"] = "wibble" + headers["Content-Type"] = "application/json" + + data := "{\"quote\":\"I'll still be here tomorrow to high five you yesterday, my friend. 
Peace.\"}" + + expectedreq := Request{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: []byte(data)} + config := Config{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: data} + basereq := BaseRequest(&config) + + if !reflect.DeepEqual(basereq, expectedreq) { + t.Errorf("BaseRequest does not return a struct with expected values") + } + +} + +func TestCopyRequest(t *testing.T) { + headers := make(map[string]string) + headers["foo"] = "bar" + headers["omg"] = "bbq" + + data := "line=Is+that+where+creativity+comes+from?+From+sad+biz?" + + input := make(map[string][]byte) + input["matthew"] = []byte("If you are the head that floats atop the §ziggurat§, then the stairs that lead to you must be infinite.") + + basereq := Request{Method: "POST", + Host: "testhost.local", + Url: "http://example.com/aaaa", + Headers: headers, + Data: []byte(data), + Input: input, + Position: 2, + Raw: "We're not oil and water, we're oil and vinegar! It's good. It's yummy.", + } + + copiedreq := CopyRequest(&basereq) + + if !reflect.DeepEqual(basereq, copiedreq) { + t.Errorf("CopyRequest does not return an equal struct") + } +} + +func TestSniperRequests(t *testing.T) { + headers := make(map[string]string) + headers["foo"] = "§bar§" + headers["§omg§"] = "bbq" + + testreq := Request{ + Method: "§POST§", + Url: "http://example.com/aaaa?param=§lemony§", + Headers: headers, + Data: []byte("line=§yo yo, it's grease§"), + } + + requests := SniperRequests(&testreq, "§") + + if len(requests) != 5 { + t.Errorf("SniperRequests returned an incorrect number of requests") + } + + headers = make(map[string]string) + headers["foo"] = "bar" + headers["omg"] = "bbq" + + var expected Request + expected = Request{ // Method + Method: "FUZZ", + Url: "http://example.com/aaaa?param=lemony", + Headers: headers, + Data: []byte("line=yo yo, it's grease"), + } + + pass := false + for _, req := range requests { + if reflect.DeepEqual(req, expected) { + pass = true + } + } + 
+ if !pass { + t.Errorf("SniperRequests does not return expected values (Method)") + } + + expected = Request{ // URL + Method: "POST", + Url: "http://example.com/aaaa?param=FUZZ", + Headers: headers, + Data: []byte("line=yo yo, it's grease"), + } + + pass = false + for _, req := range requests { + if reflect.DeepEqual(req, expected) { + pass = true + } + } + + if !pass { + t.Errorf("SniperRequests does not return expected values (Url)") + } + + expected = Request{ // Data + Method: "POST", + Url: "http://example.com/aaaa?param=lemony", + Headers: headers, + Data: []byte("line=FUZZ"), + } + + pass = false + for _, req := range requests { + if reflect.DeepEqual(req, expected) { + pass = true + } + } + + if !pass { + t.Errorf("SniperRequests does not return expected values (Data)") + } + + headers = make(map[string]string) + headers["foo"] = "FUZZ" + headers["omg"] = "bbq" + + expected = Request{ // Header value + Method: "POST", + Url: "http://example.com/aaaa?param=lemony", + Headers: headers, + Data: []byte("line=yo yo, it's grease"), + } + + pass = false + for _, req := range requests { + if reflect.DeepEqual(req, expected) { + pass = true + } + } + + if !pass { + t.Errorf("SniperRequests does not return expected values (Header value)") + } + + headers = make(map[string]string) + headers["foo"] = "bar" + headers["FUZZ"] = "bbq" + + expected = Request{ // Header key + Method: "POST", + Url: "http://example.com/aaaa?param=lemony", + Headers: headers, + Data: []byte("line=yo yo, it's grease"), + } + + pass = false + for _, req := range requests { + if reflect.DeepEqual(req, expected) { + pass = true + } + } + + if !pass { + t.Errorf("SniperRequests does not return expected values (Header key)") + } + +} + +func TestTemplateLocations(t *testing.T) { + test := "this is my 1§template locator§ test" + arr := templateLocations("§", test) + expected := []int{12, 29} + if !reflect.DeepEqual(arr, expected) { + t.Errorf("templateLocations does not return expected values") + 
} + + test2 := "§template locator§" + arr = templateLocations("§", test2) + expected = []int{0, 17} + if !reflect.DeepEqual(arr, expected) { + t.Errorf("templateLocations does not return expected values") + } + + if len(templateLocations("§", "te§st2")) != 1 { + t.Errorf("templateLocations does not return expected values") + } +} + +func TestInjectKeyword(t *testing.T) { + input := "§Greetings, creator§" + offsetTuple := templateLocations("§", input) + expected := "FUZZ" + + result := injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1]) + if result != expected { + t.Errorf("injectKeyword returned unexpected result: " + result) + } + + if injectKeyword(input, "FUZZ", -32, 44) != input { + t.Errorf("injectKeyword offset validation failed") + } + + if injectKeyword(input, "FUZZ", 12, 2) != input { + t.Errorf("injectKeyword offset validation failed") + } + + if injectKeyword(input, "FUZZ", 0, 25) != input { + t.Errorf("injectKeyword offset validation failed") + } + +} + +func TestScrubTemplates(t *testing.T) { + headers := make(map[string]string) + headers["foo"] = "§bar§" + headers["§omg§"] = "bbq" + + testreq := Request{Method: "§POST§", + Url: "http://example.com/aaaa?param=§lemony§", + Headers: headers, + Data: []byte("line=§yo yo, it's grease§"), + } + + headers = make(map[string]string) + headers["foo"] = "bar" + headers["omg"] = "bbq" + + expectedreq := Request{Method: "POST", + Url: "http://example.com/aaaa?param=lemony", + Headers: headers, + Data: []byte("line=yo yo, it's grease"), + } + + scrubTemplates(&testreq, "§") + + if !reflect.DeepEqual(testreq, expectedreq) { + t.Errorf("scrubTemplates does not return expected values") + } +} diff --git a/pkg/ffuf/response.go b/pkg/ffuf/response.go index aecfd2f..58f9f8d 100644 --- a/pkg/ffuf/response.go +++ b/pkg/ffuf/response.go @@ -3,6 +3,7 @@ import ( "net/http" "net/url" + "time" ) // Response struct holds the meaningful data returned from request and is meant for passing to filters @@ -18,6 +19,7 @@ 
Request *Request Raw string ResultFile string + Time time.Duration } // GetRedirectLocation returns the redirect location for a 3xx redirect HTTP response @@ -41,10 +43,38 @@ if err != nil { return redirectLocation } - redirectLocation = baseUrl.ResolveReference(redirectUrl).String() + if redirectUrl.IsAbs() && UrlEqual(redirectUrl, baseUrl) { + redirectLocation = redirectUrl.Scheme + "://" + + baseUrl.Host + redirectUrl.Path + } else { + redirectLocation = baseUrl.ResolveReference(redirectUrl).String() + } } return redirectLocation +} + +func UrlEqual(url1, url2 *url.URL) bool { + if url1.Hostname() != url2.Hostname() { + return false + } + if url1.Scheme != url2.Scheme { + return false + } + p1, p2 := getUrlPort(url1), getUrlPort(url2) + return p1 == p2 +} + +func getUrlPort(url *url.URL) string { + var portMap = map[string]string{ + "http": "80", + "https": "443", + } + p := url.Port() + if p == "" { + p = portMap[url.Scheme] + } + return p } func NewResponse(httpresp *http.Response, req *Request) Response { diff --git a/pkg/ffuf/util.go b/pkg/ffuf/util.go index e9a8aa9..183c635 100644 --- a/pkg/ffuf/util.go +++ b/pkg/ffuf/util.go @@ -3,7 +3,9 @@ import ( "fmt" "math/rand" + "net/url" "os" + "strings" ) //used for random string generation in calibration function @@ -43,6 +45,37 @@ return !md.IsDir() } +//RequestContainsKeyword checks if a keyword is present in any field of a request +func RequestContainsKeyword(req Request, kw string) bool { + if strings.Contains(req.Host, kw) { + return true + } + if strings.Contains(req.Url, kw) { + return true + } + if strings.Contains(req.Method, kw) { + return true + } + if strings.Contains(string(req.Data), kw) { + return true + } + for k, v := range req.Headers { + if strings.Contains(k, kw) || strings.Contains(v, kw) { + return true + } + } + return false +} + +//HostURLFromRequest gets a host + path without the filename or last part of the URL path +func HostURLFromRequest(req Request) string { + u, _ := 
url.Parse(req.Url) + u.Host = req.Host + pathparts := strings.Split(u.Path, "/") + trimpath := strings.TrimSpace(strings.Join(pathparts[:len(pathparts)-1], "/")) + return u.Host + trimpath +} + //Version returns the ffuf version string func Version() string { return fmt.Sprintf("%s%s", VERSION, VERSION_APPENDIX) diff --git a/pkg/ffuf/util_test.go b/pkg/ffuf/util_test.go new file mode 100644 index 0000000..4dc21fe --- /dev/null +++ b/pkg/ffuf/util_test.go @@ -0,0 +1,26 @@ +package ffuf + +import ( + "math/rand" + "testing" +) + +func TestRandomString(t *testing.T) { + length := 1 + rand.Intn(65535) + str := RandomString(length) + + if len(str) != length { + t.Errorf("Length of generated string was %d, was expecting %d", len(str), length) + } +} + +func TestUniqStringSlice(t *testing.T) { + slice := []string{"foo", "foo", "bar", "baz", "baz", "foo", "baz", "baz", "foo"} + expectedLength := 3 + + uniqSlice := UniqStringSlice(slice) + + if len(uniqSlice) != expectedLength { + t.Errorf("Length of slice was %d, was expecting %d", len(uniqSlice), expectedLength) + } +} diff --git a/pkg/ffuf/version.go b/pkg/ffuf/version.go index 70cd682..6fc2f9f 100644 --- a/pkg/ffuf/version.go +++ b/pkg/ffuf/version.go @@ -2,7 +2,7 @@ var ( //VERSION holds the current version number - VERSION = "1.3.1" + VERSION = "1.5.0" //VERSION_APPENDIX holds additional version definition VERSION_APPENDIX = "-exclusive-dev" ) diff --git a/pkg/filter/filter.go b/pkg/filter/filter.go index c1a1e57..72e73d6 100644 --- a/pkg/filter/filter.go +++ b/pkg/filter/filter.go @@ -1,13 +1,55 @@ package filter import ( - "flag" "fmt" - "strconv" - "strings" + "github.com/ffuf/ffuf/pkg/ffuf" + "sync" +) - "github.com/ffuf/ffuf/pkg/ffuf" -) +// MatcherManager handles both filters and matchers. 
+type MatcherManager struct { + IsCalibrated bool + Mutex sync.Mutex + Matchers map[string]ffuf.FilterProvider + Filters map[string]ffuf.FilterProvider + PerDomainFilters map[string]*PerDomainFilter +} + +type PerDomainFilter struct { + IsCalibrated bool + Filters map[string]ffuf.FilterProvider +} + +func NewPerDomainFilter(globfilters map[string]ffuf.FilterProvider) *PerDomainFilter { + return &PerDomainFilter{IsCalibrated: false, Filters: globfilters} +} + +func (p *PerDomainFilter) SetCalibrated(value bool) { + p.IsCalibrated = value +} + +func NewMatcherManager() ffuf.MatcherManager { + return &MatcherManager{ + IsCalibrated: false, + Matchers: make(map[string]ffuf.FilterProvider), + Filters: make(map[string]ffuf.FilterProvider), + PerDomainFilters: make(map[string]*PerDomainFilter), + } +} + +func (f *MatcherManager) SetCalibrated(value bool) { + f.IsCalibrated = value +} + +func (f *MatcherManager) SetCalibratedForHost(host string, value bool) { + if f.PerDomainFilters[host] != nil { + f.PerDomainFilters[host].IsCalibrated = value + } else { + newFilter := NewPerDomainFilter(f.Filters) + newFilter.IsCalibrated = true + f.PerDomainFilters[host] = newFilter + } +} func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) { if name == "status" { @@ -25,185 +67,108 @@ if name == "regexp" { return NewRegexpFilter(value) } + if name == "time" { + return NewTimeFilter(value) + } return nil, fmt.Errorf("Could not create filter with name %s", name) } -//AddFilter adds a new filter to Config -func AddFilter(conf *ffuf.Config, name string, option string) error { +//AddFilter adds a new filter to MatcherManager +func (f *MatcherManager) AddFilter(name string, option string, replace bool) error { + f.Mutex.Lock() + defer f.Mutex.Unlock() newf, err := NewFilterByName(name, option) if err == nil { // valid filter create or append - if conf.Filters[name] == nil { - conf.Filters[name] = newf + if f.Filters[name] == nil || replace { + f.Filters[name] = newf 
} else { - newoption := conf.Filters[name].Repr() + "," + option + newoption := f.Filters[name].Repr() + "," + option newerf, err := NewFilterByName(name, newoption) if err == nil { - conf.Filters[name] = newerf + f.Filters[name] = newerf } } } return err } +//AddPerDomainFilter adds a new filter to PerDomainFilter configuration +func (f *MatcherManager) AddPerDomainFilter(domain string, name string, option string) error { + f.Mutex.Lock() + defer f.Mutex.Unlock() + var pdFilters *PerDomainFilter + if filter, ok := f.PerDomainFilters[domain]; ok { + pdFilters = filter + } else { + pdFilters = NewPerDomainFilter(f.Filters) + } + newf, err := NewFilterByName(name, option) + if err == nil { + // valid filter create or append + if pdFilters.Filters[name] == nil { + pdFilters.Filters[name] = newf + } else { + newoption := pdFilters.Filters[name].Repr() + "," + option + newerf, err := NewFilterByName(name, newoption) + if err == nil { + pdFilters.Filters[name] = newerf + } + } + } + f.PerDomainFilters[domain] = pdFilters + return err +} + //RemoveFilter removes a filter of a given type -func RemoveFilter(conf *ffuf.Config, name string) { - delete(conf.Filters, name) +func (f *MatcherManager) RemoveFilter(name string) { + f.Mutex.Lock() + defer f.Mutex.Unlock() + delete(f.Filters, name) } //AddMatcher adds a new matcher to Config -func AddMatcher(conf *ffuf.Config, name string, option string) error { +func (f *MatcherManager) AddMatcher(name string, option string) error { + f.Mutex.Lock() + defer f.Mutex.Unlock() newf, err := NewFilterByName(name, option) if err == nil { - conf.Matchers[name] = newf + // valid filter create or append + if f.Matchers[name] == nil { + f.Matchers[name] = newf + } else { + newoption := f.Matchers[name].Repr() + "," + option + newerf, err := NewFilterByName(name, newoption) + if err == nil { + f.Matchers[name] = newerf + } + } } return err } -//CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting 
random resources and acting accordingly -func CalibrateIfNeeded(j *ffuf.Job) error { - var err error - if !j.Config.AutoCalibration { - return nil - } - // Handle the calibration - responses, err := j.CalibrateResponses() - if err != nil { - return err - } - if len(responses) > 0 { - err = calibrateFilters(j, responses) - } - return err +func (f *MatcherManager) GetFilters() map[string]ffuf.FilterProvider { + return f.Filters } -func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) error { - sizeCalib := make([]string, 0) - wordCalib := make([]string, 0) - lineCalib := make([]string, 0) - for _, r := range responses { - if r.ContentLength > 0 { - // Only add if we have an actual size of responses - sizeCalib = append(sizeCalib, strconv.FormatInt(r.ContentLength, 10)) - } - if r.ContentWords > 0 { - // Only add if we have an actual word length of response - wordCalib = append(wordCalib, strconv.FormatInt(r.ContentWords, 10)) - } - if r.ContentLines > 1 { - // Only add if we have an actual word length of response - lineCalib = append(lineCalib, strconv.FormatInt(r.ContentLines, 10)) - } - } - - //Remove duplicates - sizeCalib = ffuf.UniqStringSlice(sizeCalib) - wordCalib = ffuf.UniqStringSlice(wordCalib) - lineCalib = ffuf.UniqStringSlice(lineCalib) - - if len(sizeCalib) > 0 { - err := AddFilter(j.Config, "size", strings.Join(sizeCalib, ",")) - if err != nil { - return err - } - } - if len(wordCalib) > 0 { - err := AddFilter(j.Config, "word", strings.Join(wordCalib, ",")) - if err != nil { - return err - } - } - if len(lineCalib) > 0 { - err := AddFilter(j.Config, "line", strings.Join(lineCalib, ",")) - if err != nil { - return err - } - } - return nil +func (f *MatcherManager) GetMatchers() map[string]ffuf.FilterProvider { + return f.Matchers } -func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { - errs := ffuf.NewMultierror() - // If any other matcher is set, ignore -mc default value - matcherSet := false - statusSet := false - 
warningIgnoreBody := false - flag.Visit(func(f *flag.Flag) { - if f.Name == "mc" { - statusSet = true - } - if f.Name == "ms" { - matcherSet = true - warningIgnoreBody = true - } - if f.Name == "ml" { - matcherSet = true - warningIgnoreBody = true - } - if f.Name == "mr" { - matcherSet = true - } - if f.Name == "mw" { - matcherSet = true - warningIgnoreBody = true - } - }) - if statusSet || !matcherSet { - if err := AddMatcher(conf, "status", parseOpts.Matcher.Status); err != nil { - errs.Add(err) - } +func (f *MatcherManager) FiltersForDomain(domain string) map[string]ffuf.FilterProvider { + if f.PerDomainFilters[domain] == nil { + return f.Filters } + return f.PerDomainFilters[domain].Filters +} - if parseOpts.Filter.Status != "" { - if err := AddFilter(conf, "status", parseOpts.Filter.Status); err != nil { - errs.Add(err) - } +func (f *MatcherManager) CalibratedForDomain(domain string) bool { + if f.PerDomainFilters[domain] != nil { + return f.PerDomainFilters[domain].IsCalibrated } - if parseOpts.Filter.Size != "" { - warningIgnoreBody = true - if err := AddFilter(conf, "size", parseOpts.Filter.Size); err != nil { - errs.Add(err) - } - } - if parseOpts.Filter.Regexp != "" { - if err := AddFilter(conf, "regexp", parseOpts.Filter.Regexp); err != nil { - errs.Add(err) - } - } - if parseOpts.Filter.Words != "" { - warningIgnoreBody = true - if err := AddFilter(conf, "word", parseOpts.Filter.Words); err != nil { - errs.Add(err) - } - } - if parseOpts.Filter.Lines != "" { - warningIgnoreBody = true - if err := AddFilter(conf, "line", parseOpts.Filter.Lines); err != nil { - errs.Add(err) - } - } - if parseOpts.Matcher.Size != "" { - if err := AddMatcher(conf, "size", parseOpts.Matcher.Size); err != nil { - errs.Add(err) - } - } - if parseOpts.Matcher.Regexp != "" { - if err := AddMatcher(conf, "regexp", parseOpts.Matcher.Regexp); err != nil { - errs.Add(err) - } - } - if parseOpts.Matcher.Words != "" { - if err := AddMatcher(conf, "word", parseOpts.Matcher.Words); err 
!= nil { - errs.Add(err) - } - } - if parseOpts.Matcher.Lines != "" { - if err := AddMatcher(conf, "line", parseOpts.Matcher.Lines); err != nil { - errs.Add(err) - } - } - if conf.IgnoreBody && warningIgnoreBody { - fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") - } - return errs.ErrorOrNil() + return false } + +func (f *MatcherManager) Calibrated() bool { + return f.IsCalibrated +} diff --git a/pkg/filter/filter_test.go b/pkg/filter/filter_test.go index 55e097f..331261f 100644 --- a/pkg/filter/filter_test.go +++ b/pkg/filter/filter_test.go @@ -29,6 +29,11 @@ if _, ok := ref.(*RegexpFilter); !ok { t.Errorf("Was expecting regexpfilter") } + + tf, _ := NewFilterByName("time", "200") + if _, ok := tf.(*TimeFilter); !ok { + t.Errorf("Was expecting timefilter") + } } func TestNewFilterByNameError(t *testing.T) { diff --git a/pkg/filter/time.go b/pkg/filter/time.go new file mode 100755 index 0000000..1041708 --- /dev/null +++ b/pkg/filter/time.go @@ -0,0 +1,66 @@ +package filter + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + + "github.com/ffuf/ffuf/pkg/ffuf" +) + +type TimeFilter struct { + ms int64 // milliseconds since first response byte + gt bool // filter if response time is greater than + lt bool // filter if response time is less than + valueRaw string +} + +func NewTimeFilter(value string) (ffuf.FilterProvider, error) { + var milliseconds int64 + gt, lt := false, false + + gt = strings.HasPrefix(value, ">") + lt = strings.HasPrefix(value, "<") + + if (!lt && !gt) || (lt && gt) { + return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) + } + + milliseconds, err := strconv.ParseInt(value[1:], 10, 64) + if err != nil { + return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) + } + return &TimeFilter{ms: milliseconds, gt: gt, lt: lt, valueRaw: value}, nil +} + +func (f *TimeFilter) 
MarshalJSON() ([]byte, error) { + return json.Marshal(&struct { + Value string `json:"value"` + }{ + Value: f.valueRaw, + }) +} + +func (f *TimeFilter) Filter(response *ffuf.Response) (bool, error) { + if f.gt { + if response.Time.Milliseconds() > f.ms { + return true, nil + } + + } else if f.lt { + if response.Time.Milliseconds() < f.ms { + return true, nil + } + } + + return false, nil +} + +func (f *TimeFilter) Repr() string { + return f.valueRaw +} + +func (f *TimeFilter) ReprVerbose() string { + return fmt.Sprintf("Response time: %s", f.Repr()) +} diff --git a/pkg/filter/time_test.go b/pkg/filter/time_test.go new file mode 100755 index 0000000..03e1b8a --- /dev/null +++ b/pkg/filter/time_test.go @@ -0,0 +1,54 @@ +package filter + +import ( + "testing" + "time" + + "github.com/ffuf/ffuf/pkg/ffuf" +) + +func TestNewTimeFilter(t *testing.T) { + fp, _ := NewTimeFilter(">100") + + f := fp.(*TimeFilter) + + if !f.gt || f.lt { + t.Errorf("Time filter was expected to have greater-than") + } + + if f.ms != 100 { + t.Errorf("Time filter was expected to have ms == 100") + } +} + +func TestNewTimeFilterError(t *testing.T) { + _, err := NewTimeFilter("100>") + if err == nil { + t.Errorf("Was expecting an error from erroneous input data") + } +} + +func TestTimeFiltering(t *testing.T) { + f, _ := NewTimeFilter(">100") + + for i, test := range []struct { + input int64 + output bool + }{ + {1342, true}, + {2000, true}, + {35000, true}, + {1458700, true}, + {99, false}, + {2, false}, + } { + resp := ffuf.Response{ + Data: []byte("dahhhhhtaaaaa"), + Time: time.Duration(test.input * int64(time.Millisecond)), + } + filterReturn, _ := f.Filter(&resp) + if filterReturn != test.output { + t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn) + } + } +} diff --git a/pkg/input/command.go b/pkg/input/command.go index 2e72199..c6aa059 100644 --- a/pkg/input/command.go +++ b/pkg/input/command.go @@ -12,6 +12,7 @@ type CommandInput 
struct { config *ffuf.Config count int + active bool keyword string command string shell string @@ -19,6 +20,7 @@ func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) { var cmd CommandInput + cmd.active = true cmd.keyword = keyword cmd.config = conf cmd.count = 0 @@ -74,3 +76,15 @@ func (c *CommandInput) Total() int { return c.config.InputNum } + +func (c *CommandInput) Active() bool { + return c.active +} + +func (c *CommandInput) Enable() { + c.active = true +} + +func (c *CommandInput) Disable() { + c.active = false +} diff --git a/pkg/input/input.go b/pkg/input/input.go index 14ba58e..f5502c2 100644 --- a/pkg/input/input.go +++ b/pkg/input/input.go @@ -16,7 +16,7 @@ func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, ffuf.Multierror) { validmode := false errs := ffuf.NewMultierror() - for _, mode := range []string{"clusterbomb", "pitchfork"} { + for _, mode := range []string{"clusterbomb", "pitchfork", "sniper"} { if conf.InputMode == mode { validmode = true } @@ -51,9 +51,29 @@ return nil } +// ActivateKeywords enables / disables wordlists based on list of active keywords +func (i *MainInputProvider) ActivateKeywords(kws []string) { + for _, p := range i.Providers { + if sliceContains(kws, p.Keyword()) { + p.Active() + } else { + p.Disable() + } + } +} + //Position will return the current position of progress func (i *MainInputProvider) Position() int { return i.position +} + +//Keywords returns a slice of all keywords in the inputprovider +func (i *MainInputProvider) Keywords() []string { + kws := make([]string, 0) + for _, p := range i.Providers { + kws = append(kws, p.Keyword()) + } + return kws } //Next will increment the cursor position, and return a boolean telling if there's inputs left @@ -68,7 +88,7 @@ //Value returns a map of inputs for keywords func (i *MainInputProvider) Value() map[string][]byte { retval := make(map[string][]byte) - if i.Config.InputMode == "clusterbomb" { + if i.Config.InputMode 
== "clusterbomb" || i.Config.InputMode == "sniper" { retval = i.clusterbombValue() } if i.Config.InputMode == "pitchfork" { @@ -91,6 +111,10 @@ func (i *MainInputProvider) pitchforkValue() map[string][]byte { values := make(map[string][]byte) for _, p := range i.Providers { + if !p.Active() { + // The inputprovider is disabled + continue + } if !p.Next() { // Loop to beginning if the inputprovider has been exhausted p.ResetPosition() @@ -108,7 +132,11 @@ // Should we signal the next InputProvider in the slice to increment signalNext := false first := true - for index, p := range i.Providers { + index := 0 + for _, p := range i.Providers { + if !p.Active() { + continue + } if signalNext { p.IncrementPosition() signalNext = false @@ -130,18 +158,24 @@ p.IncrementPosition() first = false } + index += 1 } return values } func (i *MainInputProvider) clusterbombIteratorReset() { - for index, p := range i.Providers { + index := 0 + for _, p := range i.Providers { + if !p.Active() { + continue + } if index < i.msbIterator { p.ResetPosition() } if index == i.msbIterator { p.IncrementPosition() } + index += 1 } } @@ -150,16 +184,32 @@ count := 0 if i.Config.InputMode == "pitchfork" { for _, p := range i.Providers { + if !p.Active() { + continue + } if p.Total() > count { count = p.Total() } } } - if i.Config.InputMode == "clusterbomb" { + if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { count = 1 for _, p := range i.Providers { + if !p.Active() { + continue + } count = count * p.Total() } } return count } + +//sliceContains is a helper function that returns true if a string is included in a string slice +func sliceContains(sslice []string, str string) bool { + for _, v := range sslice { + if v == str { + return true + } + } + return false +} diff --git a/pkg/input/wordlist.go b/pkg/input/wordlist.go index 7fc297e..f22dfd9 100644 --- a/pkg/input/wordlist.go +++ b/pkg/input/wordlist.go @@ -10,6 +10,7 @@ ) type WordlistInput struct { + active bool 
config *ffuf.Config data [][]byte position int @@ -18,6 +19,7 @@ func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*WordlistInput, error) { var wl WordlistInput + wl.active = true wl.keyword = keyword wl.config = conf wl.position = 0 @@ -55,7 +57,7 @@ return w.keyword } -//Next will increment the cursor position, and return a boolean telling if there's words left in the list +//Next will return a boolean telling if there's words left in the list func (w *WordlistInput) Next() bool { return w.position < len(w.data) } @@ -73,6 +75,21 @@ //Total returns the size of wordlist func (w *WordlistInput) Total() int { return len(w.data) +} + +//Active returns boolean if the inputprovider is active +func (w *WordlistInput) Active() bool { + return w.active +} + +//Enable sets the inputprovider as active +func (w *WordlistInput) Enable() { + w.active = true +} + +//Disable disables the inputprovider +func (w *WordlistInput) Disable() { + w.active = false } //validFile checks that the wordlist file exists and can be read diff --git a/pkg/input/wordlist_test.go b/pkg/input/wordlist_test.go new file mode 100644 index 0000000..0938bcd --- /dev/null +++ b/pkg/input/wordlist_test.go @@ -0,0 +1,21 @@ +package input + +import ( + "testing" +) + +func TestStripCommentsIgnoresCommentLines(t *testing.T) { + text, _ := stripComments("# text") + + if text != "" { + t.Errorf("Returned text was not a blank string") + } +} + +func TestStripCommentsStripsCommentAfterText(t *testing.T) { + text, _ := stripComments("text # comment") + + if text != "text" { + t.Errorf("Comment was not stripped or pre-comment text was not returned") + } +} diff --git a/pkg/interactive/termhandler.go b/pkg/interactive/termhandler.go index bade761..5846bf4 100644 --- a/pkg/interactive/termhandler.go +++ b/pkg/interactive/termhandler.go @@ -3,11 +3,11 @@ import ( "bufio" "fmt" - "github.com/ffuf/ffuf/pkg/ffuf" - "github.com/ffuf/ffuf/pkg/filter" "strconv" "strings" "time" + + 
"github.com/ffuf/ffuf/pkg/ffuf" ) type interactive struct { @@ -80,35 +80,89 @@ } else if len(args) > 2 { i.Job.Output.Error("Too many arguments for \"fc\"") } else { - i.updateFilter("status", args[1]) + i.updateFilter("status", args[1], true) i.Job.Output.Info("New status code filter value set") } + case "afc": + if len(args) < 2 { + i.Job.Output.Error("Please define a value to append to status code filter") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"afc\"") + } else { + i.appendFilter("status", args[1]) + i.Job.Output.Info("New status code filter value set") + } case "fl": if len(args) < 2 { i.Job.Output.Error("Please define a value for line count filter, or \"none\" for removing it") } else if len(args) > 2 { i.Job.Output.Error("Too many arguments for \"fl\"") } else { - i.updateFilter("line", args[1]) + i.updateFilter("line", args[1], true) i.Job.Output.Info("New line count filter value set") } + case "afl": + if len(args) < 2 { + i.Job.Output.Error("Please define a value to append to line count filter") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"afl\"") + } else { + i.appendFilter("line", args[1]) + i.Job.Output.Info("New line count filter value set") + } case "fw": if len(args) < 2 { i.Job.Output.Error("Please define a value for word count filter, or \"none\" for removing it") } else if len(args) > 2 { i.Job.Output.Error("Too many arguments for \"fw\"") } else { - i.updateFilter("word", args[1]) + i.updateFilter("word", args[1], true) i.Job.Output.Info("New word count filter value set") } + case "afw": + if len(args) < 2 { + i.Job.Output.Error("Please define a value to append to word count filter") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"afw\"") + } else { + i.appendFilter("word", args[1]) + i.Job.Output.Info("New word count filter value set") + } case "fs": if len(args) < 2 { i.Job.Output.Error("Please define a value for response size filter, or \"none\" for 
removing it") } else if len(args) > 2 { i.Job.Output.Error("Too many arguments for \"fs\"") } else { - i.updateFilter("size", args[1]) + i.updateFilter("size", args[1], true) i.Job.Output.Info("New response size filter value set") + } + case "afs": + if len(args) < 2 { + i.Job.Output.Error("Please define a value to append to size filter") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"afs\"") + } else { + i.appendFilter("size", args[1]) + i.Job.Output.Info("New response size filter value set") + } + case "ft": + if len(args) < 2 { + i.Job.Output.Error("Please define a value for response time filter, or \"none\" for removing it") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"ft\"") + } else { + i.updateFilter("time", args[1], true) + i.Job.Output.Info("New response time filter value set") + } + case "aft": + if len(args) < 2 { + i.Job.Output.Error("Please define a value to append to response time filter") + } else if len(args) > 2 { + i.Job.Output.Error("Too many arguments for \"aft\"") + } else { + i.appendFilter("time", args[1]) + i.Job.Output.Info("New response time filter value set") } case "queueshow": i.printQueue() @@ -137,19 +191,10 @@ } } -func (i *interactive) updateFilter(name, value string) { - if value == "none" { - filter.RemoveFilter(i.Job.Config, name) - } else { - newFc, err := filter.NewFilterByName(name, value) - if err != nil { - i.Job.Output.Error(fmt.Sprintf("Error while setting new filter value: %s", err)) - return - } else { - i.Job.Config.Filters[name] = newFc - } - - results := make([]ffuf.Result, 0) +func (i *interactive) refreshResults() { + results := make([]ffuf.Result, 0) + filters := i.Job.Config.MatcherManager.GetFilters() + for _, filter := range filters { for _, res := range i.Job.Output.GetCurrentResults() { fakeResp := &ffuf.Response{ StatusCode: res.StatusCode, @@ -157,18 +202,31 @@ ContentWords: res.ContentWords, ContentLength: res.ContentLength, } - filterOut, _ := 
newFc.Filter(fakeResp) + filterOut, _ := filter.Filter(fakeResp) if !filterOut { results = append(results, res) } } - i.Job.Output.SetCurrentResults(results) - } + } + i.Job.Output.SetCurrentResults(results) +} + +func (i *interactive) updateFilter(name, value string, replace bool) { + if value == "none" { + i.Job.Config.MatcherManager.RemoveFilter(name) + } else { + _ = i.Job.Config.MatcherManager.AddFilter(name, value, replace) + } + i.refreshResults() +} + +func (i *interactive) appendFilter(name, value string) { + i.updateFilter(name, value, false) } func (i *interactive) printQueue() { if len(i.Job.QueuedJobs()) > 0 { - i.Job.Output.Raw("Queued recursion jobs:\n") + i.Job.Output.Raw("Queued jobs:\n") for index, job := range i.Job.QueuedJobs() { postfix := "" if index == 0 { @@ -177,7 +235,7 @@ i.Job.Output.Raw(fmt.Sprintf(" [%d] : %s%s\n", index, job.Url, postfix)) } } else { - i.Job.Output.Info("Recursion job queue is empty") + i.Job.Output.Info("Job queue is empty") } } @@ -192,7 +250,7 @@ i.Job.Output.Warning("Cannot delete the currently running job. 
Use \"queueskip\" to advance to the next one") } else { i.Job.DeleteQueueItem(index) - i.Job.Output.Info("Recursion job successfully deleted!") + i.Job.Output.Info("Job successfully deleted!") } } } @@ -205,8 +263,8 @@ } func (i *interactive) printHelp() { - var fc, fl, fs, fw string - for name, filter := range i.Job.Config.Filters { + var fc, fl, fs, ft, fw string + for name, filter := range i.Job.Config.MatcherManager.GetFilters() { switch name { case "status": fc = "(active: " + filter.Repr() + ")" @@ -216,22 +274,30 @@ fw = "(active: " + filter.Repr() + ")" case "size": fs = "(active: " + filter.Repr() + ")" + case "time": + ft = "(active: " + filter.Repr() + ")" } } help := ` available commands: - fc [value] - (re)configure status code filter %s - fl [value] - (re)configure line count filter %s - fw [value] - (re)configure word count filter %s - fs [value] - (re)configure size filter %s - queueshow - show recursive job queue - queuedel [number] - delete a recursion job in the queue - queueskip - advance to the next queued recursion job + afc [value] - append to status code filter %s + fc [value] - (re)configure status code filter %s + afl [value] - append to line count filter %s + fl [value] - (re)configure line count filter %s + afw [value] - append to word count filter %s + fw [value] - (re)configure word count filter %s + afs [value] - append to size filter %s + fs [value] - (re)configure size filter %s + aft [value] - append to time filter %s + ft [value] - (re)configure time filter %s + queueshow - show job queue + queuedel [number] - delete a job in the queue + queueskip - advance to the next queued job restart - restart and resume the current ffuf job resume - resume current ffuf job (or: ENTER) show - show results for the current job savejson [filename] - save current matches to a file help - you are looking at it ` - i.Job.Output.Raw(fmt.Sprintf(help, fc, fl, fw, fs)) -} + i.Job.Output.Raw(fmt.Sprintf(help, fc, fc, fl, fl, fw, fw, fs, fs, ft, ft)) +} 
diff --git a/pkg/output/file_csv.go b/pkg/output/file_csv.go index 70f0dee..3679708 100644 --- a/pkg/output/file_csv.go +++ b/pkg/output/file_csv.go @@ -9,14 +9,9 @@ "github.com/ffuf/ffuf/pkg/ffuf" ) -var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "resultfile"} +var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "duration", "resultfile"} func writeCSV(filename string, config *ffuf.Config, res []ffuf.Result, encode bool) error { - - if config.OutputCreateEmptyFile && (len(res) == 0) { - return nil - } - header := make([]string, 0) f, err := os.Create(filename) if err != nil { @@ -69,6 +64,7 @@ res = append(res, strconv.FormatInt(r.ContentWords, 10)) res = append(res, strconv.FormatInt(r.ContentLines, 10)) res = append(res, r.ContentType) + res = append(res, r.Duration.String()) res = append(res, r.ResultFile) return res } diff --git a/pkg/output/file_csv_test.go b/pkg/output/file_csv_test.go new file mode 100644 index 0000000..a858ece --- /dev/null +++ b/pkg/output/file_csv_test.go @@ -0,0 +1,44 @@ +package output + +import ( + "reflect" + "testing" + "time" + + "github.com/ffuf/ffuf/pkg/ffuf" +) + +func TestToCSV(t *testing.T) { + result := ffuf.Result{ + Input: map[string][]byte{"x": {66}}, + Position: 1, + StatusCode: 200, + ContentLength: 3, + ContentWords: 4, + ContentLines: 5, + ContentType: "application/json", + RedirectLocation: "http://no.pe", + Url: "http://as.df", + Duration: time.Duration(123), + ResultFile: "resultfile", + Host: "host", + } + + csv := toCSV(result) + + if !reflect.DeepEqual(csv, []string{ + "B", + "http://as.df", + "http://no.pe", + "1", + "200", + "3", + "4", + "5", + "application/json", + "123ns", + "resultfile"}) { + + t.Errorf("CSV was not generated in expected format") + } +} diff --git a/pkg/output/file_html.go 
b/pkg/output/file_html.go index 2f6bcf2..325a4dd 100644 --- a/pkg/output/file_html.go +++ b/pkg/output/file_html.go @@ -78,6 +78,7 @@ Words Lines Type + Duration Resultfile @@ -99,6 +100,7 @@ {{ $result.ContentWords }} {{ $result.ContentLines }} {{ $result.ContentType }} + {{ $result.Duration }} {{ $result.ResultFile }} {{ end }} @@ -177,11 +179,6 @@ } func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) error { - - if config.OutputCreateEmptyFile && (len(results) == 0) { - return nil - } - results = colorizeResults(results) ti := time.Now() diff --git a/pkg/output/file_json.go b/pkg/output/file_json.go index cf495b2..61f5cc9 100644 --- a/pkg/output/file_json.go +++ b/pkg/output/file_json.go @@ -24,6 +24,7 @@ ContentLines int64 `json:"lines"` ContentType string `json:"content-type"` RedirectLocation string `json:"redirectlocation"` + Duration time.Duration `json:"duration"` ResultFile string `json:"resultfile"` Url string `json:"url"` Host string `json:"host"` @@ -37,11 +38,6 @@ } func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { - - if config.OutputCreateEmptyFile && (len(res) == 0) { - return nil - } - t := time.Now() outJSON := ejsonFileOutput{ CommandLine: config.CommandLine, @@ -77,6 +73,7 @@ ContentLines: r.ContentLines, ContentType: r.ContentType, RedirectLocation: r.RedirectLocation, + Duration: r.Duration, ResultFile: r.ResultFile, Url: r.Url, Host: r.Host, diff --git a/pkg/output/file_md.go b/pkg/output/file_md.go index 86db0b8..a9186aa 100644 --- a/pkg/output/file_md.go +++ b/pkg/output/file_md.go @@ -14,18 +14,13 @@ Command line : ` + "`{{.CommandLine}}`" + ` Time: ` + "{{ .Time }}" + ` - {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | ResultFile | + {{ range .Keys }}| {{ . 
}} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile | {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | - {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .ResultFile }} | + {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} | {{end}}` // The template format is not pretty but follows the markdown guide ) func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error { - - if config.OutputCreateEmptyFile && (len(res) == 0) { - return nil - } - ti := time.Now() keywords := make([]string, 0) diff --git a/pkg/output/stdout.go b/pkg/output/stdout.go index 043427c..758f3bd 100644 --- a/pkg/output/stdout.go +++ b/pkg/output/stdout.go @@ -2,6 +2,7 @@ import ( "crypto/md5" + "encoding/json" "fmt" "io/ioutil" "os" @@ -123,11 +124,11 @@ } // Print matchers - for _, f := range s.config.Matchers { + for _, f := range s.config.MatcherManager.GetMatchers() { printOption([]byte("Matcher"), []byte(f.ReprVerbose())) } // Print filters - for _, f := range s.config.Filters { + for _, f := range s.config.MatcherManager.GetFilters() { printOption([]byte("Filter"), []byte(f.ReprVerbose())) } fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP) @@ -225,42 +226,38 @@ // Go through each type of write, adding // the suffix to each output file. 
- if config.OutputCreateEmptyFile && (len(res) == 0) { - return nil - } - s.config.OutputFile = BaseFilename + ".json" - err = writeJSON(filename, s.config, res) + err = writeJSON(s.config.OutputFile, s.config, res) if err != nil { s.Error(err.Error()) } s.config.OutputFile = BaseFilename + ".ejson" - err = writeEJSON(filename, s.config, res) + err = writeEJSON(s.config.OutputFile, s.config, res) if err != nil { s.Error(err.Error()) } s.config.OutputFile = BaseFilename + ".html" - err = writeHTML(filename, s.config, res) + err = writeHTML(s.config.OutputFile, s.config, res) if err != nil { s.Error(err.Error()) } s.config.OutputFile = BaseFilename + ".md" - err = writeMarkdown(filename, s.config, res) + err = writeMarkdown(s.config.OutputFile, s.config, res) if err != nil { s.Error(err.Error()) } s.config.OutputFile = BaseFilename + ".csv" - err = writeCSV(filename, s.config, res, false) + err = writeCSV(s.config.OutputFile, s.config, res, false) if err != nil { s.Error(err.Error()) } s.config.OutputFile = BaseFilename + ".ecsv" - err = writeCSV(filename, s.config, res, true) + err = writeCSV(s.config.OutputFile, s.config, res, true) if err != nil { s.Error(err.Error()) } @@ -272,6 +269,10 @@ // SaveFile saves the current results to a file of a given type func (s *Stdoutput) SaveFile(filename, format string) error { var err error + if s.config.OutputSkipEmptyFile && len(s.Results) == 0 { + s.Info("No results and -or defined, output file not written.") + return err + } switch format { case "all": err = s.writeToAll(filename, s.config, append(s.Results, s.CurrentResults...)) @@ -324,6 +325,7 @@ ContentType: resp.ContentType, RedirectLocation: resp.GetRedirectLocation(false), Url: resp.Request.Url, + Duration: resp.Time, ResultFile: resp.ResultFile, Host: resp.Request.Host, } @@ -358,15 +360,16 @@ } func (s *Stdoutput) PrintResult(res ffuf.Result) { - if s.config.Quiet { + switch { + case s.config.Quiet: s.resultQuiet(res) - } else { - if len(res.Input) > 1 || 
s.config.Verbose || len(s.config.OutputDirectory) > 0 { - // Print a multi-line result (when using multiple input keywords and wordlists) - s.resultMultiline(res) - } else { - s.resultNormal(res) - } + case s.config.Json: + s.resultJson(res) + case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0: + // Print a multi-line result (when using multiple input keywords and wordlists) + s.resultMultiline(res) + default: + s.resultNormal(res) } } @@ -401,8 +404,7 @@ func (s *Stdoutput) resultMultiline(res ffuf.Result) { var res_hdr, res_str string res_str = "%s%s * %s: %s\n" - res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines) - res_hdr = s.colorize(res_hdr, res.StatusCode) + res_hdr = fmt.Sprintf("%s%s[Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) reslines := "" if s.config.Verbose { reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, res.Url) @@ -427,13 +429,23 @@ } func (s *Stdoutput) resultNormal(res ffuf.Result) { - resnormal := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(res), s.colorize(fmt.Sprintf("%d", res.StatusCode), res.StatusCode), res.ContentLength, res.ContentWords, res.ContentLines) + resnormal := fmt.Sprintf("%s%s%-23s [Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), s.prepareInputsOneLine(res), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) fmt.Println(resnormal) } -func (s *Stdoutput) colorize(input string, status int64) string { +func (s *Stdoutput) resultJson(res ffuf.Result) { + resBytes, err := json.Marshal(res) + if err 
!= nil { + s.Error(err.Error()) + } else { + fmt.Fprint(os.Stderr, TERMINAL_CLEAR_LINE) + fmt.Println(string(resBytes)) + } +} + +func (s *Stdoutput) colorize(status int64) string { if !s.config.Colors { - return input + return "" } colorCode := ANSI_CLEAR if status >= 200 && status < 300 { @@ -448,7 +460,7 @@ if status >= 500 && status < 600 { colorCode = ANSI_RED } - return fmt.Sprintf("%s%s%s", colorCode, input, ANSI_CLEAR) + return colorCode } func printOption(name []byte, value []byte) { diff --git a/pkg/runner/simple.go b/pkg/runner/simple.go index e59288a..76ba525 100644 --- a/pkg/runner/simple.go +++ b/pkg/runner/simple.go @@ -7,6 +7,7 @@ "io/ioutil" "net" "net/http" + "net/http/httptrace" "net/http/httputil" "net/textproto" "net/url" @@ -41,12 +42,12 @@ proxyURL = http.ProxyURL(pu) } } - simplerunner.config = conf simplerunner.client = &http.Client{ CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second), Transport: &http.Transport{ + ForceAttemptHTTP2: conf.Http2, Proxy: proxyURL, MaxIdleConns: 1000, MaxIdleConnsPerHost: 500, @@ -58,6 +59,7 @@ TLSClientConfig: &tls.Config{ InsecureSkipVerify: true, Renegotiation: tls.RenegotiateOnceAsClient, + ServerName: conf.SNI, }, }} @@ -67,13 +69,8 @@ return &simplerunner } -func (r *SimpleRunner) Prepare(input map[string][]byte) (ffuf.Request, error) { - req := ffuf.NewRequest(r.config) - - req.Headers = r.config.Headers - req.Url = r.config.Url - req.Method = r.config.Method - req.Data = []byte(r.config.Data) +func (r *SimpleRunner) Prepare(input map[string][]byte, basereq *ffuf.Request) (ffuf.Request, error) { + req := ffuf.CopyRequest(basereq) for keyword, inputitem := range input { req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem)) @@ -96,7 +93,21 @@ var err error var rawreq []byte data := bytes.NewReader(req.Data) + + var start time.Time + var firstByteTime time.Duration + + trace 
:= &httptrace.ClientTrace{ + WroteRequest: func(wri httptrace.WroteRequestInfo) { + start = time.Now() // begin the timer after the request is fully written + }, + GotFirstResponseByte: func() { + firstByteTime = time.Since(start) // record when the first byte of the response was received + }, + } + httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data) + if err != nil { return ffuf.Response{}, err } @@ -112,7 +123,7 @@ } req.Host = httpreq.Host - httpreq = httpreq.WithContext(r.config.Context) + httpreq = httpreq.WithContext(httptrace.WithClientTrace(r.config.Context, trace)) for k, v := range req.Headers { httpreq.Header.Set(k, v) } @@ -154,6 +165,7 @@ linesSize := len(strings.Split(string(resp.Data), "\n")) resp.ContentWords = int64(wordsSize) resp.ContentLines = int64(linesSize) + resp.Time = firstByteTime return resp, nil }