New upstream version 1.5.0
Sophie Brun
2 years ago
1 | 1 | - master |
2 | 2 | - New |
3 | 3 | - Changed |
4 | ||
5 | - v1.5.0 | |
6 | - New | |
7 | - New autocalibration options: `-ach`, `-ack` and `-acs`. Revamped the whole autocalibration process | |
8 | - Configurable modes for matchers and filters (CLI flags: `fmode` and `mmode`): "and" and "or" | |
9 | - Changed | |
10 | ||
11 | - v1.4.1 | |
12 | - New | |
13 | - Changed | |
14 | - Fixed a bug with recursion, introduced in the 1.4.0 release | |
15 | - Recursion now works better with multiple wordlists, disabling unnecessary wordlists for queued jobs where needed | |
16 | ||
17 | - v1.4.0 | |
18 | - New | |
19 | - Added response time logging and filtering | |
20 | - Added a CLI flag to specify TLS SNI value | |
21 | - Added full line colors | |
22 | - Added `-json` to emit newline delimited JSON output | |
23 | - Added 500 Internal Server Error to list of status codes matched by default | |
24 | - Changed | |
25 | - Fixed an issue where output file was created regardless of `-or` | |
26 | - Fixed an issue where output (often a lot of it) would be printed after entering interactive mode | |
27 | - Fixed an issue when reading wordlist files from ffufrc | |
28 | - Fixed an issue where `-of all` option only creates one output file (instead of all formats) | |
29 | - Fixed an issue where redirection to the same domain in recursive mode dropped port info from URL | |
30 | - Added HTTP2 support | |
4 | 31 | |
5 | 32 | - v1.3.1 |
6 | 33 | - New |
0 | 0 | # Contributors |
1 | ||
2 | * [adamtlangley](https://github.com/adamtlangley) | |
3 | * [adilsoybali](https://github.com/adilsoybali) | |
1 | 4 | * [AverageSecurityGuy](https://github.com/averagesecurityguy) |
2 | 5 | * [bp0](https://github.com/bp0lr) |
3 | 6 | * [bjhulst](https://github.com/bjhulst) |
9 | 12 | * [Damian89](https://github.com/Damian89) |
10 | 13 | * [Daviey](https://github.com/Daviey) |
11 | 14 | * [delic](https://github.com/delic) |
15 | * [denandz](https://github.com/denandz) | |
16 | * [erbbysam](https://github.com/erbbysam) | |
12 | 17 | * [eur0pa](https://github.com/eur0pa) |
13 | 18 | * [fabiobauer](https://github.com/fabiobauer) |
14 | 19 | * [fang0654](https://github.com/fang0654) |
20 | * [Hazegard](https://github.com/Hazegard) | |
15 | 21 | * [helpermika](https://github.com/helpermika) |
22 | * [h1x](https://github.com/h1x-lnx) | |
16 | 23 | * [Ice3man543](https://github.com/Ice3man543) |
17 | 24 | * [JamTookTheBait](https://github.com/JamTookTheBait) |
18 | 25 | * [jimen0](https://github.com/jimen0) |
19 | 26 | * [joohoi](https://github.com/joohoi) |
20 | 27 | * [jsgv](https://github.com/jsgv) |
28 | * [justinsteven](https://github.com/justinsteven) | |
21 | 29 | * [jvesiluoma](https://github.com/jvesiluoma) |
22 | 30 | * [Kiblyn11](https://github.com/Kiblyn11) |
23 | 31 | * [lc](https://github.com/lc) |
25 | 33 | * [nnwakelam](https://twitter.com/nnwakelam) |
26 | 34 | * [noraj](https://pwn.by/noraj) |
27 | 35 | * [oh6hay](https://github.com/oh6hay) |
36 | * [penguinxoxo](https://github.com/penguinxoxo) | |
28 | 37 | * [putsi](https://github.com/putsi) |
29 | 38 | * [SakiiR](https://github.com/SakiiR) |
30 | 39 | * [seblw](https://github.com/seblw) |
31 | 40 | * [Shaked](https://github.com/Shaked) |
41 | * [Skyehopper](https://github.com/Skyehopper) | |
32 | 42 | * [SolomonSklash](https://github.com/SolomonSklash) |
33 | 43 | * [l4yton](https://github.com/l4yton) |
44 | * [xfgusta](https://github.com/xfgusta) |
0 | ``` | |
1 | /'___\ /'___\ /'___\ | |
2 | /\ \__/ /\ \__/ __ __ /\ \__/ | |
3 | \ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\ | |
4 | \ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/ | |
5 | \ \_\ \ \_\ \ \____/ \ \_\ | |
6 | \/_/ \/_/ \/___/ \/_/ | |
7 | ``` | |
8 | ||
0 | ![ffuf mascot](_img/ffuf_run_logo_600.png) | |
9 | 1 | # ffuf - Fuzz Faster U Fool |
10 | 2 | |
11 | 3 | A fast web fuzzer written in Go. |
33 | 25 | |
34 | 26 | [![Porchetta Industries](https://discordapp.com/api/guilds/736724457258745996/widget.png?style=banner2)](https://discord.gg/VWcdZCUsQP) |
35 | 27 | |
36 | ||
37 | ||
38 | ||
39 | 28 | ## Installation |
40 | 29 | |
41 | 30 | - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run! |
42 | 31 | |
43 | 32 | _or_ |
44 | - If you have recent go compiler installed: `go get -u github.com/ffuf/ffuf` (the same command works for updating) | |
33 | - If you have recent go compiler installed: `go install github.com/ffuf/ffuf@latest` (the same command works for updating) | |
45 | 34 | |
46 | 35 | _or_ |
47 | - git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build | |
48 | ||
49 | Ffuf depends on Go 1.13 or greater. | |
36 | - `git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build` | |
37 | ||
38 | Ffuf depends on Go 1.16 or greater. | |
50 | 39 | |
51 | 40 | ## Example usage |
52 | 41 | |
56 | 45 | "[Everything you need to know about FFUF](https://codingo.io/tools/ffuf/bounty/2020/09/17/everything-you-need-to-know-about-ffuf.html)" by |
57 | 46 | Michael Skelton ([@codingo](https://github.com/codingo)). |
58 | 47 | |
48 | You can also practise your ffuf scans against a live host with different lessons and use cases either locally by using the docker container https://github.com/adamtlangley/ffufme or against the live hosted version at http://ffuf.me created by Adam Langley [@adamtlangley](https://twitter.com/adamtlangley). | |
59 | 49 | |
60 | 50 | ### Typical directory discovery |
61 | 51 | |
151 | 141 | Additionally, in case you wish to use bunch of configuration files for different use cases, you can do this by defining |
152 | 142 | the configuration file path using `-config` command line flag that takes the file path to the configuration file as its |
153 | 143 | parameter. |
144 | ||
145 | <p align="center"> | |
146 | <img width="250" src="_img/ffuf_juggling_250.png"> | |
147 | </p> | |
154 | 148 | |
155 | 149 | ## Usage |
156 | 150 | |
170 | 164 | -recursion-depth Maximum recursion depth. (default: 0) |
171 | 165 | -recursion-strategy Recursion strategy: "default" for a redirect based, and "greedy" to recurse on all matches (default: default) |
172 | 166 | -replay-proxy Replay matched requests using this proxy. |
167 | -sni Target TLS SNI, does not support FUZZ keyword | |
173 | 168 | -timeout HTTP request timeout in seconds. (default: 10) |
174 | 169 | -u Target URL |
175 | 170 | -x Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080 |
193 | 188 | -v Verbose output, printing full URL and redirect location (if any) with the results. (default: false) |
194 | 189 | |
195 | 190 | MATCHER OPTIONS: |
196 | -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405) | |
191 | -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405,500) | |
197 | 192 | -ml Match amount of lines in response |
198 | 193 | -mr Match regexp |
199 | 194 | -ms Match HTTP response size |
195 | -mt Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100 | |
200 | 196 | -mw Match amount of words in response |
201 | 197 | |
202 | 198 | FILTER OPTIONS: |
204 | 200 | -fl Filter by amount of lines in response. Comma separated list of line counts and ranges |
205 | 201 | -fr Filter regexp |
206 | 202 | -fs Filter HTTP response size. Comma separated list of sizes and ranges |
203 | -ft Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100 | |
207 | 204 | -fw Filter by amount of words in response. Comma separated list of word counts and ranges |
208 | 205 | |
209 | 206 | INPUT OPTIONS: |
213 | 210 | -input-cmd Command producing the input. --input-num is required when using this input method. Overrides -w. |
214 | 211 | -input-num Number of inputs to test. Used in conjunction with --input-cmd. (default: 100) |
215 | 212 | -input-shell Shell to be used for running command |
216 | -mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork (default: clusterbomb) | |
213 | -mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork, sniper (default: clusterbomb) | |
217 | 214 | -request File containing the raw http request |
218 | 215 | -request-proto Protocol to use along with raw request (default: https) |
219 | 216 | -w Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD' |
280 | 277 | For this kind of scenario, the user is able to use the command `restart`, which resets the state and starts the current |
281 | 278 | job from the beginning. |
282 | 279 | |
280 | <p align="center"> | |
281 | <img width="250" src="_img/ffuf_waving_250.png"> | |
282 | </p> | |
283 | ||
283 | 284 | |
284 | 285 | ## Sponsorware |
285 | 286 |
Binary diff not shown
Binary diff not shown
Binary diff not shown
Binary diff not shown
Binary diff not shown
26 | 26 | "randomtest", |
27 | 27 | "admin" |
28 | 28 | ] |
29 | autocalibration_strategy = "basic" | |
30 | autocalibration_keyword = "FUZZ" | |
31 | autocalibration_perhost = false | |
29 | 32 | colors = false |
30 | 33 | delay = "" |
31 | 34 | maxtime = 0 |
38 | 41 | stoponerrors = false |
39 | 42 | threads = 40 |
40 | 43 | verbose = false |
44 | json = false | |
41 | 45 | |
42 | 46 | [input] |
43 | 47 | dirsearchcompat = false |
64 | 68 | outputcreateemptyfile = false |
65 | 69 | |
66 | 70 | [filter] |
71 | mode = "or" | |
67 | 72 | lines = "" |
68 | 73 | regexp = "" |
69 | 74 | size = "" |
70 | 75 | status = "" |
76 | time = "" | |
71 | 77 | words = "" |
72 | 78 | |
73 | 79 | [matcher] |
80 | mode = "or" | |
74 | 81 | lines = "" |
75 | 82 | regexp = "" |
76 | 83 | size = "" |
77 | status = "200,204,301,302,307,401,403,405" | |
84 | status = "200,204,301,302,307,401,403,405,500" | |
85 | time = "" | |
78 | 86 | words = "" |
53 | 53 | Description: "Options controlling the HTTP request and its parts.", |
54 | 54 | Flags: make([]UsageFlag, 0), |
55 | 55 | Hidden: false, |
56 | ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x"}, | |
56 | ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x", "sni", "http2"}, | |
57 | 57 | } |
58 | 58 | u_general := UsageSection{ |
59 | 59 | Name: "GENERAL OPTIONS", |
60 | 60 | Description: "", |
61 | 61 | Flags: make([]UsageFlag, 0), |
62 | 62 | Hidden: false, |
63 | ExpectedFlags: []string{"ac", "acc", "c", "config", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"}, | |
63 | ExpectedFlags: []string{"ac", "acc", "ack", "ach", "acs", "c", "config", "json", "maxtime", "maxtime-job", "noninteractive", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"}, | |
64 | 64 | } |
65 | 65 | u_compat := UsageSection{ |
66 | 66 | Name: "COMPATIBILITY OPTIONS", |
74 | 74 | Description: "Matchers for the response filtering.", |
75 | 75 | Flags: make([]UsageFlag, 0), |
76 | 76 | Hidden: false, |
77 | ExpectedFlags: []string{"mc", "ml", "mr", "ms", "mw"}, | |
77 | ExpectedFlags: []string{"mmode", "mc", "ml", "mr", "ms", "mt", "mw"}, | |
78 | 78 | } |
79 | 79 | u_filter := UsageSection{ |
80 | 80 | Name: "FILTER OPTIONS", |
81 | 81 | Description: "Filters for the response filtering.", |
82 | 82 | Flags: make([]UsageFlag, 0), |
83 | 83 | Hidden: false, |
84 | ExpectedFlags: []string{"fc", "fl", "fr", "fs", "fw"}, | |
84 | ExpectedFlags: []string{"fmode", "fc", "fl", "fr", "fs", "ft", "fw"}, | |
85 | 85 | } |
86 | 86 | u_input := UsageSection{ |
87 | 87 | Name: "INPUT OPTIONS", |
3 | 3 | "context" |
4 | 4 | "flag" |
5 | 5 | "fmt" |
6 | "github.com/ffuf/ffuf/pkg/filter" | |
7 | "io/ioutil" | |
8 | "log" | |
9 | "os" | |
10 | "strings" | |
11 | ||
6 | 12 | "github.com/ffuf/ffuf/pkg/ffuf" |
7 | "github.com/ffuf/ffuf/pkg/filter" | |
8 | 13 | "github.com/ffuf/ffuf/pkg/input" |
9 | 14 | "github.com/ffuf/ffuf/pkg/interactive" |
10 | 15 | "github.com/ffuf/ffuf/pkg/output" |
11 | 16 | "github.com/ffuf/ffuf/pkg/runner" |
12 | "io/ioutil" | |
13 | "log" | |
14 | "os" | |
15 | "strings" | |
16 | 17 | ) |
17 | 18 | |
18 | 19 | type multiStringFlag []string |
53 | 54 | autocalibrationstrings = opts.General.AutoCalibrationStrings |
54 | 55 | headers = opts.HTTP.Headers |
55 | 56 | inputcommands = opts.Input.Inputcommands |
57 | wordlists = opts.Input.Wordlists | |
56 | 58 | |
57 | 59 | flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)") |
58 | 60 | flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)") |
59 | 61 | flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility") |
60 | flag.BoolVar(&opts.Output.OutputCreateEmptyFile, "or", opts.Output.OutputCreateEmptyFile, "Don't create the output file if we don't have results") | |
62 | flag.BoolVar(&opts.Output.OutputSkipEmptyFile, "or", opts.Output.OutputSkipEmptyFile, "Don't create the output file if we don't have results") | |
61 | 63 | flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options") |
64 | flag.BoolVar(&opts.General.AutoCalibrationPerHost, "ach", opts.General.AutoCalibration, "Per host autocalibration") | |
62 | 65 | flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.") |
66 | flag.BoolVar(&opts.General.Json, "json", opts.General.Json, "JSON output, printing newline-delimited JSON records") | |
63 | 67 | flag.BoolVar(&opts.General.Noninteractive, "noninteractive", opts.General.Noninteractive, "Disable the interactive console functionality") |
64 | 68 | flag.BoolVar(&opts.General.Quiet, "s", opts.General.Quiet, "Do not print additional information (silent mode)") |
65 | 69 | flag.BoolVar(&opts.General.ShowVersion, "V", opts.General.ShowVersion, "Show version information.") |
70 | 74 | flag.BoolVar(&opts.HTTP.FollowRedirects, "r", opts.HTTP.FollowRedirects, "Follow redirects") |
71 | 75 | flag.BoolVar(&opts.HTTP.IgnoreBody, "ignore-body", opts.HTTP.IgnoreBody, "Do not fetch the response content.") |
72 | 76 | flag.BoolVar(&opts.HTTP.Recursion, "recursion", opts.HTTP.Recursion, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.") |
77 | flag.BoolVar(&opts.HTTP.Http2, "http2", opts.HTTP.Http2, "Use HTTP2 protocol") | |
73 | 78 | flag.BoolVar(&opts.Input.DirSearchCompat, "D", opts.Input.DirSearchCompat, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.") |
74 | 79 | flag.BoolVar(&opts.Input.IgnoreWordlistComments, "ic", opts.Input.IgnoreWordlistComments, "Ignore wordlist comments") |
75 | 80 | flag.IntVar(&opts.General.MaxTime, "maxtime", opts.General.MaxTime, "Maximum running time in seconds for entire process.") |
79 | 84 | flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.") |
80 | 85 | flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.") |
81 | 86 | flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.") |
87 | flag.StringVar(&opts.General.AutoCalibrationKeyword, "ack", opts.General.AutoCalibrationKeyword, "Autocalibration keyword") | |
88 | flag.StringVar(&opts.General.AutoCalibrationStrategy, "acs", opts.General.AutoCalibrationStrategy, "Autocalibration strategy: \"basic\" or \"advanced\"") | |
82 | 89 | flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file") |
90 | flag.StringVar(&opts.Filter.Mode, "fmode", opts.Filter.Mode, "Filter set operator. Either of: and, or") | |
83 | 91 | flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges") |
84 | 92 | flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp") |
85 | 93 | flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges") |
86 | 94 | flag.StringVar(&opts.Filter.Status, "fc", opts.Filter.Status, "Filter HTTP status codes from response. Comma separated list of codes and ranges") |
95 | flag.StringVar(&opts.Filter.Time, "ft", opts.Filter.Time, "Filter by number of milliseconds to the first response byte, either greater or less than. EG: >100 or <100") | |
87 | 96 | flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. Comma separated list of word counts and ranges") |
88 | 97 | flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"") |
89 | 98 | flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data") |
95 | 104 | flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.") |
96 | 105 | flag.StringVar(&opts.HTTP.RecursionStrategy, "recursion-strategy", opts.HTTP.RecursionStrategy, "Recursion strategy: \"default\" for a redirect based, and \"greedy\" to recurse on all matches") |
97 | 106 | flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL") |
107 | flag.StringVar(&opts.HTTP.SNI, "sni", opts.HTTP.SNI, "Target TLS SNI, does not support FUZZ keyword") | |
98 | 108 | flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.") |
99 | flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork") | |
109 | flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork, sniper") | |
100 | 110 | flag.StringVar(&opts.Input.InputShell, "input-shell", opts.Input.InputShell, "Shell to be used for running command") |
101 | 111 | flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request") |
102 | 112 | flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request") |
113 | flag.StringVar(&opts.Matcher.Mode, "mmode", opts.Matcher.Mode, "Matcher set operator. Either of: and, or") | |
103 | 114 | flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response") |
104 | 115 | flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp") |
105 | 116 | flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size") |
106 | 117 | flag.StringVar(&opts.Matcher.Status, "mc", opts.Matcher.Status, "Match HTTP status codes, or \"all\" for everything.") |
118 | flag.StringVar(&opts.Matcher.Time, "mt", opts.Matcher.Time, "Match how many milliseconds to the first response byte, either greater or less than. EG: >100 or <100") | |
107 | 119 | flag.StringVar(&opts.Matcher.Words, "mw", opts.Matcher.Words, "Match amount of words in response") |
108 | 120 | flag.StringVar(&opts.Output.DebugLog, "debug-log", opts.Output.DebugLog, "Write all of the internal logging to the specified file.") |
109 | 121 | flag.StringVar(&opts.Output.OutputDirectory, "od", opts.Output.OutputDirectory, "Directory path to store matched results to.") |
187 | 199 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
188 | 200 | os.Exit(1) |
189 | 201 | } |
190 | if err := filter.SetupFilters(opts, conf); err != nil { | |
202 | if err := SetupFilters(opts, conf); err != nil { | |
191 | 203 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
192 | 204 | Usage() |
193 | 205 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
194 | 206 | os.Exit(1) |
195 | 207 | } |
196 | 208 | |
197 | if err := filter.CalibrateIfNeeded(job); err != nil { | |
198 | fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err) | |
199 | os.Exit(1) | |
200 | } | |
201 | 209 | if !conf.Noninteractive { |
202 | 210 | go func() { |
203 | 211 | err := interactive.Handle(job) |
225 | 233 | job.Output = output.NewOutputProviderByName("stdout", conf) |
226 | 234 | return job, errs.ErrorOrNil() |
227 | 235 | } |
236 | ||
237 | func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { | |
238 | errs := ffuf.NewMultierror() | |
239 | conf.MatcherManager = filter.NewMatcherManager() | |
240 | // If any other matcher is set, ignore -mc default value | |
241 | matcherSet := false | |
242 | statusSet := false | |
243 | warningIgnoreBody := false | |
244 | flag.Visit(func(f *flag.Flag) { | |
245 | if f.Name == "mc" { | |
246 | statusSet = true | |
247 | } | |
248 | if f.Name == "ms" { | |
249 | matcherSet = true | |
250 | warningIgnoreBody = true | |
251 | } | |
252 | if f.Name == "ml" { | |
253 | matcherSet = true | |
254 | warningIgnoreBody = true | |
255 | } | |
256 | if f.Name == "mr" { | |
257 | matcherSet = true | |
258 | } | |
259 | if f.Name == "mt" { | |
260 | matcherSet = true | |
261 | } | |
262 | if f.Name == "mw" { | |
263 | matcherSet = true | |
264 | warningIgnoreBody = true | |
265 | } | |
266 | }) | |
267 | // Only set default matchers if no | |
268 | if statusSet || !matcherSet { | |
269 | if err := conf.MatcherManager.AddMatcher("status", parseOpts.Matcher.Status); err != nil { | |
270 | errs.Add(err) | |
271 | } | |
272 | } | |
273 | ||
274 | if parseOpts.Filter.Status != "" { | |
275 | if err := conf.MatcherManager.AddFilter("status", parseOpts.Filter.Status, false); err != nil { | |
276 | errs.Add(err) | |
277 | } | |
278 | } | |
279 | if parseOpts.Filter.Size != "" { | |
280 | warningIgnoreBody = true | |
281 | if err := conf.MatcherManager.AddFilter("size", parseOpts.Filter.Size, false); err != nil { | |
282 | errs.Add(err) | |
283 | } | |
284 | } | |
285 | if parseOpts.Filter.Regexp != "" { | |
286 | if err := conf.MatcherManager.AddFilter("regexp", parseOpts.Filter.Regexp, false); err != nil { | |
287 | errs.Add(err) | |
288 | } | |
289 | } | |
290 | if parseOpts.Filter.Words != "" { | |
291 | warningIgnoreBody = true | |
292 | if err := conf.MatcherManager.AddFilter("word", parseOpts.Filter.Words, false); err != nil { | |
293 | errs.Add(err) | |
294 | } | |
295 | } | |
296 | if parseOpts.Filter.Lines != "" { | |
297 | warningIgnoreBody = true | |
298 | if err := conf.MatcherManager.AddFilter("line", parseOpts.Filter.Lines, false); err != nil { | |
299 | errs.Add(err) | |
300 | } | |
301 | } | |
302 | if parseOpts.Filter.Time != "" { | |
303 | if err := conf.MatcherManager.AddFilter("time", parseOpts.Filter.Time, false); err != nil { | |
304 | errs.Add(err) | |
305 | } | |
306 | } | |
307 | if parseOpts.Matcher.Size != "" { | |
308 | if err := conf.MatcherManager.AddMatcher("size", parseOpts.Matcher.Size); err != nil { | |
309 | errs.Add(err) | |
310 | } | |
311 | } | |
312 | if parseOpts.Matcher.Regexp != "" { | |
313 | if err := conf.MatcherManager.AddMatcher("regexp", parseOpts.Matcher.Regexp); err != nil { | |
314 | errs.Add(err) | |
315 | } | |
316 | } | |
317 | if parseOpts.Matcher.Words != "" { | |
318 | if err := conf.MatcherManager.AddMatcher("word", parseOpts.Matcher.Words); err != nil { | |
319 | errs.Add(err) | |
320 | } | |
321 | } | |
322 | if parseOpts.Matcher.Lines != "" { | |
323 | if err := conf.MatcherManager.AddMatcher("line", parseOpts.Matcher.Lines); err != nil { | |
324 | errs.Add(err) | |
325 | } | |
326 | } | |
327 | if parseOpts.Matcher.Time != "" { | |
328 | if err := conf.MatcherManager.AddFilter("time", parseOpts.Matcher.Time, false); err != nil { | |
329 | errs.Add(err) | |
330 | } | |
331 | } | |
332 | if conf.IgnoreBody && warningIgnoreBody { | |
333 | fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") | |
334 | } | |
335 | return errs.ErrorOrNil() | |
336 | } |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "fmt" | |
4 | "log" | |
5 | "math/rand" | |
6 | "strconv" | |
7 | "time" | |
8 | ) | |
9 | ||
10 | func (j *Job) autoCalibrationStrings() map[string][]string { | |
11 | rand.Seed(time.Now().UnixNano()) | |
12 | cInputs := make(map[string][]string) | |
13 | if len(j.Config.AutoCalibrationStrings) < 1 { | |
14 | cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(16)) | |
15 | cInputs["basic_admin"] = append(cInputs["basic_admin"], "admin"+RandomString(8)) | |
16 | cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(16)) | |
17 | cInputs["htaccess"] = append(cInputs["htaccess"], ".htaccess"+RandomString(8)) | |
18 | cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(16)) | |
19 | cInputs["basic_random"] = append(cInputs["basic_random"], RandomString(8)) | |
20 | if j.Config.AutoCalibrationStrategy == "advanced" { | |
21 | // Add directory tests and .htaccess too | |
22 | cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(16)+"/") | |
23 | cInputs["admin_dir"] = append(cInputs["admin_dir"], "admin"+RandomString(8)+"/") | |
24 | cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(16)+"/") | |
25 | cInputs["random_dir"] = append(cInputs["random_dir"], RandomString(8)+"/") | |
26 | } | |
27 | } else { | |
28 | cInputs["custom"] = append(cInputs["custom"], j.Config.AutoCalibrationStrings...) | |
29 | } | |
30 | return cInputs | |
31 | } | |
32 | ||
33 | func (j *Job) calibrationRequest(inputs map[string][]byte) (Response, error) { | |
34 | basereq := BaseRequest(j.Config) | |
35 | req, err := j.Runner.Prepare(inputs, &basereq) | |
36 | if err != nil { | |
37 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing autocalibration request: %s\n", err)) | |
38 | j.incError() | |
39 | log.Printf("%s", err) | |
40 | return Response{}, err | |
41 | } | |
42 | resp, err := j.Runner.Execute(&req) | |
43 | if err != nil { | |
44 | j.Output.Error(fmt.Sprintf("Encountered an error while executing autocalibration request: %s\n", err)) | |
45 | j.incError() | |
46 | log.Printf("%s", err) | |
47 | return Response{}, err | |
48 | } | |
49 | // Only calibrate on responses that would be matched otherwise | |
50 | if j.isMatch(resp) { | |
51 | return resp, nil | |
52 | } | |
53 | return resp, fmt.Errorf("Response wouldn't be matched") | |
54 | } | |
55 | ||
56 | //CalibrateForHost runs autocalibration for a specific host | |
57 | func (j *Job) CalibrateForHost(host string, baseinput map[string][]byte) error { | |
58 | if j.Config.MatcherManager.CalibratedForDomain(host) { | |
59 | return nil | |
60 | } | |
61 | if baseinput[j.Config.AutoCalibrationKeyword] == nil { | |
62 | return fmt.Errorf("Autocalibration keyword \"%s\" not found in the request.", j.Config.AutoCalibrationKeyword) | |
63 | } | |
64 | cStrings := j.autoCalibrationStrings() | |
65 | input := make(map[string][]byte) | |
66 | for k, v := range baseinput { | |
67 | input[k] = v | |
68 | } | |
69 | for _, v := range cStrings { | |
70 | responses := make([]Response, 0) | |
71 | for _, cs := range v { | |
72 | input[j.Config.AutoCalibrationKeyword] = []byte(cs) | |
73 | resp, err := j.calibrationRequest(input) | |
74 | if err != nil { | |
75 | continue | |
76 | } | |
77 | responses = append(responses, resp) | |
78 | err = j.calibrateFilters(responses, true) | |
79 | if err != nil { | |
80 | j.Output.Error(fmt.Sprintf("%s", err)) | |
81 | } | |
82 | } | |
83 | } | |
84 | j.Config.MatcherManager.SetCalibratedForHost(host, true) | |
85 | return nil | |
86 | } | |
87 | ||
88 | //CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests | |
89 | func (j *Job) Calibrate(input map[string][]byte) error { | |
90 | if j.Config.MatcherManager.Calibrated() { | |
91 | return nil | |
92 | } | |
93 | cInputs := j.autoCalibrationStrings() | |
94 | ||
95 | for _, v := range cInputs { | |
96 | responses := make([]Response, 0) | |
97 | for _, cs := range v { | |
98 | input[j.Config.AutoCalibrationKeyword] = []byte(cs) | |
99 | resp, err := j.calibrationRequest(input) | |
100 | if err != nil { | |
101 | continue | |
102 | } | |
103 | responses = append(responses, resp) | |
104 | err = j.calibrateFilters(responses, false) | |
105 | if err != nil { | |
106 | j.Output.Error(fmt.Sprintf("%s", err)) | |
107 | } | |
108 | } | |
109 | } | |
110 | j.Config.MatcherManager.SetCalibrated(true) | |
111 | return nil | |
112 | } | |
113 | ||
114 | //CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and | |
115 | // configuring the filters accordingly | |
116 | func (j *Job) CalibrateIfNeeded(host string, input map[string][]byte) error { | |
117 | j.calibMutex.Lock() | |
118 | defer j.calibMutex.Unlock() | |
119 | if !j.Config.AutoCalibration { | |
120 | return nil | |
121 | } | |
122 | if j.Config.AutoCalibrationPerHost { | |
123 | return j.CalibrateForHost(host, input) | |
124 | } | |
125 | return j.Calibrate(input) | |
126 | } | |
127 | ||
128 | func (j *Job) calibrateFilters(responses []Response, perHost bool) error { | |
129 | // Work down from the most specific common denominator | |
130 | if len(responses) > 0 { | |
131 | // Content length | |
132 | baselineSize := responses[0].ContentLength | |
133 | sizeMatch := true | |
134 | for _, r := range responses { | |
135 | if baselineSize != r.ContentLength { | |
136 | sizeMatch = false | |
137 | } | |
138 | } | |
139 | if sizeMatch { | |
140 | if perHost { | |
141 | // Check if already filtered | |
142 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { | |
143 | match, _ := f.Filter(&responses[0]) | |
144 | if match { | |
145 | // Already filtered | |
146 | return nil | |
147 | } | |
148 | } | |
149 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "size", strconv.FormatInt(baselineSize, 10)) | |
150 | return nil | |
151 | } else { | |
152 | // Check if already filtered | |
153 | for _, f := range j.Config.MatcherManager.GetFilters() { | |
154 | match, _ := f.Filter(&responses[0]) | |
155 | if match { | |
156 | // Already filtered | |
157 | return nil | |
158 | } | |
159 | } | |
160 | _ = j.Config.MatcherManager.AddFilter("size", strconv.FormatInt(baselineSize, 10), false) | |
161 | return nil | |
162 | } | |
163 | } | |
164 | ||
165 | // Content words | |
166 | baselineWords := responses[0].ContentWords | |
167 | wordsMatch := true | |
168 | for _, r := range responses { | |
169 | if baselineWords != r.ContentWords { | |
170 | wordsMatch = false | |
171 | } | |
172 | } | |
173 | if wordsMatch { | |
174 | if perHost { | |
175 | // Check if already filtered | |
176 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { | |
177 | match, _ := f.Filter(&responses[0]) | |
178 | if match { | |
179 | // Already filtered | |
180 | return nil | |
181 | } | |
182 | } | |
183 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "word", strconv.FormatInt(baselineWords, 10)) | |
184 | return nil | |
185 | } else { | |
186 | // Check if already filtered | |
187 | for _, f := range j.Config.MatcherManager.GetFilters() { | |
188 | match, _ := f.Filter(&responses[0]) | |
189 | if match { | |
190 | // Already filtered | |
191 | return nil | |
192 | } | |
193 | } | |
194 | _ = j.Config.MatcherManager.AddFilter("word", strconv.FormatInt(baselineSize, 10), false) | |
195 | return nil | |
196 | } | |
197 | } | |
198 | ||
199 | // Content lines | |
200 | baselineLines := responses[0].ContentLines | |
201 | linesMatch := true | |
202 | for _, r := range responses { | |
203 | if baselineLines != r.ContentLines { | |
204 | linesMatch = false | |
205 | } | |
206 | } | |
207 | if linesMatch { | |
208 | if perHost { | |
209 | // Check if already filtered | |
210 | for _, f := range j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*responses[0].Request)) { | |
211 | match, _ := f.Filter(&responses[0]) | |
212 | if match { | |
213 | // Already filtered | |
214 | return nil | |
215 | } | |
216 | } | |
217 | _ = j.Config.MatcherManager.AddPerDomainFilter(HostURLFromRequest(*responses[0].Request), "line", strconv.FormatInt(baselineLines, 10)) | |
218 | return nil | |
219 | } else { | |
220 | // Check if already filtered | |
221 | for _, f := range j.Config.MatcherManager.GetFilters() { | |
222 | match, _ := f.Filter(&responses[0]) | |
223 | if match { | |
224 | // Already filtered | |
225 | return nil | |
226 | } | |
227 | } | |
228 | _ = j.Config.MatcherManager.AddFilter("line", strconv.FormatInt(baselineSize, 10), false) | |
229 | return nil | |
230 | } | |
231 | } | |
232 | } | |
233 | return fmt.Errorf("No common filtering values found") | |
234 | } |
4 | 4 | ) |
5 | 5 | |
6 | 6 | type Config struct { |
7 | AutoCalibration bool `json:"autocalibration"` | |
8 | AutoCalibrationStrings []string `json:"autocalibration_strings"` | |
9 | Cancel context.CancelFunc `json:"-"` | |
10 | Colors bool `json:"colors"` | |
11 | CommandKeywords []string `json:"-"` | |
12 | CommandLine string `json:"cmdline"` | |
13 | ConfigFile string `json:"configfile"` | |
14 | Context context.Context `json:"-"` | |
15 | Data string `json:"postdata"` | |
16 | Delay optRange `json:"delay"` | |
17 | DirSearchCompat bool `json:"dirsearch_compatibility"` | |
18 | Extensions []string `json:"extensions"` | |
19 | Filters map[string]FilterProvider `json:"filters"` | |
20 | FollowRedirects bool `json:"follow_redirects"` | |
21 | Headers map[string]string `json:"headers"` | |
22 | IgnoreBody bool `json:"ignorebody"` | |
23 | IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` | |
24 | InputMode string `json:"inputmode"` | |
25 | InputNum int `json:"cmd_inputnum"` | |
26 | InputProviders []InputProviderConfig `json:"inputproviders"` | |
27 | InputShell string `json:"inputshell"` | |
28 | Matchers map[string]FilterProvider `json:"matchers"` | |
29 | MaxTime int `json:"maxtime"` | |
30 | MaxTimeJob int `json:"maxtime_job"` | |
31 | Method string `json:"method"` | |
32 | Noninteractive bool `json:"noninteractive"` | |
33 | OutputDirectory string `json:"outputdirectory"` | |
34 | OutputFile string `json:"outputfile"` | |
35 | OutputFormat string `json:"outputformat"` | |
36 | OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"` | |
37 | ProgressFrequency int `json:"-"` | |
38 | ProxyURL string `json:"proxyurl"` | |
39 | Quiet bool `json:"quiet"` | |
40 | Rate int64 `json:"rate"` | |
41 | Recursion bool `json:"recursion"` | |
42 | RecursionDepth int `json:"recursion_depth"` | |
43 | RecursionStrategy string `json:"recursion_strategy"` | |
44 | ReplayProxyURL string `json:"replayproxyurl"` | |
45 | StopOn403 bool `json:"stop_403"` | |
46 | StopOnAll bool `json:"stop_all"` | |
47 | StopOnErrors bool `json:"stop_errors"` | |
48 | Threads int `json:"threads"` | |
49 | Timeout int `json:"timeout"` | |
50 | Url string `json:"url"` | |
51 | Verbose bool `json:"verbose"` | |
7 | AutoCalibration bool `json:"autocalibration"` | |
8 | AutoCalibrationKeyword string `json:"autocalibration_keyword"` | |
9 | AutoCalibrationPerHost bool `json:"autocalibration_perhost"` | |
10 | AutoCalibrationStrategy string `json:"autocalibration_strategy"` | |
11 | AutoCalibrationStrings []string `json:"autocalibration_strings"` | |
12 | Cancel context.CancelFunc `json:"-"` | |
13 | Colors bool `json:"colors"` | |
14 | CommandKeywords []string `json:"-"` | |
15 | CommandLine string `json:"cmdline"` | |
16 | ConfigFile string `json:"configfile"` | |
17 | Context context.Context `json:"-"` | |
18 | Data string `json:"postdata"` | |
19 | Delay optRange `json:"delay"` | |
20 | DirSearchCompat bool `json:"dirsearch_compatibility"` | |
21 | Extensions []string `json:"extensions"` | |
22 | FilterMode string `json:"fmode"` | |
23 | FollowRedirects bool `json:"follow_redirects"` | |
24 | Headers map[string]string `json:"headers"` | |
25 | IgnoreBody bool `json:"ignorebody"` | |
26 | IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` | |
27 | InputMode string `json:"inputmode"` | |
28 | InputNum int `json:"cmd_inputnum"` | |
29 | InputProviders []InputProviderConfig `json:"inputproviders"` | |
30 | InputShell string `json:"inputshell"` | |
31 | Json bool `json:"json"` | |
32 | MatcherManager MatcherManager `json:"matchers"` | |
33 | MatcherMode string `json:"mmode"` | |
34 | MaxTime int `json:"maxtime"` | |
35 | MaxTimeJob int `json:"maxtime_job"` | |
36 | Method string `json:"method"` | |
37 | Noninteractive bool `json:"noninteractive"` | |
38 | OutputDirectory string `json:"outputdirectory"` | |
39 | OutputFile string `json:"outputfile"` | |
40 | OutputFormat string `json:"outputformat"` | |
41 | OutputSkipEmptyFile bool `json:"OutputSkipEmptyFile"` | |
42 | ProgressFrequency int `json:"-"` | |
43 | ProxyURL string `json:"proxyurl"` | |
44 | Quiet bool `json:"quiet"` | |
45 | Rate int64 `json:"rate"` | |
46 | Recursion bool `json:"recursion"` | |
47 | RecursionDepth int `json:"recursion_depth"` | |
48 | RecursionStrategy string `json:"recursion_strategy"` | |
49 | ReplayProxyURL string `json:"replayproxyurl"` | |
50 | SNI string `json:"sni"` | |
51 | StopOn403 bool `json:"stop_403"` | |
52 | StopOnAll bool `json:"stop_all"` | |
53 | StopOnErrors bool `json:"stop_errors"` | |
54 | Threads int `json:"threads"` | |
55 | Timeout int `json:"timeout"` | |
56 | Url string `json:"url"` | |
57 | Verbose bool `json:"verbose"` | |
58 | Http2 bool `json:"http2"` | |
52 | 59 | } |
53 | 60 | |
54 | 61 | type InputProviderConfig struct { |
55 | Name string `json:"name"` | |
56 | Keyword string `json:"keyword"` | |
57 | Value string `json:"value"` | |
62 | Name string `json:"name"` | |
63 | Keyword string `json:"keyword"` | |
64 | Value string `json:"value"` | |
65 | Template string `json:"template"` // the templating string used for sniper mode (usually "§") | |
58 | 66 | } |
59 | 67 | |
60 | 68 | func NewConfig(ctx context.Context, cancel context.CancelFunc) Config { |
61 | 69 | var conf Config |
70 | conf.AutoCalibrationKeyword = "FUZZ" | |
71 | conf.AutoCalibrationStrategy = "basic" | |
62 | 72 | conf.AutoCalibrationStrings = make([]string, 0) |
63 | 73 | conf.CommandKeywords = make([]string, 0) |
64 | 74 | conf.Context = ctx |
67 | 77 | conf.Delay = optRange{0, 0, false, false} |
68 | 78 | conf.DirSearchCompat = false |
69 | 79 | conf.Extensions = make([]string, 0) |
70 | conf.Filters = make(map[string]FilterProvider) | |
80 | conf.FilterMode = "or" | |
71 | 81 | conf.FollowRedirects = false |
72 | 82 | conf.Headers = make(map[string]string) |
73 | 83 | conf.IgnoreWordlistComments = false |
75 | 85 | conf.InputNum = 0 |
76 | 86 | conf.InputShell = "" |
77 | 87 | conf.InputProviders = make([]InputProviderConfig, 0) |
78 | conf.Matchers = make(map[string]FilterProvider) | |
88 | conf.Json = false | |
89 | conf.MatcherMode = "or" | |
79 | 90 | conf.MaxTime = 0 |
80 | 91 | conf.MaxTimeJob = 0 |
81 | 92 | conf.Method = "GET" |
87 | 98 | conf.Recursion = false |
88 | 99 | conf.RecursionDepth = 0 |
89 | 100 | conf.RecursionStrategy = "default" |
101 | conf.SNI = "" | |
90 | 102 | conf.StopOn403 = false |
91 | 103 | conf.StopOnAll = false |
92 | 104 | conf.StopOnErrors = false |
93 | 105 | conf.Timeout = 10 |
94 | 106 | conf.Url = "" |
95 | 107 | conf.Verbose = false |
108 | conf.Http2 = false | |
96 | 109 | return conf |
97 | 110 | } |
98 | 111 |
0 | 0 | package ffuf |
1 | ||
2 | import "time" | |
3 | ||
4 | //MatcherManager provides functions for managing matchers and filters | |
5 | type MatcherManager interface { | |
6 | SetCalibrated(calibrated bool) | |
7 | SetCalibratedForHost(host string, calibrated bool) | |
8 | AddFilter(name string, option string, replace bool) error | |
9 | AddPerDomainFilter(domain string, name string, option string) error | |
10 | RemoveFilter(name string) | |
11 | AddMatcher(name string, option string) error | |
12 | GetFilters() map[string]FilterProvider | |
13 | GetMatchers() map[string]FilterProvider | |
14 | FiltersForDomain(domain string) map[string]FilterProvider | |
15 | CalibratedForDomain(domain string) bool | |
16 | Calibrated() bool | |
17 | } | |
1 | 18 | |
2 | 19 | //FilterProvider is a generic interface for both Matchers and Filters |
3 | 20 | type FilterProvider interface { |
8 | 25 | |
9 | 26 | //RunnerProvider is an interface for request executors |
10 | 27 | type RunnerProvider interface { |
11 | Prepare(input map[string][]byte) (Request, error) | |
28 | Prepare(input map[string][]byte, basereq *Request) (Request, error) | |
12 | 29 | Execute(req *Request) (Response, error) |
13 | 30 | } |
14 | 31 | |
15 | 32 | //InputProvider interface handles the input data for RunnerProvider |
16 | 33 | type InputProvider interface { |
34 | ActivateKeywords([]string) | |
17 | 35 | AddProvider(InputProviderConfig) error |
36 | Keywords() []string | |
18 | 37 | Next() bool |
19 | 38 | Position() int |
20 | 39 | Reset() |
31 | 50 | IncrementPosition() |
32 | 51 | Value() []byte |
33 | 52 | Total() int |
53 | Active() bool | |
54 | Enable() | |
55 | Disable() | |
34 | 56 | } |
35 | 57 | |
36 | 58 | //OutputProvider is responsible of providing output from the RunnerProvider |
61 | 83 | ContentType string `json:"content-type"` |
62 | 84 | RedirectLocation string `json:"redirectlocation"` |
63 | 85 | Url string `json:"url"` |
86 | Duration time.Duration `json:"duration"` | |
64 | 87 | ResultFile string `json:"resultfile"` |
65 | 88 | Host string `json:"host"` |
66 | 89 | HTMLColor string `json:"-"` |
35 | 35 | queuepos int |
36 | 36 | skipQueue bool |
37 | 37 | currentDepth int |
38 | calibMutex sync.Mutex | |
38 | 39 | pauseWg sync.WaitGroup |
39 | 40 | } |
40 | 41 | |
41 | 42 | type QueueJob struct { |
42 | 43 | Url string |
43 | 44 | depth int |
45 | req Request | |
44 | 46 | } |
45 | 47 | |
46 | 48 | func NewJob(conf *Config) *Job { |
106 | 108 | j.startTime = time.Now() |
107 | 109 | } |
108 | 110 | |
109 | // Add the default job to job queue | |
110 | j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0}) | |
111 | basereq := BaseRequest(j.Config) | |
112 | ||
113 | if j.Config.InputMode == "sniper" { | |
114 | // process multiple payload locations and create a queue job for each location | |
115 | reqs := SniperRequests(&basereq, j.Config.InputProviders[0].Template) | |
116 | for _, r := range reqs { | |
117 | j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: r}) | |
118 | } | |
119 | j.Total = j.Input.Total() * len(reqs) | |
120 | } else { | |
121 | // Add the default job to job queue | |
122 | j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0, req: BaseRequest(j.Config)}) | |
123 | j.Total = j.Input.Total() | |
124 | } | |
125 | ||
111 | 126 | rand.Seed(time.Now().UnixNano()) |
112 | j.Total = j.Input.Total() | |
113 | 127 | defer j.Stop() |
114 | 128 | |
115 | 129 | j.Running = true |
153 | 167 | func (j *Job) prepareQueueJob() { |
154 | 168 | j.Config.Url = j.queuejobs[j.queuepos].Url |
155 | 169 | j.currentDepth = j.queuejobs[j.queuepos].depth |
170 | ||
171 | //Find all keywords present in new queued job | |
172 | kws := j.Input.Keywords() | |
173 | found_kws := make([]string, 0) | |
174 | for _, k := range kws { | |
175 | if RequestContainsKeyword(j.queuejobs[j.queuepos].req, k) { | |
176 | found_kws = append(found_kws, k) | |
177 | } | |
178 | } | |
179 | //And activate / disable inputproviders as needed | |
180 | j.Input.ActivateKeywords(found_kws) | |
156 | 181 | j.queuepos += 1 |
157 | 182 | } |
158 | 183 | |
202 | 227 | wg.Add(1) |
203 | 228 | go j.runBackgroundTasks(&wg) |
204 | 229 | |
205 | // Print the base URL when starting a new recursion queue job | |
230 | // Print the base URL when starting a new recursion or sniper queue job | |
206 | 231 | if j.queuepos > 1 { |
207 | j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) | |
232 | if j.Config.InputMode == "sniper" { | |
233 | j.Output.Info(fmt.Sprintf("Starting queued sniper job (%d of %d) on target: %s", j.queuepos, len(j.queuejobs), j.Config.Url)) | |
234 | } else { | |
235 | j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) | |
236 | } | |
208 | 237 | } |
209 | 238 | |
210 | 239 | //Limiter blocks after reaching the buffer, ensuring limited concurrency |
296 | 325 | |
297 | 326 | func (j *Job) isMatch(resp Response) bool { |
298 | 327 | matched := false |
299 | for _, m := range j.Config.Matchers { | |
328 | var matchers map[string]FilterProvider | |
329 | var filters map[string]FilterProvider | |
330 | if j.Config.AutoCalibrationPerHost { | |
331 | filters = j.Config.MatcherManager.FiltersForDomain(HostURLFromRequest(*resp.Request)) | |
332 | } else { | |
333 | filters = j.Config.MatcherManager.GetFilters() | |
334 | } | |
335 | matchers = j.Config.MatcherManager.GetMatchers() | |
336 | for _, m := range matchers { | |
300 | 337 | match, err := m.Filter(&resp) |
301 | 338 | if err != nil { |
302 | 339 | continue |
303 | 340 | } |
304 | 341 | if match { |
305 | 342 | matched = true |
343 | } else if j.Config.MatcherMode == "and" { | |
344 | // we already know this isn't "and" match | |
345 | return false | |
346 | ||
306 | 347 | } |
307 | 348 | } |
308 | 349 | // The response was not matched, return before running filters |
309 | 350 | if !matched { |
310 | 351 | return false |
311 | 352 | } |
312 | for _, f := range j.Config.Filters { | |
353 | for _, f := range filters { | |
313 | 354 | fv, err := f.Filter(&resp) |
314 | 355 | if err != nil { |
315 | 356 | continue |
316 | 357 | } |
317 | 358 | if fv { |
318 | return false | |
319 | } | |
359 | // return false | |
360 | if j.Config.FilterMode == "or" { | |
361 | // return early, as filter matched | |
362 | return false | |
363 | } | |
364 | } else { | |
365 | if j.Config.FilterMode == "and" { | |
366 | // return early as not all filters matched in "and" mode | |
367 | return true | |
368 | } | |
369 | } | |
370 | } | |
371 | if len(filters) > 0 && j.Config.FilterMode == "and" { | |
372 | // we did not return early, so all filters were matched | |
373 | return false | |
320 | 374 | } |
321 | 375 | return true |
322 | 376 | } |
323 | 377 | |
324 | 378 | func (j *Job) runTask(input map[string][]byte, position int, retried bool) { |
325 | req, err := j.Runner.Prepare(input) | |
379 | basereq := j.queuejobs[j.queuepos-1].req | |
380 | req, err := j.Runner.Prepare(input, &basereq) | |
326 | 381 | req.Position = position |
327 | 382 | if err != nil { |
328 | 383 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err)) |
330 | 385 | log.Printf("%s", err) |
331 | 386 | return |
332 | 387 | } |
388 | ||
333 | 389 | resp, err := j.Runner.Execute(&req) |
334 | 390 | if err != nil { |
335 | 391 | if retried { |
355 | 411 | j.inc429() |
356 | 412 | } |
357 | 413 | } |
414 | j.pauseWg.Wait() | |
415 | ||
416 | // Handle autocalibration, must be done after the actual request to ensure sane value in req.Host | |
417 | _ = j.CalibrateIfNeeded(HostURLFromRequest(req), input) | |
418 | ||
358 | 419 | if j.isMatch(resp) { |
359 | 420 | // Re-send request through replay-proxy if needed |
360 | 421 | if j.ReplayRunner != nil { |
361 | replayreq, err := j.ReplayRunner.Prepare(input) | |
422 | replayreq, err := j.ReplayRunner.Prepare(input, &basereq) | |
362 | 423 | replayreq.Position = position |
363 | 424 | if err != nil { |
364 | 425 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing replayproxy request: %s\n", err)) |
369 | 430 | } |
370 | 431 | } |
371 | 432 | j.Output.Result(resp) |
433 | ||
372 | 434 | // Refresh the progress indicator as we printed something out |
373 | 435 | j.updateProgress() |
374 | 436 | if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" { |
386 | 448 | // Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob |
387 | 449 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { |
388 | 450 | recUrl := resp.Request.Url + "/" + "FUZZ" |
389 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} | |
451 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} | |
390 | 452 | j.queuejobs = append(j.queuejobs, newJob) |
391 | 453 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) |
392 | 454 | } else { |
404 | 466 | } |
405 | 467 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { |
406 | 468 | // We have yet to reach the maximum recursion depth |
407 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} | |
469 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1, req: RecursionRequest(j.Config, recUrl)} | |
408 | 470 | j.queuejobs = append(j.queuejobs, newJob) |
409 | 471 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) |
410 | 472 | } else { |
411 | 473 | j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true))) |
412 | 474 | } |
413 | } | |
414 | ||
415 | //CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests | |
416 | func (j *Job) CalibrateResponses() ([]Response, error) { | |
417 | cInputs := make([]string, 0) | |
418 | rand.Seed(time.Now().UnixNano()) | |
419 | if len(j.Config.AutoCalibrationStrings) < 1 { | |
420 | cInputs = append(cInputs, "admin"+RandomString(16)+"/") | |
421 | cInputs = append(cInputs, ".htaccess"+RandomString(16)) | |
422 | cInputs = append(cInputs, RandomString(16)+"/") | |
423 | cInputs = append(cInputs, RandomString(16)) | |
424 | } else { | |
425 | cInputs = append(cInputs, j.Config.AutoCalibrationStrings...) | |
426 | } | |
427 | ||
428 | results := make([]Response, 0) | |
429 | for _, input := range cInputs { | |
430 | inputs := make(map[string][]byte, len(j.Config.InputProviders)) | |
431 | for _, v := range j.Config.InputProviders { | |
432 | inputs[v.Keyword] = []byte(input) | |
433 | } | |
434 | ||
435 | req, err := j.Runner.Prepare(inputs) | |
436 | if err != nil { | |
437 | j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err)) | |
438 | j.incError() | |
439 | log.Printf("%s", err) | |
440 | return results, err | |
441 | } | |
442 | resp, err := j.Runner.Execute(&req) | |
443 | if err != nil { | |
444 | return results, err | |
445 | } | |
446 | ||
447 | // Only calibrate on responses that would be matched otherwise | |
448 | if j.isMatch(resp) { | |
449 | results = append(results, resp) | |
450 | } | |
451 | } | |
452 | return results, nil | |
453 | 475 | } |
454 | 476 | |
455 | 477 | // CheckStop stops the job if stopping conditions are met |
36 | 36 | RecursionDepth int |
37 | 37 | RecursionStrategy string |
38 | 38 | ReplayProxyURL string |
39 | SNI string | |
39 | 40 | Timeout int |
40 | 41 | URL string |
42 | Http2 bool | |
41 | 43 | } |
42 | 44 | |
43 | 45 | type GeneralOptions struct { |
44 | AutoCalibration bool | |
45 | AutoCalibrationStrings []string | |
46 | Colors bool | |
47 | ConfigFile string `toml:"-"` | |
48 | Delay string | |
49 | MaxTime int | |
50 | MaxTimeJob int | |
51 | Noninteractive bool | |
52 | Quiet bool | |
53 | Rate int | |
54 | ShowVersion bool `toml:"-"` | |
55 | StopOn403 bool | |
56 | StopOnAll bool | |
57 | StopOnErrors bool | |
58 | Threads int | |
59 | Verbose bool | |
46 | AutoCalibration bool | |
47 | AutoCalibrationKeyword string | |
48 | AutoCalibrationPerHost bool | |
49 | AutoCalibrationStrategy string | |
50 | AutoCalibrationStrings []string | |
51 | Colors bool | |
52 | ConfigFile string `toml:"-"` | |
53 | Delay string | |
54 | Json bool | |
55 | MaxTime int | |
56 | MaxTimeJob int | |
57 | Noninteractive bool | |
58 | Quiet bool | |
59 | Rate int | |
60 | ShowVersion bool `toml:"-"` | |
61 | StopOn403 bool | |
62 | StopOnAll bool | |
63 | StopOnErrors bool | |
64 | Threads int | |
65 | Verbose bool | |
60 | 66 | } |
61 | 67 | |
62 | 68 | type InputOptions struct { |
73 | 79 | } |
74 | 80 | |
75 | 81 | type OutputOptions struct { |
76 | DebugLog string | |
77 | OutputDirectory string | |
78 | OutputFile string | |
79 | OutputFormat string | |
80 | OutputCreateEmptyFile bool | |
82 | DebugLog string | |
83 | OutputDirectory string | |
84 | OutputFile string | |
85 | OutputFormat string | |
86 | OutputSkipEmptyFile bool | |
81 | 87 | } |
82 | 88 | |
83 | 89 | type FilterOptions struct { |
90 | Mode string | |
84 | 91 | Lines string |
85 | 92 | Regexp string |
86 | 93 | Size string |
87 | 94 | Status string |
95 | Time string | |
88 | 96 | Words string |
89 | 97 | } |
90 | 98 | |
91 | 99 | type MatcherOptions struct { |
100 | Mode string | |
92 | 101 | Lines string |
93 | 102 | Regexp string |
94 | 103 | Size string |
95 | 104 | Status string |
105 | Time string | |
96 | 106 | Words string |
97 | 107 | } |
98 | 108 | |
99 | 109 | //NewConfigOptions returns a newly created ConfigOptions struct with default values |
100 | 110 | func NewConfigOptions() *ConfigOptions { |
101 | 111 | c := &ConfigOptions{} |
112 | c.Filter.Mode = "or" | |
102 | 113 | c.Filter.Lines = "" |
103 | 114 | c.Filter.Regexp = "" |
104 | 115 | c.Filter.Size = "" |
105 | 116 | c.Filter.Status = "" |
117 | c.Filter.Time = "" | |
106 | 118 | c.Filter.Words = "" |
107 | 119 | c.General.AutoCalibration = false |
120 | c.General.AutoCalibrationKeyword = "FUZZ" | |
121 | c.General.AutoCalibrationStrategy = "basic" | |
108 | 122 | c.General.Colors = false |
109 | 123 | c.General.Delay = "" |
124 | c.General.Json = false | |
110 | 125 | c.General.MaxTime = 0 |
111 | 126 | c.General.MaxTimeJob = 0 |
112 | 127 | c.General.Noninteractive = false |
128 | 143 | c.HTTP.RecursionStrategy = "default" |
129 | 144 | c.HTTP.ReplayProxyURL = "" |
130 | 145 | c.HTTP.Timeout = 10 |
146 | c.HTTP.SNI = "" | |
131 | 147 | c.HTTP.URL = "" |
148 | c.HTTP.Http2 = false | |
132 | 149 | c.Input.DirSearchCompat = false |
133 | 150 | c.Input.Extensions = "" |
134 | 151 | c.Input.IgnoreWordlistComments = false |
136 | 153 | c.Input.InputNum = 100 |
137 | 154 | c.Input.Request = "" |
138 | 155 | c.Input.RequestProto = "https" |
156 | c.Matcher.Mode = "or" | |
139 | 157 | c.Matcher.Lines = "" |
140 | 158 | c.Matcher.Regexp = "" |
141 | 159 | c.Matcher.Size = "" |
142 | c.Matcher.Status = "200,204,301,302,307,401,403,405" | |
160 | c.Matcher.Status = "200,204,301,302,307,401,403,405,500" | |
161 | c.Matcher.Time = "" | |
143 | 162 | c.Matcher.Words = "" |
144 | 163 | c.Output.DebugLog = "" |
145 | 164 | c.Output.OutputDirectory = "" |
146 | 165 | c.Output.OutputFile = "" |
147 | 166 | c.Output.OutputFormat = "json" |
148 | c.Output.OutputCreateEmptyFile = false | |
167 | c.Output.OutputSkipEmptyFile = false | |
149 | 168 | return c |
150 | 169 | } |
151 | 170 | |
174 | 193 | } |
175 | 194 | |
176 | 195 | //Prepare inputproviders |
196 | conf.InputMode = parseOpts.Input.InputMode | |
197 | ||
198 | validmode := false | |
199 | for _, mode := range []string{"clusterbomb", "pitchfork", "sniper"} { | |
200 | if conf.InputMode == mode { | |
201 | validmode = true | |
202 | } | |
203 | } | |
204 | if !validmode { | |
205 | errs.Add(fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)) | |
206 | } | |
207 | ||
208 | template := "" | |
209 | // sniper mode needs some additional checking | |
210 | if conf.InputMode == "sniper" { | |
211 | template = "§" | |
212 | ||
213 | if len(parseOpts.Input.Wordlists) > 1 { | |
214 | errs.Add(fmt.Errorf("sniper mode only supports one wordlist")) | |
215 | } | |
216 | ||
217 | if len(parseOpts.Input.Inputcommands) > 1 { | |
218 | errs.Add(fmt.Errorf("sniper mode only supports one input command")) | |
219 | } | |
220 | } | |
221 | ||
177 | 222 | for _, v := range parseOpts.Input.Wordlists { |
178 | 223 | var wl []string |
179 | 224 | if runtime.GOOS == "windows" { |
198 | 243 | wl = strings.SplitN(v, ":", 2) |
199 | 244 | } |
200 | 245 | if len(wl) == 2 { |
246 | if conf.InputMode == "sniper" { | |
247 | errs.Add(fmt.Errorf("sniper mode does not support wordlist keywords")) | |
248 | } else { | |
249 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
250 | Name: "wordlist", | |
251 | Value: wl[0], | |
252 | Keyword: wl[1], | |
253 | }) | |
254 | } | |
255 | } else { | |
201 | 256 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ |
202 | Name: "wordlist", | |
203 | Value: wl[0], | |
204 | Keyword: wl[1], | |
257 | Name: "wordlist", | |
258 | Value: wl[0], | |
259 | Keyword: "FUZZ", | |
260 | Template: template, | |
205 | 261 | }) |
206 | } else { | |
207 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
208 | Name: "wordlist", | |
209 | Value: wl[0], | |
210 | Keyword: "FUZZ", | |
211 | }) | |
212 | } | |
213 | } | |
262 | } | |
263 | } | |
264 | ||
214 | 265 | for _, v := range parseOpts.Input.Inputcommands { |
215 | 266 | ic := strings.SplitN(v, ":", 2) |
216 | 267 | if len(ic) == 2 { |
268 | if conf.InputMode == "sniper" { | |
269 | errs.Add(fmt.Errorf("sniper mode does not support command keywords")) | |
270 | } else { | |
271 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
272 | Name: "command", | |
273 | Value: ic[0], | |
274 | Keyword: ic[1], | |
275 | }) | |
276 | conf.CommandKeywords = append(conf.CommandKeywords, ic[0]) | |
277 | } | |
278 | } else { | |
217 | 279 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ |
218 | Name: "command", | |
219 | Value: ic[0], | |
220 | Keyword: ic[1], | |
221 | }) | |
222 | conf.CommandKeywords = append(conf.CommandKeywords, ic[0]) | |
223 | } else { | |
224 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
225 | Name: "command", | |
226 | Value: ic[0], | |
227 | Keyword: "FUZZ", | |
280 | Name: "command", | |
281 | Value: ic[0], | |
282 | Keyword: "FUZZ", | |
283 | Template: template, | |
228 | 284 | }) |
229 | 285 | conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ") |
230 | 286 | } |
246 | 302 | //Prepare URL |
247 | 303 | if parseOpts.HTTP.URL != "" { |
248 | 304 | conf.Url = parseOpts.HTTP.URL |
305 | } | |
306 | ||
307 | // Prepare SNI | |
308 | if parseOpts.HTTP.SNI != "" { | |
309 | conf.SNI = parseOpts.HTTP.SNI | |
249 | 310 | } |
250 | 311 | |
251 | 312 | //Prepare headers and make canonical |
377 | 438 | conf.DirSearchCompat = parseOpts.Input.DirSearchCompat |
378 | 439 | conf.Colors = parseOpts.General.Colors |
379 | 440 | conf.InputNum = parseOpts.Input.InputNum |
380 | conf.InputMode = parseOpts.Input.InputMode | |
441 | ||
381 | 442 | conf.InputShell = parseOpts.Input.InputShell |
382 | 443 | conf.OutputFile = parseOpts.Output.OutputFile |
383 | 444 | conf.OutputDirectory = parseOpts.Output.OutputDirectory |
384 | conf.OutputCreateEmptyFile = parseOpts.Output.OutputCreateEmptyFile | |
445 | conf.OutputSkipEmptyFile = parseOpts.Output.OutputSkipEmptyFile | |
385 | 446 | conf.IgnoreBody = parseOpts.HTTP.IgnoreBody |
386 | 447 | conf.Quiet = parseOpts.General.Quiet |
387 | 448 | conf.StopOn403 = parseOpts.General.StopOn403 |
392 | 453 | conf.RecursionDepth = parseOpts.HTTP.RecursionDepth |
393 | 454 | conf.RecursionStrategy = parseOpts.HTTP.RecursionStrategy |
394 | 455 | conf.AutoCalibration = parseOpts.General.AutoCalibration |
456 | conf.AutoCalibrationPerHost = parseOpts.General.AutoCalibrationPerHost | |
457 | conf.AutoCalibrationStrategy = parseOpts.General.AutoCalibrationStrategy | |
395 | 458 | conf.Threads = parseOpts.General.Threads |
396 | 459 | conf.Timeout = parseOpts.HTTP.Timeout |
397 | 460 | conf.MaxTime = parseOpts.General.MaxTime |
398 | 461 | conf.MaxTimeJob = parseOpts.General.MaxTimeJob |
399 | 462 | conf.Noninteractive = parseOpts.General.Noninteractive |
400 | 463 | conf.Verbose = parseOpts.General.Verbose |
464 | conf.Json = parseOpts.General.Json | |
465 | conf.Http2 = parseOpts.HTTP.Http2 | |
466 | ||
467 | // Check that fmode and mmode have sane values | |
468 | valid_opmodes := []string{"and", "or"} | |
469 | fmode_found := false | |
470 | mmode_found := false | |
471 | for _, v := range valid_opmodes { | |
472 | if v == parseOpts.Filter.Mode { | |
473 | fmode_found = true | |
474 | } | |
475 | if v == parseOpts.Matcher.Mode { | |
476 | mmode_found = true | |
477 | } | |
478 | } | |
479 | if !fmode_found { | |
480 | errmsg := fmt.Sprintf("Unrecognized value for parameter fmode: %s, valid values are: and, or", parseOpts.Filter.Mode) | |
481 | errs.Add(fmt.Errorf(errmsg)) | |
482 | } | |
483 | if !mmode_found { | |
484 | errmsg := fmt.Sprintf("Unrecognized value for parameter mmode: %s, valid values are: and, or", parseOpts.Matcher.Mode) | |
485 | errs.Add(fmt.Errorf(errmsg)) | |
486 | } | |
487 | conf.FilterMode = parseOpts.Filter.Mode | |
488 | conf.MatcherMode = parseOpts.Matcher.Mode | |
489 | ||
490 | if conf.AutoCalibrationPerHost { | |
491 | // AutoCalibrationPerHost implies AutoCalibration | |
492 | conf.AutoCalibration = true | |
493 | } | |
401 | 494 | |
402 | 495 | // Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP |
403 | 496 | if len(conf.Data) > 0 && |
411 | 504 | conf.CommandLine = strings.Join(os.Args, " ") |
412 | 505 | |
413 | 506 | for _, provider := range conf.InputProviders { |
414 | if !keywordPresent(provider.Keyword, &conf) { | |
415 | errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword) | |
416 | errs.Add(fmt.Errorf(errmsg)) | |
507 | if provider.Template != "" { | |
508 | if !templatePresent(provider.Template, &conf) { | |
509 | errmsg := fmt.Sprintf("Template %s defined, but not found in pairs in headers, method, URL or POST data.", provider.Template) | |
510 | errs.Add(fmt.Errorf(errmsg)) | |
511 | } | |
512 | } else { | |
513 | if !keywordPresent(provider.Keyword, &conf) { | |
514 | errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword) | |
515 | errs.Add(fmt.Errorf(errmsg)) | |
516 | } | |
417 | 517 | } |
418 | 518 | } |
419 | 519 | |
424 | 524 | errs.Add(fmt.Errorf(errmsg)) |
425 | 525 | } |
426 | 526 | } |
527 | ||
528 | // Make verbose mutually exclusive with json | |
529 | if parseOpts.General.Verbose && parseOpts.General.Json { | |
530 | errs.Add(fmt.Errorf("Cannot have -json and -v")) | |
531 | } | |
532 | ||
427 | 533 | return &conf, errs.ErrorOrNil() |
428 | 534 | } |
429 | 535 | |
489 | 595 | conf.Data = string(b) |
490 | 596 | |
491 | 597 | // Remove newline (typically added by the editor) at the end of the file |
598 | //nolint:gosimple // we specifically want to remove just a single newline, not all of them | |
492 | 599 | if strings.HasSuffix(conf.Data, "\r\n") { |
493 | 600 | conf.Data = conf.Data[:len(conf.Data)-2] |
494 | 601 | } else if strings.HasSuffix(conf.Data, "\n") { |
517 | 624 | } |
518 | 625 | } |
519 | 626 | return false |
627 | } | |
628 | ||
629 | func templatePresent(template string, conf *Config) bool { | |
630 | // Search for input location identifiers, these must exist in pairs | |
631 | sane := false | |
632 | ||
633 | if c := strings.Count(conf.Method, template); c > 0 { | |
634 | if c%2 != 0 { | |
635 | return false | |
636 | } | |
637 | sane = true | |
638 | } | |
639 | if c := strings.Count(conf.Url, template); c > 0 { | |
640 | if c%2 != 0 { | |
641 | return false | |
642 | } | |
643 | sane = true | |
644 | } | |
645 | if c := strings.Count(conf.Data, template); c > 0 { | |
646 | if c%2 != 0 { | |
647 | return false | |
648 | } | |
649 | sane = true | |
650 | } | |
651 | for k, v := range conf.Headers { | |
652 | if c := strings.Count(k, template); c > 0 { | |
653 | if c%2 != 0 { | |
654 | return false | |
655 | } | |
656 | sane = true | |
657 | } | |
658 | if c := strings.Count(v, template); c > 0 { | |
659 | if c%2 != 0 { | |
660 | return false | |
661 | } | |
662 | sane = true | |
663 | } | |
664 | } | |
665 | ||
666 | return sane | |
520 | 667 | } |
521 | 668 | |
522 | 669 | func ReadConfig(configFile string) (*ConfigOptions, error) { |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "testing" | |
4 | ) | |
5 | ||
// TestTemplatePresent validates templatePresent against configs whose method,
// URL, data, header values and header keys contain balanced or unbalanced
// template markers. NOTE: the headers map is shared between all the Config
// values below and is mutated mid-test, so the order of the checks matters.
func TestTemplatePresent(t *testing.T) {
	template := "§"

	headers := make(map[string]string)
	headers["foo"] = "§bar§"
	headers["omg"] = "bbq"
	headers["§world§"] = "Ooo"

	// All fields contain only balanced marker pairs.
	goodConf := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§",
		Method: "PO§ST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true",
	}

	if !templatePresent(template, &goodConf) {
		t.Errorf("Expected-good config failed validation")
	}

	// Method has a single (unbalanced) marker.
	badConfMethod := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§",
		Method: "POST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§",
	}

	if templatePresent(template, &badConfMethod) {
		t.Errorf("Expected-bad config (Method) failed validation")
	}

	// URL has an odd number of markers ("order[0§]").
	badConfURL := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[0§]=§foo§",
		Method: "§POST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=§true§",
	}

	if templatePresent(template, &badConfURL) {
		t.Errorf("Expected-bad config (URL) failed validation")
	}

	// Data has an odd number of markers ("§veil" is never closed).
	badConfData := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§",
		Method: "§POST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil of §static§ and reach in to the source of §all§ being?&commit=§true§",
	}

	if templatePresent(template, &badConfData) {
		t.Errorf("Expected-bad config (Data) failed validation")
	}

	// Mutate the shared map: this header VALUE now has an unbalanced marker.
	headers["kingdom"] = "§candy"

	badConfHeaderValue := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§",
		Method: "PO§ST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true",
	}

	if templatePresent(template, &badConfHeaderValue) {
		t.Errorf("Expected-bad config (Header value) failed validation")
	}

	// Restore the value and instead add a header KEY with an unbalanced marker.
	headers["kingdom"] = "candy"
	headers["§kingdom"] = "candy"

	badConfHeaderKey := Config{
		Url: "https://example.com/fooo/bar?test=§value§&order[§0§]=§foo§",
		Method: "PO§ST§",
		Headers: headers,
		Data: "line=Can we pull back the §veil§ of §static§ and reach in to the source of §all§ being?&commit=true",
	}

	if templatePresent(template, &badConfHeaderKey) {
		t.Errorf("Expected-bad config (Header key) failed validation")
	}
}
0 | 0 | package ffuf |
1 | ||
2 | import ( | |
3 | "strings" | |
4 | ) | |
1 | 5 | |
2 | 6 | // Request holds the meaningful data that is passed for runner for making the query |
3 | 7 | type Request struct { |
18 | 22 | req.Headers = make(map[string]string) |
19 | 23 | return req |
20 | 24 | } |
25 | ||
26 | // BaseRequest returns a base request struct populated from the main config | |
27 | func BaseRequest(conf *Config) Request { | |
28 | req := NewRequest(conf) | |
29 | req.Headers = conf.Headers | |
30 | req.Data = []byte(conf.Data) | |
31 | return req | |
32 | } | |
33 | ||
34 | // RecursionRequest returns a base request for a recursion target | |
35 | func RecursionRequest(conf *Config, path string) Request { | |
36 | r := BaseRequest(conf) | |
37 | r.Url = path | |
38 | return r | |
39 | } | |
40 | ||
41 | // CopyRequest performs a deep copy of a request and returns a new struct | |
42 | func CopyRequest(basereq *Request) Request { | |
43 | var req Request | |
44 | req.Method = basereq.Method | |
45 | req.Host = basereq.Host | |
46 | req.Url = basereq.Url | |
47 | ||
48 | req.Headers = make(map[string]string, len(basereq.Headers)) | |
49 | for h, v := range basereq.Headers { | |
50 | req.Headers[h] = v | |
51 | } | |
52 | ||
53 | req.Data = make([]byte, len(basereq.Data)) | |
54 | copy(req.Data, basereq.Data) | |
55 | ||
56 | if len(basereq.Input) > 0 { | |
57 | req.Input = make(map[string][]byte, len(basereq.Input)) | |
58 | for k, v := range basereq.Input { | |
59 | req.Input[k] = v | |
60 | } | |
61 | } | |
62 | ||
63 | req.Position = basereq.Position | |
64 | req.Raw = basereq.Raw | |
65 | ||
66 | return req | |
67 | } | |
68 | ||
// SniperRequests returns an array of requests, each with one of the templated locations replaced by a keyword
// For every balanced pair of template markers in the method, URL, POST data,
// header keys and header values, one request is produced where that single
// location is replaced with "FUZZ" and every remaining marker is stripped via
// scrubTemplates. Fields with an odd (unbalanced) marker count are skipped
// silently. Note: header handling iterates a map, so the order of the
// returned requests is not deterministic across runs.
func SniperRequests(basereq *Request, template string) []Request {
	var reqs []Request
	keyword := "FUZZ"

	// Search for input location identifiers, these must exist in pairs
	if c := strings.Count(basereq.Method, template); c > 0 {
		if c%2 == 0 {
			// tokens holds rune offsets of markers; consecutive pairs
			// (tokens[i], tokens[i+1]) delimit one injection point.
			tokens := templateLocations(template, basereq.Method)

			for i := 0; i < len(tokens); i = i + 2 {
				newreq := CopyRequest(basereq)
				newreq.Method = injectKeyword(basereq.Method, keyword, tokens[i], tokens[i+1])
				scrubTemplates(&newreq, template)
				reqs = append(reqs, newreq)
			}
		}
	}

	if c := strings.Count(basereq.Url, template); c > 0 {
		if c%2 == 0 {
			tokens := templateLocations(template, basereq.Url)

			for i := 0; i < len(tokens); i = i + 2 {
				newreq := CopyRequest(basereq)
				newreq.Url = injectKeyword(basereq.Url, keyword, tokens[i], tokens[i+1])
				scrubTemplates(&newreq, template)
				reqs = append(reqs, newreq)
			}
		}
	}

	// POST data is []byte; work on a string view for counting/injection.
	data := string(basereq.Data)
	if c := strings.Count(data, template); c > 0 {
		if c%2 == 0 {
			tokens := templateLocations(template, data)

			for i := 0; i < len(tokens); i = i + 2 {
				newreq := CopyRequest(basereq)
				newreq.Data = []byte(injectKeyword(data, keyword, tokens[i], tokens[i+1]))
				scrubTemplates(&newreq, template)
				reqs = append(reqs, newreq)
			}
		}
	}

	for k, v := range basereq.Headers {
		// Templated header key: inject into the key, then drop the original
		// (still templated) key from the copied map.
		if c := strings.Count(k, template); c > 0 {
			if c%2 == 0 {
				tokens := templateLocations(template, k)

				for i := 0; i < len(tokens); i = i + 2 {
					newreq := CopyRequest(basereq)
					newreq.Headers[injectKeyword(k, keyword, tokens[i], tokens[i+1])] = v
					delete(newreq.Headers, k)
					scrubTemplates(&newreq, template)
					reqs = append(reqs, newreq)
				}
			}
		}
		// Templated header value: replace the value under the same key.
		if c := strings.Count(v, template); c > 0 {
			if c%2 == 0 {
				tokens := templateLocations(template, v)

				for i := 0; i < len(tokens); i = i + 2 {
					newreq := CopyRequest(basereq)
					newreq.Headers[k] = injectKeyword(v, keyword, tokens[i], tokens[i+1])
					scrubTemplates(&newreq, template)
					reqs = append(reqs, newreq)
				}
			}
		}
	}

	return reqs
}
145 | ||
// templateLocations returns the offsets of template markers in input. Offsets
// are rune indexes (not byte offsets) so they can safely index the []rune
// form of input, as done by injectKeyword. Only the first rune of template is
// significant. An empty template yields no locations.
func templateLocations(template string, input string) []int {
	var tokens []int

	trunes := []rune(template)
	if len(trunes) == 0 {
		// Guard: indexing trunes[0] below would panic on an empty template.
		return tokens
	}
	// Hoist the marker rune out of the loop instead of re-converting the
	// template string on every iteration.
	marker := trunes[0]

	for k, r := range []rune(input) {
		if r == marker {
			tokens = append(tokens, k)
		}
	}

	return tokens
}
158 | ||
// injectKeyword takes a string, a keyword, and a start/end offset. The data between
// the start/end offset in string is removed, and replaced by keyword. Offsets
// are rune indexes (as produced by templateLocations) and are inclusive on
// both ends. Nonsensical offsets return the input unchanged.
func injectKeyword(input string, keyword string, startOffset int, endOffset int) string {
	inputslice := []rune(input)

	// Sanity checking: validate against the RUNE length, because the offsets
	// index the rune slice below. The previous byte-length check let
	// multibyte inputs (and endOffset == len for ASCII) slip through and
	// panic on the slice expression. Negative offsets are rejected too.
	if startOffset < 0 || startOffset > endOffset || endOffset >= len(inputslice) {
		return input
	}

	keywordslice := []rune(keyword)

	prefix := inputslice[:startOffset]
	suffix := inputslice[endOffset+1:]

	result := append(prefix, keywordslice...)
	result = append(result, suffix...)

	return string(result)
}
179 | ||
180 | // scrubTemplates removes all template (§) strings from the request struct | |
181 | func scrubTemplates(req *Request, template string) { | |
182 | req.Method = strings.Join(strings.Split(req.Method, template), "") | |
183 | req.Url = strings.Join(strings.Split(req.Url, template), "") | |
184 | req.Data = []byte(strings.Join(strings.Split(string(req.Data), template), "")) | |
185 | ||
186 | for k, v := range req.Headers { | |
187 | if c := strings.Count(k, template); c > 0 { | |
188 | if c%2 == 0 { | |
189 | delete(req.Headers, k) | |
190 | req.Headers[strings.Join(strings.Split(k, template), "")] = v | |
191 | } | |
192 | } | |
193 | if c := strings.Count(v, template); c > 0 { | |
194 | if c%2 == 0 { | |
195 | req.Headers[k] = strings.Join(strings.Split(v, template), "") | |
196 | } | |
197 | } | |
198 | } | |
199 | } |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "reflect" | |
4 | "testing" | |
5 | ) | |
6 | ||
7 | func TestBaseRequest(t *testing.T) { | |
8 | headers := make(map[string]string) | |
9 | headers["foo"] = "bar" | |
10 | headers["baz"] = "wibble" | |
11 | headers["Content-Type"] = "application/json" | |
12 | ||
13 | data := "{\"quote\":\"I'll still be here tomorrow to high five you yesterday, my friend. Peace.\"}" | |
14 | ||
15 | expectedreq := Request{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: []byte(data)} | |
16 | config := Config{Method: "POST", Url: "http://example.com/aaaa", Headers: headers, Data: data} | |
17 | basereq := BaseRequest(&config) | |
18 | ||
19 | if !reflect.DeepEqual(basereq, expectedreq) { | |
20 | t.Errorf("BaseRequest does not return a struct with expected values") | |
21 | } | |
22 | ||
23 | } | |
24 | ||
25 | func TestCopyRequest(t *testing.T) { | |
26 | headers := make(map[string]string) | |
27 | headers["foo"] = "bar" | |
28 | headers["omg"] = "bbq" | |
29 | ||
30 | data := "line=Is+that+where+creativity+comes+from?+From+sad+biz?" | |
31 | ||
32 | input := make(map[string][]byte) | |
33 | input["matthew"] = []byte("If you are the head that floats atop the §ziggurat§, then the stairs that lead to you must be infinite.") | |
34 | ||
35 | basereq := Request{Method: "POST", | |
36 | Host: "testhost.local", | |
37 | Url: "http://example.com/aaaa", | |
38 | Headers: headers, | |
39 | Data: []byte(data), | |
40 | Input: input, | |
41 | Position: 2, | |
42 | Raw: "We're not oil and water, we're oil and vinegar! It's good. It's yummy.", | |
43 | } | |
44 | ||
45 | copiedreq := CopyRequest(&basereq) | |
46 | ||
47 | if !reflect.DeepEqual(basereq, copiedreq) { | |
48 | t.Errorf("CopyRequest does not return an equal struct") | |
49 | } | |
50 | } | |
51 | ||
52 | func TestSniperRequests(t *testing.T) { | |
53 | headers := make(map[string]string) | |
54 | headers["foo"] = "§bar§" | |
55 | headers["§omg§"] = "bbq" | |
56 | ||
57 | testreq := Request{ | |
58 | Method: "§POST§", | |
59 | Url: "http://example.com/aaaa?param=§lemony§", | |
60 | Headers: headers, | |
61 | Data: []byte("line=§yo yo, it's grease§"), | |
62 | } | |
63 | ||
64 | requests := SniperRequests(&testreq, "§") | |
65 | ||
66 | if len(requests) != 5 { | |
67 | t.Errorf("SniperRequests returned an incorrect number of requests") | |
68 | } | |
69 | ||
70 | headers = make(map[string]string) | |
71 | headers["foo"] = "bar" | |
72 | headers["omg"] = "bbq" | |
73 | ||
74 | var expected Request | |
75 | expected = Request{ // Method | |
76 | Method: "FUZZ", | |
77 | Url: "http://example.com/aaaa?param=lemony", | |
78 | Headers: headers, | |
79 | Data: []byte("line=yo yo, it's grease"), | |
80 | } | |
81 | ||
82 | pass := false | |
83 | for _, req := range requests { | |
84 | if reflect.DeepEqual(req, expected) { | |
85 | pass = true | |
86 | } | |
87 | } | |
88 | ||
89 | if !pass { | |
90 | t.Errorf("SniperRequests does not return expected values (Method)") | |
91 | } | |
92 | ||
93 | expected = Request{ // URL | |
94 | Method: "POST", | |
95 | Url: "http://example.com/aaaa?param=FUZZ", | |
96 | Headers: headers, | |
97 | Data: []byte("line=yo yo, it's grease"), | |
98 | } | |
99 | ||
100 | pass = false | |
101 | for _, req := range requests { | |
102 | if reflect.DeepEqual(req, expected) { | |
103 | pass = true | |
104 | } | |
105 | } | |
106 | ||
107 | if !pass { | |
108 | t.Errorf("SniperRequests does not return expected values (Url)") | |
109 | } | |
110 | ||
111 | expected = Request{ // Data | |
112 | Method: "POST", | |
113 | Url: "http://example.com/aaaa?param=lemony", | |
114 | Headers: headers, | |
115 | Data: []byte("line=FUZZ"), | |
116 | } | |
117 | ||
118 | pass = false | |
119 | for _, req := range requests { | |
120 | if reflect.DeepEqual(req, expected) { | |
121 | pass = true | |
122 | } | |
123 | } | |
124 | ||
125 | if !pass { | |
126 | t.Errorf("SniperRequests does not return expected values (Data)") | |
127 | } | |
128 | ||
129 | headers = make(map[string]string) | |
130 | headers["foo"] = "FUZZ" | |
131 | headers["omg"] = "bbq" | |
132 | ||
133 | expected = Request{ // Header value | |
134 | Method: "POST", | |
135 | Url: "http://example.com/aaaa?param=lemony", | |
136 | Headers: headers, | |
137 | Data: []byte("line=yo yo, it's grease"), | |
138 | } | |
139 | ||
140 | pass = false | |
141 | for _, req := range requests { | |
142 | if reflect.DeepEqual(req, expected) { | |
143 | pass = true | |
144 | } | |
145 | } | |
146 | ||
147 | if !pass { | |
148 | t.Errorf("SniperRequests does not return expected values (Header value)") | |
149 | } | |
150 | ||
151 | headers = make(map[string]string) | |
152 | headers["foo"] = "bar" | |
153 | headers["FUZZ"] = "bbq" | |
154 | ||
155 | expected = Request{ // Header key | |
156 | Method: "POST", | |
157 | Url: "http://example.com/aaaa?param=lemony", | |
158 | Headers: headers, | |
159 | Data: []byte("line=yo yo, it's grease"), | |
160 | } | |
161 | ||
162 | pass = false | |
163 | for _, req := range requests { | |
164 | if reflect.DeepEqual(req, expected) { | |
165 | pass = true | |
166 | } | |
167 | } | |
168 | ||
169 | if !pass { | |
170 | t.Errorf("SniperRequests does not return expected values (Header key)") | |
171 | } | |
172 | ||
173 | } | |
174 | ||
175 | func TestTemplateLocations(t *testing.T) { | |
176 | test := "this is my 1§template locator§ test" | |
177 | arr := templateLocations("§", test) | |
178 | expected := []int{12, 29} | |
179 | if !reflect.DeepEqual(arr, expected) { | |
180 | t.Errorf("templateLocations does not return expected values") | |
181 | } | |
182 | ||
183 | test2 := "§template locator§" | |
184 | arr = templateLocations("§", test2) | |
185 | expected = []int{0, 17} | |
186 | if !reflect.DeepEqual(arr, expected) { | |
187 | t.Errorf("templateLocations does not return expected values") | |
188 | } | |
189 | ||
190 | if len(templateLocations("§", "te§st2")) != 1 { | |
191 | t.Errorf("templateLocations does not return expected values") | |
192 | } | |
193 | } | |
194 | ||
195 | func TestInjectKeyword(t *testing.T) { | |
196 | input := "§Greetings, creator§" | |
197 | offsetTuple := templateLocations("§", input) | |
198 | expected := "FUZZ" | |
199 | ||
200 | result := injectKeyword(input, "FUZZ", offsetTuple[0], offsetTuple[1]) | |
201 | if result != expected { | |
202 | t.Errorf("injectKeyword returned unexpected result: " + result) | |
203 | } | |
204 | ||
205 | if injectKeyword(input, "FUZZ", -32, 44) != input { | |
206 | t.Errorf("injectKeyword offset validation failed") | |
207 | } | |
208 | ||
209 | if injectKeyword(input, "FUZZ", 12, 2) != input { | |
210 | t.Errorf("injectKeyword offset validation failed") | |
211 | } | |
212 | ||
213 | if injectKeyword(input, "FUZZ", 0, 25) != input { | |
214 | t.Errorf("injectKeyword offset validation failed") | |
215 | } | |
216 | ||
217 | } | |
218 | ||
219 | func TestScrubTemplates(t *testing.T) { | |
220 | headers := make(map[string]string) | |
221 | headers["foo"] = "§bar§" | |
222 | headers["§omg§"] = "bbq" | |
223 | ||
224 | testreq := Request{Method: "§POST§", | |
225 | Url: "http://example.com/aaaa?param=§lemony§", | |
226 | Headers: headers, | |
227 | Data: []byte("line=§yo yo, it's grease§"), | |
228 | } | |
229 | ||
230 | headers = make(map[string]string) | |
231 | headers["foo"] = "bar" | |
232 | headers["omg"] = "bbq" | |
233 | ||
234 | expectedreq := Request{Method: "POST", | |
235 | Url: "http://example.com/aaaa?param=lemony", | |
236 | Headers: headers, | |
237 | Data: []byte("line=yo yo, it's grease"), | |
238 | } | |
239 | ||
240 | scrubTemplates(&testreq, "§") | |
241 | ||
242 | if !reflect.DeepEqual(testreq, expectedreq) { | |
243 | t.Errorf("scrubTemplates does not return expected values") | |
244 | } | |
245 | } |
2 | 2 | import ( |
3 | 3 | "net/http" |
4 | 4 | "net/url" |
5 | "time" | |
5 | 6 | ) |
6 | 7 | |
7 | 8 | // Response struct holds the meaningful data returned from request and is meant for passing to filters |
17 | 18 | Request *Request |
18 | 19 | Raw string |
19 | 20 | ResultFile string |
21 | Time time.Duration | |
20 | 22 | } |
21 | 23 | |
22 | 24 | // GetRedirectLocation returns the redirect location for a 3xx redirect HTTP response |
40 | 42 | if err != nil { |
41 | 43 | return redirectLocation |
42 | 44 | } |
43 | redirectLocation = baseUrl.ResolveReference(redirectUrl).String() | |
45 | if redirectUrl.IsAbs() && UrlEqual(redirectUrl, baseUrl) { | |
46 | redirectLocation = redirectUrl.Scheme + "://" + | |
47 | baseUrl.Host + redirectUrl.Path | |
48 | } else { | |
49 | redirectLocation = baseUrl.ResolveReference(redirectUrl).String() | |
50 | } | |
44 | 51 | } |
45 | 52 | |
46 | 53 | return redirectLocation |
54 | } | |
55 | ||
56 | func UrlEqual(url1, url2 *url.URL) bool { | |
57 | if url1.Hostname() != url2.Hostname() { | |
58 | return false | |
59 | } | |
60 | if url1.Scheme != url2.Scheme { | |
61 | return false | |
62 | } | |
63 | p1, p2 := getUrlPort(url1), getUrlPort(url2) | |
64 | return p1 == p2 | |
65 | } | |
66 | ||
67 | func getUrlPort(url *url.URL) string { | |
68 | var portMap = map[string]string{ | |
69 | "http": "80", | |
70 | "https": "443", | |
71 | } | |
72 | p := url.Port() | |
73 | if p == "" { | |
74 | p = portMap[url.Scheme] | |
75 | } | |
76 | return p | |
47 | 77 | } |
48 | 78 | |
49 | 79 | func NewResponse(httpresp *http.Response, req *Request) Response { |
2 | 2 | import ( |
3 | 3 | "fmt" |
4 | 4 | "math/rand" |
5 | "net/url" | |
5 | 6 | "os" |
7 | "strings" | |
6 | 8 | ) |
7 | 9 | |
8 | 10 | //used for random string generation in calibration function |
42 | 44 | return !md.IsDir() |
43 | 45 | } |
44 | 46 | |
47 | //RequestContainsKeyword checks if a keyword is present in any field of a request | |
48 | func RequestContainsKeyword(req Request, kw string) bool { | |
49 | if strings.Contains(req.Host, kw) { | |
50 | return true | |
51 | } | |
52 | if strings.Contains(req.Url, kw) { | |
53 | return true | |
54 | } | |
55 | if strings.Contains(req.Method, kw) { | |
56 | return true | |
57 | } | |
58 | if strings.Contains(string(req.Data), kw) { | |
59 | return true | |
60 | } | |
61 | for k, v := range req.Headers { | |
62 | if strings.Contains(k, kw) || strings.Contains(v, kw) { | |
63 | return true | |
64 | } | |
65 | } | |
66 | return false | |
67 | } | |
68 | ||
69 | //HostURLFromRequest gets a host + path without the filename or last part of the URL path | |
70 | func HostURLFromRequest(req Request) string { | |
71 | u, _ := url.Parse(req.Url) | |
72 | u.Host = req.Host | |
73 | pathparts := strings.Split(u.Path, "/") | |
74 | trimpath := strings.TrimSpace(strings.Join(pathparts[:len(pathparts)-1], "/")) | |
75 | return u.Host + trimpath | |
76 | } | |
77 | ||
45 | 78 | //Version returns the ffuf version string |
46 | 79 | func Version() string { |
47 | 80 | return fmt.Sprintf("%s%s", VERSION, VERSION_APPENDIX) |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "math/rand" | |
4 | "testing" | |
5 | ) | |
6 | ||
7 | func TestRandomString(t *testing.T) { | |
8 | length := 1 + rand.Intn(65535) | |
9 | str := RandomString(length) | |
10 | ||
11 | if len(str) != length { | |
12 | t.Errorf("Length of generated string was %d, was expecting %d", len(str), length) | |
13 | } | |
14 | } | |
15 | ||
16 | func TestUniqStringSlice(t *testing.T) { | |
17 | slice := []string{"foo", "foo", "bar", "baz", "baz", "foo", "baz", "baz", "foo"} | |
18 | expectedLength := 3 | |
19 | ||
20 | uniqSlice := UniqStringSlice(slice) | |
21 | ||
22 | if len(uniqSlice) != expectedLength { | |
23 | t.Errorf("Length of slice was %d, was expecting %d", len(uniqSlice), expectedLength) | |
24 | } | |
25 | } |
1 | 1 | |
2 | 2 | var ( |
3 | 3 | //VERSION holds the current version number |
4 | VERSION = "1.3.1" | |
4 | VERSION = "1.5.0" | |
5 | 5 | //VERSION_APPENDIX holds additional version definition |
6 | 6 | VERSION_APPENDIX = "-exclusive-dev" |
7 | 7 | ) |
0 | 0 | package filter |
1 | 1 | |
2 | 2 | import ( |
3 | "flag" | |
4 | 3 | "fmt" |
5 | "strconv" | |
6 | "strings" | |
4 | "github.com/ffuf/ffuf/pkg/ffuf" | |
5 | "sync" | |
6 | ) | |
7 | 7 | |
8 | "github.com/ffuf/ffuf/pkg/ffuf" | |
9 | ) | |
// MatcherManager handles both filters and matchers.
type MatcherManager struct {
	// IsCalibrated marks whether global auto-calibration has been run.
	IsCalibrated bool
	// Mutex guards concurrent mutation of the filter/matcher maps.
	Mutex sync.Mutex
	// Matchers maps matcher type name (e.g. "status") to its provider.
	Matchers map[string]ffuf.FilterProvider
	// Filters maps filter type name to its provider.
	Filters map[string]ffuf.FilterProvider
	// PerDomainFilters holds per-host filter overrides keyed by domain.
	PerDomainFilters map[string]*PerDomainFilter
}

// PerDomainFilter holds filter state scoped to a single domain.
type PerDomainFilter struct {
	// IsCalibrated marks whether this domain has been auto-calibrated.
	IsCalibrated bool
	// Filters maps filter type name to its provider for this domain.
	Filters map[string]ffuf.FilterProvider
}

// NewPerDomainFilter creates a per-domain filter seeded with the given
// (global) filter map. NOTE(review): the map is stored by reference, not
// copied — confirm that sharing with the global filters is intentional.
func NewPerDomainFilter(globfilters map[string]ffuf.FilterProvider) *PerDomainFilter {
	return &PerDomainFilter{IsCalibrated: false, Filters: globfilters}
}

// SetCalibrated sets the calibration state of this per-domain filter.
func (p *PerDomainFilter) SetCalibrated(value bool) {
	p.IsCalibrated = value
}

// NewMatcherManager returns an empty MatcherManager satisfying the
// ffuf.MatcherManager interface.
func NewMatcherManager() ffuf.MatcherManager {
	return &MatcherManager{
		IsCalibrated: false,
		Matchers: make(map[string]ffuf.FilterProvider),
		Filters: make(map[string]ffuf.FilterProvider),
		PerDomainFilters: make(map[string]*PerDomainFilter),
	}
}

// SetCalibrated sets the global calibration state.
func (f *MatcherManager) SetCalibrated(value bool) {
	f.IsCalibrated = value
}

// SetCalibratedForHost sets the calibration state for one host. When no
// per-domain filter exists yet, a new one is created from the global filters
// and its IsCalibrated is set to true (regardless of the value argument).
func (f *MatcherManager) SetCalibratedForHost(host string, value bool) {
	if f.PerDomainFilters[host] != nil {
		f.PerDomainFilters[host].IsCalibrated = value
	} else {
		newFilter := NewPerDomainFilter(f.Filters)
		newFilter.IsCalibrated = true
		f.PerDomainFilters[host] = newFilter
	}
}
10 | 52 | |
11 | 53 | func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) { |
12 | 54 | if name == "status" { |
24 | 66 | if name == "regexp" { |
25 | 67 | return NewRegexpFilter(value) |
26 | 68 | } |
69 | if name == "time" { | |
70 | return NewTimeFilter(value) | |
71 | } | |
27 | 72 | return nil, fmt.Errorf("Could not create filter with name %s", name) |
28 | 73 | } |
29 | 74 | |
30 | //AddFilter adds a new filter to Config | |
31 | func AddFilter(conf *ffuf.Config, name string, option string) error { | |
75 | //AddFilter adds a new filter to MatcherManager | |
76 | func (f *MatcherManager) AddFilter(name string, option string, replace bool) error { | |
77 | f.Mutex.Lock() | |
78 | defer f.Mutex.Unlock() | |
32 | 79 | newf, err := NewFilterByName(name, option) |
33 | 80 | if err == nil { |
34 | 81 | // valid filter create or append |
35 | if conf.Filters[name] == nil { | |
36 | conf.Filters[name] = newf | |
82 | if f.Filters[name] == nil || replace { | |
83 | f.Filters[name] = newf | |
37 | 84 | } else { |
38 | newoption := conf.Filters[name].Repr() + "," + option | |
85 | newoption := f.Filters[name].Repr() + "," + option | |
39 | 86 | newerf, err := NewFilterByName(name, newoption) |
40 | 87 | if err == nil { |
41 | conf.Filters[name] = newerf | |
88 | f.Filters[name] = newerf | |
42 | 89 | } |
43 | 90 | } |
44 | 91 | } |
45 | 92 | return err |
46 | 93 | } |
47 | 94 | |
95 | //AddPerDomainFilter adds a new filter to PerDomainFilter configuration | |
96 | func (f *MatcherManager) AddPerDomainFilter(domain string, name string, option string) error { | |
97 | f.Mutex.Lock() | |
98 | defer f.Mutex.Unlock() | |
99 | var pdFilters *PerDomainFilter | |
100 | if filter, ok := f.PerDomainFilters[domain]; ok { | |
101 | pdFilters = filter | |
102 | } else { | |
103 | pdFilters = NewPerDomainFilter(f.Filters) | |
104 | } | |
105 | newf, err := NewFilterByName(name, option) | |
106 | if err == nil { | |
107 | // valid filter create or append | |
108 | if pdFilters.Filters[name] == nil { | |
109 | pdFilters.Filters[name] = newf | |
110 | } else { | |
111 | newoption := pdFilters.Filters[name].Repr() + "," + option | |
112 | newerf, err := NewFilterByName(name, newoption) | |
113 | if err == nil { | |
114 | pdFilters.Filters[name] = newerf | |
115 | } | |
116 | } | |
117 | } | |
118 | f.PerDomainFilters[domain] = pdFilters | |
119 | return err | |
120 | } | |
121 | ||
48 | 122 | //RemoveFilter removes a filter of a given type |
49 | func RemoveFilter(conf *ffuf.Config, name string) { | |
50 | delete(conf.Filters, name) | |
123 | func (f *MatcherManager) RemoveFilter(name string) { | |
124 | f.Mutex.Lock() | |
125 | defer f.Mutex.Unlock() | |
126 | delete(f.Filters, name) | |
51 | 127 | } |
52 | 128 | |
53 | 129 | //AddMatcher adds a new matcher to Config |
54 | func AddMatcher(conf *ffuf.Config, name string, option string) error { | |
130 | func (f *MatcherManager) AddMatcher(name string, option string) error { | |
131 | f.Mutex.Lock() | |
132 | defer f.Mutex.Unlock() | |
55 | 133 | newf, err := NewFilterByName(name, option) |
56 | 134 | if err == nil { |
57 | conf.Matchers[name] = newf | |
135 | // valid filter create or append | |
136 | if f.Matchers[name] == nil { | |
137 | f.Matchers[name] = newf | |
138 | } else { | |
139 | newoption := f.Matchers[name].Repr() + "," + option | |
140 | newerf, err := NewFilterByName(name, newoption) | |
141 | if err == nil { | |
142 | f.Matchers[name] = newerf | |
143 | } | |
144 | } | |
58 | 145 | } |
59 | 146 | return err |
60 | 147 | } |
61 | 148 | |
62 | //CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and acting accordingly | |
63 | func CalibrateIfNeeded(j *ffuf.Job) error { | |
64 | var err error | |
65 | if !j.Config.AutoCalibration { | |
66 | return nil | |
67 | } | |
68 | // Handle the calibration | |
69 | responses, err := j.CalibrateResponses() | |
70 | if err != nil { | |
71 | return err | |
72 | } | |
73 | if len(responses) > 0 { | |
74 | err = calibrateFilters(j, responses) | |
75 | } | |
76 | return err | |
// GetFilters returns the global filter map.
func (f *MatcherManager) GetFilters() map[string]ffuf.FilterProvider {
	return f.Filters
}
78 | 152 | |
79 | func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) error { | |
80 | sizeCalib := make([]string, 0) | |
81 | wordCalib := make([]string, 0) | |
82 | lineCalib := make([]string, 0) | |
83 | for _, r := range responses { | |
84 | if r.ContentLength > 0 { | |
85 | // Only add if we have an actual size of responses | |
86 | sizeCalib = append(sizeCalib, strconv.FormatInt(r.ContentLength, 10)) | |
87 | } | |
88 | if r.ContentWords > 0 { | |
89 | // Only add if we have an actual word length of response | |
90 | wordCalib = append(wordCalib, strconv.FormatInt(r.ContentWords, 10)) | |
91 | } | |
92 | if r.ContentLines > 1 { | |
93 | // Only add if we have an actual word length of response | |
94 | lineCalib = append(lineCalib, strconv.FormatInt(r.ContentLines, 10)) | |
95 | } | |
96 | } | |
97 | ||
98 | //Remove duplicates | |
99 | sizeCalib = ffuf.UniqStringSlice(sizeCalib) | |
100 | wordCalib = ffuf.UniqStringSlice(wordCalib) | |
101 | lineCalib = ffuf.UniqStringSlice(lineCalib) | |
102 | ||
103 | if len(sizeCalib) > 0 { | |
104 | err := AddFilter(j.Config, "size", strings.Join(sizeCalib, ",")) | |
105 | if err != nil { | |
106 | return err | |
107 | } | |
108 | } | |
109 | if len(wordCalib) > 0 { | |
110 | err := AddFilter(j.Config, "word", strings.Join(wordCalib, ",")) | |
111 | if err != nil { | |
112 | return err | |
113 | } | |
114 | } | |
115 | if len(lineCalib) > 0 { | |
116 | err := AddFilter(j.Config, "line", strings.Join(lineCalib, ",")) | |
117 | if err != nil { | |
118 | return err | |
119 | } | |
120 | } | |
121 | return nil | |
// GetMatchers returns the matcher map.
func (f *MatcherManager) GetMatchers() map[string]ffuf.FilterProvider {
	return f.Matchers
}
123 | 156 | |
124 | func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { | |
125 | errs := ffuf.NewMultierror() | |
126 | // If any other matcher is set, ignore -mc default value | |
127 | matcherSet := false | |
128 | statusSet := false | |
129 | warningIgnoreBody := false | |
130 | flag.Visit(func(f *flag.Flag) { | |
131 | if f.Name == "mc" { | |
132 | statusSet = true | |
133 | } | |
134 | if f.Name == "ms" { | |
135 | matcherSet = true | |
136 | warningIgnoreBody = true | |
137 | } | |
138 | if f.Name == "ml" { | |
139 | matcherSet = true | |
140 | warningIgnoreBody = true | |
141 | } | |
142 | if f.Name == "mr" { | |
143 | matcherSet = true | |
144 | } | |
145 | if f.Name == "mw" { | |
146 | matcherSet = true | |
147 | warningIgnoreBody = true | |
148 | } | |
149 | }) | |
150 | if statusSet || !matcherSet { | |
151 | if err := AddMatcher(conf, "status", parseOpts.Matcher.Status); err != nil { | |
152 | errs.Add(err) | |
153 | } | |
// FiltersForDomain returns the filters for the given domain, falling back to
// the global filters when no per-domain overrides exist.
func (f *MatcherManager) FiltersForDomain(domain string) map[string]ffuf.FilterProvider {
	if f.PerDomainFilters[domain] == nil {
		return f.Filters
	}
	return f.PerDomainFilters[domain].Filters
}
155 | 163 | |
156 | if parseOpts.Filter.Status != "" { | |
157 | if err := AddFilter(conf, "status", parseOpts.Filter.Status); err != nil { | |
158 | errs.Add(err) | |
159 | } | |
164 | func (f *MatcherManager) CalibratedForDomain(domain string) bool { | |
165 | if f.PerDomainFilters[domain] != nil { | |
166 | return f.PerDomainFilters[domain].IsCalibrated | |
160 | 167 | } |
161 | if parseOpts.Filter.Size != "" { | |
162 | warningIgnoreBody = true | |
163 | if err := AddFilter(conf, "size", parseOpts.Filter.Size); err != nil { | |
164 | errs.Add(err) | |
165 | } | |
166 | } | |
167 | if parseOpts.Filter.Regexp != "" { | |
168 | if err := AddFilter(conf, "regexp", parseOpts.Filter.Regexp); err != nil { | |
169 | errs.Add(err) | |
170 | } | |
171 | } | |
172 | if parseOpts.Filter.Words != "" { | |
173 | warningIgnoreBody = true | |
174 | if err := AddFilter(conf, "word", parseOpts.Filter.Words); err != nil { | |
175 | errs.Add(err) | |
176 | } | |
177 | } | |
178 | if parseOpts.Filter.Lines != "" { | |
179 | warningIgnoreBody = true | |
180 | if err := AddFilter(conf, "line", parseOpts.Filter.Lines); err != nil { | |
181 | errs.Add(err) | |
182 | } | |
183 | } | |
184 | if parseOpts.Matcher.Size != "" { | |
185 | if err := AddMatcher(conf, "size", parseOpts.Matcher.Size); err != nil { | |
186 | errs.Add(err) | |
187 | } | |
188 | } | |
189 | if parseOpts.Matcher.Regexp != "" { | |
190 | if err := AddMatcher(conf, "regexp", parseOpts.Matcher.Regexp); err != nil { | |
191 | errs.Add(err) | |
192 | } | |
193 | } | |
194 | if parseOpts.Matcher.Words != "" { | |
195 | if err := AddMatcher(conf, "word", parseOpts.Matcher.Words); err != nil { | |
196 | errs.Add(err) | |
197 | } | |
198 | } | |
199 | if parseOpts.Matcher.Lines != "" { | |
200 | if err := AddMatcher(conf, "line", parseOpts.Matcher.Lines); err != nil { | |
201 | errs.Add(err) | |
202 | } | |
203 | } | |
204 | if conf.IgnoreBody && warningIgnoreBody { | |
205 | fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") | |
206 | } | |
207 | return errs.ErrorOrNil() | |
168 | return false | |
208 | 169 | } |
170 | ||
171 | func (f *MatcherManager) Calibrated() bool { | |
172 | return f.IsCalibrated | |
173 | } |
28 | 28 | if _, ok := ref.(*RegexpFilter); !ok { |
29 | 29 | t.Errorf("Was expecting regexpfilter") |
30 | 30 | } |
31 | ||
32 | tf, _ := NewFilterByName("time", "200") | |
33 | if _, ok := tf.(*TimeFilter); !ok { | |
34 | t.Errorf("Was expecting timefilter") | |
35 | } | |
31 | 36 | } |
32 | 37 | |
33 | 38 | func TestNewFilterByNameError(t *testing.T) { |
0 | package filter | |
1 | ||
2 | import ( | |
3 | "encoding/json" | |
4 | "fmt" | |
5 | "strconv" | |
6 | "strings" | |
7 | ||
8 | "github.com/ffuf/ffuf/pkg/ffuf" | |
9 | ) | |
10 | ||
// TimeFilter filters or matches responses based on how long the request
// took, expressed in milliseconds with a ">" or "<" comparison prefix.
type TimeFilter struct {
	ms       int64  // milliseconds since first response byte
	gt       bool   // filter if response time is greater than
	lt       bool   // filter if response time is less than
	valueRaw string // original CLI value, kept verbatim for Repr() and JSON output
}
17 | ||
18 | func NewTimeFilter(value string) (ffuf.FilterProvider, error) { | |
19 | var milliseconds int64 | |
20 | gt, lt := false, false | |
21 | ||
22 | gt = strings.HasPrefix(value, ">") | |
23 | lt = strings.HasPrefix(value, "<") | |
24 | ||
25 | if (!lt && !gt) || (lt && gt) { | |
26 | return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) | |
27 | } | |
28 | ||
29 | milliseconds, err := strconv.ParseInt(value[1:], 10, 64) | |
30 | if err != nil { | |
31 | return &TimeFilter{}, fmt.Errorf("Time filter or matcher (-ft / -mt): invalid value: %s", value) | |
32 | } | |
33 | return &TimeFilter{ms: milliseconds, gt: gt, lt: lt, valueRaw: value}, nil | |
34 | } | |
35 | ||
36 | func (f *TimeFilter) MarshalJSON() ([]byte, error) { | |
37 | return json.Marshal(&struct { | |
38 | Value string `json:"value"` | |
39 | }{ | |
40 | Value: f.valueRaw, | |
41 | }) | |
42 | } | |
43 | ||
44 | func (f *TimeFilter) Filter(response *ffuf.Response) (bool, error) { | |
45 | if f.gt { | |
46 | if response.Time.Milliseconds() > f.ms { | |
47 | return true, nil | |
48 | } | |
49 | ||
50 | } else if f.lt { | |
51 | if response.Time.Milliseconds() < f.ms { | |
52 | return true, nil | |
53 | } | |
54 | } | |
55 | ||
56 | return false, nil | |
57 | } | |
58 | ||
59 | func (f *TimeFilter) Repr() string { | |
60 | return f.valueRaw | |
61 | } | |
62 | ||
63 | func (f *TimeFilter) ReprVerbose() string { | |
64 | return fmt.Sprintf("Response time: %s", f.Repr()) | |
65 | } |
0 | package filter | |
1 | ||
2 | import ( | |
3 | "testing" | |
4 | "time" | |
5 | ||
6 | "github.com/ffuf/ffuf/pkg/ffuf" | |
7 | ) | |
8 | ||
9 | func TestNewTimeFilter(t *testing.T) { | |
10 | fp, _ := NewTimeFilter(">100") | |
11 | ||
12 | f := fp.(*TimeFilter) | |
13 | ||
14 | if !f.gt || f.lt { | |
15 | t.Errorf("Time filter was expected to have greater-than") | |
16 | } | |
17 | ||
18 | if f.ms != 100 { | |
19 | t.Errorf("Time filter was expected to have ms == 100") | |
20 | } | |
21 | } | |
22 | ||
23 | func TestNewTimeFilterError(t *testing.T) { | |
24 | _, err := NewTimeFilter("100>") | |
25 | if err == nil { | |
26 | t.Errorf("Was expecting an error from errenous input data") | |
27 | } | |
28 | } | |
29 | ||
30 | func TestTimeFiltering(t *testing.T) { | |
31 | f, _ := NewTimeFilter(">100") | |
32 | ||
33 | for i, test := range []struct { | |
34 | input int64 | |
35 | output bool | |
36 | }{ | |
37 | {1342, true}, | |
38 | {2000, true}, | |
39 | {35000, true}, | |
40 | {1458700, true}, | |
41 | {99, false}, | |
42 | {2, false}, | |
43 | } { | |
44 | resp := ffuf.Response{ | |
45 | Data: []byte("dahhhhhtaaaaa"), | |
46 | Time: time.Duration(test.input * int64(time.Millisecond)), | |
47 | } | |
48 | filterReturn, _ := f.Filter(&resp) | |
49 | if filterReturn != test.output { | |
50 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) | |
51 | } | |
52 | } | |
53 | } |
11 | 11 | type CommandInput struct { |
12 | 12 | config *ffuf.Config |
13 | 13 | count int |
14 | active bool | |
14 | 15 | keyword string |
15 | 16 | command string |
16 | 17 | shell string |
18 | 19 | |
19 | 20 | func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) { |
20 | 21 | var cmd CommandInput |
22 | cmd.active = true | |
21 | 23 | cmd.keyword = keyword |
22 | 24 | cmd.config = conf |
23 | 25 | cmd.count = 0 |
73 | 75 | func (c *CommandInput) Total() int { |
74 | 76 | return c.config.InputNum |
75 | 77 | } |
78 | ||
79 | func (c *CommandInput) Active() bool { | |
80 | return c.active | |
81 | } | |
82 | ||
83 | func (c *CommandInput) Enable() { | |
84 | c.active = true | |
85 | } | |
86 | ||
87 | func (c *CommandInput) Disable() { | |
88 | c.active = false | |
89 | } |
15 | 15 | func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, ffuf.Multierror) { |
16 | 16 | validmode := false |
17 | 17 | errs := ffuf.NewMultierror() |
18 | for _, mode := range []string{"clusterbomb", "pitchfork"} { | |
18 | for _, mode := range []string{"clusterbomb", "pitchfork", "sniper"} { | |
19 | 19 | if conf.InputMode == mode { |
20 | 20 | validmode = true |
21 | 21 | } |
50 | 50 | return nil |
51 | 51 | } |
52 | 52 | |
53 | // ActivateKeywords enables / disables wordlists based on list of active keywords | |
54 | func (i *MainInputProvider) ActivateKeywords(kws []string) { | |
55 | for _, p := range i.Providers { | |
56 | if sliceContains(kws, p.Keyword()) { | |
57 | p.Active() | |
58 | } else { | |
59 | p.Disable() | |
60 | } | |
61 | } | |
62 | } | |
63 | ||
53 | 64 | //Position will return the current position of progress |
54 | 65 | func (i *MainInputProvider) Position() int { |
55 | 66 | return i.position |
67 | } | |
68 | ||
69 | //Keywords returns a slice of all keywords in the inputprovider | |
70 | func (i *MainInputProvider) Keywords() []string { | |
71 | kws := make([]string, 0) | |
72 | for _, p := range i.Providers { | |
73 | kws = append(kws, p.Keyword()) | |
74 | } | |
75 | return kws | |
56 | 76 | } |
57 | 77 | |
58 | 78 | //Next will increment the cursor position, and return a boolean telling if there's inputs left |
67 | 87 | //Value returns a map of inputs for keywords |
68 | 88 | func (i *MainInputProvider) Value() map[string][]byte { |
69 | 89 | retval := make(map[string][]byte) |
70 | if i.Config.InputMode == "clusterbomb" { | |
90 | if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { | |
71 | 91 | retval = i.clusterbombValue() |
72 | 92 | } |
73 | 93 | if i.Config.InputMode == "pitchfork" { |
90 | 110 | func (i *MainInputProvider) pitchforkValue() map[string][]byte { |
91 | 111 | values := make(map[string][]byte) |
92 | 112 | for _, p := range i.Providers { |
113 | if !p.Active() { | |
114 | // The inputprovider is disabled | |
115 | continue | |
116 | } | |
93 | 117 | if !p.Next() { |
94 | 118 | // Loop to beginning if the inputprovider has been exhausted |
95 | 119 | p.ResetPosition() |
107 | 131 | // Should we signal the next InputProvider in the slice to increment |
108 | 132 | signalNext := false |
109 | 133 | first := true |
110 | for index, p := range i.Providers { | |
134 | index := 0 | |
135 | for _, p := range i.Providers { | |
136 | if !p.Active() { | |
137 | continue | |
138 | } | |
111 | 139 | if signalNext { |
112 | 140 | p.IncrementPosition() |
113 | 141 | signalNext = false |
129 | 157 | p.IncrementPosition() |
130 | 158 | first = false |
131 | 159 | } |
160 | index += 1 | |
132 | 161 | } |
133 | 162 | return values |
134 | 163 | } |
135 | 164 | |
136 | 165 | func (i *MainInputProvider) clusterbombIteratorReset() { |
137 | for index, p := range i.Providers { | |
166 | index := 0 | |
167 | for _, p := range i.Providers { | |
168 | if !p.Active() { | |
169 | continue | |
170 | } | |
138 | 171 | if index < i.msbIterator { |
139 | 172 | p.ResetPosition() |
140 | 173 | } |
141 | 174 | if index == i.msbIterator { |
142 | 175 | p.IncrementPosition() |
143 | 176 | } |
177 | index += 1 | |
144 | 178 | } |
145 | 179 | } |
146 | 180 | |
149 | 183 | count := 0 |
150 | 184 | if i.Config.InputMode == "pitchfork" { |
151 | 185 | for _, p := range i.Providers { |
186 | if !p.Active() { | |
187 | continue | |
188 | } | |
152 | 189 | if p.Total() > count { |
153 | 190 | count = p.Total() |
154 | 191 | } |
155 | 192 | } |
156 | 193 | } |
157 | if i.Config.InputMode == "clusterbomb" { | |
194 | if i.Config.InputMode == "clusterbomb" || i.Config.InputMode == "sniper" { | |
158 | 195 | count = 1 |
159 | 196 | for _, p := range i.Providers { |
197 | if !p.Active() { | |
198 | continue | |
199 | } | |
160 | 200 | count = count * p.Total() |
161 | 201 | } |
162 | 202 | } |
163 | 203 | return count |
164 | 204 | } |
205 | ||
//sliceContains is a helper function that returns true if a string is included in a string slice
func sliceContains(sslice []string, str string) bool {
	for _, candidate := range sslice {
		if candidate == str {
			return true
		}
	}
	return false
}
9 | 9 | ) |
10 | 10 | |
11 | 11 | type WordlistInput struct { |
12 | active bool | |
12 | 13 | config *ffuf.Config |
13 | 14 | data [][]byte |
14 | 15 | position int |
17 | 18 | |
18 | 19 | func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*WordlistInput, error) { |
19 | 20 | var wl WordlistInput |
21 | wl.active = true | |
20 | 22 | wl.keyword = keyword |
21 | 23 | wl.config = conf |
22 | 24 | wl.position = 0 |
54 | 56 | return w.keyword |
55 | 57 | } |
56 | 58 | |
57 | //Next will increment the cursor position, and return a boolean telling if there's words left in the list | |
59 | //Next will return a boolean telling if there's words left in the list | |
58 | 60 | func (w *WordlistInput) Next() bool { |
59 | 61 | return w.position < len(w.data) |
60 | 62 | } |
72 | 74 | //Total returns the size of wordlist |
73 | 75 | func (w *WordlistInput) Total() int { |
74 | 76 | return len(w.data) |
77 | } | |
78 | ||
79 | //Active returns boolean if the inputprovider is active | |
80 | func (w *WordlistInput) Active() bool { | |
81 | return w.active | |
82 | } | |
83 | ||
84 | //Enable sets the inputprovider as active | |
85 | func (w *WordlistInput) Enable() { | |
86 | w.active = true | |
87 | } | |
88 | ||
89 | //Disable disables the inputprovider | |
90 | func (w *WordlistInput) Disable() { | |
91 | w.active = false | |
75 | 92 | } |
76 | 93 | |
77 | 94 | //validFile checks that the wordlist file exists and can be read |
0 | package input | |
1 | ||
2 | import ( | |
3 | "testing" | |
4 | ) | |
5 | ||
6 | func TestStripCommentsIgnoresCommentLines(t *testing.T) { | |
7 | text, _ := stripComments("# text") | |
8 | ||
9 | if text != "" { | |
10 | t.Errorf("Returned text was not a blank string") | |
11 | } | |
12 | } | |
13 | ||
14 | func TestStripCommentsStripsCommentAfterText(t *testing.T) { | |
15 | text, _ := stripComments("text # comment") | |
16 | ||
17 | if text != "text" { | |
18 | t.Errorf("Comment was not stripped or pre-comment text was not returned") | |
19 | } | |
20 | } |
2 | 2 | import ( |
3 | 3 | "bufio" |
4 | 4 | "fmt" |
5 | "github.com/ffuf/ffuf/pkg/ffuf" | |
6 | "github.com/ffuf/ffuf/pkg/filter" | |
7 | 5 | "strconv" |
8 | 6 | "strings" |
9 | 7 | "time" |
8 | ||
9 | "github.com/ffuf/ffuf/pkg/ffuf" | |
10 | 10 | ) |
11 | 11 | |
12 | 12 | type interactive struct { |
79 | 79 | } else if len(args) > 2 { |
80 | 80 | i.Job.Output.Error("Too many arguments for \"fc\"") |
81 | 81 | } else { |
82 | i.updateFilter("status", args[1]) | |
82 | i.updateFilter("status", args[1], true) | |
83 | 83 | i.Job.Output.Info("New status code filter value set") |
84 | 84 | } |
85 | case "afc": | |
86 | if len(args) < 2 { | |
87 | i.Job.Output.Error("Please define a value to append to status code filter") | |
88 | } else if len(args) > 2 { | |
89 | i.Job.Output.Error("Too many arguments for \"afc\"") | |
90 | } else { | |
91 | i.appendFilter("status", args[1]) | |
92 | i.Job.Output.Info("New status code filter value set") | |
93 | } | |
85 | 94 | case "fl": |
86 | 95 | if len(args) < 2 { |
87 | 96 | i.Job.Output.Error("Please define a value for line count filter, or \"none\" for removing it") |
88 | 97 | } else if len(args) > 2 { |
89 | 98 | i.Job.Output.Error("Too many arguments for \"fl\"") |
90 | 99 | } else { |
91 | i.updateFilter("line", args[1]) | |
100 | i.updateFilter("line", args[1], true) | |
92 | 101 | i.Job.Output.Info("New line count filter value set") |
93 | 102 | } |
103 | case "afl": | |
104 | if len(args) < 2 { | |
105 | i.Job.Output.Error("Please define a value to append to line count filter") | |
106 | } else if len(args) > 2 { | |
107 | i.Job.Output.Error("Too many arguments for \"afl\"") | |
108 | } else { | |
109 | i.appendFilter("line", args[1]) | |
110 | i.Job.Output.Info("New line count filter value set") | |
111 | } | |
94 | 112 | case "fw": |
95 | 113 | if len(args) < 2 { |
96 | 114 | i.Job.Output.Error("Please define a value for word count filter, or \"none\" for removing it") |
97 | 115 | } else if len(args) > 2 { |
98 | 116 | i.Job.Output.Error("Too many arguments for \"fw\"") |
99 | 117 | } else { |
100 | i.updateFilter("word", args[1]) | |
118 | i.updateFilter("word", args[1], true) | |
101 | 119 | i.Job.Output.Info("New word count filter value set") |
102 | 120 | } |
121 | case "afw": | |
122 | if len(args) < 2 { | |
123 | i.Job.Output.Error("Please define a value to append to word count filter") | |
124 | } else if len(args) > 2 { | |
125 | i.Job.Output.Error("Too many arguments for \"afw\"") | |
126 | } else { | |
127 | i.appendFilter("word", args[1]) | |
128 | i.Job.Output.Info("New word count filter value set") | |
129 | } | |
103 | 130 | case "fs": |
104 | 131 | if len(args) < 2 { |
105 | 132 | i.Job.Output.Error("Please define a value for response size filter, or \"none\" for removing it") |
106 | 133 | } else if len(args) > 2 { |
107 | 134 | i.Job.Output.Error("Too many arguments for \"fs\"") |
108 | 135 | } else { |
109 | i.updateFilter("size", args[1]) | |
136 | i.updateFilter("size", args[1], true) | |
110 | 137 | i.Job.Output.Info("New response size filter value set") |
138 | } | |
139 | case "afs": | |
140 | if len(args) < 2 { | |
141 | i.Job.Output.Error("Please define a value to append to size filter") | |
142 | } else if len(args) > 2 { | |
143 | i.Job.Output.Error("Too many arguments for \"afs\"") | |
144 | } else { | |
145 | i.appendFilter("size", args[1]) | |
146 | i.Job.Output.Info("New response size filter value set") | |
147 | } | |
148 | case "ft": | |
149 | if len(args) < 2 { | |
150 | i.Job.Output.Error("Please define a value for response time filter, or \"none\" for removing it") | |
151 | } else if len(args) > 2 { | |
152 | i.Job.Output.Error("Too many arguments for \"ft\"") | |
153 | } else { | |
154 | i.updateFilter("time", args[1], true) | |
155 | i.Job.Output.Info("New response time filter value set") | |
156 | } | |
157 | case "aft": | |
158 | if len(args) < 2 { | |
159 | i.Job.Output.Error("Please define a value to append to response time filter") | |
160 | } else if len(args) > 2 { | |
161 | i.Job.Output.Error("Too many arguments for \"aft\"") | |
162 | } else { | |
163 | i.appendFilter("time", args[1]) | |
164 | i.Job.Output.Info("New response time filter value set") | |
111 | 165 | } |
112 | 166 | case "queueshow": |
113 | 167 | i.printQueue() |
136 | 190 | } |
137 | 191 | } |
138 | 192 | |
139 | func (i *interactive) updateFilter(name, value string) { | |
140 | if value == "none" { | |
141 | filter.RemoveFilter(i.Job.Config, name) | |
142 | } else { | |
143 | newFc, err := filter.NewFilterByName(name, value) | |
144 | if err != nil { | |
145 | i.Job.Output.Error(fmt.Sprintf("Error while setting new filter value: %s", err)) | |
146 | return | |
147 | } else { | |
148 | i.Job.Config.Filters[name] = newFc | |
149 | } | |
150 | ||
151 | results := make([]ffuf.Result, 0) | |
193 | func (i *interactive) refreshResults() { | |
194 | results := make([]ffuf.Result, 0) | |
195 | filters := i.Job.Config.MatcherManager.GetFilters() | |
196 | for _, filter := range filters { | |
152 | 197 | for _, res := range i.Job.Output.GetCurrentResults() { |
153 | 198 | fakeResp := &ffuf.Response{ |
154 | 199 | StatusCode: res.StatusCode, |
156 | 201 | ContentWords: res.ContentWords, |
157 | 202 | ContentLength: res.ContentLength, |
158 | 203 | } |
159 | filterOut, _ := newFc.Filter(fakeResp) | |
204 | filterOut, _ := filter.Filter(fakeResp) | |
160 | 205 | if !filterOut { |
161 | 206 | results = append(results, res) |
162 | 207 | } |
163 | 208 | } |
164 | i.Job.Output.SetCurrentResults(results) | |
165 | } | |
209 | } | |
210 | i.Job.Output.SetCurrentResults(results) | |
211 | } | |
212 | ||
213 | func (i *interactive) updateFilter(name, value string, replace bool) { | |
214 | if value == "none" { | |
215 | i.Job.Config.MatcherManager.RemoveFilter(name) | |
216 | } else { | |
217 | _ = i.Job.Config.MatcherManager.AddFilter(name, value, replace) | |
218 | } | |
219 | i.refreshResults() | |
220 | } | |
221 | ||
222 | func (i *interactive) appendFilter(name, value string) { | |
223 | i.updateFilter(name, value, false) | |
166 | 224 | } |
167 | 225 | |
168 | 226 | func (i *interactive) printQueue() { |
169 | 227 | if len(i.Job.QueuedJobs()) > 0 { |
170 | i.Job.Output.Raw("Queued recursion jobs:\n") | |
228 | i.Job.Output.Raw("Queued jobs:\n") | |
171 | 229 | for index, job := range i.Job.QueuedJobs() { |
172 | 230 | postfix := "" |
173 | 231 | if index == 0 { |
176 | 234 | i.Job.Output.Raw(fmt.Sprintf(" [%d] : %s%s\n", index, job.Url, postfix)) |
177 | 235 | } |
178 | 236 | } else { |
179 | i.Job.Output.Info("Recursion job queue is empty") | |
237 | i.Job.Output.Info("Job queue is empty") | |
180 | 238 | } |
181 | 239 | } |
182 | 240 | |
191 | 249 | i.Job.Output.Warning("Cannot delete the currently running job. Use \"queueskip\" to advance to the next one") |
192 | 250 | } else { |
193 | 251 | i.Job.DeleteQueueItem(index) |
194 | i.Job.Output.Info("Recursion job successfully deleted!") | |
252 | i.Job.Output.Info("Job successfully deleted!") | |
195 | 253 | } |
196 | 254 | } |
197 | 255 | } |
204 | 262 | } |
205 | 263 | |
206 | 264 | func (i *interactive) printHelp() { |
207 | var fc, fl, fs, fw string | |
208 | for name, filter := range i.Job.Config.Filters { | |
265 | var fc, fl, fs, ft, fw string | |
266 | for name, filter := range i.Job.Config.MatcherManager.GetFilters() { | |
209 | 267 | switch name { |
210 | 268 | case "status": |
211 | 269 | fc = "(active: " + filter.Repr() + ")" |
215 | 273 | fw = "(active: " + filter.Repr() + ")" |
216 | 274 | case "size": |
217 | 275 | fs = "(active: " + filter.Repr() + ")" |
276 | case "time": | |
277 | ft = "(active: " + filter.Repr() + ")" | |
218 | 278 | } |
219 | 279 | } |
220 | 280 | help := ` |
221 | 281 | available commands: |
222 | fc [value] - (re)configure status code filter %s | |
223 | fl [value] - (re)configure line count filter %s | |
224 | fw [value] - (re)configure word count filter %s | |
225 | fs [value] - (re)configure size filter %s | |
226 | queueshow - show recursive job queue | |
227 | queuedel [number] - delete a recursion job in the queue | |
228 | queueskip - advance to the next queued recursion job | |
282 | afc [value] - append to status code filter %s | |
283 | fc [value] - (re)configure status code filter %s | |
284 | afl [value] - append to line count filter %s | |
285 | fl [value] - (re)configure line count filter %s | |
286 | afw [value] - append to word count filter %s | |
287 | fw [value] - (re)configure word count filter %s | |
288 | afs [value] - append to size filter %s | |
289 | fs [value] - (re)configure size filter %s | |
290 | aft [value] - append to time filter %s | |
291 | ft [value] - (re)configure time filter %s | |
292 | queueshow - show job queue | |
293 | queuedel [number] - delete a job in the queue | |
294 | queueskip - advance to the next queued job | |
229 | 295 | restart - restart and resume the current ffuf job |
230 | 296 | resume - resume current ffuf job (or: ENTER) |
231 | 297 | show - show results for the current job |
232 | 298 | savejson [filename] - save current matches to a file |
233 | 299 | help - you are looking at it |
234 | 300 | ` |
235 | i.Job.Output.Raw(fmt.Sprintf(help, fc, fl, fw, fs)) | |
236 | } | |
301 | i.Job.Output.Raw(fmt.Sprintf(help, fc, fc, fl, fl, fw, fw, fs, fs, ft, ft)) | |
302 | } |
8 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" |
9 | 9 | ) |
10 | 10 | |
11 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "resultfile"} | |
11 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "duration", "resultfile"} | |
12 | 12 | |
13 | 13 | func writeCSV(filename string, config *ffuf.Config, res []ffuf.Result, encode bool) error { |
14 | ||
15 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
16 | return nil | |
17 | } | |
18 | ||
19 | 14 | header := make([]string, 0) |
20 | 15 | f, err := os.Create(filename) |
21 | 16 | if err != nil { |
68 | 63 | res = append(res, strconv.FormatInt(r.ContentWords, 10)) |
69 | 64 | res = append(res, strconv.FormatInt(r.ContentLines, 10)) |
70 | 65 | res = append(res, r.ContentType) |
66 | res = append(res, r.Duration.String()) | |
71 | 67 | res = append(res, r.ResultFile) |
72 | 68 | return res |
73 | 69 | } |
0 | package output | |
1 | ||
2 | import ( | |
3 | "reflect" | |
4 | "testing" | |
5 | "time" | |
6 | ||
7 | "github.com/ffuf/ffuf/pkg/ffuf" | |
8 | ) | |
9 | ||
10 | func TestToCSV(t *testing.T) { | |
11 | result := ffuf.Result{ | |
12 | Input: map[string][]byte{"x": {66}}, | |
13 | Position: 1, | |
14 | StatusCode: 200, | |
15 | ContentLength: 3, | |
16 | ContentWords: 4, | |
17 | ContentLines: 5, | |
18 | ContentType: "application/json", | |
19 | RedirectLocation: "http://no.pe", | |
20 | Url: "http://as.df", | |
21 | Duration: time.Duration(123), | |
22 | ResultFile: "resultfile", | |
23 | Host: "host", | |
24 | } | |
25 | ||
26 | csv := toCSV(result) | |
27 | ||
28 | if !reflect.DeepEqual(csv, []string{ | |
29 | "B", | |
30 | "http://as.df", | |
31 | "http://no.pe", | |
32 | "1", | |
33 | "200", | |
34 | "3", | |
35 | "4", | |
36 | "5", | |
37 | "application/json", | |
38 | "123ns", | |
39 | "resultfile"}) { | |
40 | ||
41 | t.Errorf("CSV was not generated in expected format") | |
42 | } | |
43 | } |
77 | 77 | <th>Words</th> |
78 | 78 | <th>Lines</th> |
79 | 79 | <th>Type</th> |
80 | <th>Duration</th> | |
80 | 81 | <th>Resultfile</th> |
81 | 82 | </tr> |
82 | 83 | </thead> |
98 | 99 | <td>{{ $result.ContentWords }}</td> |
99 | 100 | <td>{{ $result.ContentLines }}</td> |
100 | 101 | <td>{{ $result.ContentType }}</td> |
102 | <td>{{ $result.Duration }}</td> | |
101 | 103 | <td>{{ $result.ResultFile }}</td> |
102 | 104 | </tr> |
103 | 105 | {{ end }} |
176 | 178 | } |
177 | 179 | |
178 | 180 | func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) error { |
179 | ||
180 | if config.OutputCreateEmptyFile && (len(results) == 0) { | |
181 | return nil | |
182 | } | |
183 | ||
184 | 181 | results = colorizeResults(results) |
185 | 182 | |
186 | 183 | ti := time.Now() |
23 | 23 | ContentLines int64 `json:"lines"` |
24 | 24 | ContentType string `json:"content-type"` |
25 | 25 | RedirectLocation string `json:"redirectlocation"` |
26 | Duration time.Duration `json:"duration"` | |
26 | 27 | ResultFile string `json:"resultfile"` |
27 | 28 | Url string `json:"url"` |
28 | 29 | Host string `json:"host"` |
36 | 37 | } |
37 | 38 | |
38 | 39 | func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { |
39 | ||
40 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
41 | return nil | |
42 | } | |
43 | ||
44 | 40 | t := time.Now() |
45 | 41 | outJSON := ejsonFileOutput{ |
46 | 42 | CommandLine: config.CommandLine, |
76 | 72 | ContentLines: r.ContentLines, |
77 | 73 | ContentType: r.ContentType, |
78 | 74 | RedirectLocation: r.RedirectLocation, |
75 | Duration: r.Duration, | |
79 | 76 | ResultFile: r.ResultFile, |
80 | 77 | Url: r.Url, |
81 | 78 | Host: r.Host, |
13 | 13 | Command line : ` + "`{{.CommandLine}}`" + ` |
14 | 14 | Time: ` + "{{ .Time }}" + ` |
15 | 15 | |
16 | {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | ResultFile | | |
16 | {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | Duration | ResultFile | | |
17 | 17 | {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | |
18 | {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .ResultFile }} | | |
18 | {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .Duration}} | {{ .ResultFile }} | | |
19 | 19 | {{end}}` // The template format is not pretty but follows the markdown guide |
20 | 20 | ) |
21 | 21 | |
22 | 22 | func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error { |
23 | ||
24 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
25 | return nil | |
26 | } | |
27 | ||
28 | 23 | ti := time.Now() |
29 | 24 | |
30 | 25 | keywords := make([]string, 0) |
1 | 1 | |
2 | 2 | import ( |
3 | 3 | "crypto/md5" |
4 | "encoding/json" | |
4 | 5 | "fmt" |
5 | 6 | "io/ioutil" |
6 | 7 | "os" |
122 | 123 | } |
123 | 124 | |
124 | 125 | // Print matchers |
125 | for _, f := range s.config.Matchers { | |
126 | for _, f := range s.config.MatcherManager.GetMatchers() { | |
126 | 127 | printOption([]byte("Matcher"), []byte(f.ReprVerbose())) |
127 | 128 | } |
128 | 129 | // Print filters |
129 | for _, f := range s.config.Filters { | |
130 | for _, f := range s.config.MatcherManager.GetFilters() { | |
130 | 131 | printOption([]byte("Filter"), []byte(f.ReprVerbose())) |
131 | 132 | } |
132 | 133 | fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP) |
224 | 225 | // Go through each type of write, adding |
225 | 226 | // the suffix to each output file. |
226 | 227 | |
227 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
228 | return nil | |
229 | } | |
230 | ||
231 | 228 | s.config.OutputFile = BaseFilename + ".json" |
232 | err = writeJSON(filename, s.config, res) | |
229 | err = writeJSON(s.config.OutputFile, s.config, res) | |
233 | 230 | if err != nil { |
234 | 231 | s.Error(err.Error()) |
235 | 232 | } |
236 | 233 | |
237 | 234 | s.config.OutputFile = BaseFilename + ".ejson" |
238 | err = writeEJSON(filename, s.config, res) | |
235 | err = writeEJSON(s.config.OutputFile, s.config, res) | |
239 | 236 | if err != nil { |
240 | 237 | s.Error(err.Error()) |
241 | 238 | } |
242 | 239 | |
243 | 240 | s.config.OutputFile = BaseFilename + ".html" |
244 | err = writeHTML(filename, s.config, res) | |
241 | err = writeHTML(s.config.OutputFile, s.config, res) | |
245 | 242 | if err != nil { |
246 | 243 | s.Error(err.Error()) |
247 | 244 | } |
248 | 245 | |
249 | 246 | s.config.OutputFile = BaseFilename + ".md" |
250 | err = writeMarkdown(filename, s.config, res) | |
247 | err = writeMarkdown(s.config.OutputFile, s.config, res) | |
251 | 248 | if err != nil { |
252 | 249 | s.Error(err.Error()) |
253 | 250 | } |
254 | 251 | |
255 | 252 | s.config.OutputFile = BaseFilename + ".csv" |
256 | err = writeCSV(filename, s.config, res, false) | |
253 | err = writeCSV(s.config.OutputFile, s.config, res, false) | |
257 | 254 | if err != nil { |
258 | 255 | s.Error(err.Error()) |
259 | 256 | } |
260 | 257 | |
261 | 258 | s.config.OutputFile = BaseFilename + ".ecsv" |
262 | err = writeCSV(filename, s.config, res, true) | |
259 | err = writeCSV(s.config.OutputFile, s.config, res, true) | |
263 | 260 | if err != nil { |
264 | 261 | s.Error(err.Error()) |
265 | 262 | } |
271 | 268 | // SaveFile saves the current results to a file of a given type |
272 | 269 | func (s *Stdoutput) SaveFile(filename, format string) error { |
273 | 270 | var err error |
271 | if s.config.OutputSkipEmptyFile && len(s.Results) == 0 { | |
272 | s.Info("No results and -or defined, output file not written.") | |
273 | return err | |
274 | } | |
274 | 275 | switch format { |
275 | 276 | case "all": |
276 | 277 | err = s.writeToAll(filename, s.config, append(s.Results, s.CurrentResults...)) |
323 | 324 | ContentType: resp.ContentType, |
324 | 325 | RedirectLocation: resp.GetRedirectLocation(false), |
325 | 326 | Url: resp.Request.Url, |
327 | Duration: resp.Time, | |
326 | 328 | ResultFile: resp.ResultFile, |
327 | 329 | Host: resp.Request.Host, |
328 | 330 | } |
357 | 359 | } |
358 | 360 | |
359 | 361 | func (s *Stdoutput) PrintResult(res ffuf.Result) { |
360 | if s.config.Quiet { | |
362 | switch { | |
363 | case s.config.Quiet: | |
361 | 364 | s.resultQuiet(res) |
362 | } else { | |
363 | if len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 { | |
364 | // Print a multi-line result (when using multiple input keywords and wordlists) | |
365 | s.resultMultiline(res) | |
366 | } else { | |
367 | s.resultNormal(res) | |
368 | } | |
365 | case s.config.Json: | |
366 | s.resultJson(res) | |
367 | case len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0: | |
368 | // Print a multi-line result (when using multiple input keywords and wordlists) | |
369 | s.resultMultiline(res) | |
370 | default: | |
371 | s.resultNormal(res) | |
369 | 372 | } |
370 | 373 | } |
371 | 374 | |
400 | 403 | func (s *Stdoutput) resultMultiline(res ffuf.Result) { |
401 | 404 | var res_hdr, res_str string |
402 | 405 | res_str = "%s%s * %s: %s\n" |
403 | res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines) | |
404 | res_hdr = s.colorize(res_hdr, res.StatusCode) | |
406 | res_hdr = fmt.Sprintf("%s%s[Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) | |
405 | 407 | reslines := "" |
406 | 408 | if s.config.Verbose { |
407 | 409 | reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, res.Url) |
426 | 428 | } |
427 | 429 | |
428 | 430 | func (s *Stdoutput) resultNormal(res ffuf.Result) { |
429 | resnormal := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(res), s.colorize(fmt.Sprintf("%d", res.StatusCode), res.StatusCode), res.ContentLength, res.ContentWords, res.ContentLines) | |
431 | resnormal := fmt.Sprintf("%s%s%-23s [Status: %d, Size: %d, Words: %d, Lines: %d, Duration: %dms]%s", TERMINAL_CLEAR_LINE, s.colorize(res.StatusCode), s.prepareInputsOneLine(res), res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines, res.Duration.Milliseconds(), ANSI_CLEAR) | |
430 | 432 | fmt.Println(resnormal) |
431 | 433 | } |
432 | 434 | |
433 | func (s *Stdoutput) colorize(input string, status int64) string { | |
435 | func (s *Stdoutput) resultJson(res ffuf.Result) { | |
436 | resBytes, err := json.Marshal(res) | |
437 | if err != nil { | |
438 | s.Error(err.Error()) | |
439 | } else { | |
440 | fmt.Fprint(os.Stderr, TERMINAL_CLEAR_LINE) | |
441 | fmt.Println(string(resBytes)) | |
442 | } | |
443 | } | |
444 | ||
445 | func (s *Stdoutput) colorize(status int64) string { | |
434 | 446 | if !s.config.Colors { |
435 | return input | |
447 | return "" | |
436 | 448 | } |
437 | 449 | colorCode := ANSI_CLEAR |
438 | 450 | if status >= 200 && status < 300 { |
447 | 459 | if status >= 500 && status < 600 { |
448 | 460 | colorCode = ANSI_RED |
449 | 461 | } |
450 | return fmt.Sprintf("%s%s%s", colorCode, input, ANSI_CLEAR) | |
462 | return colorCode | |
451 | 463 | } |
452 | 464 | |
453 | 465 | func printOption(name []byte, value []byte) { |
6 | 6 | "io/ioutil" |
7 | 7 | "net" |
8 | 8 | "net/http" |
9 | "net/http/httptrace" | |
9 | 10 | "net/http/httputil" |
10 | 11 | "net/textproto" |
11 | 12 | "net/url" |
40 | 41 | proxyURL = http.ProxyURL(pu) |
41 | 42 | } |
42 | 43 | } |
43 | ||
44 | 44 | simplerunner.config = conf |
45 | 45 | simplerunner.client = &http.Client{ |
46 | 46 | CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, |
47 | 47 | Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second), |
48 | 48 | Transport: &http.Transport{ |
49 | ForceAttemptHTTP2: conf.Http2, | |
49 | 50 | Proxy: proxyURL, |
50 | 51 | MaxIdleConns: 1000, |
51 | 52 | MaxIdleConnsPerHost: 500, |
57 | 58 | TLSClientConfig: &tls.Config{ |
58 | 59 | InsecureSkipVerify: true, |
59 | 60 | Renegotiation: tls.RenegotiateOnceAsClient, |
61 | ServerName: conf.SNI, | |
60 | 62 | }, |
61 | 63 | }} |
62 | 64 | |
66 | 68 | return &simplerunner |
67 | 69 | } |
68 | 70 | |
69 | func (r *SimpleRunner) Prepare(input map[string][]byte) (ffuf.Request, error) { | |
70 | req := ffuf.NewRequest(r.config) | |
71 | ||
72 | req.Headers = r.config.Headers | |
73 | req.Url = r.config.Url | |
74 | req.Method = r.config.Method | |
75 | req.Data = []byte(r.config.Data) | |
71 | func (r *SimpleRunner) Prepare(input map[string][]byte, basereq *ffuf.Request) (ffuf.Request, error) { | |
72 | req := ffuf.CopyRequest(basereq) | |
76 | 73 | |
77 | 74 | for keyword, inputitem := range input { |
78 | 75 | req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem)) |
95 | 92 | var err error |
96 | 93 | var rawreq []byte |
97 | 94 | data := bytes.NewReader(req.Data) |
95 | ||
96 | var start time.Time | |
97 | var firstByteTime time.Duration | |
98 | ||
99 | trace := &httptrace.ClientTrace{ | |
100 | WroteRequest: func(wri httptrace.WroteRequestInfo) { | |
101 | start = time.Now() // begin the timer after the request is fully written | |
102 | }, | |
103 | GotFirstResponseByte: func() { | |
104 | firstByteTime = time.Since(start) // record when the first byte of the response was received | |
105 | }, | |
106 | } | |
107 | ||
98 | 108 | httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data) |
109 | ||
99 | 110 | if err != nil { |
100 | 111 | return ffuf.Response{}, err |
101 | 112 | } |
111 | 122 | } |
112 | 123 | |
113 | 124 | req.Host = httpreq.Host |
114 | httpreq = httpreq.WithContext(r.config.Context) | |
125 | httpreq = httpreq.WithContext(httptrace.WithClientTrace(r.config.Context, trace)) | |
115 | 126 | for k, v := range req.Headers { |
116 | 127 | httpreq.Header.Set(k, v) |
117 | 128 | } |
153 | 164 | linesSize := len(strings.Split(string(resp.Data), "\n")) |
154 | 165 | resp.ContentWords = int64(wordsSize) |
155 | 166 | resp.ContentLines = int64(linesSize) |
167 | resp.Time = firstByteTime | |
156 | 168 | |
157 | 169 | return resp, nil |
158 | 170 | } |