ffuf / 81ac516
Deleted the upstream files in order to re-import them and make adjustments in the repository. Marcio de Souza Oliveira 3 years ago
45 changed file(s) with 0 addition(s) and 3896 deletion(s).
+0
-96
CHANGELOG.md
0 ## Changelog
1
2 - master
3 - New
4 - Changed
5
6 - v1.0.2
7 - Changed
8 - Write POST request data properly to file when run with `-od`.
9 - Fixed a bug related to HTTP headers being case insensitive by using header canonicalization.
10 - Properly handle relative redirect URLs with `-recursion`
11 - Calculate req/sec correctly when using recursion
12 - When `-request` is used, allow the user to override URL using `-u`
13
14 - v1.0.1
15 - Changed
16 - Fixed a bug where regex matchers and filters would fail if `-od` was used to store the request & response contents.
17
18 - v1.0
19 - New
20 - New CLI flag `-ic` to ignore comments from wordlist.
21 - New CLI flags `-request` to specify the raw request file to build the actual request from and `-request-proto` to define the new request format.
22 - New CLI flag `-od` (output directory) to enable writing requests and responses for matched results to a file for postprocessing or debugging purposes.
23 - New CLI flag `-maxtime` to limit the running time of ffuf
24 - New CLI flags `-recursion` and `-recursion-depth` to control recursive ffuf jobs if directories are found. This requires the `-u` to end with FUZZ keyword.
25 - New CLI flag `-replay-proxy` to replay matched requests using a custom proxy.
26 - Changed
27 - Limit the use of `-e` (extensions) to a single keyword: FUZZ
28 - Regexp matching and filtering (-mr/-fr) allow using keywords in patterns
29 - Take 429 responses into account when -sa (stop on all error cases) is used
30 - Remove -k flag support, convert to dummy flag #134
31 - Write configuration to output JSON
32 - Better help text.
33 - If any matcher is set, ignore -mc default value.
34
35 - v0.12
36 - New
37 - Added a new flag to select a multi wordlist operation mode: `--mode`, possible values: `clusterbomb` and `pitchfork`.
38 - Added a new output file format, eJSON, which always base64 encodes the input data.
39 - Redirect location is always shown in the output files (when using `-o`)
40 - Full URL is always shown in the output files (when using `-o`)
41 - HTML output format got [DataTables](https://datatables.net/) support allowing realtime searches, sorting by column etc.
42 - New CLI flag `-v` for verbose output. Including full URL, and redirect location.
43 - SIGTERM monitoring, in order to catch keyboard interrupts and such, so that `-o` files can be written before exiting.
44 - Changed
45 - Fixed a bug in the default multi wordlist mode
46 - Fixed JSON output regression, where all the input data was always encoded in base64
47 - `--debug-log` now correctly logs connection errors
48 - Removed `-l` flag in favor of `-v`
49 - More verbose information in the banner shown at startup.
50
51 - v0.11
52 - New
53
54 - New CLI flag: -l, shows target location of redirect responses
55 - New CLI flag: -acc, custom auto-calibration strings
56 - New CLI flag: -debug-log, writes the debug logging to the specified file.
57 - New CLI flags -ml and -fl, filters/matches line count in response
58 - Ability to use multiple wordlists / keywords by defining multiple -w command line flags. If no keyword is defined, the default is FUZZ, to keep backwards compatibility. Example: `-w "wordlists/custom.txt:CUSTOM" -H "RandomHeader: CUSTOM"`.
59
60 - Changed
61 - New CLI flag: -i, a dummy flag that does nothing, for compatibility with copy as curl.
62 - New CLI flag: -b/--cookie, cookie data for compatibility with copy as curl.
63 - New output formats are available: HTML and Markdown table.
64 - New CLI flag: -l, shows target location of redirect responses
65 - Filtering and matching by status code, response size or word count now allow using ranges in addition to single values
66 - The internal logging information is now discarded by default, and can be written to a file with the new `-debug-log` flag.
67
68 - v0.10
69 - New
70 - New CLI flag: -ac to autocalibrate response size and word filters based on a few preset URLs.
71 - New CLI flag: -timeout to specify custom timeouts for all HTTP requests.
72 - New CLI flag: --data for compatibility with copy as curl functionality of browsers.
73 - New CLI flag: --compressed, a dummy flag that does nothing, for compatibility with copy as curl.
74 - New CLI flags: --input-cmd and --input-num to handle input generation using external commands (mutators, for example). The environment variable FFUF_NUM will be updated on every call of the command.
75 - When --input-cmd is used, display the position instead of the payload in results. The output file (in all formats) will, however, include the payload in addition to the position.
76
77 - Changed
78 - Wordlist can also be read from standard input
79 - Defining -d or --data implies the POST method unless -X sets it to something other than GET
80
81 - v0.9
82 - New
83 - New output file formats: CSV and eCSV (CSV with base64 encoded input field to avoid CSV breakage with payloads containing a comma)
84 - New CLI flag to follow redirects
85 - Erroring connections will be retried once
86 - Error counter in status bar
87 - New CLI flags: -se (stop on spurious errors) and -sa (stop on all errors, implies -se and -sf)
88 - New CLI flags: -e to provide a list of extensions to add to wordlist entries, and -D to provide DirSearch wordlist format compatibility.
89 - Wildcard option for response status code matcher.
90 - v0.8
91 - New
92 - New CLI flag to write output to a file in JSON format
93 - New CLI flag to stop on spurious 403 responses
94 - Changed
95 - Regex matching / filtering now matches the headers alongside the response body
+0
-20
CONTRIBUTORS.md
0 # Contributors
1
2 * [bjhulst](https://github.com/bjhulst)
3 * [ccsplit](https://github.com/ccsplit)
4 * [codingo](https://github.com/codingo)
5 * [delic](https://github.com/delic)
6 * [eur0pa](https://github.com/eur0pa)
7 * [fang0654](https://github.com/fang0654)
8 * [Ice3man543](https://github.com/Ice3man543)
9 * [JamTookTheBait](https://github.com/JamTookTheBait)
10 * [joohoi](https://github.com/joohoi)
11 * [jvesiluoma](https://github.com/jvesiluoma)
12 * [lc](https://github.com/lc)
13 * [nnwakelam](https://twitter.com/nnwakelam)
14 * [oh6hay](https://github.com/oh6hay)
15 * [putsi](https://github.com/putsi)
16 * [SakiiR](https://github.com/SakiiR)
17 * [seblw](https://github.com/seblw)
18 * [Shaked](https://github.com/Shaked)
19 * [SolomonSklash](https://github.com/SolomonSklash)
+0
-21
LICENSE
0 MIT License
1
2 Copyright (c) 2020 Joona Hoikkala
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy
5 of this software and associated documentation files (the "Software"), to deal
6 in the Software without restriction, including without limitation the rights
7 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 copies of the Software, and to permit persons to whom the Software is
9 furnished to do so, subject to the following conditions:
10
11 The above copyright notice and this permission notice shall be included in all
12 copies or substantial portions of the Software.
13
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 SOFTWARE.
+0
-178
README.md
0 ```
1 /'___\ /'___\ /'___\
2 /\ \__/ /\ \__/ __ __ /\ \__/
3 \ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\
4 \ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/
5 \ \_\ \ \_\ \ \____/ \ \_\
6 \/_/ \/_/ \/___/ \/_/
7 ```
8
9 # ffuf - Fuzz Faster U Fool
10
11 A fast web fuzzer written in Go.
12
13 ## Installation
14
15 - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from the [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run!
16 or
17 - If you have the Go compiler installed: `go get github.com/ffuf/ffuf`
18
19 The only dependency of ffuf is Go 1.11. No dependencies outside of the Go standard library are needed.
20
21 ## Example usage
22
23 ### Typical directory discovery
24
25 [![asciicast](https://asciinema.org/a/211350.png)](https://asciinema.org/a/211350)
26
27 By using the FUZZ keyword at the end of the URL (`-u`):
28
29 ```
30 ffuf -w /path/to/wordlist -u https://target/FUZZ
31 ```
32
33 ### Virtual host discovery (without DNS records)
34
35 [![asciicast](https://asciinema.org/a/211360.png)](https://asciinema.org/a/211360)
36
37 Assuming that the default virtualhost response size is 4242 bytes, we can filter out all the responses of that size (`-fs 4242`) while fuzzing the Host header:
38
39 ```
40 ffuf -w /path/to/vhost/wordlist -u https://target -H "Host: FUZZ" -fs 4242
41 ```
42
43 ### GET parameter fuzzing
44
45 GET parameter name fuzzing is very similar to directory discovery, and works by defining the `FUZZ` keyword as a part of the URL. This also assumes a response size of 4242 bytes for an invalid GET parameter name.
46
47 ```
48 ffuf -w /path/to/paramnames.txt -u https://target/script.php?FUZZ=test_value -fs 4242
49 ```
50
51 If the parameter name is known, the values can be fuzzed the same way. This example assumes that a wrong parameter value returns HTTP response code 401.
52
53 ```
54 ffuf -w /path/to/values.txt -u https://target/script.php?valid_name=FUZZ -fc 401
55 ```
56
57 ### POST data fuzzing
58
59 This is a very straightforward operation, again by using the `FUZZ` keyword. This example is fuzzing only part of the POST request. We're again filtering out the 401 responses.
60
61 ```
62 ffuf -w /path/to/postdata.txt -X POST -d "username=admin\&password=FUZZ" -u https://target/login.php -fc 401
63 ```
64
65 ### Using external mutator to produce test cases
66
67 For this example, we'll fuzz JSON data that's sent over POST. [Radamsa](https://gitlab.com/akihe/radamsa) is used as the mutator.
68
69 When `--input-cmd` is used, ffuf will display matches as their position. This same position value will be available for the callee as an environment variable `$FFUF_NUM`. We'll use this position value as the seed for the mutator. Files example1.txt and example2.txt contain valid JSON payloads. We are matching all the responses, but filtering out response code `400 - Bad request`:
70
71 ```
72 ffuf --input-cmd 'radamsa --seed $FFUF_NUM example1.txt example2.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
73 ```
74
75 It of course isn't very efficient to call the mutator for each payload, so we can also pre-generate the payloads, still using [Radamsa](https://gitlab.com/akihe/radamsa) as an example:
76
77 ```
78 # Generate 1000 example payloads
79 radamsa -n 1000 -o %n.txt example1.txt example2.txt
80
81 # This results in files 1.txt ... 1000.txt
82 # Now we can just read the payload data in a loop from file for ffuf
83
84 ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400
85 ```
86
87 ## Usage
88
89 To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`).
90
91 ```
92 Fuzz Faster U Fool - v1.0
93
94 HTTP OPTIONS:
95 -H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted.
96 -X HTTP method to use (default: GET)
97 -b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality.
98 -d POST data
99 -r Follow redirects (default: false)
100 -recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false)
101 -recursion-depth Maximum recursion depth. (default: 0)
102 -replay-proxy Replay matched requests using this proxy.
103 -timeout HTTP request timeout in seconds. (default: 10)
104 -u Target URL
105 -x HTTP Proxy URL
106
107 GENERAL OPTIONS:
108 -V Show version information. (default: false)
109 -ac Automatically calibrate filtering options (default: false)
110 -acc Custom auto-calibration string. Can be used multiple times. Implies -ac
111 -c Colorize output. (default: false)
112 -maxtime Maximum running time in seconds. (default: 0)
113 -p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0"
114 -s Do not print additional information (silent mode) (default: false)
115 -sa Stop on all error cases. Implies -sf and -se. (default: false)
116 -se Stop on spurious errors (default: false)
117 -sf Stop when > 95% of responses return 403 Forbidden (default: false)
118 -t Number of concurrent threads. (default: 40)
119 -v Verbose output, printing full URL and redirect location (if any) with the results. (default: false)
120
121 MATCHER OPTIONS:
122 -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403)
123 -ml Match amount of lines in response
124 -mr Match regexp
125 -ms Match HTTP response size
126 -mw Match amount of words in response
127
128 FILTER OPTIONS:
129 -fc Filter HTTP status codes from response. Comma separated list of codes and ranges
130 -fl Filter by amount of lines in response. Comma separated list of line counts and ranges
131 -fr Filter regexp
132 -fs Filter HTTP response size. Comma separated list of sizes and ranges
133 -fw Filter by amount of words in response. Comma separated list of word counts and ranges
134
135 INPUT OPTIONS:
136 -D DirSearch wordlist compatibility mode. Used in conjunction with -e flag. (default: false)
137 -e Comma separated list of extensions. Extends FUZZ keyword.
138 -ic Ignore wordlist comments (default: false)
139 -input-cmd Command producing the input. --input-num is required when using this input method. Overrides -w.
140 -input-num Number of inputs to test. Used in conjunction with --input-cmd. (default: 100)
141 -mode Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork (default: clusterbomb)
142 -request File containing the raw http request
143 -request-proto Protocol to use along with raw request (default: https)
144 -w Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'
145
146 OUTPUT OPTIONS:
147 -debug-log Write all of the internal logging to the specified file.
148 -o Write output to file
149 -od Directory path to store matched results to.
150 -of Output file format. Available formats: json, ejson, html, md, csv, ecsv (default: json)
151
152 EXAMPLE USAGE:
153 Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42.
154 Colored, verbose output.
155 ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v
156
157 Fuzz Host-header, match HTTP 200 responses.
158 ffuf -w hosts.txt -u https://example.org/ -H "Host: FUZZ" -mc 200
159
160 Fuzz POST JSON data. Match all responses not containing text "error".
161 ffuf -w entries.txt -u https://example.org/ -X POST -H "Content-Type: application/json" \
162 -d '{"name": "FUZZ", "anotherkey": "anothervalue"}' -fr "error"
163
164 Fuzz multiple locations. Match only responses reflecting the value of "VAL" keyword. Colored.
165 ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr "VAL" -c
166
167 More information and examples: https://github.com/ffuf/ffuf
168 ```
169
170 ## Helper scripts and advanced payloads
171
172 See [ffuf-scripts](https://github.com/ffuf/ffuf-scripts) repository for helper scripts and payload generators
173 for different workflows and usage scenarios.
174
175 ## License
176
177 ffuf is released under MIT license. See [LICENSE](https://github.com/ffuf/ffuf/blob/master/LICENSE).
+0
-3
go.mod
0 module github.com/ffuf/ffuf
1
2 go 1.11
+0
-160
help.go
0 package main
1
2 import (
3 "flag"
4 "fmt"
5 "os"
6
7 "github.com/ffuf/ffuf/pkg/ffuf"
8 )
9
10 type UsageSection struct {
11 Name string
12 Description string
13 Flags []UsageFlag
14 Hidden bool
15 ExpectedFlags []string
16 }
17
18 //PrintSection prints out the section name, description and each of the flags
19 func (u *UsageSection) PrintSection(max_length int, extended bool) {
20 // Do not print if extended usage not requested and section marked as hidden
21 if !extended && u.Hidden {
22 return
23 }
24 fmt.Printf("%s:\n", u.Name)
25 for _, f := range u.Flags {
26 f.PrintFlag(max_length)
27 }
28 fmt.Printf("\n")
29 }
30
31 type UsageFlag struct {
32 Name string
33 Description string
34 Default string
35 }
36
37 //PrintFlag prints out the flag name, usage string and default value
38 func (f *UsageFlag) PrintFlag(max_length int) {
39 // Create format string, used for padding
40 format := fmt.Sprintf(" -%%-%ds %%s", max_length)
41 if f.Default != "" {
42 format = format + " (default: %s)\n"
43 fmt.Printf(format, f.Name, f.Description, f.Default)
44 } else {
45 format = format + "\n"
46 fmt.Printf(format, f.Name, f.Description)
47 }
48 }
49
50 func Usage() {
51 u_http := UsageSection{
52 Name: "HTTP OPTIONS",
53 Description: "Options controlling the HTTP request and its parts.",
54 Flags: make([]UsageFlag, 0),
55 Hidden: false,
56 ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "replay-proxy", "timeout", "x"},
57 }
58 u_general := UsageSection{
59 Name: "GENERAL OPTIONS",
60 Description: "",
61 Flags: make([]UsageFlag, 0),
62 Hidden: false,
63 ExpectedFlags: []string{"ac", "acc", "c", "maxtime", "p", "s", "sa", "se", "sf", "t", "v", "V"},
64 }
65 u_compat := UsageSection{
66 Name: "COMPATIBILITY OPTIONS",
67 Description: "Options to ensure compatibility with other pieces of software.",
68 Flags: make([]UsageFlag, 0),
69 Hidden: true,
70 ExpectedFlags: []string{"compressed", "cookie", "data", "data-ascii", "data-binary", "i", "k"},
71 }
72 u_matcher := UsageSection{
73 Name: "MATCHER OPTIONS",
74 Description: "Matchers for the response filtering.",
75 Flags: make([]UsageFlag, 0),
76 Hidden: false,
77 ExpectedFlags: []string{"mc", "ml", "mr", "ms", "mw"},
78 }
79 u_filter := UsageSection{
80 Name: "FILTER OPTIONS",
81 Description: "Filters for the response filtering.",
82 Flags: make([]UsageFlag, 0),
83 Hidden: false,
84 ExpectedFlags: []string{"fc", "fl", "fr", "fs", "fw"},
85 }
86 u_input := UsageSection{
87 Name: "INPUT OPTIONS",
88 Description: "Options for input data for fuzzing. Wordlists and input generators.",
89 Flags: make([]UsageFlag, 0),
90 Hidden: false,
91 ExpectedFlags: []string{"D", "ic", "input-cmd", "input-num", "mode", "request", "request-proto", "e", "w"},
92 }
93 u_output := UsageSection{
94 Name: "OUTPUT OPTIONS",
95 Description: "Options for output. Output file formats, file names and debug file locations.",
96 Flags: make([]UsageFlag, 0),
97 Hidden: false,
98 ExpectedFlags: []string{"debug-log", "o", "of", "od"},
99 }
100 sections := []UsageSection{u_http, u_general, u_compat, u_matcher, u_filter, u_input, u_output}
101
102 // Populate the flag sections
103 max_length := 0
104 flag.VisitAll(func(f *flag.Flag) {
105 found := false
106 for i, section := range sections {
107 if strInSlice(f.Name, section.ExpectedFlags) {
108 sections[i].Flags = append(sections[i].Flags, UsageFlag{
109 Name: f.Name,
110 Description: f.Usage,
111 Default: f.DefValue,
112 })
113 found = true
114 }
115 }
116 if !found {
117 fmt.Printf("DEBUG: Flag %s was found but not defined in usage.go.\n", f.Name)
118 os.Exit(1)
119 }
120 if len(f.Name) > max_length {
121 max_length = len(f.Name)
122 }
123 })
124
125 fmt.Printf("Fuzz Faster U Fool - v%s\n\n", ffuf.VERSION)
126
127 // Print out the sections
128 for _, section := range sections {
129 section.PrintSection(max_length, false)
130 }
131
132 // Usage examples.
133 fmt.Printf("EXAMPLE USAGE:\n")
134
135 fmt.Printf(" Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42.\n")
136 fmt.Printf(" Colored, verbose output.\n")
137 fmt.Printf(" ffuf -w wordlist.txt -u https://example.org/FUZZ -mc all -fs 42 -c -v\n\n")
138
139 fmt.Printf(" Fuzz Host-header, match HTTP 200 responses.\n")
140 fmt.Printf(" ffuf -w hosts.txt -u https://example.org/ -H \"Host: FUZZ\" -mc 200\n\n")
141
142 fmt.Printf(" Fuzz POST JSON data. Match all responses not containing text \"error\".\n")
143 fmt.Printf(" ffuf -w entries.txt -u https://example.org/ -X POST -H \"Content-Type: application/json\" \\\n")
144 fmt.Printf(" -d '{\"name\": \"FUZZ\", \"anotherkey\": \"anothervalue\"}' -fr \"error\"\n\n")
145
146 fmt.Printf(" Fuzz multiple locations. Match only responses reflecting the value of \"VAL\" keyword. Colored.\n")
147 fmt.Printf(" ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr \"VAL\" -c\n\n")
148
149 fmt.Printf(" More information and examples: https://github.com/ffuf/ffuf\n\n")
150 }
151
152 func strInSlice(val string, slice []string) bool {
153 for _, v := range slice {
154 if v == val {
155 return true
156 }
157 }
158 return false
159 }
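A note on the padding trick in `PrintFlag` above: the first `Sprintf` bakes the longest flag-name length into a format string, and the second call prints with it. A standalone sketch of the same two-step formatting, using hypothetical values and only the standard library:

```
package main

import "fmt"

func main() {
	// maxLength stands in for the longest flag name collected via flag.VisitAll.
	maxLength := 15
	// "%%-%ds" renders to "%-15s": a left-aligned field 15 characters wide.
	format := fmt.Sprintf(" -%%-%ds %%s\n", maxLength)
	fmt.Printf(format, "recursion-depth", "Maximum recursion depth.")
	fmt.Printf(format, "u", "Target URL")
}
```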
+0
-553
main.go
0 package main
1
2 import (
3 "bufio"
4 "context"
5 "flag"
6 "fmt"
7 "io/ioutil"
8 "log"
9 "net/textproto"
10 "net/url"
11 "os"
12 "strconv"
13 "strings"
14
15 "github.com/ffuf/ffuf/pkg/ffuf"
16 "github.com/ffuf/ffuf/pkg/filter"
17 "github.com/ffuf/ffuf/pkg/input"
18 "github.com/ffuf/ffuf/pkg/output"
19 "github.com/ffuf/ffuf/pkg/runner"
20 )
21
22 type cliOptions struct {
23 extensions string
24 delay string
25 filterStatus string
26 filterSize string
27 filterRegexp string
28 filterWords string
29 filterLines string
30 matcherStatus string
31 matcherSize string
32 matcherRegexp string
33 matcherWords string
34 matcherLines string
35 proxyURL string
36 replayProxyURL string
37 request string
38 requestProto string
39 URL string
40 outputFormat string
41 wordlists multiStringFlag
42 inputcommands multiStringFlag
43 headers multiStringFlag
44 cookies multiStringFlag
45 AutoCalibrationStrings multiStringFlag
46 showVersion bool
47 debugLog string
48 }
49
50 type multiStringFlag []string
51
52 func (m *multiStringFlag) String() string {
53 return ""
54 }
55
56 func (m *multiStringFlag) Set(value string) error {
57 *m = append(*m, value)
58 return nil
59 }
60
61 func main() {
62 ctx, cancel := context.WithCancel(context.Background())
63 defer cancel()
64 conf := ffuf.NewConfig(ctx)
65 opts := cliOptions{}
66 var ignored bool
67 flag.BoolVar(&conf.IgnoreWordlistComments, "ic", false, "Ignore wordlist comments")
68 flag.StringVar(&opts.extensions, "e", "", "Comma separated list of extensions. Extends FUZZ keyword.")
69 flag.BoolVar(&conf.DirSearchCompat, "D", false, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.")
70 flag.Var(&opts.headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.")
71 flag.StringVar(&opts.URL, "u", "", "Target URL")
72 flag.Var(&opts.wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'")
73 flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility")
74 flag.StringVar(&opts.delay, "p", "", "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"")
75 flag.StringVar(&opts.filterStatus, "fc", "", "Filter HTTP status codes from response. Comma separated list of codes and ranges")
76 flag.StringVar(&opts.filterSize, "fs", "", "Filter HTTP response size. Comma separated list of sizes and ranges")
77 flag.StringVar(&opts.filterRegexp, "fr", "", "Filter regexp")
78 flag.StringVar(&opts.filterWords, "fw", "", "Filter by amount of words in response. Comma separated list of word counts and ranges")
79 flag.StringVar(&opts.filterLines, "fl", "", "Filter by amount of lines in response. Comma separated list of line counts and ranges")
80 flag.StringVar(&conf.Data, "d", "", "POST data")
81 flag.StringVar(&conf.Data, "data", "", "POST data (alias of -d)")
82 flag.StringVar(&conf.Data, "data-ascii", "", "POST data (alias of -d)")
83 flag.StringVar(&conf.Data, "data-binary", "", "POST data (alias of -d)")
84 flag.BoolVar(&conf.Colors, "c", false, "Colorize output.")
85 flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)")
86 flag.Var(&opts.inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.")
87 flag.IntVar(&conf.InputNum, "input-num", 100, "Number of inputs to test. Used in conjunction with --input-cmd.")
88 flag.StringVar(&conf.InputMode, "mode", "clusterbomb", "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork")
89 flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)")
90 flag.Var(&opts.cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.")
91 flag.Var(&opts.cookies, "cookie", "Cookie data (alias of -b)")
92 flag.StringVar(&opts.matcherStatus, "mc", "200,204,301,302,307,401,403", "Match HTTP status codes, or \"all\" for everything.")
93 flag.StringVar(&opts.matcherSize, "ms", "", "Match HTTP response size")
94 flag.StringVar(&opts.matcherRegexp, "mr", "", "Match regexp")
95 flag.StringVar(&opts.matcherWords, "mw", "", "Match amount of words in response")
96 flag.StringVar(&opts.matcherLines, "ml", "", "Match amount of lines in response")
97 flag.StringVar(&opts.proxyURL, "x", "", "HTTP Proxy URL")
98 flag.StringVar(&opts.request, "request", "", "File containing the raw http request")
99 flag.StringVar(&opts.requestProto, "request-proto", "https", "Protocol to use along with raw request")
100 flag.StringVar(&conf.Method, "X", "GET", "HTTP method to use")
101 flag.StringVar(&conf.OutputFile, "o", "", "Write output to file")
102 flag.StringVar(&opts.outputFormat, "of", "json", "Output file format. Available formats: json, ejson, html, md, csv, ecsv")
103 flag.StringVar(&conf.OutputDirectory, "od", "", "Directory path to store matched results to.")
104 flag.BoolVar(&conf.Quiet, "s", false, "Do not print additional information (silent mode)")
105 flag.BoolVar(&conf.StopOn403, "sf", false, "Stop when > 95% of responses return 403 Forbidden")
106 flag.BoolVar(&conf.StopOnErrors, "se", false, "Stop on spurious errors")
107 flag.BoolVar(&conf.StopOnAll, "sa", false, "Stop on all error cases. Implies -sf and -se.")
108 flag.BoolVar(&conf.FollowRedirects, "r", false, "Follow redirects")
109 flag.BoolVar(&conf.Recursion, "recursion", false, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.")
110 flag.IntVar(&conf.RecursionDepth, "recursion-depth", 0, "Maximum recursion depth.")
111 flag.StringVar(&opts.replayProxyURL, "replay-proxy", "", "Replay matched requests using this proxy.")
112 flag.BoolVar(&conf.AutoCalibration, "ac", false, "Automatically calibrate filtering options")
113 flag.Var(&opts.AutoCalibrationStrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac")
114 flag.IntVar(&conf.Threads, "t", 40, "Number of concurrent threads.")
115 flag.IntVar(&conf.Timeout, "timeout", 10, "HTTP request timeout in seconds.")
116 flag.IntVar(&conf.MaxTime, "maxtime", 0, "Maximum running time in seconds.")
117 flag.BoolVar(&conf.Verbose, "v", false, "Verbose output, printing full URL and redirect location (if any) with the results.")
118 flag.BoolVar(&opts.showVersion, "V", false, "Show version information.")
119 flag.StringVar(&opts.debugLog, "debug-log", "", "Write all of the internal logging to the specified file.")
120 flag.Usage = Usage
121 flag.Parse()
122 if opts.showVersion {
123 fmt.Printf("ffuf version: %s\n", ffuf.VERSION)
124 os.Exit(0)
125 }
126 if len(opts.debugLog) != 0 {
127 f, err := os.OpenFile(opts.debugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
128 if err != nil {
129 fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err)
130 log.SetOutput(ioutil.Discard)
131 } else {
132 log.SetOutput(f)
133 defer f.Close()
134 }
135 } else {
136 log.SetOutput(ioutil.Discard)
137 }
138 if err := prepareConfig(&opts, &conf); err != nil {
139 fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
140 Usage()
141 os.Exit(1)
142 }
143 job, err := prepareJob(&conf)
144 if err != nil {
145 fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
146 Usage()
147 os.Exit(1)
148 }
149 if err := prepareFilters(&opts, &conf); err != nil {
150 fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err)
151 Usage()
152 os.Exit(1)
153 }
154
155 if err := filter.CalibrateIfNeeded(job); err != nil {
156 fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err)
157 os.Exit(1)
158 }
159
160 // Job handles waiting for goroutines to complete itself
161 job.Start()
162 }
163
164 func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) {
165 job := &ffuf.Job{
166 Config: conf,
167 }
168 errs := ffuf.NewMultierror()
169 var err error
170 inputprovider, err := input.NewInputProvider(conf)
171 if err != nil {
172 errs.Add(err)
173 }
174 // TODO: implement error handling for runnerprovider and outputprovider
175 // We only have http runner right now
176 job.Runner = runner.NewRunnerByName("http", conf, false)
177 if len(conf.ReplayProxyURL) > 0 {
178 job.ReplayRunner = runner.NewRunnerByName("http", conf, true)
179 }
180 // Initialize the correct inputprovider
181 for _, v := range conf.InputProviders {
182 err = inputprovider.AddProvider(v)
183 if err != nil {
184 errs.Add(err)
185 }
186 }
187 job.Input = inputprovider
188 // We only have stdout outputprovider right now
189 job.Output = output.NewOutputProviderByName("stdout", conf)
190 return job, errs.ErrorOrNil()
191 }
192
193 func prepareFilters(parseOpts *cliOptions, conf *ffuf.Config) error {
194 errs := ffuf.NewMultierror()
195 // If any other matcher is set, ignore -mc default value
196 matcherSet := false
197 statusSet := false
198 flag.Visit(func(f *flag.Flag) {
199 if f.Name == "mc" {
200 statusSet = true
201 }
202 if f.Name == "ms" {
203 matcherSet = true
204 }
205 if f.Name == "ml" {
206 matcherSet = true
207 }
208 if f.Name == "mr" {
209 matcherSet = true
210 }
211 if f.Name == "mw" {
212 matcherSet = true
213 }
214 })
215 if statusSet || !matcherSet {
216 if err := filter.AddMatcher(conf, "status", parseOpts.matcherStatus); err != nil {
217 errs.Add(err)
218 }
219 }
220
221 if parseOpts.filterStatus != "" {
222 if err := filter.AddFilter(conf, "status", parseOpts.filterStatus); err != nil {
223 errs.Add(err)
224 }
225 }
226 if parseOpts.filterSize != "" {
227 if err := filter.AddFilter(conf, "size", parseOpts.filterSize); err != nil {
228 errs.Add(err)
229 }
230 }
231 if parseOpts.filterRegexp != "" {
232 if err := filter.AddFilter(conf, "regexp", parseOpts.filterRegexp); err != nil {
233 errs.Add(err)
234 }
235 }
236 if parseOpts.filterWords != "" {
237 if err := filter.AddFilter(conf, "word", parseOpts.filterWords); err != nil {
238 errs.Add(err)
239 }
240 }
241 if parseOpts.filterLines != "" {
242 if err := filter.AddFilter(conf, "line", parseOpts.filterLines); err != nil {
243 errs.Add(err)
244 }
245 }
246 if parseOpts.matcherSize != "" {
247 if err := filter.AddMatcher(conf, "size", parseOpts.matcherSize); err != nil {
248 errs.Add(err)
249 }
250 }
251 if parseOpts.matcherRegexp != "" {
252 if err := filter.AddMatcher(conf, "regexp", parseOpts.matcherRegexp); err != nil {
253 errs.Add(err)
254 }
255 }
256 if parseOpts.matcherWords != "" {
257 if err := filter.AddMatcher(conf, "word", parseOpts.matcherWords); err != nil {
258 errs.Add(err)
259 }
260 }
261 if parseOpts.matcherLines != "" {
262 if err := filter.AddMatcher(conf, "line", parseOpts.matcherLines); err != nil {
263 errs.Add(err)
264 }
265 }
266 return errs.ErrorOrNil()
267 }
268
269 func prepareConfig(parseOpts *cliOptions, conf *ffuf.Config) error {
270 //TODO: refactor in a proper flag library that can handle things like required flags
271 errs := ffuf.NewMultierror()
272
273 var err error
274 var err2 error
275 if len(parseOpts.URL) == 0 && parseOpts.request == "" {
276 errs.Add(fmt.Errorf("-u flag or -request flag is required"))
277 }
278
279 // prepare extensions
280 if parseOpts.extensions != "" {
281 extensions := strings.Split(parseOpts.extensions, ",")
282 conf.Extensions = extensions
283 }
284
285 // Convert cookies to a header
286 if len(parseOpts.cookies) > 0 {
287 parseOpts.headers.Set("Cookie: " + strings.Join(parseOpts.cookies, "; "))
288 }
289
290 //Prepare inputproviders
291 for _, v := range parseOpts.wordlists {
292 wl := strings.SplitN(v, ":", 2)
293 if len(wl) == 2 {
294 conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
295 Name: "wordlist",
296 Value: wl[0],
297 Keyword: wl[1],
298 })
299 } else {
300 conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
301 Name: "wordlist",
302 Value: wl[0],
303 Keyword: "FUZZ",
304 })
305 }
306 }
307 for _, v := range parseOpts.inputcommands {
308 ic := strings.SplitN(v, ":", 2)
309 if len(ic) == 2 {
310 conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
311 Name: "command",
312 Value: ic[0],
313 Keyword: ic[1],
314 })
315 conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
316 } else {
317 conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
318 Name: "command",
319 Value: ic[0],
320 Keyword: "FUZZ",
321 })
322 conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
323 }
324 }
325
326 if len(conf.InputProviders) == 0 {
327 errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
328 }
329
330 // Prepare the request using body
331 if parseOpts.request != "" {
332 err := parseRawRequest(parseOpts, conf)
333 if err != nil {
334 errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
335 errs.Add(fmt.Errorf(errmsg))
336 }
337 }
338
339 //Prepare URL
340 if parseOpts.URL != "" {
341 conf.Url = parseOpts.URL
342 }
343
344 //Prepare headers and make canonical
345 for _, v := range parseOpts.headers {
346 hs := strings.SplitN(v, ":", 2)
347 if len(hs) == 2 {
348 // trim and make canonical
349 // except if used in custom defined header
350 var CanonicalNeeded bool = true
351 for _, a := range conf.CommandKeywords {
352 if a == hs[0] {
353 CanonicalNeeded = false
354 }
355 }
356 // check if part of InputProviders
357 if CanonicalNeeded {
358 for _, b := range conf.InputProviders {
359 if b.Keyword == hs[0] {
360 CanonicalNeeded = false
361 }
362 }
363 }
364 if CanonicalNeeded {
365 var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
366 conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
367 } else {
368 conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
369 }
370 } else {
371 errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
372 }
373 }
374
375 //Prepare delay
376 d := strings.Split(parseOpts.delay, "-")
377 if len(d) > 2 {
378 errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
379 } else if len(d) == 2 {
380 conf.Delay.IsRange = true
381 conf.Delay.HasDelay = true
382 conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
383 conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
384 if err != nil || err2 != nil {
385 errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
386 }
387 } else if len(parseOpts.delay) > 0 {
388 conf.Delay.IsRange = false
389 conf.Delay.HasDelay = true
390 conf.Delay.Min, err = strconv.ParseFloat(parseOpts.delay, 64)
391 if err != nil {
392 errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
393 }
394 }
395
396 // Verify proxy url format
397 if len(parseOpts.proxyURL) > 0 {
398 _, err := url.Parse(parseOpts.proxyURL)
399 if err != nil {
400 errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
401 } else {
402 conf.ProxyURL = parseOpts.proxyURL
403 }
404 }
405
406 // Verify replayproxy url format
407 if len(parseOpts.replayProxyURL) > 0 {
408 _, err := url.Parse(parseOpts.replayProxyURL)
409 if err != nil {
410 errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
411 } else {
412 conf.ReplayProxyURL = parseOpts.replayProxyURL
413 }
414 }
415
416 //Check the output file format option
417 if conf.OutputFile != "" {
418 //No need to check / error out if output file isn't defined
419 outputFormats := []string{"json", "ejson", "html", "md", "csv", "ecsv"}
420 found := false
421 for _, f := range outputFormats {
422 if f == parseOpts.outputFormat {
423 conf.OutputFormat = f
424 found = true
425 }
426 }
427 if !found {
428 errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.outputFormat))
429 }
430 }
431
432 // Auto-calibration strings
433 if len(parseOpts.AutoCalibrationStrings) > 0 {
434 conf.AutoCalibrationStrings = parseOpts.AutoCalibrationStrings
435 }
436 // Using -acc implies -ac
437 if len(conf.AutoCalibrationStrings) > 0 {
438 conf.AutoCalibration = true
439 }
440
441 // Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
442 if conf.Method == "GET" {
443 if len(conf.Data) > 0 {
444 conf.Method = "POST"
445 }
446 }
447
448 conf.CommandLine = strings.Join(os.Args, " ")
449
450 for _, provider := range conf.InputProviders {
451 if !keywordPresent(provider.Keyword, conf) {
452 errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
453 errs.Add(fmt.Errorf(errmsg))
454 }
455 }
456
457 // Do checks for recursion mode
458 if conf.Recursion {
459 if !strings.HasSuffix(conf.Url, "FUZZ") {
460 errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
461 errs.Add(fmt.Errorf(errmsg))
462 }
463 }
464
465 return errs.ErrorOrNil()
466 }
467
468 func parseRawRequest(parseOpts *cliOptions, conf *ffuf.Config) error {
469 file, err := os.Open(parseOpts.request)
470 if err != nil {
471 return fmt.Errorf("could not open request file: %s", err)
472 }
473 defer file.Close()
474
475 r := bufio.NewReader(file)
476
477 s, err := r.ReadString('\n')
478 if err != nil {
479 return fmt.Errorf("could not read request: %s", err)
480 }
481 parts := strings.Split(s, " ")
482 if len(parts) < 3 {
483 return fmt.Errorf("malformed request supplied")
484 }
485 // Set the request Method
486 conf.Method = parts[0]
487
488 for {
489 line, err := r.ReadString('\n')
490 line = strings.TrimSpace(line)
491
492 if err != nil || line == "" {
493 break
494 }
495
496 p := strings.SplitN(line, ":", 2)
497 if len(p) != 2 {
498 continue
499 }
500
501 if strings.EqualFold(p[0], "content-length") {
502 continue
503 }
504
505 conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
506 }
507
508 // Handle case with the full http url in path. In that case,
509 // ignore any host header that we encounter and use the path as request URL
510 if strings.HasPrefix(parts[1], "http") {
511 parsed, err := url.Parse(parts[1])
512 if err != nil {
513 return fmt.Errorf("could not parse request URL: %s", err)
514 }
515 conf.Url = parts[1]
516 conf.Headers["Host"] = parsed.Host
517 } else {
518 // Build the request URL from the request
519 conf.Url = parseOpts.requestProto + "://" + conf.Headers["Host"] + parts[1]
520 }
521
522 // Set the request body
523 b, err := ioutil.ReadAll(r)
524 if err != nil {
525 return fmt.Errorf("could not read request body: %s", err)
526 }
527 conf.Data = string(b)
528
529 return nil
530 }
531
532 func keywordPresent(keyword string, conf *ffuf.Config) bool {
533 //Search for keyword from HTTP method, URL and POST data too
534 if strings.Contains(conf.Method, keyword) {
535 return true
536 }
537 if strings.Contains(conf.Url, keyword) {
538 return true
539 }
540 if strings.Contains(conf.Data, keyword) {
541 return true
542 }
543 for k, v := range conf.Headers {
544 if strings.Contains(k, keyword) {
545 return true
546 }
547 if strings.Contains(v, keyword) {
548 return true
549 }
550 }
551 return false
552 }
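The `-w` handling in `prepareConfig` above splits each value on the first colon to separate the wordlist path from an optional keyword, falling back to `FUZZ`. A minimal sketch of that convention, using a hypothetical helper name that is not part of ffuf:

```
package main

import (
	"fmt"
	"strings"
)

// parseWordlistArg mirrors the "-w path[:KEYWORD]" convention from prepareConfig.
func parseWordlistArg(v string) (path, keyword string) {
	wl := strings.SplitN(v, ":", 2)
	if len(wl) == 2 {
		return wl[0], wl[1]
	}
	return wl[0], "FUZZ"
}

func main() {
	fmt.Println(parseWordlistArg("wordlists/custom.txt:CUSTOM")) // wordlists/custom.txt CUSTOM
	fmt.Println(parseWordlistArg("wordlists/common.txt"))        // wordlists/common.txt FUZZ
}
```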
+0
-84
pkg/ffuf/config.go
0 package ffuf
1
2 import (
3 "context"
4 )
5
6 type Config struct {
7 Headers map[string]string `json:"headers"`
8 Extensions []string `json:"extensions"`
9 DirSearchCompat bool `json:"dirsearch_compatibility"`
10 Method string `json:"method"`
11 Url string `json:"url"`
12 Data string `json:"postdata"`
13 Quiet bool `json:"quiet"`
14 Colors bool `json:"colors"`
15 InputProviders []InputProviderConfig `json:"inputproviders"`
16 CommandKeywords []string `json:"-"`
17 InputNum int `json:"cmd_inputnum"`
18 InputMode string `json:"inputmode"`
19 OutputDirectory string `json:"outputdirectory"`
20 OutputFile string `json:"outputfile"`
21 OutputFormat string `json:"outputformat"`
22 IgnoreWordlistComments bool `json:"ignore_wordlist_comments"`
23 StopOn403 bool `json:"stop_403"`
24 StopOnErrors bool `json:"stop_errors"`
25 StopOnAll bool `json:"stop_all"`
26 FollowRedirects bool `json:"follow_redirects"`
27 AutoCalibration bool `json:"autocalibration"`
28 AutoCalibrationStrings []string `json:"autocalibration_strings"`
29 Timeout int `json:"timeout"`
30 ProgressFrequency int `json:"-"`
31 Delay optRange `json:"delay"`
32 Filters map[string]FilterProvider `json:"filters"`
33 Matchers map[string]FilterProvider `json:"matchers"`
34 Threads int `json:"threads"`
35 Context context.Context `json:"-"`
36 ProxyURL string `json:"proxyurl"`
37 ReplayProxyURL string `json:"replayproxyurl"`
38 CommandLine string `json:"cmdline"`
39 Verbose bool `json:"verbose"`
40 MaxTime int `json:"maxtime"`
41 Recursion bool `json:"recursion"`
42 RecursionDepth int `json:"recursion_depth"`
43 }
44
45 type InputProviderConfig struct {
46 Name string `json:"name"`
47 Keyword string `json:"keyword"`
48 Value string `json:"value"`
49 }
50
51 func NewConfig(ctx context.Context) Config {
52 var conf Config
53 conf.Context = ctx
54 conf.Headers = make(map[string]string)
55 conf.Method = "GET"
56 conf.Url = ""
57 conf.Data = ""
58 conf.Quiet = false
59 conf.IgnoreWordlistComments = false
60 conf.StopOn403 = false
61 conf.StopOnErrors = false
62 conf.StopOnAll = false
63 conf.FollowRedirects = false
64 conf.InputProviders = make([]InputProviderConfig, 0)
65 conf.CommandKeywords = make([]string, 0)
66 conf.AutoCalibrationStrings = make([]string, 0)
67 conf.InputNum = 0
68 conf.InputMode = "clusterbomb"
69 conf.ProxyURL = ""
70 conf.Filters = make(map[string]FilterProvider)
71 conf.Matchers = make(map[string]FilterProvider)
72 conf.Delay = optRange{0, 0, false, false}
73 conf.Extensions = make([]string, 0)
74 conf.Timeout = 10
75 // Progress update frequency, in milliseconds
76 conf.ProgressFrequency = 100
77 conf.DirSearchCompat = false
78 conf.Verbose = false
79 conf.MaxTime = 0
80 conf.Recursion = false
81 conf.RecursionDepth = 0
82 return conf
83 }
+0
-6
pkg/ffuf/const.go
0 package ffuf
1
2 const (
3 //VERSION holds the current version number
4 VERSION = "1.0.2"
5 )
+0
-45
pkg/ffuf/interfaces.go
0 package ffuf
1
2 //FilterProvider is a generic interface for both Matchers and Filters
3 type FilterProvider interface {
4 Filter(response *Response) (bool, error)
5 Repr() string
6 }
7
8 //RunnerProvider is an interface for request executors
9 type RunnerProvider interface {
10 Prepare(input map[string][]byte) (Request, error)
11 Execute(req *Request) (Response, error)
12 }
13
14 //InputProvider interface handles the input data for RunnerProvider
15 type InputProvider interface {
16 AddProvider(InputProviderConfig) error
17 Next() bool
18 Position() int
19 Reset()
20 Value() map[string][]byte
21 Total() int
22 }
23
24 //InternalInputProvider interface handles providing input data to InputProvider
25 type InternalInputProvider interface {
26 Keyword() string
27 Next() bool
28 Position() int
29 ResetPosition()
30 IncrementPosition()
31 Value() []byte
32 Total() int
33 }
34
35 //OutputProvider is responsible for providing output from the RunnerProvider
36 type OutputProvider interface {
37 Banner() error
38 Finalize() error
39 Progress(status Progress)
40 Info(infostring string)
41 Error(errstring string)
42 Warning(warnstring string)
43 Result(resp Response)
44 }
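For illustration only: a type needs just the two `FilterProvider` methods above to act as a matcher or filter; ffuf's real implementations live in `pkg/filter`. A hypothetical single-status-code matcher, sketched as if it sat inside package ffuf where `Response` is defined:

```
package ffuf

import "fmt"

// exactStatusMatcher is a hypothetical FilterProvider that matches one HTTP status code.
type exactStatusMatcher struct {
	code int64
}

// Filter reports whether the response status equals the configured code.
func (m *exactStatusMatcher) Filter(response *Response) (bool, error) {
	return response.StatusCode == m.code, nil
}

// Repr returns a human readable representation of the matcher.
func (m *exactStatusMatcher) Repr() string {
	return fmt.Sprintf("Response status: %d", m.code)
}
```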
+0
-385
pkg/ffuf/job.go
0 package ffuf
1
2 import (
3 "fmt"
4 "log"
5 "math/rand"
6 "os"
7 "os/signal"
8 "sync"
9 "syscall"
10 "time"
11 )
12
13 //Job ties together Config, Runner, Input and Output
14 type Job struct {
15 Config *Config
16 ErrorMutex sync.Mutex
17 Input InputProvider
18 Runner RunnerProvider
19 ReplayRunner RunnerProvider
20 Output OutputProvider
21 Counter int
22 ErrorCounter int
23 SpuriousErrorCounter int
24 Total int
25 Running bool
26 Count403 int
27 Count429 int
28 Error string
29 startTime time.Time
30 queuejobs []QueueJob
31 queuepos int
32 currentDepth int
33 }
34
35 type QueueJob struct {
36 Url string
37 depth int
38 }
39
40 func NewJob(conf *Config) Job {
41 var j Job
42 j.Counter = 0
43 j.ErrorCounter = 0
44 j.SpuriousErrorCounter = 0
45 j.Running = false
46 j.queuepos = 0
47 j.queuejobs = make([]QueueJob, 0)
48 j.currentDepth = 0
49 return j
50 }
51
52 //incError increments the error counter
53 func (j *Job) incError() {
54 j.ErrorMutex.Lock()
55 defer j.ErrorMutex.Unlock()
56 j.ErrorCounter++
57 j.SpuriousErrorCounter++
58 }
59
60 //inc403 increments the 403 response counter
61 func (j *Job) inc403() {
62 j.ErrorMutex.Lock()
63 defer j.ErrorMutex.Unlock()
64 j.Count403++
65 }
66
67 // inc429 increments the 429 response counter
68 func (j *Job) inc429() {
69 j.ErrorMutex.Lock()
70 defer j.ErrorMutex.Unlock()
71 j.Count429++
72 }
73
74 //resetSpuriousErrors resets the spurious error counter
75 func (j *Job) resetSpuriousErrors() {
76 j.ErrorMutex.Lock()
77 defer j.ErrorMutex.Unlock()
78 j.SpuriousErrorCounter = 0
79 }
80
81 //Start the execution of the Job
82 func (j *Job) Start() {
83 // Add the default job to job queue
84 j.queuejobs = append(j.queuejobs, QueueJob{Url: j.Config.Url, depth: 0})
85 rand.Seed(time.Now().UnixNano())
86 j.Total = j.Input.Total()
87 defer j.Stop()
88 j.Running = true
89 //Show banner if not running in silent mode
90 if !j.Config.Quiet {
91 j.Output.Banner()
92 }
93 // Monitor for SIGTERM and do cleanup properly (writing the output files etc)
94 j.interruptMonitor()
95 for j.jobsInQueue() {
96 j.prepareQueueJob()
97 if j.queuepos > 1 {
98 // Print info for queued recursive jobs
99 j.Output.Info(fmt.Sprintf("Scanning: %s", j.Config.Url))
100 }
101 j.Input.Reset()
102 j.startTime = time.Now()
103 j.Counter = 0
104 j.startExecution()
105 }
106
107 j.Output.Finalize()
108 }
109
110 func (j *Job) jobsInQueue() bool {
111 if j.queuepos < len(j.queuejobs) {
112 return true
113 }
114 return false
115 }
116
117 func (j *Job) prepareQueueJob() {
118 j.Config.Url = j.queuejobs[j.queuepos].Url
119 j.currentDepth = j.queuejobs[j.queuepos].depth
120 j.queuepos += 1
121 }
122
123 func (j *Job) startExecution() {
124 var wg sync.WaitGroup
125 wg.Add(1)
126 go j.runProgress(&wg)
127 //Limiter blocks after reaching the buffer, ensuring limited concurrency
128 limiter := make(chan bool, j.Config.Threads)
129 for j.Input.Next() {
130 // Check if we should stop the process
131 j.CheckStop()
132 if !j.Running {
133 defer j.Output.Warning(j.Error)
134 break
135 }
136 limiter <- true
137 nextInput := j.Input.Value()
138 nextPosition := j.Input.Position()
139 wg.Add(1)
140 j.Counter++
141 go func() {
142 defer func() { <-limiter }()
143 defer wg.Done()
144 j.runTask(nextInput, nextPosition, false)
145 if j.Config.Delay.HasDelay {
146 var sleepDurationMS time.Duration
147 if j.Config.Delay.IsRange {
148 sTime := j.Config.Delay.Min + rand.Float64()*(j.Config.Delay.Max-j.Config.Delay.Min)
149 sleepDurationMS = time.Duration(sTime * 1000)
150 } else {
151 sleepDurationMS = time.Duration(j.Config.Delay.Min * 1000)
152 }
153 time.Sleep(sleepDurationMS * time.Millisecond)
154 }
155 }()
156 }
157 wg.Wait()
158 j.updateProgress()
159 return
160 }
161
162 func (j *Job) interruptMonitor() {
163 sigChan := make(chan os.Signal, 2)
164 signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM)
165 go func() {
166 for range sigChan {
167 j.Error = "Caught keyboard interrupt (Ctrl-C)\n"
168 j.Stop()
169 }
170 }()
171 }
172
173 func (j *Job) runProgress(wg *sync.WaitGroup) {
174 defer wg.Done()
175 totalProgress := j.Input.Total()
176 for j.Counter <= totalProgress {
177 if !j.Running {
178 break
179 }
180 j.updateProgress()
181 if j.Counter == totalProgress {
182 return
183 }
184 time.Sleep(time.Millisecond * time.Duration(j.Config.ProgressFrequency))
185 }
186 }
187
188 func (j *Job) updateProgress() {
189 prog := Progress{
190 StartedAt: j.startTime,
191 ReqCount: j.Counter,
192 ReqTotal: j.Input.Total(),
193 QueuePos: j.queuepos,
194 QueueTotal: len(j.queuejobs),
195 ErrorCount: j.ErrorCounter,
196 }
197 j.Output.Progress(prog)
198 }
199
200 func (j *Job) isMatch(resp Response) bool {
201 matched := false
202 for _, m := range j.Config.Matchers {
203 match, err := m.Filter(&resp)
204 if err != nil {
205 continue
206 }
207 if match {
208 matched = true
209 }
210 }
211 // The response was not matched, return before running filters
212 if !matched {
213 return false
214 }
215 for _, f := range j.Config.Filters {
216 fv, err := f.Filter(&resp)
217 if err != nil {
218 continue
219 }
220 if fv {
221 return false
222 }
223 }
224 return true
225 }
226
227 func (j *Job) runTask(input map[string][]byte, position int, retried bool) {
228 req, err := j.Runner.Prepare(input)
229 req.Position = position
230 if err != nil {
231 j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err))
232 j.incError()
233 log.Printf("%s", err)
234 return
235 }
236 resp, err := j.Runner.Execute(&req)
237 if err != nil {
238 if retried {
239 j.incError()
240 log.Printf("%s", err)
241 } else {
242 j.runTask(input, position, true)
243 }
244 return
245 }
246 if j.SpuriousErrorCounter > 0 {
247 j.resetSpuriousErrors()
248 }
249 if j.Config.StopOn403 || j.Config.StopOnAll {
250 // Increment Forbidden counter if we encountered one
251 if resp.StatusCode == 403 {
252 j.inc403()
253 }
254 }
255 if j.Config.StopOnAll {
256 // increment 429 counter if the response code is 429
257 if j.Config.StopOnAll {
258 if resp.StatusCode == 429 {
259 j.inc429()
260 }
261 }
262 }
263 if j.isMatch(resp) {
264 // Re-send request through replay-proxy if needed
265 if j.ReplayRunner != nil {
266 replayreq, err := j.ReplayRunner.Prepare(input)
267 replayreq.Position = position
268 if err != nil {
269 j.Output.Error(fmt.Sprintf("Encountered an error while preparing replayproxy request: %s\n", err))
270 j.incError()
271 log.Printf("%s", err)
272 } else {
273 _, _ = j.ReplayRunner.Execute(&replayreq)
274 }
275 }
276 j.Output.Result(resp)
277 // Refresh the progress indicator as we printed something out
278 j.updateProgress()
279 }
280
281 if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 {
282 j.handleRecursionJob(resp)
283 }
284 return
285 }
286
287 //handleRecursionJob adds a new recursion job to the job queue if a new directory is found
288 func (j *Job) handleRecursionJob(resp Response) {
289 if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) {
290 // Not a directory, return early
291 return
292 }
293 if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth {
294 // We have yet to reach the maximum recursion depth
295 recUrl := resp.Request.Url + "/" + "FUZZ"
296 newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1}
297 j.queuejobs = append(j.queuejobs, newJob)
298 j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl))
299 } else {
300 j.Output.Warning(fmt.Sprintf("Directory found, but recursion depth exceeded. Ignoring: %s", resp.GetRedirectLocation(true)))
301 }
302 }
303
304 //CalibrateResponses returns slice of Responses for randomly generated filter autocalibration requests
305 func (j *Job) CalibrateResponses() ([]Response, error) {
306 cInputs := make([]string, 0)
307 if len(j.Config.AutoCalibrationStrings) < 1 {
308 cInputs = append(cInputs, "admin"+RandomString(16)+"/")
309 cInputs = append(cInputs, ".htaccess"+RandomString(16))
310 cInputs = append(cInputs, RandomString(16)+"/")
311 cInputs = append(cInputs, RandomString(16))
312 } else {
313 cInputs = append(cInputs, j.Config.AutoCalibrationStrings...)
314 }
315
316 results := make([]Response, 0)
317 for _, input := range cInputs {
318 inputs := make(map[string][]byte, 0)
319 for _, v := range j.Config.InputProviders {
320 inputs[v.Keyword] = []byte(input)
321 }
322
323 req, err := j.Runner.Prepare(inputs)
324 if err != nil {
325 j.Output.Error(fmt.Sprintf("Encountered an error while preparing request: %s\n", err))
326 j.incError()
327 log.Printf("%s", err)
328 return results, err
329 }
330 resp, err := j.Runner.Execute(&req)
331 if err != nil {
332 return results, err
333 }
334
335 // Only calibrate on responses that would be matched otherwise
336 if j.isMatch(resp) {
337 results = append(results, resp)
338 }
339 }
340 return results, nil
341 }
342
343 // CheckStop stops the job if stopping conditions are met
344 func (j *Job) CheckStop() {
345 if j.Counter > 50 {
346 // We have enough samples
347 if j.Config.StopOn403 || j.Config.StopOnAll {
348 if float64(j.Count403)/float64(j.Counter) > 0.95 {
349 // Over 95% of requests are 403
350 j.Error = "Getting an unusual amount of 403 responses, exiting."
351 j.Stop()
352 }
353 }
354 if j.Config.StopOnErrors || j.Config.StopOnAll {
355 if j.SpuriousErrorCounter > j.Config.Threads*2 {
356 // Most of the requests are erroring
357 j.Error = "Receiving spurious errors, exiting."
358 j.Stop()
359 }
360
361 }
362 if j.Config.StopOnAll && (float64(j.Count429)/float64(j.Counter) > 0.2) {
363 // Over 20% of responses are 429
364 j.Error = "Getting an unusual amount of 429 responses, exiting."
365 j.Stop()
366 }
367 }
368
369 // check for maximum running time
370 if j.Config.MaxTime > 0 {
371 dur := time.Since(j.startTime)
372 runningSecs := int(dur / time.Second)
373 if runningSecs >= j.Config.MaxTime {
374 j.Error = "Maximum running time reached, exiting."
375 j.Stop()
376 }
377 }
378 }
379
380 //Stop the execution of the Job
381 func (j *Job) Stop() {
382 j.Running = false
383 return
384 }
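`startExecution` above caps concurrency with a buffered channel used as a counting semaphore: a send blocks once `Threads` tokens are outstanding, and each worker releases its token when it finishes. The pattern in isolation, with made-up task work and only the standard library:

```
package main

import (
	"fmt"
	"sync"
)

func main() {
	threads := 4
	limiter := make(chan bool, threads) // at most `threads` tasks run concurrently
	var wg sync.WaitGroup

	for i := 0; i < 20; i++ {
		limiter <- true // blocks while the buffer is full
		wg.Add(1)
		go func(n int) {
			defer func() { <-limiter }() // release the slot
			defer wg.Done()
			fmt.Println("task", n) // stand-in for runTask
		}(i)
	}
	wg.Wait()
}
```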
+0
-30
pkg/ffuf/multierror.go
0 package ffuf
1
2 import (
3 "fmt"
4 )
5
6 type Multierror struct {
7 errors []error
8 }
9
10 //NewMultierror returns a new Multierror
11 func NewMultierror() Multierror {
12 return Multierror{}
13 }
14
15 func (m *Multierror) Add(err error) {
16 m.errors = append(m.errors, err)
17 }
18
19 func (m *Multierror) ErrorOrNil() error {
20 var errString string
21 if len(m.errors) > 0 {
22 errString += fmt.Sprintf("%d errors occured.\n", len(m.errors))
23 for _, e := range m.errors {
24 errString += fmt.Sprintf("\t* %s\n", e)
25 }
26 return fmt.Errorf("%s", errString)
27 }
28 return nil
29 }
+0
-67
pkg/ffuf/optrange.go
0 package ffuf
1
2 import (
3 "encoding/json"
4 "fmt"
5 "strconv"
6 "strings"
7 )
8
9 //optRange stores either a single float, in which case the value is stored in Min and IsRange is false,
10 //or a range of floats, in which case IsRange is true
11 type optRange struct {
12 Min float64
13 Max float64
14 IsRange bool
15 HasDelay bool
16 }
17
18 type optRangeJSON struct {
19 Value string `json:"value"`
20 }
21
22 func (o *optRange) MarshalJSON() ([]byte, error) {
23 value := ""
24 if o.Min == o.Max {
25 value = fmt.Sprintf("%.2f", o.Min)
26 } else {
27 value = fmt.Sprintf("%.2f-%.2f", o.Min, o.Max)
28 }
29 return json.Marshal(&optRangeJSON{
30 Value: value,
31 })
32 }
33
34 func (o *optRange) UnmarshalJSON(b []byte) error {
35 var inc optRangeJSON
36 err := json.Unmarshal(b, &inc)
37 if err != nil {
38 return err
39 }
40 return o.Initialize(inc.Value)
41 }
42
43 //Initialize sets up the optRange from string value
44 func (o *optRange) Initialize(value string) error {
45 var err, err2 error
46 d := strings.Split(value, "-")
47 if len(d) > 2 {
48 return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")
49 } else if len(d) == 2 {
50 o.IsRange = true
51 o.HasDelay = true
52 o.Min, err = strconv.ParseFloat(d[0], 64)
53 o.Max, err2 = strconv.ParseFloat(d[1], 64)
54 if err != nil || err2 != nil {
55 return fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5")
56 }
57 } else if len(value) > 0 {
58 o.IsRange = false
59 o.HasDelay = true
60 o.Min, err = strconv.ParseFloat(value, 64)
61 if err != nil {
62 return fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")
63 }
64 }
65 return nil
66 }
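`Initialize` accepts either a single float or a dash-delimited range, the same syntax the `-p` delay flag uses. A short usage sketch; since `optRange` is unexported, this hypothetical helper would only compile inside the ffuf package itself:

```
package ffuf

import "fmt"

// exampleOptRange is a hypothetical illustration of optRange.Initialize.
func exampleOptRange() {
	var ranged optRange
	if err := ranged.Initialize("0.1-2.0"); err == nil {
		fmt.Println(ranged.IsRange, ranged.Min, ranged.Max) // true 0.1 2
	}
	var single optRange
	if err := single.Initialize("0.5"); err == nil {
		fmt.Println(single.IsRange, single.Min) // false 0.5
	}
}
```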
+0
-14
pkg/ffuf/progress.go
0 package ffuf
1
2 import (
3 "time"
4 )
5
6 type Progress struct {
7 StartedAt time.Time
8 ReqCount int
9 ReqTotal int
10 QueuePos int
11 QueueTotal int
12 ErrorCount int
13 }
+0
-20
pkg/ffuf/request.go less more
0 package ffuf
1
2 // Request holds the meaningful data that is passed to the runner for making the query
3 type Request struct {
4 Method string
5 Url string
6 Headers map[string]string
7 Data []byte
8 Input map[string][]byte
9 Position int
10 Raw string
11 }
12
13 func NewRequest(conf *Config) Request {
14 var req Request
15 req.Method = conf.Method
16 req.Url = conf.Url
17 req.Headers = make(map[string]string)
18 return req
19 }
+0
-54
pkg/ffuf/response.go less more
0 package ffuf
1
2 import (
3 "net/http"
4 "net/url"
5 )
6
7 // Response struct holds the meaningful data returned from request and is meant for passing to filters
8 type Response struct {
9 StatusCode int64
10 Headers map[string][]string
11 Data []byte
12 ContentLength int64
13 ContentWords int64
14 ContentLines int64
15 Cancelled bool
16 Request *Request
17 Raw string
18 ResultFile string
19 }
20
21 // GetRedirectLocation returns the redirect location for a 3xx redirect HTTP response
22 func (resp *Response) GetRedirectLocation(absolute bool) string {
23
24 redirectLocation := ""
25 if resp.StatusCode >= 300 && resp.StatusCode <= 399 {
26 redirectLocation = resp.Headers["Location"][0]
27 }
28
29 if absolute {
30 redirectUrl, err := url.Parse(redirectLocation)
31 if err != nil {
32 return redirectLocation
33 }
34 baseUrl, err := url.Parse(resp.Request.Url)
35 if err != nil {
36 return redirectLocation
37 }
38 redirectLocation = baseUrl.ResolveReference(redirectUrl).String()
39 }
40
41 return redirectLocation
42 }
43
44 func NewResponse(httpresp *http.Response, req *Request) Response {
45 var resp Response
46 resp.Request = req
47 resp.StatusCode = int64(httpresp.StatusCode)
48 resp.Headers = httpresp.Header
49 resp.Cancelled = false
50 resp.Raw = ""
51 resp.ResultFile = ""
52 return resp
53 }
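A hedged sketch of GetRedirectLocation resolving a relative Location header against the request URL (hypothetical target and values):

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
)

func main() {
    resp := ffuf.Response{
        StatusCode: 302,
        Headers:    map[string][]string{"Location": {"/admin/"}},
        Request:    &ffuf.Request{Url: "https://target.example/FUZZ"},
    }
    fmt.Println(resp.GetRedirectLocation(false)) // "/admin/", exactly as the server sent it
    fmt.Println(resp.GetRedirectLocation(true))  // "https://target.example/admin/", resolved against the request URL
}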
+0
-31
pkg/ffuf/util.go less more
0 package ffuf
1
2 import (
3 "math/rand"
4 )
5
6 //used for random string generation in calibration function
7 var chars = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
8
9 //RandomString returns a random string of length of parameter n
10 func RandomString(n int) string {
11 s := make([]rune, n)
12 for i := range s {
13 s[i] = chars[rand.Intn(len(chars))]
14 }
15 return string(s)
16 }
17
18 //UniqStringSlice returns an unordered slice of unique strings. The duplicates are dropped
19 func UniqStringSlice(inslice []string) []string {
20 found := map[string]bool{}
21
22 for _, v := range inslice {
23 found[v] = true
24 }
25 ret := []string{}
26 for k := range found {
27 ret = append(ret, k)
28 }
29 return ret
30 }
+0
-38
pkg/ffuf/valuerange.go less more
0 package ffuf
1
2 import (
3 "fmt"
4 "regexp"
5 "strconv"
6 )
7
8 type ValueRange struct {
9 Min, Max int64
10 }
11
12 func ValueRangeFromString(instr string) (ValueRange, error) {
13 // is the value a range
14 minmax := regexp.MustCompile("^(\\d+)\\-(\\d+)$").FindAllStringSubmatch(instr, -1)
15 if minmax != nil {
16 // yes
17 minval, err := strconv.ParseInt(minmax[0][1], 10, 0)
18 if err != nil {
19 return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][1])
20 }
21 maxval, err := strconv.ParseInt(minmax[0][2], 10, 0)
22 if err != nil {
23 return ValueRange{}, fmt.Errorf("Invalid value: %s", minmax[0][2])
24 }
25 if minval >= maxval {
26 return ValueRange{}, fmt.Errorf("Minimum has to be smaller than maximum")
27 }
28 return ValueRange{minval, maxval}, nil
29 } else {
30 // no, a single value or something else
31 intval, err := strconv.ParseInt(instr, 10, 0)
32 if err != nil {
33 return ValueRange{}, fmt.Errorf("Invalid value: %s", instr)
34 }
35 return ValueRange{intval, intval}, nil
36 }
37 }
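ValueRangeFromString backs the size/word/line/status filters in pkg/filter. A small sketch with hypothetical values:

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
)

func main() {
    vr, err := ffuf.ValueRangeFromString("400-410")
    fmt.Println(vr.Min, vr.Max, err) // 400 410 <nil>

    single, err := ffuf.ValueRangeFromString("200")
    fmt.Println(single.Min, single.Max, err) // 200 200 <nil>

    _, err = ffuf.ValueRangeFromString("500-400")
    fmt.Println(err) // error: minimum has to be smaller than maximum
}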
+0
-97
pkg/filter/filter.go less more
0 package filter
1
2 import (
3 "fmt"
4 "strconv"
5 "strings"
6
7 "github.com/ffuf/ffuf/pkg/ffuf"
8 )
9
10 func NewFilterByName(name string, value string) (ffuf.FilterProvider, error) {
11 if name == "status" {
12 return NewStatusFilter(value)
13 }
14 if name == "size" {
15 return NewSizeFilter(value)
16 }
17 if name == "word" {
18 return NewWordFilter(value)
19 }
20 if name == "line" {
21 return NewLineFilter(value)
22 }
23 if name == "regexp" {
24 return NewRegexpFilter(value)
25 }
26 return nil, fmt.Errorf("Could not create filter with name %s", name)
27 }
28
29 //AddFilter adds a new filter to Config
30 func AddFilter(conf *ffuf.Config, name string, option string) error {
31 newf, err := NewFilterByName(name, option)
32 if err == nil {
33 conf.Filters[name] = newf
34 }
35 return err
36 }
37
38 //AddMatcher adds a new matcher to Config
39 func AddMatcher(conf *ffuf.Config, name string, option string) error {
40 newf, err := NewFilterByName(name, option)
41 if err == nil {
42 conf.Matchers[name] = newf
43 }
44 return err
45 }
46
47 //CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and acting accordingly
48 func CalibrateIfNeeded(j *ffuf.Job) error {
49 if !j.Config.AutoCalibration {
50 return nil
51 }
52 // Handle the calibration
53 responses, err := j.CalibrateResponses()
54 if err != nil {
55 return err
56 }
57 if len(responses) > 0 {
58 calibrateFilters(j, responses)
59 }
60 return nil
61 }
62
63 func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) {
64 sizeCalib := make([]string, 0)
65 wordCalib := make([]string, 0)
66 lineCalib := make([]string, 0)
67 for _, r := range responses {
68 if r.ContentLength > 0 {
69 // Only add if we have an actual size of responses
70 sizeCalib = append(sizeCalib, strconv.FormatInt(r.ContentLength, 10))
71 }
72 if r.ContentWords > 0 {
73 // Only add if we have an actual word count of response
74 wordCalib = append(wordCalib, strconv.FormatInt(r.ContentWords, 10))
75 }
76 if r.ContentLines > 1 {
77 // Only add if we have an actual line count of response
78 lineCalib = append(lineCalib, strconv.FormatInt(r.ContentLines, 10))
79 }
80 }
81
82 //Remove duplicates
83 sizeCalib = ffuf.UniqStringSlice(sizeCalib)
84 wordCalib = ffuf.UniqStringSlice(wordCalib)
85 lineCalib = ffuf.UniqStringSlice(lineCalib)
86
87 if len(sizeCalib) > 0 {
88 AddFilter(j.Config, "size", strings.Join(sizeCalib, ","))
89 }
90 if len(wordCalib) > 0 {
91 AddFilter(j.Config, "word", strings.Join(wordCalib, ","))
92 }
93 if len(lineCalib) > 0 {
94 AddFilter(j.Config, "line", strings.Join(lineCalib, ","))
95 }
96 }
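A hedged usage sketch for the factory above: create a status matcher/filter by name and run it against a response (hypothetical values):

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
    "github.com/ffuf/ffuf/pkg/filter"
)

func main() {
    f, err := filter.NewFilterByName("status", "200,301,400-410")
    if err != nil {
        panic(err)
    }
    matched, _ := f.Filter(&ffuf.Response{StatusCode: 404})
    fmt.Println(matched) // true: 404 falls inside the 400-410 range
}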
+0
-46
pkg/filter/filter_test.go less more
0 package filter
1
2 import (
3 "testing"
4 )
5
6 func TestNewFilterByName(t *testing.T) {
7 scf, _ := NewFilterByName("status", "200")
8 if _, ok := scf.(*StatusFilter); !ok {
9 t.Errorf("Was expecting statusfilter")
10 }
11
12 szf, _ := NewFilterByName("size", "200")
13 if _, ok := szf.(*SizeFilter); !ok {
14 t.Errorf("Was expecting sizefilter")
15 }
16
17 wf, _ := NewFilterByName("word", "200")
18 if _, ok := wf.(*WordFilter); !ok {
19 t.Errorf("Was expecting wordfilter")
20 }
21
22 lf, _ := NewFilterByName("line", "200")
23 if _, ok := lf.(*LineFilter); !ok {
24 t.Errorf("Was expecting linefilter")
25 }
26
27 ref, _ := NewFilterByName("regexp", "200")
28 if _, ok := ref.(*RegexpFilter); !ok {
29 t.Errorf("Was expecting regexpfilter")
30 }
31 }
32
33 func TestNewFilterByNameError(t *testing.T) {
34 _, err := NewFilterByName("status", "invalid")
35 if err == nil {
35 t.Errorf("Was expecting an error")
37 }
38 }
39
40 func TestNewFilterByNameNotFound(t *testing.T) {
41 _, err := NewFilterByName("nonexistent", "invalid")
42 if err == nil {
43 t.Errorf("Was expecting an error with invalid filter name")
44 }
45 }
+0
-64
pkg/filter/lines.go less more
0 package filter
1
2 import (
3 "encoding/json"
4 "fmt"
5 "strconv"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type LineFilter struct {
12 Value []ffuf.ValueRange
13 }
14
15 func NewLineFilter(value string) (ffuf.FilterProvider, error) {
16 var intranges []ffuf.ValueRange
17 for _, sv := range strings.Split(value, ",") {
18 vr, err := ffuf.ValueRangeFromString(sv)
19 if err != nil {
20 return &LineFilter{}, fmt.Errorf("Line filter or matcher (-fl / -ml): invalid value: %s", sv)
21 }
22 intranges = append(intranges, vr)
23 }
24 return &LineFilter{Value: intranges}, nil
25 }
26
27 func (f *LineFilter) MarshalJSON() ([]byte, error) {
28 value := make([]string, 0)
29 for _, v := range f.Value {
30 if v.Min == v.Max {
31 value = append(value, strconv.FormatInt(v.Min, 10))
32 } else {
33 value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
34 }
35 }
36 return json.Marshal(&struct {
37 Value string `json:"value"`
38 }{
39 Value: strings.Join(value, ","),
40 })
41 }
42
43 func (f *LineFilter) Filter(response *ffuf.Response) (bool, error) {
44 linesSize := len(strings.Split(string(response.Data), "\n"))
45 for _, iv := range f.Value {
46 if iv.Min <= int64(linesSize) && int64(linesSize) <= iv.Max {
47 return true, nil
48 }
49 }
50 return false, nil
51 }
52
53 func (f *LineFilter) Repr() string {
54 var strval []string
55 for _, iv := range f.Value {
56 if iv.Min == iv.Max {
57 strval = append(strval, strconv.Itoa(int(iv.Min)))
58 } else {
59 strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
60 }
61 }
62 return fmt.Sprintf("Response lines: %s", strings.Join(strval, ","))
63 }
+0
-52
pkg/filter/lines_test.go less more
0 package filter
1
2 import (
3 "strings"
4 "testing"
5
6 "github.com/ffuf/ffuf/pkg/ffuf"
7 )
8
9 func TestNewLineFilter(t *testing.T) {
10 f, _ := NewLineFilter("200,301,400-410,500")
11 linesRepr := f.Repr()
12 if strings.Index(linesRepr, "200,301,400-410,500") == -1 {
13 t.Errorf("Line filter was expected to have 4 values")
14 }
15 }
16
17 func TestNewLineFilterError(t *testing.T) {
18 _, err := NewLineFilter("invalid")
19 if err == nil {
20 t.Errorf("Was expecting an error from erroneous input data")
21 }
22 }
23
24 func TestLineFiltering(t *testing.T) {
25 f, _ := NewLineFilter("200,301,402-450,500")
26 for i, test := range []struct {
27 input int64
28 output bool
29 }{
30 {200, true},
31 {301, true},
32 {500, true},
33 {4, false},
34 {444, true},
35 {302, false},
36 {401, false},
37 {402, true},
38 {450, true},
39 {451, false},
40 } {
41 var data []string
42 for i := int64(0); i < test.input; i++ {
43 data = append(data, "A")
44 }
45 resp := ffuf.Response{Data: []byte(strings.Join(data, " "))}
46 filterReturn, _ := f.Filter(&resp)
47 if filterReturn != test.output {
48 t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn)
49 }
50 }
51 }
+0
-55
pkg/filter/regex.go less more
0 package filter
1
2 import (
3 "encoding/json"
4 "fmt"
5 "regexp"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type RegexpFilter struct {
12 Value *regexp.Regexp
13 valueRaw string
14 }
15
16 func NewRegexpFilter(value string) (ffuf.FilterProvider, error) {
17 re, err := regexp.Compile(value)
18 if err != nil {
19 return &RegexpFilter{}, fmt.Errorf("Regexp filter or matcher (-fr / -mr): invalid value: %s", value)
20 }
21 return &RegexpFilter{Value: re, valueRaw: value}, nil
22 }
23
24 func (f *RegexpFilter) MarshalJSON() ([]byte, error) {
25 return json.Marshal(&struct {
26 Value string `json:"value"`
27 }{
28 Value: f.valueRaw,
29 })
30 }
31
32 func (f *RegexpFilter) Filter(response *ffuf.Response) (bool, error) {
33 matchheaders := ""
34 for k, v := range response.Headers {
35 for _, iv := range v {
36 matchheaders += k + ": " + iv + "\r\n"
37 }
38 }
39 matchdata := []byte(matchheaders)
40 matchdata = append(matchdata, response.Data...)
41 pattern := f.valueRaw
42 for keyword, inputitem := range response.Request.Input {
43 pattern = strings.Replace(pattern, keyword, regexp.QuoteMeta(string(inputitem)), -1)
44 }
45 matched, err := regexp.Match(pattern, matchdata)
46 if err != nil {
47 return false, nil
48 }
49 return matched, nil
50 }
51
52 func (f *RegexpFilter) Repr() string {
53 return fmt.Sprintf("Regexp: %s", f.valueRaw)
54 }
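The notable behaviour of RegexpFilter.Filter is that keywords (such as FUZZ) inside the -mr/-fr pattern are replaced with the regex-quoted input of the current request before matching. A hedged sketch with hypothetical values:

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
    "github.com/ffuf/ffuf/pkg/filter"
)

func main() {
    f, _ := filter.NewRegexpFilter("not found: FUZZ")
    resp := ffuf.Response{
        Data:    []byte("not found: admin.php"),
        Request: &ffuf.Request{Input: map[string][]byte{"FUZZ": []byte("admin.php")}},
    }
    matched, _ := f.Filter(&resp)
    fmt.Println(matched) // true: FUZZ was substituted with the quoted input "admin\.php"
}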
+0
-44
pkg/filter/regexp_test.go less more
0 package filter
1
2 import (
3 "strings"
4 "testing"
5
6 "github.com/ffuf/ffuf/pkg/ffuf"
7 )
8
9 func TestNewRegexpFilter(t *testing.T) {
10 f, _ := NewRegexpFilter("s([a-z]+)arch")
11 statusRepr := f.Repr()
12 if strings.Index(statusRepr, "s([a-z]+)arch") == -1 {
13 t.Errorf("Regexp filter was expected to have a regexp value")
14 }
15 }
16
17 func TestNewRegexpFilterError(t *testing.T) {
18 _, err := NewRegexpFilter("r((")
19 if err == nil {
20 t.Errorf("Was expecting an error from erroneous input data")
21 }
22 }
23
24 func TestRegexpFiltering(t *testing.T) {
25 f, _ := NewRegexpFilter("s([a-z]+)arch")
26 for i, test := range []struct {
27 input string
28 output bool
29 }{
30 {"search", true},
31 {"text and search", true},
32 {"sbarch in beginning", true},
33 {"midd scarch le", true},
34 {"s1arch", false},
35 {"invalid", false},
36 } {
37 resp := ffuf.Response{Data: []byte(test.input)}
38 filterReturn, _ := f.Filter(&resp)
39 if filterReturn != test.output {
40 t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn)
41 }
42 }
43 }
+0
-64
pkg/filter/size.go less more
0 package filter
1
2 import (
3 "encoding/json"
4 "fmt"
5 "strconv"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type SizeFilter struct {
12 Value []ffuf.ValueRange
13 }
14
15 func NewSizeFilter(value string) (ffuf.FilterProvider, error) {
16 var intranges []ffuf.ValueRange
17 for _, sv := range strings.Split(value, ",") {
18 vr, err := ffuf.ValueRangeFromString(sv)
19 if err != nil {
20 return &SizeFilter{}, fmt.Errorf("Size filter or matcher (-fs / -ms): invalid value: %s", sv)
21 }
22
23 intranges = append(intranges, vr)
24 }
25 return &SizeFilter{Value: intranges}, nil
26 }
27
28 func (f *SizeFilter) MarshalJSON() ([]byte, error) {
29 value := make([]string, 0)
30 for _, v := range f.Value {
31 if v.Min == v.Max {
32 value = append(value, strconv.FormatInt(v.Min, 10))
33 } else {
34 value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
35 }
36 }
37 return json.Marshal(&struct {
38 Value string `json:"value"`
39 }{
40 Value: strings.Join(value, ","),
41 })
42 }
43
44 func (f *SizeFilter) Filter(response *ffuf.Response) (bool, error) {
45 for _, iv := range f.Value {
46 if iv.Min <= response.ContentLength && response.ContentLength <= iv.Max {
47 return true, nil
48 }
49 }
50 return false, nil
51 }
52
53 func (f *SizeFilter) Repr() string {
54 var strval []string
55 for _, iv := range f.Value {
56 if iv.Min == iv.Max {
57 strval = append(strval, strconv.Itoa(int(iv.Min)))
58 } else {
59 strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
60 }
61 }
62 return fmt.Sprintf("Response size: %s", strings.Join(strval, ","))
63 }
+0
-47
pkg/filter/size_test.go less more
0 package filter
1
2 import (
3 "strings"
4 "testing"
5
6 "github.com/ffuf/ffuf/pkg/ffuf"
7 )
8
9 func TestNewSizeFilter(t *testing.T) {
10 f, _ := NewSizeFilter("1,2,3,444,5-90")
11 sizeRepr := f.Repr()
12 if strings.Index(sizeRepr, "1,2,3,444,5-90") == -1 {
13 t.Errorf("Size filter was expected to have 5 values")
14 }
15 }
16
17 func TestNewSizeFilterError(t *testing.T) {
18 _, err := NewSizeFilter("invalid")
19 if err == nil {
20 t.Errorf("Was expecting an error from erroneous input data")
21 }
22 }
23
24 func TestFiltering(t *testing.T) {
25 f, _ := NewSizeFilter("1,2,3,5-90,444")
26 for i, test := range []struct {
27 input int64
28 output bool
29 }{
30 {1, true},
31 {2, true},
32 {3, true},
33 {4, false},
34 {5, true},
35 {70, true},
36 {90, true},
37 {91, false},
38 {444, true},
39 } {
40 resp := ffuf.Response{ContentLength: test.input}
41 filterReturn, _ := f.Filter(&resp)
42 if filterReturn != test.output {
43 t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn)
44 }
45 }
46 }
+0
-79
pkg/filter/status.go less more
0 package filter
1
2 import (
3 "encoding/json"
4 "fmt"
5 "strconv"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 const AllStatuses = 0
12
13 type StatusFilter struct {
14 Value []ffuf.ValueRange
15 }
16
17 func NewStatusFilter(value string) (ffuf.FilterProvider, error) {
18 var intranges []ffuf.ValueRange
19 for _, sv := range strings.Split(value, ",") {
20 if sv == "all" {
21 intranges = append(intranges, ffuf.ValueRange{AllStatuses, AllStatuses})
22 } else {
23 vr, err := ffuf.ValueRangeFromString(sv)
24 if err != nil {
25 return &StatusFilter{}, fmt.Errorf("Status filter or matcher (-fc / -mc): invalid value %s", sv)
26 }
27 intranges = append(intranges, vr)
28 }
29 }
30 return &StatusFilter{Value: intranges}, nil
31 }
32
33 func (f *StatusFilter) MarshalJSON() ([]byte, error) {
34 value := make([]string, 0)
35 for _, v := range f.Value {
36 if v.Min == 0 && v.Max == 0 {
37 value = append(value, "all")
38 } else {
39 if v.Min == v.Max {
40 value = append(value, strconv.FormatInt(v.Min, 10))
41 } else {
42 value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
43 }
44 }
45 }
46 return json.Marshal(&struct {
47 Value string `json:"value"`
48 }{
49 Value: strings.Join(value, ","),
50 })
51 }
52
53 func (f *StatusFilter) Filter(response *ffuf.Response) (bool, error) {
54 for _, iv := range f.Value {
55 if iv.Min == AllStatuses && iv.Max == AllStatuses {
56 // Handle the "all" case
57 return true, nil
58 }
59 if iv.Min <= response.StatusCode && response.StatusCode <= iv.Max {
60 return true, nil
61 }
62 }
63 return false, nil
64 }
65
66 func (f *StatusFilter) Repr() string {
67 var strval []string
68 for _, iv := range f.Value {
69 if iv.Min == AllStatuses && iv.Max == AllStatuses {
70 strval = append(strval, "all")
71 } else if iv.Min == iv.Max {
72 strval = append(strval, strconv.Itoa(int(iv.Min)))
73 } else {
74 strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
75 }
76 }
77 return fmt.Sprintf("Response status: %s", strings.Join(strval, ","))
78 }
+0
-48
pkg/filter/status_test.go less more
0 package filter
1
2 import (
3 "strings"
4 "testing"
5
6 "github.com/ffuf/ffuf/pkg/ffuf"
7 )
8
9 func TestNewStatusFilter(t *testing.T) {
10 f, _ := NewStatusFilter("200,301,400-410,500")
11 statusRepr := f.Repr()
12 if strings.Index(statusRepr, "200,301,400-410,500") == -1 {
13 t.Errorf("Status filter was expected to have 4 values")
14 }
15 }
16
17 func TestNewStatusFilterError(t *testing.T) {
18 _, err := NewStatusFilter("invalid")
19 if err == nil {
20 t.Errorf("Was expecting an error from erroneous input data")
21 }
22 }
23
24 func TestStatusFiltering(t *testing.T) {
25 f, _ := NewStatusFilter("200,301,400-498,500")
26 for i, test := range []struct {
27 input int64
28 output bool
29 }{
30 {200, true},
31 {301, true},
32 {500, true},
33 {4, false},
34 {399, false},
35 {400, true},
36 {444, true},
37 {498, true},
38 {499, false},
39 {302, false},
40 } {
41 resp := ffuf.Response{StatusCode: test.input}
42 filterReturn, _ := f.Filter(&resp)
43 if filterReturn != test.output {
44 t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn)
45 }
46 }
47 }
+0
-64
pkg/filter/words.go less more
0 package filter
1
2 import (
3 "encoding/json"
4 "fmt"
5 "strconv"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type WordFilter struct {
12 Value []ffuf.ValueRange
13 }
14
15 func NewWordFilter(value string) (ffuf.FilterProvider, error) {
16 var intranges []ffuf.ValueRange
17 for _, sv := range strings.Split(value, ",") {
18 vr, err := ffuf.ValueRangeFromString(sv)
19 if err != nil {
20 return &WordFilter{}, fmt.Errorf("Word filter or matcher (-fw / -mw): invalid value: %s", sv)
21 }
22 intranges = append(intranges, vr)
23 }
24 return &WordFilter{Value: intranges}, nil
25 }
26
27 func (f *WordFilter) MarshalJSON() ([]byte, error) {
28 value := make([]string, 0)
29 for _, v := range f.Value {
30 if v.Min == v.Max {
31 value = append(value, strconv.FormatInt(v.Min, 10))
32 } else {
33 value = append(value, fmt.Sprintf("%d-%d", v.Min, v.Max))
34 }
35 }
36 return json.Marshal(&struct {
37 Value string `json:"value"`
38 }{
39 Value: strings.Join(value, ","),
40 })
41 }
42
43 func (f *WordFilter) Filter(response *ffuf.Response) (bool, error) {
44 wordsSize := len(strings.Split(string(response.Data), " "))
45 for _, iv := range f.Value {
46 if iv.Min <= int64(wordsSize) && int64(wordsSize) <= iv.Max {
47 return true, nil
48 }
49 }
50 return false, nil
51 }
52
53 func (f *WordFilter) Repr() string {
54 var strval []string
55 for _, iv := range f.Value {
56 if iv.Min == iv.Max {
57 strval = append(strval, strconv.Itoa(int(iv.Min)))
58 } else {
59 strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max)))
60 }
61 }
62 return fmt.Sprintf("Response words: %s", strings.Join(strval, ","))
63 }
+0
-52
pkg/filter/words_test.go less more
0 package filter
1
2 import (
3 "strings"
4 "testing"
5
6 "github.com/ffuf/ffuf/pkg/ffuf"
7 )
8
9 func TestNewWordFilter(t *testing.T) {
10 f, _ := NewWordFilter("200,301,400-410,500")
11 wordsRepr := f.Repr()
12 if strings.Index(wordsRepr, "200,301,400-410,500") == -1 {
13 t.Errorf("Word filter was expected to have 4 values")
14 }
15 }
16
17 func TestNewWordFilterError(t *testing.T) {
18 _, err := NewWordFilter("invalid")
19 if err == nil {
20 t.Errorf("Was expecting an error from erroneous input data")
21 }
22 }
23
24 func TestWordFiltering(t *testing.T) {
25 f, _ := NewWordFilter("200,301,402-450,500")
26 for i, test := range []struct {
27 input int64
28 output bool
29 }{
30 {200, true},
31 {301, true},
32 {500, true},
33 {4, false},
34 {444, true},
35 {302, false},
36 {401, false},
37 {402, true},
38 {450, true},
39 {451, false},
40 } {
41 var data []string
42 for i := int64(0); i < test.input; i++ {
43 data = append(data, "A")
44 }
45 resp := ffuf.Response{Data: []byte(strings.Join(data, " "))}
46 filterReturn, _ := f.Filter(&resp)
47 if filterReturn != test.output {
48 t.Errorf("Filter test %d: Was expecting filter return value of %t but got %t", i, test.output, filterReturn)
49 }
50 }
51 }
+0
-72
pkg/input/command.go less more
0 package input
1
2 import (
3 "bytes"
4 "os"
5 "os/exec"
6 "strconv"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type CommandInput struct {
12 config *ffuf.Config
13 count int
14 keyword string
15 command string
16 }
17
18 func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) {
19 var cmd CommandInput
20 cmd.keyword = keyword
21 cmd.config = conf
22 cmd.count = 0
23 cmd.command = value
24 return &cmd, nil
25 }
26
27 //Keyword returns the keyword assigned to this InternalInputProvider
28 func (c *CommandInput) Keyword() string {
29 return c.keyword
30 }
31
32 //Position will return the current position in the input list
33 func (c *CommandInput) Position() int {
34 return c.count
35 }
36
37 //ResetPosition will reset the current position of the InternalInputProvider
38 func (c *CommandInput) ResetPosition() {
39 c.count = 0
40 }
41
42 //IncrementPosition increments the current position in the inputprovider
43 func (c *CommandInput) IncrementPosition() {
44 c.count += 1
45 }
46
47 //Next returns a boolean telling if there are iterations left
48 func (c *CommandInput) Next() bool {
49 if c.count >= c.config.InputNum {
50 return false
51 }
52 return true
53 }
54
55 //Value returns the input read from the command's standard output
56 func (c *CommandInput) Value() []byte {
57 var stdout bytes.Buffer
58 os.Setenv("FFUF_NUM", strconv.Itoa(c.count))
59 cmd := exec.Command(SHELL_CMD, SHELL_ARG, c.command)
60 cmd.Stdout = &stdout
61 err := cmd.Run()
62 if err != nil {
63 return []byte("")
64 }
65 return stdout.Bytes()
66 }
67
68 //Total returns the number of inputs to be generated (InputNum)
69 func (c *CommandInput) Total() int {
70 return c.config.InputNum
71 }
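A sketch of the command input provider: the current position is exported to the child process through the FFUF_NUM environment variable. The command is hypothetical, and the sketch assumes a Unix shell since SHELL_CMD is /bin/sh on non-Windows builds:

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
    "github.com/ffuf/ffuf/pkg/input"
)

func main() {
    conf := &ffuf.Config{InputNum: 3}
    cmd, _ := input.NewCommandInput("FUZZ", "printf user-$FFUF_NUM", conf)
    for cmd.Next() {
        fmt.Printf("%s\n", cmd.Value()) // user-0, user-1, user-2
        cmd.IncrementPosition()
    }
}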
+0
-8
pkg/input/const.go less more
0 // +build !windows
1
2 package input
3
4 const (
5 SHELL_CMD = "/bin/sh"
6 SHELL_ARG = "-c"
7 )
+0
-8
pkg/input/const_windows.go less more
0 // +build windows
1
2 package input
3
4 const (
5 SHELL_CMD = "cmd.exe"
6 SHELL_ARG = "/C"
7 )
+0
-155
pkg/input/input.go less more
0 package input
1
2 import (
3 "fmt"
4
5 "github.com/ffuf/ffuf/pkg/ffuf"
6 )
7
8 type MainInputProvider struct {
9 Providers []ffuf.InternalInputProvider
10 Config *ffuf.Config
11 position int
12 msbIterator int
13 }
14
15 func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, error) {
16 validmode := false
17 for _, mode := range []string{"clusterbomb", "pitchfork"} {
18 if conf.InputMode == mode {
19 validmode = true
20 }
21 }
22 if !validmode {
23 return &MainInputProvider{}, fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)
24 }
25 return &MainInputProvider{Config: conf, msbIterator: 0}, nil
26 }
27
28 func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error {
29 if provider.Name == "command" {
30 newcomm, _ := NewCommandInput(provider.Keyword, provider.Value, i.Config)
31 i.Providers = append(i.Providers, newcomm)
32 } else {
33 // Default to wordlist
34 newwl, err := NewWordlistInput(provider.Keyword, provider.Value, i.Config)
35 if err != nil {
36 return err
37 }
38 i.Providers = append(i.Providers, newwl)
39 }
40 return nil
41 }
42
43 //Position will return the current position of progress
44 func (i *MainInputProvider) Position() int {
45 return i.position
46 }
47
48 //Next will increment the cursor position, and return a boolean telling if there are inputs left
49 func (i *MainInputProvider) Next() bool {
50 if i.position >= i.Total() {
51 return false
52 }
53 i.position++
54 return true
55 }
56
57 //Value returns a map of inputs for keywords
58 func (i *MainInputProvider) Value() map[string][]byte {
59 retval := make(map[string][]byte)
60 if i.Config.InputMode == "clusterbomb" {
61 retval = i.clusterbombValue()
62 }
63 if i.Config.InputMode == "pitchfork" {
64 retval = i.pitchforkValue()
65 }
66 return retval
67 }
68
69 //Reset resets all the inputproviders and counters
70 func (i *MainInputProvider) Reset() {
71 for _, p := range i.Providers {
72 p.ResetPosition()
73 }
74 i.position = 0
75 i.msbIterator = 0
76 }
77
78 //pitchforkValue returns a map of keyword:value pairs including all inputs.
79 //This mode will iterate through wordlists in lockstep.
80 func (i *MainInputProvider) pitchforkValue() map[string][]byte {
81 values := make(map[string][]byte)
82 for _, p := range i.Providers {
83 if !p.Next() {
84 // Loop to beginning if the inputprovider has been exhausted
85 p.ResetPosition()
86 }
87 values[p.Keyword()] = p.Value()
88 p.IncrementPosition()
89 }
90 return values
91 }
92
93 //clusterbombValue returns a map of keyword:value pairs including all inputs.
94 //This mode will iterate through all possible combinations.
95 func (i *MainInputProvider) clusterbombValue() map[string][]byte {
96 values := make(map[string][]byte)
97 // Should we signal the next InputProvider in the slice to increment
98 signalNext := false
99 first := true
100 for index, p := range i.Providers {
101 if signalNext {
102 p.IncrementPosition()
103 signalNext = false
104 }
105 if !p.Next() {
106 // No more inputs in this inputprovider
107 if index == i.msbIterator {
108 // Reset all previous wordlists and increment the msb counter
109 i.msbIterator += 1
110 i.clusterbombIteratorReset()
111 // Start again
112 return i.clusterbombValue()
113 }
114 p.ResetPosition()
115 signalNext = true
116 }
117 values[p.Keyword()] = p.Value()
118 if first {
119 p.IncrementPosition()
120 first = false
121 }
122 }
123 return values
124 }
125
126 func (i *MainInputProvider) clusterbombIteratorReset() {
127 for index, p := range i.Providers {
128 if index < i.msbIterator {
129 p.ResetPosition()
130 }
131 if index == i.msbIterator {
132 p.IncrementPosition()
133 }
134 }
135 }
136
137 //Total returns the amount of input combinations available
138 func (i *MainInputProvider) Total() int {
139 count := 0
140 if i.Config.InputMode == "pitchfork" {
141 for _, p := range i.Providers {
142 if p.Total() > count {
143 count = p.Total()
144 }
145 }
146 }
147 if i.Config.InputMode == "clusterbomb" {
148 count = 1
149 for _, p := range i.Providers {
150 count = count * p.Total()
151 }
152 }
153 return count
154 }
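A hedged sketch of how the two modes differ in total combinations. The wordlist paths are hypothetical, and the sketch assumes AddProvider and Total are part of the ffuf.InputProvider interface, as the main program uses them. With a 3-entry users.txt and a 4-entry exts.txt, clusterbomb yields 3*4 = 12 requests while pitchfork yields max(3,4) = 4:

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
    "github.com/ffuf/ffuf/pkg/input"
)

func main() {
    conf := &ffuf.Config{InputMode: "clusterbomb"}
    provider, err := input.NewInputProvider(conf)
    if err != nil {
        panic(err)
    }
    for _, p := range []ffuf.InputProviderConfig{
        {Name: "wordlist", Keyword: "USER", Value: "users.txt"},
        {Name: "wordlist", Keyword: "EXT", Value: "exts.txt"},
    } {
        if err := provider.AddProvider(p); err != nil {
            panic(err)
        }
    }
    fmt.Println(provider.Total()) // 12 in clusterbomb mode, 4 if InputMode were "pitchfork"
}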
+0
-168
pkg/input/wordlist.go less more
0 package input
1
2 import (
3 "bufio"
4 "os"
5 "regexp"
6 "strings"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 type WordlistInput struct {
12 config *ffuf.Config
13 data [][]byte
14 position int
15 keyword string
16 }
17
18 func NewWordlistInput(keyword string, value string, conf *ffuf.Config) (*WordlistInput, error) {
19 var wl WordlistInput
20 wl.keyword = keyword
21 wl.config = conf
22 wl.position = 0
23 var valid bool
24 var err error
25 // stdin?
26 if value == "-" {
27 // yes
28 valid = true
29 } else {
30 // no
31 valid, err = wl.validFile(value)
32 }
33 if err != nil {
34 return &wl, err
35 }
36 if valid {
37 err = wl.readFile(value)
38 }
39 return &wl, err
40 }
41
42 //Position will return the current position in the input list
43 func (w *WordlistInput) Position() int {
44 return w.position
45 }
46
47 //ResetPosition resets the position back to beginning of the wordlist.
48 func (w *WordlistInput) ResetPosition() {
49 w.position = 0
50 }
51
52 //Keyword returns the keyword assigned to this InternalInputProvider
53 func (w *WordlistInput) Keyword() string {
54 return w.keyword
55 }
56
57 //Next returns a boolean telling if there are words left in the list
58 func (w *WordlistInput) Next() bool {
59 if w.position >= len(w.data) {
60 return false
61 }
62 return true
63 }
64
65 //IncrementPosition will increment the current position in the inputprovider data slice
66 func (w *WordlistInput) IncrementPosition() {
67 w.position += 1
68 }
69
70 //Value returns the value from wordlist at current cursor position
71 func (w *WordlistInput) Value() []byte {
72 return w.data[w.position]
73 }
74
75 //Total returns the size of wordlist
76 func (w *WordlistInput) Total() int {
77 return len(w.data)
78 }
79
80 //validFile checks that the wordlist file exists and can be read
81 func (w *WordlistInput) validFile(path string) (bool, error) {
82 _, err := os.Stat(path)
83 if err != nil {
84 return false, err
85 }
86 f, err := os.Open(path)
87 if err != nil {
88 return false, err
89 }
90 f.Close()
91 return true, nil
92 }
93
94 //readFile reads the file line by line to a byte slice
95 func (w *WordlistInput) readFile(path string) error {
96 var file *os.File
97 var err error
98 if path == "-" {
99 file = os.Stdin
100 } else {
101 file, err = os.Open(path)
102 if err != nil {
103 return err
104 }
105 }
106 defer file.Close()
107
108 var data [][]byte
109 var ok bool
110 reader := bufio.NewScanner(file)
111 re := regexp.MustCompile(`(?i)%ext%`)
112 for reader.Scan() {
113 if w.config.DirSearchCompat && len(w.config.Extensions) > 0 {
114 text := []byte(reader.Text())
115 if re.Match(text) {
116 for _, ext := range w.config.Extensions {
117 content := re.ReplaceAll(text, []byte(ext))
118 data = append(data, content)
119 }
120 } else {
121 text := reader.Text()
122
123 if w.config.IgnoreWordlistComments {
124 text, ok = stripComments(text)
125 if !ok {
126 continue
127 }
128 }
129 data = append(data, []byte(text))
130 }
131 } else {
132 text := reader.Text()
133
134 if w.config.IgnoreWordlistComments {
135 text, ok = stripComments(text)
136 if !ok {
137 continue
138 }
139 }
140 data = append(data, []byte(text))
141 if w.keyword == "FUZZ" && len(w.config.Extensions) > 0 {
142 for _, ext := range w.config.Extensions {
143 data = append(data, []byte(text+ext))
144 }
145 }
146 }
147 }
148 w.data = data
149 return reader.Err()
150 }
151
152 // stripComments removes all kinds of comments from the word
153 func stripComments(text string) (string, bool) {
154 // If the line starts with a # ignoring any space on the left,
155 // return blank.
156 if strings.HasPrefix(strings.TrimLeft(text, " "), "#") {
157 return "", false
158 }
159
160 // If the line has # later after a space, that's a comment.
161 // Only send the word up to the space to the routine.
162 index := strings.Index(text, " #")
163 if index == -1 {
164 return text, true
165 }
166 return text[:index], true
167 }
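stripComments is unexported, so a usage sketch has to live inside pkg/input. A minimal, hypothetical test illustrating the -ic comment handling:

package input

import "testing"

func TestStripCommentsSketch(t *testing.T) {
    // A trailing " #..." comment is cut off and the word itself is kept.
    if word, ok := stripComments("admin # common path"); !ok || word != "admin" {
        t.Errorf("got %q, %t", word, ok)
    }
    // A full-line comment is dropped entirely.
    if _, ok := stripComments("  # full-line comment"); ok {
        t.Errorf("expected the comment line to be skipped")
    }
}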
+0
-12
pkg/output/const.go less more
0 // +build !windows
1
2 package output
3
4 const (
5 TERMINAL_CLEAR_LINE = "\r\x1b[2K"
6 ANSI_CLEAR = "\x1b[0m"
7 ANSI_RED = "\x1b[31m"
8 ANSI_GREEN = "\x1b[32m"
9 ANSI_BLUE = "\x1b[34m"
10 ANSI_YELLOW = "\x1b[33m"
11 )
+0
-12
pkg/output/const_windows.go less more
0 // +build windows
1
2 package output
3
4 const (
5 TERMINAL_CLEAR_LINE = "\r\r"
6 ANSI_CLEAR = ""
7 ANSI_RED = ""
8 ANSI_GREEN = ""
9 ANSI_BLUE = ""
10 ANSI_YELLOW = ""
11 )
+0
-71
pkg/output/file_csv.go less more
0 package output
1
2 import (
3 "encoding/base64"
4 "encoding/csv"
5 "os"
6 "strconv"
7
8 "github.com/ffuf/ffuf/pkg/ffuf"
9 )
10
11 var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "resultfile"}
12
13 func writeCSV(config *ffuf.Config, res []Result, encode bool) error {
14 header := make([]string, 0)
15 f, err := os.Create(config.OutputFile)
16 if err != nil {
17 return err
18 }
19 defer f.Close()
20
21 w := csv.NewWriter(f)
22 defer w.Flush()
23
24 for _, inputprovider := range config.InputProviders {
25 header = append(header, inputprovider.Keyword)
26 }
27
28 for _, item := range staticheaders {
29 header = append(header, item)
30 }
31
32 if err := w.Write(header); err != nil {
33 return err
34 }
35 for _, r := range res {
36 if encode {
37 inputs := make(map[string][]byte, 0)
38 for k, v := range r.Input {
39 inputs[k] = []byte(base64encode(v))
40 }
41 r.Input = inputs
42 }
43
44 err := w.Write(toCSV(r))
45 if err != nil {
46 return err
47 }
48 }
49 return nil
50 }
51
52 func base64encode(in []byte) string {
53 return base64.StdEncoding.EncodeToString(in)
54 }
55
56 func toCSV(r Result) []string {
57 res := make([]string, 0)
58 for _, v := range r.Input {
59 res = append(res, string(v))
60 }
61 res = append(res, r.Url)
62 res = append(res, r.RedirectLocation)
63 res = append(res, strconv.Itoa(r.Position))
64 res = append(res, strconv.FormatInt(r.StatusCode, 10))
65 res = append(res, strconv.FormatInt(r.ContentLength, 10))
66 res = append(res, strconv.FormatInt(r.ContentWords, 10))
67 res = append(res, strconv.FormatInt(r.ContentLines, 10))
68 res = append(res, r.ResultFile)
69 return res
70 }
+0
-186
pkg/output/file_html.go less more
0 package output
1
2 import (
3 "html/template"
4 "os"
5 "time"
6
7 "github.com/ffuf/ffuf/pkg/ffuf"
8 )
9
10 type htmlFileOutput struct {
11 CommandLine string
12 Time string
13 Keys []string
14 Results []Result
15 }
16
17 const (
18 htmlTemplate = `
19 <!DOCTYPE html>
20 <html>
21 <head>
22 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
23 <meta
24 name="viewport"
25 content="width=device-width, initial-scale=1, maximum-scale=1.0"
26 />
27 <title>FFUF Report - </title>
28
29 <!-- CSS -->
30 <link
31 href="https://fonts.googleapis.com/icon?family=Material+Icons"
32 rel="stylesheet"
33 />
34 <link
35 rel="stylesheet"
36 href="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/css/materialize.min.css"
37 />
38 <link
39 rel="stylesheet"
40 type="text/css"
41 href="https://cdn.datatables.net/1.10.20/css/jquery.dataTables.css"
42 />
43
44 </head>
45
46 <body>
47 <nav>
48 <div class="nav-wrapper">
49 <a href="#" class="brand-logo">FFUF</a>
50 <ul id="nav-mobile" class="right hide-on-med-and-down">
51 </ul>
52 </div>
53 </nav>
54
55 <main class="section no-pad-bot" id="index-banner">
56 <div class="container">
57 <br /><br />
58 <h1 class="header center ">FFUF Report</h1>
59 <div class="row center">
60
61 <pre>{{ .CommandLine }}</pre>
62 <pre>{{ .Time }}</pre>
63
64 <table id="ffufreport">
65 <thead>
66 <div style="display:none">
67 |result_raw|StatusCode|Input|Position|ContentLength|ContentWords|ContentLines|
68 </div>
69 <tr>
70 <th>Status</th>
71 {{ range .Keys }} <th>{{ . }}</th>
72 {{ end }}
73 <th>URL</th>
74 <th>Redirect location</th>
75 <th>Position</th>
76 <th>Length</th>
77 <th>Words</th>
78 <th>Lines</th>
79 <th>Resultfile</th>
80 </tr>
81 </thead>
82
83 <tbody>
84 {{range $result := .Results}}
85 <div style="display:none">
86 |result_raw|{{ $result.StatusCode }}{{ range $keyword, $value := $result.Input }}|{{ $value | printf "%s" }}{{ end }}|{{ $result.Url }}|{{ $result.RedirectLocation }}|{{ $result.Position }}|{{ $result.ContentLength }}|{{ $result.ContentWords }}|{{ $result.ContentLines }}|
87 </div>
88 <tr class="result-{{ $result.StatusCode }}" style="background-color: {{$result.HTMLColor}};"><td><font color="black" class="status-code">{{ $result.StatusCode }}</font></td>{{ range $keyword, $value := $result.Input }}<td>{{ $value | printf "%s" }}</td>{{ end }}<td>{{ $result.Url }}</td><td>{{ $result.RedirectLocation }}</td><td>{{ $result.Position }}</td><td>{{ $result.ContentLength }}</td><td>{{ $result.ContentWords }}</td><td>{{ $result.ContentLines }}</td><td>{{ $result.ResultFile }}</td></tr>
89 {{end}}
90 </tbody>
91 </table>
92
93 </div>
94 <br /><br />
95 </div>
96 </main>
97
98 <!--JavaScript at end of body for optimized loading-->
99 <script src="https://code.jquery.com/jquery-3.4.1.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
100 <script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/1.0.0/js/materialize.min.js"></script>
101 <script type="text/javascript" charset="utf8" src="https://cdn.datatables.net/1.10.20/js/jquery.dataTables.js"></script>
102 <script>
103 $(document).ready( function () {
104 $('#ffufreport').DataTable();
105 } );
106 </script>
107 <style>
108 body {
109 display: flex;
110 min-height: 100vh;
111 flex-direction: column;
112 }
113
114 main {
115 flex: 1 0 auto;
116 }
117 </style>
118 </body>
119 </html>
120
121 `
122 )
123
124 // colorizeResults returns a new slice with HTMLColor attribute
125 func colorizeResults(results []Result) []Result {
126 newResults := make([]Result, 0)
127
128 for _, r := range results {
129 result := r
130 result.HTMLColor = "black"
131
132 s := result.StatusCode
133
134 if s >= 200 && s <= 299 {
135 result.HTMLColor = "#adea9e"
136 }
137
138 if s >= 300 && s <= 399 {
139 result.HTMLColor = "#bbbbe6"
140 }
141
142 if s >= 400 && s <= 499 {
143 result.HTMLColor = "#d2cb7e"
144 }
145
146 if s >= 500 && s <= 599 {
147 result.HTMLColor = "#de8dc1"
148 }
149
150 newResults = append(newResults, result)
151 }
152
153 return newResults
154 }
155
156 func writeHTML(config *ffuf.Config, results []Result) error {
157
158 results = colorizeResults(results)
159
160 ti := time.Now()
161
162 keywords := make([]string, 0)
163 for _, inputprovider := range config.InputProviders {
164 keywords = append(keywords, inputprovider.Keyword)
165 }
166
167 outHTML := htmlFileOutput{
168 CommandLine: config.CommandLine,
169 Time: ti.Format(time.RFC3339),
170 Results: results,
171 Keys: keywords,
172 }
173
174 f, err := os.Create(config.OutputFile)
175 if err != nil {
176 return err
177 }
178 defer f.Close()
179
180 templateName := "output.html"
181 t := template.New(templateName).Delims("{{", "}}")
182 t.Parse(htmlTemplate)
183 t.Execute(f, outHTML)
184 return nil
185 }
+0
-90
pkg/output/file_json.go less more
0 package output
1
2 import (
3 "encoding/json"
4 "io/ioutil"
5 "time"
6
7 "github.com/ffuf/ffuf/pkg/ffuf"
8 )
9
10 type ejsonFileOutput struct {
11 CommandLine string `json:"commandline"`
12 Time string `json:"time"`
13 Results []Result `json:"results"`
14 }
15
16 type JsonResult struct {
17 Input map[string]string `json:"input"`
18 Position int `json:"position"`
19 StatusCode int64 `json:"status"`
20 ContentLength int64 `json:"length"`
21 ContentWords int64 `json:"words"`
22 ContentLines int64 `json:"lines"`
23 RedirectLocation string `json:"redirectlocation"`
24 ResultFile string `json:"resultfile"`
25 Url string `json:"url"`
26 }
27
28 type jsonFileOutput struct {
29 CommandLine string `json:"commandline"`
30 Time string `json:"time"`
31 Results []JsonResult `json:"results"`
32 Config *ffuf.Config `json:"config"`
33 }
34
35 func writeEJSON(config *ffuf.Config, res []Result) error {
36 t := time.Now()
37 outJSON := ejsonFileOutput{
38 CommandLine: config.CommandLine,
39 Time: t.Format(time.RFC3339),
40 Results: res,
41 }
42
43 outBytes, err := json.Marshal(outJSON)
44 if err != nil {
45 return err
46 }
47 err = ioutil.WriteFile(config.OutputFile, outBytes, 0644)
48 if err != nil {
49 return err
50 }
51 return nil
52 }
53
54 func writeJSON(config *ffuf.Config, res []Result) error {
55 t := time.Now()
56 jsonRes := make([]JsonResult, 0)
57 for _, r := range res {
58 strinput := make(map[string]string)
59 for k, v := range r.Input {
60 strinput[k] = string(v)
61 }
62 jsonRes = append(jsonRes, JsonResult{
63 Input: strinput,
64 Position: r.Position,
65 StatusCode: r.StatusCode,
66 ContentLength: r.ContentLength,
67 ContentWords: r.ContentWords,
68 ContentLines: r.ContentLines,
69 RedirectLocation: r.RedirectLocation,
70 ResultFile: r.ResultFile,
71 Url: r.Url,
72 })
73 }
74 outJSON := jsonFileOutput{
75 CommandLine: config.CommandLine,
76 Time: t.Format(time.RFC3339),
77 Results: jsonRes,
78 Config: config,
79 }
80 outBytes, err := json.Marshal(outJSON)
81 if err != nil {
82 return err
83 }
84 err = ioutil.WriteFile(config.OutputFile, outBytes, 0644)
85 if err != nil {
86 return err
87 }
88 return nil
89 }
+0
-50
pkg/output/file_md.go less more
0 package output
1
2 import (
3 "html/template"
4 "os"
5 "time"
6
7 "github.com/ffuf/ffuf/pkg/ffuf"
8 )
9
10 const (
11 markdownTemplate = `# FFUF Report
12
13 Command line : ` + "`{{.CommandLine}}`" + `
14 Time: ` + "{{ .Time }}" + `
15
16 {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | ResultFile |
17 {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- |
18 {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ResultFile }} |
19 {{end}}` // The template format is not pretty but follows the markdown guide
20 )
21
22 func writeMarkdown(config *ffuf.Config, res []Result) error {
23
24 ti := time.Now()
25
26 keywords := make([]string, 0)
27 for _, inputprovider := range config.InputProviders {
28 keywords = append(keywords, inputprovider.Keyword)
29 }
30
31 outMD := htmlFileOutput{
32 CommandLine: config.CommandLine,
33 Time: ti.Format(time.RFC3339),
34 Results: res,
35 Keys: keywords,
36 }
37
38 f, err := os.Create(config.OutputFile)
39 if err != nil {
40 return err
41 }
42 defer f.Close()
43
44 templateName := "output.md"
45 t := template.New(templateName).Delims("{{", "}}")
46 t.Parse(markdownTemplate)
47 t.Execute(f, outMD)
48 return nil
49 }
+0
-10
pkg/output/output.go less more
0 package output
1
2 import (
3 "github.com/ffuf/ffuf/pkg/ffuf"
4 )
5
6 func NewOutputProviderByName(name string, conf *ffuf.Config) ffuf.OutputProvider {
7 //We have only one outputprovider at the moment
8 return NewStdoutput(conf)
9 }
+0
-375
pkg/output/stdout.go less more
0 package output
1
2 import (
3 "crypto/md5"
4 "fmt"
5 "io/ioutil"
6 "os"
7 "path"
8 "strconv"
9 "time"
10
11 "github.com/ffuf/ffuf/pkg/ffuf"
12 )
13
14 const (
15 BANNER_HEADER = `
16 /'___\ /'___\ /'___\
17 /\ \__/ /\ \__/ __ __ /\ \__/
18 \ \ ,__\\ \ ,__\/\ \/\ \ \ \ ,__\
19 \ \ \_/ \ \ \_/\ \ \_\ \ \ \ \_/
20 \ \_\ \ \_\ \ \____/ \ \_\
21 \/_/ \/_/ \/___/ \/_/
22 `
23 BANNER_SEP = "________________________________________________"
24 )
25
26 type Stdoutput struct {
27 config *ffuf.Config
28 Results []Result
29 }
30
31 type Result struct {
32 Input map[string][]byte `json:"input"`
33 Position int `json:"position"`
34 StatusCode int64 `json:"status"`
35 ContentLength int64 `json:"length"`
36 ContentWords int64 `json:"words"`
37 ContentLines int64 `json:"lines"`
38 RedirectLocation string `json:"redirectlocation"`
39 Url string `json:"url"`
40 ResultFile string `json:"resultfile"`
41 HTMLColor string `json:"-"`
42 }
43
44 func NewStdoutput(conf *ffuf.Config) *Stdoutput {
45 var outp Stdoutput
46 outp.config = conf
47 outp.Results = []Result{}
48 return &outp
49 }
50
51 func (s *Stdoutput) Banner() error {
52 fmt.Printf("%s\n v%s\n%s\n\n", BANNER_HEADER, ffuf.VERSION, BANNER_SEP)
53 printOption([]byte("Method"), []byte(s.config.Method))
54 printOption([]byte("URL"), []byte(s.config.Url))
55 // Print headers
56 if len(s.config.Headers) > 0 {
57 for k, v := range s.config.Headers {
58 printOption([]byte("Header"), []byte(fmt.Sprintf("%s: %s", k, v)))
59 }
60 }
61 // Print POST data
62 if len(s.config.Data) > 0 {
63 printOption([]byte("Data"), []byte(s.config.Data))
64 }
65
66 // Print extensions
67 if len(s.config.Extensions) > 0 {
68 exts := ""
69 for _, ext := range s.config.Extensions {
70 exts = fmt.Sprintf("%s%s ", exts, ext)
71 }
72 printOption([]byte("Extensions"), []byte(exts))
73 }
74
75 // Output file info
76 if len(s.config.OutputFile) > 0 {
77 printOption([]byte("Output file"), []byte(s.config.OutputFile))
78 printOption([]byte("File format"), []byte(s.config.OutputFormat))
79 }
80
81 // Follow redirects?
82 follow := fmt.Sprintf("%t", s.config.FollowRedirects)
83 printOption([]byte("Follow redirects"), []byte(follow))
84
85 // Autocalibration
86 autocalib := fmt.Sprintf("%t", s.config.AutoCalibration)
87 printOption([]byte("Calibration"), []byte(autocalib))
88
89 // Proxies
90 if len(s.config.ProxyURL) > 0 {
91 proxy := fmt.Sprintf("%s", s.config.ProxyURL)
92 printOption([]byte("Proxy"), []byte(proxy))
93 }
94 if len(s.config.ReplayProxyURL) > 0 {
95 replayproxy := fmt.Sprintf("%s", s.config.ReplayProxyURL)
96 printOption([]byte("ReplayProxy"), []byte(replayproxy))
97 }
98
99 // Timeout
100 timeout := fmt.Sprintf("%d", s.config.Timeout)
101 printOption([]byte("Timeout"), []byte(timeout))
102
103 // Threads
104 threads := fmt.Sprintf("%d", s.config.Threads)
105 printOption([]byte("Threads"), []byte(threads))
106
107 // Delay?
108 if s.config.Delay.HasDelay {
109 delay := ""
110 if s.config.Delay.IsRange {
111 delay = fmt.Sprintf("%.2f - %.2f seconds", s.config.Delay.Min, s.config.Delay.Max)
112 } else {
113 delay = fmt.Sprintf("%.2f seconds", s.config.Delay.Min)
114 }
115 printOption([]byte("Delay"), []byte(delay))
116 }
117
118 // Print matchers
119 for _, f := range s.config.Matchers {
120 printOption([]byte("Matcher"), []byte(f.Repr()))
121 }
122 // Print filters
123 for _, f := range s.config.Filters {
124 printOption([]byte("Filter"), []byte(f.Repr()))
125 }
126 fmt.Printf("%s\n\n", BANNER_SEP)
127 return nil
128 }
129
130 func (s *Stdoutput) Progress(status ffuf.Progress) {
131 if s.config.Quiet {
132 // No progress for quiet mode
133 return
134 }
135
136 dur := time.Now().Sub(status.StartedAt)
137 runningSecs := int(dur / time.Second)
138 var reqRate int
139 if runningSecs > 0 {
140 reqRate = int(status.ReqCount / runningSecs)
141 } else {
142 reqRate = 0
143 }
144
145 hours := dur / time.Hour
146 dur -= hours * time.Hour
147 mins := dur / time.Minute
148 dur -= mins * time.Minute
149 secs := dur / time.Second
150
151 fmt.Fprintf(os.Stderr, "%s:: Progress: [%d/%d] :: Job [%d/%d] :: %d req/sec :: Duration: [%d:%02d:%02d] :: Errors: %d ::", TERMINAL_CLEAR_LINE, status.ReqCount, status.ReqTotal, status.QueuePos, status.QueueTotal, reqRate, hours, mins, secs, status.ErrorCount)
152 }
153
154 func (s *Stdoutput) Info(infostring string) {
155 if s.config.Quiet {
156 fmt.Fprintf(os.Stderr, "%s", infostring)
157 } else {
158 if !s.config.Colors {
159 fmt.Fprintf(os.Stderr, "%s[INFO] %s\n", TERMINAL_CLEAR_LINE, infostring)
160 } else {
161 fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring)
162 }
163 }
164 }
165
166 func (s *Stdoutput) Error(errstring string) {
167 if s.config.Quiet {
168 fmt.Fprintf(os.Stderr, "%s", errstring)
169 } else {
170 if !s.config.Colors {
171 fmt.Fprintf(os.Stderr, "%s[ERR] %s\n", TERMINAL_CLEAR_LINE, errstring)
172 } else {
173 fmt.Fprintf(os.Stderr, "%s[%sERR%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, errstring)
174 }
175 }
176 }
177
178 func (s *Stdoutput) Warning(warnstring string) {
179 if s.config.Quiet {
180 fmt.Fprintf(os.Stderr, "%s", warnstring)
181 } else {
182 if !s.config.Colors {
183 fmt.Fprintf(os.Stderr, "%s[WARN] %s\n", TERMINAL_CLEAR_LINE, warnstring)
184 } else {
185 fmt.Fprintf(os.Stderr, "%s[%sWARN%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, warnstring)
186 }
187 }
188 }
189
190 func (s *Stdoutput) Finalize() error {
191 var err error
192 if s.config.OutputFile != "" {
193 if s.config.OutputFormat == "json" {
194 err = writeJSON(s.config, s.Results)
195 } else if s.config.OutputFormat == "ejson" {
196 err = writeEJSON(s.config, s.Results)
197 } else if s.config.OutputFormat == "html" {
198 err = writeHTML(s.config, s.Results)
199 } else if s.config.OutputFormat == "md" {
200 err = writeMarkdown(s.config, s.Results)
201 } else if s.config.OutputFormat == "csv" {
202 err = writeCSV(s.config, s.Results, false)
203 } else if s.config.OutputFormat == "ecsv" {
204 err = writeCSV(s.config, s.Results, true)
205 }
206 if err != nil {
207 s.Error(fmt.Sprintf("%s", err))
208 }
209 }
210 fmt.Fprintf(os.Stderr, "\n")
211 return nil
212 }
213
214 func (s *Stdoutput) Result(resp ffuf.Response) {
215 // Do we want to write request and response to a file
216 if len(s.config.OutputDirectory) > 0 {
217 resp.ResultFile = s.writeResultToFile(resp)
218 }
219 // Output the result
220 s.printResult(resp)
221 // Check if we need the data later
222 if s.config.OutputFile != "" {
223 // No need to store results if we're not going to use them later
224 inputs := make(map[string][]byte, 0)
225 for k, v := range resp.Request.Input {
226 inputs[k] = v
227 }
228 sResult := Result{
229 Input: inputs,
230 Position: resp.Request.Position,
231 StatusCode: resp.StatusCode,
232 ContentLength: resp.ContentLength,
233 ContentWords: resp.ContentWords,
234 ContentLines: resp.ContentLines,
235 RedirectLocation: resp.GetRedirectLocation(false),
236 Url: resp.Request.Url,
237 ResultFile: resp.ResultFile,
238 }
239 s.Results = append(s.Results, sResult)
240 }
241 }
242
243 func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string {
244 var fileContent, fileName, filePath string
245 // Create directory if needed
246 if s.config.OutputDirectory != "" {
247 err := os.Mkdir(s.config.OutputDirectory, 0750)
248 if err != nil {
249 if !os.IsExist(err) {
250 s.Error(fmt.Sprintf("%s", err))
251 return ""
252 }
253 }
254 }
255 fileContent = fmt.Sprintf("%s\n---- ↑ Request ---- Response ↓ ----\n\n%s", resp.Request.Raw, resp.Raw)
256
257 // Create file name
258 fileName = fmt.Sprintf("%x", md5.Sum([]byte(fileContent)))
259
260 filePath = path.Join(s.config.OutputDirectory, fileName)
261 err := ioutil.WriteFile(filePath, []byte(fileContent), 0640)
262 if err != nil {
263 s.Error(fmt.Sprintf("%s", err))
264 }
265 return fileName
266 }
267
268 func (s *Stdoutput) printResult(resp ffuf.Response) {
269 if s.config.Quiet {
270 s.resultQuiet(resp)
271 } else {
272 if len(resp.Request.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 {
273 // Print a multi-line result (when using multiple input keywords and wordlists)
274 s.resultMultiline(resp)
275 } else {
276 s.resultNormal(resp)
277 }
278 }
279 }
280
281 func (s *Stdoutput) prepareInputsOneLine(resp ffuf.Response) string {
282 inputs := ""
283 if len(resp.Request.Input) > 1 {
284 for k, v := range resp.Request.Input {
285 if inSlice(k, s.config.CommandKeywords) {
286 // If we're using external command for input, display the position instead of input
287 inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(resp.Request.Position))
288 } else {
289 inputs = fmt.Sprintf("%s%s : %s ", inputs, k, v)
290 }
291 }
292 } else {
293 for k, v := range resp.Request.Input {
294 if inSlice(k, s.config.CommandKeywords) {
295 // If we're using external command for input, display the position instead of input
296 inputs = strconv.Itoa(resp.Request.Position)
297 } else {
298 inputs = string(v)
299 }
300 }
301 }
302 return inputs
303 }
304
305 func (s *Stdoutput) resultQuiet(resp ffuf.Response) {
306 fmt.Println(s.prepareInputsOneLine(resp))
307 }
308
309 func (s *Stdoutput) resultMultiline(resp ffuf.Response) {
310 var res_hdr, res_str string
311 res_str = "%s%s * %s: %s\n"
312 res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, resp.StatusCode, resp.ContentLength, resp.ContentWords, resp.ContentLines)
313 res_hdr = s.colorize(res_hdr, resp.StatusCode)
314 reslines := ""
315 if s.config.Verbose {
316 reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, resp.Request.Url)
317 redirectLocation := resp.GetRedirectLocation(false)
318 if redirectLocation != "" {
319 reslines = fmt.Sprintf("%s%s| --> | %s\n", reslines, TERMINAL_CLEAR_LINE, redirectLocation)
320 }
321 }
322 if resp.ResultFile != "" {
323 reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, resp.ResultFile)
324 }
325 for k, v := range resp.Request.Input {
326 if inSlice(k, s.config.CommandKeywords) {
327 // If we're using external command for input, display the position instead of input
328 reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(resp.Request.Position))
329 } else {
330 // Wordlist input
331 reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v)
332 }
333 }
334 fmt.Printf("%s\n%s\n", res_hdr, reslines)
335 }
336
337 func (s *Stdoutput) resultNormal(resp ffuf.Response) {
338 var res_str string
339 res_str = fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines)
340 fmt.Println(res_str)
341 }
342
343 func (s *Stdoutput) colorize(input string, status int64) string {
344 if !s.config.Colors {
345 return fmt.Sprintf("%s", input)
346 }
347 colorCode := ANSI_CLEAR
348 if status >= 200 && status < 300 {
349 colorCode = ANSI_GREEN
350 }
351 if status >= 300 && status < 400 {
352 colorCode = ANSI_BLUE
353 }
354 if status >= 400 && status < 500 {
355 colorCode = ANSI_YELLOW
356 }
357 if status >= 500 && status < 600 {
358 colorCode = ANSI_RED
359 }
360 return fmt.Sprintf("%s%s%s", colorCode, input, ANSI_CLEAR)
361 }
362
363 func printOption(name []byte, value []byte) {
364 fmt.Printf(" :: %-16s : %s\n", name, value)
365 }
366
367 func inSlice(key string, slice []string) bool {
368 for _, v := range slice {
369 if v == key {
370 return true
371 }
372 }
373 return false
374 }
+0
-10
pkg/runner/runner.go less more
0 package runner
1
2 import (
3 "github.com/ffuf/ffuf/pkg/ffuf"
4 )
5
6 func NewRunnerByName(name string, conf *ffuf.Config, replay bool) ffuf.RunnerProvider {
7 // We have only one Runner at the moment
8 return NewSimpleRunner(conf, replay)
9 }
+0
-152
pkg/runner/simple.go less more
0 package runner
1
2 import (
3 "bytes"
4 "crypto/tls"
5 "fmt"
6 "io/ioutil"
7 "net/http"
8 "net/http/httputil"
9 "net/textproto"
10 "net/url"
11 "strconv"
12 "strings"
13 "time"
14 "unicode/utf8"
15
16 "github.com/ffuf/ffuf/pkg/ffuf"
17 )
18
19 //Download results < 5MB
20 const MAX_DOWNLOAD_SIZE = 5242880
21
22 type SimpleRunner struct {
23 config *ffuf.Config
24 client *http.Client
25 }
26
27 func NewSimpleRunner(conf *ffuf.Config, replay bool) ffuf.RunnerProvider {
28 var simplerunner SimpleRunner
29 proxyURL := http.ProxyFromEnvironment
30 customProxy := ""
31
32 if replay {
33 customProxy = conf.ReplayProxyURL
34 } else {
35 customProxy = conf.ProxyURL
36 }
37 if len(customProxy) > 0 {
38 pu, err := url.Parse(customProxy)
39 if err == nil {
40 proxyURL = http.ProxyURL(pu)
41 }
42 }
43
44 simplerunner.config = conf
45 simplerunner.client = &http.Client{
46 CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse },
47 Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second),
48 Transport: &http.Transport{
49 Proxy: proxyURL,
50 MaxIdleConns: 1000,
51 MaxIdleConnsPerHost: 500,
52 MaxConnsPerHost: 500,
53 TLSClientConfig: &tls.Config{
54 InsecureSkipVerify: true,
55 },
56 }}
57
58 if conf.FollowRedirects {
59 simplerunner.client.CheckRedirect = nil
60 }
61 return &simplerunner
62 }
63
64 func (r *SimpleRunner) Prepare(input map[string][]byte) (ffuf.Request, error) {
65 req := ffuf.NewRequest(r.config)
66
67 req.Headers = r.config.Headers
68 req.Url = r.config.Url
69 req.Method = r.config.Method
70 req.Data = []byte(r.config.Data)
71
72 for keyword, inputitem := range input {
73 req.Method = strings.Replace(req.Method, keyword, string(inputitem), -1)
74 headers := make(map[string]string, 0)
75 for h, v := range req.Headers {
76 var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.Replace(h, keyword, string(inputitem), -1))
77 headers[CanonicalHeader] = strings.Replace(v, keyword, string(inputitem), -1)
78 }
79 req.Headers = headers
80 req.Url = strings.Replace(req.Url, keyword, string(inputitem), -1)
81 req.Data = []byte(strings.Replace(string(req.Data), keyword, string(inputitem), -1))
82 }
83
84 req.Input = input
85 return req, nil
86 }
87
88 func (r *SimpleRunner) Execute(req *ffuf.Request) (ffuf.Response, error) {
89 var httpreq *http.Request
90 var err error
91 var rawreq []byte
92 data := bytes.NewReader(req.Data)
93 httpreq, err = http.NewRequest(req.Method, req.Url, data)
94 if err != nil {
95 return ffuf.Response{}, err
96 }
97
98 // set default User-Agent header if not present
99 if _, ok := req.Headers["User-Agent"]; !ok {
100 req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.VERSION)
101 }
102
103 // Handle Go http.Request special cases
104 if _, ok := req.Headers["Host"]; ok {
105 httpreq.Host = req.Headers["Host"]
106 }
107 httpreq = httpreq.WithContext(r.config.Context)
108 for k, v := range req.Headers {
109 httpreq.Header.Set(k, v)
110 }
111
112 if len(r.config.OutputDirectory) > 0 {
113 rawreq, _ = httputil.DumpRequestOut(httpreq, true)
114 }
115
116 httpresp, err := r.client.Do(httpreq)
117 if err != nil {
118 return ffuf.Response{}, err
119 }
120
121 resp := ffuf.NewResponse(httpresp, req)
122 defer httpresp.Body.Close()
123
124 // Check if we should download the resource or not
125 size, err := strconv.Atoi(httpresp.Header.Get("Content-Length"))
126 if err == nil {
127 resp.ContentLength = int64(size)
128 if size > MAX_DOWNLOAD_SIZE {
129 resp.Cancelled = true
130 return resp, nil
131 }
132 }
133
134 if len(r.config.OutputDirectory) > 0 {
135 rawresp, _ := httputil.DumpResponse(httpresp, true)
136 resp.Request.Raw = string(rawreq)
137 resp.Raw = string(rawresp)
138 }
139
140 if respbody, err := ioutil.ReadAll(httpresp.Body); err == nil {
141 resp.ContentLength = int64(utf8.RuneCountInString(string(respbody)))
142 resp.Data = respbody
143 }
144
145 wordsSize := len(strings.Split(string(resp.Data), " "))
146 linesSize := len(strings.Split(string(resp.Data), "\n"))
147 resp.ContentWords = int64(wordsSize)
148 resp.ContentLines = int64(linesSize)
149
150 return resp, nil
151 }
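A hedged sketch of Prepare, which substitutes every keyword into the method, URL, headers and body before Execute sends the request. The target is hypothetical, and the sketch assumes Prepare is exposed through the ffuf.RunnerProvider interface returned by NewSimpleRunner, as the job code uses it that way:

package main

import (
    "fmt"

    "github.com/ffuf/ffuf/pkg/ffuf"
    "github.com/ffuf/ffuf/pkg/runner"
)

func main() {
    conf := &ffuf.Config{
        Method:  "GET",
        Url:     "https://target.example/FUZZ",
        Headers: map[string]string{"x-probe": "FUZZ"},
    }
    r := runner.NewSimpleRunner(conf, false)
    req, err := r.Prepare(map[string][]byte{"FUZZ": []byte("admin")})
    if err != nil {
        panic(err)
    }
    // The keyword is replaced everywhere and header names are canonicalized.
    fmt.Println(req.Url)                // https://target.example/admin
    fmt.Println(req.Headers["X-Probe"]) // admin
}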