New upstream version 1.2.1
Raphaël Hertzog
3 years ago
0 | # Description | |
1 | ||
2 | Please add a short description of pull request contents. | |
3 | If this PR addresses an existing issue, please add the issue number below. | |
4 | ||
5 | Fixes: #(issue number) | |
6 | ||
7 | ## Additionally | |
8 | ||
9 | - [ ] If this is the first time you are contributing to ffuf, add your name to `CONTRIBUTORS.md`. | |
10 | The file should be alphabetically ordered. | |
11 | - [ ] Add a short description of the fix to `CHANGELOG.md` | |
12 | ||
13 | Thanks for contributing to ffuf :) |
0 | # For most projects, this workflow file will not need changing; you simply need | |
1 | # to commit it to your repository. | |
2 | # | |
3 | # You may wish to alter this file to override the set of languages analyzed, | |
4 | # or to provide custom queries or build logic. | |
5 | name: "CodeQL" | |
6 | ||
7 | on: | |
8 | push: | |
9 | branches: [master] | |
10 | pull_request: | |
11 | # The branches below must be a subset of the branches above | |
12 | branches: [master] | |
13 | schedule: | |
14 | - cron: '0 9 * * 3' | |
15 | ||
16 | jobs: | |
17 | analyze: | |
18 | name: Analyze | |
19 | runs-on: ubuntu-latest | |
20 | ||
21 | strategy: | |
22 | fail-fast: false | |
23 | matrix: | |
24 | # Override automatic language detection by changing the below list | |
25 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] | |
26 | language: ['go'] | |
27 | # Learn more... | |
28 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection | |
29 | ||
30 | steps: | |
31 | - name: Checkout repository | |
32 | uses: actions/checkout@v2 | |
33 | with: | |
34 | # We must fetch at least the immediate parents so that if this is | |
35 | # a pull request then we can checkout the head. | |
36 | fetch-depth: 2 | |
37 | ||
38 | # If this run was triggered by a pull request event, then checkout | |
39 | # the head of the pull request instead of the merge commit. | |
40 | - run: git checkout HEAD^2 | |
41 | if: ${{ github.event_name == 'pull_request' }} | |
42 | ||
43 | # Initializes the CodeQL tools for scanning. | |
44 | - name: Initialize CodeQL | |
45 | uses: github/codeql-action/init@v1 | |
46 | with: | |
47 | languages: ${{ matrix.language }} | |
48 | # If you wish to specify custom queries, you can do so here or in a config file. | |
49 | # By default, queries listed here will override any specified in a config file. | |
50 | # Prefix the list here with "+" to use these queries and those in the config file. | |
51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main | |
52 | ||
53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). | |
54 | # If this step fails, then you should remove it and run the build manually (see below) | |
55 | - name: Autobuild | |
56 | uses: github/codeql-action/autobuild@v1 | |
57 | ||
58 | # ℹ️ Command-line programs to run using the OS shell. | |
59 | # 📚 https://git.io/JvXDl | |
60 | ||
61 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines | |
62 | # and modify them (or add more) to build your code if your project | |
63 | # uses a compiled language | |
64 | ||
65 | #- run: | | |
66 | # make bootstrap | |
67 | # make release | |
68 | ||
69 | - name: Perform CodeQL Analysis | |
70 | uses: github/codeql-action/analyze@v1 |
0 | name: golangci-lint | |
1 | on: | |
2 | push: | |
3 | tags: | |
4 | - v* | |
5 | branches: | |
6 | - master | |
7 | pull_request: | |
8 | jobs: | |
9 | golangci: | |
10 | name: lint | |
11 | runs-on: ubuntu-latest | |
12 | steps: | |
13 | - uses: actions/checkout@v2 | |
14 | - name: golangci-lint | |
15 | uses: golangci/golangci-lint-action@v2 | |
16 | with: | |
17 | # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version. | |
18 | version: v1.29 | |
19 | ||
20 | # Optional: working directory, useful for monorepos | |
21 | # working-directory: somedir | |
22 | ||
23 | # Optional: golangci-lint command line arguments. | |
24 | # args: --issues-exit-code=0 | |
25 | ||
26 | # Optional: show only new issues if it's a pull request. The default value is `false`. | |
27 | # only-new-issues: true⏎ |
0 | 0 | builds: |
1 | - binary: ffuf | |
1 | - id: ffuf | |
2 | binary: ffuf | |
3 | flags: | |
4 | - -trimpath | |
5 | env: | |
6 | - CGO_ENABLED=0 | |
7 | asmflags: | |
8 | - all=-trimpath={{.Env.GOPATH}} | |
9 | gcflags: | |
10 | - all=-trimpath={{.Env.GOPATH}} | |
11 | ldflags: | | |
12 | -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -extldflags '-static' | |
2 | 13 | goos: |
3 | 14 | - linux |
4 | 15 | - windows |
0 | 0 | ## Changelog |
1 | 1 | - master |
2 | 2 | - New |
3 | - Changed | |
4 | ||
5 | - v1.2.0 | |
6 | - New | |
7 | - Added 405 Method Not Allowed to list of status codes matched by default. | |
8 | - New CLI flag `-rate` to set maximum rate of requests per second. The adjustment is dynamic. | |
9 | - New CLI flag `-config` to define a configuration file with preconfigured settings for the job. | |
10 | - Ffuf now reads a default configuration file `$HOME/.ffufrc` upon startup. Options set in this file | |
11 | are overwritten by the ones provided on CLI. | |
12 | - Change banner logging to stderr instead of stdout. | |
13 | - New CLI flag `-or` to avoid creating result files if we didn't get any. | |
14 | - New CLI flag `-input-shell` to set the shell to be used by `input-cmd` | |
3 | 15 | |
4 | 16 | - Changed |
17 | - Pre-flight errors are now displayed also after the usage text to prevent the need to scroll through backlog. | |
18 | - Cancelling via SIGINT (Ctrl-C) is now more responsive | |
19 | - Fixed issue where a thread would hang due to TCP errors | |
20 | - Fixed the issue where the option -ac was overwriting existing filters. Now auto-calibration will add them where needed. | |
21 | - The `-w` flag now accepts comma delimited values in the form of `file1:W1,file2:W2`. | |
22 | - Links in the HTML report are now clickable | |
23 | - Fixed panic during wordlist flag parsing in Windows systems. | |
5 | 24 | |
6 | 25 | - v1.1.0 |
7 | 26 | - New |
0 | 0 | # Contributors |
1 | * [AverageSecurityGuy](https://github.com/averagesecurityguy) | |
2 | * [bp0](https://github.com/bp0lr) | |
1 | 3 | * [bjhulst](https://github.com/bjhulst) |
2 | 4 | * [bsysop](https://twitter.com/bsysop) |
3 | 5 | * [ccsplit](https://github.com/ccsplit) |
6 | * [choket](https://github.com/choket) | |
4 | 7 | * [codingo](https://github.com/codingo) |
5 | 8 | * [c_sto](https://github.com/c-sto) |
6 | 9 | * [Damian89](https://github.com/Damian89) |
7 | 10 | * [Daviey](https://github.com/Daviey) |
8 | 11 | * [delic](https://github.com/delic) |
9 | 12 | * [eur0pa](https://github.com/eur0pa) |
13 | * [fabiobauer](https://github.com/fabiobauer) | |
10 | 14 | * [fang0654](https://github.com/fang0654) |
11 | 15 | * [helpermika](https://github.com/helpermika) |
12 | 16 | * [Ice3man543](https://github.com/Ice3man543) |
13 | 17 | * [JamTookTheBait](https://github.com/JamTookTheBait) |
18 | * [jimen0](https://github.com/jimen0) | |
14 | 19 | * [joohoi](https://github.com/joohoi) |
20 | * [jsgv](https://github.com/jsgv) | |
15 | 21 | * [jvesiluoma](https://github.com/jvesiluoma) |
22 | * [Kiblyn11](https://github.com/Kiblyn11) | |
16 | 23 | * [lc](https://github.com/lc) |
17 | 24 | * [nnwakelam](https://twitter.com/nnwakelam) |
18 | 25 | * [oh6hay](https://github.com/oh6hay) |
0 | 0 | MIT License |
1 | 1 | |
2 | Copyright (c) 2020 Joona Hoikkala | |
2 | Copyright (c) 2021 Joona Hoikkala | |
3 | 3 | |
4 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy |
5 | 5 | of this software and associated documentation files (the "Software"), to deal |
13 | 13 | ## Installation |
14 | 14 | |
15 | 15 | - [Download](https://github.com/ffuf/ffuf/releases/latest) a prebuilt binary from [releases page](https://github.com/ffuf/ffuf/releases/latest), unpack and run! |
16 | or | |
17 | - If you have go compiler installed: `go get github.com/ffuf/ffuf` | |
18 | ||
19 | The only dependency of ffuf is Go 1.11. No dependencies outside of Go standard library are needed. | |
16 | ||
17 | _or_ | |
18 | - If you have recent go compiler installed: `go get -u github.com/ffuf/ffuf` (the same command works for updating) | |
19 | ||
20 | _or_ | |
21 | - git clone https://github.com/ffuf/ffuf ; cd ffuf ; go get ; go build | |
22 | ||
23 | Ffuf depends on Go 1.13 or greater. | |
20 | 24 | |
21 | 25 | ## Example usage |
22 | 26 | |
27 | The usage examples below show just the simplest tasks you can accomplish using `ffuf`. | |
28 | ||
29 | For more extensive documentation, with real life usage examples and tips, be sure to check out the awesome guide: | |
30 | "[Everything you need to know about FFUF](https://codingo.io/tools/ffuf/bounty/2020/09/17/everything-you-need-to-know-about-ffuf.html)" by | |
31 | Michael Skelton ([@codingo](https://github.com/codingo)). | |
32 | ||
33 | ||
23 | 34 | ### Typical directory discovery |
24 | 35 | |
25 | 36 | [![asciicast](https://asciinema.org/a/211350.png)](https://asciinema.org/a/211350) |
85 | 96 | When `--input-cmd` is used, ffuf will display matches as their position. This same position value will be available for the callee as an environment variable `$FFUF_NUM`. We'll use this position value as the seed for the mutator. Files example1.txt and example2.txt contain valid JSON payloads. We are matching all the responses, but filtering out response code `400 - Bad request`: |
86 | 97 | |
87 | 98 | ``` |
88 | ffuf --input-cmd 'radamsa --seed $FFUF_NUM example1.txt example2.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400 | |
99 | ffuf --input-cmd 'radamsa --seed $FFUF_NUM example1.txt example2.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/FUZZ -mc all -fc 400 | |
89 | 100 | ``` |
90 | 101 | |
91 | 102 | It of course isn't very efficient to call the mutator for each payload, so we can also pre-generate the payloads, still using [Radamsa](https://gitlab.com/akihe/radamsa) as an example: |
100 | 111 | ffuf --input-cmd 'cat $FFUF_NUM.txt' -H "Content-Type: application/json" -X POST -u https://ffuf.io.fi/ -mc all -fc 400 |
101 | 112 | ``` |
102 | 113 | |
114 | ### Configuration files | |
115 | ||
116 | When running ffuf, it first checks if a default configuration file exists. The file path for it is `~/.ffufrc` / `$HOME/.ffufrc` | |
117 | for most *nixes (for example `/home/joohoi/.ffufrc`) and `%USERPROFILE%\.ffufrc` for Windows. You can configure one or | |
118 | multiple options in this file, and they will be applied on every subsequent ffuf job. An example of .ffufrc file can be | |
119 | found [here](https://github.com/ffuf/ffuf/blob/master/ffufrc.example). | |
120 | ||
121 | The configuration options provided on the command line override the ones loaded from `~/.ffufrc`. | |
122 | Note: this does not apply to CLI flags that can be provided more than once. One such example is the `-H` (header) flag. | |
123 | In this case, the `-H` values provided on the command line will be _appended_ to the ones from the config file instead. | |
124 | ||
125 | Additionally, in case you wish to use a bunch of configuration files for different use cases, you can do this by defining | |
126 | the configuration file path using `-config` command line flag that takes the file path to the configuration file as its | |
127 | parameter. | |
128 | ||
103 | 129 | ## Usage |
104 | 130 | |
105 | 131 | To define the test case for ffuf, use the keyword `FUZZ` anywhere in the URL (`-u`), headers (`-H`), or POST data (`-d`). |
106 | 132 | |
107 | 133 | ``` |
108 | Fuzz Faster U Fool - v1.0 | |
134 | Fuzz Faster U Fool - v1.2.0-git | |
109 | 135 | |
110 | 136 | HTTP OPTIONS: |
111 | 137 | -H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted. |
112 | 138 | -X HTTP method to use (default: GET) |
113 | 139 | -b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality. |
114 | 140 | -d POST data |
141 | -ignore-body Do not fetch the response content. (default: false) | |
115 | 142 | -r Follow redirects (default: false) |
116 | 143 | -recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false) |
117 | 144 | -recursion-depth Maximum recursion depth. (default: 0) |
125 | 152 | -ac Automatically calibrate filtering options (default: false) |
126 | 153 | -acc Custom auto-calibration string. Can be used multiple times. Implies -ac |
127 | 154 | -c Colorize output. (default: false) |
128 | -maxtime Maximum running time in seconds for the entire process. (default: 0) | |
155 | -config Load configuration from a file | |
156 | -maxtime Maximum running time in seconds for entire process. (default: 0) | |
129 | 157 | -maxtime-job Maximum running time in seconds per job. (default: 0) |
130 | 158 | -p Seconds of `delay` between requests, or a range of random delay. For example "0.1" or "0.1-2.0" |
159 | -rate Rate of requests per second (default: 0) | |
131 | 160 | -s Do not print additional information (silent mode) (default: false) |
132 | 161 | -sa Stop on all error cases. Implies -sf and -se. (default: false) |
133 | 162 | -se Stop on spurious errors (default: false) |
136 | 165 | -v Verbose output, printing full URL and redirect location (if any) with the results. (default: false) |
137 | 166 | |
138 | 167 | MATCHER OPTIONS: |
139 | -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403) | |
168 | -mc Match HTTP status codes, or "all" for everything. (default: 200,204,301,302,307,401,403,405) | |
140 | 169 | -ml Match amount of lines in response |
141 | 170 | -mr Match regexp |
142 | 171 | -ms Match HTTP response size |
164 | 193 | -debug-log Write all of the internal logging to the specified file. |
165 | 194 | -o Write output to file |
166 | 195 | -od Directory path to store matched results to. |
167 | -of Output file format. Available formats: json, ejson, html, md, csv, ecsv (default: json) | |
196 | -of Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats) (default: json) | |
197 | -or Don't create the output file if we don't have results | |
168 | 198 | |
169 | 199 | EXAMPLE USAGE: |
170 | 200 | Fuzz file paths from wordlist.txt, match all responses but filter out those with content-size 42. |
182 | 212 | ffuf -w params.txt:PARAM -w values.txt:VAL -u https://example.org/?PARAM=VAL -mr "VAL" -c |
183 | 213 | |
184 | 214 | More information and examples: https://github.com/ffuf/ffuf |
215 | ||
185 | 216 | ``` |
186 | 217 | |
187 | 218 | ## Helper scripts and advanced payloads |
0 | # This is an example of a ffuf configuration file. | |
1 | # https://github.com/ffuf/ffuf | |
2 | ||
3 | [http] | |
4 | cookies = [ | |
5 | "cookiename=cookievalue" | |
6 | ] | |
7 | data = "post=data&key=value" | |
8 | followredirects = false | |
9 | headers = [ | |
10 | "X-Header-Name: value", | |
11 | "X-Another-Header: value" | |
12 | ] | |
13 | ignorebody = false | |
14 | method = "GET" | |
15 | proxyurl = "http://127.0.0.1:8080" | |
16 | recursion = false | |
17 | recursiondepth = 0 | |
18 | replayproxyurl = "http://127.0.0.1:8080" | |
19 | timeout = 10 | |
20 | url = "https://example.org/FUZZ" | |
21 | ||
22 | [general] | |
23 | autocalibration = false | |
24 | autocalibrationstrings = [ | |
25 | "randomtest", | |
26 | "admin" | |
27 | ] | |
28 | colors = false | |
29 | delay = "" | |
30 | maxtime = 0 | |
31 | maxtimejob = 0 | |
32 | quiet = false | |
33 | rate = 0 | |
34 | stopon403 = false | |
35 | stoponall = false | |
36 | stoponerrors = false | |
37 | threads = 40 | |
38 | verbose = false | |
39 | ||
40 | [input] | |
41 | dirsearchcompat = false | |
42 | extensions = "" | |
43 | ignorewordlistcomments = false | |
44 | inputmode = "clusterbomb" | |
45 | inputnum = 100 | |
46 | inputcommands = [ | |
47 | "seq 1 100:CUSTOMKEYWORD" | |
48 | ] | |
49 | request = "requestfile.txt" | |
50 | requestproto = "https" | |
51 | wordlists = [ | |
52 | "/path/to/wordlist:FUZZ", | |
53 | "/path/to/hostlist:HOST" | |
54 | ] | |
55 | ||
56 | ||
57 | [output] | |
58 | debuglog = "debug.log" | |
59 | outputdirectory = "/tmp/rawoutputdir" | |
60 | outputfile = "output.json" | |
61 | outputformat = "json" | |
62 | outputcreateemptyfile = false | |
63 | ||
64 | [filter] | |
65 | lines = "" | |
66 | regexp = "" | |
67 | size = "" | |
68 | status = "" | |
69 | words = "" | |
70 | ||
71 | [matcher] | |
72 | lines = "" | |
73 | regexp = "" | |
74 | size = "" | |
75 | status = "200,204,301,302,307,401,403,405" | |
76 | words = "" |
0 | 0 | module github.com/ffuf/ffuf |
1 | 1 | |
2 | go 1.11 | |
2 | go 1.13 | |
3 | ||
4 | require github.com/pelletier/go-toml v1.8.1 |
0 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= | |
1 | github.com/pelletier/go-toml v1.8.1 h1:1Nf83orprkJyknT6h7zbuEGUEjcyVlCxSUGTENmNCRM= | |
2 | github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= |
60 | 60 | Description: "", |
61 | 61 | Flags: make([]UsageFlag, 0), |
62 | 62 | Hidden: false, |
63 | ExpectedFlags: []string{"ac", "acc", "c", "maxtime", "maxtime-job", "p", "s", "sa", "se", "sf", "t", "v", "V"}, | |
63 | ExpectedFlags: []string{"ac", "acc", "c", "config", "maxtime", "maxtime-job", "p", "rate", "s", "sa", "se", "sf", "t", "v", "V"}, | |
64 | 64 | } |
65 | 65 | u_compat := UsageSection{ |
66 | 66 | Name: "COMPATIBILITY OPTIONS", |
88 | 88 | Description: "Options for input data for fuzzing. Wordlists and input generators.", |
89 | 89 | Flags: make([]UsageFlag, 0), |
90 | 90 | Hidden: false, |
91 | ExpectedFlags: []string{"D", "ic", "input-cmd", "input-num", "mode", "request", "request-proto", "e", "w"}, | |
91 | ExpectedFlags: []string{"D", "ic", "input-cmd", "input-num", "input-shell", "mode", "request", "request-proto", "e", "w"}, | |
92 | 92 | } |
93 | 93 | u_output := UsageSection{ |
94 | 94 | Name: "OUTPUT OPTIONS", |
95 | 95 | Description: "Options for output. Output file formats, file names and debug file locations.", |
96 | 96 | Flags: make([]UsageFlag, 0), |
97 | 97 | Hidden: false, |
98 | ExpectedFlags: []string{"debug-log", "o", "of", "od"}, | |
98 | ExpectedFlags: []string{"debug-log", "o", "of", "od", "or"}, | |
99 | 99 | } |
100 | 100 | sections := []UsageSection{u_http, u_general, u_compat, u_matcher, u_filter, u_input, u_output} |
101 | 101 |
0 | 0 | package main |
1 | 1 | |
2 | 2 | import ( |
3 | "bufio" | |
4 | 3 | "context" |
5 | 4 | "flag" |
6 | 5 | "fmt" |
7 | 6 | "io/ioutil" |
8 | 7 | "log" |
9 | "net/textproto" | |
10 | "net/url" | |
11 | 8 | "os" |
12 | "runtime" | |
13 | "strconv" | |
14 | 9 | "strings" |
15 | 10 | |
16 | 11 | "github.com/ffuf/ffuf/pkg/ffuf" |
20 | 15 | "github.com/ffuf/ffuf/pkg/runner" |
21 | 16 | ) |
22 | 17 | |
23 | type cliOptions struct { | |
24 | extensions string | |
25 | delay string | |
26 | filterStatus string | |
27 | filterSize string | |
28 | filterRegexp string | |
29 | filterWords string | |
30 | filterLines string | |
31 | matcherStatus string | |
32 | matcherSize string | |
33 | matcherRegexp string | |
34 | matcherWords string | |
35 | matcherLines string | |
36 | proxyURL string | |
37 | replayProxyURL string | |
38 | request string | |
39 | requestProto string | |
40 | URL string | |
41 | outputFormat string | |
42 | ignoreBody bool | |
43 | wordlists multiStringFlag | |
44 | inputcommands multiStringFlag | |
45 | headers multiStringFlag | |
46 | cookies multiStringFlag | |
47 | AutoCalibrationStrings multiStringFlag | |
48 | showVersion bool | |
49 | debugLog string | |
50 | } | |
51 | ||
52 | 18 | type multiStringFlag []string |
19 | type wordlistFlag []string | |
53 | 20 | |
54 | 21 | func (m *multiStringFlag) String() string { |
22 | return "" | |
23 | } | |
24 | ||
25 | func (m *wordlistFlag) String() string { | |
55 | 26 | return "" |
56 | 27 | } |
57 | 28 | |
60 | 31 | return nil |
61 | 32 | } |
62 | 33 | |
63 | func main() { | |
64 | ctx, cancel := context.WithCancel(context.Background()) | |
65 | defer cancel() | |
66 | conf := ffuf.NewConfig(ctx) | |
67 | opts := cliOptions{} | |
34 | func (m *wordlistFlag) Set(value string) error { | |
35 | delimited := strings.Split(value, ",") | |
36 | ||
37 | if len(delimited) > 1 { | |
38 | *m = append(*m, delimited...) | |
39 | } else { | |
40 | *m = append(*m, value) | |
41 | } | |
42 | ||
43 | return nil | |
44 | } | |
45 | ||
46 | //ParseFlags parses the command line flags and (re)populates the ConfigOptions struct | |
47 | func ParseFlags(opts *ffuf.ConfigOptions) *ffuf.ConfigOptions { | |
68 | 48 | var ignored bool |
69 | flag.BoolVar(&conf.IgnoreWordlistComments, "ic", false, "Ignore wordlist comments") | |
70 | flag.StringVar(&opts.extensions, "e", "", "Comma separated list of extensions. Extends FUZZ keyword.") | |
71 | flag.BoolVar(&conf.DirSearchCompat, "D", false, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.") | |
72 | flag.Var(&opts.headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.") | |
73 | flag.StringVar(&opts.URL, "u", "", "Target URL") | |
74 | flag.Var(&opts.wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'") | |
49 | var cookies, autocalibrationstrings, headers, inputcommands multiStringFlag | |
50 | var wordlists wordlistFlag | |
51 | ||
52 | cookies = opts.HTTP.Cookies | |
53 | autocalibrationstrings = opts.General.AutoCalibrationStrings | |
54 | headers = opts.HTTP.Headers | |
55 | inputcommands = opts.Input.Inputcommands | |
56 | ||
57 | flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)") | |
58 | flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)") | |
75 | 59 | flag.BoolVar(&ignored, "k", false, "Dummy flag for backwards compatibility") |
76 | flag.StringVar(&opts.delay, "p", "", "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"") | |
77 | flag.StringVar(&opts.filterStatus, "fc", "", "Filter HTTP status codes from response. Comma separated list of codes and ranges") | |
78 | flag.StringVar(&opts.filterSize, "fs", "", "Filter HTTP response size. Comma separated list of sizes and ranges") | |
79 | flag.StringVar(&opts.filterRegexp, "fr", "", "Filter regexp") | |
80 | flag.StringVar(&opts.filterWords, "fw", "", "Filter by amount of words in response. Comma separated list of word counts and ranges") | |
81 | flag.StringVar(&opts.filterLines, "fl", "", "Filter by amount of lines in response. Comma separated list of line counts and ranges") | |
82 | flag.StringVar(&conf.Data, "d", "", "POST data") | |
83 | flag.StringVar(&conf.Data, "data", "", "POST data (alias of -d)") | |
84 | flag.StringVar(&conf.Data, "data-ascii", "", "POST data (alias of -d)") | |
85 | flag.StringVar(&conf.Data, "data-binary", "", "POST data (alias of -d)") | |
86 | flag.BoolVar(&conf.Colors, "c", false, "Colorize output.") | |
87 | flag.BoolVar(&ignored, "compressed", true, "Dummy flag for copy as curl functionality (ignored)") | |
88 | flag.Var(&opts.inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.") | |
89 | flag.IntVar(&conf.InputNum, "input-num", 100, "Number of inputs to test. Used in conjunction with --input-cmd.") | |
90 | flag.StringVar(&conf.InputMode, "mode", "clusterbomb", "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork") | |
91 | flag.BoolVar(&ignored, "i", true, "Dummy flag for copy as curl functionality (ignored)") | |
92 | flag.Var(&opts.cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.") | |
93 | flag.Var(&opts.cookies, "cookie", "Cookie data (alias of -b)") | |
94 | flag.StringVar(&opts.matcherStatus, "mc", "200,204,301,302,307,401,403", "Match HTTP status codes, or \"all\" for everything.") | |
95 | flag.StringVar(&opts.matcherSize, "ms", "", "Match HTTP response size") | |
96 | flag.StringVar(&opts.matcherRegexp, "mr", "", "Match regexp") | |
97 | flag.StringVar(&opts.matcherWords, "mw", "", "Match amount of words in response") | |
98 | flag.StringVar(&opts.matcherLines, "ml", "", "Match amount of lines in response") | |
99 | flag.StringVar(&opts.proxyURL, "x", "", "HTTP Proxy URL") | |
100 | flag.StringVar(&opts.request, "request", "", "File containing the raw http request") | |
101 | flag.StringVar(&opts.requestProto, "request-proto", "https", "Protocol to use along with raw request") | |
102 | flag.StringVar(&conf.Method, "X", "GET", "HTTP method to use") | |
103 | flag.StringVar(&conf.OutputFile, "o", "", "Write output to file") | |
104 | flag.StringVar(&opts.outputFormat, "of", "json", "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)") | |
105 | flag.StringVar(&conf.OutputDirectory, "od", "", "Directory path to store matched results to.") | |
106 | flag.BoolVar(&conf.IgnoreBody, "ignore-body", false, "Do not fetch the response content.") | |
107 | flag.BoolVar(&conf.Quiet, "s", false, "Do not print additional information (silent mode)") | |
108 | flag.BoolVar(&conf.StopOn403, "sf", false, "Stop when > 95% of responses return 403 Forbidden") | |
109 | flag.BoolVar(&conf.StopOnErrors, "se", false, "Stop on spurious errors") | |
110 | flag.BoolVar(&conf.StopOnAll, "sa", false, "Stop on all error cases. Implies -sf and -se.") | |
111 | flag.BoolVar(&conf.FollowRedirects, "r", false, "Follow redirects") | |
112 | flag.BoolVar(&conf.Recursion, "recursion", false, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.") | |
113 | flag.IntVar(&conf.RecursionDepth, "recursion-depth", 0, "Maximum recursion depth.") | |
114 | flag.StringVar(&opts.replayProxyURL, "replay-proxy", "", "Replay matched requests using this proxy.") | |
115 | flag.BoolVar(&conf.AutoCalibration, "ac", false, "Automatically calibrate filtering options") | |
116 | flag.Var(&opts.AutoCalibrationStrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac") | |
117 | flag.IntVar(&conf.Threads, "t", 40, "Number of concurrent threads.") | |
118 | flag.IntVar(&conf.Timeout, "timeout", 10, "HTTP request timeout in seconds.") | |
119 | flag.IntVar(&conf.MaxTime, "maxtime", 0, "Maximum running time in seconds for entire process.") | |
120 | flag.IntVar(&conf.MaxTimeJob, "maxtime-job", 0, "Maximum running time in seconds per job.") | |
121 | flag.BoolVar(&conf.Verbose, "v", false, "Verbose output, printing full URL and redirect location (if any) with the results.") | |
122 | flag.BoolVar(&opts.showVersion, "V", false, "Show version information.") | |
123 | flag.StringVar(&opts.debugLog, "debug-log", "", "Write all of the internal logging to the specified file.") | |
60 | flag.BoolVar(&opts.Output.OutputCreateEmptyFile, "or", opts.Output.OutputCreateEmptyFile, "Don't create the output file if we don't have results") | |
61 | flag.BoolVar(&opts.General.AutoCalibration, "ac", opts.General.AutoCalibration, "Automatically calibrate filtering options") | |
62 | flag.BoolVar(&opts.General.Colors, "c", opts.General.Colors, "Colorize output.") | |
63 | flag.BoolVar(&opts.General.Quiet, "s", opts.General.Quiet, "Do not print additional information (silent mode)") | |
64 | flag.BoolVar(&opts.General.ShowVersion, "V", opts.General.ShowVersion, "Show version information.") | |
65 | flag.BoolVar(&opts.General.StopOn403, "sf", opts.General.StopOn403, "Stop when > 95% of responses return 403 Forbidden") | |
66 | flag.BoolVar(&opts.General.StopOnAll, "sa", opts.General.StopOnAll, "Stop on all error cases. Implies -sf and -se.") | |
67 | flag.BoolVar(&opts.General.StopOnErrors, "se", opts.General.StopOnErrors, "Stop on spurious errors") | |
68 | flag.BoolVar(&opts.General.Verbose, "v", opts.General.Verbose, "Verbose output, printing full URL and redirect location (if any) with the results.") | |
69 | flag.BoolVar(&opts.HTTP.FollowRedirects, "r", opts.HTTP.FollowRedirects, "Follow redirects") | |
70 | flag.BoolVar(&opts.HTTP.IgnoreBody, "ignore-body", opts.HTTP.IgnoreBody, "Do not fetch the response content.") | |
71 | flag.BoolVar(&opts.HTTP.Recursion, "recursion", opts.HTTP.Recursion, "Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it.") | |
72 | flag.BoolVar(&opts.Input.DirSearchCompat, "D", opts.Input.DirSearchCompat, "DirSearch wordlist compatibility mode. Used in conjunction with -e flag.") | |
73 | flag.BoolVar(&opts.Input.IgnoreWordlistComments, "ic", opts.Input.IgnoreWordlistComments, "Ignore wordlist comments") | |
74 | flag.IntVar(&opts.General.MaxTime, "maxtime", opts.General.MaxTime, "Maximum running time in seconds for entire process.") | |
75 | flag.IntVar(&opts.General.MaxTimeJob, "maxtime-job", opts.General.MaxTimeJob, "Maximum running time in seconds per job.") | |
76 | flag.IntVar(&opts.General.Rate, "rate", opts.General.Rate, "Rate of requests per second") | |
77 | flag.IntVar(&opts.General.Threads, "t", opts.General.Threads, "Number of concurrent threads.") | |
78 | flag.IntVar(&opts.HTTP.RecursionDepth, "recursion-depth", opts.HTTP.RecursionDepth, "Maximum recursion depth.") | |
79 | flag.IntVar(&opts.HTTP.Timeout, "timeout", opts.HTTP.Timeout, "HTTP request timeout in seconds.") | |
80 | flag.IntVar(&opts.Input.InputNum, "input-num", opts.Input.InputNum, "Number of inputs to test. Used in conjunction with --input-cmd.") | |
81 | flag.StringVar(&opts.General.ConfigFile, "config", "", "Load configuration from a file") | |
82 | flag.StringVar(&opts.Filter.Lines, "fl", opts.Filter.Lines, "Filter by amount of lines in response. Comma separated list of line counts and ranges") | |
83 | flag.StringVar(&opts.Filter.Regexp, "fr", opts.Filter.Regexp, "Filter regexp") | |
84 | flag.StringVar(&opts.Filter.Size, "fs", opts.Filter.Size, "Filter HTTP response size. Comma separated list of sizes and ranges") | |
85 | flag.StringVar(&opts.Filter.Status, "fc", opts.Filter.Status, "Filter HTTP status codes from response. Comma separated list of codes and ranges") | |
86 | flag.StringVar(&opts.Filter.Words, "fw", opts.Filter.Words, "Filter by amount of words in response. Comma separated list of word counts and ranges") | |
87 | flag.StringVar(&opts.General.Delay, "p", opts.General.Delay, "Seconds of `delay` between requests, or a range of random delay. For example \"0.1\" or \"0.1-2.0\"") | |
88 | flag.StringVar(&opts.HTTP.Data, "d", opts.HTTP.Data, "POST data") | |
89 | flag.StringVar(&opts.HTTP.Data, "data", opts.HTTP.Data, "POST data (alias of -d)") | |
90 | flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)") | |
91 | flag.StringVar(&opts.HTTP.Data, "data-binary", opts.HTTP.Data, "POST data (alias of -d)") | |
92 | flag.StringVar(&opts.HTTP.Method, "X", opts.HTTP.Method, "HTTP method to use") | |
93 | flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "HTTP Proxy URL") | |
94 | flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.") | |
95 | flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL") | |
96 | flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.") | |
97 | flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork") | |
98 | flag.StringVar(&opts.Input.InputShell, "input-shell", opts.Input.InputShell, "Shell to be used for running command") | |
99 | flag.StringVar(&opts.Input.Request, "request", opts.Input.Request, "File containing the raw http request") | |
100 | flag.StringVar(&opts.Input.RequestProto, "request-proto", opts.Input.RequestProto, "Protocol to use along with raw request") | |
101 | flag.StringVar(&opts.Matcher.Lines, "ml", opts.Matcher.Lines, "Match amount of lines in response") | |
102 | flag.StringVar(&opts.Matcher.Regexp, "mr", opts.Matcher.Regexp, "Match regexp") | |
103 | flag.StringVar(&opts.Matcher.Size, "ms", opts.Matcher.Size, "Match HTTP response size") | |
104 | flag.StringVar(&opts.Matcher.Status, "mc", opts.Matcher.Status, "Match HTTP status codes, or \"all\" for everything.") | |
105 | flag.StringVar(&opts.Matcher.Words, "mw", opts.Matcher.Words, "Match amount of words in response") | |
106 | flag.StringVar(&opts.Output.DebugLog, "debug-log", opts.Output.DebugLog, "Write all of the internal logging to the specified file.") | |
107 | flag.StringVar(&opts.Output.OutputDirectory, "od", opts.Output.OutputDirectory, "Directory path to store matched results to.") | |
108 | flag.StringVar(&opts.Output.OutputFile, "o", opts.Output.OutputFile, "Write output to file") | |
109 | flag.StringVar(&opts.Output.OutputFormat, "of", opts.Output.OutputFormat, "Output file format. Available formats: json, ejson, html, md, csv, ecsv (or, 'all' for all formats)") | |
110 | flag.Var(&autocalibrationstrings, "acc", "Custom auto-calibration string. Can be used multiple times. Implies -ac") | |
111 | flag.Var(&cookies, "b", "Cookie data `\"NAME1=VALUE1; NAME2=VALUE2\"` for copy as curl functionality.") | |
112 | flag.Var(&cookies, "cookie", "Cookie data (alias of -b)") | |
113 | flag.Var(&headers, "H", "Header `\"Name: Value\"`, separated by colon. Multiple -H flags are accepted.") | |
114 | flag.Var(&inputcommands, "input-cmd", "Command producing the input. --input-num is required when using this input method. Overrides -w.") | |
115 | flag.Var(&wordlists, "w", "Wordlist file path and (optional) keyword separated by colon. eg. '/path/to/wordlist:KEYWORD'") | |
124 | 116 | flag.Usage = Usage |
125 | 117 | flag.Parse() |
126 | if opts.showVersion { | |
118 | ||
119 | opts.General.AutoCalibrationStrings = autocalibrationstrings | |
120 | opts.HTTP.Cookies = cookies | |
121 | opts.HTTP.Headers = headers | |
122 | opts.Input.Inputcommands = inputcommands | |
123 | opts.Input.Wordlists = wordlists | |
124 | return opts | |
125 | } | |
126 | ||
127 | func main() { | |
128 | ||
129 | var err, optserr error | |
130 | ||
131 | // prepare the default config options from default config file | |
132 | var opts *ffuf.ConfigOptions | |
133 | opts, optserr = ffuf.ReadDefaultConfig() | |
134 | ||
135 | opts = ParseFlags(opts) | |
136 | ||
137 | if opts.General.ShowVersion { | |
127 | 138 | fmt.Printf("ffuf version: %s\n", ffuf.VERSION) |
128 | 139 | os.Exit(0) |
129 | 140 | } |
130 | if len(opts.debugLog) != 0 { | |
131 | f, err := os.OpenFile(opts.debugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) | |
141 | if len(opts.Output.DebugLog) != 0 { | |
142 | f, err := os.OpenFile(opts.Output.DebugLog, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) | |
132 | 143 | if err != nil { |
133 | 144 | fmt.Fprintf(os.Stderr, "Disabling logging, encountered error(s): %s\n", err) |
134 | 145 | log.SetOutput(ioutil.Discard) |
139 | 150 | } else { |
140 | 151 | log.SetOutput(ioutil.Discard) |
141 | 152 | } |
142 | if err := prepareConfig(&opts, &conf); err != nil { | |
153 | if optserr != nil { | |
154 | log.Printf("Error while opening default config file: %s", optserr) | |
155 | } | |
156 | ||
157 | if opts.General.ConfigFile != "" { | |
158 | opts, err = ffuf.ReadConfig(opts.General.ConfigFile) | |
159 | if err != nil { | |
160 | fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err) | |
161 | Usage() | |
162 | fmt.Fprintf(os.Stderr, "Encoutered error(s): %s\n", err) | |
163 | os.Exit(1) | |
164 | } | |
165 | // Reset the flag package state | |
166 | flag.CommandLine = flag.NewFlagSet(os.Args[0], flag.ExitOnError) | |
167 | // Re-parse the cli options | |
168 | opts = ParseFlags(opts) | |
169 | } | |
170 | ||
171 | // Prepare context and set up Config struct | |
172 | ctx, cancel := context.WithCancel(context.Background()) | |
173 | defer cancel() | |
174 | conf, err := ffuf.ConfigFromOptions(opts, ctx, cancel) | |
175 | if err != nil { | |
143 | 176 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
144 | 177 | Usage() |
145 | os.Exit(1) | |
146 | } | |
147 | job, err := prepareJob(&conf) | |
178 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) | |
179 | os.Exit(1) | |
180 | } | |
181 | job, err := prepareJob(conf) | |
148 | 182 | if err != nil { |
149 | 183 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
150 | 184 | Usage() |
151 | os.Exit(1) | |
152 | } | |
153 | if err := prepareFilters(&opts, &conf); err != nil { | |
185 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) | |
186 | os.Exit(1) | |
187 | } | |
188 | if err := filter.SetupFilters(opts, conf); err != nil { | |
154 | 189 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) |
155 | 190 | Usage() |
191 | fmt.Fprintf(os.Stderr, "Encountered error(s): %s\n", err) | |
156 | 192 | os.Exit(1) |
157 | 193 | } |
158 | 194 | |
166 | 202 | } |
167 | 203 | |
168 | 204 | func prepareJob(conf *ffuf.Config) (*ffuf.Job, error) { |
169 | job := &ffuf.Job{ | |
170 | Config: conf, | |
171 | } | |
172 | errs := ffuf.NewMultierror() | |
173 | var err error | |
174 | inputprovider, err := input.NewInputProvider(conf) | |
175 | if err != nil { | |
176 | errs.Add(err) | |
177 | } | |
205 | job := ffuf.NewJob(conf) | |
206 | var errs ffuf.Multierror | |
207 | job.Input, errs = input.NewInputProvider(conf) | |
178 | 208 | // TODO: implement error handling for runnerprovider and outputprovider |
179 | 209 | // We only have http runner right now |
180 | 210 | job.Runner = runner.NewRunnerByName("http", conf, false) |
181 | 211 | if len(conf.ReplayProxyURL) > 0 { |
182 | 212 | job.ReplayRunner = runner.NewRunnerByName("http", conf, true) |
183 | 213 | } |
184 | // Initialize the correct inputprovider | |
185 | for _, v := range conf.InputProviders { | |
186 | err = inputprovider.AddProvider(v) | |
187 | if err != nil { | |
188 | errs.Add(err) | |
189 | } | |
190 | } | |
191 | job.Input = inputprovider | |
192 | 214 | // We only have stdout outputprovider right now |
193 | 215 | job.Output = output.NewOutputProviderByName("stdout", conf) |
194 | 216 | return job, errs.ErrorOrNil() |
195 | 217 | } |
196 | ||
197 | func prepareFilters(parseOpts *cliOptions, conf *ffuf.Config) error { | |
198 | errs := ffuf.NewMultierror() | |
199 | // If any other matcher is set, ignore -mc default value | |
200 | matcherSet := false | |
201 | statusSet := false | |
202 | warningIgnoreBody := false | |
203 | flag.Visit(func(f *flag.Flag) { | |
204 | if f.Name == "mc" { | |
205 | statusSet = true | |
206 | } | |
207 | if f.Name == "ms" { | |
208 | matcherSet = true | |
209 | warningIgnoreBody = true | |
210 | } | |
211 | if f.Name == "ml" { | |
212 | matcherSet = true | |
213 | warningIgnoreBody = true | |
214 | } | |
215 | if f.Name == "mr" { | |
216 | matcherSet = true | |
217 | } | |
218 | if f.Name == "mw" { | |
219 | matcherSet = true | |
220 | warningIgnoreBody = true | |
221 | } | |
222 | }) | |
223 | if statusSet || !matcherSet { | |
224 | if err := filter.AddMatcher(conf, "status", parseOpts.matcherStatus); err != nil { | |
225 | errs.Add(err) | |
226 | } | |
227 | } | |
228 | ||
229 | if parseOpts.filterStatus != "" { | |
230 | if err := filter.AddFilter(conf, "status", parseOpts.filterStatus); err != nil { | |
231 | errs.Add(err) | |
232 | } | |
233 | } | |
234 | if parseOpts.filterSize != "" { | |
235 | warningIgnoreBody = true | |
236 | if err := filter.AddFilter(conf, "size", parseOpts.filterSize); err != nil { | |
237 | errs.Add(err) | |
238 | } | |
239 | } | |
240 | if parseOpts.filterRegexp != "" { | |
241 | if err := filter.AddFilter(conf, "regexp", parseOpts.filterRegexp); err != nil { | |
242 | errs.Add(err) | |
243 | } | |
244 | } | |
245 | if parseOpts.filterWords != "" { | |
246 | warningIgnoreBody = true | |
247 | if err := filter.AddFilter(conf, "word", parseOpts.filterWords); err != nil { | |
248 | errs.Add(err) | |
249 | } | |
250 | } | |
251 | if parseOpts.filterLines != "" { | |
252 | warningIgnoreBody = true | |
253 | if err := filter.AddFilter(conf, "line", parseOpts.filterLines); err != nil { | |
254 | errs.Add(err) | |
255 | } | |
256 | } | |
257 | if parseOpts.matcherSize != "" { | |
258 | if err := filter.AddMatcher(conf, "size", parseOpts.matcherSize); err != nil { | |
259 | errs.Add(err) | |
260 | } | |
261 | } | |
262 | if parseOpts.matcherRegexp != "" { | |
263 | if err := filter.AddMatcher(conf, "regexp", parseOpts.matcherRegexp); err != nil { | |
264 | errs.Add(err) | |
265 | } | |
266 | } | |
267 | if parseOpts.matcherWords != "" { | |
268 | if err := filter.AddMatcher(conf, "word", parseOpts.matcherWords); err != nil { | |
269 | errs.Add(err) | |
270 | } | |
271 | } | |
272 | if parseOpts.matcherLines != "" { | |
273 | if err := filter.AddMatcher(conf, "line", parseOpts.matcherLines); err != nil { | |
274 | errs.Add(err) | |
275 | } | |
276 | } | |
277 | if conf.IgnoreBody && warningIgnoreBody { | |
278 | fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") | |
279 | } | |
280 | return errs.ErrorOrNil() | |
281 | } | |
282 | ||
// prepareConfig validates the parsed CLI options and copies them into the
// runtime Config. All problems are collected into a single Multierror so the
// user sees every configuration mistake at once instead of one per run.
func prepareConfig(parseOpts *cliOptions, conf *ffuf.Config) error {
	//TODO: refactor in a proper flag library that can handle things like required flags
	errs := ffuf.NewMultierror()

	var err error
	var err2 error
	// Either a target URL (-u) or a raw request file (-request) must be given.
	if len(parseOpts.URL) == 0 && parseOpts.request == "" {
		errs.Add(fmt.Errorf("-u flag or -request flag is required"))
	}

	// prepare extensions
	if parseOpts.extensions != "" {
		extensions := strings.Split(parseOpts.extensions, ",")
		conf.Extensions = extensions
	}

	// Convert cookies to a header
	if len(parseOpts.cookies) > 0 {
		parseOpts.headers.Set("Cookie: " + strings.Join(parseOpts.cookies, "; "))
	}

	//Prepare inputproviders
	for _, v := range parseOpts.wordlists {
		var wl []string
		if runtime.GOOS == "windows" {
			// Try to ensure that Windows file paths like C:\path\to\wordlist.txt:KEYWORD are treated properly
			if ffuf.FileExists(v) {
				// The wordlist was supplied without a keyword parameter
				wl = []string{v}
			} else {
				// Strip the trailing :KEYWORD and test whether the remainder
				// is a real file (drive-letter colons make SplitN unreliable).
				filepart := v[:strings.LastIndex(v, ":")]
				if ffuf.FileExists(filepart) {
					wl = []string{filepart, v[strings.LastIndex(v, ":")+1:]}
				} else {
					// The file was not found. Use full wordlist parameter value for more concise error message down the line
					wl = []string{v}
				}
			}
		} else {
			wl = strings.SplitN(v, ":", 2)
		}
		if len(wl) == 2 {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: wl[1],
			})
		} else {
			// No keyword supplied: default to FUZZ.
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "wordlist",
				Value:   wl[0],
				Keyword: "FUZZ",
			})
		}
	}
	// Same treatment for --input-cmd generators: value is COMMAND[:KEYWORD].
	for _, v := range parseOpts.inputcommands {
		ic := strings.SplitN(v, ":", 2)
		if len(ic) == 2 {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: ic[1],
			})
			conf.CommandKeywords = append(conf.CommandKeywords, ic[0])
		} else {
			conf.InputProviders = append(conf.InputProviders, ffuf.InputProviderConfig{
				Name:    "command",
				Value:   ic[0],
				Keyword: "FUZZ",
			})
			conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ")
		}
	}

	if len(conf.InputProviders) == 0 {
		errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required"))
	}

	// Prepare the request using body
	if parseOpts.request != "" {
		err := parseRawRequest(parseOpts, conf)
		if err != nil {
			errmsg := fmt.Sprintf("Could not parse raw request: %s", err)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	//Prepare URL
	// NOTE: an explicit -u overrides the URL derived from a raw request file.
	if parseOpts.URL != "" {
		conf.Url = parseOpts.URL
	}

	//Prepare headers and make canonical
	for _, v := range parseOpts.headers {
		hs := strings.SplitN(v, ":", 2)
		if len(hs) == 2 {
			// trim and make canonical
			// except if used in custom defined header
			var CanonicalNeeded bool = true
			for _, a := range conf.CommandKeywords {
				if a == hs[0] {
					CanonicalNeeded = false
				}
			}
			// check if part of InputProviders
			if CanonicalNeeded {
				for _, b := range conf.InputProviders {
					if b.Keyword == hs[0] {
						CanonicalNeeded = false
					}
				}
			}
			// Canonicalizing a FUZZ keyword would break substitution, so
			// keyword headers are stored verbatim.
			if CanonicalNeeded {
				var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0]))
				conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1])
			} else {
				conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1])
			}
		} else {
			errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator"))
		}
	}

	//Prepare delay
	// -p accepts either a single float ("0.1") or a dash-separated range ("0.1-2.0").
	d := strings.Split(parseOpts.delay, "-")
	if len(d) > 2 {
		errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
	} else if len(d) == 2 {
		conf.Delay.IsRange = true
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(d[0], 64)
		conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64)
		if err != nil || err2 != nil {
			errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5"))
		}
	} else if len(parseOpts.delay) > 0 {
		conf.Delay.IsRange = false
		conf.Delay.HasDelay = true
		conf.Delay.Min, err = strconv.ParseFloat(parseOpts.delay, 64)
		if err != nil {
			errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\""))
		}
	}

	// Verify proxy url format
	if len(parseOpts.proxyURL) > 0 {
		_, err := url.Parse(parseOpts.proxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err))
		} else {
			conf.ProxyURL = parseOpts.proxyURL
		}
	}

	// Verify replayproxy url format
	if len(parseOpts.replayProxyURL) > 0 {
		_, err := url.Parse(parseOpts.replayProxyURL)
		if err != nil {
			errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err))
		} else {
			conf.ReplayProxyURL = parseOpts.replayProxyURL
		}
	}

	//Check the output file format option
	if conf.OutputFile != "" {
		//No need to check / error out if output file isn't defined
		outputFormats := []string{"all", "json", "ejson", "html", "md", "csv", "ecsv"}
		found := false
		for _, f := range outputFormats {
			if f == parseOpts.outputFormat {
				conf.OutputFormat = f
				found = true
			}
		}
		if !found {
			errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.outputFormat))
		}
	}

	// Auto-calibration strings
	if len(parseOpts.AutoCalibrationStrings) > 0 {
		conf.AutoCalibrationStrings = parseOpts.AutoCalibrationStrings
	}
	// Using -acc implies -ac
	if len(conf.AutoCalibrationStrings) > 0 {
		conf.AutoCalibration = true
	}

	// Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP
	if len(conf.Data) > 0 &&
		conf.Method == "GET" &&
		//don't modify the method automatically if a request file is being used as input
		len(parseOpts.request) == 0 {

		conf.Method = "POST"
	}

	// Record the full invocation for reporting output.
	conf.CommandLine = strings.Join(os.Args, " ")

	// Every configured keyword must actually appear somewhere fuzzing can
	// substitute it; otherwise the run would silently do nothing useful.
	for _, provider := range conf.InputProviders {
		if !keywordPresent(provider.Keyword, conf) {
			errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword)
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	// Do checks for recursion mode
	if conf.Recursion {
		if !strings.HasSuffix(conf.Url, "FUZZ") {
			errmsg := fmt.Sprintf("When using -recursion the URL (-u) must end with FUZZ keyword.")
			errs.Add(fmt.Errorf(errmsg))
		}
	}

	return errs.ErrorOrNil()
}
500 | ||
501 | func parseRawRequest(parseOpts *cliOptions, conf *ffuf.Config) error { | |
502 | file, err := os.Open(parseOpts.request) | |
503 | if err != nil { | |
504 | return fmt.Errorf("could not open request file: %s", err) | |
505 | } | |
506 | defer file.Close() | |
507 | ||
508 | r := bufio.NewReader(file) | |
509 | ||
510 | s, err := r.ReadString('\n') | |
511 | if err != nil { | |
512 | return fmt.Errorf("could not read request: %s", err) | |
513 | } | |
514 | parts := strings.Split(s, " ") | |
515 | if len(parts) < 3 { | |
516 | return fmt.Errorf("malformed request supplied") | |
517 | } | |
518 | // Set the request Method | |
519 | conf.Method = parts[0] | |
520 | ||
521 | for { | |
522 | line, err := r.ReadString('\n') | |
523 | line = strings.TrimSpace(line) | |
524 | ||
525 | if err != nil || line == "" { | |
526 | break | |
527 | } | |
528 | ||
529 | p := strings.SplitN(line, ":", 2) | |
530 | if len(p) != 2 { | |
531 | continue | |
532 | } | |
533 | ||
534 | if strings.EqualFold(p[0], "content-length") { | |
535 | continue | |
536 | } | |
537 | ||
538 | conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1]) | |
539 | } | |
540 | ||
541 | // Handle case with the full http url in path. In that case, | |
542 | // ignore any host header that we encounter and use the path as request URL | |
543 | if strings.HasPrefix(parts[1], "http") { | |
544 | parsed, err := url.Parse(parts[1]) | |
545 | if err != nil { | |
546 | return fmt.Errorf("could not parse request URL: %s", err) | |
547 | } | |
548 | conf.Url = parts[1] | |
549 | conf.Headers["Host"] = parsed.Host | |
550 | } else { | |
551 | // Build the request URL from the request | |
552 | conf.Url = parseOpts.requestProto + "://" + conf.Headers["Host"] + parts[1] | |
553 | } | |
554 | ||
555 | // Set the request body | |
556 | b, err := ioutil.ReadAll(r) | |
557 | if err != nil { | |
558 | return fmt.Errorf("could not read request body: %s", err) | |
559 | } | |
560 | conf.Data = string(b) | |
561 | ||
562 | return nil | |
563 | } | |
564 | ||
565 | func keywordPresent(keyword string, conf *ffuf.Config) bool { | |
566 | //Search for keyword from HTTP method, URL and POST data too | |
567 | if strings.Index(conf.Method, keyword) != -1 { | |
568 | return true | |
569 | } | |
570 | if strings.Index(conf.Url, keyword) != -1 { | |
571 | return true | |
572 | } | |
573 | if strings.Index(conf.Data, keyword) != -1 { | |
574 | return true | |
575 | } | |
576 | for k, v := range conf.Headers { | |
577 | if strings.Index(k, keyword) != -1 { | |
578 | return true | |
579 | } | |
580 | if strings.Index(v, keyword) != -1 { | |
581 | return true | |
582 | } | |
583 | } | |
584 | return false | |
585 | } |
4 | 4 | ) |
5 | 5 | |
6 | 6 | type Config struct { |
7 | AutoCalibration bool `json:"autocalibration"` | |
8 | AutoCalibrationStrings []string `json:"autocalibration_strings"` | |
9 | Cancel context.CancelFunc `json:"-"` | |
10 | Colors bool `json:"colors"` | |
11 | CommandKeywords []string `json:"-"` | |
12 | CommandLine string `json:"cmdline"` | |
13 | ConfigFile string `json:"configfile"` | |
14 | Context context.Context `json:"-"` | |
15 | Data string `json:"postdata"` | |
16 | Delay optRange `json:"delay"` | |
17 | DirSearchCompat bool `json:"dirsearch_compatibility"` | |
18 | Extensions []string `json:"extensions"` | |
19 | Filters map[string]FilterProvider `json:"filters"` | |
20 | FollowRedirects bool `json:"follow_redirects"` | |
7 | 21 | Headers map[string]string `json:"headers"` |
8 | Extensions []string `json:"extensions"` | |
9 | DirSearchCompat bool `json:"dirsearch_compatibility"` | |
22 | IgnoreBody bool `json:"ignorebody"` | |
23 | IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` | |
24 | InputMode string `json:"inputmode"` | |
25 | InputNum int `json:"cmd_inputnum"` | |
26 | InputProviders []InputProviderConfig `json:"inputproviders"` | |
27 | InputShell string `json:"inputshell"` | |
28 | Matchers map[string]FilterProvider `json:"matchers"` | |
29 | MaxTime int `json:"maxtime"` | |
30 | MaxTimeJob int `json:"maxtime_job"` | |
10 | 31 | Method string `json:"method"` |
11 | Url string `json:"url"` | |
12 | Data string `json:"postdata"` | |
13 | Quiet bool `json:"quiet"` | |
14 | Colors bool `json:"colors"` | |
15 | InputProviders []InputProviderConfig `json:"inputproviders"` | |
16 | CommandKeywords []string `json:"-"` | |
17 | InputNum int `json:"cmd_inputnum"` | |
18 | InputMode string `json:"inputmode"` | |
19 | 32 | OutputDirectory string `json:"outputdirectory"` |
20 | 33 | OutputFile string `json:"outputfile"` |
21 | 34 | OutputFormat string `json:"outputformat"` |
22 | IgnoreBody bool `json:"ignorebody"` | |
23 | IgnoreWordlistComments bool `json:"ignore_wordlist_comments"` | |
24 | StopOn403 bool `json:"stop_403"` | |
25 | StopOnErrors bool `json:"stop_errors"` | |
26 | StopOnAll bool `json:"stop_all"` | |
27 | FollowRedirects bool `json:"follow_redirects"` | |
28 | AutoCalibration bool `json:"autocalibration"` | |
29 | AutoCalibrationStrings []string `json:"autocalibration_strings"` | |
30 | Timeout int `json:"timeout"` | |
35 | OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"` | |
31 | 36 | ProgressFrequency int `json:"-"` |
32 | Delay optRange `json:"delay"` | |
33 | Filters map[string]FilterProvider `json:"filters"` | |
34 | Matchers map[string]FilterProvider `json:"matchers"` | |
35 | Threads int `json:"threads"` | |
36 | Context context.Context `json:"-"` | |
37 | 37 | ProxyURL string `json:"proxyurl"` |
38 | ReplayProxyURL string `json:"replayproxyurl"` | |
39 | CommandLine string `json:"cmdline"` | |
40 | Verbose bool `json:"verbose"` | |
41 | MaxTime int `json:"maxtime"` | |
42 | MaxTimeJob int `json:"maxtime_job"` | |
38 | Quiet bool `json:"quiet"` | |
39 | Rate int64 `json:"rate"` | |
43 | 40 | Recursion bool `json:"recursion"` |
44 | 41 | RecursionDepth int `json:"recursion_depth"` |
42 | ReplayProxyURL string `json:"replayproxyurl"` | |
43 | StopOn403 bool `json:"stop_403"` | |
44 | StopOnAll bool `json:"stop_all"` | |
45 | StopOnErrors bool `json:"stop_errors"` | |
46 | Threads int `json:"threads"` | |
47 | Timeout int `json:"timeout"` | |
48 | Url string `json:"url"` | |
49 | Verbose bool `json:"verbose"` | |
45 | 50 | } |
46 | 51 | |
47 | 52 | type InputProviderConfig struct { |
50 | 55 | Value string `json:"value"` |
51 | 56 | } |
52 | 57 | |
53 | func NewConfig(ctx context.Context) Config { | |
58 | func NewConfig(ctx context.Context, cancel context.CancelFunc) Config { | |
54 | 59 | var conf Config |
60 | conf.AutoCalibrationStrings = make([]string, 0) | |
61 | conf.CommandKeywords = make([]string, 0) | |
55 | 62 | conf.Context = ctx |
63 | conf.Cancel = cancel | |
64 | conf.Data = "" | |
65 | conf.Delay = optRange{0, 0, false, false} | |
66 | conf.DirSearchCompat = false | |
67 | conf.Extensions = make([]string, 0) | |
68 | conf.Filters = make(map[string]FilterProvider) | |
69 | conf.FollowRedirects = false | |
56 | 70 | conf.Headers = make(map[string]string) |
57 | conf.Method = "GET" | |
58 | conf.Url = "" | |
59 | conf.Data = "" | |
60 | conf.Quiet = false | |
61 | 71 | conf.IgnoreWordlistComments = false |
62 | conf.StopOn403 = false | |
63 | conf.StopOnErrors = false | |
64 | conf.StopOnAll = false | |
65 | conf.FollowRedirects = false | |
72 | conf.InputMode = "clusterbomb" | |
73 | conf.InputNum = 0 | |
74 | conf.InputShell = "" | |
66 | 75 | conf.InputProviders = make([]InputProviderConfig, 0) |
67 | conf.CommandKeywords = make([]string, 0) | |
68 | conf.AutoCalibrationStrings = make([]string, 0) | |
69 | conf.InputNum = 0 | |
70 | conf.InputMode = "clusterbomb" | |
71 | conf.ProxyURL = "" | |
72 | conf.Filters = make(map[string]FilterProvider) | |
73 | 76 | conf.Matchers = make(map[string]FilterProvider) |
74 | conf.Delay = optRange{0, 0, false, false} | |
75 | conf.Extensions = make([]string, 0) | |
76 | conf.Timeout = 10 | |
77 | // Progress update frequency, in milliseconds | |
78 | conf.ProgressFrequency = 100 | |
79 | conf.DirSearchCompat = false | |
80 | conf.Verbose = false | |
81 | 77 | conf.MaxTime = 0 |
82 | 78 | conf.MaxTimeJob = 0 |
79 | conf.Method = "GET" | |
80 | conf.ProgressFrequency = 125 | |
81 | conf.ProxyURL = "" | |
82 | conf.Quiet = false | |
83 | conf.Rate = 0 | |
83 | 84 | conf.Recursion = false |
84 | 85 | conf.RecursionDepth = 0 |
86 | conf.StopOn403 = false | |
87 | conf.StopOnAll = false | |
88 | conf.StopOnErrors = false | |
89 | conf.Timeout = 10 | |
90 | conf.Url = "" | |
91 | conf.Verbose = false | |
85 | 92 | return conf |
86 | 93 | } |
94 | ||
// SetContext attaches the given context and its cancel function to the
// Config, replacing whatever pair was previously stored.
func (c *Config) SetContext(ctx context.Context, cancel context.CancelFunc) {
	c.Context = ctx
	c.Cancel = cancel
}
1 | 1 | |
2 | 2 | const ( |
3 | 3 | //VERSION holds the current version number |
4 | VERSION = "1.1.0" | |
4 | VERSION = "1.2.1" | |
5 | 5 | ) |
34 | 34 | |
35 | 35 | //OutputProvider is responsible of providing output from the RunnerProvider |
36 | 36 | type OutputProvider interface { |
37 | Banner() error | |
37 | Banner() | |
38 | 38 | Finalize() error |
39 | 39 | Progress(status Progress) |
40 | 40 | Info(infostring string) |
27 | 27 | Count403 int |
28 | 28 | Count429 int |
29 | 29 | Error string |
30 | Rate *RateThrottle | |
30 | 31 | startTime time.Time |
31 | 32 | startTimeJob time.Time |
32 | 33 | queuejobs []QueueJob |
39 | 40 | depth int |
40 | 41 | } |
41 | 42 | |
42 | func NewJob(conf *Config) Job { | |
43 | func NewJob(conf *Config) *Job { | |
43 | 44 | var j Job |
45 | j.Config = conf | |
44 | 46 | j.Counter = 0 |
45 | 47 | j.ErrorCounter = 0 |
46 | 48 | j.SpuriousErrorCounter = 0 |
49 | 51 | j.queuepos = 0 |
50 | 52 | j.queuejobs = make([]QueueJob, 0) |
51 | 53 | j.currentDepth = 0 |
52 | return j | |
54 | j.Rate = NewRateThrottle(conf) | |
55 | return &j | |
53 | 56 | } |
54 | 57 | |
55 | 58 | //incError increments the error counter |
115 | 118 | j.startExecution() |
116 | 119 | } |
117 | 120 | |
118 | j.Output.Finalize() | |
121 | err := j.Output.Finalize() | |
122 | if err != nil { | |
123 | j.Output.Error(err.Error()) | |
124 | } | |
119 | 125 | } |
120 | 126 | |
121 | 127 | func (j *Job) jobsInQueue() bool { |
122 | if j.queuepos < len(j.queuejobs) { | |
123 | return true | |
124 | } | |
125 | return false | |
128 | return j.queuepos < len(j.queuejobs) | |
126 | 129 | } |
127 | 130 | |
128 | 131 | func (j *Job) prepareQueueJob() { |
131 | 134 | j.queuepos += 1 |
132 | 135 | } |
133 | 136 | |
137 | func (j *Job) sleepIfNeeded() { | |
138 | var sleepDuration time.Duration | |
139 | if j.Config.Delay.HasDelay { | |
140 | if j.Config.Delay.IsRange { | |
141 | sTime := j.Config.Delay.Min + rand.Float64()*(j.Config.Delay.Max-j.Config.Delay.Min) | |
142 | sleepDuration = time.Duration(sTime * 1000) | |
143 | } else { | |
144 | sleepDuration = time.Duration(j.Config.Delay.Min * 1000) | |
145 | } | |
146 | sleepDuration = sleepDuration * time.Millisecond | |
147 | } | |
148 | // makes the sleep cancellable by context | |
149 | select { | |
150 | case <-j.Config.Context.Done(): // cancelled | |
151 | case <-time.After(sleepDuration): // sleep | |
152 | } | |
153 | } | |
154 | ||
134 | 155 | func (j *Job) startExecution() { |
135 | 156 | var wg sync.WaitGroup |
136 | 157 | wg.Add(1) |
137 | go j.runProgress(&wg) | |
158 | go j.runBackgroundTasks(&wg) | |
138 | 159 | //Limiter blocks after reaching the buffer, ensuring limited concurrency |
139 | 160 | limiter := make(chan bool, j.Config.Threads) |
140 | 161 | |
146 | 167 | defer j.Output.Warning(j.Error) |
147 | 168 | break |
148 | 169 | } |
149 | ||
150 | 170 | limiter <- true |
151 | 171 | nextInput := j.Input.Value() |
152 | 172 | nextPosition := j.Input.Position() |
153 | 173 | wg.Add(1) |
154 | 174 | j.Counter++ |
175 | ||
155 | 176 | go func() { |
156 | 177 | defer func() { <-limiter }() |
157 | 178 | defer wg.Done() |
179 | threadStart := time.Now() | |
158 | 180 | j.runTask(nextInput, nextPosition, false) |
159 | if j.Config.Delay.HasDelay { | |
160 | var sleepDurationMS time.Duration | |
161 | if j.Config.Delay.IsRange { | |
162 | sTime := j.Config.Delay.Min + rand.Float64()*(j.Config.Delay.Max-j.Config.Delay.Min) | |
163 | sleepDurationMS = time.Duration(sTime * 1000) | |
164 | } else { | |
165 | sleepDurationMS = time.Duration(j.Config.Delay.Min * 1000) | |
166 | } | |
167 | time.Sleep(sleepDurationMS * time.Millisecond) | |
168 | } | |
181 | j.sleepIfNeeded() | |
182 | j.Rate.Throttle() | |
183 | threadEnd := time.Now() | |
184 | j.Rate.Tick(threadStart, threadEnd) | |
169 | 185 | }() |
170 | 186 | |
171 | 187 | if !j.RunningJob { |
175 | 191 | } |
176 | 192 | wg.Wait() |
177 | 193 | j.updateProgress() |
178 | return | |
179 | 194 | } |
180 | 195 | |
181 | 196 | func (j *Job) interruptMonitor() { |
182 | 197 | sigChan := make(chan os.Signal, 2) |
183 | 198 | signal.Notify(sigChan, os.Interrupt, syscall.SIGTERM) |
184 | 199 | go func() { |
185 | for _ = range sigChan { | |
200 | for range sigChan { | |
186 | 201 | j.Error = "Caught keyboard interrupt (Ctrl-C)\n" |
187 | 202 | j.Stop() |
188 | 203 | } |
189 | 204 | }() |
190 | 205 | } |
191 | 206 | |
192 | func (j *Job) runProgress(wg *sync.WaitGroup) { | |
207 | func (j *Job) runBackgroundTasks(wg *sync.WaitGroup) { | |
193 | 208 | defer wg.Done() |
194 | 209 | totalProgress := j.Input.Total() |
195 | 210 | for j.Counter <= totalProgress { |
197 | 212 | if !j.Running { |
198 | 213 | break |
199 | 214 | } |
200 | ||
201 | 215 | j.updateProgress() |
202 | 216 | if j.Counter == totalProgress { |
203 | 217 | return |
204 | 218 | } |
205 | ||
206 | 219 | if !j.RunningJob { |
207 | 220 | return |
208 | 221 | } |
209 | ||
222 | j.Rate.Adjust() | |
210 | 223 | time.Sleep(time.Millisecond * time.Duration(j.Config.ProgressFrequency)) |
211 | 224 | } |
212 | 225 | } |
216 | 229 | StartedAt: j.startTimeJob, |
217 | 230 | ReqCount: j.Counter, |
218 | 231 | ReqTotal: j.Input.Total(), |
232 | ReqSec: j.Rate.CurrentRate(), | |
219 | 233 | QueuePos: j.queuepos, |
220 | 234 | QueueTotal: len(j.queuejobs), |
221 | 235 | ErrorCount: j.ErrorCounter, |
280 | 294 | } |
281 | 295 | if j.Config.StopOnAll { |
282 | 296 | // increment 429 counter if the response code is 429 |
283 | if j.Config.StopOnAll { | |
284 | if resp.StatusCode == 429 { | |
285 | j.inc429() | |
286 | } | |
297 | if resp.StatusCode == 429 { | |
298 | j.inc429() | |
287 | 299 | } |
288 | 300 | } |
289 | 301 | if j.isMatch(resp) { |
307 | 319 | if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 { |
308 | 320 | j.handleRecursionJob(resp) |
309 | 321 | } |
310 | return | |
311 | 322 | } |
312 | 323 | |
313 | 324 | //handleRecursionJob adds a new recursion job to the job queue if a new directory is found |
342 | 353 | |
343 | 354 | results := make([]Response, 0) |
344 | 355 | for _, input := range cInputs { |
345 | inputs := make(map[string][]byte, 0) | |
356 | inputs := make(map[string][]byte, len(j.Config.InputProviders)) | |
346 | 357 | for _, v := range j.Config.InputProviders { |
347 | 358 | inputs[v.Keyword] = []byte(input) |
348 | 359 | } |
395 | 406 | |
396 | 407 | // Check for runtime of entire process |
397 | 408 | if j.Config.MaxTime > 0 { |
398 | dur := time.Now().Sub(j.startTime) | |
409 | dur := time.Since(j.startTime) | |
399 | 410 | runningSecs := int(dur / time.Second) |
400 | 411 | if runningSecs >= j.Config.MaxTime { |
401 | 412 | j.Error = "Maximum running time for entire process reached, exiting." |
405 | 416 | |
406 | 417 | // Check for runtime of current job |
407 | 418 | if j.Config.MaxTimeJob > 0 { |
408 | dur := time.Now().Sub(j.startTimeJob) | |
419 | dur := time.Since(j.startTimeJob) | |
409 | 420 | runningSecs := int(dur / time.Second) |
410 | 421 | if runningSecs >= j.Config.MaxTimeJob { |
411 | 422 | j.Error = "Maximum running time for this job reached, continuing with next job if one exists." |
418 | 429 | //Stop the execution of the Job |
419 | 430 | func (j *Job) Stop() { |
420 | 431 | j.Running = false |
421 | return | |
432 | j.Config.Cancel() | |
422 | 433 | } |
423 | 434 | |
424 | 435 | //Stop current, resume to next |
425 | 436 | func (j *Job) Next() { |
426 | 437 | j.RunningJob = false |
427 | return | |
428 | } | |
438 | } |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "bufio" | |
4 | "context" | |
5 | "fmt" | |
6 | "io/ioutil" | |
7 | "net/textproto" | |
8 | "net/url" | |
9 | "os" | |
10 | "path/filepath" | |
11 | "runtime" | |
12 | "strconv" | |
13 | "strings" | |
14 | ||
15 | "github.com/pelletier/go-toml" | |
16 | ) | |
17 | ||
//ConfigOptions represents all of the user-configurable options, grouped by
//category. It can be populated from a TOML configuration file (see ReadConfig)
//and is turned into a runtime Config by ConfigFromOptions.
type ConfigOptions struct {
	Filter  FilterOptions  // response filter specifications
	General GeneralOptions // general runtime behaviour
	HTTP    HTTPOptions    // outgoing HTTP request options
	Input   InputOptions   // wordlist / input source options
	Matcher MatcherOptions // response matcher specifications
	Output  OutputOptions  // result output options
}

//HTTPOptions contains all options related to the outgoing HTTP requests.
type HTTPOptions struct {
	Cookies         []string // cookie strings, joined into a single "Cookie:" header
	Data            string   // raw request body (POST data)
	FollowRedirects bool     // follow HTTP redirects
	Headers         []string // raw "Name: value" header strings
	IgnoreBody      bool     // do not process the response body
	Method          string   // HTTP method; empty means GET (or POST when Data is set)
	ProxyURL        string   // proxy to route requests through (-x)
	Recursion       bool     // recurse into discovered directories
	RecursionDepth  int      // maximum recursion depth
	ReplayProxyURL  string   // proxy used to replay matched requests (-replay-proxy)
	Timeout         int      // request timeout (default 10)
	URL             string   // target URL (-u)
}

//GeneralOptions contains runtime behaviour options that are not tied to a
//single request or input source.
type GeneralOptions struct {
	AutoCalibration        bool     // enable automatic filter calibration (-ac)
	AutoCalibrationStrings []string // custom calibration inputs (-acc); setting these implies AutoCalibration
	Colors                 bool     // colorize terminal output
	ConfigFile             string   `toml:"-"` // never persisted to TOML
	Delay                  string   // delay between requests: single float ("0.1") or dash range ("0.1-0.8")
	MaxTime                int      // maximum run time in seconds for the whole process (0 = unlimited)
	MaxTimeJob             int      // maximum run time in seconds for a single job (0 = unlimited)
	Quiet                  bool
	Rate                   int // target request rate; values < 0 are treated as 0 (unlimited)
	ShowVersion            bool `toml:"-"` // never persisted to TOML
	StopOn403              bool
	StopOnAll              bool
	StopOnErrors           bool
	Threads                int // number of concurrent worker threads (default 40)
	Verbose                bool
}

//InputOptions contains options for the fuzzing input sources.
type InputOptions struct {
	DirSearchCompat        bool
	Extensions             string // comma-separated list of extensions
	IgnoreWordlistComments bool
	InputMode              string // multi-wordlist combination mode (default "clusterbomb")
	InputNum               int    // number of inputs to generate (default 100)
	InputShell             string
	Inputcommands          []string // input commands, "cmd" or "cmd:KEYWORD"
	Request                string   // path to a raw HTTP request file
	RequestProto           string   // protocol used when building the URL from a raw request (default "https")
	Wordlists              []string // wordlists, "path" or "path:KEYWORD"
}

//OutputOptions contains options for writing results to disk.
type OutputOptions struct {
	DebugLog              string
	OutputDirectory       string
	OutputFile            string
	OutputFormat          string // one of: all, json, ejson, html, md, csv, ecsv (default "json")
	OutputCreateEmptyFile bool
}

//FilterOptions holds the response filter value strings; each non-empty field
//is turned into a filter by the filter package.
type FilterOptions struct {
	Lines  string
	Regexp string
	Size   string
	Status string
	Words  string
}

//MatcherOptions holds the response matcher value strings; each non-empty field
//is turned into a matcher by the filter package.
type MatcherOptions struct {
	Lines  string
	Regexp string
	Size   string
	Status string
	Words  string
}

//NewConfigOptions returns a newly created ConfigOptions struct with default values.
//Fields whose default equals the Go zero value (empty string, false, 0) are left
//untouched; only the non-zero defaults are assigned explicitly.
func NewConfigOptions() *ConfigOptions {
	c := &ConfigOptions{}
	c.General.Threads = 40
	c.HTTP.Timeout = 10
	c.Input.InputMode = "clusterbomb"
	c.Input.InputNum = 100
	c.Input.RequestProto = "https"
	c.Matcher.Status = "200,204,301,302,307,401,403,405"
	c.Output.OutputFormat = "json"
	return c
}
147 | ||
148 | //ConfigFromOptions parses the values in ConfigOptions struct, ensures that the values are sane, | |
149 | // and creates a Config struct out of them. | |
150 | func ConfigFromOptions(parseOpts *ConfigOptions, ctx context.Context, cancel context.CancelFunc) (*Config, error) { | |
151 | //TODO: refactor in a proper flag library that can handle things like required flags | |
152 | errs := NewMultierror() | |
153 | conf := NewConfig(ctx, cancel) | |
154 | ||
155 | var err error | |
156 | var err2 error | |
157 | if len(parseOpts.HTTP.URL) == 0 && parseOpts.Input.Request == "" { | |
158 | errs.Add(fmt.Errorf("-u flag or -request flag is required")) | |
159 | } | |
160 | ||
161 | // prepare extensions | |
162 | if parseOpts.Input.Extensions != "" { | |
163 | extensions := strings.Split(parseOpts.Input.Extensions, ",") | |
164 | conf.Extensions = extensions | |
165 | } | |
166 | ||
167 | // Convert cookies to a header | |
168 | if len(parseOpts.HTTP.Cookies) > 0 { | |
169 | parseOpts.HTTP.Headers = append(parseOpts.HTTP.Headers, "Cookie: "+strings.Join(parseOpts.HTTP.Cookies, "; ")) | |
170 | } | |
171 | ||
172 | //Prepare inputproviders | |
173 | for _, v := range parseOpts.Input.Wordlists { | |
174 | var wl []string | |
175 | if runtime.GOOS == "windows" { | |
176 | // Try to ensure that Windows file paths like C:\path\to\wordlist.txt:KEYWORD are treated properly | |
177 | if FileExists(v) { | |
178 | // The wordlist was supplied without a keyword parameter | |
179 | wl = []string{v} | |
180 | } else { | |
181 | filepart := v | |
182 | if strings.Contains(filepart, ":") { | |
183 | filepart = v[:strings.LastIndex(filepart, ":")] | |
184 | } | |
185 | ||
186 | if FileExists(filepart) { | |
187 | wl = []string{filepart, v[strings.LastIndex(v, ":")+1:]} | |
188 | } else { | |
189 | // The file was not found. Use full wordlist parameter value for more concise error message down the line | |
190 | wl = []string{v} | |
191 | } | |
192 | } | |
193 | } else { | |
194 | wl = strings.SplitN(v, ":", 2) | |
195 | } | |
196 | if len(wl) == 2 { | |
197 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
198 | Name: "wordlist", | |
199 | Value: wl[0], | |
200 | Keyword: wl[1], | |
201 | }) | |
202 | } else { | |
203 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
204 | Name: "wordlist", | |
205 | Value: wl[0], | |
206 | Keyword: "FUZZ", | |
207 | }) | |
208 | } | |
209 | } | |
210 | for _, v := range parseOpts.Input.Inputcommands { | |
211 | ic := strings.SplitN(v, ":", 2) | |
212 | if len(ic) == 2 { | |
213 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
214 | Name: "command", | |
215 | Value: ic[0], | |
216 | Keyword: ic[1], | |
217 | }) | |
218 | conf.CommandKeywords = append(conf.CommandKeywords, ic[0]) | |
219 | } else { | |
220 | conf.InputProviders = append(conf.InputProviders, InputProviderConfig{ | |
221 | Name: "command", | |
222 | Value: ic[0], | |
223 | Keyword: "FUZZ", | |
224 | }) | |
225 | conf.CommandKeywords = append(conf.CommandKeywords, "FUZZ") | |
226 | } | |
227 | } | |
228 | ||
229 | if len(conf.InputProviders) == 0 { | |
230 | errs.Add(fmt.Errorf("Either -w or --input-cmd flag is required")) | |
231 | } | |
232 | ||
233 | // Prepare the request using body | |
234 | if parseOpts.Input.Request != "" { | |
235 | err := parseRawRequest(parseOpts, &conf) | |
236 | if err != nil { | |
237 | errmsg := fmt.Sprintf("Could not parse raw request: %s", err) | |
238 | errs.Add(fmt.Errorf(errmsg)) | |
239 | } | |
240 | } | |
241 | ||
242 | //Prepare URL | |
243 | if parseOpts.HTTP.URL != "" { | |
244 | conf.Url = parseOpts.HTTP.URL | |
245 | } | |
246 | ||
247 | //Prepare headers and make canonical | |
248 | for _, v := range parseOpts.HTTP.Headers { | |
249 | hs := strings.SplitN(v, ":", 2) | |
250 | if len(hs) == 2 { | |
251 | // trim and make canonical | |
252 | // except if used in custom defined header | |
253 | var CanonicalNeeded = true | |
254 | for _, a := range conf.CommandKeywords { | |
255 | if a == hs[0] { | |
256 | CanonicalNeeded = false | |
257 | } | |
258 | } | |
259 | // check if part of InputProviders | |
260 | if CanonicalNeeded { | |
261 | for _, b := range conf.InputProviders { | |
262 | if b.Keyword == hs[0] { | |
263 | CanonicalNeeded = false | |
264 | } | |
265 | } | |
266 | } | |
267 | if CanonicalNeeded { | |
268 | var CanonicalHeader = textproto.CanonicalMIMEHeaderKey(strings.TrimSpace(hs[0])) | |
269 | conf.Headers[CanonicalHeader] = strings.TrimSpace(hs[1]) | |
270 | } else { | |
271 | conf.Headers[strings.TrimSpace(hs[0])] = strings.TrimSpace(hs[1]) | |
272 | } | |
273 | } else { | |
274 | errs.Add(fmt.Errorf("Header defined by -H needs to have a value. \":\" should be used as a separator")) | |
275 | } | |
276 | } | |
277 | ||
278 | //Prepare delay | |
279 | d := strings.Split(parseOpts.General.Delay, "-") | |
280 | if len(d) > 2 { | |
281 | errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")) | |
282 | } else if len(d) == 2 { | |
283 | conf.Delay.IsRange = true | |
284 | conf.Delay.HasDelay = true | |
285 | conf.Delay.Min, err = strconv.ParseFloat(d[0], 64) | |
286 | conf.Delay.Max, err2 = strconv.ParseFloat(d[1], 64) | |
287 | if err != nil || err2 != nil { | |
288 | errs.Add(fmt.Errorf("Delay range min and max values need to be valid floats. For example: 0.1-0.5")) | |
289 | } | |
290 | } else if len(parseOpts.General.Delay) > 0 { | |
291 | conf.Delay.IsRange = false | |
292 | conf.Delay.HasDelay = true | |
293 | conf.Delay.Min, err = strconv.ParseFloat(parseOpts.General.Delay, 64) | |
294 | if err != nil { | |
295 | errs.Add(fmt.Errorf("Delay needs to be either a single float: \"0.1\" or a range of floats, delimited by dash: \"0.1-0.8\"")) | |
296 | } | |
297 | } | |
298 | ||
299 | // Verify proxy url format | |
300 | if len(parseOpts.HTTP.ProxyURL) > 0 { | |
301 | _, err := url.Parse(parseOpts.HTTP.ProxyURL) | |
302 | if err != nil { | |
303 | errs.Add(fmt.Errorf("Bad proxy url (-x) format: %s", err)) | |
304 | } else { | |
305 | conf.ProxyURL = parseOpts.HTTP.ProxyURL | |
306 | } | |
307 | } | |
308 | ||
309 | // Verify replayproxy url format | |
310 | if len(parseOpts.HTTP.ReplayProxyURL) > 0 { | |
311 | _, err := url.Parse(parseOpts.HTTP.ReplayProxyURL) | |
312 | if err != nil { | |
313 | errs.Add(fmt.Errorf("Bad replay-proxy url (-replay-proxy) format: %s", err)) | |
314 | } else { | |
315 | conf.ReplayProxyURL = parseOpts.HTTP.ReplayProxyURL | |
316 | } | |
317 | } | |
318 | ||
319 | //Check the output file format option | |
320 | if parseOpts.Output.OutputFile != "" { | |
321 | //No need to check / error out if output file isn't defined | |
322 | outputFormats := []string{"all", "json", "ejson", "html", "md", "csv", "ecsv"} | |
323 | found := false | |
324 | for _, f := range outputFormats { | |
325 | if f == parseOpts.Output.OutputFormat { | |
326 | conf.OutputFormat = f | |
327 | found = true | |
328 | } | |
329 | } | |
330 | if !found { | |
331 | errs.Add(fmt.Errorf("Unknown output file format (-of): %s", parseOpts.Output.OutputFormat)) | |
332 | } | |
333 | } | |
334 | ||
335 | // Auto-calibration strings | |
336 | if len(parseOpts.General.AutoCalibrationStrings) > 0 { | |
337 | conf.AutoCalibrationStrings = parseOpts.General.AutoCalibrationStrings | |
338 | } | |
339 | // Using -acc implies -ac | |
340 | if len(parseOpts.General.AutoCalibrationStrings) > 0 { | |
341 | conf.AutoCalibration = true | |
342 | } | |
343 | ||
344 | if parseOpts.General.Rate < 0 { | |
345 | conf.Rate = 0 | |
346 | } else { | |
347 | conf.Rate = int64(parseOpts.General.Rate) | |
348 | } | |
349 | ||
350 | if conf.Method == "" { | |
351 | if parseOpts.HTTP.Method == "" { | |
352 | // Only set if defined on command line, because we might be reparsing the CLI after | |
353 | // populating it through raw request in the first iteration | |
354 | conf.Method = "GET" | |
355 | } else { | |
356 | conf.Method = parseOpts.HTTP.Method | |
357 | } | |
358 | } else { | |
359 | if parseOpts.HTTP.Method != "" { | |
360 | // Method overridden in CLI | |
361 | conf.Method = parseOpts.HTTP.Method | |
362 | } | |
363 | } | |
364 | ||
365 | if parseOpts.HTTP.Data != "" { | |
366 | // Only set if defined on command line, because we might be reparsing the CLI after | |
367 | // populating it through raw request in the first iteration | |
368 | conf.Data = parseOpts.HTTP.Data | |
369 | } | |
370 | ||
371 | // Common stuff | |
372 | conf.IgnoreWordlistComments = parseOpts.Input.IgnoreWordlistComments | |
373 | conf.DirSearchCompat = parseOpts.Input.DirSearchCompat | |
374 | conf.Colors = parseOpts.General.Colors | |
375 | conf.InputNum = parseOpts.Input.InputNum | |
376 | conf.InputMode = parseOpts.Input.InputMode | |
377 | conf.InputShell = parseOpts.Input.InputShell | |
378 | conf.OutputFile = parseOpts.Output.OutputFile | |
379 | conf.OutputDirectory = parseOpts.Output.OutputDirectory | |
380 | conf.OutputCreateEmptyFile = parseOpts.Output.OutputCreateEmptyFile | |
381 | conf.IgnoreBody = parseOpts.HTTP.IgnoreBody | |
382 | conf.Quiet = parseOpts.General.Quiet | |
383 | conf.StopOn403 = parseOpts.General.StopOn403 | |
384 | conf.StopOnAll = parseOpts.General.StopOnAll | |
385 | conf.StopOnErrors = parseOpts.General.StopOnErrors | |
386 | conf.FollowRedirects = parseOpts.HTTP.FollowRedirects | |
387 | conf.Recursion = parseOpts.HTTP.Recursion | |
388 | conf.RecursionDepth = parseOpts.HTTP.RecursionDepth | |
389 | conf.AutoCalibration = parseOpts.General.AutoCalibration | |
390 | conf.Threads = parseOpts.General.Threads | |
391 | conf.Timeout = parseOpts.HTTP.Timeout | |
392 | conf.MaxTime = parseOpts.General.MaxTime | |
393 | conf.MaxTimeJob = parseOpts.General.MaxTimeJob | |
394 | conf.Verbose = parseOpts.General.Verbose | |
395 | ||
396 | // Handle copy as curl situation where POST method is implied by --data flag. If method is set to anything but GET, NOOP | |
397 | if len(conf.Data) > 0 && | |
398 | conf.Method == "GET" && | |
399 | //don't modify the method automatically if a request file is being used as input | |
400 | len(parseOpts.Input.Request) == 0 { | |
401 | ||
402 | conf.Method = "POST" | |
403 | } | |
404 | ||
405 | conf.CommandLine = strings.Join(os.Args, " ") | |
406 | ||
407 | for _, provider := range conf.InputProviders { | |
408 | if !keywordPresent(provider.Keyword, &conf) { | |
409 | errmsg := fmt.Sprintf("Keyword %s defined, but not found in headers, method, URL or POST data.", provider.Keyword) | |
410 | errs.Add(fmt.Errorf(errmsg)) | |
411 | } | |
412 | } | |
413 | ||
414 | // Do checks for recursion mode | |
415 | if parseOpts.HTTP.Recursion { | |
416 | if !strings.HasSuffix(conf.Url, "FUZZ") { | |
417 | errmsg := "When using -recursion the URL (-u) must end with FUZZ keyword." | |
418 | errs.Add(fmt.Errorf(errmsg)) | |
419 | } | |
420 | } | |
421 | return &conf, errs.ErrorOrNil() | |
422 | } | |
423 | ||
//parseRawRequest reads a raw HTTP request from the file named by
//parseOpts.Input.Request and populates conf.Method, conf.Headers, conf.Url
//and conf.Data from it. Returns an error if the file cannot be opened or the
//request is malformed.
func parseRawRequest(parseOpts *ConfigOptions, conf *Config) error {
	file, err := os.Open(parseOpts.Input.Request)
	if err != nil {
		return fmt.Errorf("could not open request file: %s", err)
	}
	defer file.Close()

	r := bufio.NewReader(file)

	// First line is the request line, e.g. "GET /path HTTP/1.1";
	// it must have at least three space-separated parts.
	s, err := r.ReadString('\n')
	if err != nil {
		return fmt.Errorf("could not read request: %s", err)
	}
	parts := strings.Split(s, " ")
	if len(parts) < 3 {
		return fmt.Errorf("malformed request supplied")
	}
	// Set the request Method
	conf.Method = parts[0]

	// Read the header block: one "Name: value" per line, terminated by the
	// first empty line (or by a read error).
	for {
		line, err := r.ReadString('\n')
		line = strings.TrimSpace(line)

		if err != nil || line == "" {
			break
		}

		p := strings.SplitN(line, ":", 2)
		if len(p) != 2 {
			// Not a "Name: value" pair; skip it
			continue
		}

		// Content-Length is skipped (presumably because the body length
		// changes once the fuzzing keyword is substituted — confirm)
		if strings.EqualFold(p[0], "content-length") {
			continue
		}

		conf.Headers[strings.TrimSpace(p[0])] = strings.TrimSpace(p[1])
	}

	// Handle case with the full http url in path. In that case,
	// ignore any host header that we encounter and use the path as request URL
	if strings.HasPrefix(parts[1], "http") {
		parsed, err := url.Parse(parts[1])
		if err != nil {
			return fmt.Errorf("could not parse request URL: %s", err)
		}
		conf.Url = parts[1]
		conf.Headers["Host"] = parsed.Host
	} else {
		// Build the request URL from the request, using the configured
		// protocol (default "https") and the Host header parsed above
		conf.Url = parseOpts.Input.RequestProto + "://" + conf.Headers["Host"] + parts[1]
	}

	// Set the request body: everything remaining after the header block
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return fmt.Errorf("could not read request body: %s", err)
	}
	conf.Data = string(b)

	return nil
}
487 | ||
488 | func keywordPresent(keyword string, conf *Config) bool { | |
489 | //Search for keyword from HTTP method, URL and POST data too | |
490 | if strings.Contains(conf.Method, keyword) { | |
491 | return true | |
492 | } | |
493 | if strings.Contains(conf.Url, keyword) { | |
494 | return true | |
495 | } | |
496 | if strings.Contains(conf.Data, keyword) { | |
497 | return true | |
498 | } | |
499 | for k, v := range conf.Headers { | |
500 | if strings.Contains(k, keyword) { | |
501 | return true | |
502 | } | |
503 | if strings.Contains(v, keyword) { | |
504 | return true | |
505 | } | |
506 | } | |
507 | return false | |
508 | } | |
509 | ||
510 | func ReadConfig(configFile string) (*ConfigOptions, error) { | |
511 | conf := NewConfigOptions() | |
512 | configData, err := ioutil.ReadFile(configFile) | |
513 | if err == nil { | |
514 | err = toml.Unmarshal(configData, conf) | |
515 | } | |
516 | return conf, err | |
517 | } | |
518 | ||
519 | func ReadDefaultConfig() (*ConfigOptions, error) { | |
520 | userhome, err := os.UserHomeDir() | |
521 | if err != nil { | |
522 | return NewConfigOptions(), err | |
523 | } | |
524 | defaultconf := filepath.Join(userhome, ".ffufrc") | |
525 | return ReadConfig(defaultconf) | |
526 | } |
7 | 7 | StartedAt time.Time |
8 | 8 | ReqCount int |
9 | 9 | ReqTotal int |
10 | ReqSec int64 | |
10 | 11 | QueuePos int |
11 | 12 | QueueTotal int |
12 | 13 | ErrorCount int |
0 | package ffuf | |
1 | ||
2 | import ( | |
3 | "container/ring" | |
4 | "sync" | |
5 | "time" | |
6 | ) | |
7 | ||
//RateThrottle records recent request durations and steers the effective
//request rate towards Config.Rate by sleeping between requests (see Throttle
//and Adjust).
type RateThrottle struct {
	rateCounter       *ring.Ring // circular list of recent request durations in nanoseconds, one slot per thread
	RateAdjustment    float64    // fraction of one second to sleep between requests; 0 means no throttling
	RateAdjustmentPos int        // number of ticks recorded since the last adjustment
	Config            *Config
	RateMutex         sync.Mutex // guards rateCounter, RateAdjustmentPos and lastAdjustment
	lastAdjustment    time.Time  // time of the last adjustment; ticks that started before it are discarded
}
16 | ||
17 | func NewRateThrottle(conf *Config) *RateThrottle { | |
18 | return &RateThrottle{ | |
19 | rateCounter: ring.New(conf.Threads), | |
20 | RateAdjustment: 0, | |
21 | RateAdjustmentPos: 0, | |
22 | Config: conf, | |
23 | lastAdjustment: time.Now(), | |
24 | } | |
25 | } | |
26 | ||
27 | //CurrentRate calculates requests/second value from circular list of rate | |
28 | func (r *RateThrottle) CurrentRate() int64 { | |
29 | n := r.rateCounter.Len() | |
30 | var total int64 | |
31 | total = 0 | |
32 | r.rateCounter.Do(func(r interface{}) { | |
33 | switch val := r.(type) { | |
34 | case int64: | |
35 | total += val | |
36 | default: | |
37 | // circular list entry was nil, happens when < number_of_threads responses have been recorded. | |
38 | // the total number of entries is less than length of the list | |
39 | n -= 1 | |
40 | } | |
41 | }) | |
42 | if total > 0 { | |
43 | avg := total / int64(n) | |
44 | return time.Second.Nanoseconds() * int64(r.Config.Threads) / avg | |
45 | } | |
46 | ||
47 | return 0 | |
48 | } | |
49 | ||
50 | //rateTick adds a new duration measurement tick to rate counter | |
51 | func (r *RateThrottle) Tick(start, end time.Time) { | |
52 | if start.Before(r.lastAdjustment) { | |
53 | // We don't want to store data for threads started pre-adjustment | |
54 | return | |
55 | } | |
56 | r.RateMutex.Lock() | |
57 | defer r.RateMutex.Unlock() | |
58 | dur := end.Sub(start).Nanoseconds() | |
59 | r.rateCounter = r.rateCounter.Next() | |
60 | r.RateAdjustmentPos += 1 | |
61 | r.rateCounter.Value = dur | |
62 | } | |
63 | ||
64 | func (r *RateThrottle) Throttle() { | |
65 | if r.Config.Rate == 0 { | |
66 | // No throttling | |
67 | return | |
68 | } | |
69 | if r.RateAdjustment > 0.0 { | |
70 | delayNS := float64(time.Second.Nanoseconds()) * r.RateAdjustment | |
71 | time.Sleep(time.Nanosecond * time.Duration(delayNS)) | |
72 | } | |
73 | } | |
74 | ||
75 | //Adjust changes the RateAdjustment value, which is multiplier of second to pause between requests in a thread | |
76 | func (r *RateThrottle) Adjust() { | |
77 | if r.RateAdjustmentPos < r.Config.Threads { | |
78 | // Do not adjust if we don't have enough data yet | |
79 | return | |
80 | } | |
81 | r.RateMutex.Lock() | |
82 | defer r.RateMutex.Unlock() | |
83 | currentRate := r.CurrentRate() | |
84 | ||
85 | if r.RateAdjustment == 0.0 { | |
86 | if currentRate > r.Config.Rate { | |
87 | // If we're adjusting the rate for the first time, start at a safe point (0.2sec) | |
88 | r.RateAdjustment = 0.2 | |
89 | return | |
90 | } else { | |
91 | // NOOP | |
92 | return | |
93 | } | |
94 | } | |
95 | difference := float64(currentRate) / float64(r.Config.Rate) | |
96 | if r.RateAdjustment < 0.00001 && difference < 0.9 { | |
97 | // Reset the rate adjustment as throttling is not relevant at current speed | |
98 | r.RateAdjustment = 0.0 | |
99 | } else { | |
100 | r.RateAdjustment = r.RateAdjustment * difference | |
101 | } | |
102 | // Reset the counters | |
103 | r.lastAdjustment = time.Now() | |
104 | r.RateAdjustmentPos = 0 | |
105 | } |
24 | 24 | found[v] = true |
25 | 25 | } |
26 | 26 | ret := []string{} |
27 | for k, _ := range found { | |
27 | for k := range found { | |
28 | 28 | ret = append(ret, k) |
29 | 29 | } |
30 | 30 | return ret |
31 | 31 | } |
32 | 32 | |
33 | //FileExists checks if the filepath exists and is not a directory | |
33 | //FileExists checks if the filepath exists and is not a directory. | |
34 | //Returns false in case it's not possible to describe the named file. | |
34 | 35 | func FileExists(path string) bool { |
35 | 36 | md, err := os.Stat(path) |
36 | if os.IsNotExist(err) { | |
37 | if err != nil { | |
37 | 38 | return false |
38 | 39 | } |
40 | ||
39 | 41 | return !md.IsDir() |
40 | 42 | } |
11 | 11 | |
12 | 12 | func ValueRangeFromString(instr string) (ValueRange, error) { |
13 | 13 | // is the value a range |
14 | minmax := regexp.MustCompile("^(\\d+)\\-(\\d+)$").FindAllStringSubmatch(instr, -1) | |
14 | minmax := regexp.MustCompile(`^(\d+)-(\d+)$`).FindAllStringSubmatch(instr, -1) | |
15 | 15 | if minmax != nil { |
16 | 16 | // yes |
17 | 17 | minval, err := strconv.ParseInt(minmax[0][1], 10, 0) |
0 | 0 | package filter |
1 | 1 | |
2 | 2 | import ( |
3 | "flag" | |
3 | 4 | "fmt" |
4 | 5 | "strconv" |
5 | 6 | "strings" |
28 | 29 | |
29 | 30 | //AddFilter adds a new filter to Config |
30 | 31 | func AddFilter(conf *ffuf.Config, name string, option string) error { |
31 | newf, err := NewFilterByName(name, option) | |
32 | if err == nil { | |
33 | conf.Filters[name] = newf | |
34 | } | |
35 | return err | |
32 | newf, err := NewFilterByName(name, option) | |
33 | if err == nil { | |
34 | // valid filter create or append | |
35 | if conf.Filters[name] == nil { | |
36 | conf.Filters[name] = newf | |
37 | } else { | |
38 | currentfilter := conf.Filters[name].Repr() | |
39 | newoption := strings.TrimSpace(strings.Split(currentfilter, ":")[1]) + "," + option | |
40 | newerf, err := NewFilterByName(name, newoption) | |
41 | if err == nil { | |
42 | conf.Filters[name] = newerf | |
43 | } | |
44 | } | |
45 | } | |
46 | return err | |
36 | 47 | } |
37 | 48 | |
38 | 49 | //AddMatcher adds a new matcher to Config |
46 | 57 | |
47 | 58 | //CalibrateIfNeeded runs a self-calibration task for filtering options (if needed) by requesting random resources and acting accordingly |
48 | 59 | func CalibrateIfNeeded(j *ffuf.Job) error { |
60 | var err error | |
49 | 61 | if !j.Config.AutoCalibration { |
50 | 62 | return nil |
51 | 63 | } |
55 | 67 | return err |
56 | 68 | } |
57 | 69 | if len(responses) > 0 { |
58 | calibrateFilters(j, responses) | |
59 | } | |
60 | return nil | |
61 | } | |
62 | ||
63 | func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) { | |
70 | err = calibrateFilters(j, responses) | |
71 | } | |
72 | return err | |
73 | } | |
74 | ||
75 | func calibrateFilters(j *ffuf.Job, responses []ffuf.Response) error { | |
64 | 76 | sizeCalib := make([]string, 0) |
65 | 77 | wordCalib := make([]string, 0) |
66 | 78 | lineCalib := make([]string, 0) |
85 | 97 | lineCalib = ffuf.UniqStringSlice(lineCalib) |
86 | 98 | |
87 | 99 | if len(sizeCalib) > 0 { |
88 | AddFilter(j.Config, "size", strings.Join(sizeCalib, ",")) | |
100 | err := AddFilter(j.Config, "size", strings.Join(sizeCalib, ",")) | |
101 | if err != nil { | |
102 | return err | |
103 | } | |
89 | 104 | } |
90 | 105 | if len(wordCalib) > 0 { |
91 | AddFilter(j.Config, "word", strings.Join(wordCalib, ",")) | |
106 | err := AddFilter(j.Config, "word", strings.Join(wordCalib, ",")) | |
107 | if err != nil { | |
108 | return err | |
109 | } | |
92 | 110 | } |
93 | 111 | if len(lineCalib) > 0 { |
94 | AddFilter(j.Config, "line", strings.Join(lineCalib, ",")) | |
95 | } | |
96 | } | |
112 | err := AddFilter(j.Config, "line", strings.Join(lineCalib, ",")) | |
113 | if err != nil { | |
114 | return err | |
115 | } | |
116 | } | |
117 | return nil | |
118 | } | |
119 | ||
120 | func SetupFilters(parseOpts *ffuf.ConfigOptions, conf *ffuf.Config) error { | |
121 | errs := ffuf.NewMultierror() | |
122 | // If any other matcher is set, ignore -mc default value | |
123 | matcherSet := false | |
124 | statusSet := false | |
125 | warningIgnoreBody := false | |
126 | flag.Visit(func(f *flag.Flag) { | |
127 | if f.Name == "mc" { | |
128 | statusSet = true | |
129 | } | |
130 | if f.Name == "ms" { | |
131 | matcherSet = true | |
132 | warningIgnoreBody = true | |
133 | } | |
134 | if f.Name == "ml" { | |
135 | matcherSet = true | |
136 | warningIgnoreBody = true | |
137 | } | |
138 | if f.Name == "mr" { | |
139 | matcherSet = true | |
140 | } | |
141 | if f.Name == "mw" { | |
142 | matcherSet = true | |
143 | warningIgnoreBody = true | |
144 | } | |
145 | }) | |
146 | if statusSet || !matcherSet { | |
147 | if err := AddMatcher(conf, "status", parseOpts.Matcher.Status); err != nil { | |
148 | errs.Add(err) | |
149 | } | |
150 | } | |
151 | ||
152 | if parseOpts.Filter.Status != "" { | |
153 | if err := AddFilter(conf, "status", parseOpts.Filter.Status); err != nil { | |
154 | errs.Add(err) | |
155 | } | |
156 | } | |
157 | if parseOpts.Filter.Size != "" { | |
158 | warningIgnoreBody = true | |
159 | if err := AddFilter(conf, "size", parseOpts.Filter.Size); err != nil { | |
160 | errs.Add(err) | |
161 | } | |
162 | } | |
163 | if parseOpts.Filter.Regexp != "" { | |
164 | if err := AddFilter(conf, "regexp", parseOpts.Filter.Regexp); err != nil { | |
165 | errs.Add(err) | |
166 | } | |
167 | } | |
168 | if parseOpts.Filter.Words != "" { | |
169 | warningIgnoreBody = true | |
170 | if err := AddFilter(conf, "word", parseOpts.Filter.Words); err != nil { | |
171 | errs.Add(err) | |
172 | } | |
173 | } | |
174 | if parseOpts.Filter.Lines != "" { | |
175 | warningIgnoreBody = true | |
176 | if err := AddFilter(conf, "line", parseOpts.Filter.Lines); err != nil { | |
177 | errs.Add(err) | |
178 | } | |
179 | } | |
180 | if parseOpts.Matcher.Size != "" { | |
181 | if err := AddMatcher(conf, "size", parseOpts.Matcher.Size); err != nil { | |
182 | errs.Add(err) | |
183 | } | |
184 | } | |
185 | if parseOpts.Matcher.Regexp != "" { | |
186 | if err := AddMatcher(conf, "regexp", parseOpts.Matcher.Regexp); err != nil { | |
187 | errs.Add(err) | |
188 | } | |
189 | } | |
190 | if parseOpts.Matcher.Words != "" { | |
191 | if err := AddMatcher(conf, "word", parseOpts.Matcher.Words); err != nil { | |
192 | errs.Add(err) | |
193 | } | |
194 | } | |
195 | if parseOpts.Matcher.Lines != "" { | |
196 | if err := AddMatcher(conf, "line", parseOpts.Matcher.Lines); err != nil { | |
197 | errs.Add(err) | |
198 | } | |
199 | } | |
200 | if conf.IgnoreBody && warningIgnoreBody { | |
201 | fmt.Printf("*** Warning: possible undesired combination of -ignore-body and the response options: fl,fs,fw,ml,ms and mw.\n") | |
202 | } | |
203 | return errs.ErrorOrNil() | |
204 | } |
9 | 9 | func TestNewLineFilter(t *testing.T) { |
10 | 10 | f, _ := NewLineFilter("200,301,400-410,500") |
11 | 11 | linesRepr := f.Repr() |
12 | if strings.Index(linesRepr, "200,301,400-410,500") == -1 { | |
12 | if !strings.Contains(linesRepr, "200,301,400-410,500") { | |
13 | 13 | t.Errorf("Word filter was expected to have 4 values") |
14 | 14 | } |
15 | 15 | } |
42 | 42 | for i := int64(0); i < test.input; i++ { |
43 | 43 | data = append(data, "A") |
44 | 44 | } |
45 | resp := ffuf.Response{Data: []byte(strings.Join(data, " "))} | |
45 | resp := ffuf.Response{Data: []byte(strings.Join(data, "\n"))} | |
46 | 46 | filterReturn, _ := f.Filter(&resp) |
47 | 47 | if filterReturn != test.output { |
48 | 48 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) |
40 | 40 | matchdata = append(matchdata, response.Data...) |
41 | 41 | pattern := f.valueRaw |
42 | 42 | for keyword, inputitem := range response.Request.Input { |
43 | pattern = strings.Replace(pattern, keyword, regexp.QuoteMeta(string(inputitem)), -1) | |
43 | pattern = strings.ReplaceAll(pattern, keyword, regexp.QuoteMeta(string(inputitem))) | |
44 | 44 | } |
45 | 45 | matched, err := regexp.Match(pattern, matchdata) |
46 | 46 | if err != nil { |
9 | 9 | func TestNewRegexpFilter(t *testing.T) { |
10 | 10 | f, _ := NewRegexpFilter("s([a-z]+)arch") |
11 | 11 | statusRepr := f.Repr() |
12 | if strings.Index(statusRepr, "s([a-z]+)arch") == -1 { | |
12 | if !strings.Contains(statusRepr, "s([a-z]+)arch") { | |
13 | 13 | t.Errorf("Status filter was expected to have a regexp value") |
14 | 14 | } |
15 | 15 | } |
34 | 34 | {"s1arch", false}, |
35 | 35 | {"invalid", false}, |
36 | 36 | } { |
37 | resp := ffuf.Response{Data: []byte(test.input)} | |
37 | inp := make(map[string][]byte) | |
38 | resp := ffuf.Response{ | |
39 | Data: []byte(test.input), | |
40 | Request: &ffuf.Request{ | |
41 | Input: inp, | |
42 | }, | |
43 | } | |
38 | 44 | filterReturn, _ := f.Filter(&resp) |
39 | 45 | if filterReturn != test.output { |
40 | 46 | t.Errorf("Filter test %d: Was expecing filter return value of %t but got %t", i, test.output, filterReturn) |
9 | 9 | func TestNewSizeFilter(t *testing.T) { |
10 | 10 | f, _ := NewSizeFilter("1,2,3,444,5-90") |
11 | 11 | sizeRepr := f.Repr() |
12 | if strings.Index(sizeRepr, "1,2,3,444,5-90") == -1 { | |
12 | if !strings.Contains(sizeRepr, "1,2,3,444,5-90") { | |
13 | 13 | t.Errorf("Size filter was expected to have 5 values") |
14 | 14 | } |
15 | 15 | } |
18 | 18 | var intranges []ffuf.ValueRange |
19 | 19 | for _, sv := range strings.Split(value, ",") { |
20 | 20 | if sv == "all" { |
21 | intranges = append(intranges, ffuf.ValueRange{AllStatuses, AllStatuses}) | |
21 | intranges = append(intranges, ffuf.ValueRange{Min: AllStatuses, Max: AllStatuses}) | |
22 | 22 | } else { |
23 | 23 | vr, err := ffuf.ValueRangeFromString(sv) |
24 | 24 | if err != nil { |
9 | 9 | func TestNewStatusFilter(t *testing.T) { |
10 | 10 | f, _ := NewStatusFilter("200,301,400-410,500") |
11 | 11 | statusRepr := f.Repr() |
12 | if strings.Index(statusRepr, "200,301,400-410,500") == -1 { | |
12 | if !strings.Contains(statusRepr, "200,301,400-410,500") { | |
13 | 13 | t.Errorf("Status filter was expected to have 4 values") |
14 | 14 | } |
15 | 15 | } |
9 | 9 | func TestNewWordFilter(t *testing.T) { |
10 | 10 | f, _ := NewWordFilter("200,301,400-410,500") |
11 | 11 | wordsRepr := f.Repr() |
12 | if strings.Index(wordsRepr, "200,301,400-410,500") == -1 { | |
12 | if !strings.Contains(wordsRepr, "200,301,400-410,500") { | |
13 | 13 | t.Errorf("Word filter was expected to have 4 values") |
14 | 14 | } |
15 | 15 | } |
13 | 13 | count int |
14 | 14 | keyword string |
15 | 15 | command string |
16 | shell string | |
16 | 17 | } |
17 | 18 | |
18 | 19 | func NewCommandInput(keyword string, value string, conf *ffuf.Config) (*CommandInput, error) { |
21 | 22 | cmd.config = conf |
22 | 23 | cmd.count = 0 |
23 | 24 | cmd.command = value |
25 | cmd.shell = SHELL_CMD | |
26 | ||
27 | if cmd.config.InputShell != "" { | |
28 | cmd.shell = cmd.config.InputShell | |
29 | } | |
30 | ||
24 | 31 | return &cmd, nil |
25 | 32 | } |
26 | 33 | |
46 | 53 | |
47 | 54 | //Next will increment the cursor position, and return a boolean telling if there's iterations left |
48 | 55 | func (c *CommandInput) Next() bool { |
49 | if c.count >= c.config.InputNum { | |
50 | return false | |
51 | } | |
52 | return true | |
56 | return c.count < c.config.InputNum | |
53 | 57 | } |
54 | 58 | |
55 | 59 | //Value returns the input from command stdoutput |
56 | 60 | func (c *CommandInput) Value() []byte { |
57 | 61 | var stdout bytes.Buffer |
58 | 62 | os.Setenv("FFUF_NUM", strconv.Itoa(c.count)) |
59 | cmd := exec.Command(SHELL_CMD, SHELL_ARG, c.command) | |
63 | cmd := exec.Command(c.shell, SHELL_ARG, c.command) | |
60 | 64 | cmd.Stdout = &stdout |
61 | 65 | err := cmd.Run() |
62 | 66 | if err != nil { |
12 | 12 | msbIterator int |
13 | 13 | } |
14 | 14 | |
15 | func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, error) { | |
15 | func NewInputProvider(conf *ffuf.Config) (ffuf.InputProvider, ffuf.Multierror) { | |
16 | 16 | validmode := false |
17 | errs := ffuf.NewMultierror() | |
17 | 18 | for _, mode := range []string{"clusterbomb", "pitchfork"} { |
18 | 19 | if conf.InputMode == mode { |
19 | 20 | validmode = true |
20 | 21 | } |
21 | 22 | } |
22 | 23 | if !validmode { |
23 | return &MainInputProvider{}, fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode) | |
24 | errs.Add(fmt.Errorf("Input mode (-mode) %s not recognized", conf.InputMode)) | |
25 | return &MainInputProvider{}, errs | |
24 | 26 | } |
25 | return &MainInputProvider{Config: conf, msbIterator: 0}, nil | |
27 | mainip := MainInputProvider{Config: conf, msbIterator: 0} | |
28 | // Initialize the correct inputprovider | |
29 | for _, v := range conf.InputProviders { | |
30 | err := mainip.AddProvider(v) | |
31 | if err != nil { | |
32 | errs.Add(err) | |
33 | } | |
34 | } | |
35 | return &mainip, errs | |
26 | 36 | } |
27 | 37 | |
28 | 38 | func (i *MainInputProvider) AddProvider(provider ffuf.InputProviderConfig) error { |
56 | 56 | |
57 | 57 | //Next will increment the cursor position, and return a boolean telling if there's words left in the list |
58 | 58 | func (w *WordlistInput) Next() bool { |
59 | if w.position >= len(w.data) { | |
60 | return false | |
61 | } | |
62 | return true | |
59 | return w.position < len(w.data) | |
63 | 60 | } |
64 | 61 | |
65 | 62 | //IncrementPosition will increment the current position in the inputprovider data slice |
11 | 11 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "resultfile"} |
12 | 12 | |
13 | 13 | func writeCSV(config *ffuf.Config, res []Result, encode bool) error { |
14 | ||
15 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
16 | return nil | |
17 | } | |
18 | ||
14 | 19 | header := make([]string, 0) |
15 | 20 | f, err := os.Create(config.OutputFile) |
16 | 21 | if err != nil { |
24 | 29 | for _, inputprovider := range config.InputProviders { |
25 | 30 | header = append(header, inputprovider.Keyword) |
26 | 31 | } |
27 | ||
28 | for _, item := range staticheaders { | |
29 | header = append(header, item) | |
30 | } | |
32 | header = append(header, staticheaders...) | |
31 | 33 | |
32 | 34 | if err := w.Write(header); err != nil { |
33 | 35 | return err |
34 | 36 | } |
35 | 37 | for _, r := range res { |
36 | 38 | if encode { |
37 | inputs := make(map[string][]byte, 0) | |
39 | inputs := make(map[string][]byte, len(r.Input)) | |
38 | 40 | for k, v := range r.Input { |
39 | 41 | inputs[k] = []byte(base64encode(v)) |
40 | 42 | } |
85 | 85 | <div style="display:none"> |
86 | 86 | |result_raw|{{ $result.StatusCode }}{{ range $keyword, $value := $result.Input }}|{{ $value | printf "%s" }}{{ end }}|{{ $result.Url }}|{{ $result.RedirectLocation }}|{{ $result.Position }}|{{ $result.ContentLength }}|{{ $result.ContentWords }}|{{ $result.ContentLines }}| |
87 | 87 | </div> |
88 | <tr class="result-{{ $result.StatusCode }}" style="background-color: {{$result.HTMLColor}};"><td><font color="black" class="status-code">{{ $result.StatusCode }}</font></td>{{ range $keyword, $value := $result.Input }}<td>{{ $value | printf "%s" }}</td>{{ end }}</td><td>{{ $result.Url }}</td><td>{{ $result.RedirectLocation }}</td><td>{{ $result.Position }}</td><td>{{ $result.ContentLength }}</td><td>{{ $result.ContentWords }}</td><td>{{ $result.ContentLines }}</td><td>{{ $result.ResultFile }}</td></tr> | |
89 | {{end}} | |
88 | <tr class="result-{{ $result.StatusCode }}" style="background-color: {{$result.HTMLColor}};"> | |
89 | <td><font color="black" class="status-code">{{ $result.StatusCode }}</font></td> | |
90 | {{ range $keyword, $value := $result.Input }} | |
91 | <td>{{ $value | printf "%s" }}</td> | |
92 | {{ end }} | |
93 | <td><a href="{{ $result.Url }}">{{ $result.Url }}</a></td> | |
94 | <td><a href="{{ $result.RedirectLocation }}">{{ $result.RedirectLocation }}</a></td> | |
95 | <td>{{ $result.Position }}</td> | |
96 | <td>{{ $result.ContentLength }}</td> | |
97 | <td>{{ $result.ContentWords }}</td> | |
98 | <td>{{ $result.ContentLines }}</td> | |
99 | <td>{{ $result.ResultFile }}</td> | |
100 | </tr> | |
101 | {{ end }} | |
90 | 102 | </tbody> |
91 | 103 | </table> |
92 | 104 | |
163 | 175 | |
164 | 176 | func writeHTML(config *ffuf.Config, results []Result) error { |
165 | 177 | |
178 | if(config.OutputCreateEmptyFile && (len(results) == 0)){ | |
179 | return nil | |
180 | } | |
181 | ||
166 | 182 | results = colorizeResults(results) |
167 | 183 | |
168 | 184 | ti := time.Now() |
187 | 203 | |
188 | 204 | templateName := "output.html" |
189 | 205 | t := template.New(templateName).Delims("{{", "}}") |
190 | t.Parse(htmlTemplate) | |
191 | t.Execute(f, outHTML) | |
192 | return nil | |
206 | _, err = t.Parse(htmlTemplate) | |
207 | if err != nil { | |
208 | return err | |
209 | } | |
210 | err = t.Execute(f, outHTML) | |
211 | return err | |
193 | 212 | } |
35 | 35 | } |
36 | 36 | |
37 | 37 | func writeEJSON(config *ffuf.Config, res []Result) error { |
38 | ||
39 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
40 | return nil | |
41 | } | |
42 | ||
38 | 43 | t := time.Now() |
39 | 44 | outJSON := ejsonFileOutput{ |
40 | 45 | CommandLine: config.CommandLine, |
21 | 21 | |
22 | 22 | func writeMarkdown(config *ffuf.Config, res []Result) error { |
23 | 23 | |
24 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
25 | return nil | |
26 | } | |
27 | ||
24 | 28 | ti := time.Now() |
25 | 29 | |
26 | 30 | keywords := make([]string, 0) |
43 | 47 | |
44 | 48 | templateName := "output.md" |
45 | 49 | t := template.New(templateName).Delims("{{", "}}") |
46 | t.Parse(markdownTemplate) | |
47 | t.Execute(f, outMD) | |
48 | return nil | |
50 | _, err = t.Parse(markdownTemplate) | |
51 | if err != nil { | |
52 | return err | |
53 | } | |
54 | err = t.Execute(f, outMD) | |
55 | return err | |
49 | 56 | } |
49 | 49 | return &outp |
50 | 50 | } |
51 | 51 | |
52 | func (s *Stdoutput) Banner() error { | |
53 | fmt.Printf("%s\n v%s\n%s\n\n", BANNER_HEADER, ffuf.VERSION, BANNER_SEP) | |
52 | func (s *Stdoutput) Banner() { | |
53 | fmt.Fprintf(os.Stderr, "%s\n v%s\n%s\n\n", BANNER_HEADER, ffuf.VERSION, BANNER_SEP) | |
54 | 54 | printOption([]byte("Method"), []byte(s.config.Method)) |
55 | 55 | printOption([]byte("URL"), []byte(s.config.Url)) |
56 | 56 | |
106 | 106 | |
107 | 107 | // Proxies |
108 | 108 | if len(s.config.ProxyURL) > 0 { |
109 | proxy := fmt.Sprintf("%s", s.config.ProxyURL) | |
110 | printOption([]byte("Proxy"), []byte(proxy)) | |
109 | printOption([]byte("Proxy"), []byte(s.config.ProxyURL)) | |
111 | 110 | } |
112 | 111 | if len(s.config.ReplayProxyURL) > 0 { |
113 | replayproxy := fmt.Sprintf("%s", s.config.ReplayProxyURL) | |
114 | printOption([]byte("ReplayProxy"), []byte(replayproxy)) | |
112 | printOption([]byte("ReplayProxy"), []byte(s.config.ReplayProxyURL)) | |
115 | 113 | } |
116 | 114 | |
117 | 115 | // Timeout |
141 | 139 | for _, f := range s.config.Filters { |
142 | 140 | printOption([]byte("Filter"), []byte(f.Repr())) |
143 | 141 | } |
144 | fmt.Printf("%s\n\n", BANNER_SEP) | |
145 | return nil | |
142 | fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP) | |
146 | 143 | } |
147 | 144 | |
148 | 145 | func (s *Stdoutput) Progress(status ffuf.Progress) { |
151 | 148 | return |
152 | 149 | } |
153 | 150 | |
154 | dur := time.Now().Sub(status.StartedAt) | |
151 | dur := time.Since(status.StartedAt) | |
155 | 152 | runningSecs := int(dur / time.Second) |
156 | var reqRate int | |
153 | var reqRate int64 | |
157 | 154 | if runningSecs > 0 { |
158 | reqRate = int(status.ReqCount / runningSecs) | |
155 | reqRate = status.ReqSec | |
159 | 156 | } else { |
160 | 157 | reqRate = 0 |
161 | 158 | } |
212 | 209 | // Go through each type of write, adding |
213 | 210 | // the suffix to each output file. |
214 | 211 | |
212 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
213 | return nil | |
214 | } | |
215 | ||
215 | 216 | s.config.OutputFile = BaseFilename + ".json" |
216 | 217 | err = writeJSON(s.config, s.Results) |
217 | 218 | if err != nil { |
218 | s.Error(fmt.Sprintf("%s", err)) | |
219 | s.Error(err.Error()) | |
219 | 220 | } |
220 | 221 | |
221 | 222 | s.config.OutputFile = BaseFilename + ".ejson" |
222 | 223 | err = writeEJSON(s.config, s.Results) |
223 | 224 | if err != nil { |
224 | s.Error(fmt.Sprintf("%s", err)) | |
225 | s.Error(err.Error()) | |
225 | 226 | } |
226 | 227 | |
227 | 228 | s.config.OutputFile = BaseFilename + ".html" |
228 | 229 | err = writeHTML(s.config, s.Results) |
229 | 230 | if err != nil { |
230 | s.Error(fmt.Sprintf("%s", err)) | |
231 | s.Error(err.Error()) | |
231 | 232 | } |
232 | 233 | |
233 | 234 | s.config.OutputFile = BaseFilename + ".md" |
234 | 235 | err = writeMarkdown(s.config, s.Results) |
235 | 236 | if err != nil { |
236 | s.Error(fmt.Sprintf("%s", err)) | |
237 | s.Error(err.Error()) | |
237 | 238 | } |
238 | 239 | |
239 | 240 | s.config.OutputFile = BaseFilename + ".csv" |
240 | 241 | err = writeCSV(s.config, s.Results, false) |
241 | 242 | if err != nil { |
242 | s.Error(fmt.Sprintf("%s", err)) | |
243 | s.Error(err.Error()) | |
243 | 244 | } |
244 | 245 | |
245 | 246 | s.config.OutputFile = BaseFilename + ".ecsv" |
246 | 247 | err = writeCSV(s.config, s.Results, true) |
247 | 248 | if err != nil { |
248 | s.Error(fmt.Sprintf("%s", err)) | |
249 | s.Error(err.Error()) | |
249 | 250 | } |
250 | 251 | |
251 | 252 | return nil |
271 | 272 | err = writeCSV(s.config, s.Results, true) |
272 | 273 | } |
273 | 274 | if err != nil { |
274 | s.Error(fmt.Sprintf("%s", err)) | |
275 | s.Error(err.Error()) | |
275 | 276 | } |
276 | 277 | } |
277 | 278 | fmt.Fprintf(os.Stderr, "\n") |
288 | 289 | // Check if we need the data later |
289 | 290 | if s.config.OutputFile != "" { |
290 | 291 | // No need to store results if we're not going to use them later |
291 | inputs := make(map[string][]byte, 0) | |
292 | inputs := make(map[string][]byte, len(resp.Request.Input)) | |
292 | 293 | for k, v := range resp.Request.Input { |
293 | 294 | inputs[k] = v |
294 | 295 | } |
315 | 316 | err := os.Mkdir(s.config.OutputDirectory, 0750) |
316 | 317 | if err != nil { |
317 | 318 | if !os.IsExist(err) { |
318 | s.Error(fmt.Sprintf("%s", err)) | |
319 | s.Error(err.Error()) | |
319 | 320 | return "" |
320 | 321 | } |
321 | 322 | } |
328 | 329 | filePath = path.Join(s.config.OutputDirectory, fileName) |
329 | 330 | err := ioutil.WriteFile(filePath, []byte(fileContent), 0640) |
330 | 331 | if err != nil { |
331 | s.Error(fmt.Sprintf("%s", err)) | |
332 | s.Error(err.Error()) | |
332 | 333 | } |
333 | 334 | return fileName |
334 | 335 | } |
403 | 404 | } |
404 | 405 | |
405 | 406 | func (s *Stdoutput) resultNormal(resp ffuf.Response) { |
406 | var res_str string | |
407 | res_str = fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines) | |
408 | fmt.Println(res_str) | |
407 | res := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines) | |
408 | fmt.Println(res) | |
409 | 409 | } |
410 | 410 | |
411 | 411 | func (s *Stdoutput) colorize(input string, status int64) string { |
412 | 412 | if !s.config.Colors { |
413 | return fmt.Sprintf("%s", input) | |
413 | return input | |
414 | 414 | } |
415 | 415 | colorCode := ANSI_CLEAR |
416 | 416 | if status >= 200 && status < 300 { |
429 | 429 | } |
430 | 430 | |
431 | 431 | func printOption(name []byte, value []byte) { |
432 | fmt.Printf(" :: %-16s : %s\n", name, value) | |
432 | fmt.Fprintf(os.Stderr, " :: %-16s : %s\n", name, value) | |
433 | 433 | } |
434 | 434 | |
435 | 435 | func inSlice(key string, slice []string) bool { |
4 | 4 | "crypto/tls" |
5 | 5 | "fmt" |
6 | 6 | "io/ioutil" |
7 | "net" | |
7 | 8 | "net/http" |
8 | 9 | "net/http/httputil" |
9 | 10 | "net/textproto" |
50 | 51 | MaxIdleConns: 1000, |
51 | 52 | MaxIdleConnsPerHost: 500, |
52 | 53 | MaxConnsPerHost: 500, |
54 | DialContext: (&net.Dialer{ | |
55 | Timeout: time.Duration(time.Duration(conf.Timeout) * time.Second), | |
56 | }).DialContext, | |
57 | TLSHandshakeTimeout: time.Duration(time.Duration(conf.Timeout) * time.Second), | |
53 | 58 | TLSClientConfig: &tls.Config{ |
54 | 59 | InsecureSkipVerify: true, |
55 | 60 | Renegotiation: tls.RenegotiateOnceAsClient, |
71 | 76 | req.Data = []byte(r.config.Data) |
72 | 77 | |
73 | 78 | for keyword, inputitem := range input { |
74 | req.Method = strings.Replace(req.Method, keyword, string(inputitem), -1) | |
75 | headers := make(map[string]string, 0) | |
79 | req.Method = strings.ReplaceAll(req.Method, keyword, string(inputitem)) | |
80 | headers := make(map[string]string, len(req.Headers)) | |
76 | 81 | for h, v := range req.Headers { |
77 | var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.Replace(h, keyword, string(inputitem), -1)) | |
78 | headers[CanonicalHeader] = strings.Replace(v, keyword, string(inputitem), -1) | |
82 | var CanonicalHeader string = textproto.CanonicalMIMEHeaderKey(strings.ReplaceAll(h, keyword, string(inputitem))) | |
83 | headers[CanonicalHeader] = strings.ReplaceAll(v, keyword, string(inputitem)) | |
79 | 84 | } |
80 | 85 | req.Headers = headers |
81 | req.Url = strings.Replace(req.Url, keyword, string(inputitem), -1) | |
82 | req.Data = []byte(strings.Replace(string(req.Data), keyword, string(inputitem), -1)) | |
86 | req.Url = strings.ReplaceAll(req.Url, keyword, string(inputitem)) | |
87 | req.Data = []byte(strings.ReplaceAll(string(req.Data), keyword, string(inputitem))) | |
83 | 88 | } |
84 | 89 | |
85 | 90 | req.Input = input |
91 | 96 | var err error |
92 | 97 | var rawreq []byte |
93 | 98 | data := bytes.NewReader(req.Data) |
94 | httpreq, err = http.NewRequest(req.Method, req.Url, data) | |
99 | httpreq, err = http.NewRequestWithContext(r.config.Context, req.Method, req.Url, data) | |
95 | 100 | if err != nil { |
96 | 101 | return ffuf.Response{}, err |
97 | 102 | } |