New upstream version 1.3.0
Sophie Brun
3 years ago
0 | github: [joohoi] |
0 | # For most projects, this workflow file will not need changing; you simply need | |
1 | # to commit it to your repository. | |
2 | # | |
3 | # You may wish to alter this file to override the set of languages analyzed, | |
4 | # or to provide custom queries or build logic. | |
5 | name: "CodeQL" | |
6 | ||
7 | on: | |
8 | push: | |
9 | branches: [master] | |
10 | pull_request: | |
11 | # The branches below must be a subset of the branches above | |
12 | branches: [master] | |
13 | schedule: | |
14 | - cron: '0 9 * * 3' | |
15 | ||
16 | jobs: | |
17 | analyze: | |
18 | name: Analyze | |
19 | runs-on: ubuntu-latest | |
20 | ||
21 | strategy: | |
22 | fail-fast: false | |
23 | matrix: | |
24 | # Override automatic language detection by changing the below list | |
25 | # Supported options are ['csharp', 'cpp', 'go', 'java', 'javascript', 'python'] | |
26 | language: ['go'] | |
27 | # Learn more... | |
28 | # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection | |
29 | ||
30 | steps: | |
31 | - name: Checkout repository | |
32 | uses: actions/checkout@v2 | |
33 | with: | |
34 | # We must fetch at least the immediate parents so that if this is | |
35 | # a pull request then we can checkout the head. | |
36 | fetch-depth: 2 | |
37 | ||
38 | # If this run was triggered by a pull request event, then checkout | |
39 | # the head of the pull request instead of the merge commit. | |
40 | - run: git checkout HEAD^2 | |
41 | if: ${{ github.event_name == 'pull_request' }} | |
42 | ||
43 | # Initializes the CodeQL tools for scanning. | |
44 | - name: Initialize CodeQL | |
45 | uses: github/codeql-action/init@v1 | |
46 | with: | |
47 | languages: ${{ matrix.language }} | |
48 | # If you wish to specify custom queries, you can do so here or in a config file. | |
49 | # By default, queries listed here will override any specified in a config file. | |
50 | # Prefix the list here with "+" to use these queries and those in the config file. | |
51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main | |
52 | ||
53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). | |
54 | # If this step fails, then you should remove it and run the build manually (see below) | |
55 | - name: Autobuild | |
56 | uses: github/codeql-action/autobuild@v1 | |
57 | ||
58 | # âšī¸ Command-line programs to run using the OS shell. | |
59 | # đ https://git.io/JvXDl | |
60 | ||
61 | # âī¸ If the Autobuild fails above, remove it and uncomment the following three lines | |
62 | # and modify them (or add more) to build your code if your project | |
63 | # uses a compiled language | |
64 | ||
65 | #- run: | | |
66 | # make bootstrap | |
67 | # make release | |
68 | ||
69 | - name: Perform CodeQL Analysis | |
70 | uses: github/codeql-action/analyze@v1 |
9 | 9 | gcflags: |
10 | 10 | - all=-trimpath={{.Env.GOPATH}} |
11 | 11 | ldflags: | |
12 | -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} -extldflags '-static' | |
12 | -s -w -X github.com/ffuf/ffuf/pkg/ffuf.VERSION_APPENDIX= -extldflags '-static' | |
13 | 13 | goos: |
14 | 14 | - linux |
15 | 15 | - windows |
1 | 1 | - master |
2 | 2 | - New |
3 | 3 | - Changed |
4 | ||
5 | - v1.3.0 | |
6 | - New | |
7 | - All output file formats now include the `Content-Type`. | |
8 | - New CLI flag `-recursion-strategy` that allows adding new queued recursion jobs for non-redirect responses. | |
9 | - Ability to enter interactive mode by pressing `ENTER` during the ffuf execution. The interactive mode allows | |
10 | user to change filters, manage recursion queue, save snapshot of matches to a file etc. | |
11 | - Changed | |
12 | - Fix a badchar in progress output | |
13 | ||
14 | - v1.2.1 | |
15 | - Changed | |
16 | - Fixed a build breaking bug in `input-shell` parameter | |
4 | 17 | |
5 | 18 | - v1.2.0 |
6 | 19 | - New |
22 | 22 | * [Kiblyn11](https://github.com/Kiblyn11) |
23 | 23 | * [lc](https://github.com/lc) |
24 | 24 | * [nnwakelam](https://twitter.com/nnwakelam) |
25 | * [noraj](https://pwn.by/noraj) | |
25 | 26 | * [oh6hay](https://github.com/oh6hay) |
26 | 27 | * [putsi](https://github.com/putsi) |
27 | 28 | * [SakiiR](https://github.com/SakiiR) |
28 | 29 | * [seblw](https://github.com/seblw) |
29 | 30 | * [Shaked](https://github.com/Shaked) |
30 | 31 | * [SolomonSklash](https://github.com/SolomonSklash) |
32 | * [l4yton](https://github.com/l4yton) |
9 | 9 | # ffuf - Fuzz Faster U Fool |
10 | 10 | |
11 | 11 | A fast web fuzzer written in Go. |
12 | ||
13 | - [Installation](https://github.com/ffuf/ffuf#installation) | |
14 | - [Example usage](https://github.com/ffuf/ffuf#example-usage) | |
15 | - [Content discovery](https://github.com/ffuf/ffuf#typical-directory-discovery) | |
16 | - [Vhost discovery](https://github.com/ffuf/ffuf#virtual-host-discovery-without-dns-records) | |
17 | - [Parameter fuzzing](https://github.com/ffuf/ffuf#get-parameter-fuzzing) | |
18 | - [POST data fuzzing](https://github.com/ffuf/ffuf#post-data-fuzzing) | |
19 | - [Using external mutator](https://github.com/ffuf/ffuf#using-external-mutator-to-produce-test-cases) | |
20 | - [Configuration files](https://github.com/ffuf/ffuf#configuration-files) | |
21 | - [Help](https://github.com/ffuf/ffuf#usage) | |
22 | - [Interactive mode](https://github.com/ffuf/ffuf#interactive-mode) | |
23 | - [Sponsorware?](https://github.com/ffuf/ffuf#sponsorware) | |
24 | ||
25 | ## Sponsors | |
26 | [![Offensive Security](_img/offsec-logo.png)](https://www.offensive-security.com/) | |
27 | ||
28 | ## Official Discord Channel | |
29 | ||
30 | ffuf has a channel at Porchetta Industries Discord server alongside of channels for many other tools. | |
31 | ||
32 | Come to hang out & to discuss about ffuf, it's usage and development! | |
33 | ||
34 | [![Porchetta Industries](https://discordapp.com/api/guilds/736724457258745996/widget.png?style=banner2)](https://discord.gg/VWcdZCUsQP) | |
35 | ||
36 | ||
37 | ||
12 | 38 | |
13 | 39 | ## Installation |
14 | 40 | |
134 | 160 | Fuzz Faster U Fool - v1.2.0-git |
135 | 161 | |
136 | 162 | HTTP OPTIONS: |
137 | -H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted. | |
138 | -X HTTP method to use (default: GET) | |
139 | -b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality. | |
140 | -d POST data | |
141 | -ignore-body Do not fetch the response content. (default: false) | |
142 | -r Follow redirects (default: false) | |
143 | -recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false) | |
144 | -recursion-depth Maximum recursion depth. (default: 0) | |
145 | -replay-proxy Replay matched requests using this proxy. | |
146 | -timeout HTTP request timeout in seconds. (default: 10) | |
147 | -u Target URL | |
148 | -x HTTP Proxy URL | |
163 | -H Header `"Name: Value"`, separated by colon. Multiple -H flags are accepted. | |
164 | -X HTTP method to use | |
165 | -b Cookie data `"NAME1=VALUE1; NAME2=VALUE2"` for copy as curl functionality. | |
166 | -d POST data | |
167 | -ignore-body Do not fetch the response content. (default: false) | |
168 | -r Follow redirects (default: false) | |
169 | -recursion Scan recursively. Only FUZZ keyword is supported, and URL (-u) has to end in it. (default: false) | |
170 | -recursion-depth Maximum recursion depth. (default: 0) | |
171 | -recursion-strategy Recursion strategy: "default" for a redirect based, and "greedy" to recurse on all matches (default: default) | |
172 | -replay-proxy Replay matched requests using this proxy. | |
173 | -timeout HTTP request timeout in seconds. (default: 10) | |
174 | -u Target URL | |
175 | -x Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080 | |
149 | 176 | |
150 | 177 | GENERAL OPTIONS: |
151 | 178 | -V Show version information. (default: false) |
215 | 242 | |
216 | 243 | ``` |
217 | 244 | |
245 | ### Interactive mode | |
246 | ||
247 | By pressing `ENTER` during ffuf execution, the process is paused and user is dropped to a shell-like interactive mode: | |
248 | ``` | |
249 | entering interactive mode | |
250 | type "help" for a list of commands, or ENTER to resume. | |
251 | > help | |
252 | ||
253 | available commands: | |
254 | fc [value] - (re)configure status code filter | |
255 | fl [value] - (re)configure line count filter | |
256 | fw [value] - (re)configure word count filter | |
257 | fs [value] - (re)configure size filter | |
258 | queueshow - show recursive job queue | |
259 | queuedel [number] - delete a recursion job in the queue | |
260 | queueskip - advance to the next queued recursion job | |
261 | restart - restart and resume the current ffuf job | |
262 | resume - resume current ffuf job (or: ENTER) | |
263 | show - show results | |
264 | savejson [filename] - save current matches to a file | |
265 | help - you are looking at it | |
266 | > | |
267 | ``` | |
268 | ||
269 | in this mode, filters can be reconfigured, queue managed and the current state saved to disk. | |
270 | ||
271 | When (re)configuring the filters, they get applied posthumously and all the false positive matches from memory that | |
272 | would have been filtered out by the newly added filters get deleted. | |
273 | ||
274 | The new state of matches can be printed out with a command `show` that will print out all the matches as like they | |
275 | would have been found by `ffuf`. | |
276 | ||
277 | As "negative" matches are not stored to memory, relaxing the filters cannot unfortunately bring back the lost matches. | |
278 | For this kind of scenario, the user is able to use the command `restart`, which resets the state and starts the current | |
279 | job from the beginning. | |
280 | ||
281 | ||
282 | ## Sponsorware | |
283 | ||
284 | `ffuf` employs a sponsorware model. This means that all new features developed by its author are initially exclusively | |
285 | available for their sponsors. 30 days after the exclusive release, all the new features will be released at the freely | |
286 | available open source repository at https://github.com/ffuf/ffuf . | |
287 | ||
288 | This model enables me to provide concrete benefits for the generous individuals and companies that enable me to work on | |
289 | `ffuf`. The different sponsorship tiers can be seen [here](https://github.com/sponsors/joohoi). | |
290 | ||
291 | All the community contributions are and will be available directly in the freely available open source repository. The | |
292 | exclusive version benefits only include new features created by [@joohoi](https://github.com/joohoi) | |
293 | ||
294 | ### Access the sponsorware through code contributions | |
295 | ||
296 | People that create significant contributions to the `ffuf` project itself should and will have access to the sponsorware | |
297 | as well. If you are planning to create such a contribution, please contact [@joohoi](https://github.com/joohoi) | |
298 | first to ensure that there aren't other people working on the same feature. | |
299 | ||
218 | 300 | ## Helper scripts and advanced payloads |
219 | 301 | |
220 | 302 | See [ffuf-scripts](https://github.com/ffuf/ffuf-scripts) repository for helper scripts and payload generators |
Binary diff not shown
14 | 14 | method = "GET" |
15 | 15 | proxyurl = "http://127.0.0.1:8080" |
16 | 16 | recursion = false |
17 | recursiondepth = 0 | |
17 | recursion_depth = 0 | |
18 | recursion_strategy = "default" | |
18 | 19 | replayproxyurl = "http://127.0.0.1:8080" |
19 | 20 | timeout = 10 |
20 | 21 | url = "https://example.org/FUZZ" |
53 | 53 | Description: "Options controlling the HTTP request and its parts.", |
54 | 54 | Flags: make([]UsageFlag, 0), |
55 | 55 | Hidden: false, |
56 | ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "replay-proxy", "timeout", "ignore-body", "x"}, | |
56 | ExpectedFlags: []string{"H", "X", "b", "d", "r", "u", "recursion", "recursion-depth", "recursion-strategy", "replay-proxy", "timeout", "ignore-body", "x"}, | |
57 | 57 | } |
58 | 58 | u_general := UsageSection{ |
59 | 59 | Name: "GENERAL OPTIONS", |
122 | 122 | } |
123 | 123 | }) |
124 | 124 | |
125 | fmt.Printf("Fuzz Faster U Fool - v%s\n\n", ffuf.VERSION) | |
125 | fmt.Printf("Fuzz Faster U Fool - v%s\n\n", ffuf.Version()) | |
126 | 126 | |
127 | 127 | // Print out the sections |
128 | 128 | for _, section := range sections { |
3 | 3 | "context" |
4 | 4 | "flag" |
5 | 5 | "fmt" |
6 | "github.com/ffuf/ffuf/pkg/ffuf" | |
7 | "github.com/ffuf/ffuf/pkg/filter" | |
8 | "github.com/ffuf/ffuf/pkg/input" | |
9 | "github.com/ffuf/ffuf/pkg/interactive" | |
10 | "github.com/ffuf/ffuf/pkg/output" | |
11 | "github.com/ffuf/ffuf/pkg/runner" | |
6 | 12 | "io/ioutil" |
7 | 13 | "log" |
8 | 14 | "os" |
9 | 15 | "strings" |
10 | ||
11 | "github.com/ffuf/ffuf/pkg/ffuf" | |
12 | "github.com/ffuf/ffuf/pkg/filter" | |
13 | "github.com/ffuf/ffuf/pkg/input" | |
14 | "github.com/ffuf/ffuf/pkg/output" | |
15 | "github.com/ffuf/ffuf/pkg/runner" | |
16 | 16 | ) |
17 | 17 | |
18 | 18 | type multiStringFlag []string |
90 | 90 | flag.StringVar(&opts.HTTP.Data, "data-ascii", opts.HTTP.Data, "POST data (alias of -d)") |
91 | 91 | flag.StringVar(&opts.HTTP.Data, "data-binary", opts.HTTP.Data, "POST data (alias of -d)") |
92 | 92 | flag.StringVar(&opts.HTTP.Method, "X", opts.HTTP.Method, "HTTP method to use") |
93 | flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "HTTP Proxy URL") | |
93 | flag.StringVar(&opts.HTTP.ProxyURL, "x", opts.HTTP.ProxyURL, "Proxy URL (SOCKS5 or HTTP). For example: http://127.0.0.1:8080 or socks5://127.0.0.1:8080") | |
94 | 94 | flag.StringVar(&opts.HTTP.ReplayProxyURL, "replay-proxy", opts.HTTP.ReplayProxyURL, "Replay matched requests using this proxy.") |
95 | flag.StringVar(&opts.HTTP.RecursionStrategy, "recursion-strategy", opts.HTTP.RecursionStrategy, "Recursion strategy: \"default\" for a redirect based, and \"greedy\" to recurse on all matches") | |
95 | 96 | flag.StringVar(&opts.HTTP.URL, "u", opts.HTTP.URL, "Target URL") |
96 | 97 | flag.StringVar(&opts.Input.Extensions, "e", opts.Input.Extensions, "Comma separated list of extensions. Extends FUZZ keyword.") |
97 | 98 | flag.StringVar(&opts.Input.InputMode, "mode", opts.Input.InputMode, "Multi-wordlist operation mode. Available modes: clusterbomb, pitchfork") |
135 | 136 | opts = ParseFlags(opts) |
136 | 137 | |
137 | 138 | if opts.General.ShowVersion { |
138 | fmt.Printf("ffuf version: %s\n", ffuf.VERSION) | |
139 | fmt.Printf("ffuf version: %s\n", ffuf.Version()) | |
139 | 140 | os.Exit(0) |
140 | 141 | } |
141 | 142 | if len(opts.Output.DebugLog) != 0 { |
196 | 197 | fmt.Fprintf(os.Stderr, "Error in autocalibration, exiting: %s\n", err) |
197 | 198 | os.Exit(1) |
198 | 199 | } |
200 | go func() { | |
201 | err := interactive.Handle(job) | |
202 | if err != nil { | |
203 | log.Printf("Error while trying to initialize interactive session: %s", err) | |
204 | } | |
205 | }() | |
199 | 206 | |
200 | 207 | // Job handles waiting for goroutines to complete itself |
201 | 208 | job.Start() |
32 | 32 | OutputDirectory string `json:"outputdirectory"` |
33 | 33 | OutputFile string `json:"outputfile"` |
34 | 34 | OutputFormat string `json:"outputformat"` |
35 | OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"` | |
35 | OutputCreateEmptyFile bool `json:"OutputCreateEmptyFile"` | |
36 | 36 | ProgressFrequency int `json:"-"` |
37 | 37 | ProxyURL string `json:"proxyurl"` |
38 | 38 | Quiet bool `json:"quiet"` |
39 | 39 | Rate int64 `json:"rate"` |
40 | 40 | Recursion bool `json:"recursion"` |
41 | 41 | RecursionDepth int `json:"recursion_depth"` |
42 | RecursionStrategy string `json:"recursion_strategy"` | |
42 | 43 | ReplayProxyURL string `json:"replayproxyurl"` |
43 | 44 | StopOn403 bool `json:"stop_403"` |
44 | 45 | StopOnAll bool `json:"stop_all"` |
83 | 84 | conf.Rate = 0 |
84 | 85 | conf.Recursion = false |
85 | 86 | conf.RecursionDepth = 0 |
87 | conf.RecursionStrategy = "default" | |
86 | 88 | conf.StopOn403 = false |
87 | 89 | conf.StopOnAll = false |
88 | 90 | conf.StopOnErrors = false |
3 | 3 | type FilterProvider interface { |
4 | 4 | Filter(response *Response) (bool, error) |
5 | 5 | Repr() string |
6 | ReprVerbose() string | |
6 | 7 | } |
7 | 8 | |
8 | 9 | //RunnerProvider is an interface for request executors |
39 | 40 | Progress(status Progress) |
40 | 41 | Info(infostring string) |
41 | 42 | Error(errstring string) |
43 | Raw(output string) | |
42 | 44 | Warning(warnstring string) |
43 | 45 | Result(resp Response) |
46 | PrintResult(res Result) | |
47 | SaveFile(filename, format string) error | |
48 | GetResults() []Result | |
49 | SetResults(results []Result) | |
50 | Reset() | |
44 | 51 | } |
52 | ||
53 | type Result struct { | |
54 | Input map[string][]byte `json:"input"` | |
55 | Position int `json:"position"` | |
56 | StatusCode int64 `json:"status"` | |
57 | ContentLength int64 `json:"length"` | |
58 | ContentWords int64 `json:"words"` | |
59 | ContentLines int64 `json:"lines"` | |
60 | ContentType string `json:"content-type"` | |
61 | RedirectLocation string `json:"redirectlocation"` | |
62 | Url string `json:"url"` | |
63 | ResultFile string `json:"resultfile"` | |
64 | Host string `json:"host"` | |
65 | HTMLColor string `json:"-"` | |
66 | } |
24 | 24 | Total int |
25 | 25 | Running bool |
26 | 26 | RunningJob bool |
27 | Paused bool | |
27 | 28 | Count403 int |
28 | 29 | Count429 int |
29 | 30 | Error string |
32 | 33 | startTimeJob time.Time |
33 | 34 | queuejobs []QueueJob |
34 | 35 | queuepos int |
36 | skipQueue bool | |
35 | 37 | currentDepth int |
38 | pauseWg sync.WaitGroup | |
36 | 39 | } |
37 | 40 | |
38 | 41 | type QueueJob struct { |
48 | 51 | j.SpuriousErrorCounter = 0 |
49 | 52 | j.Running = false |
50 | 53 | j.RunningJob = false |
54 | j.Paused = false | |
51 | 55 | j.queuepos = 0 |
52 | 56 | j.queuejobs = make([]QueueJob, 0) |
53 | 57 | j.currentDepth = 0 |
54 | 58 | j.Rate = NewRateThrottle(conf) |
59 | j.skipQueue = false | |
55 | 60 | return &j |
56 | 61 | } |
57 | 62 | |
84 | 89 | j.SpuriousErrorCounter = 0 |
85 | 90 | } |
86 | 91 | |
92 | //DeleteQueueItem deletes a recursion job from the queue by its index in the slice | |
93 | func (j *Job) DeleteQueueItem(index int) { | |
94 | index = j.queuepos + index - 1 | |
95 | j.queuejobs = append(j.queuejobs[:index], j.queuejobs[index+1:]...) | |
96 | } | |
97 | ||
98 | //QueuedJobs returns the slice of queued recursive jobs | |
99 | func (j *Job) QueuedJobs() []QueueJob { | |
100 | return j.queuejobs[j.queuepos-1:] | |
101 | } | |
102 | ||
87 | 103 | //Start the execution of the Job |
88 | 104 | func (j *Job) Start() { |
89 | 105 | if j.startTime.IsZero() { |
106 | 122 | j.interruptMonitor() |
107 | 123 | for j.jobsInQueue() { |
108 | 124 | j.prepareQueueJob() |
109 | ||
110 | if j.queuepos > 1 && !j.RunningJob { | |
111 | // Print info for queued recursive jobs | |
112 | j.Output.Info(fmt.Sprintf("Scanning: %s", j.Config.Url)) | |
113 | } | |
114 | j.Input.Reset() | |
115 | j.startTimeJob = time.Now() | |
125 | j.Reset() | |
116 | 126 | j.RunningJob = true |
117 | j.Counter = 0 | |
118 | 127 | j.startExecution() |
119 | 128 | } |
120 | 129 | |
122 | 131 | if err != nil { |
123 | 132 | j.Output.Error(err.Error()) |
124 | 133 | } |
134 | } | |
135 | ||
136 | // Reset resets the counters and wordlist position for a job | |
137 | func (j *Job) Reset() { | |
138 | j.Input.Reset() | |
139 | j.Counter = 0 | |
140 | j.skipQueue = false | |
141 | j.startTimeJob = time.Now() | |
142 | j.Output.Reset() | |
125 | 143 | } |
126 | 144 | |
127 | 145 | func (j *Job) jobsInQueue() bool { |
132 | 150 | j.Config.Url = j.queuejobs[j.queuepos].Url |
133 | 151 | j.currentDepth = j.queuejobs[j.queuepos].depth |
134 | 152 | j.queuepos += 1 |
153 | } | |
154 | ||
155 | //SkipQueue allows to skip the current job and advance to the next queued recursion job | |
156 | func (j *Job) SkipQueue() { | |
157 | j.skipQueue = true | |
135 | 158 | } |
136 | 159 | |
137 | 160 | func (j *Job) sleepIfNeeded() { |
152 | 175 | } |
153 | 176 | } |
154 | 177 | |
178 | // Pause pauses the job process | |
179 | func (j *Job) Pause() { | |
180 | if !j.Paused { | |
181 | j.Paused = true | |
182 | j.pauseWg.Add(1) | |
183 | j.Output.Info("------ PAUSING ------") | |
184 | } | |
185 | } | |
186 | ||
187 | // Resume resumes the job process | |
188 | func (j *Job) Resume() { | |
189 | if j.Paused { | |
190 | j.Paused = false | |
191 | j.Output.Info("------ RESUMING -----") | |
192 | j.pauseWg.Done() | |
193 | } | |
194 | } | |
195 | ||
155 | 196 | func (j *Job) startExecution() { |
156 | 197 | var wg sync.WaitGroup |
157 | 198 | wg.Add(1) |
158 | 199 | go j.runBackgroundTasks(&wg) |
200 | ||
201 | // Print the base URL when starting a new recursion queue job | |
202 | if j.queuepos > 1 { | |
203 | j.Output.Info(fmt.Sprintf("Starting queued job on target: %s", j.Config.Url)) | |
204 | } | |
205 | ||
159 | 206 | //Limiter blocks after reaching the buffer, ensuring limited concurrency |
160 | 207 | limiter := make(chan bool, j.Config.Threads) |
161 | 208 | |
162 | for j.Input.Next() { | |
209 | for j.Input.Next() && !j.skipQueue { | |
163 | 210 | // Check if we should stop the process |
164 | 211 | j.CheckStop() |
165 | 212 | |
167 | 214 | defer j.Output.Warning(j.Error) |
168 | 215 | break |
169 | 216 | } |
217 | j.pauseWg.Wait() | |
170 | 218 | limiter <- true |
171 | 219 | nextInput := j.Input.Value() |
172 | 220 | nextPosition := j.Input.Position() |
199 | 247 | go func() { |
200 | 248 | for range sigChan { |
201 | 249 | j.Error = "Caught keyboard interrupt (Ctrl-C)\n" |
250 | // resume if paused | |
251 | if j.Paused { | |
252 | j.pauseWg.Done() | |
253 | } | |
254 | // Stop the job | |
202 | 255 | j.Stop() |
203 | 256 | } |
204 | 257 | }() |
207 | 260 | func (j *Job) runBackgroundTasks(wg *sync.WaitGroup) { |
208 | 261 | defer wg.Done() |
209 | 262 | totalProgress := j.Input.Total() |
210 | for j.Counter <= totalProgress { | |
211 | ||
263 | for j.Counter <= totalProgress && !j.skipQueue { | |
264 | j.pauseWg.Wait() | |
212 | 265 | if !j.Running { |
213 | 266 | break |
214 | 267 | } |
314 | 367 | j.Output.Result(resp) |
315 | 368 | // Refresh the progress indicator as we printed something out |
316 | 369 | j.updateProgress() |
317 | } | |
318 | ||
319 | if j.Config.Recursion && len(resp.GetRedirectLocation(false)) > 0 { | |
320 | j.handleRecursionJob(resp) | |
321 | } | |
322 | } | |
323 | ||
324 | //handleRecursionJob adds a new recursion job to the job queue if a new directory is found | |
325 | func (j *Job) handleRecursionJob(resp Response) { | |
370 | if j.Config.Recursion && j.Config.RecursionStrategy == "greedy" { | |
371 | j.handleGreedyRecursionJob(resp) | |
372 | } | |
373 | } | |
374 | ||
375 | if j.Config.Recursion && j.Config.RecursionStrategy == "default" && len(resp.GetRedirectLocation(false)) > 0 { | |
376 | j.handleDefaultRecursionJob(resp) | |
377 | } | |
378 | } | |
379 | ||
380 | //handleGreedyRecursionJob adds a recursion job to the queue if the maximum depth has not been reached | |
381 | func (j *Job) handleGreedyRecursionJob(resp Response) { | |
382 | // Handle greedy recursion strategy. Match has been determined before calling handleRecursionJob | |
383 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { | |
384 | recUrl := resp.Request.Url + "/" + "FUZZ" | |
385 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} | |
386 | j.queuejobs = append(j.queuejobs, newJob) | |
387 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) | |
388 | } else { | |
389 | j.Output.Warning(fmt.Sprintf("Maximum recursion depth reached. Ignoring: %s", resp.Request.Url)) | |
390 | } | |
391 | } | |
392 | ||
393 | //handleDefaultRecursionJob adds a new recursion job to the job queue if a new directory is found and maximum depth has | |
394 | //not been reached | |
395 | func (j *Job) handleDefaultRecursionJob(resp Response) { | |
396 | recUrl := resp.Request.Url + "/" + "FUZZ" | |
326 | 397 | if (resp.Request.Url + "/") != resp.GetRedirectLocation(true) { |
327 | 398 | // Not a directory, return early |
328 | 399 | return |
329 | 400 | } |
330 | 401 | if j.Config.RecursionDepth == 0 || j.currentDepth < j.Config.RecursionDepth { |
331 | 402 | // We have yet to reach the maximum recursion depth |
332 | recUrl := resp.Request.Url + "/" + "FUZZ" | |
333 | 403 | newJob := QueueJob{Url: recUrl, depth: j.currentDepth + 1} |
334 | 404 | j.queuejobs = append(j.queuejobs, newJob) |
335 | 405 | j.Output.Info(fmt.Sprintf("Adding a new job to the queue: %s", recUrl)) |
25 | 25 | } |
26 | 26 | |
27 | 27 | type HTTPOptions struct { |
28 | Cookies []string | |
29 | Data string | |
30 | FollowRedirects bool | |
31 | Headers []string | |
32 | IgnoreBody bool | |
33 | Method string | |
34 | ProxyURL string | |
35 | Recursion bool | |
36 | RecursionDepth int | |
37 | ReplayProxyURL string | |
38 | Timeout int | |
39 | URL string | |
28 | Cookies []string | |
29 | Data string | |
30 | FollowRedirects bool | |
31 | Headers []string | |
32 | IgnoreBody bool | |
33 | Method string | |
34 | ProxyURL string | |
35 | Recursion bool | |
36 | RecursionDepth int | |
37 | RecursionStrategy string | |
38 | ReplayProxyURL string | |
39 | Timeout int | |
40 | URL string | |
40 | 41 | } |
41 | 42 | |
42 | 43 | type GeneralOptions struct { |
71 | 72 | } |
72 | 73 | |
73 | 74 | type OutputOptions struct { |
74 | DebugLog string | |
75 | OutputDirectory string | |
76 | OutputFile string | |
77 | OutputFormat string | |
78 | OutputCreateEmptyFile bool | |
75 | DebugLog string | |
76 | OutputDirectory string | |
77 | OutputFile string | |
78 | OutputFormat string | |
79 | OutputCreateEmptyFile bool | |
79 | 80 | } |
80 | 81 | |
81 | 82 | type FilterOptions struct { |
122 | 123 | c.HTTP.ProxyURL = "" |
123 | 124 | c.HTTP.Recursion = false |
124 | 125 | c.HTTP.RecursionDepth = 0 |
126 | c.HTTP.RecursionStrategy = "default" | |
125 | 127 | c.HTTP.ReplayProxyURL = "" |
126 | 128 | c.HTTP.Timeout = 10 |
127 | 129 | c.HTTP.URL = "" |
386 | 388 | conf.FollowRedirects = parseOpts.HTTP.FollowRedirects |
387 | 389 | conf.Recursion = parseOpts.HTTP.Recursion |
388 | 390 | conf.RecursionDepth = parseOpts.HTTP.RecursionDepth |
391 | conf.RecursionStrategy = parseOpts.HTTP.RecursionStrategy | |
389 | 392 | conf.AutoCalibration = parseOpts.General.AutoCalibration |
390 | 393 | conf.Threads = parseOpts.General.Threads |
391 | 394 | conf.Timeout = parseOpts.HTTP.Timeout |
12 | 12 | ContentLength int64 |
13 | 13 | ContentWords int64 |
14 | 14 | ContentLines int64 |
15 | ContentType string | |
15 | 16 | Cancelled bool |
16 | 17 | Request *Request |
17 | 18 | Raw string |
49 | 50 | var resp Response |
50 | 51 | resp.Request = req |
51 | 52 | resp.StatusCode = int64(httpresp.StatusCode) |
53 | resp.ContentType = httpresp.Header.Get("Content-Type") | |
52 | 54 | resp.Headers = httpresp.Header |
53 | 55 | resp.Cancelled = false |
54 | 56 | resp.Raw = "" |
0 | 0 | package ffuf |
1 | 1 | |
2 | 2 | import ( |
3 | "fmt" | |
3 | 4 | "math/rand" |
4 | 5 | "os" |
5 | 6 | ) |
40 | 41 | |
41 | 42 | return !md.IsDir() |
42 | 43 | } |
44 | ||
45 | //Version returns the ffuf version string | |
46 | func Version() string { | |
47 | return fmt.Sprintf("%s%s", VERSION, VERSION_APPENDIX) | |
48 | } |
0 | package ffuf | |
1 | ||
2 | var ( | |
3 | //VERSION holds the current version number | |
4 | VERSION = "1.3.0" | |
5 | //VERSION_APPENDIX holds additional version definition | |
6 | VERSION_APPENDIX = "-exclusive-dev" | |
7 | ) |
29 | 29 | |
30 | 30 | //AddFilter adds a new filter to Config |
31 | 31 | func AddFilter(conf *ffuf.Config, name string, option string) error { |
32 | newf, err := NewFilterByName(name, option) | |
33 | if err == nil { | |
34 | // valid filter create or append | |
35 | if conf.Filters[name] == nil { | |
36 | conf.Filters[name] = newf | |
37 | } else { | |
38 | currentfilter := conf.Filters[name].Repr() | |
39 | newoption := strings.TrimSpace(strings.Split(currentfilter, ":")[1]) + "," + option | |
40 | newerf, err := NewFilterByName(name, newoption) | |
41 | if err == nil { | |
42 | conf.Filters[name] = newerf | |
43 | } | |
44 | } | |
45 | } | |
46 | return err | |
32 | newf, err := NewFilterByName(name, option) | |
33 | if err == nil { | |
34 | // valid filter create or append | |
35 | if conf.Filters[name] == nil { | |
36 | conf.Filters[name] = newf | |
37 | } else { | |
38 | newoption := conf.Filters[name].Repr() + "," + option | |
39 | newerf, err := NewFilterByName(name, newoption) | |
40 | if err == nil { | |
41 | conf.Filters[name] = newerf | |
42 | } | |
43 | } | |
44 | } | |
45 | return err | |
46 | } | |
47 | ||
48 | //RemoveFilter removes a filter of a given type | |
49 | func RemoveFilter(conf *ffuf.Config, name string) { | |
50 | delete(conf.Filters, name) | |
47 | 51 | } |
48 | 52 | |
49 | 53 | //AddMatcher adds a new matcher to Config |
59 | 59 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) |
60 | 60 | } |
61 | 61 | } |
62 | return fmt.Sprintf("Response lines: %s", strings.Join(strval, ",")) | |
62 | return strings.Join(strval, ",") | |
63 | 63 | } |
64 | ||
65 | func (f *LineFilter) ReprVerbose() string { | |
66 | return fmt.Sprintf("Response lines: %s", f.Repr()) | |
67 | } |
50 | 50 | } |
51 | 51 | |
52 | 52 | func (f *RegexpFilter) Repr() string { |
53 | return f.valueRaw | |
54 | } | |
55 | ||
56 | func (f *RegexpFilter) ReprVerbose() string { | |
53 | 57 | return fmt.Sprintf("Regexp: %s", f.valueRaw) |
54 | 58 | } |
59 | 59 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) |
60 | 60 | } |
61 | 61 | } |
62 | return fmt.Sprintf("Response size: %s", strings.Join(strval, ",")) | |
62 | return strings.Join(strval, ",") | |
63 | 63 | } |
64 | ||
65 | func (f *SizeFilter) ReprVerbose() string { | |
66 | return fmt.Sprintf("Response size: %s", f.Repr()) | |
67 | } |
74 | 74 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) |
75 | 75 | } |
76 | 76 | } |
77 | return fmt.Sprintf("Response status: %s", strings.Join(strval, ",")) | |
77 | return strings.Join(strval, ",") | |
78 | 78 | } |
79 | ||
80 | func (f *StatusFilter) ReprVerbose() string { | |
81 | return fmt.Sprintf("Response status: %s", f.Repr()) | |
82 | } |
59 | 59 | strval = append(strval, strconv.Itoa(int(iv.Min))+"-"+strconv.Itoa(int(iv.Max))) |
60 | 60 | } |
61 | 61 | } |
62 | return fmt.Sprintf("Response words: %s", strings.Join(strval, ",")) | |
62 | return strings.Join(strval, ",") | |
63 | 63 | } |
64 | ||
65 | func (f *WordFilter) ReprVerbose() string { | |
66 | return fmt.Sprintf("Response words: %s", f.Repr()) | |
67 | } |
0 | // +build !windows | |
1 | ||
2 | package interactive | |
3 | ||
4 | import "os" | |
5 | ||
6 | func termHandle() (*os.File, error) { | |
7 | return os.Open("/dev/tty") | |
8 | } |
0 | package interactive | |
1 | ||
2 | import ( | |
3 | "bufio" | |
4 | "fmt" | |
5 | "github.com/ffuf/ffuf/pkg/ffuf" | |
6 | "github.com/ffuf/ffuf/pkg/filter" | |
7 | "strconv" | |
8 | "strings" | |
9 | "time" | |
10 | ) | |
11 | ||
12 | type interactive struct { | |
13 | Job *ffuf.Job | |
14 | paused bool | |
15 | } | |
16 | ||
17 | func Handle(job *ffuf.Job) error { | |
18 | i := interactive{job, false} | |
19 | tty, err := termHandle() | |
20 | if err != nil { | |
21 | return err | |
22 | } | |
23 | defer tty.Close() | |
24 | inreader := bufio.NewScanner(tty) | |
25 | inreader.Split(bufio.ScanLines) | |
26 | for inreader.Scan() { | |
27 | i.handleInput(inreader.Bytes()) | |
28 | } | |
29 | return nil | |
30 | } | |
31 | ||
// handleInput parses and executes one line of user input from the
// terminal. A bare ENTER (empty line) toggles between the paused
// interactive state and the running state; any other input is treated
// as a command with optional space-separated arguments. While paused,
// a prompt is re-printed after every handled line.
func (i *interactive) handleInput(in []byte) {
	instr := string(in)
	args := strings.Split(strings.TrimSpace(instr), " ")
	if len(args) == 1 && args[0] == "" {
		// Enter pressed - toggle interactive state
		i.paused = !i.paused
		if i.paused {
			i.Job.Pause()
			// Brief pause so in-flight progress output settles before
			// the banner is printed.
			time.Sleep(500 * time.Millisecond)
			i.printBanner()
		} else {
			i.Job.Resume()
		}
	} else {
		switch args[0] {
		case "?":
			i.printHelp()
		case "help":
			i.printHelp()
		case "resume":
			// Leave interactive mode and continue the job.
			i.paused = false
			i.Job.Resume()
		case "restart":
			// Reset the job state and start it over from the beginning.
			i.Job.Reset()
			i.paused = false
			i.Job.Output.Info("Restarting the current ffuf job!")
			i.Job.Resume()
		case "show":
			// Re-print every result collected so far.
			for _, r := range i.Job.Output.GetResults() {
				i.Job.Output.PrintResult(r)
			}
		case "savejson":
			// savejson <filename>: write current matches as JSON.
			if len(args) < 2 {
				i.Job.Output.Error("Please define the filename")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"savejson\"")
			} else {
				err := i.Job.Output.SaveFile(args[1], "json")
				if err != nil {
					i.Job.Output.Error(fmt.Sprintf("%s", err))
				} else {
					i.Job.Output.Info("Output file successfully saved!")
				}
			}
		case "fc":
			// fc <value>|none: (re)configure the status code filter.
			if len(args) < 2 {
				i.Job.Output.Error("Please define a value for status code filter, or \"none\" for removing it")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"fc\"")
			} else {
				i.updateFilter("status", args[1])
				i.Job.Output.Info("New status code filter value set")
			}
		case "fl":
			// fl <value>|none: (re)configure the line count filter.
			if len(args) < 2 {
				i.Job.Output.Error("Please define a value for line count filter, or \"none\" for removing it")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"fl\"")
			} else {
				i.updateFilter("line", args[1])
				i.Job.Output.Info("New line count filter value set")
			}
		case "fw":
			// fw <value>|none: (re)configure the word count filter.
			if len(args) < 2 {
				i.Job.Output.Error("Please define a value for word count filter, or \"none\" for removing it")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"fw\"")
			} else {
				i.updateFilter("word", args[1])
				i.Job.Output.Info("New word count filter value set")
			}
		case "fs":
			// fs <value>|none: (re)configure the response size filter.
			if len(args) < 2 {
				i.Job.Output.Error("Please define a value for response size filter, or \"none\" for removing it")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"fs\"")
			} else {
				i.updateFilter("size", args[1])
				i.Job.Output.Info("New response size filter value set")
			}
		case "queueshow":
			i.printQueue()
		case "queuedel":
			// queuedel <index>: remove a queued recursion job.
			if len(args) < 2 {
				i.Job.Output.Error("Please define the index of a queued job to remove. Use \"queueshow\" for listing of jobs.")
			} else if len(args) > 2 {
				i.Job.Output.Error("Too many arguments for \"queuedel\"")
			} else {
				i.deleteQueue(args[1])
			}
		case "queueskip":
			// Abandon the active job and move on to the next queued one.
			i.Job.SkipQueue()
			i.Job.Output.Info("Skipping to the next queued job")
		default:
			if i.paused {
				i.Job.Output.Warning(fmt.Sprintf("Unknown command: \"%s\". Enter \"help\" for a list of available commands", args[0]))
			} else {
				i.Job.Output.Error("NOPE")
			}
		}
	}

	// While paused, show the prompt again so the user can keep typing.
	if i.paused {
		i.printPrompt()
	}
}
138 | ||
139 | func (i *interactive) updateFilter(name, value string) { | |
140 | if value == "none" { | |
141 | filter.RemoveFilter(i.Job.Config, name) | |
142 | } else { | |
143 | newFc, err := filter.NewFilterByName(name, value) | |
144 | if err != nil { | |
145 | i.Job.Output.Error(fmt.Sprintf("Error while setting new filter value: %s", err)) | |
146 | return | |
147 | } else { | |
148 | i.Job.Config.Filters[name] = newFc | |
149 | } | |
150 | ||
151 | results := make([]ffuf.Result, 0) | |
152 | for _, res := range i.Job.Output.GetResults() { | |
153 | fakeResp := &ffuf.Response{ | |
154 | StatusCode: res.StatusCode, | |
155 | ContentLines: res.ContentLength, | |
156 | ContentWords: res.ContentWords, | |
157 | ContentLength: res.ContentLength, | |
158 | } | |
159 | filterOut, _ := newFc.Filter(fakeResp) | |
160 | if !filterOut { | |
161 | results = append(results, res) | |
162 | } | |
163 | } | |
164 | i.Job.Output.SetResults(results) | |
165 | } | |
166 | } | |
167 | ||
168 | func (i *interactive) printQueue() { | |
169 | if len(i.Job.QueuedJobs()) > 0 { | |
170 | i.Job.Output.Raw("Queued recursion jobs:\n") | |
171 | for index, job := range i.Job.QueuedJobs() { | |
172 | postfix := "" | |
173 | if index == 0 { | |
174 | postfix = " (active job)" | |
175 | } | |
176 | i.Job.Output.Raw(fmt.Sprintf(" [%d] : %s%s\n", index, job.Url, postfix)) | |
177 | } | |
178 | } else { | |
179 | i.Job.Output.Info("Recursion job queue is empty") | |
180 | } | |
181 | } | |
182 | ||
183 | func (i *interactive) deleteQueue(in string) { | |
184 | index, err := strconv.Atoi(in) | |
185 | if err != nil { | |
186 | i.Job.Output.Warning(fmt.Sprintf("Not a number: %s", in)) | |
187 | } else { | |
188 | if index < 0 || index > len(i.Job.QueuedJobs())-1 { | |
189 | i.Job.Output.Warning("No such queued job. Use \"queueshow\" to list the jobs in queue") | |
190 | } else if index == 0 { | |
191 | i.Job.Output.Warning("Cannot delete the currently running job. Use \"queueskip\" to advance to the next one") | |
192 | } else { | |
193 | i.Job.DeleteQueueItem(index) | |
194 | i.Job.Output.Info("Recursion job successfully deleted!") | |
195 | } | |
196 | } | |
197 | } | |
198 | func (i *interactive) printBanner() { | |
199 | i.Job.Output.Raw("entering interactive mode\ntype \"help\" for a list of commands, or ENTER to resume.\n") | |
200 | } | |
201 | ||
202 | func (i *interactive) printPrompt() { | |
203 | i.Job.Output.Raw("> ") | |
204 | } | |
205 | ||
206 | func (i *interactive) printHelp() { | |
207 | var fc, fl, fs, fw string | |
208 | for name, filter := range i.Job.Config.Filters { | |
209 | switch name { | |
210 | case "status": | |
211 | fc = "(active: " + filter.Repr() + ")" | |
212 | case "line": | |
213 | fl = "(active: " + filter.Repr() + ")" | |
214 | case "word": | |
215 | fw = "(active: " + filter.Repr() + ")" | |
216 | case "size": | |
217 | fs = "(active: " + filter.Repr() + ")" | |
218 | } | |
219 | } | |
220 | help := ` | |
221 | available commands: | |
222 | fc [value] - (re)configure status code filter %s | |
223 | fl [value] - (re)configure line count filter %s | |
224 | fw [value] - (re)configure word count filter %s | |
225 | fs [value] - (re)configure size filter %s | |
226 | queueshow - show recursive job queue | |
227 | queuedel [number] - delete a recursion job in the queue | |
228 | queueskip - advance to the next queued recursion job | |
229 | restart - restart and resume the current ffuf job | |
230 | resume - resume current ffuf job (or: ENTER) | |
231 | show - show results | |
232 | savejson [filename] - save current matches to a file | |
233 | help - you are looking at it | |
234 | ` | |
235 | i.Job.Output.Raw(fmt.Sprintf(help, fc, fl, fw, fs)) | |
236 | } |
0 | // +build windows | |
1 | ||
2 | package interactive | |
3 | ||
4 | import ( | |
5 | "os" | |
6 | "syscall" | |
7 | ) | |
8 | ||
// termHandle returns a read handle to the Windows console input device
// (CONIN$) for reading interactive user input.
func termHandle() (*os.File, error) {
	var tty *os.File
	// Probe CONIN$ with a raw syscall.Open first so console availability
	// is checked before the os.Open below.
	// NOTE(review): the descriptor returned by syscall.Open is discarded
	// and never closed — this looks like a handle leak; confirm whether
	// this extra open is actually required on Windows before removing it.
	_, err := syscall.Open("CONIN$", syscall.O_RDWR, 0)
	if err != nil {
		return tty, err
	}
	tty, err = os.Open("CONIN$")
	if err != nil {
		return tty, err
	}
	return tty, nil
}
8 | 8 | "github.com/ffuf/ffuf/pkg/ffuf" |
9 | 9 | ) |
10 | 10 | |
11 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "resultfile"} | |
11 | var staticheaders = []string{"url", "redirectlocation", "position", "status_code", "content_length", "content_words", "content_lines", "content_type", "resultfile"} | |
12 | 12 | |
13 | func writeCSV(config *ffuf.Config, res []Result, encode bool) error { | |
14 | ||
15 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
13 | func writeCSV(filename string, config *ffuf.Config, res []ffuf.Result, encode bool) error { | |
14 | ||
15 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
16 | 16 | return nil |
17 | 17 | } |
18 | ||
18 | ||
19 | 19 | header := make([]string, 0) |
20 | f, err := os.Create(config.OutputFile) | |
20 | f, err := os.Create(filename) | |
21 | 21 | if err != nil { |
22 | 22 | return err |
23 | 23 | } |
55 | 55 | return base64.StdEncoding.EncodeToString(in) |
56 | 56 | } |
57 | 57 | |
58 | func toCSV(r Result) []string { | |
58 | func toCSV(r ffuf.Result) []string { | |
59 | 59 | res := make([]string, 0) |
60 | 60 | for _, v := range r.Input { |
61 | 61 | res = append(res, string(v)) |
67 | 67 | res = append(res, strconv.FormatInt(r.ContentLength, 10)) |
68 | 68 | res = append(res, strconv.FormatInt(r.ContentWords, 10)) |
69 | 69 | res = append(res, strconv.FormatInt(r.ContentLines, 10)) |
70 | res = append(res, r.ContentType) | |
70 | 71 | res = append(res, r.ResultFile) |
71 | 72 | return res |
72 | 73 | } |
11 | 11 | CommandLine string |
12 | 12 | Time string |
13 | 13 | Keys []string |
14 | Results []Result | |
14 | Results []ffuf.Result | |
15 | 15 | } |
16 | 16 | |
17 | 17 | const ( |
75 | 75 | <th>Position</th> |
76 | 76 | <th>Length</th> |
77 | 77 | <th>Words</th> |
78 | <th>Lines</th> | |
78 | <th>Lines</th> | |
79 | <th>Type</th> | |
79 | 80 | <th>Resultfile</th> |
80 | 81 | </tr> |
81 | 82 | </thead> |
83 | 84 | <tbody> |
84 | 85 | {{range $result := .Results}} |
85 | 86 | <div style="display:none"> |
86 | |result_raw|{{ $result.StatusCode }}{{ range $keyword, $value := $result.Input }}|{{ $value | printf "%s" }}{{ end }}|{{ $result.Url }}|{{ $result.RedirectLocation }}|{{ $result.Position }}|{{ $result.ContentLength }}|{{ $result.ContentWords }}|{{ $result.ContentLines }}| | |
87 | |result_raw|{{ $result.StatusCode }}{{ range $keyword, $value := $result.Input }}|{{ $value | printf "%s" }}{{ end }}|{{ $result.Url }}|{{ $result.RedirectLocation }}|{{ $result.Position }}|{{ $result.ContentLength }}|{{ $result.ContentWords }}|{{ $result.ContentLines }}|{{ $result.ContentType }}| | |
87 | 88 | </div> |
88 | 89 | <tr class="result-{{ $result.StatusCode }}" style="background-color: {{$result.HTMLColor}};"> |
89 | 90 | <td><font color="black" class="status-code">{{ $result.StatusCode }}</font></td> |
95 | 96 | <td>{{ $result.Position }}</td> |
96 | 97 | <td>{{ $result.ContentLength }}</td> |
97 | 98 | <td>{{ $result.ContentWords }}</td> |
98 | <td>{{ $result.ContentLines }}</td> | |
99 | <td>{{ $result.ContentLines }}</td> | |
100 | <td>{{ $result.ContentType }}</td> | |
99 | 101 | <td>{{ $result.ResultFile }}</td> |
100 | 102 | </tr> |
101 | 103 | {{ end }} |
142 | 144 | ) |
143 | 145 | |
144 | 146 | // colorizeResults returns a new slice with HTMLColor attribute |
145 | func colorizeResults(results []Result) []Result { | |
146 | newResults := make([]Result, 0) | |
147 | func colorizeResults(results []ffuf.Result) []ffuf.Result { | |
148 | newResults := make([]ffuf.Result, 0) | |
147 | 149 | |
148 | 150 | for _, r := range results { |
149 | 151 | result := r |
173 | 175 | return newResults |
174 | 176 | } |
175 | 177 | |
176 | func writeHTML(config *ffuf.Config, results []Result) error { | |
177 | ||
178 | if(config.OutputCreateEmptyFile && (len(results) == 0)){ | |
178 | func writeHTML(filename string, config *ffuf.Config, results []ffuf.Result) error { | |
179 | ||
180 | if config.OutputCreateEmptyFile && (len(results) == 0) { | |
179 | 181 | return nil |
180 | } | |
181 | ||
182 | } | |
183 | ||
182 | 184 | results = colorizeResults(results) |
183 | 185 | |
184 | 186 | ti := time.Now() |
195 | 197 | Keys: keywords, |
196 | 198 | } |
197 | 199 | |
198 | f, err := os.Create(config.OutputFile) | |
200 | f, err := os.Create(filename) | |
199 | 201 | if err != nil { |
200 | 202 | return err |
201 | 203 | } |
8 | 8 | ) |
9 | 9 | |
10 | 10 | type ejsonFileOutput struct { |
11 | CommandLine string `json:"commandline"` | |
12 | Time string `json:"time"` | |
13 | Results []Result `json:"results"` | |
14 | Config *ffuf.Config `json:"config"` | |
11 | CommandLine string `json:"commandline"` | |
12 | Time string `json:"time"` | |
13 | Results []ffuf.Result `json:"results"` | |
14 | Config *ffuf.Config `json:"config"` | |
15 | 15 | } |
16 | 16 | |
17 | 17 | type JsonResult struct { |
21 | 21 | ContentLength int64 `json:"length"` |
22 | 22 | ContentWords int64 `json:"words"` |
23 | 23 | ContentLines int64 `json:"lines"` |
24 | ContentType string `json:"content-type"` | |
24 | 25 | RedirectLocation string `json:"redirectlocation"` |
25 | 26 | ResultFile string `json:"resultfile"` |
26 | 27 | Url string `json:"url"` |
34 | 35 | Config *ffuf.Config `json:"config"` |
35 | 36 | } |
36 | 37 | |
37 | func writeEJSON(config *ffuf.Config, res []Result) error { | |
38 | ||
39 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
38 | func writeEJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { | |
39 | ||
40 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
40 | 41 | return nil |
41 | 42 | } |
42 | ||
43 | ||
43 | 44 | t := time.Now() |
44 | 45 | outJSON := ejsonFileOutput{ |
45 | 46 | CommandLine: config.CommandLine, |
51 | 52 | if err != nil { |
52 | 53 | return err |
53 | 54 | } |
54 | err = ioutil.WriteFile(config.OutputFile, outBytes, 0644) | |
55 | err = ioutil.WriteFile(filename, outBytes, 0644) | |
55 | 56 | if err != nil { |
56 | 57 | return err |
57 | 58 | } |
58 | 59 | return nil |
59 | 60 | } |
60 | 61 | |
61 | func writeJSON(config *ffuf.Config, res []Result) error { | |
62 | func writeJSON(filename string, config *ffuf.Config, res []ffuf.Result) error { | |
62 | 63 | t := time.Now() |
63 | 64 | jsonRes := make([]JsonResult, 0) |
64 | 65 | for _, r := range res { |
73 | 74 | ContentLength: r.ContentLength, |
74 | 75 | ContentWords: r.ContentWords, |
75 | 76 | ContentLines: r.ContentLines, |
77 | ContentType: r.ContentType, | |
76 | 78 | RedirectLocation: r.RedirectLocation, |
77 | 79 | ResultFile: r.ResultFile, |
78 | 80 | Url: r.Url, |
89 | 91 | if err != nil { |
90 | 92 | return err |
91 | 93 | } |
92 | err = ioutil.WriteFile(config.OutputFile, outBytes, 0644) | |
94 | err = ioutil.WriteFile(filename, outBytes, 0644) | |
93 | 95 | if err != nil { |
94 | 96 | return err |
95 | 97 | } |
13 | 13 | Command line : ` + "`{{.CommandLine}}`" + ` |
14 | 14 | Time: ` + "{{ .Time }}" + ` |
15 | 15 | |
16 | {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | ResultFile | | |
17 | {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | | |
18 | {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ResultFile }} | | |
16 | {{ range .Keys }}| {{ . }} {{ end }}| URL | Redirectlocation | Position | Status Code | Content Length | Content Words | Content Lines | Content Type | ResultFile | | |
17 | {{ range .Keys }}| :- {{ end }}| :-- | :--------------- | :---- | :------- | :---------- | :------------- | :------------ | :--------- | :----------- | | |
18 | {{range .Results}}{{ range $keyword, $value := .Input }}| {{ $value | printf "%s" }} {{ end }}| {{ .Url }} | {{ .RedirectLocation }} | {{ .Position }} | {{ .StatusCode }} | {{ .ContentLength }} | {{ .ContentWords }} | {{ .ContentLines }} | {{ .ContentType }} | {{ .ResultFile }} | | |
19 | 19 | {{end}}` // The template format is not pretty but follows the markdown guide |
20 | 20 | ) |
21 | 21 | |
22 | func writeMarkdown(config *ffuf.Config, res []Result) error { | |
22 | func writeMarkdown(filename string, config *ffuf.Config, res []ffuf.Result) error { | |
23 | 23 | |
24 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
24 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
25 | 25 | return nil |
26 | 26 | } |
27 | 27 | |
39 | 39 | Keys: keywords, |
40 | 40 | } |
41 | 41 | |
42 | f, err := os.Create(config.OutputFile) | |
42 | f, err := os.Create(filename) | |
43 | 43 | if err != nil { |
44 | 44 | return err |
45 | 45 | } |
6 | 6 | "os" |
7 | 7 | "path" |
8 | 8 | "strconv" |
9 | "strings" | |
9 | 10 | "time" |
10 | 11 | |
11 | 12 | "github.com/ffuf/ffuf/pkg/ffuf" |
25 | 26 | |
26 | 27 | type Stdoutput struct { |
27 | 28 | config *ffuf.Config |
28 | Results []Result | |
29 | } | |
30 | ||
31 | type Result struct { | |
32 | Input map[string][]byte `json:"input"` | |
33 | Position int `json:"position"` | |
34 | StatusCode int64 `json:"status"` | |
35 | ContentLength int64 `json:"length"` | |
36 | ContentWords int64 `json:"words"` | |
37 | ContentLines int64 `json:"lines"` | |
38 | RedirectLocation string `json:"redirectlocation"` | |
39 | Url string `json:"url"` | |
40 | ResultFile string `json:"resultfile"` | |
41 | Host string `json:"host"` | |
42 | HTMLColor string `json:"-"` | |
29 | Results []ffuf.Result | |
43 | 30 | } |
44 | 31 | |
45 | 32 | func NewStdoutput(conf *ffuf.Config) *Stdoutput { |
46 | 33 | var outp Stdoutput |
47 | 34 | outp.config = conf |
48 | outp.Results = []Result{} | |
35 | outp.Results = []ffuf.Result{} | |
49 | 36 | return &outp |
50 | 37 | } |
51 | 38 | |
52 | 39 | func (s *Stdoutput) Banner() { |
53 | fmt.Fprintf(os.Stderr, "%s\n v%s\n%s\n\n", BANNER_HEADER, ffuf.VERSION, BANNER_SEP) | |
40 | version := strings.ReplaceAll(ffuf.Version(), "<3", fmt.Sprintf("%s<3%s", ANSI_RED, ANSI_CLEAR)) | |
41 | fmt.Fprintf(os.Stderr, "%s\n v%s\n%s\n\n", BANNER_HEADER, version, BANNER_SEP) | |
54 | 42 | printOption([]byte("Method"), []byte(s.config.Method)) |
55 | 43 | printOption([]byte("URL"), []byte(s.config.Url)) |
56 | 44 | |
133 | 121 | |
134 | 122 | // Print matchers |
135 | 123 | for _, f := range s.config.Matchers { |
136 | printOption([]byte("Matcher"), []byte(f.Repr())) | |
124 | printOption([]byte("Matcher"), []byte(f.ReprVerbose())) | |
137 | 125 | } |
138 | 126 | // Print filters |
139 | 127 | for _, f := range s.config.Filters { |
140 | printOption([]byte("Filter"), []byte(f.Repr())) | |
128 | printOption([]byte("Filter"), []byte(f.ReprVerbose())) | |
141 | 129 | } |
142 | 130 | fmt.Fprintf(os.Stderr, "%s\n\n", BANNER_SEP) |
131 | } | |
132 | ||
133 | // Reset resets the result slice | |
134 | func (s *Stdoutput) Reset() { | |
135 | s.Results = make([]ffuf.Result, 0) | |
136 | } | |
137 | ||
138 | // GetResults returns the result slice | |
139 | func (s *Stdoutput) GetResults() []ffuf.Result { | |
140 | return s.Results | |
141 | } | |
142 | ||
143 | // SetResults sets the result slice | |
144 | func (s *Stdoutput) SetResults(results []ffuf.Result) { | |
145 | s.Results = results | |
143 | 146 | } |
144 | 147 | |
145 | 148 | func (s *Stdoutput) Progress(status ffuf.Progress) { |
163 | 166 | dur -= mins * time.Minute |
164 | 167 | secs := dur / time.Second |
165 | 168 | |
166 | fmt.Fprintf(os.Stderr, "%s:: Progress: [%d/%d]Â :: Job [%d/%d] :: %d req/sec :: Duration: [%d:%02d:%02d] :: Errors: %d ::", TERMINAL_CLEAR_LINE, status.ReqCount, status.ReqTotal, status.QueuePos, status.QueueTotal, reqRate, hours, mins, secs, status.ErrorCount) | |
169 | fmt.Fprintf(os.Stderr, "%s:: Progress: [%d/%d] :: Job [%d/%d] :: %d req/sec :: Duration: [%d:%02d:%02d] :: Errors: %d ::", TERMINAL_CLEAR_LINE, status.ReqCount, status.ReqTotal, status.QueuePos, status.QueueTotal, reqRate, hours, mins, secs, status.ErrorCount) | |
167 | 170 | } |
168 | 171 | |
169 | 172 | func (s *Stdoutput) Info(infostring string) { |
171 | 174 | fmt.Fprintf(os.Stderr, "%s", infostring) |
172 | 175 | } else { |
173 | 176 | if !s.config.Colors { |
174 | fmt.Fprintf(os.Stderr, "%s[INFO] %s\n", TERMINAL_CLEAR_LINE, infostring) | |
175 | } else { | |
176 | fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring) | |
177 | fmt.Fprintf(os.Stderr, "%s[INFO] %s\n\n", TERMINAL_CLEAR_LINE, infostring) | |
178 | } else { | |
179 | fmt.Fprintf(os.Stderr, "%s[%sINFO%s] %s\n\n", TERMINAL_CLEAR_LINE, ANSI_BLUE, ANSI_CLEAR, infostring) | |
177 | 180 | } |
178 | 181 | } |
179 | 182 | } |
195 | 198 | fmt.Fprintf(os.Stderr, "%s", warnstring) |
196 | 199 | } else { |
197 | 200 | if !s.config.Colors { |
198 | fmt.Fprintf(os.Stderr, "%s[WARN] %s", TERMINAL_CLEAR_LINE, warnstring) | |
201 | fmt.Fprintf(os.Stderr, "%s[WARN] %s\n", TERMINAL_CLEAR_LINE, warnstring) | |
199 | 202 | } else { |
200 | 203 | fmt.Fprintf(os.Stderr, "%s[%sWARN%s] %s\n", TERMINAL_CLEAR_LINE, ANSI_RED, ANSI_CLEAR, warnstring) |
201 | 204 | } |
202 | 205 | } |
203 | 206 | } |
204 | 207 | |
205 | func (s *Stdoutput) writeToAll(config *ffuf.Config, res []Result) error { | |
208 | func (s *Stdoutput) Raw(output string) { | |
209 | fmt.Fprintf(os.Stderr, "%s%s", TERMINAL_CLEAR_LINE, output) | |
210 | } | |
211 | ||
212 | func (s *Stdoutput) writeToAll(filename string, config *ffuf.Config, res []ffuf.Result) error { | |
206 | 213 | var err error |
207 | 214 | var BaseFilename string = s.config.OutputFile |
208 | 215 | |
209 | 216 | // Go through each type of write, adding |
210 | 217 | // the suffix to each output file. |
211 | 218 | |
212 | if(config.OutputCreateEmptyFile && (len(res) == 0)){ | |
219 | if config.OutputCreateEmptyFile && (len(res) == 0) { | |
213 | 220 | return nil |
214 | } | |
221 | } | |
215 | 222 | |
216 | 223 | s.config.OutputFile = BaseFilename + ".json" |
217 | err = writeJSON(s.config, s.Results) | |
224 | err = writeJSON(filename, s.config, s.Results) | |
218 | 225 | if err != nil { |
219 | 226 | s.Error(err.Error()) |
220 | 227 | } |
221 | 228 | |
222 | 229 | s.config.OutputFile = BaseFilename + ".ejson" |
223 | err = writeEJSON(s.config, s.Results) | |
230 | err = writeEJSON(filename, s.config, s.Results) | |
224 | 231 | if err != nil { |
225 | 232 | s.Error(err.Error()) |
226 | 233 | } |
227 | 234 | |
228 | 235 | s.config.OutputFile = BaseFilename + ".html" |
229 | err = writeHTML(s.config, s.Results) | |
236 | err = writeHTML(filename, s.config, s.Results) | |
230 | 237 | if err != nil { |
231 | 238 | s.Error(err.Error()) |
232 | 239 | } |
233 | 240 | |
234 | 241 | s.config.OutputFile = BaseFilename + ".md" |
235 | err = writeMarkdown(s.config, s.Results) | |
242 | err = writeMarkdown(filename, s.config, s.Results) | |
236 | 243 | if err != nil { |
237 | 244 | s.Error(err.Error()) |
238 | 245 | } |
239 | 246 | |
240 | 247 | s.config.OutputFile = BaseFilename + ".csv" |
241 | err = writeCSV(s.config, s.Results, false) | |
248 | err = writeCSV(filename, s.config, s.Results, false) | |
242 | 249 | if err != nil { |
243 | 250 | s.Error(err.Error()) |
244 | 251 | } |
245 | 252 | |
246 | 253 | s.config.OutputFile = BaseFilename + ".ecsv" |
247 | err = writeCSV(s.config, s.Results, true) | |
254 | err = writeCSV(filename, s.config, s.Results, true) | |
248 | 255 | if err != nil { |
249 | 256 | s.Error(err.Error()) |
250 | 257 | } |
253 | 260 | |
254 | 261 | } |
255 | 262 | |
263 | // SaveFile saves the current results to a file of a given type | |
264 | func (s *Stdoutput) SaveFile(filename, format string) error { | |
265 | var err error | |
266 | switch format { | |
267 | case "all": | |
268 | err = s.writeToAll(filename, s.config, s.Results) | |
269 | case "json": | |
270 | err = writeJSON(filename, s.config, s.Results) | |
271 | case "ejson": | |
272 | err = writeEJSON(filename, s.config, s.Results) | |
273 | case "html": | |
274 | err = writeHTML(filename, s.config, s.Results) | |
275 | case "md": | |
276 | err = writeMarkdown(filename, s.config, s.Results) | |
277 | case "csv": | |
278 | err = writeCSV(filename, s.config, s.Results, false) | |
279 | case "ecsv": | |
280 | err = writeCSV(filename, s.config, s.Results, true) | |
281 | } | |
282 | return err | |
283 | } | |
284 | ||
285 | // Finalize gets run after all the ffuf jobs are completed | |
256 | 286 | func (s *Stdoutput) Finalize() error { |
257 | 287 | var err error |
258 | 288 | if s.config.OutputFile != "" { |
259 | if s.config.OutputFormat == "all" { | |
260 | err = s.writeToAll(s.config, s.Results) | |
261 | } else if s.config.OutputFormat == "json" { | |
262 | err = writeJSON(s.config, s.Results) | |
263 | } else if s.config.OutputFormat == "ejson" { | |
264 | err = writeEJSON(s.config, s.Results) | |
265 | } else if s.config.OutputFormat == "html" { | |
266 | err = writeHTML(s.config, s.Results) | |
267 | } else if s.config.OutputFormat == "md" { | |
268 | err = writeMarkdown(s.config, s.Results) | |
269 | } else if s.config.OutputFormat == "csv" { | |
270 | err = writeCSV(s.config, s.Results, false) | |
271 | } else if s.config.OutputFormat == "ecsv" { | |
272 | err = writeCSV(s.config, s.Results, true) | |
273 | } | |
289 | err = s.SaveFile(s.config.OutputFile, s.config.OutputFormat) | |
274 | 290 | if err != nil { |
275 | 291 | s.Error(err.Error()) |
276 | 292 | } |
284 | 300 | if len(s.config.OutputDirectory) > 0 { |
285 | 301 | resp.ResultFile = s.writeResultToFile(resp) |
286 | 302 | } |
303 | ||
304 | inputs := make(map[string][]byte, len(resp.Request.Input)) | |
305 | for k, v := range resp.Request.Input { | |
306 | inputs[k] = v | |
307 | } | |
308 | sResult := ffuf.Result{ | |
309 | Input: inputs, | |
310 | Position: resp.Request.Position, | |
311 | StatusCode: resp.StatusCode, | |
312 | ContentLength: resp.ContentLength, | |
313 | ContentWords: resp.ContentWords, | |
314 | ContentLines: resp.ContentLines, | |
315 | ContentType: resp.ContentType, | |
316 | RedirectLocation: resp.GetRedirectLocation(false), | |
317 | Url: resp.Request.Url, | |
318 | ResultFile: resp.ResultFile, | |
319 | Host: resp.Request.Host, | |
320 | } | |
321 | s.Results = append(s.Results, sResult) | |
287 | 322 | // Output the result |
288 | s.printResult(resp) | |
289 | // Check if we need the data later | |
290 | if s.config.OutputFile != "" { | |
291 | // No need to store results if we're not going to use them later | |
292 | inputs := make(map[string][]byte, len(resp.Request.Input)) | |
293 | for k, v := range resp.Request.Input { | |
294 | inputs[k] = v | |
295 | } | |
296 | sResult := Result{ | |
297 | Input: inputs, | |
298 | Position: resp.Request.Position, | |
299 | StatusCode: resp.StatusCode, | |
300 | ContentLength: resp.ContentLength, | |
301 | ContentWords: resp.ContentWords, | |
302 | ContentLines: resp.ContentLines, | |
303 | RedirectLocation: resp.GetRedirectLocation(false), | |
304 | Url: resp.Request.Url, | |
305 | ResultFile: resp.ResultFile, | |
306 | Host: resp.Request.Host, | |
307 | } | |
308 | s.Results = append(s.Results, sResult) | |
309 | } | |
323 | s.PrintResult(sResult) | |
310 | 324 | } |
311 | 325 | |
312 | 326 | func (s *Stdoutput) writeResultToFile(resp ffuf.Response) string { |
334 | 348 | return fileName |
335 | 349 | } |
336 | 350 | |
337 | func (s *Stdoutput) printResult(resp ffuf.Response) { | |
351 | func (s *Stdoutput) PrintResult(res ffuf.Result) { | |
338 | 352 | if s.config.Quiet { |
339 | s.resultQuiet(resp) | |
340 | } else { | |
341 | if len(resp.Request.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 { | |
353 | s.resultQuiet(res) | |
354 | } else { | |
355 | if len(res.Input) > 1 || s.config.Verbose || len(s.config.OutputDirectory) > 0 { | |
342 | 356 | // Print a multi-line result (when using multiple input keywords and wordlists) |
343 | s.resultMultiline(resp) | |
344 | } else { | |
345 | s.resultNormal(resp) | |
346 | } | |
347 | } | |
348 | } | |
349 | ||
350 | func (s *Stdoutput) prepareInputsOneLine(resp ffuf.Response) string { | |
357 | s.resultMultiline(res) | |
358 | } else { | |
359 | s.resultNormal(res) | |
360 | } | |
361 | } | |
362 | } | |
363 | ||
364 | func (s *Stdoutput) prepareInputsOneLine(res ffuf.Result) string { | |
351 | 365 | inputs := "" |
352 | if len(resp.Request.Input) > 1 { | |
353 | for k, v := range resp.Request.Input { | |
366 | if len(res.Input) > 1 { | |
367 | for k, v := range res.Input { | |
354 | 368 | if inSlice(k, s.config.CommandKeywords) { |
355 | 369 | // If we're using external command for input, display the position instead of input |
356 | inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(resp.Request.Position)) | |
370 | inputs = fmt.Sprintf("%s%s : %s ", inputs, k, strconv.Itoa(res.Position)) | |
357 | 371 | } else { |
358 | 372 | inputs = fmt.Sprintf("%s%s : %s ", inputs, k, v) |
359 | 373 | } |
360 | 374 | } |
361 | 375 | } else { |
362 | for k, v := range resp.Request.Input { | |
376 | for k, v := range res.Input { | |
363 | 377 | if inSlice(k, s.config.CommandKeywords) { |
364 | 378 | // If we're using external command for input, display the position instead of input |
365 | inputs = strconv.Itoa(resp.Request.Position) | |
379 | inputs = strconv.Itoa(res.Position) | |
366 | 380 | } else { |
367 | 381 | inputs = string(v) |
368 | 382 | } |
371 | 385 | return inputs |
372 | 386 | } |
373 | 387 | |
374 | func (s *Stdoutput) resultQuiet(resp ffuf.Response) { | |
375 | fmt.Println(s.prepareInputsOneLine(resp)) | |
376 | } | |
377 | ||
378 | func (s *Stdoutput) resultMultiline(resp ffuf.Response) { | |
388 | func (s *Stdoutput) resultQuiet(res ffuf.Result) { | |
389 | fmt.Println(s.prepareInputsOneLine(res)) | |
390 | } | |
391 | ||
392 | func (s *Stdoutput) resultMultiline(res ffuf.Result) { | |
379 | 393 | var res_hdr, res_str string |
380 | 394 | res_str = "%s%s * %s: %s\n" |
381 | res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, resp.StatusCode, resp.ContentLength, resp.ContentWords, resp.ContentLines) | |
382 | res_hdr = s.colorize(res_hdr, resp.StatusCode) | |
395 | res_hdr = fmt.Sprintf("%s[Status: %d, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, res.StatusCode, res.ContentLength, res.ContentWords, res.ContentLines) | |
396 | res_hdr = s.colorize(res_hdr, res.StatusCode) | |
383 | 397 | reslines := "" |
384 | 398 | if s.config.Verbose { |
385 | reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, resp.Request.Url) | |
386 | redirectLocation := resp.GetRedirectLocation(false) | |
399 | reslines = fmt.Sprintf("%s%s| URL | %s\n", reslines, TERMINAL_CLEAR_LINE, res.Url) | |
400 | redirectLocation := res.RedirectLocation | |
387 | 401 | if redirectLocation != "" { |
388 | 402 | reslines = fmt.Sprintf("%s%s| --> | %s\n", reslines, TERMINAL_CLEAR_LINE, redirectLocation) |
389 | 403 | } |
390 | 404 | } |
391 | if resp.ResultFile != "" { | |
392 | reslines = fmt.Sprintf("%s%s| RES |Â %s\n", reslines, TERMINAL_CLEAR_LINE, resp.ResultFile) | |
393 | } | |
394 | for k, v := range resp.Request.Input { | |
405 | if res.ResultFile != "" { | |
406 | reslines = fmt.Sprintf("%s%s| RES | %s\n", reslines, TERMINAL_CLEAR_LINE, res.ResultFile) | |
407 | } | |
408 | for k, v := range res.Input { | |
395 | 409 | if inSlice(k, s.config.CommandKeywords) { |
396 | 410 | // If we're using external command for input, display the position instead of input |
397 | reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(resp.Request.Position)) | |
411 | reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, strconv.Itoa(res.Position)) | |
398 | 412 | } else { |
399 | 413 | // Wordlist input |
400 | 414 | reslines = fmt.Sprintf(res_str, reslines, TERMINAL_CLEAR_LINE, k, v) |
403 | 417 | fmt.Printf("%s\n%s\n", res_hdr, reslines) |
404 | 418 | } |
405 | 419 | |
406 | func (s *Stdoutput) resultNormal(resp ffuf.Response) { | |
407 | res := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(resp), s.colorize(fmt.Sprintf("%d", resp.StatusCode), resp.StatusCode), resp.ContentLength, resp.ContentWords, resp.ContentLines) | |
408 | fmt.Println(res) | |
420 | func (s *Stdoutput) resultNormal(res ffuf.Result) { | |
421 | resnormal := fmt.Sprintf("%s%-23s [Status: %s, Size: %d, Words: %d, Lines: %d]", TERMINAL_CLEAR_LINE, s.prepareInputsOneLine(res), s.colorize(fmt.Sprintf("%d", res.StatusCode), res.StatusCode), res.ContentLength, res.ContentWords, res.ContentLines) | |
422 | fmt.Println(resnormal) | |
409 | 423 | } |
410 | 424 | |
411 | 425 | func (s *Stdoutput) colorize(input string, status int64) string { |
103 | 103 | |
104 | 104 | // set default User-Agent header if not present |
105 | 105 | if _, ok := req.Headers["User-Agent"]; !ok { |
106 | req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.VERSION) | |
106 | req.Headers["User-Agent"] = fmt.Sprintf("%s v%s", "Fuzz Faster U Fool", ffuf.Version()) | |
107 | 107 | } |
108 | 108 | |
109 | 109 | // Handle Go http.Request special cases |