New upstream version 2.5.0
Sophie Brun
2 years ago
287 | 287 | "name": "Daniel Saxton", |
288 | 288 | "avatar_url": "https://avatars.githubusercontent.com/u/2658661?v=4", |
289 | 289 | "profile": "https://github.com/dsaxton", |
290 | "contributions": [ | |
291 | "ideas", | |
292 | "code" | |
293 | ] | |
294 | }, | |
295 | { | |
296 | "login": "narkopolo", | |
297 | "name": "narkopolo", | |
298 | "avatar_url": "https://avatars.githubusercontent.com/u/16690056?v=4", | |
299 | "profile": "https://github.com/narkopolo", | |
300 | "contributions": [ | |
301 | "ideas" | |
302 | ] | |
303 | }, | |
304 | { | |
305 | "login": "justinsteven", | |
306 | "name": "Justin Steven", | |
307 | "avatar_url": "https://avatars.githubusercontent.com/u/1893909?v=4", | |
308 | "profile": "https://ring0.lol", | |
309 | "contributions": [ | |
310 | "ideas" | |
311 | ] | |
312 | }, | |
313 | { | |
314 | "login": "7047payloads", | |
315 | "name": "7047payloads", | |
316 | "avatar_url": "https://avatars.githubusercontent.com/u/95562424?v=4", | |
317 | "profile": "https://github.com/7047payloads", | |
318 | "contributions": [ | |
319 | "code" | |
320 | ] | |
321 | }, | |
322 | { | |
323 | "login": "unkn0wnsyst3m", | |
324 | "name": "unkn0wnsyst3m", | |
325 | "avatar_url": "https://avatars.githubusercontent.com/u/21272239?v=4", | |
326 | "profile": "https://github.com/unkn0wnsyst3m", | |
327 | "contributions": [ | |
328 | "ideas" | |
329 | ] | |
330 | }, | |
331 | { | |
332 | "login": "its0x08", | |
333 | "name": "0x08", | |
334 | "avatar_url": "https://avatars.githubusercontent.com/u/15280042?v=4", | |
335 | "profile": "https://ironwort.me/", | |
336 | "contributions": [ | |
337 | "ideas" | |
338 | ] | |
339 | }, | |
340 | { | |
341 | "login": "MD-Levitan", | |
342 | "name": "kusok", | |
343 | "avatar_url": "https://avatars.githubusercontent.com/u/12116508?v=4", | |
344 | "profile": "https://github.com/MD-Levitan", | |
345 | "contributions": [ | |
346 | "ideas", | |
347 | "code" | |
348 | ] | |
349 | }, | |
350 | { | |
351 | "login": "godylockz", | |
352 | "name": "godylockz", | |
353 | "avatar_url": "https://avatars.githubusercontent.com/u/81207744?v=4", | |
354 | "profile": "https://github.com/godylockz", | |
290 | 355 | "contributions": [ |
291 | 356 | "ideas", |
292 | 357 | "code" |
15 | 15 | |
16 | 16 | ## Documentation |
17 | 17 | - [ ] New code is documented using [doc comments](https://doc.rust-lang.org/stable/rust-by-example/meta/doc.html) |
18 | - [ ] Documentation about your PR is included in the README, as needed | |
18 | - [ ] Documentation about your PR is included in the `docs`, as needed. The docs live in a [separate repository](https://epi052.github.io/feroxbuster-docs/docs/). Update the appropriate pages at the links below. | |
19 | - [ ] update [example config file section](https://epi052.github.io/feroxbuster-docs/docs/configuration/ferox-config-toml/) | |
20 | - [ ] update [help output section](https://epi052.github.io/feroxbuster-docs/docs/configuration/command-line/) | |
21 | - [ ] add an [example](https://epi052.github.io/feroxbuster-docs/docs/examples/) | |
22 | - [ ] update [comparison table](https://epi052.github.io/feroxbuster-docs/docs/compare/) | |
19 | 23 | |
20 | 24 | ## Additional Tests |
21 | 25 | - [ ] New code is unit tested |
7 | 7 | |
8 | 8 | # jetbrains metadata folder |
9 | 9 | .idea/ |
10 | ||
11 | # vscode metadata folder | |
12 | .vscode/ | |
10 | 13 | |
11 | 14 | # personal feroxbuster config for testing |
12 | 15 | ferox-config.toml |
165 | 165 | |
166 | 166 | feroxbuster uses the [`clippy`](https://rust-lang.github.io/rust-clippy/) code linter. |
167 | 167 | |
168 | The command that will ultimately be used in the CI pipeline for linting is `cargo clippy --all-targets --all-features -- -D warnings -A clippy::unnecessary_unwrap`. | |
168 | The command that will ultimately be used in the CI pipeline for linting is `cargo clippy --all-targets --all-features -- -D warnings -A clippy::mutex-atomic`. | |
169 | 169 | |
170 | 170 | Before submitting a Pull Request, the above command should be run. Please do not ignore any linting errors in code you write or modify, as they are meant to **help** by ensuring a clean and simple code base. |
171 | 171 |
11 | 11 | ] |
12 | 12 | |
13 | 13 | [[package]] |
14 | name = "ansi_term" | |
15 | version = "0.12.1" | |
16 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
17 | checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" | |
18 | dependencies = [ | |
19 | "winapi", | |
20 | ] | |
21 | ||
22 | [[package]] | |
23 | 14 | name = "anyhow" |
24 | version = "1.0.51" | |
25 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
26 | checksum = "8b26702f315f53b6071259e15dd9d64528213b44d61de1ec926eca7715d62203" | |
15 | version = "1.0.52" | |
16 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
17 | checksum = "84450d0b4a8bd1ba4144ce8ce718fbc5d071358b1e5384bace6536b3d1f2d5b3" | |
27 | 18 | |
28 | 19 | [[package]] |
29 | 20 | name = "ascii-canvas" |
46 | 37 | |
47 | 38 | [[package]] |
48 | 39 | name = "assert_cmd" |
49 | version = "2.0.2" | |
50 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
51 | checksum = "e996dc7940838b7ef1096b882e29ec30a3149a3a443cdc8dba19ed382eca1fe2" | |
40 | version = "2.0.4" | |
41 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
42 | checksum = "93ae1ddd39efd67689deb1979d80bad3bf7f2b09c6e6117c8d1f2443b5e2f83e" | |
52 | 43 | dependencies = [ |
53 | 44 | "bstr", |
54 | 45 | "doc-comment", |
295 | 286 | |
296 | 287 | [[package]] |
297 | 288 | name = "bumpalo" |
298 | version = "3.8.0" | |
299 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
300 | checksum = "8f1e260c3a9040a7c19a12468758f4c16f31a81a1fe087482be9570ec864bb6c" | |
289 | version = "3.9.1" | |
290 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
291 | checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" | |
292 | ||
293 | [[package]] | |
294 | name = "byteorder" | |
295 | version = "1.4.3" | |
296 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
297 | checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" | |
301 | 298 | |
302 | 299 | [[package]] |
303 | 300 | name = "bytes" |
331 | 328 | |
332 | 329 | [[package]] |
333 | 330 | name = "clap" |
334 | version = "2.34.0" | |
335 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
336 | checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" | |
337 | dependencies = [ | |
338 | "ansi_term", | |
331 | version = "3.0.7" | |
332 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
333 | checksum = "12e8611f9ae4e068fa3e56931fded356ff745e70987ff76924a6e0ab1c8ef2e3" | |
334 | dependencies = [ | |
339 | 335 | "atty", |
340 | 336 | "bitflags", |
337 | "indexmap", | |
338 | "lazy_static", | |
339 | "os_str_bytes", | |
341 | 340 | "strsim", |
341 | "termcolor", | |
342 | "terminal_size", | |
342 | 343 | "textwrap", |
343 | "unicode-width", | |
344 | "vec_map", | |
344 | ] | |
345 | ||
346 | [[package]] | |
347 | name = "clap_complete" | |
348 | version = "3.0.4" | |
349 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
350 | checksum = "d044e9db8cd0f68191becdeb5246b7462e4cf0c069b19ae00d1bf3fa9889498d" | |
351 | dependencies = [ | |
352 | "clap", | |
345 | 353 | ] |
346 | 354 | |
347 | 355 | [[package]] |
369 | 377 | ] |
370 | 378 | |
371 | 379 | [[package]] |
380 | name = "convert_case" | |
381 | version = "0.4.0" | |
382 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
383 | checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" | |
384 | ||
385 | [[package]] | |
372 | 386 | name = "core-foundation" |
373 | 387 | version = "0.9.2" |
374 | 388 | source = "registry+https://github.com/rust-lang/crates.io-index" |
386 | 400 | |
387 | 401 | [[package]] |
388 | 402 | name = "crossbeam-utils" |
389 | version = "0.8.5" | |
390 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
391 | checksum = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db" | |
403 | version = "0.8.6" | |
404 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
405 | checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" | |
392 | 406 | dependencies = [ |
393 | 407 | "cfg-if", |
394 | 408 | "lazy_static", |
426 | 440 | checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" |
427 | 441 | |
428 | 442 | [[package]] |
443 | name = "cssparser" | |
444 | version = "0.27.2" | |
445 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
446 | checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a" | |
447 | dependencies = [ | |
448 | "cssparser-macros", | |
449 | "dtoa-short", | |
450 | "itoa 0.4.8", | |
451 | "matches", | |
452 | "phf", | |
453 | "proc-macro2", | |
454 | "quote", | |
455 | "smallvec", | |
456 | "syn", | |
457 | ] | |
458 | ||
459 | [[package]] | |
460 | name = "cssparser-macros" | |
461 | version = "0.6.0" | |
462 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
463 | checksum = "dfae75de57f2b2e85e8768c3ea840fd159c8f33e2b6522c7835b7abac81be16e" | |
464 | dependencies = [ | |
465 | "quote", | |
466 | "syn", | |
467 | ] | |
468 | ||
469 | [[package]] | |
429 | 470 | name = "ctor" |
430 | 471 | version = "0.1.21" |
431 | 472 | source = "registry+https://github.com/rust-lang/crates.io-index" |
447 | 488 | |
448 | 489 | [[package]] |
449 | 490 | name = "curl" |
450 | version = "0.4.41" | |
451 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
452 | checksum = "1bc6d233563261f8db6ffb83bbaad5a73837a6e6b28868e926337ebbdece0be3" | |
491 | version = "0.4.42" | |
492 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
493 | checksum = "7de97b894edd5b5bcceef8b78d7da9b75b1d2f2f9a910569d0bde3dd31d84939" | |
453 | 494 | dependencies = [ |
454 | 495 | "curl-sys", |
455 | 496 | "libc", |
462 | 503 | |
463 | 504 | [[package]] |
464 | 505 | name = "curl-sys" |
465 | version = "0.4.51+curl-7.80.0" | |
466 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
467 | checksum = "d130987e6a6a34fe0889e1083022fa48cd90e6709a84be3fb8dd95801de5af20" | |
506 | version = "0.4.52+curl-7.81.0" | |
507 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
508 | checksum = "14b8c2d1023ea5fded5b7b892e4b8e95f70038a421126a056761a84246a28971" | |
468 | 509 | dependencies = [ |
469 | 510 | "cc", |
470 | 511 | "libc", |
477 | 518 | ] |
478 | 519 | |
479 | 520 | [[package]] |
521 | name = "derive_more" | |
522 | version = "0.99.17" | |
523 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
524 | checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" | |
525 | dependencies = [ | |
526 | "convert_case", | |
527 | "proc-macro2", | |
528 | "quote", | |
529 | "rustc_version", | |
530 | "syn", | |
531 | ] | |
532 | ||
533 | [[package]] | |
480 | 534 | name = "diff" |
481 | 535 | version = "0.1.12" |
482 | 536 | source = "registry+https://github.com/rust-lang/crates.io-index" |
483 | 537 | checksum = "0e25ea47919b1560c4e3b7fe0aaab9becf5b84a10325ddf7db0f0ba5e1026499" |
484 | 538 | |
485 | 539 | [[package]] |
486 | name = "difference" | |
487 | version = "2.0.0" | |
488 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
489 | checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" | |
490 | ||
491 | [[package]] | |
492 | 540 | name = "difflib" |
493 | 541 | version = "0.4.0" |
494 | 542 | source = "registry+https://github.com/rust-lang/crates.io-index" |
540 | 588 | version = "0.3.3" |
541 | 589 | source = "registry+https://github.com/rust-lang/crates.io-index" |
542 | 590 | checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" |
591 | ||
592 | [[package]] | |
593 | name = "dtoa" | |
594 | version = "0.4.8" | |
595 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
596 | checksum = "56899898ce76aaf4a0f24d914c97ea6ed976d42fec6ad33fcbb0a1103e07b2b0" | |
597 | ||
598 | [[package]] | |
599 | name = "dtoa-short" | |
600 | version = "0.3.3" | |
601 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
602 | checksum = "bde03329ae10e79ede66c9ce4dc930aa8599043b0743008548680f25b91502d6" | |
603 | dependencies = [ | |
604 | "dtoa", | |
605 | ] | |
606 | ||
607 | [[package]] | |
608 | name = "ego-tree" | |
609 | version = "0.6.2" | |
610 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
611 | checksum = "3a68a4904193147e0a8dec3314640e6db742afd5f6e634f428a6af230d9b3591" | |
543 | 612 | |
544 | 613 | [[package]] |
545 | 614 | name = "either" |
601 | 670 | |
602 | 671 | [[package]] |
603 | 672 | name = "feroxbuster" |
604 | version = "2.4.1" | |
673 | version = "2.5.0" | |
605 | 674 | dependencies = [ |
606 | 675 | "anyhow", |
607 | 676 | "assert_cmd", |
608 | 677 | "clap", |
678 | "clap_complete", | |
609 | 679 | "console", |
610 | 680 | "crossterm", |
611 | 681 | "ctrlc", |
623 | 693 | "regex", |
624 | 694 | "reqwest", |
625 | 695 | "rlimit", |
696 | "scraper", | |
626 | 697 | "serde", |
627 | 698 | "serde_json", |
628 | 699 | "serde_regex", |
678 | 749 | dependencies = [ |
679 | 750 | "matches", |
680 | 751 | "percent-encoding", |
752 | ] | |
753 | ||
754 | [[package]] | |
755 | name = "futf" | |
756 | version = "0.1.4" | |
757 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
758 | checksum = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b" | |
759 | dependencies = [ | |
760 | "mac", | |
761 | "new_debug_unreachable", | |
681 | 762 | ] |
682 | 763 | |
683 | 764 | [[package]] |
791 | 872 | checksum = "8fb6c4351f4f134772edf9bcd17de13b7fbcb2c56928b440d6823bd4dc9ebd80" |
792 | 873 | |
793 | 874 | [[package]] |
875 | name = "fxhash" | |
876 | version = "0.2.1" | |
877 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
878 | checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" | |
879 | dependencies = [ | |
880 | "byteorder", | |
881 | ] | |
882 | ||
883 | [[package]] | |
884 | name = "getopts" | |
885 | version = "0.2.21" | |
886 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
887 | checksum = "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" | |
888 | dependencies = [ | |
889 | "unicode-width", | |
890 | ] | |
891 | ||
892 | [[package]] | |
794 | 893 | name = "getrandom" |
795 | version = "0.2.3" | |
796 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
797 | checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753" | |
894 | version = "0.1.16" | |
895 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
896 | checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" | |
798 | 897 | dependencies = [ |
799 | 898 | "cfg-if", |
800 | 899 | "libc", |
801 | "wasi", | |
900 | "wasi 0.9.0+wasi-snapshot-preview1", | |
901 | ] | |
902 | ||
903 | [[package]] | |
904 | name = "getrandom" | |
905 | version = "0.2.4" | |
906 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
907 | checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" | |
908 | dependencies = [ | |
909 | "cfg-if", | |
910 | "libc", | |
911 | "wasi 0.10.2+wasi-snapshot-preview1", | |
802 | 912 | ] |
803 | 913 | |
804 | 914 | [[package]] |
816 | 926 | |
817 | 927 | [[package]] |
818 | 928 | name = "h2" |
819 | version = "0.3.9" | |
820 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
821 | checksum = "8f072413d126e57991455e0a922b31e4c8ba7c2ffbebf6b78b4f8521397d65cd" | |
929 | version = "0.3.10" | |
930 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
931 | checksum = "0c9de88456263e249e241fcd211d3954e2c9b0ef7ccfc235a444eb367cae3689" | |
822 | 932 | dependencies = [ |
823 | 933 | "bytes", |
824 | 934 | "fnv", |
849 | 959 | ] |
850 | 960 | |
851 | 961 | [[package]] |
962 | name = "html5ever" | |
963 | version = "0.25.1" | |
964 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
965 | checksum = "aafcf38a1a36118242d29b92e1b08ef84e67e4a5ed06e0a80be20e6a32bfed6b" | |
966 | dependencies = [ | |
967 | "log", | |
968 | "mac", | |
969 | "markup5ever", | |
970 | "proc-macro2", | |
971 | "quote", | |
972 | "syn", | |
973 | ] | |
974 | ||
975 | [[package]] | |
852 | 976 | name = "http" |
853 | version = "0.2.5" | |
854 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
855 | checksum = "1323096b05d41827dadeaee54c9981958c0f94e670bc94ed80037d1a7b8b186b" | |
977 | version = "0.2.6" | |
978 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
979 | checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" | |
856 | 980 | dependencies = [ |
857 | 981 | "bytes", |
858 | 982 | "fnv", |
859 | "itoa 0.4.8", | |
983 | "itoa 1.0.1", | |
860 | 984 | ] |
861 | 985 | |
862 | 986 | [[package]] |
884 | 1008 | |
885 | 1009 | [[package]] |
886 | 1010 | name = "httpmock" |
887 | version = "0.6.4" | |
888 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
889 | checksum = "67fc2a6377230dc7cc007c74c34665f92589b4a73ed503f1c91ede8de6df35f0" | |
1011 | version = "0.6.6" | |
1012 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1013 | checksum = "c159c4fc205e6c1a9b325cb7ec135d13b5f47188ce175dabb76ec847f331d9bd" | |
890 | 1014 | dependencies = [ |
891 | 1015 | "assert-json-diff", |
892 | 1016 | "async-object-pool", |
894 | 1018 | "base64", |
895 | 1019 | "basic-cookies", |
896 | 1020 | "crossbeam-utils", |
897 | "difference", | |
898 | 1021 | "form_urlencoded", |
899 | 1022 | "futures-util", |
900 | 1023 | "hyper", |
902 | 1025 | "lazy_static", |
903 | 1026 | "levenshtein", |
904 | 1027 | "log", |
905 | "qstring", | |
906 | 1028 | "regex", |
907 | 1029 | "serde", |
908 | 1030 | "serde_json", |
909 | 1031 | "serde_regex", |
1032 | "similar", | |
910 | 1033 | "tokio", |
1034 | "url", | |
911 | 1035 | ] |
912 | 1036 | |
913 | 1037 | [[package]] |
966 | 1090 | |
967 | 1091 | [[package]] |
968 | 1092 | name = "indexmap" |
969 | version = "1.7.0" | |
970 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
971 | checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5" | |
1093 | version = "1.8.0" | |
1094 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1095 | checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" | |
972 | 1096 | dependencies = [ |
973 | 1097 | "autocfg", |
974 | 1098 | "hashbrown", |
1173 | 1297 | ] |
1174 | 1298 | |
1175 | 1299 | [[package]] |
1300 | name = "mac" | |
1301 | version = "0.1.1" | |
1302 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1303 | checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" | |
1304 | ||
1305 | [[package]] | |
1306 | name = "markup5ever" | |
1307 | version = "0.10.1" | |
1308 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1309 | checksum = "a24f40fb03852d1cdd84330cddcaf98e9ec08a7b7768e952fad3b4cf048ec8fd" | |
1310 | dependencies = [ | |
1311 | "log", | |
1312 | "phf", | |
1313 | "phf_codegen", | |
1314 | "string_cache", | |
1315 | "string_cache_codegen", | |
1316 | "tendril", | |
1317 | ] | |
1318 | ||
1319 | [[package]] | |
1176 | 1320 | name = "matches" |
1177 | 1321 | version = "0.1.9" |
1178 | 1322 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1259 | 1403 | ] |
1260 | 1404 | |
1261 | 1405 | [[package]] |
1406 | name = "nodrop" | |
1407 | version = "0.1.14" | |
1408 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1409 | checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" | |
1410 | ||
1411 | [[package]] | |
1262 | 1412 | name = "normalize-line-endings" |
1263 | 1413 | version = "0.3.0" |
1264 | 1414 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1284 | 1434 | |
1285 | 1435 | [[package]] |
1286 | 1436 | name = "num_cpus" |
1287 | version = "1.13.0" | |
1288 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1289 | checksum = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3" | |
1437 | version = "1.13.1" | |
1438 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1439 | checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" | |
1290 | 1440 | dependencies = [ |
1291 | 1441 | "hermit-abi", |
1292 | 1442 | "libc", |
1320 | 1470 | |
1321 | 1471 | [[package]] |
1322 | 1472 | name = "openssl-probe" |
1323 | version = "0.1.4" | |
1324 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1325 | checksum = "28988d872ab76095a6e6ac88d99b54fd267702734fd7ffe610ca27f533ddb95a" | |
1473 | version = "0.1.5" | |
1474 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1475 | checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" | |
1326 | 1476 | |
1327 | 1477 | [[package]] |
1328 | 1478 | name = "openssl-src" |
1348 | 1498 | ] |
1349 | 1499 | |
1350 | 1500 | [[package]] |
1501 | name = "os_str_bytes" | |
1502 | version = "6.0.0" | |
1503 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1504 | checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64" | |
1505 | dependencies = [ | |
1506 | "memchr", | |
1507 | ] | |
1508 | ||
1509 | [[package]] | |
1351 | 1510 | name = "parking" |
1352 | 1511 | version = "2.0.0" |
1353 | 1512 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1395 | 1554 | ] |
1396 | 1555 | |
1397 | 1556 | [[package]] |
1398 | name = "phf_shared" | |
1557 | name = "phf" | |
1399 | 1558 | version = "0.8.0" |
1400 | 1559 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1401 | checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" | |
1402 | dependencies = [ | |
1403 | "siphasher", | |
1404 | ] | |
1405 | ||
1406 | [[package]] | |
1407 | name = "pico-args" | |
1408 | version = "0.4.2" | |
1409 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1410 | checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" | |
1411 | ||
1412 | [[package]] | |
1413 | name = "pin-project" | |
1414 | version = "1.0.8" | |
1415 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1416 | checksum = "576bc800220cc65dac09e99e97b08b358cfab6e17078de8dc5fee223bd2d0c08" | |
1417 | dependencies = [ | |
1418 | "pin-project-internal", | |
1419 | ] | |
1420 | ||
1421 | [[package]] | |
1422 | name = "pin-project-internal" | |
1423 | version = "1.0.8" | |
1424 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1425 | checksum = "6e8fe8163d14ce7f0cdac2e040116f22eac817edabff0be91e8aff7e9accf389" | |
1426 | dependencies = [ | |
1560 | checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" | |
1561 | dependencies = [ | |
1562 | "phf_macros", | |
1563 | "phf_shared", | |
1564 | "proc-macro-hack", | |
1565 | ] | |
1566 | ||
1567 | [[package]] | |
1568 | name = "phf_codegen" | |
1569 | version = "0.8.0" | |
1570 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1571 | checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" | |
1572 | dependencies = [ | |
1573 | "phf_generator", | |
1574 | "phf_shared", | |
1575 | ] | |
1576 | ||
1577 | [[package]] | |
1578 | name = "phf_generator" | |
1579 | version = "0.8.0" | |
1580 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1581 | checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" | |
1582 | dependencies = [ | |
1583 | "phf_shared", | |
1584 | "rand", | |
1585 | ] | |
1586 | ||
1587 | [[package]] | |
1588 | name = "phf_macros" | |
1589 | version = "0.8.0" | |
1590 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1591 | checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c" | |
1592 | dependencies = [ | |
1593 | "phf_generator", | |
1594 | "phf_shared", | |
1595 | "proc-macro-hack", | |
1427 | 1596 | "proc-macro2", |
1428 | 1597 | "quote", |
1429 | 1598 | "syn", |
1430 | 1599 | ] |
1431 | 1600 | |
1432 | 1601 | [[package]] |
1602 | name = "phf_shared" | |
1603 | version = "0.8.0" | |
1604 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1605 | checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" | |
1606 | dependencies = [ | |
1607 | "siphasher", | |
1608 | ] | |
1609 | ||
1610 | [[package]] | |
1611 | name = "pico-args" | |
1612 | version = "0.4.2" | |
1613 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1614 | checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468" | |
1615 | ||
1616 | [[package]] | |
1617 | name = "pin-project" | |
1618 | version = "1.0.10" | |
1619 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1620 | checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" | |
1621 | dependencies = [ | |
1622 | "pin-project-internal", | |
1623 | ] | |
1624 | ||
1625 | [[package]] | |
1626 | name = "pin-project-internal" | |
1627 | version = "1.0.10" | |
1628 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1629 | checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" | |
1630 | dependencies = [ | |
1631 | "proc-macro2", | |
1632 | "quote", | |
1633 | "syn", | |
1634 | ] | |
1635 | ||
1636 | [[package]] | |
1433 | 1637 | name = "pin-project-lite" |
1434 | version = "0.2.7" | |
1435 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1436 | checksum = "8d31d11c69a6b52a174b42bdc0c30e5e11670f90788b2c471c31c1d17d449443" | |
1638 | version = "0.2.8" | |
1639 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1640 | checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" | |
1437 | 1641 | |
1438 | 1642 | [[package]] |
1439 | 1643 | name = "pin-utils" |
1462 | 1666 | |
1463 | 1667 | [[package]] |
1464 | 1668 | name = "ppv-lite86" |
1465 | version = "0.2.15" | |
1466 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1467 | checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba" | |
1669 | version = "0.2.16" | |
1670 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1671 | checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" | |
1468 | 1672 | |
1469 | 1673 | [[package]] |
1470 | 1674 | name = "precomputed-hash" |
1474 | 1678 | |
1475 | 1679 | [[package]] |
1476 | 1680 | name = "predicates" |
1477 | version = "2.1.0" | |
1478 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1479 | checksum = "95e5a7689e456ab905c22c2b48225bb921aba7c8dfa58440d68ba13f6222a715" | |
1681 | version = "2.1.1" | |
1682 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1683 | checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c" | |
1480 | 1684 | dependencies = [ |
1481 | 1685 | "difflib", |
1482 | 1686 | "float-cmp", |
1488 | 1692 | |
1489 | 1693 | [[package]] |
1490 | 1694 | name = "predicates-core" |
1491 | version = "1.0.2" | |
1492 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1493 | checksum = "57e35a3326b75e49aa85f5dc6ec15b41108cf5aee58eabb1f274dd18b73c2451" | |
1695 | version = "1.0.3" | |
1696 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1697 | checksum = "da1c2388b1513e1b605fcec39a95e0a9e8ef088f71443ef37099fa9ae6673fcb" | |
1494 | 1698 | |
1495 | 1699 | [[package]] |
1496 | 1700 | name = "predicates-tree" |
1497 | version = "1.0.4" | |
1498 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1499 | checksum = "338c7be2905b732ae3984a2f40032b5e94fd8f52505b186c7d4d68d193445df7" | |
1701 | version = "1.0.5" | |
1702 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1703 | checksum = "4d86de6de25020a36c6d3643a86d9a6a9f552107c0559c60ea03551b5e16c032" | |
1500 | 1704 | dependencies = [ |
1501 | 1705 | "predicates-core", |
1502 | 1706 | "termtree", |
1503 | 1707 | ] |
1504 | 1708 | |
1505 | 1709 | [[package]] |
1710 | name = "proc-macro-hack" | |
1711 | version = "0.5.19" | |
1712 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1713 | checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" | |
1714 | ||
1715 | [[package]] | |
1506 | 1716 | name = "proc-macro2" |
1507 | version = "1.0.34" | |
1508 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1509 | checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1" | |
1717 | version = "1.0.36" | |
1718 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1719 | checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" | |
1510 | 1720 | dependencies = [ |
1511 | 1721 | "unicode-xid", |
1512 | 1722 | ] |
1513 | 1723 | |
1514 | 1724 | [[package]] |
1515 | name = "qstring" | |
1516 | version = "0.7.2" | |
1517 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1518 | checksum = "d464fae65fff2680baf48019211ce37aaec0c78e9264c84a3e484717f965104e" | |
1519 | dependencies = [ | |
1520 | "percent-encoding", | |
1521 | ] | |
1522 | ||
1523 | [[package]] | |
1524 | 1725 | name = "quote" |
1525 | version = "1.0.10" | |
1526 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1527 | checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05" | |
1726 | version = "1.0.14" | |
1727 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1728 | checksum = "47aa80447ce4daf1717500037052af176af5d38cc3e571d9ec1c7353fc10c87d" | |
1528 | 1729 | dependencies = [ |
1529 | 1730 | "proc-macro2", |
1530 | 1731 | ] |
1531 | 1732 | |
1532 | 1733 | [[package]] |
1533 | 1734 | name = "rand" |
1534 | version = "0.8.4" | |
1535 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1536 | checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" | |
1537 | dependencies = [ | |
1735 | version = "0.7.3" | |
1736 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1737 | checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" | |
1738 | dependencies = [ | |
1739 | "getrandom 0.1.16", | |
1538 | 1740 | "libc", |
1539 | 1741 | "rand_chacha", |
1540 | 1742 | "rand_core", |
1541 | 1743 | "rand_hc", |
1744 | "rand_pcg", | |
1542 | 1745 | ] |
1543 | 1746 | |
1544 | 1747 | [[package]] |
1545 | 1748 | name = "rand_chacha" |
1546 | version = "0.3.1" | |
1547 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1548 | checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" | |
1749 | version = "0.2.2" | |
1750 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1751 | checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" | |
1549 | 1752 | dependencies = [ |
1550 | 1753 | "ppv-lite86", |
1551 | 1754 | "rand_core", |
1553 | 1756 | |
1554 | 1757 | [[package]] |
1555 | 1758 | name = "rand_core" |
1556 | version = "0.6.3" | |
1557 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1558 | checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" | |
1559 | dependencies = [ | |
1560 | "getrandom", | |
1759 | version = "0.5.1" | |
1760 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1761 | checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" | |
1762 | dependencies = [ | |
1763 | "getrandom 0.1.16", | |
1561 | 1764 | ] |
1562 | 1765 | |
1563 | 1766 | [[package]] |
1564 | 1767 | name = "rand_hc" |
1565 | version = "0.3.1" | |
1566 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1567 | checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" | |
1768 | version = "0.2.0" | |
1769 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1770 | checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" | |
1771 | dependencies = [ | |
1772 | "rand_core", | |
1773 | ] | |
1774 | ||
1775 | [[package]] | |
1776 | name = "rand_pcg" | |
1777 | version = "0.2.1" | |
1778 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1779 | checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" | |
1568 | 1780 | dependencies = [ |
1569 | 1781 | "rand_core", |
1570 | 1782 | ] |
1584 | 1796 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1585 | 1797 | checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64" |
1586 | 1798 | dependencies = [ |
1587 | "getrandom", | |
1799 | "getrandom 0.2.4", | |
1588 | 1800 | "redox_syscall", |
1589 | 1801 | ] |
1590 | 1802 | |
1622 | 1834 | |
1623 | 1835 | [[package]] |
1624 | 1836 | name = "reqwest" |
1625 | version = "0.11.7" | |
1626 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1627 | checksum = "07bea77bc708afa10e59905c3d4af7c8fd43c9214251673095ff8b14345fcbc5" | |
1837 | version = "0.11.9" | |
1838 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1839 | checksum = "87f242f1488a539a79bac6dbe7c8609ae43b7914b7736210f239a37cccb32525" | |
1628 | 1840 | dependencies = [ |
1629 | 1841 | "base64", |
1630 | 1842 | "bytes", |
1631 | 1843 | "encoding_rs", |
1632 | 1844 | "futures-core", |
1633 | 1845 | "futures-util", |
1846 | "h2", | |
1634 | 1847 | "http", |
1635 | 1848 | "http-body", |
1636 | 1849 | "hyper", |
1666 | 1879 | ] |
1667 | 1880 | |
1668 | 1881 | [[package]] |
1882 | name = "rustc_version" | |
1883 | version = "0.4.0" | |
1884 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1885 | checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" | |
1886 | dependencies = [ | |
1887 | "semver", | |
1888 | ] | |
1889 | ||
1890 | [[package]] | |
1669 | 1891 | name = "rustversion" |
1670 | 1892 | version = "1.0.6" |
1671 | 1893 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1694 | 1916 | checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" |
1695 | 1917 | |
1696 | 1918 | [[package]] |
1919 | name = "scraper" | |
1920 | version = "0.12.0" | |
1921 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1922 | checksum = "48e02aa790c80c2e494130dec6a522033b6a23603ffc06360e9fe6c611ea2c12" | |
1923 | dependencies = [ | |
1924 | "cssparser", | |
1925 | "ego-tree", | |
1926 | "getopts", | |
1927 | "html5ever", | |
1928 | "matches", | |
1929 | "selectors", | |
1930 | "smallvec", | |
1931 | "tendril", | |
1932 | ] | |
1933 | ||
1934 | [[package]] | |
1697 | 1935 | name = "security-framework" |
1698 | version = "2.4.2" | |
1699 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1700 | checksum = "525bc1abfda2e1998d152c45cf13e696f76d0a4972310b22fac1658b05df7c87" | |
1936 | version = "2.5.0" | |
1937 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1938 | checksum = "d09d3c15d814eda1d6a836f2f2b56a6abc1446c8a34351cb3180d3db92ffe4ce" | |
1701 | 1939 | dependencies = [ |
1702 | 1940 | "bitflags", |
1703 | 1941 | "core-foundation", |
1708 | 1946 | |
1709 | 1947 | [[package]] |
1710 | 1948 | name = "security-framework-sys" |
1711 | version = "2.4.2" | |
1712 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1713 | checksum = "a9dd14d83160b528b7bfd66439110573efcfbe281b17fc2ca9f39f550d619c7e" | |
1949 | version = "2.5.0" | |
1950 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1951 | checksum = "e90dd10c41c6bfc633da6e0c659bd25d31e0791e5974ac42970267d59eba87f7" | |
1714 | 1952 | dependencies = [ |
1715 | 1953 | "core-foundation-sys", |
1716 | 1954 | "libc", |
1717 | 1955 | ] |
1718 | 1956 | |
1719 | 1957 | [[package]] |
1958 | name = "selectors" | |
1959 | version = "0.22.0" | |
1960 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1961 | checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe" | |
1962 | dependencies = [ | |
1963 | "bitflags", | |
1964 | "cssparser", | |
1965 | "derive_more", | |
1966 | "fxhash", | |
1967 | "log", | |
1968 | "matches", | |
1969 | "phf", | |
1970 | "phf_codegen", | |
1971 | "precomputed-hash", | |
1972 | "servo_arc", | |
1973 | "smallvec", | |
1974 | "thin-slice", | |
1975 | ] | |
1976 | ||
1977 | [[package]] | |
1978 | name = "semver" | |
1979 | version = "1.0.4" | |
1980 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1981 | checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" | |
1982 | ||
1983 | [[package]] | |
1720 | 1984 | name = "serde" |
1721 | version = "1.0.132" | |
1722 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1723 | checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008" | |
1985 | version = "1.0.133" | |
1986 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1987 | checksum = "97565067517b60e2d1ea8b268e59ce036de907ac523ad83a0475da04e818989a" | |
1724 | 1988 | dependencies = [ |
1725 | 1989 | "serde_derive", |
1726 | 1990 | ] |
1727 | 1991 | |
1728 | 1992 | [[package]] |
1729 | 1993 | name = "serde_derive" |
1730 | version = "1.0.132" | |
1731 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1732 | checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276" | |
1994 | version = "1.0.133" | |
1995 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1996 | checksum = "ed201699328568d8d08208fdd080e3ff594e6c422e438b6705905da01005d537" | |
1733 | 1997 | dependencies = [ |
1734 | 1998 | "proc-macro2", |
1735 | 1999 | "quote", |
1738 | 2002 | |
1739 | 2003 | [[package]] |
1740 | 2004 | name = "serde_json" |
1741 | version = "1.0.73" | |
1742 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1743 | checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5" | |
2005 | version = "1.0.75" | |
2006 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2007 | checksum = "c059c05b48c5c0067d4b4b2b4f0732dd65feb52daf7e0ea09cd87e7dadc1af79" | |
1744 | 2008 | dependencies = [ |
1745 | 2009 | "itoa 1.0.1", |
1746 | 2010 | "ryu", |
1759 | 2023 | |
1760 | 2024 | [[package]] |
1761 | 2025 | name = "serde_urlencoded" |
1762 | version = "0.7.0" | |
1763 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1764 | checksum = "edfa57a7f8d9c1d260a549e7224100f6c43d43f9103e06dd8b4095a9b2b43ce9" | |
2026 | version = "0.7.1" | |
2027 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2028 | checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" | |
1765 | 2029 | dependencies = [ |
1766 | 2030 | "form_urlencoded", |
1767 | "itoa 0.4.8", | |
2031 | "itoa 1.0.1", | |
1768 | 2032 | "ryu", |
1769 | 2033 | "serde", |
1770 | 2034 | ] |
1771 | 2035 | |
1772 | 2036 | [[package]] |
2037 | name = "servo_arc" | |
2038 | version = "0.1.1" | |
2039 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2040 | checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432" | |
2041 | dependencies = [ | |
2042 | "nodrop", | |
2043 | "stable_deref_trait", | |
2044 | ] | |
2045 | ||
2046 | [[package]] | |
1773 | 2047 | name = "signal-hook" |
1774 | version = "0.3.12" | |
1775 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1776 | checksum = "c35dfd12afb7828318348b8c408383cf5071a086c1d4ab1c0f9840ec92dbb922" | |
2048 | version = "0.3.13" | |
2049 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2050 | checksum = "647c97df271007dcea485bb74ffdb57f2e683f1306c854f468a0c244badabf2d" | |
1777 | 2051 | dependencies = [ |
1778 | 2052 | "libc", |
1779 | 2053 | "signal-hook-registry", |
1800 | 2074 | ] |
1801 | 2075 | |
1802 | 2076 | [[package]] |
2077 | name = "similar" | |
2078 | version = "2.1.0" | |
2079 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2080 | checksum = "2e24979f63a11545f5f2c60141afe249d4f19f84581ea2138065e400941d83d3" | |
2081 | ||
2082 | [[package]] | |
1803 | 2083 | name = "siphasher" |
1804 | version = "0.3.7" | |
1805 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1806 | checksum = "533494a8f9b724d33625ab53c6c4800f7cc445895924a8ef649222dcb76e938b" | |
2084 | version = "0.3.8" | |
2085 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2086 | checksum = "ba1eead9e94aa5a2e02de9e7839f96a007f686ae7a1d57c7797774810d24908a" | |
1807 | 2087 | |
1808 | 2088 | [[package]] |
1809 | 2089 | name = "slab" |
1824 | 2104 | |
1825 | 2105 | [[package]] |
1826 | 2106 | name = "smallvec" |
1827 | version = "1.7.0" | |
1828 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1829 | checksum = "1ecab6c735a6bb4139c0caafd0cc3635748bbb3acf4550e8138122099251f309" | |
2107 | version = "1.8.0" | |
2108 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2109 | checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" | |
1830 | 2110 | |
1831 | 2111 | [[package]] |
1832 | 2112 | name = "socket2" |
1837 | 2117 | "libc", |
1838 | 2118 | "winapi", |
1839 | 2119 | ] |
2120 | ||
2121 | [[package]] | |
2122 | name = "stable_deref_trait" | |
2123 | version = "1.2.0" | |
2124 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2125 | checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" | |
1840 | 2126 | |
1841 | 2127 | [[package]] |
1842 | 2128 | name = "string_cache" |
1849 | 2135 | "parking_lot", |
1850 | 2136 | "phf_shared", |
1851 | 2137 | "precomputed-hash", |
2138 | "serde", | |
2139 | ] | |
2140 | ||
2141 | [[package]] | |
2142 | name = "string_cache_codegen" | |
2143 | version = "0.5.1" | |
2144 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2145 | checksum = "f24c8e5e19d22a726626f1a5e16fe15b132dcf21d10177fa5a45ce7962996b97" | |
2146 | dependencies = [ | |
2147 | "phf_generator", | |
2148 | "phf_shared", | |
2149 | "proc-macro2", | |
2150 | "quote", | |
1852 | 2151 | ] |
1853 | 2152 | |
1854 | 2153 | [[package]] |
1855 | 2154 | name = "strsim" |
1856 | version = "0.8.0" | |
1857 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1858 | checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" | |
2155 | version = "0.10.0" | |
2156 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2157 | checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" | |
1859 | 2158 | |
1860 | 2159 | [[package]] |
1861 | 2160 | name = "syn" |
1862 | version = "1.0.82" | |
1863 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1864 | checksum = "8daf5dd0bb60cbd4137b1b587d2fc0ae729bc07cf01cd70b36a1ed5ade3b9d59" | |
2161 | version = "1.0.85" | |
2162 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2163 | checksum = "a684ac3dcd8913827e18cd09a68384ee66c1de24157e3c556c9ab16d85695fb7" | |
1865 | 2164 | dependencies = [ |
1866 | 2165 | "proc-macro2", |
1867 | 2166 | "quote", |
1870 | 2169 | |
1871 | 2170 | [[package]] |
1872 | 2171 | name = "tempfile" |
1873 | version = "3.2.0" | |
1874 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1875 | checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22" | |
2172 | version = "3.3.0" | |
2173 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2174 | checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" | |
1876 | 2175 | dependencies = [ |
1877 | 2176 | "cfg-if", |
1878 | "libc", | |
1879 | "rand", | |
2177 | "fastrand", | |
2178 | "libc", | |
1880 | 2179 | "redox_syscall", |
1881 | 2180 | "remove_dir_all", |
1882 | 2181 | "winapi", |
1883 | 2182 | ] |
1884 | 2183 | |
1885 | 2184 | [[package]] |
2185 | name = "tendril" | |
2186 | version = "0.4.2" | |
2187 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2188 | checksum = "a9ef557cb397a4f0a5a3a628f06515f78563f2209e64d47055d9dc6052bf5e33" | |
2189 | dependencies = [ | |
2190 | "futf", | |
2191 | "mac", | |
2192 | "utf-8", | |
2193 | ] | |
2194 | ||
2195 | [[package]] | |
1886 | 2196 | name = "term" |
1887 | 2197 | version = "0.7.0" |
1888 | 2198 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1914 | 2224 | |
1915 | 2225 | [[package]] |
1916 | 2226 | name = "termtree" |
1917 | version = "0.2.3" | |
1918 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1919 | checksum = "13a4ec180a2de59b57434704ccfad967f789b12737738798fa08798cd5824c16" | |
2227 | version = "0.2.4" | |
2228 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2229 | checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b" | |
1920 | 2230 | |
1921 | 2231 | [[package]] |
1922 | 2232 | name = "textwrap" |
1923 | version = "0.11.0" | |
1924 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
1925 | checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" | |
1926 | dependencies = [ | |
1927 | "unicode-width", | |
1928 | ] | |
2233 | version = "0.14.2" | |
2234 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2235 | checksum = "0066c8d12af8b5acd21e00547c3797fde4e8677254a7ee429176ccebbe93dd80" | |
2236 | dependencies = [ | |
2237 | "terminal_size", | |
2238 | ] | |
2239 | ||
2240 | [[package]] | |
2241 | name = "thin-slice" | |
2242 | version = "0.1.1" | |
2243 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2244 | checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" | |
1929 | 2245 | |
1930 | 2246 | [[package]] |
1931 | 2247 | name = "thiserror" |
2153 | 2469 | ] |
2154 | 2470 | |
2155 | 2471 | [[package]] |
2472 | name = "utf-8" | |
2473 | version = "0.7.6" | |
2474 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2475 | checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" | |
2476 | ||
2477 | [[package]] | |
2156 | 2478 | name = "uuid" |
2157 | 2479 | version = "0.8.2" |
2158 | 2480 | source = "registry+https://github.com/rust-lang/crates.io-index" |
2159 | 2481 | checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" |
2160 | 2482 | dependencies = [ |
2161 | "getrandom", | |
2483 | "getrandom 0.2.4", | |
2162 | 2484 | ] |
2163 | 2485 | |
2164 | 2486 | [[package]] |
2178 | 2500 | checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" |
2179 | 2501 | |
2180 | 2502 | [[package]] |
2181 | name = "vec_map" | |
2182 | version = "0.8.2" | |
2183 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2184 | checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" | |
2185 | ||
2186 | [[package]] | |
2187 | 2503 | name = "version_check" |
2188 | version = "0.9.3" | |
2189 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2190 | checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe" | |
2504 | version = "0.9.4" | |
2505 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2506 | checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" | |
2191 | 2507 | |
2192 | 2508 | [[package]] |
2193 | 2509 | name = "wait-timeout" |
2213 | 2529 | "log", |
2214 | 2530 | "try-lock", |
2215 | 2531 | ] |
2532 | ||
2533 | [[package]] | |
2534 | name = "wasi" | |
2535 | version = "0.9.0+wasi-snapshot-preview1" | |
2536 | source = "registry+https://github.com/rust-lang/crates.io-index" | |
2537 | checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" | |
2216 | 2538 | |
2217 | 2539 | [[package]] |
2218 | 2540 | name = "wasi" |
0 | 0 | [package] |
1 | 1 | name = "feroxbuster" |
2 | version = "2.4.1" | |
3 | authors = ["Ben 'epi' Risher <[email protected]>"] | |
2 | version = "2.5.0" | |
3 | authors = ["Ben 'epi' Risher (@epi052)"] | |
4 | 4 | license = "MIT" |
5 | edition = "2018" | |
5 | edition = "2021" | |
6 | 6 | homepage = "https://github.com/epi052/feroxbuster" |
7 | 7 | repository = "https://github.com/epi052/feroxbuster" |
8 | 8 | description = "A fast, simple, recursive content discovery tool." |
15 | 15 | maintenance = { status = "actively-developed" } |
16 | 16 | |
17 | 17 | [build-dependencies] |
18 | clap = "2.33" | |
18 | clap = {version = "3.0", features = ["cargo"]} | |
19 | clap_complete = "3.0" | |
19 | 20 | regex = "1" |
20 | 21 | lazy_static = "1.4" |
21 | 22 | dirs = "4.0" |
22 | 23 | |
23 | 24 | [dependencies] |
25 | scraper = "0.12" | |
24 | 26 | futures = { version = "0.3"} |
25 | 27 | tokio = { version = "1.15", features = ["full"] } |
26 | 28 | tokio-util = {version = "0.6", features = ["codec"]} |
29 | 31 | reqwest = { version = "0.11", features = ["socks"] } |
30 | 32 | url = { version = "2.2", features = ["serde"]} # uses feature unification to add 'serde' to reqwest::Url |
31 | 33 | serde_regex = "1.1" |
32 | clap = "2.34" | |
34 | clap = {version = "3.0", features = ["wrap_help", "cargo"]} | |
33 | 35 | lazy_static = "1.4" |
34 | 36 | toml = "0.5" |
35 | 37 | serde = { version = "1.0", features = ["derive", "rc"] } |
48 | 50 | leaky-bucket = "0.10.0" |
49 | 51 | |
50 | 52 | [dev-dependencies] |
51 | tempfile = "3.1" | |
53 | tempfile = "3.3" | |
52 | 54 | httpmock = "0.6" |
53 | 55 | assert_cmd = "2.0" |
54 | 56 | predicates = "2.1" |
220 | 220 | <td align="center"><a href="https://github.com/hunter0x8"><img src="https://avatars.githubusercontent.com/u/46222314?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Muhammad Ahsan</b></sub></a><br /><a href="https://github.com/epi052/feroxbuster/issues?q=author%3Ahunter0x8" title="Bug reports">🐛</a></td> |
221 | 221 | <td align="center"><a href="https://github.com/cortantief"><img src="https://avatars.githubusercontent.com/u/34527333?v=4?s=100" width="100px;" alt=""/><br /><sub><b>cortantief</b></sub></a><br /><a href="https://github.com/epi052/feroxbuster/issues?q=author%3Acortantief" title="Bug reports">🐛</a> <a href="https://github.com/epi052/feroxbuster/commits?author=cortantief" title="Code">💻</a></td> |
222 | 222 | <td align="center"><a href="https://github.com/dsaxton"><img src="https://avatars.githubusercontent.com/u/2658661?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Daniel Saxton</b></sub></a><br /><a href="#ideas-dsaxton" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/epi052/feroxbuster/commits?author=dsaxton" title="Code">💻</a></td> |
223 | <td align="center"><a href="https://github.com/narkopolo"><img src="https://avatars.githubusercontent.com/u/16690056?v=4?s=100" width="100px;" alt=""/><br /><sub><b>narkopolo</b></sub></a><br /><a href="#ideas-narkopolo" title="Ideas, Planning, & Feedback">🤔</a></td> | |
224 | <td align="center"><a href="https://ring0.lol"><img src="https://avatars.githubusercontent.com/u/1893909?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Justin Steven</b></sub></a><br /><a href="#ideas-justinsteven" title="Ideas, Planning, & Feedback">🤔</a></td> | |
225 | <td align="center"><a href="https://github.com/7047payloads"><img src="https://avatars.githubusercontent.com/u/95562424?v=4?s=100" width="100px;" alt=""/><br /><sub><b>7047payloads</b></sub></a><br /><a href="https://github.com/epi052/feroxbuster/commits?author=7047payloads" title="Code">💻</a></td> | |
226 | <td align="center"><a href="https://github.com/unkn0wnsyst3m"><img src="https://avatars.githubusercontent.com/u/21272239?v=4?s=100" width="100px;" alt=""/><br /><sub><b>unkn0wnsyst3m</b></sub></a><br /><a href="#ideas-unkn0wnsyst3m" title="Ideas, Planning, & Feedback">🤔</a></td> | |
227 | </tr> | |
228 | <tr> | |
229 | <td align="center"><a href="https://ironwort.me/"><img src="https://avatars.githubusercontent.com/u/15280042?v=4?s=100" width="100px;" alt=""/><br /><sub><b>0x08</b></sub></a><br /><a href="#ideas-its0x08" title="Ideas, Planning, & Feedback">🤔</a></td> | |
230 | <td align="center"><a href="https://github.com/MD-Levitan"><img src="https://avatars.githubusercontent.com/u/12116508?v=4?s=100" width="100px;" alt=""/><br /><sub><b>kusok</b></sub></a><br /><a href="#ideas-MD-Levitan" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/epi052/feroxbuster/commits?author=MD-Levitan" title="Code">💻</a></td> | |
231 | <td align="center"><a href="https://github.com/godylockz"><img src="https://avatars.githubusercontent.com/u/81207744?v=4?s=100" width="100px;" alt=""/><br /><sub><b>godylockz</b></sub></a><br /><a href="#ideas-godylockz" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/epi052/feroxbuster/commits?author=godylockz" title="Code">💻</a></td> | |
223 | 232 | </tr> |
224 | 233 | </table> |
225 | 234 |
0 | 0 | use std::fs::{copy, create_dir_all, OpenOptions}; |
1 | 1 | use std::io::{Read, Seek, SeekFrom, Write}; |
2 | extern crate clap; | |
3 | extern crate dirs; | |
4 | 2 | |
5 | use clap::Shell; | |
3 | use clap_complete::{generate_to, shells}; | |
6 | 4 | |
7 | 5 | include!("src/parser.rs"); |
8 | 6 | |
17 | 15 | |
18 | 16 | let mut app = initialize(); |
19 | 17 | |
20 | let shells: [Shell; 4] = [Shell::Bash, Shell::Fish, Shell::Zsh, Shell::PowerShell]; | |
21 | ||
22 | for shell in &shells { | |
23 | app.gen_completions("feroxbuster", *shell, outdir); | |
24 | } | |
18 | generate_to(shells::Bash, &mut app, "feroxbuster", outdir).unwrap(); | |
19 | generate_to(shells::Zsh, &mut app, "feroxbuster", outdir).unwrap(); | |
20 | generate_to(shells::Fish, &mut app, "feroxbuster", outdir).unwrap(); | 
21 | generate_to(shells::PowerShell, &mut app, "feroxbuster", outdir).unwrap(); | |
22 | generate_to(shells::Elvish, &mut app, "feroxbuster", outdir).unwrap(); | |
25 | 23 | |
26 | 24 | // 0xdf pointed out an oddity when tab-completing options that expect file paths, the fix we |
27 | 25 | // landed on was to add -o plusdirs to the bash completion script. The following code aims to |
30 | 30 | # redirects = true |
31 | 31 | # insecure = true |
32 | 32 | # extensions = ["php", "html"] |
33 | # methods = ["GET", "POST"] | |
34 | # data = [11, 12, 13, 14, 15] | |
33 | 35 | # url_denylist = ["http://dont-scan.me", "https://also-not.me"] |
34 | 36 | # regex_denylist = ["/deny.*"] |
35 | 37 | # no_recursion = true |
14 | 14 | |
15 | 15 | local context curcontext="$curcontext" state line |
16 | 16 | _arguments "${_arguments_options[@]}" \ |
17 | '-w+[Path to the wordlist]' \ | |
18 | '--wordlist=[Path to the wordlist]' \ | |
19 | '*-u+[The target URL(s) (required, unless --stdin used)]' \ | |
20 | '*--url=[The target URL(s) (required, unless --stdin used)]' \ | |
21 | '-t+[Number of concurrent threads (default: 50)]' \ | |
22 | '--threads=[Number of concurrent threads (default: 50)]' \ | |
23 | '-d+[Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)]' \ | |
24 | '--depth=[Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)]' \ | |
25 | '-T+[Number of seconds before a request times out (default: 7)]' \ | |
26 | '--timeout=[Number of seconds before a request times out (default: 7)]' \ | |
27 | '-p+[Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)]' \ | |
28 | '--proxy=[Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)]' \ | |
29 | '-P+[Send only unfiltered requests through a Replay Proxy, instead of all requests]' \ | |
30 | '--replay-proxy=[Send only unfiltered requests through a Replay Proxy, instead of all requests]' \ | |
31 | '*-R+[Status Codes to send through a Replay Proxy when found (default: --status-codes value)]' \ | |
32 | '*--replay-codes=[Status Codes to send through a Replay Proxy when found (default: --status-codes value)]' \ | |
33 | '*-s+[Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)]' \ | |
34 | '*--status-codes=[Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)]' \ | |
35 | '-o+[Output file to write results to (use w/ --json for JSON entries)]' \ | |
36 | '--output=[Output file to write results to (use w/ --json for JSON entries)]' \ | |
37 | '(-u --url)--resume-from=[State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)]' \ | |
38 | '--debug-log=[Output file to write log entries (use w/ --json for JSON entries)]' \ | |
39 | '-a+[Sets the User-Agent (default: feroxbuster/VERSION)]' \ | |
40 | '--user-agent=[Sets the User-Agent (default: feroxbuster/VERSION)]' \ | |
41 | '*-x+[File extension(s) to search for (ex: -x php -x pdf js)]' \ | |
42 | '*--extensions=[File extension(s) to search for (ex: -x php -x pdf js)]' \ | |
43 | '*--dont-scan=[URL(s) or Regex Pattern(s) to exclude from recursion/scans]' \ | |
44 | '*-H+[Specify HTTP headers (ex: -H Header:val '\''stuff: things'\'')]' \ | |
45 | '*--headers=[Specify HTTP headers (ex: -H Header:val '\''stuff: things'\'')]' \ | |
46 | '*-Q+[Specify URL query parameters (ex: -Q token=stuff -Q secret=key)]' \ | |
47 | '*--query=[Specify URL query parameters (ex: -Q token=stuff -Q secret=key)]' \ | |
48 | '*-S+[Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)]' \ | |
49 | '*--filter-size=[Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)]' \ | |
50 | '*-X+[Filter out messages via regular expression matching on the response'\''s body (ex: -X '\''^ignore me$'\'')]' \ | |
51 | '*--filter-regex=[Filter out messages via regular expression matching on the response'\''s body (ex: -X '\''^ignore me$'\'')]' \ | |
52 | '*-W+[Filter out messages of a particular word count (ex: -W 312 -W 91,82)]' \ | |
53 | '*--filter-words=[Filter out messages of a particular word count (ex: -W 312 -W 91,82)]' \ | |
54 | '*-N+[Filter out messages of a particular line count (ex: -N 20 -N 31,30)]' \ | |
55 | '*--filter-lines=[Filter out messages of a particular line count (ex: -N 20 -N 31,30)]' \ | |
56 | '*-C+[Filter out status codes (deny list) (ex: -C 200 -C 401)]' \ | |
57 | '*--filter-status=[Filter out status codes (deny list) (ex: -C 200 -C 401)]' \ | |
58 | '*--filter-similar-to=[Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)]' \ | |
59 | '-L+[Limit total number of concurrent scans (default: 0, i.e. no limit)]' \ | |
60 | '--scan-limit=[Limit total number of concurrent scans (default: 0, i.e. no limit)]' \ | |
61 | '--parallel=[Run parallel feroxbuster instances (one child process per url passed via stdin)]' \ | |
62 | '(--auto-tune)--rate-limit=[Limit number of requests per second (per directory) (default: 0, i.e. no limit)]' \ | |
63 | '--time-limit=[Limit total run time of all scans (ex: --time-limit 10m)]' \ | |
17 | '-u+[The target URL (required, unless \[--stdin || --resume-from\] used)]:URL:_urls' \ | |
18 | '--url=[The target URL (required, unless \[--stdin || --resume-from\] used)]:URL:_urls' \ | |
19 | '(-u --url)--resume-from=[State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)]:STATE_FILE:_files' \ | |
20 | '-p+[Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)]:PROXY:_urls' \ | |
21 | '--proxy=[Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)]:PROXY:_urls' \ | |
22 | '-P+[Send only unfiltered requests through a Replay Proxy, instead of all requests]:REPLAY_PROXY:_urls' \ | |
23 | '--replay-proxy=[Send only unfiltered requests through a Replay Proxy, instead of all requests]:REPLAY_PROXY:_urls' \ | |
24 | '*-R+[Status Codes to send through a Replay Proxy when found (default: --status-codes value)]:REPLAY_CODE: ' \ | |
25 | '*--replay-codes=[Status Codes to send through a Replay Proxy when found (default: --status-codes value)]:REPLAY_CODE: ' \ | |
26 | '-a+[Sets the User-Agent (default: feroxbuster/2.5.0)]:USER_AGENT: ' \ | |
27 | '--user-agent=[Sets the User-Agent (default: feroxbuster/2.5.0)]:USER_AGENT: ' \ | |
28 | '*-x+[File extension(s) to search for (ex: -x php -x pdf js)]:FILE_EXTENSION: ' \ | |
29 | '*--extensions=[File extension(s) to search for (ex: -x php -x pdf js)]:FILE_EXTENSION: ' \ | |
30 | '*-m+[Which HTTP request method(s) should be sent (default: GET)]:HTTP_METHODS: ' \ | |
31 | '*--methods=[Which HTTP request method(s) should be sent (default: GET)]:HTTP_METHODS: ' \ | |
32 | '--data=[Request'\''s Body; can read data from a file if input starts with an @ (ex: @post.bin)]:DATA: ' \ | |
33 | '*-H+[Specify HTTP headers to be used in each request (ex: -H Header:val -H '\''stuff: things'\'')]:HEADER: ' \ | |
34 | '*--headers=[Specify HTTP headers to be used in each request (ex: -H Header:val -H '\''stuff: things'\'')]:HEADER: ' \ | |
35 | '*-b+[Specify HTTP cookies to be used in each request (ex: -b stuff=things)]:COOKIE: ' \ | |
36 | '*--cookies=[Specify HTTP cookies to be used in each request (ex: -b stuff=things)]:COOKIE: ' \ | |
37 | '*-Q+[Request'\''s URL query parameters (ex: -Q token=stuff -Q secret=key)]:QUERY: ' \ | |
38 | '*--query=[Request'\''s URL query parameters (ex: -Q token=stuff -Q secret=key)]:QUERY: ' \ | |
39 | '*--dont-scan=[URL(s) or Regex Pattern(s) to exclude from recursion/scans]:URL: ' \ | |
40 | '*-S+[Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)]:SIZE: ' \ | |
41 | '*--filter-size=[Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)]:SIZE: ' \ | |
42 | '*-X+[Filter out messages via regular expression matching on the response'\''s body (ex: -X '\''^ignore me$'\'')]:REGEX: ' \ | |
43 | '*--filter-regex=[Filter out messages via regular expression matching on the response'\''s body (ex: -X '\''^ignore me$'\'')]:REGEX: ' \ | |
44 | '*-W+[Filter out messages of a particular word count (ex: -W 312 -W 91,82)]:WORDS: ' \ | |
45 | '*--filter-words=[Filter out messages of a particular word count (ex: -W 312 -W 91,82)]:WORDS: ' \ | |
46 | '*-N+[Filter out messages of a particular line count (ex: -N 20 -N 31,30)]:LINES: ' \ | |
47 | '*--filter-lines=[Filter out messages of a particular line count (ex: -N 20 -N 31,30)]:LINES: ' \ | |
48 | '*-C+[Filter out status codes (deny list) (ex: -C 200 -C 401)]:STATUS_CODE: ' \ | |
49 | '*--filter-status=[Filter out status codes (deny list) (ex: -C 200 -C 401)]:STATUS_CODE: ' \ | |
50 | '*--filter-similar-to=[Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)]:UNWANTED_PAGE:_urls' \ | |
51 | '*-s+[Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)]:STATUS_CODE: ' \ | |
52 | '*--status-codes=[Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)]:STATUS_CODE: ' \ | |
53 | '-T+[Number of seconds before a client'\''s request times out (default: 7)]:SECONDS: ' \ | |
54 | '--timeout=[Number of seconds before a client'\''s request times out (default: 7)]:SECONDS: ' \ | |
55 | '-t+[Number of concurrent threads (default: 50)]:THREADS: ' \ | |
56 | '--threads=[Number of concurrent threads (default: 50)]:THREADS: ' \ | |
57 | '-d+[Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)]:RECURSION_DEPTH: ' \ | |
58 | '--depth=[Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)]:RECURSION_DEPTH: ' \ | |
59 | '-L+[Limit total number of concurrent scans (default: 0, i.e. no limit)]:SCAN_LIMIT: ' \ | |
60 | '--scan-limit=[Limit total number of concurrent scans (default: 0, i.e. no limit)]:SCAN_LIMIT: ' \ | |
61 | '--parallel=[Run parallel feroxbuster instances (one child process per url passed via stdin)]:PARALLEL_SCANS: ' \ | |
62 | '(--auto-tune)--rate-limit=[Limit number of requests per second (per directory) (default: 0, i.e. no limit)]:RATE_LIMIT: ' \ | |
63 | '--time-limit=[Limit total run time of all scans (ex: --time-limit 10m)]:TIME_SPEC: ' \ | |
64 | '-w+[Path to the wordlist]:FILE:_files' \ | |
65 | '--wordlist=[Path to the wordlist]:FILE:_files' \ | |
66 | '-o+[Output file to write results to (use w/ --json for JSON entries)]:FILE:_files' \ | |
67 | '--output=[Output file to write results to (use w/ --json for JSON entries)]:FILE:_files' \ | |
68 | '--debug-log=[Output file to write log entries (use w/ --json for JSON entries)]:FILE:_files' \ | |
69 | '-h[Print help information]' \ | |
70 | '--help[Print help information]' \ | |
71 | '-V[Print version information]' \ | |
72 | '--version[Print version information]' \ | |
73 | '(-u --url)--stdin[Read url(s) from STDIN]' \ | |
74 | '-A[Use a random User-Agent]' \ | |
75 | '--random-agent[Use a random User-Agent]' \ | |
76 | '-f[Append / to each request'\''s URL]' \ | |
77 | '--add-slash[Append / to each request'\''s URL]' \ | |
78 | '-r[Allow client to follow redirects]' \ | |
79 | '--redirects[Allow client to follow redirects]' \ | |
80 | '-k[Disables TLS certificate validation in the client]' \ | |
81 | '--insecure[Disables TLS certificate validation in the client]' \ | |
82 | '-n[Do not scan recursively]' \ | |
83 | '--no-recursion[Do not scan recursively]' \ | |
84 | '-e[Extract links from response body (html, javascript, etc...); make new requests based on findings]' \ | |
85 | '--extract-links[Extract links from response body (html, javascript, etc...); make new requests based on findings]' \ | |
86 | '(--auto-bail)--auto-tune[Automatically lower scan rate when an excessive amount of errors are encountered]' \ | |
87 | '--auto-bail[Automatically stop scanning when an excessive amount of errors are encountered]' \ | |
88 | '-D[Don'\''t auto-filter wildcard responses]' \ | |
89 | '--dont-filter[Don'\''t auto-filter wildcard responses]' \ | |
64 | 90 | '(--silent)*-v[Increase verbosity level (use -vv or more for greater effect. \[CAUTION\] 4 -v'\''s is probably too much)]' \ |
65 | 91 | '(--silent)*--verbosity[Increase verbosity level (use -vv or more for greater effect. \[CAUTION\] 4 -v'\''s is probably too much)]' \ |
66 | 92 | '(-q --quiet)--silent[Only print URLs + turn off logging (good for piping a list of urls to other commands)]' \ |
67 | 93 | '-q[Hide progress bars and banner (good for tmux windows w/ notifications)]' \ |
68 | 94 | '--quiet[Hide progress bars and banner (good for tmux windows w/ notifications)]' \ |
69 | '(--auto-bail)--auto-tune[Automatically lower scan rate when an excessive amount of errors are encountered]' \ | |
70 | '--auto-bail[Automatically stop scanning when an excessive amount of errors are encountered]' \ | |
71 | 95 | '--json[Emit JSON logs to --output and --debug-log instead of normal text]' \ |
72 | '-D[Don'\''t auto-filter wildcard responses]' \ | |
73 | '--dont-filter[Don'\''t auto-filter wildcard responses]' \ | |
74 | '-A[Use a random User-Agent]' \ | |
75 | '--random-agent[Use a random User-Agent]' \ | |
76 | '-r[Follow redirects]' \ | |
77 | '--redirects[Follow redirects]' \ | |
78 | '-k[Disables TLS certificate validation]' \ | |
79 | '--insecure[Disables TLS certificate validation]' \ | |
80 | '-n[Do not scan recursively]' \ | |
81 | '--no-recursion[Do not scan recursively]' \ | |
82 | '-f[Append / to each request]' \ | |
83 | '--add-slash[Append / to each request]' \ | |
84 | '(-u --url)--stdin[Read url(s) from STDIN]' \ | |
85 | '-e[Extract links from response body (html, javascript, etc...); make new requests based on findings (default: false)]' \ | |
86 | '--extract-links[Extract links from response body (html, javascript, etc...); make new requests based on findings (default: false)]' \ | |
87 | '-h[Prints help information]' \ | |
88 | '--help[Prints help information]' \ | |
89 | '-V[Prints version information]' \ | |
90 | '--version[Prints version information]' \ | |
91 | 96 | && ret=0 |
92 | ||
93 | 97 | } |
94 | 98 | |
95 | 99 | (( $+functions[_feroxbuster_commands] )) || |
96 | 100 | _feroxbuster_commands() { |
97 | local commands; commands=( | |
98 | ||
99 | ) | |
101 | local commands; commands=() | |
100 | 102 | _describe -t commands 'feroxbuster commands' commands "$@" |
101 | 103 | } |
102 | 104 | |
103 | _feroxbuster "$@"⏎ | |
105 | _feroxbuster "$@" |
19 | 19 | |
20 | 20 | $completions = @(switch ($command) { |
21 | 21 | 'feroxbuster' { |
22 | [CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Path to the wordlist') | |
23 | [CompletionResult]::new('--wordlist', 'wordlist', [CompletionResultType]::ParameterName, 'Path to the wordlist') | |
24 | [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'The target URL(s) (required, unless --stdin used)') | |
25 | [CompletionResult]::new('--url', 'url', [CompletionResultType]::ParameterName, 'The target URL(s) (required, unless --stdin used)') | |
26 | [CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'Number of concurrent threads (default: 50)') | |
27 | [CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of concurrent threads (default: 50)') | |
28 | [CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)') | |
29 | [CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)') | |
30 | [CompletionResult]::new('-T', 'T', [CompletionResultType]::ParameterName, 'Number of seconds before a request times out (default: 7)') | |
31 | [CompletionResult]::new('--timeout', 'timeout', [CompletionResultType]::ParameterName, 'Number of seconds before a request times out (default: 7)') | |
22 | [CompletionResult]::new('-u', 'u', [CompletionResultType]::ParameterName, 'The target URL (required, unless [--stdin || --resume-from] used)') | |
23 | [CompletionResult]::new('--url', 'url', [CompletionResultType]::ParameterName, 'The target URL (required, unless [--stdin || --resume-from] used)') | |
24 | [CompletionResult]::new('--resume-from', 'resume-from', [CompletionResultType]::ParameterName, 'State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)') | |
32 | 25 | [CompletionResult]::new('-p', 'p', [CompletionResultType]::ParameterName, 'Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)') |
33 | 26 | [CompletionResult]::new('--proxy', 'proxy', [CompletionResultType]::ParameterName, 'Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)') |
34 | 27 | [CompletionResult]::new('-P', 'P', [CompletionResultType]::ParameterName, 'Send only unfiltered requests through a Replay Proxy, instead of all requests') |
35 | 28 | [CompletionResult]::new('--replay-proxy', 'replay-proxy', [CompletionResultType]::ParameterName, 'Send only unfiltered requests through a Replay Proxy, instead of all requests') |
36 | 29 | [CompletionResult]::new('-R', 'R', [CompletionResultType]::ParameterName, 'Status Codes to send through a Replay Proxy when found (default: --status-codes value)') |
37 | 30 | [CompletionResult]::new('--replay-codes', 'replay-codes', [CompletionResultType]::ParameterName, 'Status Codes to send through a Replay Proxy when found (default: --status-codes value)') |
38 | [CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)') | |
39 | [CompletionResult]::new('--status-codes', 'status-codes', [CompletionResultType]::ParameterName, 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)') | |
40 | [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Output file to write results to (use w/ --json for JSON entries)') | |
41 | [CompletionResult]::new('--output', 'output', [CompletionResultType]::ParameterName, 'Output file to write results to (use w/ --json for JSON entries)') | |
42 | [CompletionResult]::new('--resume-from', 'resume-from', [CompletionResultType]::ParameterName, 'State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)') | |
43 | [CompletionResult]::new('--debug-log', 'debug-log', [CompletionResultType]::ParameterName, 'Output file to write log entries (use w/ --json for JSON entries)') | |
44 | [CompletionResult]::new('-a', 'a', [CompletionResultType]::ParameterName, 'Sets the User-Agent (default: feroxbuster/VERSION)') | |
45 | [CompletionResult]::new('--user-agent', 'user-agent', [CompletionResultType]::ParameterName, 'Sets the User-Agent (default: feroxbuster/VERSION)') | |
31 | [CompletionResult]::new('-a', 'a', [CompletionResultType]::ParameterName, 'Sets the User-Agent (default: feroxbuster/2.5.0)') | |
32 | [CompletionResult]::new('--user-agent', 'user-agent', [CompletionResultType]::ParameterName, 'Sets the User-Agent (default: feroxbuster/2.5.0)') | |
46 | 33 | [CompletionResult]::new('-x', 'x', [CompletionResultType]::ParameterName, 'File extension(s) to search for (ex: -x php -x pdf js)') |
47 | 34 | [CompletionResult]::new('--extensions', 'extensions', [CompletionResultType]::ParameterName, 'File extension(s) to search for (ex: -x php -x pdf js)') |
35 | [CompletionResult]::new('-m', 'm', [CompletionResultType]::ParameterName, 'Which HTTP request method(s) should be sent (default: GET)') | |
36 | [CompletionResult]::new('--methods', 'methods', [CompletionResultType]::ParameterName, 'Which HTTP request method(s) should be sent (default: GET)') | |
37 | [CompletionResult]::new('--data', 'data', [CompletionResultType]::ParameterName, 'Request''s Body; can read data from a file if input starts with an @ (ex: @post.bin)') | |
38 | [CompletionResult]::new('-H', 'H', [CompletionResultType]::ParameterName, 'Specify HTTP headers to be used in each request (ex: -H Header:val -H ''stuff: things'')') | |
39 | [CompletionResult]::new('--headers', 'headers', [CompletionResultType]::ParameterName, 'Specify HTTP headers to be used in each request (ex: -H Header:val -H ''stuff: things'')') | |
40 | [CompletionResult]::new('-b', 'b', [CompletionResultType]::ParameterName, 'Specify HTTP cookies to be used in each request (ex: -b stuff=things)') | |
41 | [CompletionResult]::new('--cookies', 'cookies', [CompletionResultType]::ParameterName, 'Specify HTTP cookies to be used in each request (ex: -b stuff=things)') | |
42 | [CompletionResult]::new('-Q', 'Q', [CompletionResultType]::ParameterName, 'Request''s URL query parameters (ex: -Q token=stuff -Q secret=key)') | |
43 | [CompletionResult]::new('--query', 'query', [CompletionResultType]::ParameterName, 'Request''s URL query parameters (ex: -Q token=stuff -Q secret=key)') | |
48 | 44 | [CompletionResult]::new('--dont-scan', 'dont-scan', [CompletionResultType]::ParameterName, 'URL(s) or Regex Pattern(s) to exclude from recursion/scans') |
49 | [CompletionResult]::new('-H', 'H', [CompletionResultType]::ParameterName, 'Specify HTTP headers (ex: -H Header:val ''stuff: things'')') | |
50 | [CompletionResult]::new('--headers', 'headers', [CompletionResultType]::ParameterName, 'Specify HTTP headers (ex: -H Header:val ''stuff: things'')') | |
51 | [CompletionResult]::new('-Q', 'Q', [CompletionResultType]::ParameterName, 'Specify URL query parameters (ex: -Q token=stuff -Q secret=key)') | |
52 | [CompletionResult]::new('--query', 'query', [CompletionResultType]::ParameterName, 'Specify URL query parameters (ex: -Q token=stuff -Q secret=key)') | |
53 | 45 | [CompletionResult]::new('-S', 'S', [CompletionResultType]::ParameterName, 'Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)') |
54 | 46 | [CompletionResult]::new('--filter-size', 'filter-size', [CompletionResultType]::ParameterName, 'Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)') |
55 | 47 | [CompletionResult]::new('-X', 'X', [CompletionResultType]::ParameterName, 'Filter out messages via regular expression matching on the response''s body (ex: -X ''^ignore me$'')') |
61 | 53 | [CompletionResult]::new('-C', 'C', [CompletionResultType]::ParameterName, 'Filter out status codes (deny list) (ex: -C 200 -C 401)') |
62 | 54 | [CompletionResult]::new('--filter-status', 'filter-status', [CompletionResultType]::ParameterName, 'Filter out status codes (deny list) (ex: -C 200 -C 401)') |
63 | 55 | [CompletionResult]::new('--filter-similar-to', 'filter-similar-to', [CompletionResultType]::ParameterName, 'Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)') |
56 | [CompletionResult]::new('-s', 's', [CompletionResultType]::ParameterName, 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)') | |
57 | [CompletionResult]::new('--status-codes', 'status-codes', [CompletionResultType]::ParameterName, 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)') | |
58 | [CompletionResult]::new('-T', 'T', [CompletionResultType]::ParameterName, 'Number of seconds before a client''s request times out (default: 7)') | |
59 | [CompletionResult]::new('--timeout', 'timeout', [CompletionResultType]::ParameterName, 'Number of seconds before a client''s request times out (default: 7)') | |
60 | [CompletionResult]::new('-t', 't', [CompletionResultType]::ParameterName, 'Number of concurrent threads (default: 50)') | |
61 | [CompletionResult]::new('--threads', 'threads', [CompletionResultType]::ParameterName, 'Number of concurrent threads (default: 50)') | |
62 | [CompletionResult]::new('-d', 'd', [CompletionResultType]::ParameterName, 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)') | |
63 | [CompletionResult]::new('--depth', 'depth', [CompletionResultType]::ParameterName, 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)') | |
64 | 64 | [CompletionResult]::new('-L', 'L', [CompletionResultType]::ParameterName, 'Limit total number of concurrent scans (default: 0, i.e. no limit)') |
65 | 65 | [CompletionResult]::new('--scan-limit', 'scan-limit', [CompletionResultType]::ParameterName, 'Limit total number of concurrent scans (default: 0, i.e. no limit)') |
66 | 66 | [CompletionResult]::new('--parallel', 'parallel', [CompletionResultType]::ParameterName, 'Run parallel feroxbuster instances (one child process per url passed via stdin)') |
67 | 67 | [CompletionResult]::new('--rate-limit', 'rate-limit', [CompletionResultType]::ParameterName, 'Limit number of requests per second (per directory) (default: 0, i.e. no limit)') |
68 | 68 | [CompletionResult]::new('--time-limit', 'time-limit', [CompletionResultType]::ParameterName, 'Limit total run time of all scans (ex: --time-limit 10m)') |
69 | [CompletionResult]::new('-w', 'w', [CompletionResultType]::ParameterName, 'Path to the wordlist') | |
70 | [CompletionResult]::new('--wordlist', 'wordlist', [CompletionResultType]::ParameterName, 'Path to the wordlist') | |
71 | [CompletionResult]::new('-o', 'o', [CompletionResultType]::ParameterName, 'Output file to write results to (use w/ --json for JSON entries)') | |
72 | [CompletionResult]::new('--output', 'output', [CompletionResultType]::ParameterName, 'Output file to write results to (use w/ --json for JSON entries)') | |
73 | [CompletionResult]::new('--debug-log', 'debug-log', [CompletionResultType]::ParameterName, 'Output file to write log entries (use w/ --json for JSON entries)') | |
74 | [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Print help information') | |
75 | [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Print help information') | |
76 | [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Print version information') | |
77 | [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Print version information') | |
78 | [CompletionResult]::new('--stdin', 'stdin', [CompletionResultType]::ParameterName, 'Read url(s) from STDIN') | |
79 | [CompletionResult]::new('-A', 'A', [CompletionResultType]::ParameterName, 'Use a random User-Agent') | |
80 | [CompletionResult]::new('--random-agent', 'random-agent', [CompletionResultType]::ParameterName, 'Use a random User-Agent') | |
81 | [CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Append / to each request''s URL') | |
82 | [CompletionResult]::new('--add-slash', 'add-slash', [CompletionResultType]::ParameterName, 'Append / to each request''s URL') | |
83 | [CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Allow client to follow redirects') | |
84 | [CompletionResult]::new('--redirects', 'redirects', [CompletionResultType]::ParameterName, 'Allow client to follow redirects') | |
85 | [CompletionResult]::new('-k', 'k', [CompletionResultType]::ParameterName, 'Disables TLS certificate validation in the client') | |
86 | [CompletionResult]::new('--insecure', 'insecure', [CompletionResultType]::ParameterName, 'Disables TLS certificate validation in the client') | |
87 | [CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Do not scan recursively') | |
88 | [CompletionResult]::new('--no-recursion', 'no-recursion', [CompletionResultType]::ParameterName, 'Do not scan recursively') | |
89 | [CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Extract links from response body (html, javascript, etc...); make new requests based on findings') | |
90 | [CompletionResult]::new('--extract-links', 'extract-links', [CompletionResultType]::ParameterName, 'Extract links from response body (html, javascript, etc...); make new requests based on findings') | |
91 | [CompletionResult]::new('--auto-tune', 'auto-tune', [CompletionResultType]::ParameterName, 'Automatically lower scan rate when an excessive amount of errors are encountered') | |
92 | [CompletionResult]::new('--auto-bail', 'auto-bail', [CompletionResultType]::ParameterName, 'Automatically stop scanning when an excessive amount of errors are encountered') | |
93 | [CompletionResult]::new('-D', 'D', [CompletionResultType]::ParameterName, 'Don''t auto-filter wildcard responses') | |
94 | [CompletionResult]::new('--dont-filter', 'dont-filter', [CompletionResultType]::ParameterName, 'Don''t auto-filter wildcard responses') | |
69 | 95 | [CompletionResult]::new('-v', 'v', [CompletionResultType]::ParameterName, 'Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v''s is probably too much)') |
70 | 96 | [CompletionResult]::new('--verbosity', 'verbosity', [CompletionResultType]::ParameterName, 'Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v''s is probably too much)') |
71 | 97 | [CompletionResult]::new('--silent', 'silent', [CompletionResultType]::ParameterName, 'Only print URLs + turn off logging (good for piping a list of urls to other commands)') |
72 | 98 | [CompletionResult]::new('-q', 'q', [CompletionResultType]::ParameterName, 'Hide progress bars and banner (good for tmux windows w/ notifications)') |
73 | 99 | [CompletionResult]::new('--quiet', 'quiet', [CompletionResultType]::ParameterName, 'Hide progress bars and banner (good for tmux windows w/ notifications)') |
74 | [CompletionResult]::new('--auto-tune', 'auto-tune', [CompletionResultType]::ParameterName, 'Automatically lower scan rate when an excessive amount of errors are encountered') | |
75 | [CompletionResult]::new('--auto-bail', 'auto-bail', [CompletionResultType]::ParameterName, 'Automatically stop scanning when an excessive amount of errors are encountered') | |
76 | 100 | [CompletionResult]::new('--json', 'json', [CompletionResultType]::ParameterName, 'Emit JSON logs to --output and --debug-log instead of normal text') |
77 | [CompletionResult]::new('-D', 'D', [CompletionResultType]::ParameterName, 'Don''t auto-filter wildcard responses') | |
78 | [CompletionResult]::new('--dont-filter', 'dont-filter', [CompletionResultType]::ParameterName, 'Don''t auto-filter wildcard responses') | |
79 | [CompletionResult]::new('-A', 'A', [CompletionResultType]::ParameterName, 'Use a random User-Agent') | |
80 | [CompletionResult]::new('--random-agent', 'random-agent', [CompletionResultType]::ParameterName, 'Use a random User-Agent') | |
81 | [CompletionResult]::new('-r', 'r', [CompletionResultType]::ParameterName, 'Follow redirects') | |
82 | [CompletionResult]::new('--redirects', 'redirects', [CompletionResultType]::ParameterName, 'Follow redirects') | |
83 | [CompletionResult]::new('-k', 'k', [CompletionResultType]::ParameterName, 'Disables TLS certificate validation') | |
84 | [CompletionResult]::new('--insecure', 'insecure', [CompletionResultType]::ParameterName, 'Disables TLS certificate validation') | |
85 | [CompletionResult]::new('-n', 'n', [CompletionResultType]::ParameterName, 'Do not scan recursively') | |
86 | [CompletionResult]::new('--no-recursion', 'no-recursion', [CompletionResultType]::ParameterName, 'Do not scan recursively') | |
87 | [CompletionResult]::new('-f', 'f', [CompletionResultType]::ParameterName, 'Append / to each request') | |
88 | [CompletionResult]::new('--add-slash', 'add-slash', [CompletionResultType]::ParameterName, 'Append / to each request') | |
89 | [CompletionResult]::new('--stdin', 'stdin', [CompletionResultType]::ParameterName, 'Read url(s) from STDIN') | |
90 | [CompletionResult]::new('-e', 'e', [CompletionResultType]::ParameterName, 'Extract links from response body (html, javascript, etc...); make new requests based on findings (default: false)') | |
91 | [CompletionResult]::new('--extract-links', 'extract-links', [CompletionResultType]::ParameterName, 'Extract links from response body (html, javascript, etc...); make new requests based on findings (default: false)') | |
92 | [CompletionResult]::new('-h', 'h', [CompletionResultType]::ParameterName, 'Prints help information') | |
93 | [CompletionResult]::new('--help', 'help', [CompletionResultType]::ParameterName, 'Prints help information') | |
94 | [CompletionResult]::new('-V', 'V', [CompletionResultType]::ParameterName, 'Prints version information') | |
95 | [CompletionResult]::new('--version', 'version', [CompletionResultType]::ParameterName, 'Prints version information') | |
96 | 101 | break |
97 | 102 | } |
98 | 103 | }) |
8 | 8 | for i in ${COMP_WORDS[@]} |
9 | 9 | do |
10 | 10 | case "${i}" in |
11 | feroxbuster) | |
11 | "$1") | |
12 | 12 | cmd="feroxbuster" |
13 | 13 | ;; |
14 | ||
15 | 14 | *) |
16 | 15 | ;; |
17 | 16 | esac |
19 | 18 | |
20 | 19 | case "${cmd}" in |
21 | 20 | feroxbuster) |
22 | opts=" -v -q -D -A -r -k -n -f -e -h -V -w -u -t -d -T -p -P -R -s -o -a -x -H -Q -S -X -W -N -C -L --verbosity --silent --quiet --auto-tune --auto-bail --json --dont-filter --random-agent --redirects --insecure --no-recursion --add-slash --stdin --extract-links --help --version --wordlist --url --threads --depth --timeout --proxy --replay-proxy --replay-codes --status-codes --output --resume-from --debug-log --user-agent --extensions --dont-scan --headers --query --filter-size --filter-regex --filter-words --filter-lines --filter-status --filter-similar-to --scan-limit --parallel --rate-limit --time-limit " | |
21 | opts="-h -V -u -p -P -R -a -A -x -m -H -b -Q -f -S -X -W -N -C -s -T -r -k -t -n -d -e -L -w -D -v -q -o --help --version --url --stdin --resume-from --proxy --replay-proxy --replay-codes --user-agent --random-agent --extensions --methods --data --headers --cookies --query --add-slash --dont-scan --filter-size --filter-regex --filter-words --filter-lines --filter-status --filter-similar-to --status-codes --timeout --redirects --insecure --threads --no-recursion --depth --extract-links --scan-limit --parallel --rate-limit --time-limit --wordlist --auto-tune --auto-bail --dont-filter --verbosity --silent --quiet --json --output --debug-log" | |
23 | 22 | if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then |
24 | 23 | COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) |
25 | 24 | return 0 |
26 | 25 | fi |
27 | 26 | case "${prev}" in |
28 | ||
27 | --url) | |
28 | COMPREPLY=($(compgen -f "${cur}")) | |
29 | return 0 | |
30 | ;; | |
31 | -u) | |
32 | COMPREPLY=($(compgen -f "${cur}")) | |
33 | return 0 | |
34 | ;; | |
35 | --resume-from) | |
36 | COMPREPLY=($(compgen -f "${cur}")) | |
37 | return 0 | |
38 | ;; | |
39 | --proxy) | |
40 | COMPREPLY=($(compgen -f "${cur}")) | |
41 | return 0 | |
42 | ;; | |
43 | -p) | |
44 | COMPREPLY=($(compgen -f "${cur}")) | |
45 | return 0 | |
46 | ;; | |
47 | --replay-proxy) | |
48 | COMPREPLY=($(compgen -f "${cur}")) | |
49 | return 0 | |
50 | ;; | |
51 | -P) | |
52 | COMPREPLY=($(compgen -f "${cur}")) | |
53 | return 0 | |
54 | ;; | |
55 | --replay-codes) | |
56 | COMPREPLY=($(compgen -f "${cur}")) | |
57 | return 0 | |
58 | ;; | |
59 | -R) | |
60 | COMPREPLY=($(compgen -f "${cur}")) | |
61 | return 0 | |
62 | ;; | |
63 | --user-agent) | |
64 | COMPREPLY=($(compgen -f "${cur}")) | |
65 | return 0 | |
66 | ;; | |
67 | -a) | |
68 | COMPREPLY=($(compgen -f "${cur}")) | |
69 | return 0 | |
70 | ;; | |
71 | --extensions) | |
72 | COMPREPLY=($(compgen -f "${cur}")) | |
73 | return 0 | |
74 | ;; | |
75 | -x) | |
76 | COMPREPLY=($(compgen -f "${cur}")) | |
77 | return 0 | |
78 | ;; | |
79 | --methods) | |
80 | COMPREPLY=($(compgen -f "${cur}")) | |
81 | return 0 | |
82 | ;; | |
83 | -m) | |
84 | COMPREPLY=($(compgen -f "${cur}")) | |
85 | return 0 | |
86 | ;; | |
87 | --data) | |
88 | COMPREPLY=($(compgen -f "${cur}")) | |
89 | return 0 | |
90 | ;; | |
91 | --headers) | |
92 | COMPREPLY=($(compgen -f "${cur}")) | |
93 | return 0 | |
94 | ;; | |
95 | -H) | |
96 | COMPREPLY=($(compgen -f "${cur}")) | |
97 | return 0 | |
98 | ;; | |
99 | --cookies) | |
100 | COMPREPLY=($(compgen -f "${cur}")) | |
101 | return 0 | |
102 | ;; | |
103 | -b) | |
104 | COMPREPLY=($(compgen -f "${cur}")) | |
105 | return 0 | |
106 | ;; | |
107 | --query) | |
108 | COMPREPLY=($(compgen -f "${cur}")) | |
109 | return 0 | |
110 | ;; | |
111 | -Q) | |
112 | COMPREPLY=($(compgen -f "${cur}")) | |
113 | return 0 | |
114 | ;; | |
115 | --dont-scan) | |
116 | COMPREPLY=($(compgen -f "${cur}")) | |
117 | return 0 | |
118 | ;; | |
119 | --filter-size) | |
120 | COMPREPLY=($(compgen -f "${cur}")) | |
121 | return 0 | |
122 | ;; | |
123 | -S) | |
124 | COMPREPLY=($(compgen -f "${cur}")) | |
125 | return 0 | |
126 | ;; | |
127 | --filter-regex) | |
128 | COMPREPLY=($(compgen -f "${cur}")) | |
129 | return 0 | |
130 | ;; | |
131 | -X) | |
132 | COMPREPLY=($(compgen -f "${cur}")) | |
133 | return 0 | |
134 | ;; | |
135 | --filter-words) | |
136 | COMPREPLY=($(compgen -f "${cur}")) | |
137 | return 0 | |
138 | ;; | |
139 | -W) | |
140 | COMPREPLY=($(compgen -f "${cur}")) | |
141 | return 0 | |
142 | ;; | |
143 | --filter-lines) | |
144 | COMPREPLY=($(compgen -f "${cur}")) | |
145 | return 0 | |
146 | ;; | |
147 | -N) | |
148 | COMPREPLY=($(compgen -f "${cur}")) | |
149 | return 0 | |
150 | ;; | |
151 | --filter-status) | |
152 | COMPREPLY=($(compgen -f "${cur}")) | |
153 | return 0 | |
154 | ;; | |
155 | -C) | |
156 | COMPREPLY=($(compgen -f "${cur}")) | |
157 | return 0 | |
158 | ;; | |
159 | --filter-similar-to) | |
160 | COMPREPLY=($(compgen -f "${cur}")) | |
161 | return 0 | |
162 | ;; | |
163 | --status-codes) | |
164 | COMPREPLY=($(compgen -f "${cur}")) | |
165 | return 0 | |
166 | ;; | |
167 | -s) | |
168 | COMPREPLY=($(compgen -f "${cur}")) | |
169 | return 0 | |
170 | ;; | |
171 | --timeout) | |
172 | COMPREPLY=($(compgen -f "${cur}")) | |
173 | return 0 | |
174 | ;; | |
175 | -T) | |
176 | COMPREPLY=($(compgen -f "${cur}")) | |
177 | return 0 | |
178 | ;; | |
179 | --threads) | |
180 | COMPREPLY=($(compgen -f "${cur}")) | |
181 | return 0 | |
182 | ;; | |
183 | -t) | |
184 | COMPREPLY=($(compgen -f "${cur}")) | |
185 | return 0 | |
186 | ;; | |
187 | --depth) | |
188 | COMPREPLY=($(compgen -f "${cur}")) | |
189 | return 0 | |
190 | ;; | |
191 | -d) | |
192 | COMPREPLY=($(compgen -f "${cur}")) | |
193 | return 0 | |
194 | ;; | |
195 | --scan-limit) | |
196 | COMPREPLY=($(compgen -f "${cur}")) | |
197 | return 0 | |
198 | ;; | |
199 | -L) | |
200 | COMPREPLY=($(compgen -f "${cur}")) | |
201 | return 0 | |
202 | ;; | |
203 | --parallel) | |
204 | COMPREPLY=($(compgen -f "${cur}")) | |
205 | return 0 | |
206 | ;; | |
207 | --rate-limit) | |
208 | COMPREPLY=($(compgen -f "${cur}")) | |
209 | return 0 | |
210 | ;; | |
211 | --time-limit) | |
212 | COMPREPLY=($(compgen -f "${cur}")) | |
213 | return 0 | |
214 | ;; | |
29 | 215 | --wordlist) |
30 | 216 | COMPREPLY=($(compgen -f "${cur}")) |
31 | 217 | return 0 |
32 | 218 | ;; |
33 | -w) | |
34 | COMPREPLY=($(compgen -f "${cur}")) | |
35 | return 0 | |
36 | ;; | |
37 | --url) | |
38 | COMPREPLY=($(compgen -f "${cur}")) | |
39 | return 0 | |
40 | ;; | |
41 | -u) | |
42 | COMPREPLY=($(compgen -f "${cur}")) | |
43 | return 0 | |
44 | ;; | |
45 | --threads) | |
46 | COMPREPLY=($(compgen -f "${cur}")) | |
47 | return 0 | |
48 | ;; | |
49 | -t) | |
50 | COMPREPLY=($(compgen -f "${cur}")) | |
51 | return 0 | |
52 | ;; | |
53 | --depth) | |
54 | COMPREPLY=($(compgen -f "${cur}")) | |
55 | return 0 | |
56 | ;; | |
57 | -d) | |
58 | COMPREPLY=($(compgen -f "${cur}")) | |
59 | return 0 | |
60 | ;; | |
61 | --timeout) | |
62 | COMPREPLY=($(compgen -f "${cur}")) | |
63 | return 0 | |
64 | ;; | |
65 | -T) | |
66 | COMPREPLY=($(compgen -f "${cur}")) | |
67 | return 0 | |
68 | ;; | |
69 | --proxy) | |
70 | COMPREPLY=($(compgen -f "${cur}")) | |
71 | return 0 | |
72 | ;; | |
73 | -p) | |
74 | COMPREPLY=($(compgen -f "${cur}")) | |
75 | return 0 | |
76 | ;; | |
77 | --replay-proxy) | |
78 | COMPREPLY=($(compgen -f "${cur}")) | |
79 | return 0 | |
80 | ;; | |
81 | -P) | |
82 | COMPREPLY=($(compgen -f "${cur}")) | |
83 | return 0 | |
84 | ;; | |
85 | --replay-codes) | |
86 | COMPREPLY=($(compgen -f "${cur}")) | |
87 | return 0 | |
88 | ;; | |
89 | -R) | |
90 | COMPREPLY=($(compgen -f "${cur}")) | |
91 | return 0 | |
92 | ;; | |
93 | --status-codes) | |
94 | COMPREPLY=($(compgen -f "${cur}")) | |
95 | return 0 | |
96 | ;; | |
97 | -s) | |
219 | -w) | |
98 | 220 | COMPREPLY=($(compgen -f "${cur}")) |
99 | 221 | return 0 |
100 | 222 | ;; |
102 | 224 | COMPREPLY=($(compgen -f "${cur}")) |
103 | 225 | return 0 |
104 | 226 | ;; |
105 | -o) | |
106 | COMPREPLY=($(compgen -f "${cur}")) | |
107 | return 0 | |
108 | ;; | |
109 | --resume-from) | |
227 | -o) | |
110 | 228 | COMPREPLY=($(compgen -f "${cur}")) |
111 | 229 | return 0 |
112 | 230 | ;; |
113 | 231 | --debug-log) |
114 | COMPREPLY=($(compgen -f "${cur}")) | |
115 | return 0 | |
116 | ;; | |
117 | --user-agent) | |
118 | COMPREPLY=($(compgen -f "${cur}")) | |
119 | return 0 | |
120 | ;; | |
121 | -a) | |
122 | COMPREPLY=($(compgen -f "${cur}")) | |
123 | return 0 | |
124 | ;; | |
125 | --extensions) | |
126 | COMPREPLY=($(compgen -f "${cur}")) | |
127 | return 0 | |
128 | ;; | |
129 | -x) | |
130 | COMPREPLY=($(compgen -f "${cur}")) | |
131 | return 0 | |
132 | ;; | |
133 | --dont-scan) | |
134 | COMPREPLY=($(compgen -f "${cur}")) | |
135 | return 0 | |
136 | ;; | |
137 | --headers) | |
138 | COMPREPLY=($(compgen -f "${cur}")) | |
139 | return 0 | |
140 | ;; | |
141 | -H) | |
142 | COMPREPLY=($(compgen -f "${cur}")) | |
143 | return 0 | |
144 | ;; | |
145 | --query) | |
146 | COMPREPLY=($(compgen -f "${cur}")) | |
147 | return 0 | |
148 | ;; | |
149 | -Q) | |
150 | COMPREPLY=($(compgen -f "${cur}")) | |
151 | return 0 | |
152 | ;; | |
153 | --filter-size) | |
154 | COMPREPLY=($(compgen -f "${cur}")) | |
155 | return 0 | |
156 | ;; | |
157 | -S) | |
158 | COMPREPLY=($(compgen -f "${cur}")) | |
159 | return 0 | |
160 | ;; | |
161 | --filter-regex) | |
162 | COMPREPLY=($(compgen -f "${cur}")) | |
163 | return 0 | |
164 | ;; | |
165 | -X) | |
166 | COMPREPLY=($(compgen -f "${cur}")) | |
167 | return 0 | |
168 | ;; | |
169 | --filter-words) | |
170 | COMPREPLY=($(compgen -f "${cur}")) | |
171 | return 0 | |
172 | ;; | |
173 | -W) | |
174 | COMPREPLY=($(compgen -f "${cur}")) | |
175 | return 0 | |
176 | ;; | |
177 | --filter-lines) | |
178 | COMPREPLY=($(compgen -f "${cur}")) | |
179 | return 0 | |
180 | ;; | |
181 | -N) | |
182 | COMPREPLY=($(compgen -f "${cur}")) | |
183 | return 0 | |
184 | ;; | |
185 | --filter-status) | |
186 | COMPREPLY=($(compgen -f "${cur}")) | |
187 | return 0 | |
188 | ;; | |
189 | -C) | |
190 | COMPREPLY=($(compgen -f "${cur}")) | |
191 | return 0 | |
192 | ;; | |
193 | --filter-similar-to) | |
194 | COMPREPLY=($(compgen -f "${cur}")) | |
195 | return 0 | |
196 | ;; | |
197 | --scan-limit) | |
198 | COMPREPLY=($(compgen -f "${cur}")) | |
199 | return 0 | |
200 | ;; | |
201 | -L) | |
202 | COMPREPLY=($(compgen -f "${cur}")) | |
203 | return 0 | |
204 | ;; | |
205 | --parallel) | |
206 | COMPREPLY=($(compgen -f "${cur}")) | |
207 | return 0 | |
208 | ;; | |
209 | --rate-limit) | |
210 | COMPREPLY=($(compgen -f "${cur}")) | |
211 | return 0 | |
212 | ;; | |
213 | --time-limit) | |
214 | 232 | COMPREPLY=($(compgen -f "${cur}")) |
215 | 233 | return 0 |
216 | 234 | ;; |
221 | 239 | COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") ) |
222 | 240 | return 0 |
223 | 241 | ;; |
224 | ||
225 | 242 | esac |
226 | 243 | } |
227 | 244 |
0 | ||
1 | use builtin; | |
2 | use str; | |
3 | ||
4 | set edit:completion:arg-completer[feroxbuster] = {|@words| | |
5 | fn spaces {|n| | |
6 | builtin:repeat $n ' ' | str:join '' | |
7 | } | |
8 | fn cand {|text desc| | |
9 | edit:complex-candidate $text &display=$text' '(spaces (- 14 (wcswidth $text)))$desc | |
10 | } | |
11 | var command = 'feroxbuster' | |
12 | for word $words[1..-1] { | |
13 | if (str:has-prefix $word '-') { | |
14 | break | |
15 | } | |
16 | set command = $command';'$word | |
17 | } | |
18 | var completions = [ | |
19 | &'feroxbuster'= { | |
20 | cand -u 'The target URL (required, unless [--stdin || --resume-from] used)' | |
21 | cand --url 'The target URL (required, unless [--stdin || --resume-from] used)' | |
22 | cand --resume-from 'State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)' | |
23 | cand -p 'Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)' | |
24 | cand --proxy 'Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)' | |
25 | cand -P 'Send only unfiltered requests through a Replay Proxy, instead of all requests' | |
26 | cand --replay-proxy 'Send only unfiltered requests through a Replay Proxy, instead of all requests' | |
27 | cand -R 'Status Codes to send through a Replay Proxy when found (default: --status-codes value)' | |
28 | cand --replay-codes 'Status Codes to send through a Replay Proxy when found (default: --status-codes value)' | |
29 | cand -a 'Sets the User-Agent (default: feroxbuster/2.5.0)' | |
30 | cand --user-agent 'Sets the User-Agent (default: feroxbuster/2.5.0)' | |
31 | cand -x 'File extension(s) to search for (ex: -x php -x pdf js)' | |
32 | cand --extensions 'File extension(s) to search for (ex: -x php -x pdf js)' | |
33 | cand -m 'Which HTTP request method(s) should be sent (default: GET)' | |
34 | cand --methods 'Which HTTP request method(s) should be sent (default: GET)' | |
35 | cand --data 'Request''s Body; can read data from a file if input starts with an @ (ex: @post.bin)' | |
36 | cand -H 'Specify HTTP headers to be used in each request (ex: -H Header:val -H ''stuff: things'')' | |
37 | cand --headers 'Specify HTTP headers to be used in each request (ex: -H Header:val -H ''stuff: things'')' | |
38 | cand -b 'Specify HTTP cookies to be used in each request (ex: -b stuff=things)' | |
39 | cand --cookies 'Specify HTTP cookies to be used in each request (ex: -b stuff=things)' | |
40 | cand -Q 'Request''s URL query parameters (ex: -Q token=stuff -Q secret=key)' | |
41 | cand --query 'Request''s URL query parameters (ex: -Q token=stuff -Q secret=key)' | |
42 | cand --dont-scan 'URL(s) or Regex Pattern(s) to exclude from recursion/scans' | |
43 | cand -S 'Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)' | |
44 | cand --filter-size 'Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)' | |
45 | cand -X 'Filter out messages via regular expression matching on the response''s body (ex: -X ''^ignore me$'')' | |
46 | cand --filter-regex 'Filter out messages via regular expression matching on the response''s body (ex: -X ''^ignore me$'')' | |
47 | cand -W 'Filter out messages of a particular word count (ex: -W 312 -W 91,82)' | |
48 | cand --filter-words 'Filter out messages of a particular word count (ex: -W 312 -W 91,82)' | |
49 | cand -N 'Filter out messages of a particular line count (ex: -N 20 -N 31,30)' | |
50 | cand --filter-lines 'Filter out messages of a particular line count (ex: -N 20 -N 31,30)' | |
51 | cand -C 'Filter out status codes (deny list) (ex: -C 200 -C 401)' | |
52 | cand --filter-status 'Filter out status codes (deny list) (ex: -C 200 -C 401)' | |
53 | cand --filter-similar-to 'Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)' | |
54 | cand -s 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)' | |
55 | cand --status-codes 'Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)' | |
56 | cand -T 'Number of seconds before a client''s request times out (default: 7)' | |
57 | cand --timeout 'Number of seconds before a client''s request times out (default: 7)' | |
58 | cand -t 'Number of concurrent threads (default: 50)' | |
59 | cand --threads 'Number of concurrent threads (default: 50)' | |
60 | cand -d 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)' | |
61 | cand --depth 'Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)' | |
62 | cand -L 'Limit total number of concurrent scans (default: 0, i.e. no limit)' | |
63 | cand --scan-limit 'Limit total number of concurrent scans (default: 0, i.e. no limit)' | |
64 | cand --parallel 'Run parallel feroxbuster instances (one child process per url passed via stdin)' | |
65 | cand --rate-limit 'Limit number of requests per second (per directory) (default: 0, i.e. no limit)' | |
66 | cand --time-limit 'Limit total run time of all scans (ex: --time-limit 10m)' | |
67 | cand -w 'Path to the wordlist' | |
68 | cand --wordlist 'Path to the wordlist' | |
69 | cand -o 'Output file to write results to (use w/ --json for JSON entries)' | |
70 | cand --output 'Output file to write results to (use w/ --json for JSON entries)' | |
71 | cand --debug-log 'Output file to write log entries (use w/ --json for JSON entries)' | |
72 | cand -h 'Print help information' | |
73 | cand --help 'Print help information' | |
74 | cand -V 'Print version information' | |
75 | cand --version 'Print version information' | |
76 | cand --stdin 'Read url(s) from STDIN' | |
77 | cand -A 'Use a random User-Agent' | |
78 | cand --random-agent 'Use a random User-Agent' | |
79 | cand -f 'Append / to each request''s URL' | |
80 | cand --add-slash 'Append / to each request''s URL' | |
81 | cand -r 'Allow client to follow redirects' | |
82 | cand --redirects 'Allow client to follow redirects' | |
83 | cand -k 'Disables TLS certificate validation in the client' | |
84 | cand --insecure 'Disables TLS certificate validation in the client' | |
85 | cand -n 'Do not scan recursively' | |
86 | cand --no-recursion 'Do not scan recursively' | |
87 | cand -e 'Extract links from response body (html, javascript, etc...); make new requests based on findings' | |
88 | cand --extract-links 'Extract links from response body (html, javascript, etc...); make new requests based on findings' | |
89 | cand --auto-tune 'Automatically lower scan rate when an excessive amount of errors are encountered' | |
90 | cand --auto-bail 'Automatically stop scanning when an excessive amount of errors are encountered' | |
91 | cand -D 'Don''t auto-filter wildcard responses' | |
92 | cand --dont-filter 'Don''t auto-filter wildcard responses' | |
93 | cand -v 'Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v''s is probably too much)' | |
94 | cand --verbosity 'Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v''s is probably too much)' | |
95 | cand --silent 'Only print URLs + turn off logging (good for piping a list of urls to other commands)' | |
96 | cand -q 'Hide progress bars and banner (good for tmux windows w/ notifications)' | |
97 | cand --quiet 'Hide progress bars and banner (good for tmux windows w/ notifications)' | |
98 | cand --json 'Emit JSON logs to --output and --debug-log instead of normal text' | |
99 | } | |
100 | ] | |
101 | $completions[$command] | |
102 | } |
11 | 11 | complete -c feroxbuster -n "__fish_use_subcommand" -l debug-log -d 'Output file to write log entries (use w/ --json for JSON entries)' |
12 | 12 | complete -c feroxbuster -n "__fish_use_subcommand" -s a -l user-agent -d 'Sets the User-Agent (default: feroxbuster/VERSION)' |
13 | 13 | complete -c feroxbuster -n "__fish_use_subcommand" -s x -l extensions -d 'File extension(s) to search for (ex: -x php -x pdf js)' |
14 | complete -c feroxbuster -n "__fish_use_subcommand" -s m -l methods -d 'HTTP request method(s) (default: GET)' | |
15 | complete -c feroxbuster -n "__fish_use_subcommand" -l data -d 'HTTP Body data; can read data from a file if input starts with an @ (ex: @post.bin)' | |
14 | 16 | complete -c feroxbuster -n "__fish_use_subcommand" -l dont-scan -d 'URL(s) or Regex Pattern(s) to exclude from recursion/scans' |
15 | 17 | complete -c feroxbuster -n "__fish_use_subcommand" -s H -l headers -d 'Specify HTTP headers (ex: -H Header:val \'stuff: things\')' |
18 | complete -c feroxbuster -n "__fish_use_subcommand" -s b -l cookies -d 'Specify HTTP cookies (ex: -b stuff=things)' | |
16 | 19 | complete -c feroxbuster -n "__fish_use_subcommand" -s Q -l query -d 'Specify URL query parameters (ex: -Q token=stuff -Q secret=key)' |
17 | 20 | complete -c feroxbuster -n "__fish_use_subcommand" -s S -l filter-size -d 'Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)' |
18 | 21 | complete -c feroxbuster -n "__fish_use_subcommand" -s X -l filter-regex -d 'Filter out messages via regular expression matching on the response\'s body (ex: -X \'^ignore me$\')' |
2 | 2 | config::Configuration, |
3 | 3 | event_handlers::Handles, |
4 | 4 | utils::{logged_request, status_colorizer}, |
5 | VERSION, | |
5 | DEFAULT_METHOD, VERSION, | |
6 | 6 | }; |
7 | 7 | use anyhow::{bail, Result}; |
8 | 8 | use console::{style, Emoji}; |
96 | 96 | |
97 | 97 | /// represents Configuration.extensions |
98 | 98 | extensions: BannerEntry, |
99 | ||
100 | /// represents Configuration.methods | |
101 | methods: BannerEntry, | |
102 | ||
103 | /// represents Configuration.data | |
104 | data: BannerEntry, | |
99 | 105 | |
100 | 106 | /// represents Configuration.insecure |
101 | 107 | insecure: BannerEntry, |
301 | 307 | "Extensions", |
302 | 308 | &format!("[{}]", config.extensions.join(", ")), |
303 | 309 | ); |
310 | let methods = BannerEntry::new( | |
311 | "🏁", | |
312 | "HTTP methods", | |
313 | &format!("[{}]", config.methods.join(", ")), | |
314 | ); | |
315 | ||
316 | let offset = std::cmp::min(config.data.len(), 30); | |
317 | let data = String::from_utf8(config.data[..offset].to_vec()) | |
318 | .unwrap_or_else(|_err| { | |
319 | format!( | |
320 | "{:x?} ...", | |
321 | &config.data[..std::cmp::min(config.data.len(), 13)] | |
322 | ) | |
323 | }) | |
324 | .replace("\n", " ") | |
325 | .replace("\r", ""); | |
326 | let data = BannerEntry::new("💣", "HTTP Body", &data); | |
304 | 327 | let insecure = BannerEntry::new("🔓", "Insecure", &config.insecure.to_string()); |
305 | 328 | let redirects = BannerEntry::new("📍", "Follow Redirects", &config.redirects.to_string()); |
306 | 329 | let dont_filter = |
338 | 361 | output, |
339 | 362 | debug_log, |
340 | 363 | extensions, |
364 | methods, | |
365 | data, | |
341 | 366 | insecure, |
342 | 367 | dont_filter, |
343 | 368 | redirects, |
394 | 419 | |
395 | 420 | let api_url = Url::parse(url)?; |
396 | 421 | |
397 | let result = logged_request(&api_url, handles.clone()).await?; | |
422 | let result = logged_request(&api_url, DEFAULT_METHOD, None, handles.clone()).await?; | |
398 | 423 | let body = result.text().await?; |
399 | 424 | |
400 | 425 | let json_response: Value = serde_json::from_str(&body)?; |
524 | 549 | writeln!(&mut writer, "{}", self.extensions)?; |
525 | 550 | } |
526 | 551 | |
552 | if !config.methods.is_empty() { | |
553 | writeln!(&mut writer, "{}", self.methods)?; | |
554 | } | |
555 | ||
556 | if !config.data.is_empty() { | |
557 | writeln!(&mut writer, "{}", self.data)?; | |
558 | } | |
559 | ||
527 | 560 | if config.insecure { |
528 | 561 | writeln!(&mut writer, "{}", self.insecure)?; |
529 | 562 | } |
0 | 0 | use super::utils::{ |
1 | depth, report_and_exit, save_state, serialized_type, status_codes, threads, timeout, | |
1 | depth, methods, report_and_exit, save_state, serialized_type, status_codes, threads, timeout, | |
2 | 2 | user_agent, wordlist, OutputLevel, RequesterPolicy, |
3 | 3 | }; |
4 | 4 | use crate::config::determine_output_level; |
8 | 8 | DEFAULT_CONFIG_NAME, |
9 | 9 | }; |
10 | 10 | use anyhow::{anyhow, Context, Result}; |
11 | use clap::{value_t, ArgMatches}; | |
11 | use clap::ArgMatches; | |
12 | 12 | use regex::Regex; |
13 | use reqwest::{Client, StatusCode, Url}; | |
13 | use reqwest::{Client, Method, StatusCode, Url}; | |
14 | 14 | use serde::{Deserialize, Serialize}; |
15 | 15 | use std::{ |
16 | 16 | collections::HashMap, |
21 | 21 | |
22 | 22 | /// macro helper to abstract away repetitive configuration updates |
23 | 23 | macro_rules! update_config_if_present { |
24 | ($c:expr, $m:ident, $v:expr, $t:ty) => { | |
25 | match value_t!($m, $v, $t) { | |
26 | Ok(value) => *$c = value, // Update value | |
27 | Err(clap::Error { | |
28 | kind: clap::ErrorKind::ArgumentNotFound, | |
29 | message: _, | |
30 | info: _, | |
31 | }) => { | |
32 | // Do nothing if argument not found | |
24 | ($conf_val:expr, $matches:ident, $arg_name:expr) => { | |
25 | match $matches.value_of_t($arg_name) { | |
26 | Ok(value) => *$conf_val = value, // Update value | |
27 | Err(err) => { | |
28 | if !matches!(err.kind, clap::ErrorKind::ArgumentNotFound) { | |
29 | // Do nothing if argument not found | |
30 | err.exit() // Exit with error on any other parse error | |
31 | } | |
33 | 32 | } |
34 | Err(e) => e.exit(), // Exit with error on parse error | |
35 | 33 | } |
36 | 34 | }; |
37 | 35 | } |
169 | 167 | /// File extension(s) to search for |
170 | 168 | #[serde(default)] |
171 | 169 | pub extensions: Vec<String>, |
170 | ||
171 | /// HTTP requests methods(s) to search for | |
172 | #[serde(default = "methods")] | |
173 | pub methods: Vec<String>, | |
174 | ||
175 | /// HTTP Body data to send during request | |
176 | #[serde(default)] | |
177 | pub data: Vec<u8>, | |
172 | 178 | |
173 | 179 | /// HTTP headers to be used in each request |
174 | 180 | #[serde(default)] |
314 | 320 | replay_proxy: String::new(), |
315 | 321 | queries: Vec::new(), |
316 | 322 | extensions: Vec::new(), |
323 | methods: methods(), | |
324 | data: Vec::new(), | |
317 | 325 | filter_size: Vec::new(), |
318 | 326 | filter_regex: Vec::new(), |
319 | 327 | url_denylist: Vec::new(), |
356 | 364 | /// - **random_agent**: `false` |
357 | 365 | /// - **insecure**: `false` (don't be insecure, i.e. don't allow invalid certs) |
358 | 366 | /// - **extensions**: `None` |
367 | /// - **methods**: [`DEFAULT_METHOD`] | |
368 | /// - **data**: `None` | |
359 | 369 | /// - **url_denylist**: `None` |
360 | 370 | /// - **regex_denylist**: `None` |
361 | 371 | /// - **filter_size**: `None` |
506 | 516 | fn parse_cli_args(args: &ArgMatches) -> Self { |
507 | 517 | let mut config = Configuration::default(); |
508 | 518 | |
509 | update_config_if_present!(&mut config.threads, args, "threads", usize); | |
510 | update_config_if_present!(&mut config.depth, args, "depth", usize); | |
511 | update_config_if_present!(&mut config.scan_limit, args, "scan_limit", usize); | |
512 | update_config_if_present!(&mut config.parallel, args, "parallel", usize); | |
513 | update_config_if_present!(&mut config.rate_limit, args, "rate_limit", usize); | |
514 | update_config_if_present!(&mut config.wordlist, args, "wordlist", String); | |
515 | update_config_if_present!(&mut config.output, args, "output", String); | |
516 | update_config_if_present!(&mut config.debug_log, args, "debug_log", String); | |
517 | update_config_if_present!(&mut config.time_limit, args, "time_limit", String); | |
518 | update_config_if_present!(&mut config.resume_from, args, "resume_from", String); | |
519 | update_config_if_present!(&mut config.threads, args, "threads"); | |
520 | update_config_if_present!(&mut config.depth, args, "depth"); | |
521 | update_config_if_present!(&mut config.scan_limit, args, "scan_limit"); | |
522 | update_config_if_present!(&mut config.parallel, args, "parallel"); | |
523 | update_config_if_present!(&mut config.rate_limit, args, "rate_limit"); | |
524 | update_config_if_present!(&mut config.wordlist, args, "wordlist"); | |
525 | update_config_if_present!(&mut config.output, args, "output"); | |
526 | update_config_if_present!(&mut config.debug_log, args, "debug_log"); | |
527 | update_config_if_present!(&mut config.time_limit, args, "time_limit"); | |
528 | update_config_if_present!(&mut config.resume_from, args, "resume_from"); | |
519 | 529 | |
520 | 530 | if let Some(arg) = args.values_of("status_codes") { |
521 | 531 | config.status_codes = arg |
555 | 565 | config.extensions = arg.map(|val| val.to_string()).collect(); |
556 | 566 | } |
557 | 567 | |
568 | if let Some(arg) = args.values_of("methods") { | |
569 | config.methods = arg | |
570 | .map(|val| { | |
571 | // Check methods if they are correct | |
572 | Method::from_bytes(val.as_bytes()) | |
573 | .unwrap_or_else(|e| report_and_exit(&e.to_string())) | |
574 | .as_str() | |
575 | .to_string() | |
576 | }) | |
577 | .collect(); | |
578 | } | |
579 | ||
580 | if let Some(arg) = args.value_of("data") { | |
581 | if let Some(stripped) = arg.strip_prefix('@') { | |
582 | config.data = | |
583 | std::fs::read(stripped).unwrap_or_else(|e| report_and_exit(&e.to_string())); | |
584 | } else { | |
585 | config.data = arg.as_bytes().to_vec(); | |
586 | } | |
587 | } | |
588 | ||
558 | 589 | if args.is_present("stdin") { |
559 | 590 | config.stdin = true; |
560 | 591 | } else if let Some(url) = args.value_of("url") { |
568 | 599 | // url to be scanned. With the addition of regex support, I want to move parsing |
569 | 600 | // out of should_deny_url and into here, so it's performed once instead of thousands |
570 | 601 | // of times |
571 | for denier in arg.into_iter() { | |
602 | for denier in arg { | |
572 | 603 | // could be an absolute url or a regex, need to determine which and populate the |
573 | 604 | // appropriate vector |
574 | 605 | match Url::parse(denier.trim_end_matches('/')) { |
693 | 724 | //// |
694 | 725 | // organizational breakpoint; all options below alter the Client configuration |
695 | 726 | //// |
696 | update_config_if_present!(&mut config.proxy, args, "proxy", String); | |
697 | update_config_if_present!(&mut config.replay_proxy, args, "replay_proxy", String); | |
698 | update_config_if_present!(&mut config.user_agent, args, "user_agent", String); | |
699 | update_config_if_present!(&mut config.timeout, args, "timeout", u64); | |
727 | update_config_if_present!(&mut config.proxy, args, "proxy"); | |
728 | update_config_if_present!(&mut config.replay_proxy, args, "replay_proxy"); | |
729 | update_config_if_present!(&mut config.user_agent, args, "user_agent"); | |
730 | update_config_if_present!(&mut config.timeout, args, "timeout"); | |
700 | 731 | |
701 | 732 | if args.is_present("random_agent") { |
702 | 733 | config.random_agent = true; |
722 | 753 | let value = split_val.collect::<Vec<&str>>().join(":"); |
723 | 754 | config.headers.insert(name.to_string(), value.to_string()); |
724 | 755 | } |
756 | } | |
757 | ||
758 | if let Some(cookies) = args.values_of("cookies") { | |
759 | config.headers.insert( | |
760 | // we know the header name is always "cookie" | |
761 | "Cookie".to_string(), | |
762 | // on splitting, there should be only two elements, | |
763 | // a key and a value | |
764 | cookies | |
765 | .map(|cookie| cookie.split('=').collect::<Vec<&str>>()[..].to_owned()) | |
766 | .filter(|parts| parts.len() == 2) | |
767 | .map(|parts| format!("{}={}", parts[0].trim(), parts[1].trim())) | |
768 | // trim the spaces, join with an equals sign | |
769 | .collect::<Vec<String>>() | |
770 | .join("; "), // join all the cookies with semicolons for the final header | |
771 | ); | |
725 | 772 | } |
726 | 773 | |
727 | 774 | if let Some(queries) = args.values_of("queries") { |
832 | 879 | update_if_not_default!(&mut conf.insecure, new.insecure, false); |
833 | 880 | update_if_not_default!(&mut conf.extract_links, new.extract_links, false); |
834 | 881 | update_if_not_default!(&mut conf.extensions, new.extensions, Vec::<String>::new()); |
882 | update_if_not_default!(&mut conf.methods, new.methods, Vec::<String>::new()); | |
883 | update_if_not_default!(&mut conf.data, new.data, Vec::<u8>::new()); | |
835 | 884 | update_if_not_default!(&mut conf.url_denylist, new.url_denylist, Vec::<Url>::new()); |
836 | 885 | if !new.regex_denylist.is_empty() { |
837 | 886 | // cant use the update_if_not_default macro due to the following error |
30 | 30 | redirects = true |
31 | 31 | insecure = true |
32 | 32 | extensions = ["html", "php", "js"] |
33 | methods = ["GET", "PUT", "DELETE"] | |
34 | data = [31, 32, 33, 34] | |
33 | 35 | url_denylist = ["http://dont-scan.me", "https://also-not.me"] |
34 | 36 | regex_denylist = ["/deny.*"] |
35 | 37 | headers = {stuff = "things", mostuff = "mothings"} |
95 | 97 | assert_eq!(config.queries, Vec::new()); |
96 | 98 | assert_eq!(config.filter_size, Vec::<u64>::new()); |
97 | 99 | assert_eq!(config.extensions, Vec::<String>::new()); |
100 | assert_eq!(config.methods, vec!["GET"]); | |
101 | assert_eq!(config.data, Vec::<u8>::new()); | |
98 | 102 | assert_eq!(config.url_denylist, Vec::<Url>::new()); |
99 | 103 | assert_eq!(config.filter_regex, Vec::<String>::new()); |
100 | 104 | assert_eq!(config.filter_similar, Vec::<String>::new()); |
291 | 295 | fn config_reads_extensions() { |
292 | 296 | let config = setup_config_test(); |
293 | 297 | assert_eq!(config.extensions, vec!["html", "php", "js"]); |
298 | } | |
299 | ||
300 | #[test] | |
301 | /// parse the test config and see that the value parsed is correct | |
302 | fn config_reads_methods() { | |
303 | let config = setup_config_test(); | |
304 | assert_eq!(config.methods, vec!["GET", "PUT", "DELETE"]); | |
305 | } | |
306 | ||
307 | #[test] | |
308 | /// parse the test config and see that the value parsed is correct | |
309 | fn config_reads_data() { | |
310 | let config = setup_config_test(); | |
311 | assert_eq!(config.data, vec![31, 32, 33, 34]); | |
294 | 312 | } |
295 | 313 | |
296 | 314 | #[test] |
0 | 0 | use crate::{ |
1 | 1 | utils::{module_colorizer, status_colorizer}, |
2 | DEFAULT_STATUS_CODES, DEFAULT_WORDLIST, VERSION, | |
2 | DEFAULT_METHOD, DEFAULT_STATUS_CODES, DEFAULT_WORDLIST, VERSION, | |
3 | 3 | }; |
4 | 4 | #[cfg(not(test))] |
5 | 5 | use std::process::exit; |
49 | 49 | .iter() |
50 | 50 | .map(|code| code.as_u16()) |
51 | 51 | .collect() |
52 | } | |
53 | ||
54 | /// default HTTP Method | |
55 | pub(super) fn methods() -> Vec<String> { | |
56 | vec![DEFAULT_METHOD.to_owned()] | |
52 | 57 | } |
53 | 58 | |
54 | 59 | /// default wordlist |
213 | 213 | make_request( |
214 | 214 | self.config.replay_client.as_ref().unwrap(), |
215 | 215 | resp.url(), |
216 | resp.method().as_str(), | |
217 | None, | |
216 | 218 | self.config.output_level, |
217 | 219 | &self.config, |
218 | 220 | tx_stats.clone(), |
20 | 20 | |
21 | 21 | /// Examine robots.txt (specifically) and extract links |
22 | 22 | RobotsTxt, |
23 | ||
24 | // Parse HTML and extract links | |
25 | ParseHtml, | |
23 | 26 | } |
24 | 27 | |
25 | 28 | /// responsible for building an `Extractor` |
27 | 30 | /// Response from which to extract links |
28 | 31 | response: Option<&'a FeroxResponse>, |
29 | 32 | |
30 | /// Response from which to extract links | |
33 | /// URL of where to extract links | |
31 | 34 | url: String, |
32 | 35 | |
33 | 36 | /// Handles object to house the underlying mpsc transmitters |
13 | 13 | }, |
14 | 14 | url::FeroxUrl, |
15 | 15 | utils::{logged_request, make_request}, |
16 | DEFAULT_METHOD, | |
16 | 17 | }; |
17 | 18 | use anyhow::{bail, Context, Result}; |
18 | 19 | use reqwest::{StatusCode, Url}; |
20 | use scraper::{Html, Selector}; | |
19 | 21 | use std::collections::HashSet; |
20 | 22 | use tokio::sync::oneshot; |
21 | 23 | |
41 | 43 | /// Response from which to extract links |
42 | 44 | pub(super) response: Option<&'a FeroxResponse>, |
43 | 45 | |
44 | /// Response from which to extract links | |
46 | /// URL of where to extract links | |
45 | 47 | pub(super) url: String, |
46 | 48 | |
47 | 49 | /// Handles object to house the underlying mpsc transmitters |
54 | 56 | /// Extractor implementation |
55 | 57 | impl<'a> Extractor<'a> { |
56 | 58 | /// perform extraction from the given target and return any links found |
57 | pub async fn extract(&self) -> Result<HashSet<String>> { | |
59 | pub async fn extract(&self) -> Result<(HashSet<String>, bool)> { | |
58 | 60 | log::trace!("enter: extract (this fn has associated trace exit msg)"); |
59 | 61 | match self.target { |
60 | 62 | ExtractionTarget::ResponseBody => Ok(self.extract_from_body().await?), |
61 | 63 | ExtractionTarget::RobotsTxt => Ok(self.extract_from_robots().await?), |
64 | ExtractionTarget::ParseHtml => Ok(self.parse_html().await?), | |
62 | 65 | } |
63 | 66 | } |
64 | 67 | |
90 | 93 | continue; |
91 | 94 | } |
92 | 95 | |
93 | if resp.is_file() { | |
94 | // very likely a file, simply request and report | |
95 | log::debug!("Extracted file: {}", resp); | |
96 | ||
97 | scanned_urls.add_file_scan(&resp.url().to_string(), ScanOrder::Latest); | |
96 | // request and report assumed file | |
97 | if resp.is_file() || !resp.is_directory() { | |
98 | log::debug!("Extracted File: {}", resp); | |
99 | ||
100 | scanned_urls.add_file_scan(resp.url().as_str(), ScanOrder::Latest); | |
98 | 101 | |
99 | 102 | if let Err(e) = resp.send_report(self.handles.output.tx.clone()) { |
100 | 103 | log::warn!("Could not send FeroxResponse to output handler: {}", e); |
141 | 144 | /// - homepage/assets/img/ |
142 | 145 | /// - homepage/assets/ |
143 | 146 | /// - homepage/ |
144 | pub(super) async fn extract_from_body(&self) -> Result<HashSet<String>> { | |
145 | log::trace!("enter: get_links"); | |
147 | pub(super) async fn extract_from_body(&self) -> Result<(HashSet<String>, bool)> { | |
148 | log::trace!("enter: extract_from_body"); | |
146 | 149 | |
147 | 150 | let mut links = HashSet::<String>::new(); |
148 | ||
149 | let body = self.response.unwrap().text(); | |
151 | let dirlist_flag = false; | |
152 | ||
153 | // Response | |
154 | let response = self.response.unwrap(); | |
155 | let resp_url = response.url(); | |
156 | let body = response.text(); | |
157 | let html = Html::parse_document(body); | |
158 | ||
159 | // Extract Links | |
160 | self.extract_links_by_attr(resp_url, &mut links, &html, "a", "href"); | |
161 | self.extract_links_by_attr(resp_url, &mut links, &html, "img", "src"); | |
162 | self.extract_links_by_attr(resp_url, &mut links, &html, "form", "action"); | |
163 | self.extract_links_by_attr(resp_url, &mut links, &html, "script", "src"); | |
164 | self.extract_links_by_attr(resp_url, &mut links, &html, "iframe", "src"); | |
165 | self.extract_links_by_attr(resp_url, &mut links, &html, "div", "src"); | |
166 | self.extract_links_by_attr(resp_url, &mut links, &html, "frame", "src"); | |
167 | self.extract_links_by_attr(resp_url, &mut links, &html, "embed", "src"); | |
168 | self.extract_links_by_attr(resp_url, &mut links, &html, "script", "src"); | |
150 | 169 | |
151 | 170 | for capture in self.links_regex.captures_iter(body) { |
152 | 171 | // remove single & double quotes from both ends of the capture |
186 | 205 | |
187 | 206 | self.update_stats(links.len())?; |
188 | 207 | |
189 | log::trace!("exit: get_links -> {:?}", links); | |
190 | ||
191 | Ok(links) | |
208 | log::trace!("exit: extract_from_body -> {:?} {}", links, dirlist_flag); | |
209 | Ok((links, dirlist_flag)) | |
192 | 210 | } |
193 | 211 | |
194 | 212 | /// take a url fragment like homepage/assets/img/icons/handshake.svg and |
195 | 213 | /// incrementally add |
196 | /// - homepage/assets/img/icons/ | |
197 | /// - homepage/assets/img/ | |
198 | /// - homepage/assets/ | |
199 | /// - homepage/ | |
214 | /// - homepage/assets/img/icons/ | |
215 | /// - homepage/assets/img/ | |
216 | /// - homepage/assets/ | |
217 | /// - homepage/ | |
200 | 218 | fn add_all_sub_paths(&self, url_path: &str, links: &mut HashSet<String>) -> Result<()> { |
201 | 219 | log::trace!("enter: add_all_sub_paths({}, {:?})", url_path, links); |
202 | 220 | |
265 | 283 | |
266 | 284 | let old_url = match self.target { |
267 | 285 | ExtractionTarget::ResponseBody => self.response.unwrap().url().clone(), |
268 | ExtractionTarget::RobotsTxt => match Url::parse(&self.url) { | |
269 | Ok(u) => u, | |
270 | Err(e) => { | |
271 | bail!("Could not parse {}: {}", self.url, e); | |
272 | } | |
273 | }, | |
286 | ExtractionTarget::ParseHtml | ExtractionTarget::RobotsTxt => { | |
287 | match Url::parse(&self.url) { | |
288 | Ok(u) => u, | |
289 | Err(e) => { | |
290 | bail!("Could not parse {}: {}", self.url, e); | |
291 | } | |
292 | } | |
293 | } | |
274 | 294 | }; |
275 | 295 | |
276 | 296 | let new_url = old_url |
285 | 305 | } |
286 | 306 | |
287 | 307 | /// Wrapper around link extraction logic |
288 | /// currently used in two places: | |
289 | /// - links from response bodies | |
290 | /// - links from robots.txt responses | |
291 | /// | |
292 | /// general steps taken: | |
293 | 308 | /// - create a new Url object based on cli options/args |
294 | 309 | /// - check if the new Url has already been seen/scanned -> None |
295 | 310 | /// - make a request to the new Url ? -> Some(response) : None |
323 | 338 | } |
324 | 339 | |
325 | 340 | // make the request and store the response |
326 | let new_response = logged_request(&new_url, self.handles.clone()).await?; | |
327 | ||
328 | let new_ferox_response = | |
329 | FeroxResponse::from(new_response, url, true, self.handles.config.output_level).await; | |
341 | let new_response = | |
342 | logged_request(&new_url, DEFAULT_METHOD, None, self.handles.clone()).await?; | |
343 | ||
344 | let new_ferox_response = FeroxResponse::from( | |
345 | new_response, | |
346 | url, | |
347 | DEFAULT_METHOD, | |
348 | true, | |
349 | self.handles.config.output_level, | |
350 | ) | |
351 | .await; | |
330 | 352 | |
331 | 353 | log::trace!("exit: request_link -> {:?}", new_ferox_response); |
332 | 354 | |
341 | 363 | /// http://localhost/stuff/things |
342 | 364 | /// this function requests: |
343 | 365 | /// http://localhost/robots.txt |
344 | pub(super) async fn extract_from_robots(&self) -> Result<HashSet<String>> { | |
366 | pub(super) async fn extract_from_robots(&self) -> Result<(HashSet<String>, bool)> { | |
345 | 367 | log::trace!("enter: extract_robots_txt"); |
346 | 368 | |
347 | 369 | let mut links: HashSet<String> = HashSet::new(); |
348 | ||
349 | let response = self.request_robots_txt().await?; | |
350 | ||
351 | for capture in self.robots_regex.captures_iter(response.text()) { | |
370 | let dirlist_flag = false; | |
371 | ||
372 | // request | |
373 | let response = self.make_extract_request("/robots.txt").await?; | |
374 | let body = response.text(); | |
375 | ||
376 | for capture in self.robots_regex.captures_iter(body) { | |
352 | 377 | if let Some(new_path) = capture.name("url_path") { |
353 | 378 | let mut new_url = Url::parse(&self.url)?; |
354 | 379 | new_url.set_path(new_path.as_str()); |
360 | 385 | |
361 | 386 | self.update_stats(links.len())?; |
362 | 387 | |
363 | log::trace!("exit: extract_robots_txt -> {:?}", links); | |
364 | Ok(links) | |
365 | } | |
366 | ||
367 | /// helper function that simply requests /robots.txt on the given url's base url | |
388 | log::trace!("exit: extract_robots_txt -> {:?} {}", links, dirlist_flag); | |
389 | Ok((links, dirlist_flag)) | |
390 | } | |
391 | ||
392 | /// Entry point to parse html for links (i.e. webscraping, directory listings) | |
393 | /// this function requests: | |
394 | /// http://localhost/<location> | |
395 | pub(super) async fn parse_html(&self) -> Result<(HashSet<String>, bool)> { | |
396 | log::trace!("enter: parse_html"); | |
397 | ||
398 | let mut links: HashSet<String> = HashSet::new(); | |
399 | let mut dirlist_flag = false; | |
400 | ||
401 | // Response | |
402 | let url = Url::parse(&self.url)?; | |
403 | let response = self.make_extract_request(url.path()).await?; | |
404 | let resp_url = response.url(); | |
405 | let body = response.text(); | |
406 | let html = Html::parse_document(body); | |
407 | ||
408 | // Directory listing heuristic detection to not continue scanning | |
409 | // Index of /: apache | |
410 | // Directory Listing for /: tomcat, | |
411 | // Directory Listing -- /: ASP.NET | |
412 | // <host> - /: iis, azure, skipping due to loose heuristic | |
413 | let title_selector = Selector::parse("title").unwrap(); | |
414 | for t in html.select(&title_selector) { | |
415 | let title = t.inner_html().to_lowercase(); | |
416 | if title.contains("directory listing for /") | |
417 | || title.contains("index of /") | |
418 | || title.contains("directory listing -- /") | |
419 | { | |
420 | log::debug!("Directory listing heuristic detection from \"{}\"", title); | |
421 | dirlist_flag = true; | |
422 | ||
423 | self.extract_links_by_attr(resp_url, &mut links, &html, "a", "href"); | |
424 | self.update_stats(links.len())?; | |
425 | ||
426 | log::trace!("exit: parse_html -> {:?} {}", links, dirlist_flag); | |
427 | return Ok((links, dirlist_flag)); | |
428 | } | |
429 | } | |
430 | ||
431 | // Extract Links | |
432 | self.extract_links_by_attr(resp_url, &mut links, &html, "a", "href"); | |
433 | self.extract_links_by_attr(resp_url, &mut links, &html, "img", "src"); | |
434 | self.extract_links_by_attr(resp_url, &mut links, &html, "form", "action"); | |
435 | self.extract_links_by_attr(resp_url, &mut links, &html, "script", "src"); | |
436 | self.extract_links_by_attr(resp_url, &mut links, &html, "iframe", "src"); | |
437 | self.extract_links_by_attr(resp_url, &mut links, &html, "div", "src"); | |
438 | self.extract_links_by_attr(resp_url, &mut links, &html, "frame", "src"); | |
439 | self.extract_links_by_attr(resp_url, &mut links, &html, "embed", "src"); | |
440 | self.extract_links_by_attr(resp_url, &mut links, &html, "script", "src"); | |
441 | ||
442 | self.update_stats(links.len())?; | |
443 | ||
444 | log::trace!("exit: parse_html -> {:?} {}", links, dirlist_flag); | |
445 | Ok((links, dirlist_flag)) | |
446 | } | |
447 | ||
448 | /// simple helper to get html links by tag/attribute and add it to the `links` HashSet | |
449 | fn extract_links_by_attr( | |
450 | &self, | |
451 | resp_url: &Url, | |
452 | links: &mut HashSet<String>, | |
453 | html: &Html, | |
454 | html_tag: &str, | |
455 | html_attr: &str, | |
456 | ) { | |
457 | log::trace!("enter: extract_links_by_attr"); | |
458 | ||
459 | let selector = Selector::parse(html_tag).unwrap(); | |
460 | let tags = html | |
461 | .select(&selector) | |
462 | .filter(|a| a.value().attrs().any(|attr| attr.0 == html_attr)); | |
463 | for t in tags { | |
464 | if let Some(link) = t.value().attr(html_attr) { | |
465 | log::debug!("Parsed link \"{}\" from {}", link, resp_url.as_str()); | |
466 | ||
467 | match Url::parse(link) { | |
468 | Ok(absolute) => { | |
469 | if absolute.domain() != resp_url.domain() | |
470 | || absolute.host() != resp_url.host() | |
471 | { | |
472 | // domains/ips are not the same, don't scan things that aren't part of the original | |
473 | // target url | |
474 | continue; | |
475 | } | |
476 | ||
477 | if self.add_all_sub_paths(absolute.path(), links).is_err() { | |
478 | log::warn!("could not add sub-paths from {} to {:?}", absolute, links); | |
479 | } | |
480 | } | |
481 | Err(e) => { | |
482 | // this is the expected error that happens when we try to parse a url fragment | |
483 | // ex: Url::parse("/login") -> Err("relative URL without a base") | |
484 | // while this is technically an error, these are good results for us | |
485 | if e.to_string().contains("relative URL without a base") { | |
486 | if self.add_all_sub_paths(link, links).is_err() { | |
487 | log::warn!("could not add sub-paths from {} to {:?}", link, links); | |
488 | } | |
489 | } else { | |
490 | // unexpected error has occurred | |
491 | log::warn!("Could not parse given url: {}", e); | |
492 | self.handles.stats.send(AddError(Other)).unwrap_or_default(); | |
493 | } | |
494 | } | |
495 | } | |
496 | } | |
497 | } | |
498 | ||
499 | log::trace!("exit: extract_links_by_attr"); | |
500 | } | |
501 | ||
502 | /// helper function that simply requests at <location> on the given url's base url | |
368 | 503 | /// |
369 | 504 | /// example: |
370 | /// http://localhost/api/users -> http://localhost/robots.txt | |
371 | /// | |
372 | /// The length of the given path has no effect on what's requested; it's always | |
373 | /// base url + /robots.txt | |
374 | pub(super) async fn request_robots_txt(&self) -> Result<FeroxResponse> { | |
375 | log::trace!("enter: get_robots_file"); | |
505 | /// http://localhost/api/users -> http://localhost/<location> | |
506 | pub(super) async fn make_extract_request(&self, location: &str) -> Result<FeroxResponse> { | |
507 | log::trace!("enter: make_extract_request"); | |
376 | 508 | |
377 | 509 | // more often than not, domain/robots.txt will redirect to www.domain/robots.txt or something |
378 | 510 | // similar; to account for that, create a client that will follow redirects, regardless of |
396 | 528 | )?; |
397 | 529 | |
398 | 530 | let mut url = Url::parse(&self.url)?; |
399 | url.set_path("/robots.txt"); // overwrite existing path with /robots.txt | |
531 | url.set_path(location); // overwrite existing path | |
400 | 532 | |
401 | 533 | // purposefully not using logged_request here due to using the special client |
402 | 534 | let response = make_request( |
403 | 535 | &client, |
404 | 536 | &url, |
537 | DEFAULT_METHOD, | |
538 | None, | |
405 | 539 | self.handles.config.output_level, |
406 | 540 | &self.handles.config, |
407 | 541 | self.handles.stats.tx.clone(), |
408 | 542 | ) |
409 | 543 | .await?; |
410 | 544 | |
411 | let ferox_response = | |
412 | FeroxResponse::from(response, &self.url, true, self.handles.config.output_level).await; | |
413 | ||
414 | log::trace!("exit: get_robots_file -> {}", ferox_response); | |
545 | let ferox_response = FeroxResponse::from( | |
546 | response, | |
547 | &self.url, | |
548 | DEFAULT_METHOD, | |
549 | true, | |
550 | self.handles.config.output_level, | |
551 | ) | |
552 | .await; | |
553 | ||
554 | log::trace!("exit: make_extract_request -> {}", ferox_response); | |
415 | 555 | Ok(ferox_response) |
416 | 556 | } |
417 | 557 |
3 | 3 | use crate::scan_manager::ScanOrder; |
4 | 4 | use crate::{ |
5 | 5 | event_handlers::Handles, scan_manager::FeroxScans, utils::make_request, Command, FeroxChannel, |
6 | DEFAULT_METHOD, | |
6 | 7 | }; |
7 | 8 | use anyhow::Result; |
8 | 9 | use httpmock::{Method::GET, MockServer}; |
18 | 19 | /// Extractor for testing response bodies |
19 | 20 | static ref BODY_EXT: Extractor<'static> = setup_extractor(ExtractionTarget::ResponseBody, Arc::new(FeroxScans::default())); |
20 | 21 | |
22 | /// Extractor for testing paring html | |
23 | static ref PARSEHTML_EXT: Extractor<'static> = setup_extractor(ExtractionTarget::ParseHtml, Arc::new(FeroxScans::default())); | |
24 | ||
21 | 25 | /// FeroxResponse for Extractor |
22 | 26 | static ref RESPONSE: FeroxResponse = get_test_response(); |
23 | 27 | } |
40 | 44 | ExtractionTarget::RobotsTxt => builder |
41 | 45 | .url("http://localhost") |
42 | 46 | .target(ExtractionTarget::RobotsTxt), |
47 | ExtractionTarget::ParseHtml => builder | |
48 | .url("http://localhost") | |
49 | .target(ExtractionTarget::ParseHtml), | |
43 | 50 | }; |
44 | 51 | |
45 | 52 | let config = Arc::new(Configuration::new().unwrap()); |
221 | 228 | let response = make_request( |
222 | 229 | &client, |
223 | 230 | &url, |
231 | DEFAULT_METHOD, | |
232 | None, | |
224 | 233 | OutputLevel::Default, |
225 | 234 | &config, |
226 | 235 | tx_stats.clone(), |
230 | 239 | let (handles, _rx) = Handles::for_testing(None, None); |
231 | 240 | |
232 | 241 | let handles = Arc::new(handles); |
233 | let ferox_response = | |
234 | FeroxResponse::from(response, &srv.url(""), true, OutputLevel::Default).await; | |
242 | let ferox_response = FeroxResponse::from( | |
243 | response, | |
244 | &srv.url(""), | |
245 | DEFAULT_METHOD, | |
246 | true, | |
247 | OutputLevel::Default, | |
248 | ) | |
249 | .await; | |
235 | 250 | |
236 | 251 | let extractor = Extractor { |
237 | 252 | links_regex: Regex::new(LINKFINDER_REGEX).unwrap(), |
242 | 257 | handles: handles.clone(), |
243 | 258 | }; |
244 | 259 | |
245 | let links = extractor.extract_from_body().await?; | |
260 | let links = (extractor.extract_from_body().await?).0; | |
246 | 261 | |
247 | 262 | assert!(links.is_empty()); |
248 | 263 | assert_eq!(mock.hits(), 1); |
270 | 285 | handles, |
271 | 286 | }; |
272 | 287 | |
273 | let resp = extractor.request_robots_txt().await?; | |
288 | let resp = extractor.make_extract_request("/robots.txt").await?; | |
274 | 289 | |
275 | 290 | assert!(matches!(resp.status(), &StatusCode::OK)); |
276 | 291 | println!("{}", resp); |
303 | 318 | .handles(handles) |
304 | 319 | .build()?; |
305 | 320 | |
306 | let resp = extractor.request_robots_txt().await?; | |
321 | let resp = extractor.make_extract_request("/robots.txt").await?; | |
307 | 322 | |
308 | 323 | assert!(matches!(resp.status(), &StatusCode::OK)); |
309 | 324 | assert_eq!(resp.content_length(), 19); |
6 | 6 | skip_fail, |
7 | 7 | utils::{fmt_err, logged_request}, |
8 | 8 | Command::AddFilter, |
9 | SIMILARITY_THRESHOLD, | |
9 | DEFAULT_METHOD, SIMILARITY_THRESHOLD, | |
10 | 10 | }; |
11 | 11 | use anyhow::Result; |
12 | 12 | use fuzzyhash::FuzzyHash; |
71 | 71 | let url = skip_fail!(Url::parse(similarity_filter)); |
72 | 72 | |
73 | 73 | // attempt to request the given url |
74 | let resp = skip_fail!(logged_request(&url, handles.clone()).await); | |
74 | let resp = skip_fail!(logged_request(&url, DEFAULT_METHOD, None, handles.clone()).await); | |
75 | 75 | |
76 | 76 | // if successful, create a filter based on the response's body |
77 | let fr = | |
78 | FeroxResponse::from(resp, similarity_filter, true, handles.config.output_level).await; | |
77 | let fr = FeroxResponse::from( | |
78 | resp, | |
79 | similarity_filter, | |
80 | DEFAULT_METHOD, | |
81 | true, | |
82 | handles.config.output_level, | |
83 | ) | |
84 | .await; | |
79 | 85 | |
80 | 86 | // hash the response body and store the resulting hash in the filter object |
81 | 87 | let hash = FuzzyHash::new(&fr.text()).to_string(); |
121 | 121 | size: 83, |
122 | 122 | dynamic: 0, |
123 | 123 | dont_filter: false, |
124 | method: "GET".to_owned(), | |
124 | 125 | }; |
125 | 126 | |
126 | 127 | assert!(filter.should_filter_response(&resp)); |
151 | 152 | size: 0, |
152 | 153 | dynamic: 59, // content-length - 5 (len('stuff')) |
153 | 154 | dont_filter: false, |
155 | method: "GET".to_owned(), | |
154 | 156 | }; |
155 | 157 | |
156 | 158 | println!("resp: {:?}: filter: {:?}", resp, filter); |
0 | 0 | use super::*; |
1 | use crate::url::FeroxUrl; | |
1 | use crate::{url::FeroxUrl, DEFAULT_METHOD}; | |
2 | 2 | |
3 | 3 | /// Data holder for two pieces of data needed when auto-filtering out wildcard responses |
4 | 4 | /// |
16 | 16 | |
17 | 17 | /// size of the response that should be included with filters passed via runtime configuration |
18 | 18 | pub size: u64, |
19 | ||
20 | /// method used in request that should be included with filters passed via runtime configuration | |
21 | pub method: String, | |
19 | 22 | |
20 | 23 | /// whether or not the user passed -D on the command line |
21 | 24 | pub(super) dont_filter: bool, |
39 | 42 | Self { |
40 | 43 | dont_filter: false, |
41 | 44 | size: u64::MAX, |
45 | method: DEFAULT_METHOD.to_owned(), | |
42 | 46 | dynamic: u64::MAX, |
43 | 47 | } |
44 | 48 | } |
59 | 63 | return false; |
60 | 64 | } |
61 | 65 | |
62 | if self.size != u64::MAX && self.size == response.content_length() { | |
66 | if self.size != u64::MAX | |
67 | && self.size == response.content_length() | |
68 | && self.method == response.method().as_str() | |
69 | { | |
63 | 70 | // static wildcard size found during testing |
64 | 71 | // size isn't default, size equals response length, and auto-filter is on |
65 | 72 | log::debug!("static wildcard: filtered out {}", response.url()); |
67 | 74 | return true; |
68 | 75 | } |
69 | 76 | |
70 | if self.size == u64::MAX && response.content_length() == 0 { | |
77 | if self.size == u64::MAX | |
78 | && response.content_length() == 0 | |
79 | && self.method == response.method().as_str() | |
80 | { | |
71 | 81 | // static wildcard size found during testing |
72 | 82 | // but response length was zero; pointed out by @Tib3rius |
73 | 83 | log::debug!("static wildcard: filtered out {}", response.url()); |
12 | 12 | skip_fail, |
13 | 13 | url::FeroxUrl, |
14 | 14 | utils::{ferox_print, fmt_err, logged_request, status_colorizer}, |
15 | DEFAULT_METHOD, | |
15 | 16 | }; |
16 | 17 | |
17 | 18 | /// length of a standard UUID, used when determining wildcard responses |
19 | 20 | |
20 | 21 | /// wrapper around ugly string formatting |
21 | 22 | macro_rules! format_template { |
22 | ($template:expr, $length:expr) => { | |
23 | ($template:expr, $method:expr, $length:expr) => { | |
23 | 24 | format!( |
24 | 25 | $template, |
25 | 26 | status_colorizer("WLD"), |
27 | $method, | |
26 | 28 | "-", |
27 | 29 | "-", |
28 | 30 | "-", |
88 | 90 | return Ok(0); |
89 | 91 | } |
90 | 92 | |
93 | let data = match self.handles.config.data.is_empty() { | |
94 | true => None, | |
95 | false => Some(self.handles.config.data.as_slice()), | |
96 | }; | |
97 | ||
91 | 98 | let ferox_url = FeroxUrl::from_string(target_url, self.handles.clone()); |
92 | 99 | |
93 | let ferox_response = self.make_wildcard_request(&ferox_url, 1).await?; | |
94 | ||
95 | // found a wildcard response | |
96 | let mut wildcard = WildcardFilter::new(self.handles.config.dont_filter); | |
97 | ||
98 | let wc_length = ferox_response.content_length(); | |
99 | ||
100 | if wc_length == 0 { | |
101 | log::trace!("exit: wildcard_test -> 1"); | |
100 | for method in self.handles.config.methods.iter() { | |
101 | let ferox_response = self | |
102 | .make_wildcard_request(&ferox_url, method.as_str(), data, 1) | |
103 | .await?; | |
104 | ||
105 | // found a wildcard response | |
106 | let mut wildcard = WildcardFilter::new(self.handles.config.dont_filter); | |
107 | ||
108 | let wc_length = ferox_response.content_length(); | |
109 | ||
110 | if wc_length == 0 { | |
111 | log::trace!("exit: wildcard_test -> 1"); | |
112 | self.send_filter(wildcard)?; | |
113 | return Ok(1); | |
114 | } | |
115 | ||
116 | // content length of wildcard is non-zero, perform additional tests: | |
117 | // make a second request, with a known-sized (64) longer request | |
118 | let resp_two = self | |
119 | .make_wildcard_request(&ferox_url, method.as_str(), data, 3) | |
120 | .await?; | |
121 | ||
122 | let wc2_length = resp_two.content_length(); | |
123 | ||
124 | wildcard.method = resp_two.method().as_str().to_owned(); | |
125 | ||
126 | if wc2_length == wc_length + (UUID_LENGTH * 2) { | |
127 | // second length is what we'd expect to see if the requested url is | |
128 | // reflected in the response along with some static content; aka custom 404 | |
129 | let url_len = ferox_url.path_length()?; | |
130 | ||
131 | wildcard.dynamic = wc_length - url_len; | |
132 | ||
133 | if matches!( | |
134 | self.handles.config.output_level, | |
135 | OutputLevel::Default | OutputLevel::Quiet | |
136 | ) { | |
137 | let msg = format_template!("{} {:>8} {:>9} {:>9} {:>9} Wildcard response is dynamic; {} ({} + url length) responses; toggle this behavior by using {}\n", method, wildcard.dynamic); | |
138 | ferox_print(&msg, &PROGRESS_PRINTER); | |
139 | } | |
140 | } else if wc_length == wc2_length { | |
141 | wildcard.size = wc_length; | |
142 | ||
143 | if matches!( | |
144 | self.handles.config.output_level, | |
145 | OutputLevel::Default | OutputLevel::Quiet | |
146 | ) { | |
147 | let msg = format_template!("{} {:>8} {:>9} {:>9} {:>9} Wildcard response is static; {} {} responses; toggle this behavior by using {}\n", method, wildcard.size); | |
148 | ferox_print(&msg, &PROGRESS_PRINTER); | |
149 | } | |
150 | } | |
151 | ||
102 | 152 | self.send_filter(wildcard)?; |
103 | return Ok(1); | |
104 | } | |
105 | ||
106 | // content length of wildcard is non-zero, perform additional tests: | |
107 | // make a second request, with a known-sized (64) longer request | |
108 | let resp_two = self.make_wildcard_request(&ferox_url, 3).await?; | |
109 | ||
110 | let wc2_length = resp_two.content_length(); | |
111 | ||
112 | if wc2_length == wc_length + (UUID_LENGTH * 2) { | |
113 | // second length is what we'd expect to see if the requested url is | |
114 | // reflected in the response along with some static content; aka custom 404 | |
115 | let url_len = ferox_url.path_length()?; | |
116 | ||
117 | wildcard.dynamic = wc_length - url_len; | |
118 | ||
119 | if matches!( | |
120 | self.handles.config.output_level, | |
121 | OutputLevel::Default | OutputLevel::Quiet | |
122 | ) { | |
123 | let msg = format_template!("{} {:>9} {:>9} {:>9} Wildcard response is dynamic; {} ({} + url length) responses; toggle this behavior by using {}\n", wildcard.dynamic); | |
124 | ferox_print(&msg, &PROGRESS_PRINTER); | |
125 | } | |
126 | } else if wc_length == wc2_length { | |
127 | wildcard.size = wc_length; | |
128 | ||
129 | if matches!( | |
130 | self.handles.config.output_level, | |
131 | OutputLevel::Default | OutputLevel::Quiet | |
132 | ) { | |
133 | let msg = format_template!("{} {:>9} {:>9} {:>9} Wildcard response is static; {} {} responses; toggle this behavior by using {}\n", wildcard.size); | |
134 | ferox_print(&msg, &PROGRESS_PRINTER); | |
135 | } | |
136 | } | |
137 | ||
138 | self.send_filter(wildcard)?; | |
153 | } | |
139 | 154 | |
140 | 155 | log::trace!("exit: wildcard_test"); |
141 | 156 | Ok(2) |
150 | 165 | async fn make_wildcard_request( |
151 | 166 | &self, |
152 | 167 | target: &FeroxUrl, |
168 | method: &str, | |
169 | data: Option<&[u8]>, | |
153 | 170 | length: usize, |
154 | 171 | ) -> Result<FeroxResponse> { |
155 | 172 | log::trace!("enter: make_wildcard_request({}, {})", target, length); |
165 | 182 | |
166 | 183 | let nonexistent_url = target.format(&unique_str, slash)?; |
167 | 184 | |
168 | let response = logged_request(&nonexistent_url.to_owned(), self.handles.clone()).await?; | |
185 | let response = logged_request( | |
186 | &nonexistent_url.to_owned(), | |
187 | method, | |
188 | data, | |
189 | self.handles.clone(), | |
190 | ) | |
191 | .await?; | |
169 | 192 | |
170 | 193 | if self |
171 | 194 | .handles |
177 | 200 | let mut ferox_response = FeroxResponse::from( |
178 | 201 | response, |
179 | 202 | &target.target, |
203 | method, | |
180 | 204 | true, |
181 | 205 | self.handles.config.output_level, |
182 | 206 | ) |
222 | 246 | let url = FeroxUrl::from_string(target_url, self.handles.clone()); |
223 | 247 | let request = skip_fail!(url.format("", None)); |
224 | 248 | |
225 | let result = logged_request(&request, self.handles.clone()).await; | |
249 | let result = logged_request(&request, DEFAULT_METHOD, None, self.handles.clone()).await; | |
226 | 250 | |
227 | 251 | match result { |
228 | 252 | Ok(_) => { |
86 | 86 | StatusCode::INTERNAL_SERVER_ERROR, |
87 | 87 | ]; |
88 | 88 | |
89 | /// Default method for requests | |
90 | pub(crate) const DEFAULT_METHOD: &str = "GET"; | |
91 | ||
89 | 92 | /// Default filename for config file settings |
90 | 93 | /// |
91 | 94 | /// Expected location is in the same directory as the feroxbuster binary. |
50 | 50 | let mut words = Vec::new(); |
51 | 51 | |
52 | 52 | for line in reader.lines() { |
53 | let result = match line { | |
54 | Ok(read_line) => read_line, | |
55 | Err(_) => continue, | |
56 | }; | |
57 | ||
58 | if result.starts_with('#') || result.is_empty() { | |
59 | continue; | |
60 | } | |
61 | ||
62 | words.push(result); | |
53 | line.map(|result| { | |
54 | if !result.starts_with('#') && !result.is_empty() { | |
55 | words.push(result); | |
56 | } | |
57 | }) | |
58 | .ok(); | |
63 | 59 | } |
64 | 60 | |
65 | 61 | log::trace!( |
0 | use clap::{App, Arg, ArgGroup}; | |
0 | use clap::{ | |
1 | crate_authors, crate_description, crate_name, crate_version, App, Arg, ArgGroup, ValueHint, | |
2 | }; | |
1 | 3 | use lazy_static::lazy_static; |
2 | 4 | use regex::Regex; |
3 | 5 | use std::env; |
13 | 15 | /// - 1d |
14 | 16 | pub static ref TIMESPEC_REGEX: Regex = |
15 | 17 | Regex::new(r"^(?i)(?P<n>\d+)(?P<m>[smdh])$").expect("Could not compile regex"); |
18 | ||
19 | /// help string for user agent, your guess is as good as mine as to why this is required... | |
20 | static ref DEFAULT_USER_AGENT: String = format!( | |
21 | "Sets the User-Agent (default: feroxbuster/{})", | |
22 | crate_version!() | |
23 | ); | |
16 | 24 | } |
17 | 25 | |
18 | 26 | /// Create and return an instance of [clap::App](https://docs.rs/clap/latest/clap/struct.App.html), i.e. the Command Line Interface's configuration |
19 | pub fn initialize() -> App<'static, 'static> { | |
20 | let mut app = App::new("feroxbuster") | |
21 | .version(env!("CARGO_PKG_VERSION")) | |
22 | .author("Ben 'epi' Risher (@epi052)") | |
23 | .about("A fast, simple, recursive content discovery tool written in Rust") | |
24 | .arg( | |
25 | Arg::with_name("wordlist") | |
26 | .short("w") | |
27 | .long("wordlist") | |
28 | .value_name("FILE") | |
29 | .help("Path to the wordlist") | |
30 | .takes_value(true), | |
31 | ) | |
32 | .arg( | |
33 | Arg::with_name("url") | |
34 | .short("u") | |
27 | pub fn initialize() -> App<'static> { | |
28 | let app = App::new(crate_name!()) | |
29 | .version(crate_version!()) | |
30 | .author(crate_authors!()) | |
31 | .about(crate_description!()); | |
32 | ||
33 | ///////////////////////////////////////////////////////////////////// | |
34 | // group - target selection | |
35 | ///////////////////////////////////////////////////////////////////// | |
36 | let app = app | |
37 | .arg( | |
38 | Arg::new("url") | |
39 | .short('u') | |
35 | 40 | .long("url") |
36 | .required_unless_one(&["stdin", "resume_from"]) | |
41 | .required_unless_present_any(&["stdin", "resume_from"]) | |
42 | .help_heading("Target selection") | |
37 | 43 | .value_name("URL") |
38 | .multiple(true) | |
39 | .use_delimiter(true) | |
40 | .help("The target URL(s) (required, unless --stdin used)"), | |
41 | ) | |
42 | .arg( | |
43 | Arg::with_name("threads") | |
44 | .short("t") | |
45 | .long("threads") | |
46 | .value_name("THREADS") | |
47 | .takes_value(true) | |
48 | .help("Number of concurrent threads (default: 50)"), | |
49 | ) | |
50 | .arg( | |
51 | Arg::with_name("depth") | |
52 | .short("d") | |
53 | .long("depth") | |
54 | .value_name("RECURSION_DEPTH") | |
55 | .takes_value(true) | |
56 | .help("Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)"), | |
57 | ) | |
58 | .arg( | |
59 | Arg::with_name("timeout") | |
60 | .short("T") | |
61 | .long("timeout") | |
62 | .value_name("SECONDS") | |
63 | .takes_value(true) | |
64 | .help("Number of seconds before a request times out (default: 7)"), | |
65 | ) | |
66 | .arg( | |
67 | Arg::with_name("verbosity") | |
68 | .short("v") | |
69 | .long("verbosity") | |
70 | .takes_value(false) | |
71 | .multiple(true) | |
72 | .conflicts_with("silent") | |
73 | .help("Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v's is probably too much)"), | |
74 | ) | |
75 | .arg( | |
76 | Arg::with_name("proxy") | |
77 | .short("p") | |
78 | .long("proxy") | |
79 | .takes_value(true) | |
80 | .value_name("PROXY") | |
81 | .help( | |
82 | "Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)", | |
83 | ), | |
84 | ) | |
85 | .arg( | |
86 | Arg::with_name("replay_proxy") | |
87 | .short("P") | |
88 | .long("replay-proxy") | |
89 | .takes_value(true) | |
90 | .value_name("REPLAY_PROXY") | |
91 | .help( | |
92 | "Send only unfiltered requests through a Replay Proxy, instead of all requests", | |
93 | ), | |
94 | ) | |
95 | .arg( | |
96 | Arg::with_name("replay_codes") | |
97 | .short("R") | |
98 | .long("replay-codes") | |
99 | .value_name("REPLAY_CODE") | |
100 | .takes_value(true) | |
101 | .multiple(true) | |
102 | .use_delimiter(true) | |
103 | .requires("replay_proxy") | |
104 | .help( | |
105 | "Status Codes to send through a Replay Proxy when found (default: --status-codes value)", | |
106 | ), | |
107 | ) | |
108 | .arg( | |
109 | Arg::with_name("status_codes") | |
110 | .short("s") | |
111 | .long("status-codes") | |
112 | .value_name("STATUS_CODE") | |
113 | .takes_value(true) | |
114 | .multiple(true) | |
115 | .use_delimiter(true) | |
116 | .help( | |
117 | "Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)", | |
118 | ), | |
119 | ) | |
120 | .arg( | |
121 | Arg::with_name("silent") | |
122 | .long("silent") | |
123 | .takes_value(false) | |
124 | .conflicts_with("quiet") | |
125 | .help("Only print URLs + turn off logging (good for piping a list of urls to other commands)") | |
126 | ) | |
127 | .arg( | |
128 | Arg::with_name("quiet") | |
129 | .short("q") | |
130 | .long("quiet") | |
131 | .takes_value(false) | |
132 | .help("Hide progress bars and banner (good for tmux windows w/ notifications)") | |
133 | ) | |
134 | .arg( | |
135 | Arg::with_name("auto_tune") | |
136 | .long("auto-tune") | |
137 | .takes_value(false) | |
138 | .conflicts_with("auto_bail") | |
139 | .help("Automatically lower scan rate when an excessive amount of errors are encountered") | |
140 | ) | |
141 | .arg( | |
142 | Arg::with_name("auto_bail") | |
143 | .long("auto-bail") | |
144 | .takes_value(false) | |
145 | .help("Automatically stop scanning when an excessive amount of errors are encountered") | |
146 | ) | |
147 | .arg( | |
148 | Arg::with_name("json") | |
149 | .long("json") | |
150 | .takes_value(false) | |
151 | .requires("output_files") | |
152 | .help("Emit JSON logs to --output and --debug-log instead of normal text") | |
153 | ) | |
154 | .arg( | |
155 | Arg::with_name("dont_filter") | |
156 | .short("D") | |
157 | .long("dont-filter") | |
158 | .takes_value(false) | |
159 | .help("Don't auto-filter wildcard responses") | |
160 | ) | |
161 | .arg( | |
162 | Arg::with_name("output") | |
163 | .short("o") | |
164 | .long("output") | |
165 | .value_name("FILE") | |
166 | .help("Output file to write results to (use w/ --json for JSON entries)") | |
167 | .takes_value(true), | |
168 | ) | |
169 | .arg( | |
170 | Arg::with_name("resume_from") | |
44 | .use_delimiter(true) | |
45 | .value_hint(ValueHint::Url) | |
46 | .help("The target URL (required, unless [--stdin || --resume-from] used)"), | |
47 | ) | |
48 | .arg( | |
49 | Arg::new("stdin") | |
50 | .long("stdin") | |
51 | .help_heading("Target selection") | |
52 | .takes_value(false) | |
53 | .help("Read url(s) from STDIN") | |
54 | .conflicts_with("url") | |
55 | ) | |
56 | .arg( | |
57 | Arg::new("resume_from") | |
171 | 58 | .long("resume-from") |
59 | .value_hint(ValueHint::FilePath) | |
172 | 60 | .value_name("STATE_FILE") |
61 | .help_heading("Target selection") | |
173 | 62 | .help("State file from which to resume a partially complete scan (ex. --resume-from ferox-1606586780.state)") |
174 | 63 | .conflicts_with("url") |
175 | 64 | .takes_value(true), |
176 | ) | |
177 | .arg( | |
178 | Arg::with_name("debug_log") | |
65 | ); | |
66 | ||
67 | ///////////////////////////////////////////////////////////////////// | |
68 | // group - proxy settings | |
69 | ///////////////////////////////////////////////////////////////////// | |
70 | let app = app | |
71 | .arg( | |
72 | Arg::new("proxy") | |
73 | .short('p') | |
74 | .long("proxy") | |
75 | .takes_value(true) | |
76 | .value_name("PROXY") | |
77 | .value_hint(ValueHint::Url) | |
78 | .help_heading("Proxy settings") | |
79 | .help( | |
80 | "Proxy to use for requests (ex: http(s)://host:port, socks5(h)://host:port)", | |
81 | ), | |
82 | ) | |
83 | .arg( | |
84 | Arg::new("replay_proxy") | |
85 | .short('P') | |
86 | .long("replay-proxy") | |
87 | .takes_value(true) | |
88 | .value_hint(ValueHint::Url) | |
89 | .value_name("REPLAY_PROXY") | |
90 | .help_heading("Proxy settings") | |
91 | .help( | |
92 | "Send only unfiltered requests through a Replay Proxy, instead of all requests", | |
93 | ), | |
94 | ) | |
95 | .arg( | |
96 | Arg::new("replay_codes") | |
97 | .short('R') | |
98 | .long("replay-codes") | |
99 | .value_name("REPLAY_CODE") | |
100 | .takes_value(true) | |
101 | .multiple_values(true) | |
102 | .multiple_occurrences(true) | |
103 | .use_delimiter(true) | |
104 | .requires("replay_proxy") | |
105 | .help_heading("Proxy settings") | |
106 | .help( | |
107 | "Status Codes to send through a Replay Proxy when found (default: --status-codes value)", | |
108 | ), | |
109 | ); | |
110 | ||
111 | ///////////////////////////////////////////////////////////////////// | |
112 | // group - request settings | |
113 | ///////////////////////////////////////////////////////////////////// | |
114 | let app = app | |
115 | .arg( | |
116 | Arg::new("user_agent") | |
117 | .short('a') | |
118 | .long("user-agent") | |
119 | .value_name("USER_AGENT") | |
120 | .takes_value(true) | |
121 | .help_heading("Request settings") | |
122 | .help(&**DEFAULT_USER_AGENT), | |
123 | ) | |
124 | .arg( | |
125 | Arg::new("random_agent") | |
126 | .short('A') | |
127 | .long("random-agent") | |
128 | .takes_value(false) | |
129 | .help_heading("Request settings") | |
130 | .help("Use a random User-Agent"), | |
131 | ) | |
132 | .arg( | |
133 | Arg::new("extensions") | |
134 | .short('x') | |
135 | .long("extensions") | |
136 | .value_name("FILE_EXTENSION") | |
137 | .takes_value(true) | |
138 | .multiple_values(true) | |
139 | .multiple_occurrences(true) | |
140 | .use_delimiter(true) | |
141 | .help_heading("Request settings") | |
142 | .help( | |
143 | "File extension(s) to search for (ex: -x php -x pdf js)", | |
144 | ), | |
145 | ) | |
146 | .arg( | |
147 | Arg::new("methods") | |
148 | .short('m') | |
149 | .long("methods") | |
150 | .value_name("HTTP_METHODS") | |
151 | .takes_value(true) | |
152 | .multiple_values(true) | |
153 | .multiple_occurrences(true) | |
154 | .use_delimiter(true) | |
155 | .help_heading("Request settings") | |
156 | .help( | |
157 | "Which HTTP request method(s) should be sent (default: GET)", | |
158 | ), | |
159 | ) | |
160 | .arg( | |
161 | Arg::new("data") | |
162 | .long("data") | |
163 | .value_name("DATA") | |
164 | .takes_value(true) | |
165 | .help_heading("Request settings") | |
166 | .help( | |
167 | "Request's Body; can read data from a file if input starts with an @ (ex: @post.bin)", | |
168 | ), | |
169 | ) | |
170 | .arg( | |
171 | Arg::new("headers") | |
172 | .short('H') | |
173 | .long("headers") | |
174 | .value_name("HEADER") | |
175 | .takes_value(true) | |
176 | .help_heading("Request settings") | |
177 | .multiple_values(true) | |
178 | .multiple_occurrences(true) | |
179 | .use_delimiter(true) | |
180 | .help( | |
181 | "Specify HTTP headers to be used in each request (ex: -H Header:val -H 'stuff: things')", | |
182 | ), | |
183 | ) | |
184 | .arg( | |
185 | Arg::new("cookies") | |
186 | .short('b') | |
187 | .long("cookies") | |
188 | .value_name("COOKIE") | |
189 | .takes_value(true) | |
190 | .multiple_values(true) | |
191 | .multiple_occurrences(true) | |
192 | .use_delimiter(true) | |
193 | .help_heading("Request settings") | |
194 | .help( | |
195 | "Specify HTTP cookies to be used in each request (ex: -b stuff=things)", | |
196 | ), | |
197 | ) | |
198 | .arg( | |
199 | Arg::new("queries") | |
200 | .short('Q') | |
201 | .long("query") | |
202 | .value_name("QUERY") | |
203 | .takes_value(true) | |
204 | .multiple_values(true) | |
205 | .multiple_occurrences(true) | |
206 | .use_delimiter(true) | |
207 | .help_heading("Request settings") | |
208 | .help( | |
209 | "Request's URL query parameters (ex: -Q token=stuff -Q secret=key)", | |
210 | ), | |
211 | ) | |
212 | .arg( | |
213 | Arg::new("add_slash") | |
214 | .short('f') | |
215 | .long("add-slash") | |
216 | .help_heading("Request settings") | |
217 | .takes_value(false) | |
218 | .help("Append / to each request's URL") | |
219 | ); | |
220 | ||
221 | ///////////////////////////////////////////////////////////////////// | |
222 | // group - request filters | |
223 | ///////////////////////////////////////////////////////////////////// | |
224 | let app = app.arg( | |
225 | Arg::new("url_denylist") | |
226 | .long("dont-scan") | |
227 | .value_name("URL") | |
228 | .takes_value(true) | |
229 | .multiple_values(true) | |
230 | .multiple_occurrences(true) | |
231 | .use_delimiter(true) | |
232 | .help_heading("Request filters") | |
233 | .help("URL(s) or Regex Pattern(s) to exclude from recursion/scans"), | |
234 | ); | |
235 | ||
236 | ///////////////////////////////////////////////////////////////////// | |
237 | // group - response filters | |
238 | ///////////////////////////////////////////////////////////////////// | |
239 | let app = app | |
240 | .arg( | |
241 | Arg::new("filter_size") | |
242 | .short('S') | |
243 | .long("filter-size") | |
244 | .value_name("SIZE") | |
245 | .takes_value(true) | |
246 | .multiple_values(true) | |
247 | .multiple_occurrences(true) | |
248 | .use_delimiter(true) | |
249 | .help_heading("Response filters") | |
250 | .help( | |
251 | "Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)", | |
252 | ), | |
253 | ) | |
254 | .arg( | |
255 | Arg::new("filter_regex") | |
256 | .short('X') | |
257 | .long("filter-regex") | |
258 | .value_name("REGEX") | |
259 | .takes_value(true) | |
260 | .multiple_values(true) | |
261 | .multiple_occurrences(true) | |
262 | .use_delimiter(true) | |
263 | .help_heading("Response filters") | |
264 | .help( | |
265 | "Filter out messages via regular expression matching on the response's body (ex: -X '^ignore me$')", | |
266 | ), | |
267 | ) | |
268 | .arg( | |
269 | Arg::new("filter_words") | |
270 | .short('W') | |
271 | .long("filter-words") | |
272 | .value_name("WORDS") | |
273 | .takes_value(true) | |
274 | .multiple_values(true) | |
275 | .multiple_occurrences(true) | |
276 | .use_delimiter(true) | |
277 | .help_heading("Response filters") | |
278 | .help( | |
279 | "Filter out messages of a particular word count (ex: -W 312 -W 91,82)", | |
280 | ), | |
281 | ) | |
282 | .arg( | |
283 | Arg::new("filter_lines") | |
284 | .short('N') | |
285 | .long("filter-lines") | |
286 | .value_name("LINES") | |
287 | .takes_value(true) | |
288 | .multiple_values(true) | |
289 | .multiple_occurrences(true) | |
290 | .use_delimiter(true) | |
291 | .help_heading("Response filters") | |
292 | .help( | |
293 | "Filter out messages of a particular line count (ex: -N 20 -N 31,30)", | |
294 | ), | |
295 | ) | |
296 | .arg( | |
297 | Arg::new("filter_status") | |
298 | .short('C') | |
299 | .long("filter-status") | |
300 | .value_name("STATUS_CODE") | |
301 | .takes_value(true) | |
302 | .multiple_values(true) | |
303 | .multiple_occurrences(true) | |
304 | .use_delimiter(true) | |
305 | .help_heading("Response filters") | |
306 | .help( | |
307 | "Filter out status codes (deny list) (ex: -C 200 -C 401)", | |
308 | ), | |
309 | ) | |
310 | .arg( | |
311 | Arg::new("filter_similar") | |
312 | .long("filter-similar-to") | |
313 | .value_name("UNWANTED_PAGE") | |
314 | .takes_value(true) | |
315 | .multiple_values(true) | |
316 | .multiple_occurrences(true) | |
317 | .value_hint(ValueHint::Url) | |
318 | .use_delimiter(true) | |
319 | .help_heading("Response filters") | |
320 | .help( | |
321 | "Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)", | |
322 | ), | |
323 | ) | |
324 | .arg( | |
325 | Arg::new("status_codes") | |
326 | .short('s') | |
327 | .long("status-codes") | |
328 | .value_name("STATUS_CODE") | |
329 | .takes_value(true) | |
330 | .multiple_values(true) | |
331 | .multiple_occurrences(true) | |
332 | .use_delimiter(true) | |
333 | .help_heading("Response filters") | |
334 | .help( | |
335 | "Status Codes to include (allow list) (default: 200 204 301 302 307 308 401 403 405)", | |
336 | ), | |
337 | ); | |
338 | ||
339 | ///////////////////////////////////////////////////////////////////// | |
340 | // group - client settings | |
341 | ///////////////////////////////////////////////////////////////////// | |
342 | let app = app | |
343 | .arg( | |
344 | Arg::new("timeout") | |
345 | .short('T') | |
346 | .long("timeout") | |
347 | .value_name("SECONDS") | |
348 | .takes_value(true) | |
349 | .help_heading("Client settings") | |
350 | .help("Number of seconds before a client's request times out (default: 7)"), | |
351 | ) | |
352 | .arg( | |
353 | Arg::new("redirects") | |
354 | .short('r') | |
355 | .long("redirects") | |
356 | .takes_value(false) | |
357 | .help_heading("Client settings") | |
358 | .help("Allow client to follow redirects"), | |
359 | ) | |
360 | .arg( | |
361 | Arg::new("insecure") | |
362 | .short('k') | |
363 | .long("insecure") | |
364 | .takes_value(false) | |
365 | .help_heading("Client settings") | |
366 | .help("Disables TLS certificate validation in the client"), | |
367 | ); | |
368 | ||
369 | ///////////////////////////////////////////////////////////////////// | |
370 | // group - scan settings | |
371 | ///////////////////////////////////////////////////////////////////// | |
372 | let app = app | |
373 | .arg( | |
374 | Arg::new("threads") | |
375 | .short('t') | |
376 | .long("threads") | |
377 | .value_name("THREADS") | |
378 | .takes_value(true) | |
379 | .help_heading("Scan settings") | |
380 | .help("Number of concurrent threads (default: 50)"), | |
381 | ) | |
382 | .arg( | |
383 | Arg::new("no_recursion") | |
384 | .short('n') | |
385 | .long("no-recursion") | |
386 | .takes_value(false) | |
387 | .help_heading("Scan settings") | |
388 | .help("Do not scan recursively"), | |
389 | ) | |
390 | .arg( | |
391 | Arg::new("depth") | |
392 | .short('d') | |
393 | .long("depth") | |
394 | .value_name("RECURSION_DEPTH") | |
395 | .takes_value(true) | |
396 | .help_heading("Scan settings") | |
397 | .help("Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)"), | |
398 | ).arg( | |
399 | Arg::new("extract_links") | |
400 | .short('e') | |
401 | .long("extract-links") | |
402 | .takes_value(false) | |
403 | .help_heading("Scan settings") | |
404 | .help("Extract links from response body (html, javascript, etc...); make new requests based on findings") | |
405 | ) | |
406 | .arg( | |
407 | Arg::new("scan_limit") | |
408 | .short('L') | |
409 | .long("scan-limit") | |
410 | .value_name("SCAN_LIMIT") | |
411 | .takes_value(true) | |
412 | .help_heading("Scan settings") | |
413 | .help("Limit total number of concurrent scans (default: 0, i.e. no limit)") | |
414 | ) | |
415 | .arg( | |
416 | Arg::new("parallel") | |
417 | .long("parallel") | |
418 | .value_name("PARALLEL_SCANS") | |
419 | .takes_value(true) | |
420 | .requires("stdin") | |
421 | .help_heading("Scan settings") | |
422 | .help("Run parallel feroxbuster instances (one child process per url passed via stdin)") | |
423 | ) | |
424 | .arg( | |
425 | Arg::new("rate_limit") | |
426 | .long("rate-limit") | |
427 | .value_name("RATE_LIMIT") | |
428 | .takes_value(true) | |
429 | .conflicts_with("auto_tune") | |
430 | .help_heading("Scan settings") | |
431 | .help("Limit number of requests per second (per directory) (default: 0, i.e. no limit)") | |
432 | ) | |
433 | .arg( | |
434 | Arg::new("time_limit") | |
435 | .long("time-limit") | |
436 | .value_name("TIME_SPEC") | |
437 | .takes_value(true) | |
438 | .validator(valid_time_spec) | |
439 | .help_heading("Scan settings") | |
440 | .help("Limit total run time of all scans (ex: --time-limit 10m)") | |
441 | ) | |
442 | .arg( | |
443 | Arg::new("wordlist") | |
444 | .short('w') | |
445 | .long("wordlist") | |
446 | .value_hint(ValueHint::FilePath) | |
447 | .value_name("FILE") | |
448 | .help("Path to the wordlist") | |
449 | .help_heading("Scan settings") | |
450 | .takes_value(true), | |
451 | ).arg( | |
452 | Arg::new("auto_tune") | |
453 | .long("auto-tune") | |
454 | .takes_value(false) | |
455 | .conflicts_with("auto_bail") | |
456 | .help_heading("Scan settings") | |
457 | .help("Automatically lower scan rate when an excessive amount of errors are encountered") | |
458 | ) | |
459 | .arg( | |
460 | Arg::new("auto_bail") | |
461 | .long("auto-bail") | |
462 | .takes_value(false) | |
463 | .help_heading("Scan settings") | |
464 | .help("Automatically stop scanning when an excessive amount of errors are encountered") | |
465 | ).arg( | |
466 | Arg::new("dont_filter") | |
467 | .short('D') | |
468 | .long("dont-filter") | |
469 | .takes_value(false) | |
470 | .help_heading("Scan settings") | |
471 | .help("Don't auto-filter wildcard responses") | |
472 | ); | |
473 | ||
474 | ///////////////////////////////////////////////////////////////////// | |
475 | // group - output settings | |
476 | ///////////////////////////////////////////////////////////////////// | |
477 | let app = app | |
478 | .arg( | |
479 | Arg::new("verbosity") | |
480 | .short('v') | |
481 | .long("verbosity") | |
482 | .takes_value(false) | |
483 | .multiple_occurrences(true) | |
484 | .conflicts_with("silent") | |
485 | .help_heading("Output settings") | |
486 | .help("Increase verbosity level (use -vv or more for greater effect. [CAUTION] 4 -v's is probably too much)"), | |
487 | ).arg( | |
488 | Arg::new("silent") | |
489 | .long("silent") | |
490 | .takes_value(false) | |
491 | .conflicts_with("quiet") | |
492 | .help_heading("Output settings") | |
493 | .help("Only print URLs + turn off logging (good for piping a list of urls to other commands)") | |
494 | ) | |
495 | .arg( | |
496 | Arg::new("quiet") | |
497 | .short('q') | |
498 | .long("quiet") | |
499 | .takes_value(false) | |
500 | .help_heading("Output settings") | |
501 | .help("Hide progress bars and banner (good for tmux windows w/ notifications)") | |
502 | ) | |
503 | ||
504 | .arg( | |
505 | Arg::new("json") | |
506 | .long("json") | |
507 | .takes_value(false) | |
508 | .requires("output_files") | |
509 | .help_heading("Output settings") | |
510 | .help("Emit JSON logs to --output and --debug-log instead of normal text") | |
511 | ).arg( | |
512 | Arg::new("output") | |
513 | .short('o') | |
514 | .long("output") | |
515 | .value_hint(ValueHint::FilePath) | |
516 | .value_name("FILE") | |
517 | .help_heading("Output settings") | |
518 | .help("Output file to write results to (use w/ --json for JSON entries)") | |
519 | .takes_value(true), | |
520 | ) | |
521 | .arg( | |
522 | Arg::new("debug_log") | |
179 | 523 | .long("debug-log") |
180 | 524 | .value_name("FILE") |
525 | .value_hint(ValueHint::FilePath) | |
526 | .help_heading("Output settings") | |
181 | 527 | .help("Output file to write log entries (use w/ --json for JSON entries)") |
182 | 528 | .takes_value(true), |
183 | ) | |
184 | .arg( | |
185 | Arg::with_name("user_agent") | |
186 | .short("a") | |
187 | .long("user-agent") | |
188 | .value_name("USER_AGENT") | |
189 | .takes_value(true) | |
190 | .help( | |
191 | "Sets the User-Agent (default: feroxbuster/VERSION)" | |
192 | ), | |
193 | ) | |
194 | .arg( | |
195 | Arg::with_name("random_agent") | |
196 | .short("A") | |
197 | .long("random-agent") | |
198 | .takes_value(false) | |
199 | .help( | |
200 | "Use a random User-Agent" | |
201 | ), | |
202 | ) | |
203 | .arg( | |
204 | Arg::with_name("redirects") | |
205 | .short("r") | |
206 | .long("redirects") | |
207 | .takes_value(false) | |
208 | .help("Follow redirects") | |
209 | ) | |
210 | .arg( | |
211 | Arg::with_name("insecure") | |
212 | .short("k") | |
213 | .long("insecure") | |
214 | .takes_value(false) | |
215 | .help("Disables TLS certificate validation") | |
216 | ) | |
217 | .arg( | |
218 | Arg::with_name("extensions") | |
219 | .short("x") | |
220 | .long("extensions") | |
221 | .value_name("FILE_EXTENSION") | |
222 | .takes_value(true) | |
223 | .multiple(true) | |
224 | .use_delimiter(true) | |
225 | .help( | |
226 | "File extension(s) to search for (ex: -x php -x pdf js)", | |
227 | ), | |
228 | ) | |
229 | .arg( | |
230 | Arg::with_name("url_denylist") | |
231 | .long("dont-scan") | |
232 | .value_name("URL") | |
233 | .takes_value(true) | |
234 | .multiple(true) | |
235 | .use_delimiter(true) | |
236 | .help( | |
237 | "URL(s) or Regex Pattern(s) to exclude from recursion/scans", | |
238 | ), | |
239 | ) | |
240 | .arg( | |
241 | Arg::with_name("headers") | |
242 | .short("H") | |
243 | .long("headers") | |
244 | .value_name("HEADER") | |
245 | .takes_value(true) | |
246 | .multiple(true) | |
247 | .use_delimiter(true) | |
248 | .help( | |
249 | "Specify HTTP headers (ex: -H Header:val 'stuff: things')", | |
250 | ), | |
251 | ) | |
252 | .arg( | |
253 | Arg::with_name("queries") | |
254 | .short("Q") | |
255 | .long("query") | |
256 | .value_name("QUERY") | |
257 | .takes_value(true) | |
258 | .multiple(true) | |
259 | .use_delimiter(true) | |
260 | .help( | |
261 | "Specify URL query parameters (ex: -Q token=stuff -Q secret=key)", | |
262 | ), | |
263 | ) | |
264 | .arg( | |
265 | Arg::with_name("no_recursion") | |
266 | .short("n") | |
267 | .long("no-recursion") | |
268 | .takes_value(false) | |
269 | .help("Do not scan recursively") | |
270 | ) | |
271 | .arg( | |
272 | Arg::with_name("add_slash") | |
273 | .short("f") | |
274 | .long("add-slash") | |
275 | .takes_value(false) | |
276 | .help("Append / to each request") | |
277 | ) | |
278 | .arg( | |
279 | Arg::with_name("stdin") | |
280 | .long("stdin") | |
281 | .takes_value(false) | |
282 | .help("Read url(s) from STDIN") | |
283 | .conflicts_with("url") | |
284 | ) | |
285 | .arg( | |
286 | Arg::with_name("filter_size") | |
287 | .short("S") | |
288 | .long("filter-size") | |
289 | .value_name("SIZE") | |
290 | .takes_value(true) | |
291 | .multiple(true) | |
292 | .use_delimiter(true) | |
293 | .help( | |
294 | "Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)", | |
295 | ), | |
296 | ) | |
297 | .arg( | |
298 | Arg::with_name("filter_regex") | |
299 | .short("X") | |
300 | .long("filter-regex") | |
301 | .value_name("REGEX") | |
302 | .takes_value(true) | |
303 | .multiple(true) | |
304 | .use_delimiter(true) | |
305 | .help( | |
306 | "Filter out messages via regular expression matching on the response's body (ex: -X '^ignore me$')", | |
307 | ), | |
308 | ) | |
309 | .arg( | |
310 | Arg::with_name("filter_words") | |
311 | .short("W") | |
312 | .long("filter-words") | |
313 | .value_name("WORDS") | |
314 | .takes_value(true) | |
315 | .multiple(true) | |
316 | .use_delimiter(true) | |
317 | .help( | |
318 | "Filter out messages of a particular word count (ex: -W 312 -W 91,82)", | |
319 | ), | |
320 | ) | |
321 | .arg( | |
322 | Arg::with_name("filter_lines") | |
323 | .short("N") | |
324 | .long("filter-lines") | |
325 | .value_name("LINES") | |
326 | .takes_value(true) | |
327 | .multiple(true) | |
328 | .use_delimiter(true) | |
329 | .help( | |
330 | "Filter out messages of a particular line count (ex: -N 20 -N 31,30)", | |
331 | ), | |
332 | ) | |
333 | .arg( | |
334 | Arg::with_name("filter_status") | |
335 | .short("C") | |
336 | .long("filter-status") | |
337 | .value_name("STATUS_CODE") | |
338 | .takes_value(true) | |
339 | .multiple(true) | |
340 | .use_delimiter(true) | |
341 | .help( | |
342 | "Filter out status codes (deny list) (ex: -C 200 -C 401)", | |
343 | ), | |
344 | ) | |
345 | .arg( | |
346 | Arg::with_name("filter_similar") | |
347 | .long("filter-similar-to") | |
348 | .value_name("UNWANTED_PAGE") | |
349 | .takes_value(true) | |
350 | .multiple(true) | |
351 | .use_delimiter(true) | |
352 | .help( | |
353 | "Filter out pages that are similar to the given page (ex. --filter-similar-to http://site.xyz/soft404)", | |
354 | ), | |
355 | ) | |
356 | .arg( | |
357 | Arg::with_name("extract_links") | |
358 | .short("e") | |
359 | .long("extract-links") | |
360 | .takes_value(false) | |
361 | .help("Extract links from response body (html, javascript, etc...); make new requests based on findings (default: false)") | |
362 | ) | |
363 | .arg( | |
364 | Arg::with_name("scan_limit") | |
365 | .short("L") | |
366 | .long("scan-limit") | |
367 | .value_name("SCAN_LIMIT") | |
368 | .takes_value(true) | |
369 | .help("Limit total number of concurrent scans (default: 0, i.e. no limit)") | |
370 | ) | |
371 | .arg( | |
372 | Arg::with_name("parallel") | |
373 | .long("parallel") | |
374 | .value_name("PARALLEL_SCANS") | |
375 | .takes_value(true) | |
376 | .requires("stdin") | |
377 | .help("Run parallel feroxbuster instances (one child process per url passed via stdin)") | |
378 | ) | |
379 | .arg( | |
380 | Arg::with_name("rate_limit") | |
381 | .long("rate-limit") | |
382 | .value_name("RATE_LIMIT") | |
383 | .takes_value(true) | |
384 | .conflicts_with("auto_tune") | |
385 | .help("Limit number of requests per second (per directory) (default: 0, i.e. no limit)") | |
386 | ) | |
387 | .arg( | |
388 | Arg::with_name("time_limit") | |
389 | .long("time-limit") | |
390 | .value_name("TIME_SPEC") | |
391 | .takes_value(true) | |
392 | .validator(valid_time_spec) | |
393 | .help("Limit total run time of all scans (ex: --time-limit 10m)") | |
394 | ) | |
395 | .group(ArgGroup::with_name("output_files") | |
396 | .args(&["debug_log", "output"]) | |
397 | .multiple(true) | |
398 | ) | |
399 | .after_help(r#"NOTE: | |
400 | Options that take multiple values are very flexible. Consider the following ways of specifying | |
401 | extensions: | |
402 | ./feroxbuster -u http://127.1 -x pdf -x js,html -x php txt json,docx | |
403 | ||
404 | The command above adds .pdf, .js, .html, .php, .txt, .json, and .docx to each url | |
405 | ||
406 | All of the methods above (multiple flags, space separated, comma separated, etc...) are valid | |
407 | and interchangeable. The same goes for urls, headers, status codes, queries, and size filters. | |
408 | ||
409 | EXAMPLES: | |
410 | Multiple headers: | |
411 | ./feroxbuster -u http://127.1 -H Accept:application/json "Authorization: Bearer {token}" | |
412 | ||
413 | IPv6, non-recursive scan with INFO-level logging enabled: | |
414 | ./feroxbuster -u http://[::1] --no-recursion -vv | |
415 | ||
416 | Read urls from STDIN; pipe only resulting urls out to another tool | |
417 | cat targets | ./feroxbuster --stdin --silent -s 200 301 302 --redirects -x js | fff -s 200 -o js-files | |
418 | ||
419 | Proxy traffic through Burp | |
420 | ./feroxbuster -u http://127.1 --insecure --proxy http://127.0.0.1:8080 | |
421 | ||
422 | Proxy traffic through a SOCKS proxy | |
423 | ./feroxbuster -u http://127.1 --proxy socks5://127.0.0.1:9050 | |
424 | ||
425 | Pass auth token via query parameter | |
426 | ./feroxbuster -u http://127.1 --query token=0123456789ABCDEF | |
427 | ||
428 | Find links in javascript/html and make additional requests based on results | |
429 | ./feroxbuster -u http://127.1 --extract-links | |
430 | ||
431 | Ludicrous speed... go! | |
432 | ./feroxbuster -u http://127.1 -t 200 | |
433 | "#); | |
434 | ||
529 | ); | |
530 | ||
531 | ///////////////////////////////////////////////////////////////////// | |
532 | // group - miscellaneous | |
533 | ///////////////////////////////////////////////////////////////////// | |
534 | let mut app = app | |
535 | .group( | |
536 | ArgGroup::new("output_files") | |
537 | .args(&["debug_log", "output"]) | |
538 | .multiple(true), | |
539 | ) | |
540 | .after_long_help(EPILOGUE); | |
541 | ||
542 | ///////////////////////////////////////////////////////////////////// | |
543 | // end parser | |
544 | ///////////////////////////////////////////////////////////////////// | |
435 | 545 | for arg in env::args() { |
436 | 546 | // secure-77 noticed that when an incorrect flag/option is used, the short help message is printed |
437 | 547 | // which is fine, but if you add -h|--help, it still errors out on the bad flag/option, |
438 | 548 | // never showing the full help message. This code addresses that behavior |
439 | if arg == "--help" || arg == "-h" { | |
549 | if arg == "--help" { | |
440 | 550 | app.print_long_help().unwrap(); |
441 | 551 | println!(); // just a newline to mirror original --help output |
442 | 552 | process::exit(0); |
553 | } else if arg == "-h" { | |
554 | // same for -h, just shorter | |
555 | app.print_help().unwrap(); | |
556 | println!(); | |
557 | process::exit(0); | |
443 | 558 | } |
444 | 559 | } |
445 | 560 | |
447 | 562 | } |
448 | 563 | |
449 | 564 | /// Validate that a string is formatted as a number followed by s, m, h, or d (10d, 30s, etc...) |
450 | fn valid_time_spec(time_spec: String) -> Result<(), String> { | |
451 | match TIMESPEC_REGEX.is_match(&time_spec) { | |
565 | fn valid_time_spec(time_spec: &str) -> Result<(), String> { | |
566 | match TIMESPEC_REGEX.is_match(time_spec) { | |
452 | 567 | true => Ok(()), |
453 | 568 | false => { |
454 | 569 | let msg = format!( |
460 | 575 | } |
461 | 576 | } |
462 | 577 | |
578 | const EPILOGUE: &str = r#"NOTE: | |
579 | Options that take multiple values are very flexible. Consider the following ways of specifying | |
580 | extensions: | |
581 | ./feroxbuster -u http://127.1 -x pdf -x js,html -x php txt json,docx | |
582 | ||
583 | The command above adds .pdf, .js, .html, .php, .txt, .json, and .docx to each url | |
584 | ||
585 | All of the methods above (multiple flags, space separated, comma separated, etc...) are valid | |
586 | and interchangeable. The same goes for urls, headers, status codes, queries, and size filters. | |
587 | ||
588 | EXAMPLES: | |
589 | Multiple headers: | |
590 | ./feroxbuster -u http://127.1 -H Accept:application/json "Authorization: Bearer {token}" | |
591 | ||
592 | IPv6, non-recursive scan with INFO-level logging enabled: | |
593 | ./feroxbuster -u http://[::1] --no-recursion -vv | |
594 | ||
595 | Read urls from STDIN; pipe only resulting urls out to another tool | |
596 | cat targets | ./feroxbuster --stdin --silent -s 200 301 302 --redirects -x js | fff -s 200 -o js-files | |
597 | ||
598 | Proxy traffic through Burp | |
599 | ./feroxbuster -u http://127.1 --insecure --proxy http://127.0.0.1:8080 | |
600 | ||
601 | Proxy traffic through a SOCKS proxy | |
602 | ./feroxbuster -u http://127.1 --proxy socks5://127.0.0.1:9050 | |
603 | ||
604 | Pass auth token via query parameter | |
605 | ./feroxbuster -u http://127.1 --query token=0123456789ABCDEF | |
606 | ||
607 | Find links in javascript/html and make additional requests based on results | |
608 | ./feroxbuster -u http://127.1 --extract-links | |
609 | ||
610 | Ludicrous speed... go! | |
611 | ./feroxbuster -u http://127.1 -threads 200 | |
612 | ||
613 | Limit to a total of 60 active requests at any given time (threads * scan limit) | |
614 | ./feroxbuster -u http://127.1 --threads 30 --scan-limit 2 | |
615 | ||
616 | Send all 200/302 responses to a proxy (only proxy requests/responses you care about) | |
617 | ./feroxbuster -u http://127.1 --replay-proxy http://localhost:8080 --replay-codes 200 302 --insecure | |
618 | ||
619 | Abort or reduce scan speed to individual directory scans when too many errors have occurred | |
620 | ./feroxbuster -u http://127.1 --auto-bail | |
621 | ./feroxbuster -u http://127.1 --auto-tune | |
622 | ||
623 | Examples and demonstrations of all features | |
624 | https://epi052.github.io/feroxbuster-docs/docs/examples/ | |
625 | "#; | |
626 | ||
463 | 627 | #[cfg(test)] |
464 | 628 | mod tests { |
465 | 629 | use super::*; |
478 | 642 | /// that i didn't hose up the regex. Going to consolidate them into a single test |
479 | 643 | fn validate_valid_time_spec_validation() { |
480 | 644 | let float_rejected = "1.4m"; |
481 | assert!(valid_time_spec(float_rejected.into()).is_err()); | |
645 | assert!(valid_time_spec(float_rejected).is_err()); | |
482 | 646 | |
483 | 647 | let negative_rejected = "-1m"; |
484 | assert!(valid_time_spec(negative_rejected.into()).is_err()); | |
648 | assert!(valid_time_spec(negative_rejected).is_err()); | |
485 | 649 | |
486 | 650 | let only_number_rejected = "1"; |
487 | assert!(valid_time_spec(only_number_rejected.into()).is_err()); | |
651 | assert!(valid_time_spec(only_number_rejected).is_err()); | |
488 | 652 | |
489 | 653 | let only_measurement_rejected = "m"; |
490 | assert!(valid_time_spec(only_measurement_rejected.into()).is_err()); | |
654 | assert!(valid_time_spec(only_measurement_rejected).is_err()); | |
491 | 655 | |
492 | 656 | for accepted_measurement in &["s", "m", "h", "d", "S", "M", "H", "D"] { |
493 | 657 | // all upper/lowercase should be good |
494 | assert!(valid_time_spec(format!("1{}", *accepted_measurement)).is_ok()); | |
658 | assert!(valid_time_spec(&format!("1{}", *accepted_measurement)).is_ok()); | |
495 | 659 | } |
496 | 660 | |
497 | 661 | let leading_space_rejected = " 14m"; |
498 | assert!(valid_time_spec(leading_space_rejected.into()).is_err()); | |
662 | assert!(valid_time_spec(leading_space_rejected).is_err()); | |
499 | 663 | |
500 | 664 | let trailing_space_rejected = "14m "; |
501 | assert!(valid_time_spec(trailing_space_rejected.into()).is_err()); | |
665 | assert!(valid_time_spec(trailing_space_rejected).is_err()); | |
502 | 666 | |
503 | 667 | let space_between_rejected = "1 4m"; |
504 | assert!(valid_time_spec(space_between_rejected.into()).is_err()); | |
668 | assert!(valid_time_spec(space_between_rejected).is_err()); | |
505 | 669 | } |
506 | 670 | } |
34 | 34 | |
35 | 35 | style = match bar_type { |
36 | 36 | BarType::Hidden => style.template(""), |
37 | BarType::Default => style | |
38 | .template("[{bar:.cyan/blue}] - {elapsed:<4} {pos:>7}/{len:7} {per_sec:7} {prefix}"), | |
37 | BarType::Default => style.template( | |
38 | "[{bar:.cyan/blue}] - {elapsed:<4} {pos:>7}/{len:7} {per_sec:7} {prefix} {msg}", | |
39 | ), | |
39 | 40 | BarType::Message => style.template(&format!( |
40 | "[{{bar:.cyan/blue}}] - {{elapsed:<4}} {{pos:>7}}/{{len:7}} {:7} {{prefix}}", | |
41 | "[{{bar:.cyan/blue}}] - {{elapsed:<4}} {{pos:>7}}/{{len:7}} {:7} {{prefix}} {{msg}}", | |
41 | 42 | "-" |
42 | 43 | )), |
43 | 44 | BarType::Total => { |
6 | 6 | }; |
7 | 7 | |
8 | 8 | use anyhow::{Context, Result}; |
9 | use console::style; | |
9 | 10 | use reqwest::{ |
10 | 11 | header::{HeaderMap, HeaderName, HeaderValue}, |
11 | Response, StatusCode, Url, | |
12 | Method, Response, StatusCode, Url, | |
12 | 13 | }; |
13 | 14 | use serde::ser::SerializeStruct; |
14 | 15 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; |
35 | 36 | /// The `StatusCode` of this `FeroxResponse` |
36 | 37 | status: StatusCode, |
37 | 38 | |
39 | /// The HTTP Request `Method` of this `FeroxResponse` | |
40 | method: Method, | |
41 | ||
38 | 42 | /// The full response text |
39 | 43 | text: String, |
40 | 44 | |
65 | 69 | url: Url::parse("http://localhost").unwrap(), |
66 | 70 | original_url: "".to_string(), |
67 | 71 | status: Default::default(), |
72 | method: Method::default(), | |
68 | 73 | text: "".to_string(), |
69 | 74 | content_length: 0, |
70 | 75 | line_count: 0, |
82 | 87 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
83 | 88 | write!( |
84 | 89 | f, |
85 | "FeroxResponse {{ url: {}, status: {}, content-length: {} }}", | |
90 | "FeroxResponse {{ url: {}, method: {}, status: {}, content-length: {} }}", | |
86 | 91 | self.url(), |
92 | self.method(), | |
87 | 93 | self.status(), |
88 | 94 | self.content_length() |
89 | 95 | ) |
95 | 101 | /// Get the `StatusCode` of this `FeroxResponse` |
96 | 102 | pub fn status(&self) -> &StatusCode { |
97 | 103 | &self.status |
104 | } | |
105 | ||
106 | /// Get the `Method` of this `FeroxResponse` | |
107 | pub fn method(&self) -> &Method { | |
108 | &self.method | |
98 | 109 | } |
99 | 110 | |
100 | 111 | /// Get the `wildcard` of this `FeroxResponse` |
192 | 203 | pub async fn from( |
193 | 204 | response: Response, |
194 | 205 | original_url: &str, |
206 | method: &str, | |
195 | 207 | read_body: bool, |
196 | 208 | output_level: OutputLevel, |
197 | 209 | ) -> Self { |
223 | 235 | url, |
224 | 236 | original_url: original_url.to_string(), |
225 | 237 | status, |
238 | method: Method::from_bytes(method.as_bytes()).unwrap_or(Method::GET), | |
226 | 239 | content_length, |
227 | 240 | text, |
228 | 241 | headers, |
335 | 348 | let words = self.word_count().to_string(); |
336 | 349 | let chars = self.content_length().to_string(); |
337 | 350 | let status = self.status().as_str(); |
351 | let method = self.method().as_str(); | |
338 | 352 | let wild_status = status_colorizer("WLD"); |
353 | ||
354 | let mut url_with_redirect = match ( | |
355 | self.status().is_redirection(), | |
356 | self.headers().get("Location").is_some(), | |
357 | ) { | |
358 | (true, true) => { | |
359 | // redirect with Location header, show where it goes if possible | |
360 | let loc = self | |
361 | .headers() | |
362 | .get("Location") | |
363 | .unwrap() // known Some() already | |
364 | .to_str() | |
365 | .unwrap_or("Unknown"); | |
366 | ||
367 | // prettify the redirect target | |
368 | let loc = style(loc).yellow(); | |
369 | ||
370 | format!("{} => {loc}", self.url()) | |
371 | } | |
372 | _ => { | |
373 | // no redirect, just use the normal url | |
374 | self.url().to_string() | |
375 | } | |
376 | }; | |
339 | 377 | |
340 | 378 | if self.wildcard && matches!(self.output_level, OutputLevel::Default | OutputLevel::Quiet) { |
341 | 379 | // --silent was not used and response is a wildcard, special messages abound when |
343 | 381 | |
344 | 382 | // create the base message |
345 | 383 | let mut message = format!( |
346 | "{} {:>8}l {:>8}w {:>8}c Got {} for {} (url length: {})\n", | |
384 | "{} {:>8} {:>8}l {:>8}w {:>8}c Got {} for {} (url length: {})\n", | |
347 | 385 | wild_status, |
386 | method, | |
348 | 387 | lines, |
349 | 388 | words, |
350 | 389 | chars, |
354 | 393 | ); |
355 | 394 | |
356 | 395 | if self.status().is_redirection() { |
357 | // when it's a redirect, show where it goes, if possible | |
358 | if let Some(next_loc) = self.headers().get("Location") { | |
359 | let next_loc_str = next_loc.to_str().unwrap_or("Unknown"); | |
360 | ||
361 | let redirect_msg = format!( | |
362 | "{} {:>9} {:>9} {:>9} {} redirects to => {}\n", | |
363 | wild_status, | |
364 | "-", | |
365 | "-", | |
366 | "-", | |
367 | self.url(), | |
368 | next_loc_str | |
369 | ); | |
370 | ||
371 | message.push_str(&redirect_msg); | |
372 | } | |
373 | } | |
374 | ||
375 | // base message + redirection message (if appropriate) | |
396 | // initial wildcard messages are wordy enough, put the redirect by itself | |
397 | url_with_redirect = format!( | |
398 | "{} {:>9} {:>9} {:>9} {}\n", | |
399 | wild_status, "-", "-", "-", url_with_redirect | |
400 | ); | |
401 | ||
402 | // base message + redirection message (either empty string or redir msg) | |
403 | message.push_str(&url_with_redirect); | |
404 | } | |
405 | ||
376 | 406 | message |
377 | 407 | } else { |
378 | 408 | // not a wildcard, just create a normal entry |
379 | 409 | utils::create_report_string( |
380 | 410 | self.status.as_str(), |
411 | method, | |
381 | 412 | &lines, |
382 | 413 | &words, |
383 | 414 | &chars, |
384 | self.url().as_str(), | |
415 | &url_with_redirect, | |
385 | 416 | self.output_level, |
386 | 417 | ) |
387 | 418 | } |
447 | 478 | state.serialize_field("path", self.url.path())?; |
448 | 479 | state.serialize_field("wildcard", &self.wildcard)?; |
449 | 480 | state.serialize_field("status", &self.status.as_u16())?; |
481 | state.serialize_field("method", &self.method.as_str())?; | |
450 | 482 | state.serialize_field("content_length", &self.content_length)?; |
451 | 483 | state.serialize_field("line_count", &self.line_count)?; |
452 | 484 | state.serialize_field("word_count", &self.word_count)?; |
467 | 499 | url: Url::parse("http://localhost").unwrap(), |
468 | 500 | original_url: String::new(), |
469 | 501 | status: StatusCode::OK, |
502 | method: Method::GET, | |
470 | 503 | text: String::new(), |
471 | 504 | content_length: 0, |
472 | 505 | headers: HeaderMap::new(), |
499 | 532 | response.status = status; |
500 | 533 | } |
501 | 534 | } |
535 | } | |
536 | } | |
537 | "method" => { | |
538 | if let Some(method) = value.as_str() { | |
539 | response.method = Method::from_bytes(method.as_bytes()).unwrap_or_default(); | |
502 | 540 | } |
503 | 541 | } |
504 | 542 | "content_length" => { |
558 | 596 | url, |
559 | 597 | original_url: String::new(), |
560 | 598 | status: Default::default(), |
599 | method: Default::default(), | |
561 | 600 | text: "".to_string(), |
562 | 601 | content_length: 0, |
563 | 602 | line_count: 0, |
580 | 619 | url, |
581 | 620 | original_url: String::new(), |
582 | 621 | status: Default::default(), |
622 | method: Default::default(), | |
583 | 623 | text: "".to_string(), |
584 | 624 | content_length: 0, |
585 | 625 | line_count: 0, |
602 | 642 | url, |
603 | 643 | original_url: String::new(), |
604 | 644 | status: Default::default(), |
645 | method: Default::default(), | |
605 | 646 | text: "".to_string(), |
606 | 647 | content_length: 0, |
607 | 648 | line_count: 0, |
624 | 665 | url, |
625 | 666 | original_url: String::new(), |
626 | 667 | status: Default::default(), |
668 | method: Default::default(), | |
627 | 669 | text: "".to_string(), |
628 | 670 | content_length: 0, |
629 | 671 | line_count: 0, |
646 | 688 | url, |
647 | 689 | original_url: String::new(), |
648 | 690 | status: Default::default(), |
691 | method: Default::default(), | |
649 | 692 | text: "".to_string(), |
650 | 693 | content_length: 0, |
651 | 694 | line_count: 0, |
302 | 302 | #[test] |
303 | 303 | /// given a FeroxResponses, test that it serializes into the proper JSON entry |
304 | 304 | fn ferox_responses_serialize() { |
305 | let json_response = r#"{"type":"response","url":"https://nerdcore.com/css","original_url":"https://nerdcore.com","path":"/css","wildcard":true,"status":301,"content_length":173,"line_count":10,"word_count":16,"headers":{"server":"nginx/1.16.1"}}"#; | |
305 | let json_response = r#"{"type":"response","url":"https://nerdcore.com/css","original_url":"https://nerdcore.com","path":"/css","wildcard":true,"status":301,"method":"GET","content_length":173,"line_count":10,"word_count":16,"headers":{"server":"nginx/1.16.1"}}"#; | |
306 | 306 | let response: FeroxResponse = serde_json::from_str(json_response).unwrap(); |
307 | 307 | |
308 | 308 | let responses = FeroxResponses::default(); |
320 | 320 | /// given a FeroxResponse, test that it serializes into the proper JSON entry |
321 | 321 | fn ferox_response_serialize_and_deserialize() { |
322 | 322 | // deserialize |
323 | let json_response = r#"{"type":"response","url":"https://nerdcore.com/css","original_url":"https://nerdcore.com","path":"/css","wildcard":true,"status":301,"content_length":173,"line_count":10,"word_count":16,"headers":{"server":"nginx/1.16.1"}}"#; | |
323 | let json_response = r#"{"type":"response","url":"https://nerdcore.com/css","original_url":"https://nerdcore.com","path":"/css","wildcard":true,"status":301,"method":"GET","content_length":173,"line_count":10,"word_count":16,"headers":{"server":"nginx/1.16.1"}}"#; | |
324 | 324 | let response: FeroxResponse = serde_json::from_str(json_response).unwrap(); |
325 | 325 | |
326 | 326 | assert_eq!(response.url().as_str(), "https://nerdcore.com/css"); |
409 | 409 | r#""redirects":false"#, |
410 | 410 | r#""insecure":false"#, |
411 | 411 | r#""extensions":[]"#, |
412 | r#""methods":["GET"],"#, | |
413 | r#""data":[]"#, | |
412 | 414 | r#""headers""#, |
413 | 415 | r#""queries":[]"#, |
414 | 416 | r#""no_recursion":false"#, |
436 | 438 | r#""path":"/css""#, |
437 | 439 | r#""wildcard":true"#, |
438 | 440 | r#""status":301"#, |
441 | r#""method":"GET""#, | |
439 | 442 | r#""content_length":173"#, |
440 | 443 | r#""line_count":10"#, |
441 | 444 | r#""word_count":16"#, |
0 | 0 | use std::{ops::Deref, sync::atomic::Ordering, sync::Arc, time::Instant}; |
1 | 1 | |
2 | 2 | use anyhow::{bail, Result}; |
3 | use console::style; | |
3 | 4 | use futures::{stream, StreamExt}; |
4 | 5 | use lazy_static::lazy_static; |
5 | 6 | use tokio::sync::Semaphore; |
9 | 10 | Command::{AddError, AddToF64Field, SubtractFromUsizeField}, |
10 | 11 | Handles, |
11 | 12 | }, |
12 | extractor::{ExtractionTarget::RobotsTxt, ExtractorBuilder}, | |
13 | extractor::{ExtractionTarget, ExtractorBuilder}, | |
13 | 14 | heuristics, |
14 | 15 | scan_manager::{FeroxResponses, MenuCmdResult, ScanOrder, ScanStatus, PAUSE_SCAN}, |
15 | 16 | statistics::{ |
42 | 43 | /// wordlist that's already been read from disk |
43 | 44 | wordlist: Arc<Vec<String>>, |
44 | 45 | |
45 | /// limiter that restricts the number of active FeroxScanners | |
46 | /// limiter that restricts the number of active FeroxScanners | |
46 | 47 | scan_limiter: Arc<Semaphore>, |
47 | 48 | } |
48 | 49 | |
72 | 73 | log::trace!("enter: scan_url"); |
73 | 74 | log::info!("Starting scan against: {}", self.target_url); |
74 | 75 | |
75 | let scan_timer = Instant::now(); | |
76 | ||
77 | if matches!(self.order, ScanOrder::Initial) && self.handles.config.extract_links { | |
78 | // only grab robots.txt on the initial scan_url calls. all fresh dirs will be passed | |
79 | // to try_recursion | |
76 | let mut scan_timer = Instant::now(); | |
77 | let mut dirlist_flag = false; | |
78 | ||
79 | if self.handles.config.extract_links { | |
80 | // parse html for links (i.e. web scraping) | |
80 | 81 | let extractor = ExtractorBuilder::default() |
82 | .target(ExtractionTarget::ParseHtml) | |
81 | 83 | .url(&self.target_url) |
82 | 84 | .handles(self.handles.clone()) |
83 | .target(RobotsTxt) | |
84 | 85 | .build()?; |
85 | ||
86 | let links = extractor.extract().await?; | |
86 | let extract_out = extractor.extract().await?; | |
87 | let links = extract_out.0; | |
88 | dirlist_flag = extract_out.1; | |
87 | 89 | extractor.request_links(links).await?; |
90 | ||
91 | if matches!(self.order, ScanOrder::Initial) { | |
92 | // check for robots.txt (cannot be in subdirs) | |
93 | let extractor = ExtractorBuilder::default() | |
94 | .target(ExtractionTarget::RobotsTxt) | |
95 | .url(&self.target_url) | |
96 | .handles(self.handles.clone()) | |
97 | .build()?; | |
98 | let links = (extractor.extract().await?).0; | |
99 | extractor.request_links(links).await?; | |
100 | } | |
88 | 101 | } |
89 | 102 | |
90 | 103 | let scanned_urls = self.handles.ferox_scans()?; |
91 | ||
92 | 104 | let ferox_scan = match scanned_urls.get_scan_by_url(&self.target_url) { |
93 | 105 | Some(scan) => { |
94 | 106 | scan.set_status(ScanStatus::Running)?; |
105 | 117 | |
106 | 118 | let progress_bar = ferox_scan.progress_bar(); |
107 | 119 | |
120 | // Directory listing heuristic detection to not continue scanning | |
121 | if dirlist_flag { | |
122 | log::trace!("exit: scan_url -> Directory listing heuristic"); | |
123 | ||
124 | self.handles.stats.send(AddToF64Field( | |
125 | DirScanTimes, | |
126 | scan_timer.elapsed().as_secs_f64(), | |
127 | ))?; | |
128 | ||
129 | self.handles.stats.send(SubtractFromUsizeField( | |
130 | TotalExpected, | |
131 | progress_bar.length() as usize, | |
132 | ))?; | |
133 | ||
134 | progress_bar.reset_eta(); | |
135 | progress_bar.finish_with_message(&format!( | |
136 | "=> {}", | |
137 | style("Directory listing").blue().bright() | |
138 | )); | |
139 | ||
140 | ferox_scan.finish()?; | |
141 | ||
142 | return Ok(()); | |
143 | } | |
144 | ||
108 | 145 | // When acquire is called and the semaphore has remaining permits, the function immediately |
109 | 146 | // returns a permit. However, if no remaining permits are available, acquire (asynchronously) |
110 | 147 | // waits until an outstanding permit is dropped, at which point, the freed permit is assigned |
111 | 148 | // to the caller. |
112 | 149 | let _permit = self.scan_limiter.acquire().await; |
150 | if self.handles.config.scan_limit > 0 { | |
151 | scan_timer = Instant::now(); | |
152 | progress_bar.reset(); | |
153 | } | |
113 | 154 | |
114 | 155 | // Arc clones to be passed around to the various scans |
115 | 156 | let looping_words = self.wordlist.clone(); |
122 | 163 | } |
123 | 164 | |
124 | 165 | let requester = Arc::new(Requester::from(self, ferox_scan.clone())?); |
125 | let increment_len = (self.handles.config.extensions.len() + 1) as u64; | |
166 | let increment_len = | |
167 | ((self.handles.config.extensions.len() + 1) * self.handles.config.methods.len()) as u64; | |
126 | 168 | |
127 | 169 | // producer tasks (mp of mpsc); responsible for making requests |
128 | 170 | let producers = stream::iter(looping_words.deref().to_owned()) |
10 | 10 | log::trace!("enter: initialize({}, {:?})", num_words, handles); |
11 | 11 | |
12 | 12 | // number of requests only needs to be calculated once, and then can be reused |
13 | let num_reqs_expected: u64 = if handles.config.extensions.is_empty() { | |
14 | num_words.try_into()? | |
15 | } else { | |
16 | let total = num_words * (handles.config.extensions.len() + 1); | |
17 | total.try_into()? | |
18 | }; | |
13 | let num_reqs_expected: u64 = | |
14 | (num_words * (handles.config.extensions.len() + 1) * (handles.config.methods.len())) | |
15 | .try_into()?; | |
19 | 16 | |
20 | 17 | { |
21 | 18 | // no real reason to keep the arc around beyond this call |
16 | 16 | Command::{self, AddError, SubtractFromUsizeField}, |
17 | 17 | Handles, |
18 | 18 | }, |
19 | extractor::{ExtractionTarget::ResponseBody, ExtractorBuilder}, | |
19 | extractor::{ExtractionTarget, ExtractorBuilder}, | |
20 | 20 | response::FeroxResponse, |
21 | 21 | scan_manager::{FeroxScan, ScanStatus}, |
22 | 22 | statistics::{StatError::Other, StatField::TotalExpected}, |
309 | 309 | || !self.handles.config.regex_denylist.is_empty(); |
310 | 310 | |
311 | 311 | for url in urls { |
312 | // auto_tune is true, or rate_limit was set (mutually exclusive to user) | |
313 | // and a rate_limiter has been created | |
314 | // short-circuiting the lock access behind the first boolean check | |
315 | let should_tune = self.handles.config.auto_tune || self.handles.config.rate_limit > 0; | |
316 | let should_limit = should_tune && self.rate_limiter.read().await.is_some(); | |
317 | ||
318 | if should_limit { | |
319 | // found a rate limiter, limit that junk! | |
320 | if let Err(e) = self.limit().await { | |
321 | log::warn!("Could not rate limit scan: {}", e); | |
322 | self.handles.stats.send(AddError(Other)).unwrap_or_default(); | |
312 | for method in self.handles.config.methods.iter() { | |
313 | // auto_tune is true, or rate_limit was set (mutually exclusive to user) | |
314 | // and a rate_limiter has been created | |
315 | // short-circuiting the lock access behind the first boolean check | |
316 | let should_tune = | |
317 | self.handles.config.auto_tune || self.handles.config.rate_limit > 0; | |
318 | let should_limit = should_tune && self.rate_limiter.read().await.is_some(); | |
319 | ||
320 | if should_limit { | |
321 | // found a rate limiter, limit that junk! | |
322 | if let Err(e) = self.limit().await { | |
323 | log::warn!("Could not rate limit scan: {}", e); | |
324 | self.handles.stats.send(AddError(Other)).unwrap_or_default(); | |
325 | } | |
323 | 326 | } |
324 | } | |
325 | ||
326 | if should_test_deny && should_deny_url(&url, self.handles.clone())? { | |
327 | // can't allow a denied url to be requested | |
328 | continue; | |
329 | } | |
330 | ||
331 | let response = logged_request(&url, self.handles.clone()).await?; | |
332 | ||
333 | if (should_tune || self.handles.config.auto_bail) | |
334 | && !atomic_load!(self.policy_data.cooling_down, Ordering::SeqCst) | |
335 | { | |
336 | // only check for policy enforcement when the trigger isn't on cooldown and tuning | |
337 | // or bailing is in place (should_tune used here because when auto-tune is on, we'll | |
338 | // reach this without a rate_limiter in place) | |
339 | match self.policy_data.policy { | |
340 | RequesterPolicy::AutoTune => { | |
341 | if let Some(trigger) = self.should_enforce_policy() { | |
342 | self.tune(trigger).await?; | |
327 | ||
328 | if should_test_deny && should_deny_url(&url, self.handles.clone())? { | |
329 | // can't allow a denied url to be requested | |
330 | continue; | |
331 | } | |
332 | ||
333 | let response = logged_request( | |
334 | &url, | |
335 | method.as_str(), | |
336 | Some(self.handles.config.data.as_slice()), | |
337 | self.handles.clone(), | |
338 | ) | |
339 | .await?; | |
340 | ||
341 | if (should_tune || self.handles.config.auto_bail) | |
342 | && !atomic_load!(self.policy_data.cooling_down, Ordering::SeqCst) | |
343 | { | |
344 | // only check for policy enforcement when the trigger isn't on cooldown and tuning | |
345 | // or bailing is in place (should_tune used here because when auto-tune is on, we'll | |
346 | // reach this without a rate_limiter in place) | |
347 | match self.policy_data.policy { | |
348 | RequesterPolicy::AutoTune => { | |
349 | if let Some(trigger) = self.should_enforce_policy() { | |
350 | self.tune(trigger).await?; | |
351 | } | |
352 | } | |
353 | RequesterPolicy::AutoBail => { | |
354 | if let Some(trigger) = self.should_enforce_policy() { | |
355 | self.bail(trigger).await?; | |
356 | } | |
357 | } | |
358 | RequesterPolicy::Default => {} | |
359 | } | |
360 | } | |
361 | ||
362 | // response came back without error, convert it to FeroxResponse | |
363 | let ferox_response = FeroxResponse::from( | |
364 | response, | |
365 | &self.target_url, | |
366 | method, | |
367 | true, | |
368 | self.handles.config.output_level, | |
369 | ) | |
370 | .await; | |
371 | ||
372 | // do recursion if appropriate | |
373 | if !self.handles.config.no_recursion { | |
374 | self.handles | |
375 | .send_scan_command(Command::TryRecursion(Box::new( | |
376 | ferox_response.clone(), | |
377 | )))?; | |
378 | let (tx, rx) = oneshot::channel::<bool>(); | |
379 | self.handles.send_scan_command(Command::Sync(tx))?; | |
380 | rx.await?; | |
381 | } | |
382 | ||
383 | // purposefully doing recursion before filtering. the thought process is that | |
384 | // even though this particular url is filtered, subsequent urls may not | |
385 | if self | |
386 | .handles | |
387 | .filters | |
388 | .data | |
389 | .should_filter_response(&ferox_response, self.handles.stats.tx.clone()) | |
390 | { | |
391 | continue; | |
392 | } | |
393 | ||
394 | if self.handles.config.extract_links && !ferox_response.status().is_redirection() { | |
395 | let extractor = ExtractorBuilder::default() | |
396 | .target(ExtractionTarget::ResponseBody) | |
397 | .response(&ferox_response) | |
398 | .handles(self.handles.clone()) | |
399 | .build()?; | |
400 | let new_links: HashSet<_>; | |
401 | let extracted = (extractor.extract().await?).0; | |
402 | ||
403 | { | |
404 | // gain and quickly drop the read lock on seen_links, using it while unlocked | |
405 | // to determine if there are any new links to process | |
406 | let read_links = self.seen_links.read().await; | |
407 | new_links = extracted.difference(&read_links).cloned().collect(); | |
408 | } | |
409 | ||
410 | if !new_links.is_empty() { | |
411 | // using is_empty instead of direct iteration to acquire the write lock behind | |
412 | // some kind of less expensive gate (and not in a loop, obv) | |
413 | let mut write_links = self.seen_links.write().await; | |
414 | for new_link in &new_links { | |
415 | write_links.insert(new_link.to_owned()); | |
343 | 416 | } |
344 | 417 | } |
345 | RequesterPolicy::AutoBail => { | |
346 | if let Some(trigger) = self.should_enforce_policy() { | |
347 | self.bail(trigger).await?; | |
348 | } | |
349 | } | |
350 | RequesterPolicy::Default => {} | |
418 | ||
419 | extractor.request_links(new_links).await?; | |
351 | 420 | } |
352 | } | |
353 | ||
354 | // response came back without error, convert it to FeroxResponse | |
355 | let ferox_response = FeroxResponse::from( | |
356 | response, | |
357 | &self.target_url, | |
358 | true, | |
359 | self.handles.config.output_level, | |
360 | ) | |
361 | .await; | |
362 | ||
363 | // do recursion if appropriate | |
364 | if !self.handles.config.no_recursion { | |
365 | self.handles | |
366 | .send_scan_command(Command::TryRecursion(Box::new(ferox_response.clone())))?; | |
367 | let (tx, rx) = oneshot::channel::<bool>(); | |
368 | self.handles.send_scan_command(Command::Sync(tx))?; | |
369 | rx.await?; | |
370 | } | |
371 | ||
372 | // purposefully doing recursion before filtering. the thought process is that | |
373 | // even though this particular url is filtered, subsequent urls may not | |
374 | if self | |
375 | .handles | |
376 | .filters | |
377 | .data | |
378 | .should_filter_response(&ferox_response, self.handles.stats.tx.clone()) | |
379 | { | |
380 | continue; | |
381 | } | |
382 | ||
383 | if self.handles.config.extract_links && !ferox_response.status().is_redirection() { | |
384 | let extractor = ExtractorBuilder::default() | |
385 | .target(ResponseBody) | |
386 | .response(&ferox_response) | |
387 | .handles(self.handles.clone()) | |
388 | .build()?; | |
389 | ||
390 | let new_links: HashSet<_>; | |
391 | let extracted = extractor.extract().await?; | |
392 | ||
393 | { | |
394 | // gain and quickly drop the read lock on seen_links, using it while unlocked | |
395 | // to determine if there are any new links to process | |
396 | let read_links = self.seen_links.read().await; | |
397 | new_links = extracted.difference(&read_links).cloned().collect(); | |
421 | ||
422 | // everything else should be reported | |
423 | if let Err(e) = ferox_response.send_report(self.handles.output.tx.clone()) { | |
424 | log::warn!("Could not send FeroxResponse to output handler: {}", e); | |
398 | 425 | } |
399 | ||
400 | if !new_links.is_empty() { | |
401 | // using is_empty instead of direct iteration to acquire the write lock behind | |
402 | // some kind of less expensive gate (and not in a loop, obv) | |
403 | let mut write_links = self.seen_links.write().await; | |
404 | for new_link in &new_links { | |
405 | write_links.insert(new_link.to_owned()); | |
406 | } | |
407 | } | |
408 | ||
409 | extractor.request_links(new_links).await?; | |
410 | } | |
411 | ||
412 | // everything else should be reported | |
413 | if let Err(e) = ferox_response.send_report(self.handles.output.tx.clone()) { | |
414 | log::warn!("Could not send FeroxResponse to output handler: {}", e); | |
415 | 426 | } |
416 | 427 | } |
417 | 428 |
1 | 1 | use console::{strip_ansi_codes, style, user_attended}; |
2 | 2 | use indicatif::ProgressBar; |
3 | 3 | use regex::Regex; |
4 | use reqwest::{Client, Response, StatusCode, Url}; | |
4 | use reqwest::{Client, Method, Response, StatusCode, Url}; | |
5 | 5 | #[cfg(not(target_os = "windows"))] |
6 | 6 | use rlimit::{getrlimit, setrlimit, Resource}; |
7 | 7 | use std::{ |
94 | 94 | |
95 | 95 | /// wrapper for make_request used to pass error/response codes to FeroxScans for per-scan stats |
96 | 96 | /// tracking of information related to auto-tune/bail |
97 | pub async fn logged_request(url: &Url, handles: Arc<Handles>) -> Result<Response> { | |
97 | pub async fn logged_request( | |
98 | url: &Url, | |
99 | method: &str, | |
100 | data: Option<&[u8]>, | |
101 | handles: Arc<Handles>, | |
102 | ) -> Result<Response> { | |
98 | 103 | let client = &handles.config.client; |
99 | 104 | let level = handles.config.output_level; |
100 | 105 | let tx_stats = handles.stats.tx.clone(); |
101 | 106 | |
102 | let response = make_request(client, url, level, &handles.config, tx_stats).await; | |
107 | let response = make_request(client, url, method, data, level, &handles.config, tx_stats).await; | |
103 | 108 | |
104 | 109 | let scans = handles.ferox_scans()?; |
105 | ||
106 | 110 | match response { |
107 | 111 | Ok(resp) => { |
108 | 112 | match resp.status() { |
125 | 129 | pub async fn make_request( |
126 | 130 | client: &Client, |
127 | 131 | url: &Url, |
132 | method: &str, | |
133 | data: Option<&[u8]>, | |
128 | 134 | output_level: OutputLevel, |
129 | 135 | config: &Configuration, |
130 | 136 | tx_stats: UnboundedSender<Command>, |
136 | 142 | tx_stats |
137 | 143 | ); |
138 | 144 | |
139 | let mut request = client.get(url.to_owned()); | |
145 | let mut request = client.request(Method::from_bytes(method.as_bytes())?, url.to_owned()); | |
146 | if let Some(body_data) = data { | |
147 | request = request.body(body_data.to_vec()); | |
148 | } | |
140 | 149 | |
141 | 150 | if config.random_agent { |
142 | 151 | let index = unsafe { |
158 | 167 | } else if e.is_redirect() { |
159 | 168 | if let Some(last_redirect) = e.url() { |
160 | 169 | // get where we were headed (last_redirect) and where we came from (url) |
161 | let fancy_message = format!("{} !=> {}", url, last_redirect); | |
162 | ||
163 | let report = if let Some(msg_status) = e.status() { | |
164 | send_command!(tx_stats, AddStatus(msg_status)); | |
165 | create_report_string( | |
166 | msg_status.as_str(), | |
167 | "-1", | |
168 | "-1", | |
169 | "-1", | |
170 | &fancy_message, | |
171 | output_level, | |
172 | ) | |
173 | } else { | |
174 | create_report_string("UNK", "-1", "-1", "-1", &fancy_message, output_level) | |
170 | let fancy_message = format!( | |
171 | "{} !=> {} ({})", | |
172 | url, | |
173 | last_redirect, | |
174 | style("too many redirects").red(), | |
175 | ); | |
176 | ||
177 | let msg_status = match e.status() { | |
178 | Some(status) => status.to_string(), | |
179 | None => "ERR".to_string(), | |
175 | 180 | }; |
181 | ||
182 | let report = create_report_string( | |
183 | &msg_status, | |
184 | method, | |
185 | "-1", | |
186 | "-1", | |
187 | "-1", | |
188 | &fancy_message, | |
189 | output_level, | |
190 | ); | |
176 | 191 | |
177 | 192 | send_command!(tx_stats, AddError(Redirection)); |
178 | 193 | |
203 | 218 | /// 200 127l 283w 4134c http://localhost/faq |
204 | 219 | pub fn create_report_string( |
205 | 220 | status: &str, |
221 | method: &str, | |
206 | 222 | line_count: &str, |
207 | 223 | word_count: &str, |
208 | 224 | content_length: &str, |
216 | 232 | // normal printing with status and sizes |
217 | 233 | let color_status = status_colorizer(status); |
218 | 234 | format!( |
219 | "{} {:>8}l {:>8}w {:>8}c {}\n", | |
220 | color_status, line_count, word_count, content_length, url | |
235 | "{} {:>8} {:>8}l {:>8}w {:>8}c {}\n", | |
236 | color_status, method, line_count, word_count, content_length, url | |
221 | 237 | ) |
222 | 238 | } |
223 | 239 | } |
1030 | 1030 | .and(predicate::str::contains("User-Agent").not()), |
1031 | 1031 | ); |
1032 | 1032 | } |
1033 | ||
1034 | #[test] | |
1035 | /// test allows non-existent wordlist to trigger the banner printing to stderr | |
1036 | /// expect to see all mandatory prints + methods | |
1037 | fn banner_prints_methods() { | |
1038 | Command::cargo_bin("feroxbuster") | |
1039 | .unwrap() | |
1040 | .arg("--url") | |
1041 | .arg("http://localhost") | |
1042 | .arg("-m") | |
1043 | .arg("PUT") | |
1044 | .arg("--methods") | |
1045 | .arg("OPTIONS") | |
1046 | .assert() | |
1047 | .success() | |
1048 | .stderr( | |
1049 | predicate::str::contains("─┬─") | |
1050 | .and(predicate::str::contains("Target Url")) | |
1051 | .and(predicate::str::contains("http://localhost")) | |
1052 | .and(predicate::str::contains("Threads")) | |
1053 | .and(predicate::str::contains("Wordlist")) | |
1054 | .and(predicate::str::contains("Status Codes")) | |
1055 | .and(predicate::str::contains("Timeout (secs)")) | |
1056 | .and(predicate::str::contains("User-Agent")) | |
1057 | .and(predicate::str::contains("HTTP methods")) | |
1058 | .and(predicate::str::contains("[PUT, OPTIONS]")) | |
1059 | .and(predicate::str::contains("─┴─")), | |
1060 | ); | |
1061 | } | |
1062 | ||
1063 | #[test] | |
1064 | /// test allows non-existent wordlist to trigger the banner printing to stderr | |
1065 | /// expect to see all mandatory prints + data body | |
1066 | fn banner_prints_data() { | |
1067 | Command::cargo_bin("feroxbuster") | |
1068 | .unwrap() | |
1069 | .arg("--url") | |
1070 | .arg("http://localhost") | |
1071 | .arg("-m") | |
1072 | .arg("PUT") | |
1073 | .arg("--methods") | |
1074 | .arg("POST") | |
1075 | .arg("--data") | |
1076 | .arg("some_data") | |
1077 | .assert() | |
1078 | .success() | |
1079 | .stderr( | |
1080 | predicate::str::contains("─┬─") | |
1081 | .and(predicate::str::contains("Target Url")) | |
1082 | .and(predicate::str::contains("http://localhost")) | |
1083 | .and(predicate::str::contains("Threads")) | |
1084 | .and(predicate::str::contains("Wordlist")) | |
1085 | .and(predicate::str::contains("Status Codes")) | |
1086 | .and(predicate::str::contains("Timeout (secs)")) | |
1087 | .and(predicate::str::contains("User-Agent")) | |
1088 | .and(predicate::str::contains("HTTP Body")) | |
1089 | .and(predicate::str::contains("some_data")) | |
1090 | .and(predicate::str::contains("─┴─")), | |
1091 | ); | |
1092 | } |
287 | 287 | ); |
288 | 288 | |
289 | 289 | assert_eq!(mock.hits(), 1); |
290 | assert_eq!(mock_dir.hits(), 1); | |
290 | assert_eq!(mock_dir.hits(), 2); | |
291 | 291 | assert_eq!(mock_two.hits(), 1); |
292 | 292 | assert_eq!(mock_file.hits(), 1); |
293 | 293 | assert_eq!(mock_disallowed.hits(), 1); |
370 | 370 | } |
371 | 371 | |
372 | 372 | #[test] |
373 | /// serve a directory listing with a file and and a folder contained within it. ferox should | |
374 | /// find both links and request each one. | |
375 | fn extractor_finds_directory_listing_links_and_displays_files() { | |
376 | let srv = MockServer::start(); | |
377 | let (tmp_dir, file) = setup_tmp_directory(&["invalid".to_string()], "wordlist").unwrap(); | |
378 | ||
379 | let mock_root = srv.mock(|when, then| { | |
380 | when.method(GET).path("/"); | |
381 | then.status(200).body( | |
382 | r#" | |
383 | <html> | |
384 | <head> | |
385 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | |
386 | <title>Directory listing for /</title> | |
387 | </head> | |
388 | <body> | |
389 | <h1>Directory listing for /</h1> | |
390 | <hr> | |
391 | <ul> | |
392 | <li><a href="disallowed-subdir/">disallowed-subdir/</a></li> | |
393 | <li><a href="LICENSE">LICENSE</a></li> | |
394 | <li><a href="misc/">misc/</a></li> | |
395 | </ul> | |
396 | <hr> | |
397 | </body> | |
398 | </html> | |
399 | "#, | |
400 | ); | |
401 | }); | |
402 | ||
403 | let mock_root_file = srv.mock(|when, then| { | |
404 | when.method(GET).path("/LICENSE"); | |
405 | then.status(200).body("im a little teapot"); // 18 | |
406 | }); | |
407 | ||
408 | let mock_dir_disallowed = srv.mock(|when, then| { | |
409 | when.method(GET).path("/disallowed-subdir"); | |
410 | then.status(404); | |
411 | }); | |
412 | ||
413 | let mock_dir_redir = srv.mock(|when, then| { | |
414 | when.method(GET).path("/misc"); | |
415 | then.status(301).header("Location", &srv.url("/misc/")); | |
416 | }); | |
417 | let mock_dir = srv.mock(|when, then| { | |
418 | when.method(GET).path("/misc/"); | |
419 | then.status(200).body( | |
420 | r#" | |
421 | <html> | |
422 | <head> | |
423 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | |
424 | <title>Directory listing for /misc</title> | |
425 | </head> | |
426 | <body> | |
427 | <h1>Directory listing for /misc</h1> | |
428 | <hr> | |
429 | <ul> | |
430 | <li><a href="LICENSE">LICENSE</a></li> | |
431 | <li><a href="stupidfile.php">stupidfile.php</a></li> | |
432 | </ul> | |
433 | <hr> | |
434 | </body> | |
435 | </html> | |
436 | "#, | |
437 | ); | |
438 | }); | |
439 | ||
440 | let mock_dir_file = srv.mock(|when, then| { | |
441 | when.method(GET).path("/misc/LICENSE"); | |
442 | then.status(200).body("i too, am a container for tea"); // 29 | |
443 | }); | |
444 | ||
445 | let mock_dir_file_ext = srv.mock(|when, then| { | |
446 | when.method(GET).path("/misc/stupidfile.php"); | |
447 | then.status(200).body("im a little teapot too"); // 22 | |
448 | }); | |
449 | ||
450 | let cmd = Command::cargo_bin("feroxbuster") | |
451 | .unwrap() | |
452 | .arg("--url") | |
453 | .arg(srv.url("/")) | |
454 | .arg("--wordlist") | |
455 | .arg(file.as_os_str()) | |
456 | .arg("--extract-links") | |
457 | .arg("--redirects") | |
458 | .unwrap(); | |
459 | ||
460 | cmd.assert().success().stdout( | |
461 | predicate::str::contains("/LICENSE") // 2 directories contain LICENSE | |
462 | .count(2) | |
463 | .and(predicate::str::contains("18c")) | |
464 | .and(predicate::str::contains("/misc/stupidfile.php")) | |
465 | .and(predicate::str::contains("22c")) | |
466 | .and(predicate::str::contains("/misc/LICENSE")) | |
467 | .and(predicate::str::contains("29c")) | |
468 | .and(predicate::str::contains("200").count(3)), | |
469 | ); | |
470 | ||
471 | assert_eq!(mock_root.hits(), 2); | |
472 | assert_eq!(mock_root_file.hits(), 1); | |
473 | assert_eq!(mock_dir_disallowed.hits(), 1); | |
474 | assert_eq!(mock_dir_redir.hits(), 1); | |
475 | assert_eq!(mock_dir.hits(), 2); | |
476 | assert_eq!(mock_dir_file.hits(), 1); | |
477 | assert_eq!(mock_dir_file_ext.hits(), 1); | |
478 | teardown_tmp_directory(tmp_dir); | |
479 | } | |
480 | ||
481 | #[test] | |
482 | /// serve a directory listing with a file and and a folder contained within it. ferox should | |
483 | /// find both links and request each one. This is the non-recursive version of the test above | |
484 | fn extractor_finds_directory_listing_links_and_displays_files_non_recursive() { | |
485 | let srv = MockServer::start(); | |
486 | let (tmp_dir, file) = setup_tmp_directory(&["invalid".to_string()], "wordlist").unwrap(); | |
487 | ||
488 | let mock_root = srv.mock(|when, then| { | |
489 | when.method(GET).path("/"); | |
490 | then.status(200).body( | |
491 | r#" | |
492 | <html> | |
493 | <head> | |
494 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | |
495 | <title>Directory listing for /</title> | |
496 | </head> | |
497 | <body> | |
498 | <h1>Directory listing for /</h1> | |
499 | <hr> | |
500 | <ul> | |
501 | <li><a href="disallowed-subdir/">disallowed-subdir/</a></li> | |
502 | <li><a href="LICENSE">LICENSE</a></li> | |
503 | <li><a href="misc/">misc/</a></li> | |
504 | </ul> | |
505 | <hr> | |
506 | </body> | |
507 | </html> | |
508 | "#, | |
509 | ); | |
510 | }); | |
511 | ||
512 | let mock_root_file = srv.mock(|when, then| { | |
513 | when.method(GET).path("/LICENSE"); | |
514 | then.status(200).body("im a little teapot"); // 18 | |
515 | }); | |
516 | ||
517 | let mock_dir_disallowed = srv.mock(|when, then| { | |
518 | when.method(GET).path("/disallowed-subdir"); | |
519 | then.status(404); | |
520 | }); | |
521 | ||
522 | let mock_dir_redir = srv.mock(|when, then| { | |
523 | when.method(GET).path("/misc"); | |
524 | then.status(301).header("Location", &srv.url("/misc/")); | |
525 | }); | |
526 | let mock_dir = srv.mock(|when, then| { | |
527 | when.method(GET).path("/misc/"); | |
528 | then.status(200).body( | |
529 | r#" | |
530 | <html> | |
531 | <head> | |
532 | <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> | |
533 | <title>Directory listing for /misc</title> | |
534 | </head> | |
535 | <body> | |
536 | <h1>Directory listing for /misc</h1> | |
537 | <hr> | |
538 | <ul> | |
539 | <li><a href="LICENSE">LICENSE</a></li> | |
540 | <li><a href="stupidfile.php">stupidfile.php</a></li> | |
541 | </ul> | |
542 | <hr> | |
543 | </body> | |
544 | </html> | |
545 | "#, | |
546 | ); | |
547 | }); | |
548 | ||
549 | let mock_dir_file = srv.mock(|when, then| { | |
550 | when.method(GET).path("/misc/LICENSE"); | |
551 | then.status(200).body("i too, am a container for tea"); // 29 | |
552 | }); | |
553 | ||
554 | let mock_dir_file_ext = srv.mock(|when, then| { | |
555 | when.method(GET).path("/misc/stupidfile.php"); | |
556 | then.status(200).body("im a little teapot too"); // 22 | |
557 | }); | |
558 | ||
559 | let cmd = Command::cargo_bin("feroxbuster") | |
560 | .unwrap() | |
561 | .arg("--url") | |
562 | .arg(srv.url("/")) | |
563 | .arg("--wordlist") | |
564 | .arg(file.as_os_str()) | |
565 | .arg("--extract-links") | |
566 | .arg("--redirects") | |
567 | .arg("--no-recursion") | |
568 | .unwrap(); | |
569 | ||
570 | cmd.assert().success().stdout( | |
571 | predicate::str::contains("/LICENSE") | |
572 | .and(predicate::str::contains("18c")) | |
573 | .and(predicate::str::contains("/misc/stupidfile.php")) | |
574 | .not() | |
575 | .and(predicate::str::contains("22c")) | |
576 | .not() | |
577 | .and(predicate::str::contains("/misc/LICENSE").not()) | |
578 | .and(predicate::str::contains("29c").not()) | |
579 | .and(predicate::str::contains("200").count(1)), | |
580 | ); | |
581 | ||
582 | assert_eq!(mock_root.hits(), 2); | |
583 | assert_eq!(mock_root_file.hits(), 1); | |
584 | assert_eq!(mock_dir_disallowed.hits(), 1); | |
585 | assert_eq!(mock_dir_redir.hits(), 1); | |
586 | assert_eq!(mock_dir.hits(), 1); | |
587 | assert_eq!(mock_dir_file.hits(), 0); | |
588 | assert_eq!(mock_dir_file_ext.hits(), 0); | |
589 | teardown_tmp_directory(tmp_dir); | |
590 | } | |
591 | ||
592 | #[test] | |
373 | 593 | /// send a request to a page that contains a link that contains a directory that returns a 403 |
374 | 594 | /// --extract-links should find the link and make recurse into the 403 directory, finding LICENSE |
375 | 595 | fn extractor_recurses_into_403_directories() -> Result<(), Box<dyn std::error::Error>> { |
415 | 635 | |
416 | 636 | assert_eq!(mock.hits(), 1); |
417 | 637 | assert_eq!(mock_two.hits(), 1); |
418 | assert_eq!(forbidden_dir.hits(), 1); | |
638 | assert_eq!(forbidden_dir.hits(), 2); | |
419 | 639 | teardown_tmp_directory(tmp_dir); |
420 | 640 | Ok(()) |
421 | 641 | } |
453 | 453 | assert!(contents.contains("WLD")); |
454 | 454 | assert!(contents.contains("301")); |
455 | 455 | assert!(contents.contains("/some-redirect")); |
456 | assert!(contents.contains("redirects to => ")); | |
456 | assert!(contents.contains(" => ")); | |
457 | 457 | assert!(contents.contains(&srv.url("/"))); |
458 | 458 | assert!(contents.contains("(url length: 32)")); |
459 | 459 | |
460 | 460 | cmd.assert().success().stdout( |
461 | predicate::str::contains("redirects to => ") | |
461 | predicate::str::contains(" => ") | |
462 | 462 | .and(predicate::str::contains("/some-redirect")) |
463 | 463 | .and(predicate::str::contains("301")) |
464 | 464 | .and(predicate::str::contains(srv.url("/"))) |
32 | 32 | /// |
33 | 33 | /// For more information try --help |
34 | 34 | /// |
35 | /// the new behavior we expect to see is to print the long form help message, of which | |
36 | /// Ludicrous speed... go! is near the bottom of that output, so we can test for that | |
35 | /// the new behavior we expect to see is to print the short form help message, of which | |
36 | /// "[CAUTION] 4 -v's is probably too much" is near the bottom of that output, so we can test for that | |
37 | 37 | fn parser_incorrect_param_with_tack_h() { |
38 | 38 | Command::cargo_bin("feroxbuster") |
39 | 39 | .unwrap() |
41 | 41 | .arg("-h") |
42 | 42 | .assert() |
43 | 43 | .success() |
44 | .stdout(predicate::str::contains("Ludicrous speed... go!")); | |
44 | .stdout(predicate::str::contains( | |
45 | "[CAUTION] 4 -v's is probably too much", | |
46 | )); | |
45 | 47 | } |