Merge pull request #100 from epi052/FEATURE-response-code-blacklist
Feature response code blacklist
epi052 authored Oct 31, 2020
2 parents 9db0dc5 + f64f021 commit 2317521
Showing 18 changed files with 536 additions and 281 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "feroxbuster"
version = "1.2.0"
version = "1.3.0"
authors = ["Ben 'epi' Risher <[email protected]>"]
license = "MIT"
edition = "2018"
48 changes: 31 additions & 17 deletions README.md
@@ -80,6 +80,7 @@ This attack is also known as Predictable Resource Location, File Enumeration, Di
- [Proxy traffic through a SOCKS proxy](#proxy-traffic-through-a-socks-proxy)
- [Pass auth token via query parameter](#pass-auth-token-via-query-parameter)
- [Limit Total Number of Concurrent Scans (new in `v1.2.0`)](#limit-total-number-of-concurrent-scans-new-in-v120)
- [Filter Response by Status Code (new in `v1.3.0`)](#filter-response-by-status-code-new-in-v130)
- [Comparison w/ Similar Tools](#-comparison-w-similar-tools)
- [Common Problems/Issues (FAQ)](#-common-problemsissues-faq)
- [No file descriptors available](#no-file-descriptors-available)
@@ -221,8 +222,8 @@ Configuration begins with the following built-in default values baked into
- threads: `50`
- verbosity: `0` (no logging enabled)
- scan_limit: `0` (no limit imposed on concurrent scans)
- statuscodes: `200 204 301 302 307 308 401 403 405`
- useragent: `feroxbuster/VERSION`
- status_codes: `200 204 301 302 307 308 401 403 405`
- user_agent: `feroxbuster/VERSION`
- recursion depth: `4`
- auto-filter wildcards - `true`
- output: `stdout`
@@ -272,25 +273,26 @@ A pre-made configuration file with examples of all available settings can be fou
# Any setting used here can be overridden by the corresponding command line option/argument
#
# wordlist = "/wordlists/jhaddix/all.txt"
# statuscodes = [200, 500]
# status_codes = [200, 500]
# filter_status = [301]
# threads = 1
# timeout = 5
# proxy = "http://127.0.0.1:8080"
# verbosity = 1
# scan_limit = 6
# quiet = true
# output = "/targets/ellingson_mineral_company/gibson.txt"
# useragent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0"
# user_agent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0"
# redirects = true
# insecure = true
# extensions = ["php", "html"]
# norecursion = true
# addslash = true
# no_recursion = true
# add_slash = true
# stdin = true
# dontfilter = true
# dont_filter = true
# extract_links = true
# depth = 1
# sizefilters = [5174]
# filter_size = [5174]
# queries = [["name","value"], ["rick", "astley"]]

# headers can be specified on multiple lines or as an inline table
@@ -315,13 +317,13 @@ USAGE:
feroxbuster [FLAGS] [OPTIONS] --url <URL>...
FLAGS:
-f, --addslash Append / to each request
-D, --dontfilter Don't auto-filter wildcard responses
-f, --add-slash Append / to each request
-D, --dont-filter Don't auto-filter wildcard responses
-e, --extract-links Extract links from response body (html, javascript, etc...); make new requests based on
findings (default: false)
-h, --help Prints help information
-k, --insecure Disables TLS certificate validation
-n, --norecursion Do not scan recursively
-n, --no-recursion Do not scan recursively
-q, --quiet Only print URLs; Don't print status codes, response size, running config, etc...
-r, --redirects Follow redirects
--stdin Read url(s) from STDIN
@@ -331,17 +333,19 @@
OPTIONS:
-d, --depth <RECURSION_DEPTH> Maximum recursion depth, a depth of 0 is infinite recursion (default: 4)
-x, --extensions <FILE_EXTENSION>... File extension(s) to search for (ex: -x php -x pdf js)
-S, --filter-size <SIZE>... Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)
    -C, --filter-status <STATUS_CODE>...    Filter out status codes (deny list) (ex: -C 200 -C 401)
-H, --headers <HEADER>... Specify HTTP headers (ex: -H Header:val 'stuff: things')
-o, --output <FILE> Output file to write results to (default: stdout)
-p, --proxy <PROXY> Proxy to use for requests (ex: http(s)://host:port, socks5://host:port)
-Q, --query <QUERY>... Specify URL query parameters (ex: -Q token=stuff -Q secret=key)
-L, --scan-limit <SCAN_LIMIT> Limit total number of concurrent scans (default: 7)
-S, --sizefilter <SIZE>... Filter out messages of a particular size (ex: -S 5120 -S 4927,1970)
-s, --statuscodes <STATUS_CODE>... Status Codes of interest (default: 200 204 301 302 307 308 401 403 405)
-L, --scan-limit <SCAN_LIMIT> Limit total number of concurrent scans (default: 0, i.e. no limit)
-s, --status-codes <STATUS_CODE>... Status Codes to include (allow list) (default: 200 204 301 302 307 308 401
403 405)
-t, --threads <THREADS> Number of concurrent threads (default: 50)
-T, --timeout <SECONDS> Number of seconds before a request times out (default: 7)
-u, --url <URL>... The target URL(s) (required, unless --stdin used)
-a, --useragent <USER_AGENT> Sets the User-Agent (default: feroxbuster/VERSION)
-a, --user-agent <USER_AGENT> Sets the User-Agent (default: feroxbuster/VERSION)
-w, --wordlist <FILE> Path to the wordlist
```

@@ -399,7 +403,7 @@ With `--extract-links`
### IPv6, non-recursive scan with INFO-level logging enabled

```
./feroxbuster -u http://[::1] --norecursion -vv
./feroxbuster -u http://[::1] --no-recursion -vv
```

### Read urls from STDIN; pipe only resulting urls out to another tool
@@ -438,6 +442,16 @@ discovered directories can only begin scanning when the total number of active s

![limit-demo](img/limit-demo.gif)

### Filter Response by Status Code (new in `v1.3.0`)

Version 1.3.0 includes an overhaul of the filtering system that allows a wide array of filters to be added
with minimal effort. The first such filter is a Status Code Filter. As responses come back from the scanned server,
each one is checked against the list of active filters and is either displayed or suppressed, depending on which filters are set.

```
./feroxbuster -u http://127.1 --filter-status 301
```
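
For illustration only, here is a minimal sketch of how a deny-list style status-code filter could sit behind a shared filter trait. The names below (`FeroxFilter`, `StatusCodeFilter`, `should_filter`) are illustrative assumptions, not a claim about the actual feroxbuster implementation; the sketch only mirrors the behaviour described above.

```
/// Illustrative trait shared by all response filters (sketch only, hypothetical names).
trait FeroxFilter {
    /// Return true if the response should be hidden from output.
    fn should_filter(&self, status: u16, content_length: u64) -> bool;
}

/// Deny-list filter keyed on HTTP status codes, e.g. built from `--filter-status 301`.
struct StatusCodeFilter {
    denied: Vec<u16>,
}

impl FeroxFilter for StatusCodeFilter {
    fn should_filter(&self, status: u16, _content_length: u64) -> bool {
        self.denied.contains(&status)
    }
}

fn main() {
    // filters configured for this scan; here, hide 301s
    let filters: Vec<Box<dyn FeroxFilter>> = vec![Box::new(StatusCodeFilter {
        denied: vec![301],
    })];

    // pretend these (status, content-length) pairs came back from the scanned server
    let responses = [(200_u16, 1024_u64), (301, 0), (403, 287)];

    for (status, length) in responses.iter() {
        let hidden = filters.iter().any(|f| f.should_filter(*status, *length));
        if !hidden {
            println!("{} {}B would be displayed", status, length);
        }
    }
}
```

Under this kind of design, an additional filter (for example one keyed on response size) would only need its own `should_filter` implementation; the loop that consults the list of active filters stays unchanged.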

## 🧐 Comparison w/ Similar Tools

There are quite a few similar tools for forced browsing/content discovery. Burp Suite Pro, Dirb, Dirbuster, etc...
@@ -460,7 +474,7 @@ a few of the use-cases in which feroxbuster may be a better fit:
|------------------------------------------------------------------|---|---|---|
| fast ||||
| easy to use ||| |
| blacklist status codes (in addition to whitelist) | |||
| filter out responses by status code (new in `v1.3.0`) | |||
| allows recursion || ||
| can specify query parameters || ||
| SOCKS proxy support || | |
13 changes: 7 additions & 6 deletions ferox-config.toml.example
@@ -8,25 +8,26 @@
# Any setting used here can be overridden by the corresponding command line option/argument
#
# wordlist = "/wordlists/seclists/Discovery/Web-Content/raft-medium-directories.txt"
# statuscodes = [200, 500]
# status_codes = [200, 500]
# filter_status = [301]
# threads = 1
# timeout = 5
# proxy = "http://127.0.0.1:8080"
# verbosity = 1
# scan_limit = 6
# quiet = true
# output = "/targets/ellingson_mineral_company/gibson.txt"
# useragent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0"
# user_agent = "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0"
# redirects = true
# insecure = true
# extensions = ["php", "html"]
# norecursion = true
# addslash = true
# no_recursion = true
# add_slash = true
# stdin = true
# dontfilter = true
# dont_filter = true
# extract_links = true
# depth = 1
# sizefilters = [5174]
# filter_size = [5174]
# queries = [["name","value"], ["rick", "astley"]]

# headers can be specified on multiple lines or as an inline table
43 changes: 32 additions & 11 deletions src/banner.rs
@@ -161,7 +161,7 @@ by Ben "epi" Risher {} ver: {}"#,

let mut codes = vec![];

for code in &config.statuscodes {
for code in &config.status_codes {
codes.push(status_colorizer(&code.to_string()))
}

@@ -190,6 +190,27 @@ by Ben "epi" Risher {} ver: {}"#,
)
.unwrap_or_default(); // 🆗

if !config.filter_status.is_empty() {
// exception here for optional print due to me wanting the allows and denys to be printed
// one after the other
let mut code_filters = vec![];

for code in &config.filter_status {
code_filters.push(status_colorizer(&code.to_string()))
}

writeln!(
&mut writer,
"{}",
format_banner_entry!(
"\u{1f5d1}",
"Status Code Filters",
format!("[{}]", code_filters.join(", "))
)
)
.unwrap_or_default(); // 🗑
}

writeln!(
&mut writer,
"{}",
@@ -200,7 +221,7 @@
writeln!(
&mut writer,
"{}",
format_banner_entry!("\u{1F9a1}", "User-Agent", config.useragent)
format_banner_entry!("\u{1F9a1}", "User-Agent", config.user_agent)
)
.unwrap_or_default(); // 🦡

@@ -234,8 +255,8 @@ by Ben "epi" Risher {} ver: {}"#,
}
}

if !config.sizefilters.is_empty() {
for filter in &config.sizefilters {
if !config.filter_size.is_empty() {
for filter in &config.filter_size {
writeln!(
&mut writer,
"{}",
@@ -309,11 +330,11 @@ by Ben "epi" Risher {} ver: {}"#,
.unwrap_or_default(); // 📍
}

if config.dontfilter {
if config.dont_filter {
writeln!(
&mut writer,
"{}",
format_banner_entry!("\u{1f92a}", "Filter Wildcards", !config.dontfilter)
format_banner_entry!("\u{1f92a}", "Filter Wildcards", !config.dont_filter)
)
.unwrap_or_default(); // 🤪
}
@@ -355,16 +376,16 @@ by Ben "epi" Risher {} ver: {}"#,
_ => {}
}

if config.addslash {
if config.add_slash {
writeln!(
&mut writer,
"{}",
format_banner_entry!("\u{1fa93}", "Add Slash", config.addslash)
format_banner_entry!("\u{1fa93}", "Add Slash", config.add_slash)
)
.unwrap_or_default(); // 🪓
}

if !config.norecursion {
if !config.no_recursion {
if config.depth == 0 {
writeln!(
&mut writer,
@@ -384,7 +405,7 @@ by Ben "epi" Risher {} ver: {}"#,
writeln!(
&mut writer,
"{}",
format_banner_entry!("\u{1f6ab}", "Do Not Recurse", config.norecursion)
format_banner_entry!("\u{1f6ab}", "Do Not Recurse", config.no_recursion)
)
.unwrap_or_default(); // 🚫
}
@@ -436,7 +457,7 @@ mod tests {
/// test to hit no execution of statuscode for loop in banner
async fn banner_intialize_without_status_codes() {
let mut config = Configuration::default();
config.statuscodes = vec![];
config.status_codes = vec![];
initialize(
&[String::from("http://localhost")],
&config,
4 changes: 2 additions & 2 deletions src/client.rs
@@ -10,7 +10,7 @@ use std::time::Duration;
/// Create and return an instance of [reqwest::Client](https://docs.rs/reqwest/latest/reqwest/struct.Client.html)
pub fn initialize(
timeout: u64,
useragent: &str,
user_agent: &str,
redirects: bool,
insecure: bool,
headers: &HashMap<String, String>,
@@ -27,7 +27,7 @@ pub fn initialize(

let client = Client::builder()
.timeout(Duration::new(timeout, 0))
.user_agent(useragent)
.user_agent(user_agent)
.danger_accept_invalid_certs(insecure)
.default_headers(header_map)
.redirect(policy);