diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
new file mode 100644
index 0000000..c22d61c
--- /dev/null
+++ b/.github/FUNDING.yml
@@ -0,0 +1,12 @@
+# These are supported funding model platforms
+
+github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
+patreon: j3ssie
+open_collective: jaeles-project
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: [ 'https://paypal.me/j3ssiejjj' ]
diff --git a/.goreleaser.yml b/.goreleaser.yml
index d47304f..187518f 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -11,12 +11,20 @@ builds:
       - 386
       - arm
       - arm64
+    ignore:
+      - goos: darwin
+        goarch: 386
 
 archives:
-  - id: tgz
-    format: tar.gz
+  - name_template: "{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}"
     replacements:
-      darwin: macOS
-    format_overrides:
-      - goos: windows
-        format: zip
\ No newline at end of file
+      darwin: macos
+      386: i386
+      amd64: x86_64
+    wrap_in_directory: true
+    format: zip
+
+checksum:
+  name_template: "{{ .ProjectName }}_checksums.txt"
+snapshot:
+  name_template: "{{ .Tag }}"
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..c69c3a0
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,22 @@
+MIT License
+
+Copyright (c) 2020 j3ssie
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/README.md b/README.md
index 9d797e3..8cb77e7 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,25 @@
-# gospider
-**gospider** is a simple spider written in Go.
- 
+# GoSpider
+
+**GoSpider** - Fast web spider written in Go
+
+
+## Want to painlessly integrate GoSpider into your recon workflow?
+
+<p align="center">
+  <img alt="huntersuite" src="https://user-images.githubusercontent.com/23289085/101143253-35ea6b80-3649-11eb-9130-d1fc306c9a76.png" height="200" />
+  <p align="center">
+Enjoying this tool? Support its development and take your game to the next level by using <a href="https://huntersuite.io">HunterSuite.io</a>
+  </p>
+</p>
+
 ## Installation
+
 ```
 go get -u github.com/jaeles-project/gospider
 ```
 
 ## Features
+
 * Fast web crawling
 * Brute force and parse sitemap.xml
 * Parse robots.txt
@@ -20,9 +33,13 @@ go get -u github.com/jaeles-project/gospider
 * Crawl multiple sites in parallel
 * Random mobile/web User-Agent
 
+## Showcases
+[![asciicast](https://asciinema.org/a/301827.svg)](https://asciinema.org/a/301827)
+
 ## Usage
-```
-A Simple Web Spider - v1.1.0 by @theblackturtle
+
+```shell
+Fast web spider written in Go - v1.1.2 by @thebl4ckturtle & @j3ssiejjj
 
 Usage:
   gospider [flags]
@@ -33,9 +50,9 @@ Flags:
   -p, --proxy string           Proxy (Ex: http://127.0.0.1:8080)
   -o, --output string          Output folder
   -u, --user-agent string      User Agent to use
-                                web: random web user-agent
-                                mobi: random mobile user-agent
-                                or you can set your special user-agent (default "web")
+                               	web: random web user-agent
+                               	mobi: random mobile user-agent
+                               	or you can set your special user-agent (default "web")
       --cookie string          Cookie to use (testA=a; testB=b)
   -H, --header stringArray     Header to use (Use multiple flag to set multiple header)
       --burp string            Load headers and cookie from burp raw http request
@@ -46,20 +63,33 @@ Flags:
   -k, --delay int              Delay is the duration to wait before creating a new request to the matching domains (second)
   -K, --random-delay int       RandomDelay is the extra randomized duration to wait added to Delay before creating a new request (second)
   -m, --timeout int            Request timeout (second) (default 10)
+  -B, --base                   Disable all and only use HTML content
+      --js                     Enable linkfinder in javascript file (default true)
       --sitemap                Try to crawl sitemap.xml
       --robots                 Try to crawl robots.txt (default true)
-  -a, --other-source           Find URLs from 3rd party (Archive.org, CommonCrawl.org, VirusTotal.com)
+  -a, --other-source           Find URLs from 3rd party (Archive.org, CommonCrawl.org, VirusTotal.com, AlienVault.com)
   -w, --include-subs           Include subdomains crawled from 3rd party. Default is main domain
   -r, --include-other-source   Also include other-source's urls (still crawl and request)
       --debug                  Turn on debug mode
   -v, --verbose                Turn on verbose
+  -q, --quiet                  Suppress all the output and only show URL
       --no-redirect            Disable redirect
       --version                Check version
   -h, --help                   help for gospider
+
 ```
 
 ## Example commands
+
+#### Quiet output
+
+```
+gospider -q -s "https://google.com/"
+```
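+
+Quiet mode suppresses the tagged log output and prints only the discovered URLs, which makes the results easy to pipe into other tools.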
+
+
 #### Run with single site
+
 ```
 gospider -s "https://google.com/" -o output -c 10 -d 1
 ```
@@ -97,5 +127,10 @@ gospider -s "https://google.com/" -o output -c 10 -d 1 --other-source --burp bur
 gospider -s "https://google.com/" -o output -c 10 -d 1 --blacklist ".(woff|pdf)"
 ```
 
-## Showcases
-[![asciicast](https://asciinema.org/a/301827.svg)](https://asciinema.org/a/301827)
+## License
+
+`Gospider` is made with ♥ by [@j3ssiejjj](https://twitter.com/j3ssiejjj) & [@thebl4ckturtle](https://twitter.com/thebl4ckturtle) and is released under the MIT license.
+
+## Donation
+
+[![paypal](https://www.paypalobjects.com/en_US/i/btn/btn_donateCC_LG.gif)](https://paypal.me/j3ssiejjj)
diff --git a/core/crawler.go b/core/crawler.go
index df642e3..0f0a2f2 100644
--- a/core/crawler.go
+++ b/core/crawler.go
@@ -1,419 +1,481 @@
 package core
 
 import (
-	"bufio"
-	"crypto/tls"
-	"fmt"
-	"github.com/gocolly/colly/v2"
-	"github.com/gocolly/colly/v2/extensions"
-	"github.com/spf13/cobra"
-	"github.com/theblackturtle/gospider/stringset"
-	"net"
-	"net/http"
-	"net/url"
-	"os"
-	"regexp"
-	"strings"
-	"time"
+    "bufio"
+    "crypto/tls"
+    "fmt"
+    "net"
+    "net/http"
+    "net/url"
+    "os"
+    "regexp"
+    "strings"
+    "time"
+
+    "github.com/gocolly/colly/v2"
+    "github.com/gocolly/colly/v2/extensions"
+    "github.com/jaeles-project/gospider/stringset"
+    "github.com/spf13/cobra"
 )
 
 var DefaultHTTPTransport = &http.Transport{
-	Dial: (&net.Dialer{
-		Timeout:   10 * time.Second,
-		KeepAlive: 30 * time.Second,
-		DualStack: true,
-	}).Dial,
-	MaxIdleConns:    100,
-	MaxConnsPerHost: 1000,
-	IdleConnTimeout: 30 * time.Second,
-
-	TLSHandshakeTimeout:   10 * time.Second,
-	ExpectContinueTimeout: 1 * time.Second,
-	ResponseHeaderTimeout: 3 * time.Second,
-	DisableCompression:    true,
-	TLSClientConfig:       &tls.Config{InsecureSkipVerify: true},
+    DialContext: (&net.Dialer{
+        Timeout: 10 * time.Second,
+        // Default is 15 seconds
+        KeepAlive: 30 * time.Second,
+    }).DialContext,
+    MaxIdleConns:    100,
+    MaxConnsPerHost: 1000,
+    IdleConnTimeout: 30 * time.Second,
+
+    // ExpectContinueTimeout: 1 * time.Second,
+    // ResponseHeaderTimeout: 3 * time.Second,
+    // DisableCompression:    false,
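+    // InsecureSkipVerify accepts invalid/self-signed certificates, and
+    // RenegotiateOnceAsClient tolerates servers that request TLS renegotiation.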
+    TLSClientConfig: &tls.Config{InsecureSkipVerify: true, Renegotiation: tls.RenegotiateOnceAsClient},
 }
 
 type Crawler struct {
-	cmd                 *cobra.Command
-	C                   *colly.Collector
-	LinkFinderCollector *colly.Collector
-	Output              *Output
-
-	subSet  *stringset.StringFilter
-	awsSet  *stringset.StringFilter
-	jsSet   *stringset.StringFilter
-	urlSet  *stringset.StringFilter
-	formSet *stringset.StringFilter
-
-	site   *url.URL
-	domain string
+    cmd                 *cobra.Command
+    C                   *colly.Collector
+    LinkFinderCollector *colly.Collector
+    Output              *Output
+
+    subSet  *stringset.StringFilter
+    awsSet  *stringset.StringFilter
+    jsSet   *stringset.StringFilter
+    urlSet  *stringset.StringFilter
+    formSet *stringset.StringFilter
+
+    site   *url.URL
+    domain string
+    quiet  bool
 }
 
 func NewCrawler(site *url.URL, cmd *cobra.Command) *Crawler {
-	domain := GetDomain(site)
-	if domain == "" {
-		Logger.Error("Failed to parse domain")
-		os.Exit(1)
-	}
-	Logger.Infof("Crawling site: %s", site)
-
-	maxDepth, _ := cmd.Flags().GetInt("depth")
-	concurrent, _ := cmd.Flags().GetInt("concurrent")
-	delay, _ := cmd.Flags().GetInt("delay")
-	randomDelay, _ := cmd.Flags().GetInt("random-delay")
-
-	c := colly.NewCollector(
-		colly.Async(true),
-		colly.MaxDepth(maxDepth),
-		colly.IgnoreRobotsTxt(),
-	)
-
-	// Setup http client
-	client := &http.Client{}
-
-	// Set proxy
-	proxy, _ := cmd.Flags().GetString("proxy")
-	if proxy != "" {
-		Logger.Info("Proxy: %s", proxy)
-		pU, err := url.Parse(proxy)
-		if err != nil {
-			Logger.Error("Failed to set proxy")
-		} else {
-			DefaultHTTPTransport.Proxy = http.ProxyURL(pU)
-		}
-	}
-
-	// Set request timeout
-	timeout, _ := cmd.Flags().GetInt("timeout")
-	if timeout == 0 {
-		Logger.Info("Your input timeout is 0. Gospider will set it to 10 seconds")
-		client.Timeout = 10 * time.Second
-	} else {
-		client.Timeout = time.Duration(timeout) * time.Second
-	}
-
-	// Disable redirect
-	noRedirect, _ := cmd.Flags().GetBool("no-redirect")
-	if noRedirect {
-		client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
-			nextLocation := req.Response.Header.Get("Location")
-			Logger.Debugf("Found Redirect: %s", nextLocation)
-			// Allow in redirect from http to https
-			if strings.Contains(nextLocation, site.Hostname()) {
-				Logger.Infof("Redirecting to: %s", nextLocation)
-				return nil
-			}
-			return http.ErrUseLastResponse
-		}
-	}
-
-	// Set client transport
-	client.Transport = DefaultHTTPTransport
-	c.SetClient(client)
-
-	// Get headers here to overwrite if "burp" flag used
-	burpFile, _ := cmd.Flags().GetString("burp")
-	if burpFile != "" {
-		bF, err := os.Open(burpFile)
-		if err != nil {
-			Logger.Errorf("Failed to open Burp File: %s", err)
-		} else {
-			rd := bufio.NewReader(bF)
-			req, err := http.ReadRequest(rd)
-			if err != nil {
-				Logger.Errorf("Failed to Parse Raw Request in %s: %s", burpFile, err)
-			} else {
-				// Set cookie
-				c.OnRequest(func(r *colly.Request) {
-					r.Headers.Set("Cookie", GetRawCookie(req.Cookies()))
-				})
-
-				// Set headers
-				c.OnRequest(func(r *colly.Request) {
-					for k, v := range req.Header {
-						r.Headers.Set(strings.TrimSpace(k), strings.TrimSpace(v[0]))
-					}
-				})
-
-			}
-		}
-	}
-
-	// Set cookies
-	cookie, _ := cmd.Flags().GetString("cookie")
-	if cookie != "" && burpFile == "" {
-		c.OnRequest(func(r *colly.Request) {
-			r.Headers.Set("Cookie", cookie)
-		})
-	}
-
-	// Set headers
-	headers, _ := cmd.Flags().GetStringArray("header")
-	if burpFile == "" {
-		for _, h := range headers {
-			headerArgs := strings.SplitN(h, ":", 2)
-			headerKey := strings.TrimSpace(headerArgs[0])
-			headerValue := strings.TrimSpace(headerArgs[1])
-			c.OnRequest(func(r *colly.Request) {
-				r.Headers.Set(headerKey, headerValue)
-			})
-		}
-	}
-
-	// Set User-Agent
-	randomUA, _ := cmd.Flags().GetString("user-agent")
-	switch ua := strings.ToLower(randomUA); {
-	case ua == "mobi":
-		extensions.RandomMobileUserAgent(c)
-	case ua == "web":
-		extensions.RandomUserAgent(c)
-	default:
-		c.UserAgent = ua
-	}
-
-	// Set referer
-	extensions.Referer(c)
-
-	// Init Output
-	var output *Output
-	outputFolder, _ := cmd.Flags().GetString("output")
-	if outputFolder != "" {
-		filename := strings.ReplaceAll(site.Hostname(), ".", "_")
-		output = NewOutput(outputFolder, filename)
-	}
-
-	// Set url whitelist regex
-	sRegex := regexp.MustCompile(`^https?:\/\/(?:[\w\-\_]+\.)+` + domain)
-	mRegex := regexp.MustCompile(`^https?:\/\/` + domain)
-	c.URLFilters = append(c.URLFilters, sRegex, mRegex)
-
-	// Set Limit Rule
-	err := c.Limit(&colly.LimitRule{
-		DomainGlob:  domain,
-		Parallelism: concurrent,
-		Delay:       time.Duration(delay) * time.Second,
-		RandomDelay: time.Duration(randomDelay) * time.Second,
-	})
-	if err != nil {
-		Logger.Errorf("Failed to set Limit Rule: %s", err)
-		os.Exit(1)
-	}
-
-	// GoSpider default disallowed  regex
-	disallowedRegex := `(?i).(jpg|jpeg|gif|css|tif|tiff|png|ttf|woff|woff2|ico)(?:\?|#|$)`
-	c.DisallowedURLFilters = append(c.DisallowedURLFilters, regexp.MustCompile(disallowedRegex))
-
-	// Set optional blacklist url regex
-	blacklists, _ := cmd.Flags().GetString("blacklist")
-	if blacklists != "" {
-		c.DisallowedURLFilters = append(c.DisallowedURLFilters, regexp.MustCompile(blacklists))
-	}
-
-	linkFinderCollector := c.Clone()
-	// Try to request as much as Javascript source and don't care about domain.
-	// The result of link finder will be send to Link Finder Collector to check is it working or not.
-	linkFinderCollector.URLFilters = nil
-
-	return &Crawler{
-		cmd:                 cmd,
-		C:                   c,
-		LinkFinderCollector: linkFinderCollector,
-		site:                site,
-		domain:              domain,
-		Output:              output,
-		urlSet:              stringset.NewStringFilter(),
-		subSet:              stringset.NewStringFilter(),
-		jsSet:               stringset.NewStringFilter(),
-		formSet:             stringset.NewStringFilter(),
-		awsSet:              stringset.NewStringFilter(),
-	}
+    domain := GetDomain(site)
+    if domain == "" {
+        Logger.Error("Failed to parse domain")
+        os.Exit(1)
+    }
+    Logger.Infof("Start crawling: %s", site)
+
+    quiet, _ := cmd.Flags().GetBool("quiet")
+    maxDepth, _ := cmd.Flags().GetInt("depth")
+    concurrent, _ := cmd.Flags().GetInt("concurrent")
+    delay, _ := cmd.Flags().GetInt("delay")
+    randomDelay, _ := cmd.Flags().GetInt("random-delay")
+
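+    // The collector runs requests asynchronously; parallelism and politeness
+    // delays are enforced by the LimitRule configured below.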
+    c := colly.NewCollector(
+        colly.Async(true),
+        colly.MaxDepth(maxDepth),
+        colly.IgnoreRobotsTxt(),
+    )
+
+    // Setup http client
+    client := &http.Client{}
+
+    // Set proxy
+    proxy, _ := cmd.Flags().GetString("proxy")
+    if proxy != "" {
+        Logger.Infof("Proxy: %s", proxy)
+        pU, err := url.Parse(proxy)
+        if err != nil {
+            Logger.Error("Failed to set proxy")
+        } else {
+            DefaultHTTPTransport.Proxy = http.ProxyURL(pU)
+        }
+    }
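+    // Note: DefaultHTTPTransport is a package-level variable, so this proxy
+    // setting applies to every crawler in the process.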
+
+    // Set request timeout
+    timeout, _ := cmd.Flags().GetInt("timeout")
+    if timeout == 0 {
+        Logger.Info("Your input timeout is 0. Gospider will set it to 10 seconds")
+        client.Timeout = 10 * time.Second
+    } else {
+        client.Timeout = time.Duration(timeout) * time.Second
+    }
+
+    // Disable redirect
+    noRedirect, _ := cmd.Flags().GetBool("no-redirect")
+    if noRedirect {
+        client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
+            nextLocation := req.Response.Header.Get("Location")
+            Logger.Debugf("Found Redirect: %s", nextLocation)
+            // Allow redirects from http to https and within the same hostname.
+            // Checking only that the location contains the hostname is safe here,
+            // because the main collector's URLFilters will reject out-of-scope URLs
+            // such as https://otherdomain.com/?url=maindomain.com
+            if strings.Contains(nextLocation, site.Hostname()) {
+                Logger.Infof("Redirecting to: %s", nextLocation)
+                return nil
+            }
+            return http.ErrUseLastResponse
+        }
+    }
+
+    // Set client transport
+    client.Transport = DefaultHTTPTransport
+    c.SetClient(client)
+
+    // Get headers here to overwrite if "burp" flag used
+    burpFile, _ := cmd.Flags().GetString("burp")
+    if burpFile != "" {
+        bF, err := os.Open(burpFile)
+        if err != nil {
+            Logger.Errorf("Failed to open Burp File: %s", err)
+        } else {
+            rd := bufio.NewReader(bF)
+            req, err := http.ReadRequest(rd)
+            if err != nil {
+                Logger.Errorf("Failed to Parse Raw Request in %s: %s", burpFile, err)
+            } else {
+                // Set cookie
+                c.OnRequest(func(r *colly.Request) {
+                    r.Headers.Set("Cookie", GetRawCookie(req.Cookies()))
+                })
+
+                // Set headers
+                c.OnRequest(func(r *colly.Request) {
+                    for k, v := range req.Header {
+                        r.Headers.Set(strings.TrimSpace(k), strings.TrimSpace(v[0]))
+                    }
+                })
+
+            }
+        }
+    }
+
+    // Set cookies
+    cookie, _ := cmd.Flags().GetString("cookie")
+    if cookie != "" && burpFile == "" {
+        c.OnRequest(func(r *colly.Request) {
+            r.Headers.Set("Cookie", cookie)
+        })
+    }
+
+    // Set headers
+    headers, _ := cmd.Flags().GetStringArray("header")
+    if burpFile == "" {
+        for _, h := range headers {
+            headerArgs := strings.SplitN(h, ":", 2)
+            headerKey := strings.TrimSpace(headerArgs[0])
+            headerValue := strings.TrimSpace(headerArgs[1])
+            c.OnRequest(func(r *colly.Request) {
+                r.Headers.Set(headerKey, headerValue)
+            })
+        }
+    }
+
+    // Set User-Agent
+    randomUA, _ := cmd.Flags().GetString("user-agent")
+    switch ua := strings.ToLower(randomUA); {
+    case ua == "mobi":
+        extensions.RandomMobileUserAgent(c)
+    case ua == "web":
+        extensions.RandomUserAgent(c)
+    default:
+        c.UserAgent = ua
+    }
+
+    // Set referer
+    extensions.Referer(c)
+
+    // Init Output
+    var output *Output
+    outputFolder, _ := cmd.Flags().GetString("output")
+    if outputFolder != "" {
+        filename := strings.ReplaceAll(site.Hostname(), ".", "_")
+        output = NewOutput(outputFolder, filename)
+    }
+    // Set url whitelist regex
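+    // The bare domain is compiled as the whitelist regex, so any URL that
+    // contains it (including subdomains) is considered in scope.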
+    sRegex := regexp.MustCompile(domain)
+    c.URLFilters = append(c.URLFilters, sRegex)
+
+    // Set Limit Rule
+    err := c.Limit(&colly.LimitRule{
+        DomainGlob:  domain,
+        Parallelism: concurrent,
+        Delay:       time.Duration(delay) * time.Second,
+        RandomDelay: time.Duration(randomDelay) * time.Second,
+    })
+    if err != nil {
+        Logger.Errorf("Failed to set Limit Rule: %s", err)
+        os.Exit(1)
+    }
+
+    // GoSpider default disallowed regex
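+    // Skips static assets (images, media, fonts, stylesheets) by file extension.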
+    disallowedRegex := `(?i)\.(png|apng|bmp|gif|ico|cur|jpg|jpeg|jfif|pjp|pjpeg|svg|tif|tiff|webp|xbm|3gp|aac|flac|mpg|mpeg|mp3|mp4|m4a|m4v|m4p|oga|ogg|ogv|mov|wav|webm|eot|woff|woff2|ttf|otf|css)(?:\?|#|$)`
+    c.DisallowedURLFilters = append(c.DisallowedURLFilters, regexp.MustCompile(disallowedRegex))
+
+    // Set optional blacklist url regex
+    blacklists, _ := cmd.Flags().GetString("blacklist")
+    if blacklists != "" {
+        c.DisallowedURLFilters = append(c.DisallowedURLFilters, regexp.MustCompile(blacklists))
+    }
+
+    // Set optional whitelist url regex
+    whiteLists, _ := cmd.Flags().GetString("whitelist")
+    if whiteLists != "" {
+        c.URLFilters = make([]*regexp.Regexp, 0)
+        c.URLFilters = append(c.URLFilters, regexp.MustCompile(whiteLists))
+    }
+
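+    // A whitelisted domain replaces any previously set URL filters entirely.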
+    whiteListDomain, _ := cmd.Flags().GetString("whitelist-domain")
+    if whiteListDomain != "" {
+        c.URLFilters = make([]*regexp.Regexp, 0)
+        c.URLFilters = append(c.URLFilters, regexp.MustCompile("http(s)?://"+whiteListDomain))
+    }
+
+    linkFinderCollector := c.Clone()
+    // Request as many Javascript sources as possible, regardless of domain.
+    // Link finder results are sent back to this collector to verify that they actually work.
+    linkFinderCollector.URLFilters = nil
+    if whiteLists != "" {
+        linkFinderCollector.URLFilters = append(linkFinderCollector.URLFilters, regexp.MustCompile(whiteLists))
+    }
+    if whiteListDomain != "" {
+        linkFinderCollector.URLFilters = append(linkFinderCollector.URLFilters, regexp.MustCompile("http(s)?://"+whiteListDomain))
+    }
+
+    return &Crawler{
+        cmd:                 cmd,
+        C:                   c,
+        LinkFinderCollector: linkFinderCollector,
+        site:                site,
+        quiet:               quiet,
+        domain:              domain,
+        Output:              output,
+        urlSet:              stringset.NewStringFilter(),
+        subSet:              stringset.NewStringFilter(),
+        jsSet:               stringset.NewStringFilter(),
+        formSet:             stringset.NewStringFilter(),
+        awsSet:              stringset.NewStringFilter(),
+    }
 }
 
-func (crawler *Crawler) Start() {
-	// Setup Link Finder
-	crawler.setupLinkFinder()
-
-	// Handle url
-	crawler.C.OnHTML("[href]", func(e *colly.HTMLElement) {
-		urlString := e.Request.AbsoluteURL(e.Attr("href"))
-		urlString = FixUrl(urlString, crawler.site)
-		if urlString == "" {
-			return
-		}
-		if !crawler.urlSet.Duplicate(urlString) {
-			_ = e.Request.Visit(urlString)
-		}
-	})
-
-	// Handle form
-	crawler.C.OnHTML("form[action]", func(e *colly.HTMLElement) {
-		formUrl := e.Request.URL.String()
-		if !crawler.formSet.Duplicate(formUrl) {
-			outputFormat := fmt.Sprintf("[form] - %s", formUrl)
-			fmt.Println(outputFormat)
-			if crawler.Output != nil {
-				crawler.Output.WriteToFile(outputFormat)
-			}
-
-		}
-	})
-
-	// Find Upload Form
-	uploadFormSet := stringset.NewStringFilter()
-	crawler.C.OnHTML(`input[type="file"]`, func(e *colly.HTMLElement) {
-		uploadUrl := e.Request.URL.String()
-		if !uploadFormSet.Duplicate(uploadUrl) {
-			outputFormat := fmt.Sprintf("[upload-form] - %s", uploadUrl)
-			fmt.Println(outputFormat)
-			if crawler.Output != nil {
-				crawler.Output.WriteToFile(outputFormat)
-			}
-		}
-
-	})
-
-	// Handle js files
-	crawler.C.OnHTML("[src]", func(e *colly.HTMLElement) {
-		jsFileUrl := e.Request.AbsoluteURL(e.Attr("src"))
-		jsFileUrl = FixUrl(jsFileUrl, crawler.site)
-		if jsFileUrl == "" {
-			return
-		}
-
-		fileExt := GetExtType(jsFileUrl)
-		if fileExt == ".js" || fileExt == ".xml" || fileExt == ".json" {
-			if !crawler.jsSet.Duplicate(jsFileUrl) {
-				outputFormat := fmt.Sprintf("[javascript] - %s", jsFileUrl)
-				fmt.Println(outputFormat)
-				if crawler.Output != nil {
-					crawler.Output.WriteToFile(outputFormat)
-				}
-
-				// If JS file is minimal format. Try to find original format
-				if strings.Contains(jsFileUrl, ".min.js") {
-					originalJS := strings.ReplaceAll(jsFileUrl, ".min.js", ".js")
-					_ = crawler.LinkFinderCollector.Visit(originalJS)
-				}
-
-				// Send Javascript to Link Finder Collector
-				_ = crawler.LinkFinderCollector.Visit(jsFileUrl)
-			}
-		}
-	})
-
-	crawler.C.OnResponse(func(response *colly.Response) {
-		respStr := DecodeChars(string(response.Body))
-
-		crawler.findSubdomains(respStr)
-		crawler.findAWSS3(respStr)
-
-		// Verify which link is working
-		u := response.Request.URL.String()
-		outputFormat := fmt.Sprintf("[url] - [code-%d] - %s", response.StatusCode, u)
-		fmt.Println(outputFormat)
-		if crawler.Output != nil {
-			crawler.Output.WriteToFile(outputFormat)
-		}
-	})
-
-	crawler.C.OnError(func(response *colly.Response, err error) {
-		Logger.Debugf("Error request: %s - Status code: %v - Error: %s", response.Request.URL.String(), response.StatusCode, err)
-		/*
-			1xx Informational
-			2xx Success
-			3xx Redirection
-			4xx Client Error
-			5xx Server Error
-		*/
-
-		if response.StatusCode == 404 || response.StatusCode == 429 || response.StatusCode < 100 || response.StatusCode >= 500 {
-			return
-		}
-
-		u := response.Request.URL.String()
-		outputFormat := fmt.Sprintf("[url] - [code-%d] - %s", response.StatusCode, u)
-		fmt.Println(outputFormat)
-		if crawler.Output != nil {
-			crawler.Output.WriteToFile(outputFormat)
-		}
-	})
-
-	_ = crawler.C.Visit(crawler.site.String())
+func (crawler *Crawler) Start(linkfinder bool) {
+    // Setup Link Finder
+    if linkfinder {
+        crawler.setupLinkFinder()
+    }
+
+    // Handle url
+    crawler.C.OnHTML("[href]", func(e *colly.HTMLElement) {
+        urlString := e.Request.AbsoluteURL(e.Attr("href"))
+        urlString = FixUrl(crawler.site, urlString)
+        if urlString == "" {
+            return
+        }
+        if !crawler.urlSet.Duplicate(urlString) {
+            _ = e.Request.Visit(urlString)
+        }
+    })
+
+    // Handle form
+    crawler.C.OnHTML("form[action]", func(e *colly.HTMLElement) {
+        formUrl := e.Request.URL.String()
+        if !crawler.formSet.Duplicate(formUrl) {
+            outputFormat := fmt.Sprintf("[form] - %s", formUrl)
+            if !crawler.quiet {
+                fmt.Println(outputFormat)
+            }
+            if crawler.Output != nil {
+                crawler.Output.WriteToFile(outputFormat)
+            }
+
+        }
+    })
+
+    // Find Upload Form
+    uploadFormSet := stringset.NewStringFilter()
+    crawler.C.OnHTML(`input[type="file"]`, func(e *colly.HTMLElement) {
+        uploadUrl := e.Request.URL.String()
+        if !uploadFormSet.Duplicate(uploadUrl) {
+            outputFormat := fmt.Sprintf("[upload-form] - %s", uploadUrl)
+            if !crawler.quiet {
+                fmt.Println(outputFormat)
+            }
+            if crawler.Output != nil {
+                crawler.Output.WriteToFile(outputFormat)
+            }
+        }
+
+    })
+
+    // Handle js files
+    crawler.C.OnHTML("[src]", func(e *colly.HTMLElement) {
+        jsFileUrl := e.Request.AbsoluteURL(e.Attr("src"))
+        jsFileUrl = FixUrl(crawler.site, jsFileUrl)
+        if jsFileUrl == "" {
+            return
+        }
+
+        fileExt := GetExtType(jsFileUrl)
+        if fileExt == ".js" || fileExt == ".xml" || fileExt == ".json" {
+            if !crawler.jsSet.Duplicate(jsFileUrl) {
+                outputFormat := fmt.Sprintf("[javascript] - %s", jsFileUrl)
+                if !crawler.quiet {
+                    fmt.Println(outputFormat)
+                }
+                if crawler.Output != nil {
+                    crawler.Output.WriteToFile(outputFormat)
+                }
+
+                // If the JS file is minified, try to fetch the original (unminified) source as well
+                if strings.Contains(jsFileUrl, ".min.js") {
+                    originalJS := strings.ReplaceAll(jsFileUrl, ".min.js", ".js")
+                    _ = crawler.LinkFinderCollector.Visit(originalJS)
+                }
+
+                // Send Javascript to Link Finder Collector
+                _ = crawler.LinkFinderCollector.Visit(jsFileUrl)
+            }
+        }
+    })
+
+    crawler.C.OnResponse(func(response *colly.Response) {
+        respStr := DecodeChars(string(response.Body))
+
+        crawler.findSubdomains(respStr)
+        crawler.findAWSS3(respStr)
+
+        // Verify which link is working
+        u := response.Request.URL.String()
+
+        outputFormat := fmt.Sprintf("[url] - [code-%d] - %s", response.StatusCode, u)
+        if !crawler.quiet {
+            fmt.Println(outputFormat)
+        } else {
+            fmt.Println(u)
+        }
+        if crawler.Output != nil {
+            crawler.Output.WriteToFile(outputFormat)
+        }
+    })
+
+    crawler.C.OnError(func(response *colly.Response, err error) {
+        Logger.Debugf("Error request: %s - Status code: %v - Error: %s", response.Request.URL.String(), response.StatusCode, err)
+        /*
+        	1xx Informational
+        	2xx Success
+        	3xx Redirection
+        	4xx Client Error
+        	5xx Server Error
+        */
+        if response.StatusCode == 404 || response.StatusCode == 429 || response.StatusCode < 100 || response.StatusCode >= 500 {
+            return
+        }
+
+        u := response.Request.URL.String()
+        outputFormat := fmt.Sprintf("[url] - [code-%d] - %s", response.StatusCode, u)
+        if !crawler.quiet {
+            fmt.Println(outputFormat)
+        } else {
+            fmt.Println(u)
+        }
+        if crawler.Output != nil {
+            crawler.Output.WriteToFile(outputFormat)
+        }
+    })
+
+    err := crawler.C.Visit(crawler.site.String())
+    if err != nil {
+        Logger.Errorf("Failed to start %s: %s", crawler.site.String(), err)
+    }
 }
 
 // Find subdomains from response
 func (crawler *Crawler) findSubdomains(resp string) {
-	subs := GetSubdomains(resp, crawler.domain)
-	for _, sub := range subs {
-		if !crawler.subSet.Duplicate(sub) {
-			outputFormat := fmt.Sprintf("[subdomains] - %s", sub)
-			fmt.Println(outputFormat)
-			if crawler.Output != nil {
-				crawler.Output.WriteToFile(outputFormat)
-			}
-		}
-	}
+    subs := GetSubdomains(resp, crawler.domain)
+    for _, sub := range subs {
+        if !crawler.subSet.Duplicate(sub) {
+            outputFormat := fmt.Sprintf("[subdomains] - %s", sub)
+            if !crawler.quiet {
+                outputFormat = fmt.Sprintf("http://%s", sub)
+                fmt.Println(outputFormat)
+                outputFormat = fmt.Sprintf("https://%s", sub)
+                fmt.Println(outputFormat)
+            }
+            if crawler.Output != nil {
+                crawler.Output.WriteToFile(outputFormat)
+            }
+        }
+    }
 }
 
 // Find AWS S3 from response
 func (crawler *Crawler) findAWSS3(resp string) {
-	aws := GetAWSS3(resp)
-	for _, e := range aws {
-		if !crawler.awsSet.Duplicate(e) {
-			outputFormat := fmt.Sprintf("[aws-s3] - %s", e)
-			fmt.Println(outputFormat)
-			if crawler.Output != nil {
-				crawler.Output.WriteToFile(outputFormat)
-			}
-		}
-	}
+    aws := GetAWSS3(resp)
+    for _, e := range aws {
+        if !crawler.awsSet.Duplicate(e) {
+            outputFormat := fmt.Sprintf("[aws-s3] - %s", e)
+            if !crawler.quiet {
+                fmt.Println(outputFormat)
+            }
+            if crawler.Output != nil {
+                crawler.Output.WriteToFile(outputFormat)
+            }
+        }
+    }
 }
 
 // Setup link finder
 func (crawler *Crawler) setupLinkFinder() {
-	crawler.LinkFinderCollector.OnResponse(func(response *colly.Response) {
-		if response.StatusCode != 200 {
-			return
-		}
-
-		respStr := string(response.Body)
-
-		crawler.findAWSS3(respStr)
-		crawler.findSubdomains(respStr)
-
-		paths, err := LinkFinder(respStr)
-		if err != nil {
-			Logger.Error(err)
-			return
-		}
-
-		var inScope bool
-		if InScope(response.Request.URL, crawler.C.URLFilters) {
-			inScope = true
-		}
-		for _, path := range paths {
-			// JS Regex Result
-			outputFormat := fmt.Sprintf("[linkfinder] - [from: %s] - %s", response.Request.URL.String(), path)
-			fmt.Println(outputFormat)
-			if crawler.Output != nil {
-				crawler.Output.WriteToFile(outputFormat)
-			}
-
-			// Try to request JS path
-			// Try to generate URLs with main site
-			urlWithMainSite := FixUrl(path, crawler.site)
-			if urlWithMainSite != "" {
-				_ = crawler.C.Visit(urlWithMainSite)
-			}
-
-			// Try to generate URLs with the site where Javascript file host in (must be in main or sub domain)
-			if inScope {
-				urlWithJSHostIn := FixUrl(path, response.Request.URL)
-				if urlWithJSHostIn != "" {
-					_ = crawler.C.Visit(urlWithJSHostIn)
-				}
-			}
-		}
-	})
+    crawler.LinkFinderCollector.OnResponse(func(response *colly.Response) {
+        if response.StatusCode != 200 {
+            return
+        }
+
+        respStr := string(response.Body)
+
+        crawler.findAWSS3(respStr)
+        crawler.findSubdomains(respStr)
+
+        paths, err := LinkFinder(respStr)
+        if err != nil {
+            Logger.Error(err)
+            return
+        }
+
+        var inScope bool
+        if InScope(response.Request.URL, crawler.C.URLFilters) {
+            inScope = true
+        }
+        for _, relPath := range paths {
+            // JS Regex Result
+            if !crawler.quiet {
+                outputFormat := fmt.Sprintf("[linkfinder] - [from: %s] - %s", response.Request.URL.String(), relPath)
+                fmt.Println(outputFormat)
+            }
+
+            rebuildURL := FixUrl(crawler.site, relPath)
+            if rebuildURL == "" {
+                continue
+            }
+
+            outputFormat := fmt.Sprintf("[linkfinder] - %s", rebuildURL)
+            if !crawler.quiet {
+                fmt.Println(outputFormat)
+            }
+
+            if crawler.Output != nil {
+                crawler.Output.WriteToFile(outputFormat)
+            }
+
+            // Visit the JS path rebuilt against the main site
+            _ = crawler.C.Visit(rebuildURL)
+
+            // Also rebuild the path against the host serving the Javascript file
+            // (must be the main domain or a subdomain)
+            if inScope {
+                urlWithJSHostIn := FixUrl(response.Request.URL, relPath)
+                if urlWithJSHostIn != "" {
+                    _ = crawler.C.Visit(urlWithJSHostIn)
+                }
+            }
+        }
+    })
 }
diff --git a/core/linkfinder.go b/core/linkfinder.go
index 680953b..1eba998 100644
--- a/core/linkfinder.go
+++ b/core/linkfinder.go
@@ -1,29 +1,29 @@
 package core
 
 import (
-	"regexp"
-	"strings"
+    "regexp"
+    "strings"
 )
 
 var linkFinderRegex = regexp.MustCompile(`(?:"|')(((?:[a-zA-Z]{1,10}://|//)[^"'/]{1,}\.[a-zA-Z]{2,}[^"']{0,})|((?:/|\.\./|\./)[^"'><,;| *()(%%$^/\\\[\]][^"'><,;|()]{1,})|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{1,}\.(?:[a-zA-Z]{1,4}|action)(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-/]{1,}/[a-zA-Z0-9_\-/]{3,}(?:[\?|#][^"|']{0,}|))|([a-zA-Z0-9_\-]{1,}\.(?:php|asp|aspx|jsp|json|action|html|js|txt|xml)(?:[\?|#][^"|']{0,}|)))(?:"|')`)
 
 func LinkFinder(source string) ([]string, error) {
-	var links []string
-	//source = strings.ToLower(source)
-	if len(source) > 1000000 {
-		source = strings.ReplaceAll(source, ";", ";\r\n")
-		source = strings.ReplaceAll(source, ",", ",\r\n")
-	}
-	source = DecodeChars(source)
+    var links []string
+    // source = strings.ToLower(source)
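+    // Very large (typically minified) sources are broken at ";" and "," so the
+    // regex scans many short lines instead of one multi-megabyte line.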
+    if len(source) > 1000000 {
+        source = strings.ReplaceAll(source, ";", ";\r\n")
+        source = strings.ReplaceAll(source, ",", ",\r\n")
+    }
+    source = DecodeChars(source)
 
-	match := linkFinderRegex.FindAllStringSubmatch(source, -1)
-	for _, m := range match {
-		matchGroup1 := FilterNewLines(m[1])
-		if matchGroup1 == "" {
-			continue
-		}
-		links = append(links, matchGroup1)
-	}
-	links = Unique(links)
-	return links, nil
+    match := linkFinderRegex.FindAllStringSubmatch(source, -1)
+    for _, m := range match {
+        matchGroup1 := FilterNewLines(m[1])
+        if matchGroup1 == "" {
+            continue
+        }
+        links = append(links, matchGroup1)
+    }
+    links = Unique(links)
+    return links, nil
 }
diff --git a/core/logger.go b/core/logger.go
index b201ef2..7ff0b62 100644
--- a/core/logger.go
+++ b/core/logger.go
@@ -1,17 +1,23 @@
 package core
 
 import (
-	"github.com/sirupsen/logrus"
-	"os"
+    "os"
+
+    "github.com/sirupsen/logrus"
+    prefixed "github.com/x-cray/logrus-prefixed-formatter"
 )
 
 var Logger *logrus.Logger
 
 func init() {
-	Logger = logrus.New()
-	Logger.SetFormatter(&logrus.TextFormatter{
-		ForceColors:      true,
-		DisableTimestamp: true,
-	})
-	Logger.SetOutput(os.Stdout)
+    // Write logs to stderr so stdout carries only crawl results.
+    Logger = &logrus.Logger{
+        Out:   os.Stderr,
+        Level: logrus.InfoLevel,
+        Formatter: &prefixed.TextFormatter{
+            ForceColors:     true,
+            ForceFormatting: true,
+        },
+    }
 }
diff --git a/core/othersource.go b/core/othersource.go
index ac26b86..bd956b8 100644
--- a/core/othersource.go
+++ b/core/othersource.go
@@ -1,215 +1,215 @@
 package core
 
 import (
-	"bufio"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-	"os"
-	"sync"
+    "bufio"
+    "encoding/json"
+    "fmt"
+    "io/ioutil"
+    "net/http"
+    "os"
+    "sync"
 )
 
 func OtherSources(domain string, includeSubs bool) []string {
-	noSubs := true
-	if includeSubs {
-		noSubs = false
-	}
-	var urls []string
-
-	fetchFns := []fetchFn{
-		getWaybackURLs,
-		getCommonCrawlURLs,
-		getVirusTotalURLs,
-		getOtxUrls,
-	}
-
-	var wg sync.WaitGroup
-
-	for _, fn := range fetchFns {
-		wUrlChan := make(chan wurl)
-		wg.Add(1)
-		fetch := fn
-		go func() {
-			defer wg.Done()
-			resp, err := fetch(domain, noSubs)
-			if err != nil {
-				return
-			}
-			for _, r := range resp {
-				wUrlChan <- r
-			}
-		}()
-
-		go func() {
-			wg.Wait()
-			close(wUrlChan)
-		}()
-
-		for w := range wUrlChan {
-			urls = append(urls, w.url)
-		}
-	}
-	return Unique(urls)
+    noSubs := true
+    if includeSubs {
+        noSubs = false
+    }
+    var urls []string
+
+    fetchFns := []fetchFn{
+        getWaybackURLs,
+        getCommonCrawlURLs,
+        getVirusTotalURLs,
+        getOtxUrls,
+    }
+
+    var wg sync.WaitGroup
+
+    for _, fn := range fetchFns {
+        wUrlChan := make(chan wurl)
+        wg.Add(1)
+        fetch := fn
+        go func() {
+            defer wg.Done()
+            resp, err := fetch(domain, noSubs)
+            if err != nil {
+                return
+            }
+            for _, r := range resp {
+                wUrlChan <- r
+            }
+        }()
+
+        go func() {
+            wg.Wait()
+            close(wUrlChan)
+        }()
+
+        for w := range wUrlChan {
+            urls = append(urls, w.url)
+        }
+    }
+    return Unique(urls)
 }
 
 type wurl struct {
-	date string
-	url  string
+    date string
+    url  string
 }
 
 type fetchFn func(string, bool) ([]wurl, error)
 
 func getWaybackURLs(domain string, noSubs bool) ([]wurl, error) {
-	subsWildcard := "*."
-	if noSubs {
-		subsWildcard = ""
-	}
-	res, err := http.Get(
-		fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=%s%s/*&output=json&collapse=urlkey", subsWildcard, domain),
-	)
-	if err != nil {
-		return []wurl{}, err
-	}
-
-	raw, err := ioutil.ReadAll(res.Body)
-
-	res.Body.Close()
-	if err != nil {
-		return []wurl{}, err
-	}
-
-	var wrapper [][]string
-	err = json.Unmarshal(raw, &wrapper)
-
-	out := make([]wurl, 0, len(wrapper))
-
-	skip := true
-	for _, urls := range wrapper {
-		// The first item is always just the string "original",
-		// so we should skip the first item
-		if skip {
-			skip = false
-			continue
-		}
-		out = append(out, wurl{date: urls[1], url: urls[2]})
-	}
-
-	return out, nil
+    subsWildcard := "*."
+    if noSubs {
+        subsWildcard = ""
+    }
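+    // Query the Wayback Machine CDX API; output=json returns rows as JSON arrays
+    // and collapse=urlkey de-duplicates captures of the same URL.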
+    res, err := http.Get(
+        fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=%s%s/*&output=json&collapse=urlkey", subsWildcard, domain),
+    )
+    if err != nil {
+        return []wurl{}, err
+    }
+
+    raw, err := ioutil.ReadAll(res.Body)
+
+    res.Body.Close()
+    if err != nil {
+        return []wurl{}, err
+    }
+
+    var wrapper [][]string
+    if err = json.Unmarshal(raw, &wrapper); err != nil {
+        return []wurl{}, err
+    }
+
+    out := make([]wurl, 0, len(wrapper))
+
+    skip := true
+    for _, urls := range wrapper {
+        // The first row is the CDX field header (urlkey, timestamp, original, ...),
+        // so skip it
+        if skip {
+            skip = false
+            continue
+        }
+        out = append(out, wurl{date: urls[1], url: urls[2]})
+    }
+
+    return out, nil
 
 }
 
 func getCommonCrawlURLs(domain string, noSubs bool) ([]wurl, error) {
-	subsWildcard := "*."
-	if noSubs {
-		subsWildcard = ""
-	}
-	res, err := http.Get(
-		fmt.Sprintf("http://index.commoncrawl.org/CC-MAIN-2019-51-index?url=%s%s/*&output=json", subsWildcard, domain),
-	)
-	if err != nil {
-		return []wurl{}, err
-	}
-
-	defer res.Body.Close()
-	sc := bufio.NewScanner(res.Body)
-
-	out := make([]wurl, 0)
-
-	for sc.Scan() {
-		wrapper := struct {
-			URL       string `json:"url"`
-			Timestamp string `json:"timestamp"`
-		}{}
-		err = json.Unmarshal([]byte(sc.Text()), &wrapper)
-
-		if err != nil {
-			continue
-		}
-
-		out = append(out, wurl{date: wrapper.Timestamp, url: wrapper.URL})
-	}
-
-	return out, nil
+    subsWildcard := "*."
+    if noSubs {
+        subsWildcard = ""
+    }
+    res, err := http.Get(
+        fmt.Sprintf("http://index.commoncrawl.org/CC-MAIN-2019-51-index?url=%s%s/*&output=json", subsWildcard, domain),
+    )
+    if err != nil {
+        return []wurl{}, err
+    }
+
+    defer res.Body.Close()
+    sc := bufio.NewScanner(res.Body)
+
+    out := make([]wurl, 0)
+
+    for sc.Scan() {
+        wrapper := struct {
+            URL       string `json:"url"`
+            Timestamp string `json:"timestamp"`
+        }{}
+        err = json.Unmarshal([]byte(sc.Text()), &wrapper)
+
+        if err != nil {
+            continue
+        }
+
+        out = append(out, wurl{date: wrapper.Timestamp, url: wrapper.URL})
+    }
+
+    return out, nil
 
 }
 
 func getVirusTotalURLs(domain string, noSubs bool) ([]wurl, error) {
-	out := make([]wurl, 0)
+    out := make([]wurl, 0)
 
-	apiKey := os.Getenv("VT_API_KEY")
-	if apiKey == "" {
-		Logger.Warnf("You are not set VirusTotal API Key yet.")
-		return out, nil
-	}
+    apiKey := os.Getenv("VT_API_KEY")
+    if apiKey == "" {
+        Logger.Warnf("You are not set VirusTotal API Key yet.")
+        return out, nil
+    }
 
-	fetchURL := fmt.Sprintf(
-		"https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s",
-		apiKey,
-		domain,
-	)
+    fetchURL := fmt.Sprintf(
+        "https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s",
+        apiKey,
+        domain,
+    )
 
-	resp, err := http.Get(fetchURL)
-	if err != nil {
-		return out, err
-	}
-	defer resp.Body.Close()
+    resp, err := http.Get(fetchURL)
+    if err != nil {
+        return out, err
+    }
+    defer resp.Body.Close()
 
-	wrapper := struct {
-		URLs []struct {
-			URL string `json:"url"`
-		} `json:"detected_urls"`
-	}{}
+    wrapper := struct {
+        URLs []struct {
+            URL string `json:"url"`
+        } `json:"detected_urls"`
+    }{}
 
-	dec := json.NewDecoder(resp.Body)
+    dec := json.NewDecoder(resp.Body)
 
-	err = dec.Decode(&wrapper)
+    if err = dec.Decode(&wrapper); err != nil {
+        return out, err
+    }
+
-	for _, u := range wrapper.URLs {
-		out = append(out, wurl{url: u.URL})
-	}
+    for _, u := range wrapper.URLs {
+        out = append(out, wurl{url: u.URL})
+    }
 
-	return out, nil
+    return out, nil
 }
 
 func getOtxUrls(domain string, noSubs bool) ([]wurl, error) {
-	var urls []wurl
-	page := 0
-	for {
-		r, err := http.Get(fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/hostname/%s/url_list?limit=50&page=%d", domain, page))
-		if err != nil {
-			return []wurl{}, err
-		}
-		bytes, err := ioutil.ReadAll(r.Body)
-		if err != nil {
-			return []wurl{}, err
-		}
-		r.Body.Close()
-
-		wrapper := struct {
-			HasNext    bool `json:"has_next"`
-			ActualSize int  `json:"actual_size"`
-			URLList    []struct {
-				Domain   string `json:"domain"`
-				URL      string `json:"url"`
-				Hostname string `json:"hostname"`
-				Httpcode int    `json:"httpcode"`
-				PageNum  int    `json:"page_num"`
-				FullSize int    `json:"full_size"`
-				Paged    bool   `json:"paged"`
-			} `json:"url_list"`
-		}{}
-		err = json.Unmarshal(bytes, &wrapper)
-		if err != nil {
-			return []wurl{}, err
-		}
-		for _, url := range wrapper.URLList {
-			urls = append(urls, wurl{url: url.URL})
-		}
-		if !wrapper.HasNext {
-			break
-		}
-		page++
-	}
-	return urls, nil
+    var urls []wurl
+    page := 0
+    for {
+        r, err := http.Get(fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/hostname/%s/url_list?limit=50&page=%d", domain, page))
+        if err != nil {
+            return []wurl{}, err
+        }
+        bytes, err := ioutil.ReadAll(r.Body)
+        if err != nil {
+            return []wurl{}, err
+        }
+        r.Body.Close()
+
+        wrapper := struct {
+            HasNext    bool `json:"has_next"`
+            ActualSize int  `json:"actual_size"`
+            URLList    []struct {
+                Domain   string `json:"domain"`
+                URL      string `json:"url"`
+                Hostname string `json:"hostname"`
+                Httpcode int    `json:"httpcode"`
+                PageNum  int    `json:"page_num"`
+                FullSize int    `json:"full_size"`
+                Paged    bool   `json:"paged"`
+            } `json:"url_list"`
+        }{}
+        err = json.Unmarshal(bytes, &wrapper)
+        if err != nil {
+            return []wurl{}, err
+        }
+        for _, url := range wrapper.URLList {
+            urls = append(urls, wurl{url: url.URL})
+        }
+        if !wrapper.HasNext {
+            break
+        }
+        page++
+    }
+    return urls, nil
 }
diff --git a/core/output.go b/core/output.go
index 882db9a..cc65833 100644
--- a/core/output.go
+++ b/core/output.go
@@ -1,34 +1,34 @@
 package core
 
 import (
-	"os"
-	"path/filepath"
-	"sync"
+    "os"
+    "path/filepath"
+    "sync"
 )
 
 type Output struct {
-	mu sync.Mutex
-	f  *os.File
+    mu sync.Mutex
+    f  *os.File
 }
 
 func NewOutput(folder, filename string) *Output {
-	outFile := filepath.Join(folder, filename)
-	f, err := os.OpenFile(outFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, os.ModePerm)
-	if err != nil {
-		Logger.Errorf("Failed to open file to write Output: %s", err)
-		os.Exit(1)
-	}
-	return &Output{
-		f: f,
-	}
+    outFile := filepath.Join(folder, filename)
+    f, err := os.OpenFile(outFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, os.ModePerm)
+    if err != nil {
+        Logger.Errorf("Failed to open file to write Output: %s", err)
+        os.Exit(1)
+    }
+    return &Output{
+        f: f,
+    }
 }
 
 func (o *Output) WriteToFile(msg string) {
-	o.mu.Lock()
-	defer o.mu.Unlock()
-	_, _ = o.f.WriteString(msg + "\n")
+    o.mu.Lock()
+    defer o.mu.Unlock()
+    _, _ = o.f.WriteString(msg + "\n")
 }
 
 func (o *Output) Close() {
-	o.f.Close()
+    o.f.Close()
 }
diff --git a/core/robots.go b/core/robots.go
index 3b0cd92..8c9e137 100644
--- a/core/robots.go
+++ b/core/robots.go
@@ -1,48 +1,52 @@
 package core
 
 import (
-	"fmt"
-	"github.com/gocolly/colly/v2"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-	"regexp"
-	"strings"
-	"sync"
+    "fmt"
+    "io/ioutil"
+    "net/http"
+    "net/url"
+    "regexp"
+    "strings"
+    "sync"
+
+    "github.com/gocolly/colly/v2"
 )
 
-func ParseRobots(site *url.URL, output *Output, c *colly.Collector, wg *sync.WaitGroup) {
-	defer wg.Done()
-	robotsURL := site.String() + "/robots.txt"
+func ParseRobots(site *url.URL, quiet bool, output *Output, c *colly.Collector, wg *sync.WaitGroup) {
+    defer wg.Done()
+    robotsURL := site.String() + "/robots.txt"
+
+    resp, err := http.Get(robotsURL)
+    if err != nil {
+        return
+    }
+    // Close the response body to avoid leaking the connection.
+    defer resp.Body.Close()
+    if resp.StatusCode == 200 {
+        Logger.Infof("Found robots.txt: %s", robotsURL)
+        body, err := ioutil.ReadAll(resp.Body)
+        if err != nil {
+            return
+        }
+        lines := strings.Split(string(body), "\n")
 
-	resp, err := http.Get(robotsURL)
-	if err != nil {
-		return
-	}
-	if resp.StatusCode == 200 {
-		Logger.Infof("Found robots.txt: %s", robotsURL)
-		body, err := ioutil.ReadAll(resp.Body)
-		if err != nil {
-			return
-		}
-		lines := strings.Split(string(body), "\n")
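+        // Strip everything up to "Allow: " / "Disallow: " so that only the path remains.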
+        var re = regexp.MustCompile(".*llow: ")
+        for _, line := range lines {
+            if strings.Contains(line, "llow: ") {
+                url := re.ReplaceAllString(line, "")
+                url = FixUrl(site, url)
+                if url == "" {
+                    continue
+                }
+                outputFormat := fmt.Sprintf("[robots] - %s", url)
+                if !quiet {
+                    fmt.Println(outputFormat)
+                }
 
-		var re = regexp.MustCompile(".*llow: ")
-		for _, line := range lines {
-			if strings.Contains(line, "llow: ") {
-				url := re.ReplaceAllString(line, "")
-				url = FixUrl(url, site)
-				if url == "" {
-					continue
-				}
-				outputFormat := fmt.Sprintf("[robots] - %s", url)
-				fmt.Println(outputFormat)
-				if output != nil {
-					output.WriteToFile(outputFormat)
-				}
-				_ = c.Visit(url)
-			}
-		}
-	}
+                if output != nil {
+                    output.WriteToFile(outputFormat)
+                }
+                _ = c.Visit(url)
+            }
+        }
+    }
 
 }
diff --git a/core/sitemap.go b/core/sitemap.go
index 248b513..b29a6dd 100644
--- a/core/sitemap.go
+++ b/core/sitemap.go
@@ -1,31 +1,34 @@
 package core
 
 import (
-	"fmt"
-	"github.com/gocolly/colly/v2"
-	sitemap "github.com/oxffaa/gopher-parse-sitemap"
-	"net/url"
-	"sync"
+    "fmt"
+    "net/url"
+    "sync"
+
+    "github.com/gocolly/colly/v2"
+    sitemap "github.com/oxffaa/gopher-parse-sitemap"
 )
 
-func ParseSiteMap(site *url.URL, output *Output, c *colly.Collector, wg *sync.WaitGroup) {
-	defer wg.Done()
-	sitemapUrls := []string{"/sitemap.xml", "/sitemap_news.xml", "/sitemap_index.xml", "/sitemap-index.xml", "/sitemapindex.xml",
-		"/sitemap-news.xml", "/post-sitemap.xml", "/page-sitemap.xml", "/portfolio-sitemap.xml", "/home_slider-sitemap.xml", "/category-sitemap.xml",
-		"/author-sitemap.xml"}
+func ParseSiteMap(site *url.URL, quiet bool, output *Output, c *colly.Collector, wg *sync.WaitGroup) {
+    defer wg.Done()
+    sitemapUrls := []string{"/sitemap.xml", "/sitemap_news.xml", "/sitemap_index.xml", "/sitemap-index.xml", "/sitemapindex.xml",
+        "/sitemap-news.xml", "/post-sitemap.xml", "/page-sitemap.xml", "/portfolio-sitemap.xml", "/home_slider-sitemap.xml", "/category-sitemap.xml",
+        "/author-sitemap.xml"}
 
-	for _, path := range sitemapUrls {
-		// Ignore error when that not valid sitemap.xml path
-		Logger.Infof("Trying to find %s", site.String()+path)
-		_ = sitemap.ParseFromSite(site.String()+path, func(entry sitemap.Entry) error {
-			outputFormat := fmt.Sprintf("[sitemap] - %s", entry.GetLocation())
-			fmt.Println(outputFormat)
-			if output != nil {
-				output.WriteToFile(outputFormat)
-			}
-			_ = c.Visit(entry.GetLocation())
-			return nil
-		})
-	}
+    for _, path := range sitemapUrls {
+        // Ignore errors from paths that are not valid sitemap locations
+        Logger.Infof("Trying to find %s", site.String()+path)
+        _ = sitemap.ParseFromSite(site.String()+path, func(entry sitemap.Entry) error {
+            outputFormat := fmt.Sprintf("[sitemap] - %s", entry.GetLocation())
+            if !quiet {
+                fmt.Println(outputFormat)
+            }
+            if output != nil {
+                output.WriteToFile(outputFormat)
+            }
+            _ = c.Visit(entry.GetLocation())
+            return nil
+        })
+    }
 
 }
diff --git a/core/utils.go b/core/utils.go
index 5cf1853..098303a 100644
--- a/core/utils.go
+++ b/core/utils.go
@@ -1,154 +1,203 @@
 package core
 
 import (
-	"fmt"
-	"golang.org/x/net/publicsuffix"
-	"net/http"
-	"net/url"
-	"path"
-	"regexp"
-	"strings"
+    "bufio"
+    "fmt"
+    "net/http"
+    "net/url"
+    "os"
+    "path"
+    "regexp"
+    "strings"
+
+    "github.com/mitchellh/go-homedir"
+    "golang.org/x/net/publicsuffix"
 )
 
 var nameStripRE = regexp.MustCompile("(?i)^((20)|(25)|(2b)|(2f)|(3d)|(3a)|(40))+")
 
 func GetRawCookie(cookies []*http.Cookie) string {
-	var rawCookies []string
-	for _, c := range cookies {
-		e := fmt.Sprintf("%s=%s", c.Name, c.Value)
-		rawCookies = append(rawCookies, e)
-	}
-	return strings.Join(rawCookies, "; ")
+    var rawCookies []string
+    for _, c := range cookies {
+        e := fmt.Sprintf("%s=%s", c.Name, c.Value)
+        rawCookies = append(rawCookies, e)
+    }
+    return strings.Join(rawCookies, "; ")
 }
 
 func GetDomain(site *url.URL) string {
-	domain, err := publicsuffix.EffectiveTLDPlusOne(site.Hostname())
-	if err != nil {
-		return ""
-	}
-	return domain
+    domain, err := publicsuffix.EffectiveTLDPlusOne(site.Hostname())
+    if err != nil {
+        return ""
+    }
+    return domain
 }
 
-func FixUrl(url string, site *url.URL) string {
-	var newUrl string
-	if strings.HasPrefix(url, "//") {
-		// //google.com/example.php
-		newUrl = site.Scheme + ":" + url
-
-	} else if strings.HasPrefix(url, "http") {
-		// http://google.com || https://google.com
-		newUrl = url
-
-	} else if !strings.HasPrefix(url, "//") {
-		if strings.HasPrefix(url, "/") {
-			// Ex: /?thread=10
-			newUrl = site.Scheme + "://" + site.Host + url
-
-		} else {
-			if strings.HasPrefix(url, ".") {
-				if strings.HasPrefix(url, "..") {
-					newUrl = site.Scheme + "://" + site.Host + url[2:]
-				} else {
-					newUrl = site.Scheme + "://" + site.Host + url[1:]
-				}
-			} else {
-				// "console/test.php"
-				newUrl = site.Scheme + "://" + site.Host + "/" + url
-			}
-		}
-	}
-	return newUrl
+
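+// FixUrl resolves nextLoc against mainSite using standard URL reference
+// resolution, handling absolute URLs, protocol-relative URLs ("//host/path"),
+// and relative paths alike. For example, with mainSite "https://example.com/a/"
+// and nextLoc "../b", the result is "https://example.com/b".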
+func FixUrl(mainSite *url.URL, nextLoc string) string {
+    nextLocUrl, err := url.Parse(nextLoc)
+    if err != nil {
+        return ""
+    }
+    return mainSite.ResolveReference(nextLocUrl).String()
 }
 
 func Unique(intSlice []string) []string {
-	keys := make(map[string]bool)
-	var list []string
-	for _, entry := range intSlice {
-		if _, value := keys[entry]; !value {
-			keys[entry] = true
-			list = append(list, entry)
-		}
-	}
-	return list
+    keys := make(map[string]bool)
+    var list []string
+    for _, entry := range intSlice {
+        if _, value := keys[entry]; !value {
+            keys[entry] = true
+            list = append(list, entry)
+        }
+    }
+    return list
 }
 
 func LoadCookies(rawCookie string) []*http.Cookie {
-	httpCookies := []*http.Cookie{}
-	cookies := strings.Split(rawCookie, ";")
-	for _, cookie := range cookies {
-		cookieArgs := strings.SplitN(cookie, "=", 2)
-		if len(cookieArgs) > 2 {
-			continue
-		}
-
-		ck := &http.Cookie{Name: strings.TrimSpace(cookieArgs[0]), Value: strings.TrimSpace(cookieArgs[1])}
-		httpCookies = append(httpCookies, ck)
-	}
-	return httpCookies
+    httpCookies := []*http.Cookie{}
+    cookies := strings.Split(rawCookie, ";")
+    for _, cookie := range cookies {
+        cookieArgs := strings.SplitN(cookie, "=", 2)
+        if len(cookieArgs) > 2 {
+            continue
+        }
+
+        ck := &http.Cookie{Name: strings.TrimSpace(cookieArgs[0]), Value: strings.TrimSpace(cookieArgs[1])}
+        httpCookies = append(httpCookies, ck)
+    }
+    return httpCookies
 }
 
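+// GetExtType returns the extension of a URL's path, ignoring any query
+// string, e.g. GetExtType("https://h/img.jpg?v=1") returns ".jpg".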
 func GetExtType(rawUrl string) string {
-	u, err := url.Parse(rawUrl)
-	if err != nil {
-		return ""
-	}
-	return path.Ext(u.Path)
+    u, err := url.Parse(rawUrl)
+    if err != nil {
+        return ""
+    }
+    return path.Ext(u.Path)
 }
 
 func CleanSubdomain(s string) string {
-	s = strings.TrimSpace(strings.ToLower(s))
-	s = strings.TrimPrefix(s, "*.")
-	//s = strings.Trim("u00","")
-	s = cleanName(s)
-	return s
+    s = strings.TrimSpace(strings.ToLower(s))
+    s = strings.TrimPrefix(s, "*.")
+    // s = strings.Trim("u00","")
+    s = cleanName(s)
+    return s
 }
 
 // Clean up the names scraped from the web.
 // Get from Amass
 func cleanName(name string) string {
-	for {
-		if i := nameStripRE.FindStringIndex(name); i != nil {
-			name = name[i[1]:]
-		} else {
-			break
-		}
-	}
-
-	name = strings.Trim(name, "-")
-	// Remove dots at the beginning of names
-	if len(name) > 1 && name[0] == '.' {
-		name = name[1:]
-	}
-	return name
+    for {
+        if i := nameStripRE.FindStringIndex(name); i != nil {
+            name = name[i[1]:]
+        } else {
+            break
+        }
+    }
+
+    name = strings.Trim(name, "-")
+    // Remove dots at the beginning of names
+    if len(name) > 1 && name[0] == '.' {
+        name = name[1:]
+    }
+    return name
 }
 
 func FilterNewLines(s string) string {
-	return regexp.MustCompile(`[\t\r\n]+`).ReplaceAllString(strings.TrimSpace(s), " ")
+    return regexp.MustCompile(`[\t\r\n]+`).ReplaceAllString(strings.TrimSpace(s), " ")
 }
 
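+// DecodeChars best-effort decodes percent-encoding plus a few common JSON
+// escapes, e.g. DecodeChars(`%2Fpath\u002fmore`) yields "/path/more".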
 func DecodeChars(s string) string {
-	source, err := url.QueryUnescape(s)
-	if err == nil {
-		s = source
-	}
-
-	// In case json encoded chars
-	replacer := strings.NewReplacer(
-		`\u002f`, "/",
-		`\U002F`, "/",
-		`\u002F`, "/",
-		`\u0026`, "&",
-		`\U0026`, "&",
-	)
-	s = replacer.Replace(s)
-	return s
+    source, err := url.QueryUnescape(s)
+    if err == nil {
+        s = source
+    }
+
+    // In case of JSON-encoded chars; hex digits may be upper- or lower-case
+    replacer := strings.NewReplacer(
+        `\u002f`, "/",
+        `\u002F`, "/",
+        `\u0026`, "&",
+    )
+    s = replacer.Replace(s)
+    return s
 }
 
 func InScope(u *url.URL, regexps []*regexp.Regexp) bool {
-	for _, r := range regexps {
-		if r.MatchString(u.Hostname()) {
-			return true
-		}
-	}
-	return false
+    for _, r := range regexps {
+        if r.MatchString(u.Hostname()) {
+            return true
+        }
+    }
+    return false
+}
+
+// NormalizePath expands a leading "~" to the current user's home directory.
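+// A hedged example (the expanded prefix is machine-dependent):
+//     NormalizePath("~/wordlists/common.txt") // e.g. "/home/user/wordlists/common.txt"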
+func NormalizePath(path string) string {
+    if strings.HasPrefix(path, "~") {
+        path, _ = homedir.Expand(path)
+    }
+    return path
+}
+
+// ReadingLines reads a file and returns its non-empty, trimmed lines as []string.
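+// On open or scan errors it simply returns whatever lines were collected.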
+func ReadingLines(filename string) []string {
+    var result []string
+    if strings.HasPrefix(filename, "~") {
+        filename, _ = homedir.Expand(filename)
+    }
+    file, err := os.Open(filename)
+    if err != nil {
+        return result
+    }
+    defer file.Close()
+
+    scanner := bufio.NewScanner(file)
+    for scanner.Scan() {
+        val := strings.TrimSpace(scanner.Text())
+        if val == "" {
+            continue
+        }
+        result = append(result, val)
+    }
+
+    // A scanner error is not fatal here; fall through and return what was read.
+    return result
 }
diff --git a/core/utils_test.go b/core/utils_test.go
index 814ab3b..8776685 100644
--- a/core/utils_test.go
+++ b/core/utils_test.go
@@ -1,8 +1,14 @@
 package core
 
-import "testing"
+import (
+    "net/url"
+    "testing"
+)
 
 func TestGetExtType(t *testing.T) {
-	url := "https://domain.com/data/avatars/m/123/12312312.jpg?1562846649"
-	t.Log(GetExtType(url))
+    rawUrl := "https://domain.com/data/avatars/m/123/12312312.jpg?1562846649"
+    if got := GetExtType(rawUrl); got != ".jpg" {
+        t.Errorf("GetExtType() = %q, want %q", got, ".jpg")
+    }
+}
+
+func TestFixUrl(t *testing.T) {
+    base, _ := url.Parse("https://example.com/dir/")
+    if got := FixUrl(base, "../other"); got != "https://example.com/other" {
+        t.Errorf("FixUrl() = %q, want %q", got, "https://example.com/other")
+    }
+}
diff --git a/core/version.go b/core/version.go
index 45c5f73..5821fd9 100644
--- a/core/version.go
+++ b/core/version.go
@@ -1,7 +1,7 @@
 package core
 
 const (
-	CLIName = "gospider"
-	AUTHOR  = "@theblackturtle"
-	VERSION = "v1.1.0"
+    CLIName = "gospider"
+    AUTHOR  = "@thebl4ckturtle & @j3ssiejjj"
+    VERSION = "v1.1.4"
 )
diff --git a/go.mod b/go.mod
index 134dc2b..b8dfc19 100644
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,17 @@
-module github.com/theblackturtle/gospider
+module github.com/jaeles-project/gospider
 
-go 1.13
+go 1.15
 
 require (
-	github.com/gocolly/colly/v2 v2.0.1
+	github.com/gocolly/colly/v2 v2.1.0
+	github.com/mattn/go-colorable v0.1.7 // indirect
+	github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
+	github.com/mitchellh/go-homedir v1.1.0
+	github.com/onsi/ginkgo v1.14.2 // indirect
+	github.com/onsi/gomega v1.10.4 // indirect
 	github.com/oxffaa/gopher-parse-sitemap v0.0.0-20191021113419-005d2eb1def4
-	github.com/sirupsen/logrus v1.4.2
-	github.com/spf13/cobra v0.0.5
-	golang.org/x/net v0.0.0-20200202094626-16171245cfb2
+	github.com/sirupsen/logrus v1.6.0
+	github.com/spf13/cobra v1.0.0
+	github.com/x-cray/logrus-prefixed-formatter v0.5.2
+	golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb
 )
diff --git a/go.sum b/go.sum
index 4aa7fc5..16bf75b 100644
--- a/go.sum
+++ b/go.sum
@@ -1,90 +1,274 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
-github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
-github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
-github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
-github.com/antchfx/htmlquery v1.0.0 h1:O5IXz8fZF3B3MW+B33MZWbTHBlYmcfw0BAxgErHuaMA=
-github.com/antchfx/htmlquery v1.0.0/go.mod h1:MS9yksVSQXls00iXkiMqXr0J+umL/AmxXKuP28SUJM8=
-github.com/antchfx/xmlquery v1.0.0 h1:YuEPqexGG2opZKNc9JU3Zw6zFXwC47wNcy6/F8oKsrM=
-github.com/antchfx/xmlquery v1.0.0/go.mod h1:/+CnyD/DzHRnv2eRxrVbieRU/FIF6N0C+7oTtyUtCKk=
-github.com/antchfx/xpath v1.0.0 h1:Q5gFgh2O40VTSwMOVbFE7nFNRBu3tS21Tn0KAWeEjtk=
-github.com/antchfx/xpath v1.0.0/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
+github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
+github.com/PuerkitoBio/goquery v1.5.1 h1:PSPBGne8NIUWw+/7vFBV+kG2J/5MOjbzc7154OaKCSE=
+github.com/PuerkitoBio/goquery v1.5.1/go.mod h1:GsLWisAFVj4WgDibEWF4pvYnkVQBpKBKeU+7zCJoLcc=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/andybalholm/cascadia v1.1.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
+github.com/andybalholm/cascadia v1.2.0 h1:vuRCkM5Ozh/BfmsaTm26kbjm0mIOM3yS5Ek/F5h18aE=
+github.com/andybalholm/cascadia v1.2.0/go.mod h1:YCyR8vOZT9aZ1CHEd8ap0gMVm2aFgxBp0T0eFw1RUQY=
+github.com/antchfx/htmlquery v1.2.3 h1:sP3NFDneHx2stfNXCKbhHFo8XgNjCACnU/4AO5gWz6M=
+github.com/antchfx/htmlquery v1.2.3/go.mod h1:B0ABL+F5irhhMWg54ymEZinzMSi0Kt3I2if0BLYa3V0=
+github.com/antchfx/xmlquery v1.2.4 h1:T/SH1bYdzdjTMoz2RgsfVKbM5uWh3gjDYYepFqQmFv4=
+github.com/antchfx/xmlquery v1.2.4/go.mod h1:KQQuESaxSlqugE2ZBcM/qn+ebIpt+d+4Xx7YcSGAIrM=
+github.com/antchfx/xpath v1.1.6/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
+github.com/antchfx/xpath v1.1.8 h1:PcL6bIX42Px5usSx6xRYw/wjB3wYGkj0MJ9MBzEKVgk=
+github.com/antchfx/xpath v1.1.8/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
+github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
 github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
-github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
 github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
-github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
+github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
+github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
+github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
+github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
+github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
+github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
+github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
 github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
 github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI=
-github.com/gocolly/colly/v2 v2.0.1 h1:GGPzBEdrEsavhzVK00FQXMMHBHRpwrbbCCcEKM/0Evw=
-github.com/gocolly/colly/v2 v2.0.1/go.mod h1:ePrRZlJcLTU2C/f8pJzXfkdBtBDHL5hOaKLcBoiJcq8=
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
+github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA=
+github.com/gocolly/colly/v2 v2.1.0 h1:k0DuZkDoCsx51bKpRJNEmcxcp+W5N8ziuwGaSDuFoGs=
+github.com/gocolly/colly/v2 v2.1.0/go.mod h1:I2MuhsLjQ+Ex+IzK3afNS8/1qP3AedHOusRPcRdC5o0=
+github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
+github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
+github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
+github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
 github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
 github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
 github.com/jawher/mow.cli v1.1.0/go.mod h1:aNaQlc7ozF3vw6IJ2dHjp2ZFiA4ozMIYY6PyuRJwlUg=
+github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
+github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
 github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o=
 github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1 h1:mweAR1A6xJ3oS2pRaGiHgQ4OO8tzTaLawm8vnODuwDk=
+github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
+github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
+github.com/mattn/go-colorable v0.1.7 h1:bQGKb3vps/j0E9GfJQ03JyhRuxsvdAanXlT9BTw3mdw=
+github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
+github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
+github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
+github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
+github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQE9x6ikvDFZS2mDVS3drnohI=
+github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
+github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78=
+github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
+github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
+github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M=
+github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
+github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
+github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
+github.com/onsi/gomega v1.10.4 h1:NiTx7EEvBzu9sFOD1zORteLSt3o8gnlvZZwSE9TnY9U=
+github.com/onsi/gomega v1.10.4/go.mod h1:g/HbgYopi++010VEqkFgJHKC09uJiW9UkXvMUuKHUCQ=
 github.com/oxffaa/gopher-parse-sitemap v0.0.0-20191021113419-005d2eb1def4 h1:2vmb32OdDhjZf2ETGDlr9n8RYXx7c+jXPxMiPbwnA+8=
 github.com/oxffaa/gopher-parse-sitemap v0.0.0-20191021113419-005d2eb1def4/go.mod h1:2JQx4jDHmWrbABvpOayg/+OTU6ehN0IyK2EHzceXpJo=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
+github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
+github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso=
+github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
+github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
+github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
+github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
+github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
+github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
+github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
+github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
 github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
 github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
-github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
-github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
+github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
+github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I=
+github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
+github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
+github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
-github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
+github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8=
+github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
 github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
-github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
+github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
 github.com/temoto/robotstxt v1.1.1 h1:Gh8RCs8ouX3hRSxxK7B1mO5RFByQ4CmJZDwgom++JaA=
 github.com/temoto/robotstxt v1.1.1/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo=
-github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
+github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
+github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
+github.com/x-cray/logrus-prefixed-formatter v0.5.2 h1:00txxvfBM9muc0jiLIEAkAcIMJzfthRT6usrui8uGmg=
+github.com/x-cray/logrus-prefixed-formatter v0.5.2/go.mod h1:2duySbKsL6M18s5GU7VPsoEPHyzalCE06qoARUCeBBE=
+github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
 github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
-golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
+go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
+go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
+go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
 golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
 golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
-golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
 golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb h1:eBmm0M9fYhWpKZLjQUUKka/LtIxf46G4fxeEz5KJr9U=
+golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190606165138-5da285871e9c h1:+EXw7AwNOKzPFXMZ1yNjO40aWCh3PIquJB2fYlv9wcs=
-golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
 golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
 golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
-google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
-google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuhc=
+google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0 h1:UhZDfRO8JRQru4/+LlLE0BRKGF8L+PICnvYZmx/fEGA=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
diff --git a/main.go b/main.go
index 46bf1e6..674db50 100644
--- a/main.go
+++ b/main.go
@@ -1,197 +1,223 @@
 package main
 
 import (
-	"bufio"
-	"fmt"
-	"github.com/theblackturtle/gospider/core"
-	"io/ioutil"
-	"net/url"
-
-	"os"
-	"strings"
-	"sync"
-
-	"github.com/sirupsen/logrus"
-	"github.com/spf13/cobra"
+    "bufio"
+    "fmt"
+    "io/ioutil"
+    "net/url"
+    "os"
+    "strings"
+    "sync"
+
+    "github.com/jaeles-project/gospider/core"
+
+    "github.com/sirupsen/logrus"
+    "github.com/spf13/cobra"
 )
 
 var commands = &cobra.Command{
-	Use:  core.CLIName,
-	Long: fmt.Sprintf("A Simple Web Spider - %v by %v", core.VERSION, core.AUTHOR),
-	Run:  run,
+    Use:  core.CLIName,
+    Long: fmt.Sprintf("Fast web spider written in Go - %v by %v", core.VERSION, core.AUTHOR),
+    Run:  run,
 }
 
 func main() {
-	commands.Flags().StringP("site", "s", "", "Site to crawl")
-	commands.Flags().StringP("sites", "S", "", "Site list to crawl")
-	commands.Flags().StringP("proxy", "p", "", "Proxy (Ex: http://127.0.0.1:8080)")
-	commands.Flags().StringP("output", "o", "", "Output folder")
-	commands.Flags().StringP("user-agent", "u", "web", "User Agent to use\n\tweb: random web user-agent\n\tmobi: random mobile user-agent\n\tor you can set your special user-agent")
-	commands.Flags().StringP("cookie", "", "", "Cookie to use (testA=a; testB=b)")
-	commands.Flags().StringArrayP("header", "H", []string{}, "Header to use (Use multiple flag to set multiple header)")
-	commands.Flags().StringP("burp", "", "", "Load headers and cookie from burp raw http request")
-	commands.Flags().StringP("blacklist", "", "", "Blacklist URL Regex")
-
-	commands.Flags().IntP("threads", "t", 1, "Number of threads (Run sites in parallel)")
-	commands.Flags().IntP("concurrent", "c", 5, "The number of the maximum allowed concurrent requests of the matching domains")
-	commands.Flags().IntP("depth", "d", 1, "MaxDepth limits the recursion depth of visited URLs. (Set it to 0 for infinite recursion)")
-	commands.Flags().IntP("delay", "k", 0, "Delay is the duration to wait before creating a new request to the matching domains (second)")
-	commands.Flags().IntP("random-delay", "K", 0, "RandomDelay is the extra randomized duration to wait added to Delay before creating a new request (second)")
-	commands.Flags().IntP("timeout", "m", 10, "Request timeout (second)")
-
-	commands.Flags().BoolP("sitemap", "", false, "Try to crawl sitemap.xml")
-	commands.Flags().BoolP("robots", "", true, "Try to crawl robots.txt")
-	commands.Flags().BoolP("other-source", "a", false, "Find URLs from 3rd party (Archive.org, CommonCrawl.org, VirusTotal.com)")
-	commands.Flags().BoolP("include-subs", "w", false, "Include subdomains crawled from 3rd party. Default is main domain")
-	commands.Flags().BoolP("include-other-source", "r", false, "Also include other-source's urls (still crawl and request)")
-
-	commands.Flags().BoolP("debug", "", false, "Turn on debug mode")
-	commands.Flags().BoolP("verbose", "v", false, "Turn on verbose")
-	commands.Flags().BoolP("no-redirect", "", false, "Disable redirect")
-	commands.Flags().BoolP("version", "", false, "Check version")
-
-	commands.Flags().SortFlags = false
-	if err := commands.Execute(); err != nil {
-		core.Logger.Error(err)
-		os.Exit(1)
-	}
+    commands.Flags().StringP("site", "s", "", "Site to crawl")
+    commands.Flags().StringP("sites", "S", "", "Site list to crawl")
+    commands.Flags().StringP("proxy", "p", "", "Proxy (Ex: http://127.0.0.1:8080)")
+    commands.Flags().StringP("output", "o", "", "Output folder")
+    commands.Flags().StringP("user-agent", "u", "web", "User Agent to use\n\tweb: random web user-agent\n\tmobi: random mobile user-agent\n\tor set your own custom user-agent")
+    commands.Flags().StringP("cookie", "", "", "Cookie to use (testA=a; testB=b)")
+    commands.Flags().StringArrayP("header", "H", []string{}, "Header to use (use the flag multiple times to set multiple headers)")
+    commands.Flags().StringP("burp", "", "", "Load headers and cookies from a raw Burp Suite HTTP request")
+    commands.Flags().StringP("blacklist", "", "", "Blacklist URL Regex")
+    commands.Flags().StringP("whitelist", "", "", "Whitelist URL Regex")
+    commands.Flags().StringP("whitelist-domain", "", "", "Whitelist Domain")
+
+    commands.Flags().IntP("threads", "t", 1, "Number of threads (Run sites in parallel)")
+    commands.Flags().IntP("concurrent", "c", 5, "Maximum number of concurrent requests allowed for the matching domains")
+    commands.Flags().IntP("depth", "d", 1, "MaxDepth limits the recursion depth of visited URLs. (Set it to 0 for infinite recursion)")
+    commands.Flags().IntP("delay", "k", 0, "Delay is the duration to wait before creating a new request to the matching domains (second)")
+    commands.Flags().IntP("random-delay", "K", 0, "RandomDelay is the extra randomized duration to wait added to Delay before creating a new request (second)")
+    commands.Flags().IntP("timeout", "m", 10, "Request timeout (second)")
+
+    commands.Flags().BoolP("base", "B", false, "Disable linkfinder, robots.txt and 3rd-party sources; crawl the raw HTML content only")
+    commands.Flags().BoolP("js", "", true, "Enable linkfinder in JavaScript files")
+    commands.Flags().BoolP("sitemap", "", false, "Try to crawl sitemap.xml")
+    commands.Flags().BoolP("robots", "", true, "Try to crawl robots.txt")
+    commands.Flags().BoolP("other-source", "a", false, "Find URLs from 3rd party (Archive.org, CommonCrawl.org, VirusTotal.com, AlienVault.com)")
+    commands.Flags().BoolP("include-subs", "w", false, "Include subdomains crawled from 3rd party. Default is main domain")
+    commands.Flags().BoolP("include-other-source", "r", false, "Also include 3rd-party URLs in the output (they are still crawled and requested)")
+
+    commands.Flags().BoolP("debug", "", false, "Turn on debug mode")
+    commands.Flags().BoolP("verbose", "v", false, "Turn on verbose")
+    commands.Flags().BoolP("quiet", "q", false, "Suppress all the output and only show URLs")
+    commands.Flags().BoolP("no-redirect", "", false, "Disable redirect")
+    commands.Flags().BoolP("version", "", false, "Check version")
+
+    commands.Flags().SortFlags = false
+    if err := commands.Execute(); err != nil {
+        core.Logger.Error(err)
+        os.Exit(1)
+    }
 }
 
-func run(cmd *cobra.Command, args []string) {
-	if cmd.Flags().NFlag() == 0 {
-		cmd.HelpFunc()(cmd, args)
-		os.Exit(1)
-	}
-
-	version, _ := cmd.Flags().GetBool("version")
-	if version {
-		fmt.Printf("Version: %s\n", core.VERSION)
-		os.Exit(0)
-	}
-
-	isDebug, _ := cmd.Flags().GetBool("debug")
-	if isDebug {
-		core.Logger.SetLevel(logrus.DebugLevel)
-	} else {
-		core.Logger.SetLevel(logrus.InfoLevel)
-	}
-
-	verbose, _ := cmd.Flags().GetBool("verbose")
-	if !verbose && !isDebug {
-		core.Logger.SetOutput(ioutil.Discard)
-	}
-
-	// Create output folder when save file option selected
-	outputFolder, _ := cmd.Flags().GetString("output")
-	if outputFolder != "" {
-		if _, err := os.Stat(outputFolder); os.IsNotExist(err) {
-			_ = os.Mkdir(outputFolder, os.ModePerm)
-		}
-	}
-
-	// Parse sites input
-	var siteList []string
-	siteInput, _ := cmd.Flags().GetString("site")
-	if siteInput != "" {
-		siteList = append(siteList, siteInput)
-	}
-	sitesListInput, _ := cmd.Flags().GetString("sites")
-	if sitesListInput != "" {
-		sitesFile, err := os.Open(sitesListInput)
-		if err != nil {
-			core.Logger.Error(err)
-			os.Exit(1)
-		}
-		defer sitesFile.Close()
-
-		sc := bufio.NewScanner(sitesFile)
-		for sc.Scan() {
-			line := strings.TrimSpace(sc.Text())
-			if err := sc.Err(); err == nil && line != "" {
-				siteList = append(siteList, line)
-			}
-		}
-	}
-
-	// Check again to make sure at least one site in slice
-	if len(siteList) == 0 {
-		core.Logger.Info("No site in list. Please check your site input again")
-		os.Exit(1)
-	}
-
-	threads, _ := cmd.Flags().GetInt("threads")
-	sitemap, _ := cmd.Flags().GetBool("sitemap")
-	robots, _ := cmd.Flags().GetBool("robots")
-	otherSource, _ := cmd.Flags().GetBool("other-source")
-	includeSubs, _ := cmd.Flags().GetBool("include-subs")
-	includeOtherSourceResult, _ := cmd.Flags().GetBool("include-other-source")
-
-	var wg sync.WaitGroup
-	inputChan := make(chan string, threads)
-	for i := 0; i < threads; i++ {
-		wg.Add(1)
-		go func() {
-			defer wg.Done()
-			for rawSite := range inputChan {
-				site, err := url.Parse(rawSite)
-				if err != nil {
-					logrus.Errorf("Failed to parse %s: %s", rawSite, err)
-					continue
-				}
-
-				var siteWg sync.WaitGroup
-				crawler := core.NewCrawler(site, cmd)
-
-				siteWg.Add(1)
-				go func() {
-					crawler.Start()
-					defer siteWg.Done()
-				}()
-
-				// Brute force Sitemap path
-				if sitemap {
-					siteWg.Add(1)
-					go core.ParseSiteMap(site, crawler.Output, crawler.C, &siteWg)
-				}
-
-				// Find Robots.txt
-				if robots {
-					siteWg.Add(1)
-					go core.ParseRobots(site, crawler.Output, crawler.C, &siteWg)
-				}
-
-				if otherSource {
-					siteWg.Add(1)
-					go func() {
-						defer siteWg.Done()
-						urls := core.OtherSources(site.Hostname(), includeSubs)
-						for _, url := range urls {
-							url = strings.TrimSpace(url)
-							if len(url) == 0 {
-								continue
-							}
-							outputFormat := fmt.Sprintf("[other-sources] - %s", url)
-							if includeOtherSourceResult {
-								fmt.Println(outputFormat)
-								if crawler.Output != nil {
-									crawler.Output.WriteToFile(outputFormat)
-								}
-							}
-							_ = crawler.C.Visit(url)
-						}
-					}()
-				}
-				siteWg.Wait()
-				crawler.C.Wait()
-				crawler.LinkFinderCollector.Wait()
-			}
-		}()
-	}
-
-	for _, site := range siteList {
-		inputChan <- site
-	}
-	close(inputChan)
-	wg.Wait()
-	core.Logger.Info("Done!!!")
+func run(cmd *cobra.Command, _ []string) {
+    version, _ := cmd.Flags().GetBool("version")
+    if version {
+        fmt.Printf("Version: %s\n", core.VERSION)
+        Examples()
+        os.Exit(0)
+    }
+
+    isDebug, _ := cmd.Flags().GetBool("debug")
+    if isDebug {
+        core.Logger.SetLevel(logrus.DebugLevel)
+    } else {
+        core.Logger.SetLevel(logrus.InfoLevel)
+    }
+
+    verbose, _ := cmd.Flags().GetBool("verbose")
+    if !verbose && !isDebug {
+        core.Logger.SetOutput(ioutil.Discard)
+    }
+
+    // Create output folder when save file option selected
+    outputFolder, _ := cmd.Flags().GetString("output")
+    if outputFolder != "" {
+        if _, err := os.Stat(outputFolder); os.IsNotExist(err) {
+            _ = os.Mkdir(outputFolder, os.ModePerm)
+        }
+    }
+
+    // Parse sites input
+    var siteList []string
+    siteInput, _ := cmd.Flags().GetString("site")
+    if siteInput != "" {
+        siteList = append(siteList, siteInput)
+    }
+    sitesListInput, _ := cmd.Flags().GetString("sites")
+    if sitesListInput != "" {
+        // parse sites from the given file
+        sitesFile := core.ReadingLines(sitesListInput)
+        if len(sitesFile) > 0 {
+            siteList = append(siteList, sitesFile...)
+        }
+    }
+
+    stat, _ := os.Stdin.Stat()
+    // detect whether anything was piped in via stdin
+    if (stat.Mode() & os.ModeCharDevice) == 0 {
+        sc := bufio.NewScanner(os.Stdin)
+        for sc.Scan() {
+            target := strings.TrimSpace(sc.Text())
+            if err := sc.Err(); err == nil && target != "" {
+                siteList = append(siteList, target)
+            }
+        }
+    }
+
+    // Check again to make sure at least one site in slice
+    if len(siteList) == 0 {
+        core.Logger.Info("No site in list. Please check your site input again")
+        os.Exit(1)
+    }
+
+    threads, _ := cmd.Flags().GetInt("threads")
+    sitemap, _ := cmd.Flags().GetBool("sitemap")
+    linkfinder, _ := cmd.Flags().GetBool("js")
+    quiet, _ := cmd.Flags().GetBool("quiet")
+    robots, _ := cmd.Flags().GetBool("robots")
+    otherSource, _ := cmd.Flags().GetBool("other-source")
+    includeSubs, _ := cmd.Flags().GetBool("include-subs")
+    includeOtherSourceResult, _ := cmd.Flags().GetBool("include-other-source")
+    // --base disables all the optional sources above and crawls HTML content only
+    base, _ := cmd.Flags().GetBool("base")
+    if base {
+        linkfinder = false
+        robots = false
+        otherSource = false
+        includeSubs = false
+        includeOtherSourceResult = false
+    }
+
+    var wg sync.WaitGroup
+    inputChan := make(chan string, threads)
+    for i := 0; i < threads; i++ {
+        wg.Add(1)
+        go func() {
+            defer wg.Done()
+            for rawSite := range inputChan {
+                site, err := url.Parse(rawSite)
+                if err != nil {
+                    logrus.Errorf("Failed to parse %s: %s", rawSite, err)
+                    continue
+                }
+
+                var siteWg sync.WaitGroup
+
+                crawler := core.NewCrawler(site, cmd)
+                siteWg.Add(1)
+                go func() {
+                    defer siteWg.Done()
+                    crawler.Start(linkfinder)
+                }()
+
+                // Brute force Sitemap path
+                if sitemap {
+                    siteWg.Add(1)
+                    go core.ParseSiteMap(site, quiet, crawler.Output, crawler.C, &siteWg)
+                }
+
+                // Find Robots.txt
+                if robots {
+                    siteWg.Add(1)
+                    go core.ParseRobots(site, quiet, crawler.Output, crawler.C, &siteWg)
+                }
+
+                if otherSource {
+                    siteWg.Add(1)
+                    go func() {
+                        defer siteWg.Done()
+                        urls := core.OtherSources(site.Hostname(), includeSubs)
+                        for _, url := range urls {
+                            url = strings.TrimSpace(url)
+                            if len(url) == 0 {
+                                continue
+                            }
+                            outputFormat := fmt.Sprintf("[other-sources] - %s", url)
+                            if includeOtherSourceResult {
+                                if !quiet {
+                                    fmt.Println(outputFormat)
+                                }
+                                if crawler.Output != nil {
+                                    crawler.Output.WriteToFile(outputFormat)
+                                }
+                            }
+                            _ = crawler.C.Visit(url)
+                        }
+                    }()
+                }
+                siteWg.Wait()
+                crawler.C.Wait()
+                crawler.LinkFinderCollector.Wait()
+            }
+        }()
+    }
+
+    for _, site := range siteList {
+        inputChan <- site
+    }
+    close(inputChan)
+    wg.Wait()
+    core.Logger.Info("Done.")
+}
+
+func Examples() {
+    h := "\n\nExample Commands:\n"
+    h += `gospider -q -s "https://target.com/"` + "\n"
+    h += `gospider -s "https://target.com/" -o output -c 10 -d 1` + "\n"
+    h += `gospider -s "https://target.com/" -o output -c 10 -d 1 --other-source` + "\n"
+    h += `echo 'http://target.com' | gospider -o output -c 10 -d 1 --other-source` + "\n"
+    fmt.Println(h)
 }
diff --git a/stringset/set.go b/stringset/set.go
index 95a24fc..1610695 100644
--- a/stringset/set.go
+++ b/stringset/set.go
@@ -1,3 +1,6 @@
+// Copyright 2017 Jeff Foley. All rights reserved.
+// Use of this source code is governed by the Apache 2 LICENSE that can be found in the LICENSE file.
+
 package stringset
 
 import (