diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..8a7cb14
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,20 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: "[Issue] "
+labels: ''
+assignees: ''
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**Subfinder version**
+Include the version of subfinder you are using, `subfinder -version`
+
+**Complete command you used to reproduce this**
+
+
+**Screenshots**
+Add screenshots of the error for a better context.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..fd68bb1
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,14 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: "[Feature] "
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml
index 9fbedbb..93466da 100644
--- a/.github/workflows/build.yaml
+++ b/.github/workflows/build.yaml
@@ -4,11 +4,34 @@ on:
     branches:
       - master
   pull_request:
-  
-jobs:          
+
+jobs:
+  lint:
+    name: golangci-lint
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v2
+      - name: Run golangci-lint
+        uses: golangci/golangci-lint-action@v1
+        with:
+          # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
+          version: v1.29
+          args: --timeout 5m
+          working-directory: v2/cmd/subfinder/
+
+          # Optional: working directory, useful for monorepos
+          # working-directory: somedir
+
+          # Optional: golangci-lint command line arguments.
+          # args: --issues-exit-code=0
+
+          # Optional: show only new issues if it's a pull request. The default value is `false`.
+          # only-new-issues: true
+
   build:
     name: Build
-    runs-on: ubuntu-latest 
+    runs-on: ubuntu-latest
     steps:
       - name: Set up Go
         uses: actions/setup-go@v2
@@ -20,8 +43,8 @@ jobs:
 
       - name: Test
         run: go test .
-        working-directory: cmd/subfinder/
+        working-directory: v2/cmd/subfinder/
 
       - name: Build
         run: go build .
-        working-directory: cmd/subfinder/
\ No newline at end of file
+        working-directory: v2/cmd/subfinder/
diff --git a/.github/workflows/dockerhub-push-on-release.yml b/.github/workflows/dockerhub-push-on-release.yml
index 09ab9cc..a5156cb 100644
--- a/.github/workflows/dockerhub-push-on-release.yml
+++ b/.github/workflows/dockerhub-push-on-release.yml
@@ -1,7 +1,7 @@
 # dockerhub-push pushes docker build to dockerhub automatically
 # on the creation of a new release
 name: Publish to Dockerhub on creation of a new release
-on: 
+on:
   release:
     types: [published]
 jobs:
@@ -14,4 +14,4 @@ jobs:
       with:
         name: projectdiscovery/subfinder
         username: ${{ secrets.DOCKER_USERNAME }}
-        password: ${{ secrets.DOCKER_PASSWORD }}
\ No newline at end of file
+        password: ${{ secrets.DOCKER_PASSWORD }}
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 70cb60a..36c0a48 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -4,25 +4,25 @@ on:
     tags:
       - v*
 
-jobs: 
+jobs:
   release: 
     runs-on: ubuntu-latest
-    steps: 
-      - 
+    steps:
+      -
         name: "Check out code"
         uses: actions/checkout@v2
-        with: 
+        with:
           fetch-depth: 0
-      - 
+      -
         name: "Set up Go"
         uses: actions/setup-go@v2
-        with: 
+        with:
           go-version: 1.14
-      - 
-        env: 
+      -
+        env:
           GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
         name: "Create release on GitHub"
         uses: goreleaser/goreleaser-action@v2
-        with: 
+        with:
           args: "release --rm-dist"
-          version: latest
\ No newline at end of file
+          version: latest
diff --git a/.gitignore b/.gitignore
index d2af926..536163a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 .DS_Store
 cmd/subfinder/subfinder
+v2/cmd/subfinder/subfinder
 vendor/
 .idea
\ No newline at end of file
diff --git a/.golangci.yml b/.golangci.yml
new file mode 100644
index 0000000..1fcf952
--- /dev/null
+++ b/.golangci.yml
@@ -0,0 +1,116 @@
+linters-settings:
+  dupl:
+    threshold: 100
+  exhaustive:
+    default-signifies-exhaustive: false
+  # funlen:
+  #   lines: 100
+  #   statements: 50
+  goconst:
+    min-len: 2
+    min-occurrences: 2
+  gocritic:
+    enabled-tags:
+      - diagnostic
+      - experimental
+      - opinionated
+      - performance
+      - style
+    disabled-checks:
+      - dupImport # https://github.com/go-critic/go-critic/issues/845
+      - ifElseChain
+  # gocyclo:
+  #   min-complexity: 15
+  goimports:
+    local-prefixes: github.com/golangci/golangci-lint
+  golint:
+    min-confidence: 0
+  gomnd:
+    settings:
+      mnd:
+        # don't include the "operation" and "assign"
+        checks: argument,case,condition,return
+  govet:
+    check-shadowing: true
+    settings:
+      printf:
+        funcs:
+          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof
+          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf
+          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf
+          - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf
+  # lll:
+  #   line-length: 140
+  maligned:
+    suggest-new: true
+  misspell:
+    locale: US
+  nolintlint:
+    allow-leading-space: true # don't require machine-readable nolint directives (i.e. with no leading space)
+    allow-unused: false # report any unused nolint directives
+    require-explanation: false # don't require an explanation for nolint directives
+    require-specific: false # don't require nolint directives to be specific about which linter is being skipped
+
+linters:
+  # please, do not use `enable-all`: it's deprecated and will be removed soon.
+  # inverted configuration with `enable-all` and `disable` is not scalable during updates of golangci-lint
+  disable-all: true
+  enable:
+    - bodyclose
+    - deadcode
+    - dogsled
+    - dupl
+    - errcheck
+    - exhaustive
+    - gochecknoinits
+    - goconst
+    - gocritic
+    - gofmt
+    - goimports
+    - golint
+    - gomnd
+    - goprintffuncname
+    - gosimple
+    - govet
+    - ineffassign
+    - interfacer
+    - maligned
+    - misspell
+    - nakedret
+    - noctx
+    - nolintlint
+    - rowserrcheck
+    - scopelint
+    - staticcheck
+    - structcheck
+    - stylecheck
+    - typecheck
+    - unconvert
+    - unparam
+    - unused
+    - varcheck
+    - whitespace
+
+  # don't enable:
+  # - depguard
+  # - asciicheck
+  # - funlen
+  # - gochecknoglobals
+  # - gocognit
+  # - gocyclo
+  # - godot
+  # - godox
+  # - goerr113
+  # - gosec
+  # - lll
+  # - nestif
+  # - prealloc
+  # - testpackage
+  # - wsl
+
+# golangci.com configuration
+# https://github.com/golangci/golangci/wiki/Configuration
+service:
+  golangci-lint-version: 1.29.x # use the fixed version to not introduce new linters unexpectedly
+  prepare:
+    - echo "here I can run custom commands, but no preparation needed for this repo"
diff --git a/.goreleaser.yml b/.goreleaser.yml
index 47d83c7..e3b78af 100644
--- a/.goreleaser.yml
+++ b/.goreleaser.yml
@@ -1,6 +1,6 @@
 builds:
     - binary: subfinder
-      main: cmd/subfinder/main.go
+      main: v2/cmd/subfinder/main.go
       goos:
         - linux
         - windows
diff --git a/DISCLAIMER.md b/DISCLAIMER.md
index 2b2d986..2280435 100644
--- a/DISCLAIMER.md
+++ b/DISCLAIMER.md
@@ -6,7 +6,6 @@ Subfinder leverages multiple open APIs, it is developed for individuals to help
 - CommonCrawl: https://commoncrawl.org/terms-of-use/full
 - certspotter: https://sslmate.com/terms
 - dnsdumpster: https://hackertarget.com/terms
-- entrust: https://www.entrustdatacard.com/pages/terms-of-use
 - Google Transparency: https://policies.google.com/terms
 - Threatcrowd: https://www.alienvault.com/terms/website-terms-of-use07may2018
 
diff --git a/Dockerfile b/Dockerfile
index b03ce55..c9c45e2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -6,6 +6,6 @@ RUN go get -u github.com/golang/dep/cmd/dep
 WORKDIR /go/src/app
 
 # Install
-RUN go get -u github.com/projectdiscovery/subfinder/cmd/subfinder
+RUN go get -u github.com/projectdiscovery/subfinder/v2/cmd/subfinder
 
 ENTRYPOINT ["subfinder"]
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md
deleted file mode 100644
index a84de56..0000000
--- a/ISSUE_TEMPLATE.md
+++ /dev/null
@@ -1,22 +0,0 @@
-## What's the problem (or question)?
-<!--- If describing a bug, tell us what happens instead of the expected behavior -->
-<!--- If suggesting a change/improvement, explain the difference from current behavior -->
-
-## Do you have an idea for a solution?
-<!--- Not obligatory, but suggest a fix/reason for the bug, -->
-<!--- or ideas how to implement the addition or change -->
-
-## How can we reproduce the issue?
-<!--- Provide unambiguous set of steps to reproduce this bug. Include command to reproduce, if relevant (you can mask the sensitive data) -->
-1.
-2.
-3.
-4.
-
-## What are the running context details?
-<!--- Include as many relevant details about the running context you experienced the bug/problem in -->
-* Installation method (e.g. `pip`, `apt-get`, `git clone` or `zip`/`tar.gz`):
-* Client OS (e.g. `Microsoft Windows 10`)
-* Program version (see banner): 
-* Relevant console output (if any):
-* Exception traceback (if any):
diff --git a/README.md b/README.md
index 2d7f5f7..dc7bcb2 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,11 @@
 [![License](https://img.shields.io/badge/license-MIT-_red.svg)](https://opensource.org/licenses/MIT)
 [![Go Report Card](https://goreportcard.com/badge/github.com/projectdiscovery/subfinder)](https://goreportcard.com/report/github.com/projectdiscovery/subfinder)
 [![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/projectdiscovery/subfinder/issues)
+[![GitHub Release](https://img.shields.io/github/release/projectdiscovery/subfinder)](https://github.com/projectdiscovery/subfinder/releases)
+[![Follow on Twitter](https://img.shields.io/twitter/follow/pdiscoveryio.svg?logo=twitter)](https://twitter.com/pdiscoveryio)
+[![Docker Images](https://img.shields.io/docker/pulls/projectdiscovery/subfinder.svg)](https://hub.docker.com/r/projectdiscovery/subfinder)
+[![Chat on Discord](https://img.shields.io/discord/695645237418131507.svg?logo=discord)](https://discord.gg/KECAGdH)
+
 
 
 subfinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources. It has a simple modular architecture and is optimized for speed. subfinder is built for doing one thing only - passive subdomain enumeration, and it does that very well.
@@ -38,7 +43,7 @@ We have designed subfinder to comply with all passive sources licenses, and usag
 
  - Simple and modular code base making it easy to contribute.
  - Fast And Powerful Resolution and wildcard elimination module
- - **Curated** passive sources to maximize results (26 Sources as of now)
+ - **Curated** passive sources to maximize results (35 Sources as of now)
  - Multiple Output formats supported (Json, File, Stdout)
  - Optimized for speed, very fast and **lightweight** on resources
  - **Stdin** and **stdout** support for integrating in workflows
@@ -46,35 +51,37 @@ We have designed subfinder to comply with all passive sources licenses, and usag
 
 # Usage
 
-```bash
+```sh
 subfinder -h
 ```
 This will display help for the tool. Here are all the switches it supports.
 
 | Flag | Description | Example |
 |------|-------------|---------|
-| -cd | Upload results to the Chaos API (api-key required) | subfinder -d uber.com -cd | 
-| -config string | Configuration file for API Keys, etc  | subfinder -config config.yaml | 
-| -d | Domain to find subdomains for | subfinder -d uber.com | 
-| -dL  | File containing list of domains to enumerate | subfinder -dL hackerone-hosts.txt | 
-| -exclude-sources | List of sources to exclude from enumeration | subfinder -exclude-sources archiveis | 
-| -max-time | Minutes to wait for enumeration results (default 10) | subfinder -max-time 1 | 
-| -nC | Don't Use colors in output | subfinder -nC | 
-| -nW | Remove Wildcard & Dead Subdomains from output | subfinder -nW | 
-| -ls | List all available sources | subfinder -ls | 
-| -o  | File to write output to (optional) | subfinder -o output.txt | 
-| -oD | Directory to write enumeration results to (optional) | subfinder -oD ~/outputs | 
+| -all | Use all sources (slow) for enumeration | subfinder -d uber.com -all |
+| -cd | Upload results to the Chaos API (api-key required) | subfinder -d uber.com -cd |
+| -config string | Configuration file for API Keys, etc  | subfinder -config config.yaml |
+| -d | Domain to find subdomains for | subfinder -d uber.com |
+| -dL  | File containing list of domains to enumerate | subfinder -dL hackerone-hosts.txt |
+| -exclude-sources | List of sources to exclude from enumeration | subfinder -exclude-sources archiveis |
+| -max-time | Minutes to wait for enumeration results (default 10) | subfinder -max-time 1 |
+| -nC | Don't Use colors in output | subfinder -nC |
+| -nW | Remove Wildcard & Dead Subdomains from output | subfinder -nW |
+| -ls | List all available sources | subfinder -ls |
+| -o  | File to write output to (optional) | subfinder -o output.txt |
+| -oD | Directory to write enumeration results to (optional) | subfinder -oD ~/outputs |
 | -oI | Write output in Host,IP format | subfinder -oI |
 | -oJ | Write output in JSON lines Format | subfinder -oJ |
-| -r | Comma-separated list of resolvers to use | subfinder -r 1.1.1.1,1.0.0.1 | 
+| -r | Comma-separated list of resolvers to use | subfinder -r 1.1.1.1,1.0.0.1 |
 | -rL | Text file containing list of resolvers to use | subfinder -rL resolvers.txt
-| -silent | Show only subdomains in output | subfinder -silent | 
-| -sources | Comma separated list of sources to use | subfinder -sources shodan,censys | 
-| -t | Number of concurrent goroutines for resolving (default 10) | subfinder -t 100 | 
-| -timeout | Seconds to wait before timing out (default 30) | subfinder -timeout 30 | 
-| -v | 	Show Verbose output | subfinder -v | 
-| -version | Show current program version | subfinder -version | 
-  
+| -recursive | Enumerate subdomains recursively | subfinder -d news.yahoo.com -recursive |
+| -silent | Show only subdomains in output | subfinder -silent |
+| -sources | Comma separated list of sources to use | subfinder -sources shodan,censys |
+| -t | Number of concurrent goroutines for resolving (default 10) | subfinder -t 100 |
+| -timeout | Seconds to wait before timing out (default 30) | subfinder -timeout 30 |
+| -v | 	Show Verbose output | subfinder -v |
+| -version | Show current program version | subfinder -version |
+
 
 # Installation Instructions
 
@@ -82,60 +89,57 @@ This will display help for the tool. Here are all the switches it supports.
 
 The installation is easy. You can download the pre-built binaries for different platforms from the [releases](https://github.com/projectdiscovery/subfinder/releases/) page. Extract them using tar, move it to your `$PATH` and you're ready to go.
 
-```bash
-> tar -xzvf subfinder-linux-amd64.tar.gz
-> mv subfinder /usr/local/local/bin/
-> subfinder -h
+```sh
+▶ # download release from https://github.com/projectdiscovery/subfinder/releases/
+▶ tar -xzvf subfinder-linux-amd64.tar.gz
+▶ mv subfinder /usr/local/bin/
+▶ subfinder -h
 ```
 
 ### From Source
 
-subfinder requires go1.13+ to install successfully. Run the following command to get the repo - 
+subfinder requires **go1.14+** to install successfully. Run the following command to get the repo -
 
-```bash
-GO111MODULE=on go get -v github.com/projectdiscovery/subfinder/cmd/subfinder
+```sh
+GO111MODULE=on go get -u -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder
 ```
 
 ### From Github
 
-```bash
+```sh
 git clone https://github.com/projectdiscovery/subfinder.git
-cd subfinder/cmd/subfinder
+cd subfinder/v2/cmd/subfinder
 go build .
 mv subfinder /usr/local/bin/
 subfinder -h
 ```
 
-### Upgrading
-If you wish to upgrade the package you can use:
-
-```bash
-GO111MODULE=on go get -u -v github.com/projectdiscovery/subfinder/cmd/subfinder
-```
-
 ## Post Installation Instructions
 
 Subfinder will work after using the installation instructions however to configure Subfinder to work with certain services, you will need to have setup API keys. The following services do not work without an API key:
 
-- [Virustotal](https://www.virustotal.com)
-- [Passivetotal](http://passivetotal.org)
-- [SecurityTrails](http://securitytrails.com)
-- [Censys](https://censys.io)
 - [Binaryedge](https://binaryedge.io)
-- [Shodan](https://shodan.io)
-- [URLScan](https://urlscan.io)
+- [Certspotter](https://sslmate.com/certspotter/api/)
+- [Censys](https://censys.io)
 - [Chaos](https://chaos.projectdiscovery.io)
-- [Spyse](https://spyse.com)
 - [DnsDB](https://api.dnsdb.info)
-- [Zoomeye](https://www.zoomeye.org)
 - [Github](https://github.com)
 - [Intelx](https://intelx.io)
+- [Passivetotal](http://passivetotal.org)
+- [Recon.dev](https://recon.dev)
+- [Robtex](https://www.robtex.com/api/)
+- [SecurityTrails](http://securitytrails.com)
+- [Shodan](https://shodan.io)
+- [Spyse](https://spyse.com)
+- [Threatbook](https://threatbook.cn/api)
+- [Virustotal](https://www.virustotal.com)
+- [Zoomeye](https://www.zoomeye.org)
 
 Theses values are stored in the `$HOME/.config/subfinder/config.yaml` file which will be created when you run the tool for the first time. The configuration file uses the YAML format. Multiple API keys can be specified for each of these services from which one of them will be used for enumeration.
 
 For sources that require multiple keys, namely `Censys`, `Passivetotal`, they can be added by separating them via a colon (:).
 
-An example config file - 
+An example config file -
 
 ```yaml
 resolvers:
@@ -153,7 +157,7 @@ binaryedge:
 censys:
   - ac244e2f-b635-4581-878a-33f4e79a2c13:dd510d6e-1b6e-4655-83f6-f347b363def9
 certspotter: []
-passivetotal: 
+passivetotal:
   - sample-email@user.com:sample_password
 securitytrails: []
 shodan:
@@ -166,22 +170,22 @@ github:
 # Running Subfinder
 
 To run the tool on a target, just use the following command.
-```bash
-> subfinder -d freelancer.com
+```sh
+▶ subfinder -d freelancer.com
 ```
 
 This will run the tool against freelancer.com. There are a number of configuration options that you can pass along with this command. The verbose switch (-v) can be used to display verbose information.
 
-```bash
-[CERTSPOTTER] www.fi.freelancer.com
-[DNSDUMPSTER] hosting.freelancer.com
-[DNSDUMPSTER] support.freelancer.com
-[DNSDUMPSTER] accounts.freelancer.com
-[DNSDUMPSTER] phabricator.freelancer.com
-[DNSDUMPSTER] cdn1.freelancer.com
-[DNSDUMPSTER] t1.freelancer.com
-[DNSDUMPSTER] wdc.t1.freelancer.com
-[DNSDUMPSTER] dal.t1.freelancer.com
+```
+[threatcrowd] ns1.hosting.freelancer.com
+[threatcrowd] ns2.hosting.freelancer.com
+[threatcrowd] flash.freelancer.com
+[threatcrowd] auth.freelancer.com
+[chaos] alertmanager.accounts.freelancer.com
+[chaos] analytics01.freelancer.com
+[chaos] apidocs.freelancer.com
+[chaos] brains.freelancer.com
+[chaos] consul.accounts.freelancer.com
 ```
 
 The `-silent` switch can be used to show only subdomains found without any other info.
@@ -189,60 +193,31 @@ The `-silent` switch can be used to show only subdomains found without any other
 
 The `-o` command can be used to specify an output file.
 
-```bash
-> subfinder -d freelancer.com -o output.txt
+```sh
+▶ subfinder -d freelancer.com -o output.txt
 ```
 
 To run the tool on a list of domains, `-dL` option can be used. This requires a directory to write the output files. Subdomains for each domain from the list are written in a text file in the directory specified by the `-oD` flag with their name being the domain name.
 
-```bash
-> cat domains.txt
+```sh
+▶ cat domains.txt
 hackerone.com
 google.com
 
-> subfinder -dL domains.txt -oD ~/path/to/output
-> ls ~/path/to/output
+▶ subfinder -dL domains.txt -oD ~/path/to/output
+▶ ls ~/path/to/output
 
 hackerone.com.txt
 google.com.txt
 ```
 
-If you want to save results to a single file while using a domain list, specify the `-o` flag with the name of the output file.
-
-
-```bash
-> cat domains.txt
-hackerone.com
-google.com
-
-> subfinder -dL domains.txt -o ~/path/to/output.txt
-> ls ~/path/to/
-
-output.txt
-```
-
-If you want upload your data to chaos dataset, you can use `-cd` flag with your scan, chaos will resolve all the input and add valid subdomains to public dataset, which you can access on the go using [chaos-client](https://github.com/projectdiscovery/chaos-client)
-
-```bash
-> subfinder -d hackerone.com -cd 
-
-root@b0x:~# subfinder -d hackerone.com -cd 
-
-www.hackerone.com
-api.hackerone.com
-go.hackerone.com
-hackerone.com
-staging.hackerone.com
-[INF] Input processed successfully and subdomains with valid records will be updated to chaos dataset.
-```
-
-You can also get output in json format using `-oJ` switch. This switch saves the output in the JSON lines format. 
+You can also get output in JSON format using the `-oJ` switch. This switch saves the output in the JSON lines format.
 
 If you use the JSON format, or the `Host:IP` format, then it becomes mandatory for you to use the **-nW** format as resolving is essential for these output format. By default, resolving the found subdomains is disabled.
 
-```bash
-> subfinder -d hackerone.com -o output.json -oJ -nW
-> cat output.json
+```sh
+▶ subfinder -d hackerone.com -o output.json -oJ -nW
+▶ cat output.json
 
 {"host":"www.hackerone.com","ip":"104.16.99.52"}
 {"host":"mta-sts.hackerone.com","ip":"185.199.108.153"}
@@ -250,23 +225,18 @@ If you use the JSON format, or the `Host:IP` format, then it becomes mandatory f
 {"host":"mta-sts.managed.hackerone.com","ip":"185.199.110.153"}
 ```
 
-You can specify custom resolvers too.
-```bash
-> subfinder -d freelancer.com -o result.txt -nW -v -r 8.8.8.8,1.1.1.1
-> subfinder -d freelancer.com -o result.txt -nW -v -rL resolvers.txt
-```
 
-**The new highlight of this release is the addition of stdin/stdout features.** Now, domains can be piped to subfinder and enumeration can be ran on them. For example - 
+**The new highlight of this release is the addition of stdin/stdout features.** Now, domains can be piped to subfinder and enumeration can be run on them. For example -
 
-```bash
-> echo hackerone.com | subfinder -v
-> cat targets.txt | subfinder -v 
+```sh
+▶ echo hackerone.com | subfinder
+▶ cat targets.txt | subfinder
 ```
 
 The subdomains discovered can be piped to other tools too. For example, you can pipe the subdomains discovered by subfinder to httpx [httpx](https://github.com/projectdiscovery/httpx) which will then find running http servers on the host.
 
-```bash
-> echo hackerone.com | subfinder -silent | httpx -silent
+```sh
+▶ echo hackerone.com | subfinder -silent | httpx -silent
 
 http://hackerone.com
 http://www.hackerone.com
@@ -278,10 +248,10 @@ http://mta-sts.managed.hackerone.com
 
 ## Running in a Docker Container
 
-You can use the official dockerhub image at [subfinder](https://hub.docker.com/r/projectdiscovery/subfinder). Simply run - 
+You can use the official dockerhub image at [subfinder](https://hub.docker.com/r/projectdiscovery/subfinder). Simply run -
 
-```bash
-> docker pull projectdiscovery/subfinder
+```sh
+▶ docker pull projectdiscovery/subfinder
 ```
 
 The above command will pull the latest tagged release from the dockerhub repository.
@@ -290,31 +260,31 @@ If you want to build the container yourself manually, git clone the repo, then b
 
 - Clone the repo using `git clone https://github.com/projectdiscovery/subfinder.git`
 - Build your docker container
-```bash
+```sh
 docker build -t projectdiscovery/subfinder .
 ```
 
-- After building the container using either way, run the following - 
-```bash
+- After building the container using either way, run the following -
+```sh
 docker run -it projectdiscovery/subfinder
 ```
-> The above command is the same as running `-h`
+> The above command is the same as running `-h`
 
 If you are using docker, you need to first create your directory structure holding subfinder configuration file. After modifying the default config.yaml file, you can run:
 
-```bash
-> mkdir -p $HOME/.config/subfinder
-> cp config.yaml $HOME/.config/subfinder/config.yaml
-> nano $HOME/.config/subfinder/config.yaml
+```sh
+▶ mkdir -p $HOME/.config/subfinder
+▶ cp config.yaml $HOME/.config/subfinder/config.yaml
+▶ nano $HOME/.config/subfinder/config.yaml
 ```
 
 After that, you can pass it as a volume using the following sample command.
-```bash
-> docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d freelancer.com
+```sh
+▶ docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d freelancer.com
 ```
 
 For example, this runs the tool against uber.com and output the results to your host file system:
-```bash
+```sh
 docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d uber.com > uber.com.txt
 ```
 
diff --git a/THANKS.md b/THANKS.md
index 56fa171..b853afc 100644
--- a/THANKS.md
+++ b/THANKS.md
@@ -4,8 +4,9 @@ Many people have contributed to subfinder making it a wonderful tool either by m
 
 - All the contributors at [CONTRIBUTORS](https://github.com/projectdiscovery/subfinder/graphs/contributors) who made subfinder what it is.
 
-We'd like to thank some additional amazing people, wo contributed a lot in subfinder's journey - 
+We'd like to thank some additional amazing people, who contributed a lot to subfinder's journey - 
 
-- @infosec-au - Donating to the project
-- @codingo - Initial work on the project, managing it, lot of work!
-- @picatz - Improving the structure of the project a lot. New ideas!
\ No newline at end of file
+- [@vzamanillo](https://github.com/vzamanillo) - For adding multiple features and overall project improvements.
+- [@infosec-au](https://github.com/infosec-au) - Donating to the project.
+- [@codingo](https://github.com/codingo) - Initial work on the project, managing it, lot of work!
+- [@picatz](https://github.com/picatz) - Improving the structure of the project a lot. New ideas!
\ No newline at end of file
diff --git a/config.yaml b/config.yaml
deleted file mode 100644
index 7155cd5..0000000
--- a/config.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-resolvers:
-  - 1.1.1.1
-  - 1.0.0.1
-  - 8.8.8.8
-  - 8.8.4.4
-  - 9.9.9.9
-  - 9.9.9.10
-  - 77.88.8.8
-  - 77.88.8.1
-  - 208.67.222.222
-  - 208.67.220.220
-sources:
-  - alienvault
-  - archiveis
-  - binaryedge
-  - bufferover
-  - censys
-  - certspotter
-  - certspotterold
-  - commoncrawl
-  - crtsh
-  - dnsdumpster
-  - dnsdb
-  - entrust
-  - github
-  - googleter
-  - hackertarget
-  - intelx
-  - ipv4info
-  - passivetotal
-  - rapiddns
-  - securitytrails
-  - shodan
-  - sitedossier
-  - sublist3r
-  - spyse
-  - threatcrowd
-  - threatminer
-  - urlscan
-  - virustotal
-  - waybackarchive
-  - zoomeye
-censys:
-  - <key-here>
-binaryedge:
-  - <key-here>
-certspotter:
-  - <key-here>
-github:
-  - <token-here>
-intelx:
-  - <public.intelx.io:key-here>
-passivetotal:
-  - <email:key-here>
-securitytrails:
-  - <key-here>
-virustotal:
-  - <key-here>
-urlscan:
-  - <key-here>
-chaos:
-  - <key-here>
-spyse:
-  - <key-here>
-shodan:
-  - <key-here>
-dnsdb:
-  - <key-here>
diff --git a/go.mod b/go.mod
deleted file mode 100644
index f1c0979..0000000
--- a/go.mod
+++ /dev/null
@@ -1,15 +0,0 @@
-module github.com/projectdiscovery/subfinder
-
-go 1.14
-
-require (
-	github.com/json-iterator/go v1.1.9
-	github.com/lib/pq v1.6.0
-	github.com/m-mizutani/urlscan-go v1.0.0
-	github.com/miekg/dns v1.1.29
-	github.com/pkg/errors v0.9.1
-	github.com/projectdiscovery/gologger v1.0.0
-	github.com/rs/xid v1.2.1
-	github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
-	gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c
-)
diff --git a/go.sum b/go.sum
deleted file mode 100644
index 957da47..0000000
--- a/go.sum
+++ /dev/null
@@ -1,87 +0,0 @@
-github.com/alexbrainman/sspi v0.0.0-20180613141037-e580b900e9f5/go.mod h1:976q2ETgjT2snVCf2ZaBnyBbVoPERGjUz+0sofzEfro=
-github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
-github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
-github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE=
-github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
-github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
-github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
-github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
-github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
-github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=
-github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
-github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
-github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
-github.com/jcmturner/gokrb5/v8 v8.2.0 h1:lzPl/30ZLkTveYsYZPKMcgXc8MbnE6RsTd4F9KgiLtk=
-github.com/jcmturner/gokrb5/v8 v8.2.0/go.mod h1:T1hnNppQsBtxW0tCHMHTkAt8n/sABdzZgZdoFrZaZNM=
-github.com/jcmturner/rpc/v2 v2.0.2 h1:gMB4IwRXYsWw4Bc6o/az2HJgFUA1ffSh90i26ZJ6Xl0=
-github.com/jcmturner/rpc/v2 v2.0.2/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
-github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
-github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
-github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
-github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
-github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/lib/pq v1.6.0 h1:I5DPxhYJChW9KYc66se+oKFFQX6VuQrKiprsX6ivRZc=
-github.com/lib/pq v1.6.0/go.mod h1:4vXEAYvW1fRQ2/FhZ78H73A60MHw1geSm145z2mdY1g=
-github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 h1:bqDmpDG49ZRnB5PcgP0RXtQvnMSgIF14M7CBd2shtXs=
-github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
-github.com/m-mizutani/urlscan-go v1.0.0 h1:+fTiSRCQXdy3EM1BgO5gmAHFWbccTDdoEKy9Fa7m9xo=
-github.com/m-mizutani/urlscan-go v1.0.0/go.mod h1:ppEBT0e/xv0bPcVWKev4cYG7Ey8933JsOzEzovxGMjI=
-github.com/mattn/go-colorable v0.1.0/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
-github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/miekg/dns v1.1.29 h1:xHBEhR+t5RzcFJjBLJlax2daXOrTYtr9z4WdKEfWFzg=
-github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
-github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
-github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
-github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
-github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
-github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/projectdiscovery/gologger v1.0.0 h1:XAQ8kHeVKXMjY4rLGh7eT5+oHU077BNEvs7X6n+vu1s=
-github.com/projectdiscovery/gologger v1.0.0/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
-github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
-github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
-github.com/sirupsen/logrus v1.3.0 h1:hI/7Q+DtNZ2kINb6qt/lS+IyXnHQe9e90POfeewL/ME=
-github.com/sirupsen/logrus v1.3.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
-github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
-github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
-github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
-golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20200117160349-530e935923ad/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4 h1:QmwruyY+bKbDDL0BaglrbZABEali68eoMFhTZpCjYVA=
-golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
-golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa h1:F+8P+gmewFQYRk6JoLQLwjBCTu3mcIURZfNkVweuRKA=
-golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
-golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe h1:6fAMxZRR6sl1Uq8U61gxU+kPTs2tR8uOySCbBP7BN/M=
-golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
-golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=
-gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=
-gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4=
-gopkg.in/jcmturner/gokrb5.v7 v7.5.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM=
-gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
-gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c h1:grhR+C34yXImVGp7EzNk+DTIk+323eIUWOmEevy6bDo=
-gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/pkg/passive/sources.go b/pkg/passive/sources.go
deleted file mode 100644
index 30b0890..0000000
--- a/pkg/passive/sources.go
+++ /dev/null
@@ -1,158 +0,0 @@
-package passive
-
-import (
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/alienvault"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/archiveis"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/binaryedge"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/bufferover"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/censys"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotter"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotterold"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/commoncrawl"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/crtsh"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdb"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdumpster"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/entrust"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/github"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/hackertarget"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/intelx"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/ipv4info"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/passivetotal"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/rapiddns"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/securitytrails"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/shodan"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sitedossier"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/spyse"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sublist3r"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatcrowd"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatminer"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/urlscan"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/virustotal"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/waybackarchive"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping/sources/zoomeye"
-)
-
-// DefaultSources contains the list of sources used by default
-var DefaultSources = []string{
-	"alienvault",
-	"archiveis",
-	"binaryedge",
-	"bufferover",
-	"censys",
-	"certspotter",
-	"certspotterold",
-	"commoncrawl",
-	"crtsh",
-	"dnsdumpster",
-	"dnsdb",
-	"entrust",
-	"github",
-	"hackertarget",
-	"ipv4info",
-	"intelx",
-	"passivetotal",
-	"rapiddns",
-	"securitytrails",
-	"shodan",
-	"sitedossier",
-	"spyse",
-	"sublist3r",
-	"threatcrowd",
-	"threatminer",
-	"urlscan",
-	"virustotal",
-	"waybackarchive",
-	"zoomeye",
-}
-
-// Agent is a struct for running passive subdomain enumeration
-// against a given host. It wraps subscraping package and provides
-// a layer to build upon.
-type Agent struct {
-	sources map[string]subscraping.Source
-}
-
-// New creates a new agent for passive subdomain discovery
-func New(sources []string, exclusions []string) *Agent {
-	// Create the agent, insert the sources and remove the excluded sources
-	agent := &Agent{sources: make(map[string]subscraping.Source)}
-
-	agent.addSources(sources)
-	agent.removeSources(exclusions)
-
-	return agent
-}
-
-// addSources adds the given list of sources to the source array
-func (a *Agent) addSources(sources []string) {
-	for _, source := range sources {
-		switch source {
-		case "alienvault":
-			a.sources[source] = &alienvault.Source{}
-		case "archiveis":
-			a.sources[source] = &archiveis.Source{}
-		case "binaryedge":
-			a.sources[source] = &binaryedge.Source{}
-		case "bufferover":
-			a.sources[source] = &bufferover.Source{}
-		case "censys":
-			a.sources[source] = &censys.Source{}
-		case "certspotter":
-			a.sources[source] = &certspotter.Source{}
-		case "certspotterold":
-			a.sources[source] = &certspotterold.Source{}
-		case "commoncrawl":
-			a.sources[source] = &commoncrawl.Source{}
-		case "crtsh":
-			a.sources[source] = &crtsh.Source{}
-		case "dnsdumpster":
-			a.sources[source] = &dnsdumpster.Source{}
-		case "dnsdb":
-			a.sources[source] = &dnsdb.Source{}
-		case "entrust":
-			a.sources[source] = &entrust.Source{}
-		case "github":
-			a.sources[source] = &github.Source{}
-		case "hackertarget":
-			a.sources[source] = &hackertarget.Source{}
-		case "ipv4info":
-			a.sources[source] = &ipv4info.Source{}
-		case "intelx":
-			a.sources[source] = &intelx.Source{}
-		case "passivetotal":
-			a.sources[source] = &passivetotal.Source{}
-		case "rapiddns":
-			a.sources[source] = &rapiddns.Source{}
-		case "securitytrails":
-			a.sources[source] = &securitytrails.Source{}
-		case "shodan":
-			a.sources[source] = &shodan.Source{}
-		case "sitedossier":
-			a.sources[source] = &sitedossier.Source{}
-		case "spyse":
-			a.sources[source] = &spyse.Source{}
-		case "sublist3r":
-			a.sources[source] = &sublist3r.Source{}
-		case "threatcrowd":
-			a.sources[source] = &threatcrowd.Source{}
-		case "threatminer":
-			a.sources[source] = &threatminer.Source{}
-		case "urlscan":
-			a.sources[source] = &urlscan.Source{}
-		case "virustotal":
-			a.sources[source] = &virustotal.Source{}
-		case "waybackarchive":
-			a.sources[source] = &waybackarchive.Source{}
-		case "zoomeye":
-			a.sources[source] = &zoomeye.Source{}
-		}
-	}
-}
-
-// removeSources deletes the given sources from the source map
-func (a *Agent) removeSources(sources []string) {
-	for _, source := range sources {
-		delete(a.sources, source)
-	}
-}
diff --git a/pkg/runner/utils.go b/pkg/runner/utils.go
deleted file mode 100644
index 0009ad3..0000000
--- a/pkg/runner/utils.go
+++ /dev/null
@@ -1,131 +0,0 @@
-package runner
-
-import (
-	"bufio"
-	"crypto/tls"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"net/http"
-	"strings"
-	"time"
-
-	jsoniter "github.com/json-iterator/go"
-	"github.com/pkg/errors"
-)
-
-// JSONResult contains the result for a host in JSON format
-type JSONResult struct {
-	Host string `json:"host"`
-	IP   string `json:"ip"`
-}
-
-func (r *Runner) UploadToChaos(reader io.Reader) error {
-	httpClient := &http.Client{
-		Transport: &http.Transport{
-			MaxIdleConnsPerHost: 100,
-			MaxIdleConns:        100,
-			TLSClientConfig: &tls.Config{
-				InsecureSkipVerify: true,
-			},
-		},
-		Timeout: time.Duration(600) * time.Second, // 10 minutes - uploads may take long
-	}
-
-	request, err := http.NewRequest("POST", "https://dns.projectdiscovery.io/dns/add", reader)
-	if err != nil {
-		return errors.Wrap(err, "could not create request")
-	}
-	request.Header.Set("Authorization", r.options.YAMLConfig.GetKeys().Chaos)
-
-	resp, err := httpClient.Do(request)
-	if err != nil {
-		return errors.Wrap(err, "could not make request")
-	}
-	defer func() {
-		io.Copy(ioutil.Discard, resp.Body)
-		resp.Body.Close()
-	}()
-
-	if resp.StatusCode != 200 {
-		return fmt.Errorf("invalid status code received: %d", resp.StatusCode)
-	}
-	return nil
-}
-
-// WriteHostOutput writes the output list of subdomain to an io.Writer
-func WriteHostOutput(results map[string]struct{}, writer io.Writer) error {
-	bufwriter := bufio.NewWriter(writer)
-	sb := &strings.Builder{}
-
-	for host := range results {
-		sb.WriteString(host)
-		sb.WriteString("\n")
-
-		_, err := bufwriter.WriteString(sb.String())
-		if err != nil {
-			bufwriter.Flush()
-			return err
-		}
-		sb.Reset()
-	}
-	return bufwriter.Flush()
-}
-
-// WriteHostOutputNoWildcard writes the output list of subdomain with nW flag to an io.Writer
-func WriteHostOutputNoWildcard(results map[string]string, writer io.Writer) error {
-	bufwriter := bufio.NewWriter(writer)
-	sb := &strings.Builder{}
-
-	for host := range results {
-		sb.WriteString(host)
-		sb.WriteString("\n")
-
-		_, err := bufwriter.WriteString(sb.String())
-		if err != nil {
-			bufwriter.Flush()
-			return err
-		}
-		sb.Reset()
-	}
-	return bufwriter.Flush()
-}
-
-// WriteJSONOutput writes the output list of subdomain in JSON to an io.Writer
-func WriteJSONOutput(results map[string]string, writer io.Writer) error {
-	encoder := jsoniter.NewEncoder(writer)
-
-	data := JSONResult{}
-
-	for host, ip := range results {
-		data.Host = host
-		data.IP = ip
-
-		err := encoder.Encode(&data)
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-// WriteHostIPOutput writes the output list of subdomain to an io.Writer
-func WriteHostIPOutput(results map[string]string, writer io.Writer) error {
-	bufwriter := bufio.NewWriter(writer)
-	sb := &strings.Builder{}
-
-	for host, ip := range results {
-		sb.WriteString(host)
-		sb.WriteString(",")
-		sb.WriteString(ip)
-		sb.WriteString("\n")
-
-		_, err := bufwriter.WriteString(sb.String())
-		if err != nil {
-			bufwriter.Flush()
-			return err
-		}
-		sb.Reset()
-	}
-	return bufwriter.Flush()
-}
diff --git a/pkg/subscraping/agent.go b/pkg/subscraping/agent.go
deleted file mode 100755
index 18e248f..0000000
--- a/pkg/subscraping/agent.go
+++ /dev/null
@@ -1,96 +0,0 @@
-package subscraping
-
-import (
-	"context"
-	"crypto/tls"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-	"time"
-)
-
-// NewSession creates a new session object for a domain
-func NewSession(domain string, keys Keys, timeout int) (*Session, error) {
-	client := &http.Client{
-		Transport: &http.Transport{
-			MaxIdleConns:        100,
-			MaxIdleConnsPerHost: 100,
-			TLSClientConfig: &tls.Config{
-				InsecureSkipVerify: true,
-			},
-		},
-		Timeout: time.Duration(timeout) * time.Second,
-	}
-
-	session := &Session{
-		Client: client,
-		Keys:   keys,
-	}
-
-	// Create a new extractor object for the current domain
-	extractor, err := NewSubdomainExtractor(domain)
-	session.Extractor = extractor
-
-	return session, err
-}
-
-// NormalGetWithContext makes a normal GET request to a URL with context
-func (s *Session) NormalGetWithContext(ctx context.Context, url string) (*http.Response, error) {
-	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	// Don't randomize user agents, as they cause issues sometimes
-	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
-	req.Header.Set("Accept", "*/*")
-	req.Header.Set("Accept-Language", "en")
-
-	return httpRequestWrapper(s.Client, req)
-}
-
-// Get makes a GET request to a URL
-func (s *Session) Get(ctx context.Context, url string, cookies string, headers map[string]string) (*http.Response, error) {
-	req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
-	if err != nil {
-		return nil, err
-	}
-
-	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
-	req.Header.Set("Accept", "*/*")
-	req.Header.Set("Accept-Language", "en")
-
-	if cookies != "" {
-		req.Header.Set("Cookie", cookies)
-	}
-
-	if headers != nil {
-		for key, value := range headers {
-			req.Header.Set(key, value)
-		}
-	}
-
-	return httpRequestWrapper(s.Client, req)
-}
-
-func (s *Session) DiscardHttpResponse(response *http.Response) {
-	if response != nil {
-		io.Copy(ioutil.Discard, response.Body)
-		response.Body.Close()
-	}
-}
-
-func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
-	resp, err := client.Do(request)
-	if err != nil {
-		return nil, err
-	}
-
-	if resp.StatusCode != http.StatusOK {
-		requestUrl, _ := url.QueryUnescape(request.URL.String())
-		return resp, fmt.Errorf("Unexpected status code %d received from %s", resp.StatusCode, requestUrl)
-	}
-	return resp, nil
-}
diff --git a/pkg/subscraping/sources/binaryedge/binaryedge.go b/pkg/subscraping/sources/binaryedge/binaryedge.go
deleted file mode 100755
index 64630d8..0000000
--- a/pkg/subscraping/sources/binaryedge/binaryedge.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package binaryedge
-
-import (
-	"context"
-	"fmt"
-
-	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-type binaryedgeResponse struct {
-	Subdomains []string `json:"events"`
-	Total      int      `json:"total"`
-}
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		if session.Keys.Binaryedge == "" {
-			close(results)
-			return
-		}
-
-		resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s", domain), "", map[string]string{"X-Key": session.Keys.Binaryedge})
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
-			return
-		}
-
-		response := new(binaryedgeResponse)
-		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			resp.Body.Close()
-			close(results)
-			return
-		}
-		resp.Body.Close()
-
-		for _, subdomain := range response.Subdomains {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-		}
-
-		remaining := response.Total - 100
-		currentPage := 2
-
-		for {
-			further := s.getSubdomains(ctx, domain, &remaining, &currentPage, session, results)
-			if !further {
-				break
-			}
-		}
-		close(results)
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "binaryedge"
-}
-
-func (s *Source) getSubdomains(ctx context.Context, domain string, remaining, currentPage *int, session *subscraping.Session, results chan subscraping.Result) bool {
-	for {
-		select {
-		case <-ctx.Done():
-			return false
-		default:
-			resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s?page=%d", domain, *currentPage), "", map[string]string{"X-Key": session.Keys.Binaryedge})
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				return false
-			}
-
-			response := binaryedgeResponse{}
-			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				resp.Body.Close()
-				return false
-			}
-			resp.Body.Close()
-
-			for _, subdomain := range response.Subdomains {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-			}
-
-			*remaining = *remaining - 100
-			if *remaining <= 0 {
-				return false
-			}
-			*currentPage++
-			return true
-		}
-	}
-}
diff --git a/pkg/subscraping/sources/bufferover/bufferover.go b/pkg/subscraping/sources/bufferover/bufferover.go
deleted file mode 100755
index 88e8bda..0000000
--- a/pkg/subscraping/sources/bufferover/bufferover.go
+++ /dev/null
@@ -1,57 +0,0 @@
-// Package bufferover is a bufferover Scraping Engine in Golang
-package bufferover
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		// Run enumeration on subdomain dataset for historical SONAR datasets
-		s.getData(ctx, fmt.Sprintf("https://dns.bufferover.run/dns?q=.%s", domain), session, results)
-		s.getData(ctx, fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), session, results)
-
-		close(results)
-	}()
-
-	return results
-}
-
-func (s *Source) getData(ctx context.Context, URL string, session *subscraping.Session, results chan subscraping.Result) {
-	resp, err := session.NormalGetWithContext(ctx, URL)
-	if err != nil {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		session.DiscardHttpResponse(resp)
-		return
-	}
-
-	body, err := ioutil.ReadAll(resp.Body)
-	if err != nil {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		resp.Body.Close()
-		return
-	}
-	resp.Body.Close()
-
-	src := string(body)
-
-	for _, subdomain := range session.Extractor.FindAllString(src, -1) {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-	}
-	return
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "bufferover"
-}
diff --git a/pkg/subscraping/sources/dnsdb/dnsdb.go b/pkg/subscraping/sources/dnsdb/dnsdb.go
deleted file mode 100644
index a59624d..0000000
--- a/pkg/subscraping/sources/dnsdb/dnsdb.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package dnsdb
-
-import (
-	"bufio"
-	"context"
-	"encoding/json"
-	"fmt"
-	"strings"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-type dnsdbResponse struct {
-	Name string `json:"rrname"`
-}
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	if session.Keys.DNSDB == "" {
-		close(results)
-	} else {
-		headers := map[string]string{
-			"X-API-KEY":    session.Keys.DNSDB,
-			"Accept":       "application/json",
-			"Content-Type": "application/json",
-		}
-
-		go func() {
-			resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				session.DiscardHttpResponse(resp)				
-				close(results)
-				return
-			}
-
-			defer resp.Body.Close()
-			// Get the response body
-			scanner := bufio.NewScanner(resp.Body)
-			for scanner.Scan() {
-				line := scanner.Text()
-				if line == "" {
-					continue
-				}
-				out := &dnsdbResponse{}
-				err := json.Unmarshal([]byte(line), out)
-				if err != nil {
-					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-					resp.Body.Close()
-					close(results)
-					return
-				}
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(out.Name, ".")}
-				out = nil
-			}
-			close(results)
-		}()
-	}
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "DNSDB"
-}
diff --git a/pkg/subscraping/sources/entrust/entrust.go b/pkg/subscraping/sources/entrust/entrust.go
deleted file mode 100755
index 0141868..0000000
--- a/pkg/subscraping/sources/entrust/entrust.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package entrust
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-	"strings"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain=%s&includeExpired=true&exactMatch=false&limit=5000", domain))
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
-			return
-		}
-
-		body, err := ioutil.ReadAll(resp.Body)
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			resp.Body.Close()
-			close(results)
-			return
-		}
-		resp.Body.Close()
-
-		src := string(body)
-
-		for _, subdomain := range session.Extractor.FindAllString(src, -1) {
-			subdomain = strings.TrimPrefix(subdomain, "u003d")
-
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-		}
-		close(results)
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "entrust"
-}
diff --git a/pkg/subscraping/sources/github/github.go b/pkg/subscraping/sources/github/github.go
deleted file mode 100644
index f4331f0..0000000
--- a/pkg/subscraping/sources/github/github.go
+++ /dev/null
@@ -1,211 +0,0 @@
-// GitHub search package, based on gwen001's https://github.com/gwen001/github-search github-subdomains
-package github
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-	"regexp"
-	"strconv"
-	"strings"
-	"time"
-
-	jsoniter "github.com/json-iterator/go"
-
-	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-	"github.com/tomnomnom/linkheader"
-)
-
-type textMatch struct {
-	Fragment string `json:"fragment"`
-}
-
-type item struct {
-	Name    		string `json:"name"`
-	HtmlUrl 		string `json:"html_url"`
-	TextMatches []textMatch `json:"text_matches"`
-}
-
-type response struct {
-	TotalCount int    `json:"total_count"`
-	Items      []item `json:"items"`
-}
-
-// Source is the passive scraping agent
-type Source struct{}
-
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		if len(session.Keys.GitHub) == 0 {
-			close(results)
-			return
-		}
-
-		tokens := NewTokenManager(session.Keys.GitHub)
-
-		// search on GitHub with exact match
-		searchURL := fmt.Sprintf("https://api.github.com/search/code?per_page=100&q=\"%s\"", domain)
-		s.enumerate(ctx, searchURL, s.DomainRegexp(domain), tokens, session, results)
-		close(results)
-	}()
-
-	return results
-}
-
-func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *regexp.Regexp, tokens *Tokens, session *subscraping.Session, results chan subscraping.Result) {
-	select {
-	case <-ctx.Done():
-		return
-	default:
-	}
-
-	token := tokens.Get()
-
-	if token.RetryAfter > 0 {
-		if len(tokens.pool) == 1 {
-			gologger.Verbosef("GitHub Search request rate limit exceeded, waiting for %d seconds before retry... \n", s.Name(), token.RetryAfter)
-			time.Sleep(time.Duration(token.RetryAfter) * time.Second)
-		} else {
-			token = tokens.Get()
-		}
-	}
-
-	headers := map[string]string{
-		"Accept":        "application/vnd.github.v3.text-match+json",
-		"Authorization": "token " + token.Hash,
-	}
-
-	// Initial request to GitHub search
-	resp, err := session.Get(ctx, searchURL, "", headers)
-	isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
-
-	if err != nil && !isForbidden {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		session.DiscardHttpResponse(resp)
-		return
-	} else {
-		// Retry enumerarion after Retry-After seconds on rate limit abuse detected
-		ratelimitRemaining, _ := strconv.ParseInt(resp.Header.Get("X-Ratelimit-Remaining"), 10, 64)
-		if isForbidden && ratelimitRemaining == 0 {
-			retryAfterSeconds, _ := strconv.ParseInt(resp.Header.Get("Retry-After"), 10, 64)
-			tokens.setCurrentTokenExceeded(retryAfterSeconds)
-
-			s.enumerate(ctx, searchURL, domainRegexp, tokens, session, results)
-			} else {
-				// Links header, first, next, last...
-				linksHeader := linkheader.Parse(resp.Header.Get("Link"))
-
-				data := response{}
-
-				// Marshall json reponse
-				err = jsoniter.NewDecoder(resp.Body).Decode(&data)
-				resp.Body.Close()
-				if err != nil {
-					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-					return
-				}
-
-				// Response items iteration
-				for _, item := range data.Items {
-					resp, err := session.NormalGetWithContext(ctx, rawUrl(item.HtmlUrl))
-					if err != nil {
-						if resp != nil && resp.StatusCode != http.StatusNotFound {
-							session.DiscardHttpResponse(resp)
-							results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-							return
-						}
-					}
-
-					var subdomains []string
-
-					if resp.StatusCode == http.StatusOK {
-						// Get the item code from the raw file url
-						code, err := ioutil.ReadAll(resp.Body)
-						resp.Body.Close()
-						if err != nil {
-							results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-							return
-						}
-						// Search for domain matches in the code
-						subdomains = append(subdomains, matches(domainRegexp, normalizeContent(string(code)))...)
-					}
-
-					// Text matches iteration per item
-					for _, textMatch := range item.TextMatches {
-						// Search for domain matches in the text fragment
-						subdomains = append(subdomains, matches(domainRegexp, normalizeContent(textMatch.Fragment))...)
-					}
-
-					for _, subdomain := range unique(subdomains) {
-						results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-					}
-				}
-
-				// Proccess the next link recursively
-				for _, link := range linksHeader {
-					if link.Rel == "next" {
-						nextUrl, err := url.QueryUnescape(link.URL)
-						if err != nil {
-							results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-							return
-						}
-						s.enumerate(ctx, nextUrl, domainRegexp, tokens, session, results)
-					}
-				}
-			}
-	}
-
-}
-
-// Normalize content before matching, query unescape, remove tabs and new line chars
-func normalizeContent(content string) string {
-	normalizedContent, _ := url.QueryUnescape(content)
-	normalizedContent = strings.Replace(normalizedContent, "\\t", "", -1)
-	normalizedContent = strings.Replace(normalizedContent, "\\n", "", -1)
-	return normalizedContent
-}
-
-// Remove duplicates from string array
-func unique(arr []string) []string {
-    occured := map[string]bool{}
-    result := []string{}
-    for e := range arr {
-        if occured[arr[e]] != true {
-            occured[arr[e]] = true
-            result = append(result, arr[e])
-        }
-    }
-    return result
-}
-
-// Find matches by regular expression in any content
-func matches(regexp *regexp.Regexp, content string) []string {
-	var matches []string
-	match := regexp.FindAllString(content, -1)
-	if len(match) > 0 {
-		matches = unique(match)
-	}
-	return matches
-}
-
-// Raw URL to get the files code and match for subdomains
-func rawUrl(htmlUrl string) string {
-	domain := strings.Replace(htmlUrl, "https://github.com/", "https://raw.githubusercontent.com/", -1)
-	return strings.Replace(domain, "/blob/", "/", -1)
-}
-
-// Domain regular expression to match subdomains in github files code
-func (s *Source) DomainRegexp(domain string) *regexp.Regexp {
-	rdomain := strings.Replace(domain, ".", "\\.", -1)
-	return regexp.MustCompile("(\\w+[.])*" + rdomain)
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "github"
-}
diff --git a/pkg/subscraping/sources/hackertarget/hackertarget.go b/pkg/subscraping/sources/hackertarget/hackertarget.go
deleted file mode 100755
index 5c0669a..0000000
--- a/pkg/subscraping/sources/hackertarget/hackertarget.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package hackertarget
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
-			return
-		}
-
-		// Get the response body
-		body, err := ioutil.ReadAll(resp.Body)
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			resp.Body.Close()
-			close(results)
-			return
-		}
-		resp.Body.Close()
-		src := string(body)
-
-		for _, match := range session.Extractor.FindAllString(src, -1) {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
-		}
-		close(results)
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "hackertarget"
-}
diff --git a/pkg/subscraping/sources/shodan/shodan.go b/pkg/subscraping/sources/shodan/shodan.go
deleted file mode 100644
index b258abb..0000000
--- a/pkg/subscraping/sources/shodan/shodan.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package shodan
-
-import (
-	"context"
-	"strconv"
-
-	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-type shodanResult struct {
-	Matches []shodanObject `json:"matches"`
-	Result  int            `json:"result"`
-	Error   string         `json:"error"`
-}
-
-type shodanObject struct {
-	Hostnames []string `json:"hostnames"`
-}
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		if session.Keys.Shodan == "" {
-			close(results)
-			return
-		}
-
-		for currentPage := 0; currentPage <= 10; currentPage++ {
-			resp, err := session.NormalGetWithContext(ctx, "https://api.shodan.io/shodan/host/search?query=hostname:"+domain+"&page="+strconv.Itoa(currentPage)+"&key="+session.Keys.Shodan)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				session.DiscardHttpResponse(resp)
-				close(results)
-				return
-			}
-
-			var response shodanResult
-			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				resp.Body.Close()
-				close(results)
-				return
-			}
-			resp.Body.Close()
-
-			if response.Error != "" || len(response.Matches) == 0 {
-				close(results)
-				return
-			}
-
-			for _, block := range response.Matches {
-				for _, hostname := range block.Hostnames {
-					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname}
-				}
-			}
-		}
-		close(results)
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "shodan"
-}
diff --git a/pkg/subscraping/sources/sitedossier/sitedossier.go b/pkg/subscraping/sources/sitedossier/sitedossier.go
deleted file mode 100755
index 99f61db..0000000
--- a/pkg/subscraping/sources/sitedossier/sitedossier.go
+++ /dev/null
@@ -1,84 +0,0 @@
-package sitedossier
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-	"math/rand"
-	"regexp"
-	"time"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-var reNext = regexp.MustCompile("<a href=\"([A-Za-z0-9\\/.]+)\"><b>")
-
-type agent struct {
-	results chan subscraping.Result
-	session *subscraping.Session
-}
-
-func (a *agent) enumerate(ctx context.Context, baseURL string) error {
-	for {
-		select {
-		case <-ctx.Done():
-			return nil
-		default:
-			resp, err := a.session.NormalGetWithContext(ctx, baseURL)
-			if err != nil {
-				a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
-				a.session.DiscardHttpResponse(resp)
-				close(a.results)
-				return err
-			}
-
-			body, err := ioutil.ReadAll(resp.Body)
-			if err != nil {
-				a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
-				resp.Body.Close()
-				close(a.results)
-				return err
-			}
-			resp.Body.Close()
-			src := string(body)
-
-			for _, match := range a.session.Extractor.FindAllString(src, -1) {
-				a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
-			}
-
-			match1 := reNext.FindStringSubmatch(src)
-			time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
-
-			if len(match1) > 0 {
-				a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
-			}
-			return nil
-		}
-	}
-}
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	a := agent{
-		session: session,
-		results: results,
-	}
-
-	go func() {
-		err := a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain))
-		if err == nil {
-			close(a.results)
-		}
-	}()
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "sitedossier"
-}
diff --git a/pkg/subscraping/sources/waybackarchive/waybackarchive.go b/pkg/subscraping/sources/waybackarchive/waybackarchive.go
deleted file mode 100755
index 64be137..0000000
--- a/pkg/subscraping/sources/waybackarchive/waybackarchive.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package waybackarchive
-
-import (
-	"context"
-	"fmt"
-	"io/ioutil"
-	"strings"
-
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
-)
-
-// Source is the passive scraping agent
-type Source struct{}
-
-// Run function returns all subdomains found with the service
-func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
-	results := make(chan subscraping.Result)
-
-	go func() {
-		pagesResp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=json&fl=original&collapse=urlkey", domain))
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(pagesResp)
-			close(results)
-			return
-		}
-
-		body, err := ioutil.ReadAll(pagesResp.Body)
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			pagesResp.Body.Close()
-			close(results)
-			return
-		}
-		pagesResp.Body.Close()
-
-		match := session.Extractor.FindAllString(string(body), -1)
-		for _, subdomain := range match {
-			subdomain = strings.TrimPrefix(subdomain, "25")
-			subdomain = strings.TrimPrefix(subdomain, "2F")
-
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-		}
-		close(results)
-	}()
-
-	return results
-}
-
-// Name returns the name of the source
-func (s *Source) Name() string {
-	return "waybackarchive"
-}
diff --git a/pkg/subscraping/types.go b/pkg/subscraping/types.go
deleted file mode 100755
index 4b3b9a9..0000000
--- a/pkg/subscraping/types.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package subscraping
-
-import (
-	"context"
-	"net/http"
-	"regexp"
-)
-
-// Source is an interface inherited by each passive source
-type Source interface {
-	// Run takes a domain as argument and a session object
-	// which contains the extractor for subdomains, http client
-	// and other stuff.
-	Run(context.Context, string, *Session) <-chan Result
-	// Name returns the name of the source
-	Name() string
-}
-
-// Session is the option passed to the source, an option is created
-// uniquely for eac source.
-type Session struct {
-	// Extractor is the regex for subdomains created for each domain
-	Extractor *regexp.Regexp
-	// Keys is the API keys for the application
-	Keys Keys
-	// Client is the current http client
-	Client *http.Client
-}
-
-// Keys contains the current API Keys we have in store
-type Keys struct {
-	Binaryedge           string `json:"binaryedge"`
-	CensysToken          string `json:"censysUsername"`
-	CensysSecret         string `json:"censysPassword"`
-	Certspotter          string `json:"certspotter"`
-	Chaos                string `json:"chaos"`
-	DNSDB                string `json:"dnsdb"`
-	GitHub               []string `json:"github"`
-	IntelXHost           string `json:"intelXHost"`
-	IntelXKey            string `json:"intelXKey"`
-	PassiveTotalUsername string `json:"passivetotal_username"`
-	PassiveTotalPassword string `json:"passivetotal_password"`
-	Securitytrails       string `json:"securitytrails"`
-	Shodan               string `json:"shodan"`
-	Spyse                string `json:"spyse"`
-	URLScan              string `json:"urlscan"`
-	Virustotal           string `json:"virustotal"`
-	ZoomEyeUsername      string `json:"zoomeye_username"`
-	ZoomEyePassword      string `json:"zoomeye_password"`
-}
-
-// Result is a result structure returned by a source
-type Result struct {
-	Type   ResultType
-	Source string
-	Value  string
-	Error  error
-}
-
-// ResultType is the type of result returned by the source
-type ResultType int
-
-// Types of results returned by the source
-const (
-	Subdomain ResultType = iota
-	Error
-)
diff --git a/cmd/subfinder/main.go b/v2/cmd/subfinder/main.go
similarity index 53%
rename from cmd/subfinder/main.go
rename to v2/cmd/subfinder/main.go
index 7d84d89..d955d51 100644
--- a/cmd/subfinder/main.go
+++ b/v2/cmd/subfinder/main.go
@@ -1,20 +1,24 @@
 package main
 
 import (
+	"context"
+
+	// Attempts to increase the OS file descriptors - Fail silently
+	_ "github.com/projectdiscovery/fdmax/autofdmax"
 	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/subfinder/pkg/runner"
+	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
 )
 
 func main() {
 	// Parse the command line flags and read config files
 	options := runner.ParseOptions()
 
-	runner, err := runner.NewRunner(options)
+	newRunner, err := runner.NewRunner(options)
 	if err != nil {
 		gologger.Fatalf("Could not create runner: %s\n", err)
 	}
 
-	err = runner.RunEnumeration()
+	err = newRunner.RunEnumeration(context.Background())
 	if err != nil {
 		gologger.Fatalf("Could not run enumeration: %s\n", err)
 	}
diff --git a/v2/go.mod b/v2/go.mod
new file mode 100644
index 0000000..866e956
--- /dev/null
+++ b/v2/go.mod
@@ -0,0 +1,23 @@
+module github.com/projectdiscovery/subfinder/v2
+
+go 1.14
+
+require (
+	github.com/hako/durafmt v0.0.0-20200710122514-c0fb7b4da026
+	github.com/json-iterator/go v1.1.10
+	github.com/lib/pq v1.8.0
+	github.com/logrusorgru/aurora v2.0.3+incompatible // indirect
+	github.com/miekg/dns v1.1.31
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.1 // indirect
+	github.com/pkg/errors v0.9.1
+	github.com/projectdiscovery/chaos-client v0.1.6
+	github.com/projectdiscovery/fdmax v0.0.2
+	github.com/projectdiscovery/gologger v1.0.1
+	github.com/rs/xid v1.2.1
+	github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
+	golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a // indirect
+	golang.org/x/net v0.0.0-20200925080053-05aa5d4ee321 // indirect
+	golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d // indirect
+	gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776
+)
diff --git a/v2/go.sum b/v2/go.sum
new file mode 100644
index 0000000..502812a
--- /dev/null
+++ b/v2/go.sum
@@ -0,0 +1,68 @@
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/hako/durafmt v0.0.0-20200710122514-c0fb7b4da026 h1:BpJ2o0OR5FV7vrkDYfXYVJQeMNWa8RhklZOpW2ITAIQ=
+github.com/hako/durafmt v0.0.0-20200710122514-c0fb7b4da026/go.mod h1:5Scbynm8dF1XAPwIwkGPqzkM/shndPm79Jd1003hTjE=
+github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
+github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/lib/pq v1.8.0 h1:9xohqzkUwzR4Ga4ivdTcawVS89YSDVxXMa3xJX3cGzg=
+github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
+github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 h1:bqDmpDG49ZRnB5PcgP0RXtQvnMSgIF14M7CBd2shtXs=
+github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
+github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8=
+github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
+github.com/miekg/dns v1.1.31 h1:sJFOl9BgwbYAWOGEwr61FU28pqsBNdpRBnhGXtO06Oo=
+github.com/miekg/dns v1.1.31/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
+github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/projectdiscovery/chaos-client v0.1.6 h1:AbIN7xUszjUi7FxI4qUVSqJ3um+6eImE/xstbNS0A1M=
+github.com/projectdiscovery/chaos-client v0.1.6/go.mod h1:F5omaoJh/vMvWnZhKD4zFFA5ti+RPwUletwepKSyfxk=
+github.com/projectdiscovery/fdmax v0.0.2 h1:d0HqNC4kbrMWT669u9W7ksFS7UBvnW0zmgY6FBU45UY=
+github.com/projectdiscovery/fdmax v0.0.2/go.mod h1:mbR7lJ9EONyxEfcsL2LlGtOSlzCQ5VraLzoJa/VTrAs=
+github.com/projectdiscovery/gologger v1.0.0/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
+github.com/projectdiscovery/gologger v1.0.1 h1:FzoYQZnxz9DCvSi/eg5A6+ET4CQ0CDUs27l6Exr8zMQ=
+github.com/projectdiscovery/gologger v1.0.1/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
+github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
+github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
+github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550 h1:ObdrDkeb4kJdCP557AjRjq69pTHfNouLtWZG7j9rPN8=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a h1:vclmkQCjlDX5OydZ9wv8rBCcS0QyQY66Mpf/7BZbInM=
+golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190923162816-aa69164e4478 h1:l5EDrHhldLYb3ZRHDUhXF7Om7MvYXnkV9/iQNo1lX6g=
+golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200925080053-05aa5d4ee321 h1:lleNcKRbcaC8MqgLwghIkzZ2JBQAb7QQ9MiwRt1BisA=
+golang.org/x/net v0.0.0-20200925080053-05aa5d4ee321/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe h1:6fAMxZRR6sl1Uq8U61gxU+kPTs2tR8uOySCbBP7BN/M=
+golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d h1:L/IKR6COd7ubZrs2oTnTi73IhgqJ71c9s80WsQnh0Es=
+golang.org/x/sys v0.0.0-20200923182605-d9f96fdee20d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/pkg/passive/doc.go b/v2/pkg/passive/doc.go
similarity index 99%
rename from pkg/passive/doc.go
rename to v2/pkg/passive/doc.go
index 0d7ea64..022a55a 100644
--- a/pkg/passive/doc.go
+++ b/v2/pkg/passive/doc.go
@@ -1,4 +1,3 @@
 // Package passive provides capability for doing passive subdomain
 // enumeration on targets.
 package passive
-
diff --git a/pkg/passive/passive.go b/v2/pkg/passive/passive.go
similarity index 83%
rename from pkg/passive/passive.go
rename to v2/pkg/passive/passive.go
index 6881ab1..d4be059 100644
--- a/pkg/passive/passive.go
+++ b/v2/pkg/passive/passive.go
@@ -7,11 +7,11 @@ import (
 	"time"
 
 	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // EnumerateSubdomains enumerates all the subdomains for a given domain
-func (a *Agent) EnumerateSubdomains(domain string, keys subscraping.Keys, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
+func (a *Agent) EnumerateSubdomains(domain string, keys *subscraping.Keys, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
 	results := make(chan subscraping.Result)
 
 	go func() {
@@ -36,7 +36,7 @@ func (a *Agent) EnumerateSubdomains(domain string, keys subscraping.Keys, timeou
 					results <- resp
 				}
 
-				duration := time.Now().Sub(now)
+				duration := time.Since(now)
 				timeTakenMutex.Lock()
 				timeTaken[source] = fmt.Sprintf("Source took %s for enumeration\n", duration)
 				timeTakenMutex.Unlock()
diff --git a/v2/pkg/passive/sources.go b/v2/pkg/passive/sources.go
new file mode 100644
index 0000000..78b6cab
--- /dev/null
+++ b/v2/pkg/passive/sources.go
@@ -0,0 +1,228 @@
+package passive
+
+import (
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/alienvault"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/anubis"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/archiveis"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/binaryedge"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/bufferover"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/cebaidu"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/censys"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/certspotter"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/certspotterold"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/chaos"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/commoncrawl"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/crtsh"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/dnsdb"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/dnsdumpster"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/github"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hackertarget"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/intelx"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/ipv4info"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/passivetotal"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/rapiddns"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/recon"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/riddler"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/robtex"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/securitytrails"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/shodan"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/sitedossier"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/spyse"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/sublist3r"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/threatbook"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/threatcrowd"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/threatminer"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/virustotal"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/waybackarchive"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/ximcx"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/zoomeye"
+)
+
+// DefaultSources contains the list of fast sources used by default.
+var DefaultSources = []string{
+	"alienvault",
+	"anubis",
+	"bufferover",
+	"cebaidu",
+	"certspotter",
+	"certspotterold",
+	"censys",
+	"chaos",
+	"crtsh",
+	"dnsdumpster",
+	"hackertarget",
+	"intelx",
+	"ipv4info",
+	"passivetotal",
+	"robtex",
+	"riddler",
+	"securitytrails",
+	"shodan",
+	"spyse",
+	"sublist3r",
+	"threatcrowd",
+	"threatminer",
+	"virustotal",
+}
+
+// DefaultRecursiveSources contains list of default recursive sources
+var DefaultRecursiveSources = []string{
+	"alienvault",
+	"binaryedge",
+	"bufferover",
+	"cebaidu",
+	"certspotter",
+	"certspotterold",
+	"crtsh",
+	"dnsdumpster",
+	"hackertarget",
+	"ipv4info",
+	"passivetotal",
+	"securitytrails",
+	"sublist3r",
+	"virustotal",
+	"ximcx",
+}
+
+// DefaultAllSources contains list of all sources
+var DefaultAllSources = []string{
+	"alienvault",
+	"anubis",
+	"archiveis",
+	"binaryedge",
+	"bufferover",
+	"cebaidu",
+	"censys",
+	"certspotter",
+	"certspotterold",
+	"chaos",
+	"commoncrawl",
+	"crtsh",
+	"dnsdumpster",
+	"dnsdb",
+	"github",
+	"hackertarget",
+	"ipv4info",
+	"intelx",
+	"passivetotal",
+	"rapiddns",
+	"riddler",
+	"recon",
+	"robtex",
+	"securitytrails",
+	"shodan",
+	"sitedossier",
+	"spyse",
+	"sublist3r",
+	"threatbook",
+	"threatcrowd",
+	"threatminer",
+	"virustotal",
+	"waybackarchive",
+	"ximcx",
+	"zoomeye",
+}
+
+// Agent is a struct for running passive subdomain enumeration
+// against a given host. It wraps subscraping package and provides
+// a layer to build upon.
+type Agent struct {
+	sources map[string]subscraping.Source
+}
+
+// New creates a new agent for passive subdomain discovery
+func New(sources, exclusions []string) *Agent {
+	// Create the agent, insert the sources and remove the excluded sources
+	agent := &Agent{sources: make(map[string]subscraping.Source)}
+
+	agent.addSources(sources)
+	agent.removeSources(exclusions)
+
+	return agent
+}
+
+// addSources adds the given list of sources to the source array
+func (a *Agent) addSources(sources []string) {
+	for _, source := range sources {
+		switch source {
+		case "alienvault":
+			a.sources[source] = &alienvault.Source{}
+		case "anubis":
+			a.sources[source] = &anubis.Source{}
+		case "archiveis":
+			a.sources[source] = &archiveis.Source{}
+		case "binaryedge":
+			a.sources[source] = &binaryedge.Source{}
+		case "bufferover":
+			a.sources[source] = &bufferover.Source{}
+		case "cebaidu":
+			a.sources[source] = &cebaidu.Source{}
+		case "censys":
+			a.sources[source] = &censys.Source{}
+		case "certspotter":
+			a.sources[source] = &certspotter.Source{}
+		case "certspotterold":
+			a.sources[source] = &certspotterold.Source{}
+		case "chaos":
+			a.sources[source] = &chaos.Source{}
+		case "commoncrawl":
+			a.sources[source] = &commoncrawl.Source{}
+		case "crtsh":
+			a.sources[source] = &crtsh.Source{}
+		case "dnsdumpster":
+			a.sources[source] = &dnsdumpster.Source{}
+		case "dnsdb":
+			a.sources[source] = &dnsdb.Source{}
+		case "github":
+			a.sources[source] = &github.Source{}
+		case "hackertarget":
+			a.sources[source] = &hackertarget.Source{}
+		case "ipv4info":
+			a.sources[source] = &ipv4info.Source{}
+		case "intelx":
+			a.sources[source] = &intelx.Source{}
+		case "passivetotal":
+			a.sources[source] = &passivetotal.Source{}
+		case "rapiddns":
+			a.sources[source] = &rapiddns.Source{}
+		case "recon":
+			a.sources[source] = &recon.Source{}
+		case "riddler":
+			a.sources[source] = &riddler.Source{}
+		case "robtex":
+			a.sources[source] = &robtex.Source{}
+		case "securitytrails":
+			a.sources[source] = &securitytrails.Source{}
+		case "shodan":
+			a.sources[source] = &shodan.Source{}
+		case "sitedossier":
+			a.sources[source] = &sitedossier.Source{}
+		case "spyse":
+			a.sources[source] = &spyse.Source{}
+		case "sublist3r":
+			a.sources[source] = &sublist3r.Source{}
+		case "threatbook":
+			a.sources[source] = &threatbook.Source{}
+		case "threatcrowd":
+			a.sources[source] = &threatcrowd.Source{}
+		case "threatminer":
+			a.sources[source] = &threatminer.Source{}
+		case "virustotal":
+			a.sources[source] = &virustotal.Source{}
+		case "waybackarchive":
+			a.sources[source] = &waybackarchive.Source{}
+		case "ximcx":
+			a.sources[source] = &ximcx.Source{}
+		case "zoomeye":
+			a.sources[source] = &zoomeye.Source{}
+		}
+	}
+}
+
+// removeSources deletes the given sources from the source map
+func (a *Agent) removeSources(sources []string) {
+	for _, source := range sources {
+		delete(a.sources, source)
+	}
+}
diff --git a/pkg/resolve/client.go b/v2/pkg/resolve/client.go
similarity index 100%
rename from pkg/resolve/client.go
rename to v2/pkg/resolve/client.go
diff --git a/pkg/resolve/doc.go b/v2/pkg/resolve/doc.go
similarity index 100%
rename from pkg/resolve/doc.go
rename to v2/pkg/resolve/doc.go
diff --git a/pkg/resolve/resolve.go b/v2/pkg/resolve/resolve.go
similarity index 76%
rename from pkg/resolve/resolve.go
rename to v2/pkg/resolve/resolve.go
index b935202..e33785a 100644
--- a/pkg/resolve/resolve.go
+++ b/v2/pkg/resolve/resolve.go
@@ -16,7 +16,7 @@ const (
 // for a given host.
 type ResolutionPool struct {
 	*Resolver
-	Tasks          chan string
+	Tasks          chan HostEntry
 	Results        chan Result
 	wg             *sync.WaitGroup
 	removeWildcard bool
@@ -24,12 +24,19 @@ type ResolutionPool struct {
 	wildcardIPs map[string]struct{}
 }
 
+// HostEntry pairs a discovered host with the name of the source that found it
+type HostEntry struct {
+	Host   string `json:"host"`
+	Source string `json:"source"`
+}
+
 // Result contains the result for a host resolution
 type Result struct {
-	Type  ResultType
-	Host  string
-	IP    string
-	Error error
+	Type   ResultType
+	Host   string
+	IP     string
+	Error  error
+	Source string
 }
 
 // ResultType is the type of result found
@@ -45,7 +52,7 @@ const (
 func (r *Resolver) NewResolutionPool(workers int, removeWildcard bool) *ResolutionPool {
 	resolutionPool := &ResolutionPool{
 		Resolver:       r,
-		Tasks:          make(chan string),
+		Tasks:          make(chan HostEntry),
 		Results:        make(chan Result),
 		wg:             &sync.WaitGroup{},
 		removeWildcard: removeWildcard,
@@ -69,7 +76,7 @@ func (r *ResolutionPool) InitWildcards(domain string) error {
 	for i := 0; i < maxWildcardChecks; i++ {
 		uid := xid.New().String()
 
-		hosts, err := r.getARecords(uid + "." + domain)
+		hosts, err := r.getARecords(HostEntry{Host: uid + "." + domain})
 		if err != nil {
 			return err
 		}
@@ -85,13 +92,13 @@ func (r *ResolutionPool) InitWildcards(domain string) error {
 func (r *ResolutionPool) resolveWorker() {
 	for task := range r.Tasks {
 		if !r.removeWildcard {
-			r.Results <- Result{Type: Subdomain, Host: task, IP: ""}
+			r.Results <- Result{Type: Subdomain, Host: task.Host, IP: "", Source: task.Source}
 			continue
 		}
 
 		hosts, err := r.getARecords(task)
 		if err != nil {
-			r.Results <- Result{Type: Error, Error: err}
+			r.Results <- Result{Type: Error, Host: task.Host, Source: task.Source, Error: err}
 			continue
 		}
 
@@ -101,18 +108,18 @@ func (r *ResolutionPool) resolveWorker() {
 
 		for _, host := range hosts {
 			// Ignore the host if it exists in wildcard ips map
-			if _, ok := r.wildcardIPs[host]; ok {
+			if _, ok := r.wildcardIPs[host]; ok { //nolint:staticcheck //search alternatives for "comma ok"
 				continue
 			}
 		}
 
-		r.Results <- Result{Type: Subdomain, Host: task, IP: hosts[0]}
+		r.Results <- Result{Type: Subdomain, Host: task.Host, IP: hosts[0], Source: task.Source}
 	}
 	r.wg.Done()
 }
 
 // getARecords gets all the A records for a given host
-func (r *ResolutionPool) getARecords(host string) ([]string, error) {
+func (r *ResolutionPool) getARecords(hostEntry HostEntry) ([]string, error) {
 	var iteration int
 
 	m := new(dns.Msg)
@@ -120,7 +127,7 @@ func (r *ResolutionPool) getARecords(host string) ([]string, error) {
 	m.RecursionDesired = true
 	m.Question = make([]dns.Question, 1)
 	m.Question[0] = dns.Question{
-		Name:   dns.Fqdn(host),
+		Name:   dns.Fqdn(hostEntry.Host),
 		Qtype:  dns.TypeA,
 		Qclass: dns.ClassINET,
 	}
diff --git a/pkg/runner/banners.go b/v2/pkg/runner/banners.go
similarity index 60%
rename from pkg/runner/banners.go
rename to v2/pkg/runner/banners.go
index fffc8f0..b7ff55e 100644
--- a/pkg/runner/banners.go
+++ b/v2/pkg/runner/banners.go
@@ -2,19 +2,19 @@ package runner
 
 import (
 	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/subfinder/pkg/passive"
-	"github.com/projectdiscovery/subfinder/pkg/resolve"
+	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
+	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
 )
 
 const banner = `
-        _     __ _         _         
-____  _| |__ / _(_)_ _  __| |___ _ _ 
+        _     __ _         _
+____  _| |__ / _(_)_ _  __| |___ _ _
 (_-< || | '_ \  _| | ' \/ _  / -_) '_|
-/__/\_,_|_.__/_| |_|_||_\__,_\___|_| v2
+/__/\_,_|_.__/_| |_|_||_\__,_\___|_| v2.4.5
 `
 
 // Version is the current version of subfinder
-const Version = `2.3.8`
+const Version = `2.4.5`
 
 // showBanner is used to show the banner to the user
 func showBanner() {
@@ -32,6 +32,20 @@ func (options *Options) normalRunTasks() {
 	if err != nil {
 		gologger.Fatalf("Could not read configuration file %s: %s\n", options.ConfigFile, err)
 	}
+
+	// If the config file on disk was written by a different subfinder
+	// version, refresh its source lists and version stamp in place.
+	if configFile.Version != Version {
+		configFile.Sources = passive.DefaultSources
+		configFile.AllSources = passive.DefaultAllSources
+		configFile.Recursive = passive.DefaultRecursiveSources
+		configFile.Version = Version
+
+		err = configFile.MarshalWrite(options.ConfigFile)
+		if err != nil {
+			gologger.Fatalf("Could not update configuration file to %s: %s\n", options.ConfigFile, err)
+		}
+	}
 	options.YAMLConfig = configFile
 }
 
@@ -41,10 +55,14 @@ func (options *Options) firstRunTasks() {
 	// Create the configuration file and display information
 	// about it to the user.
 	config := ConfigFile{
-		// Use the default list of resolvers by marshalling it to the config
+		// Use the default list of resolvers by marshaling it to the config
 		Resolvers: resolve.DefaultResolvers,
 		// Use the default list of passive sources
 		Sources: passive.DefaultSources,
+		// Use the default list of all passive sources
+		AllSources: passive.DefaultAllSources,
+		// Use the default list of recursive sources
+		Recursive: passive.DefaultRecursiveSources,
 	}
 
 	err := config.MarshalWrite(options.ConfigFile)
diff --git a/v2/pkg/runner/chaosuploader.go b/v2/pkg/runner/chaosuploader.go
new file mode 100644
index 0000000..eab1e17
--- /dev/null
+++ b/v2/pkg/runner/chaosuploader.go
@@ -0,0 +1,55 @@
+package runner
+
+import (
+	"context"
+	"crypto/tls"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+	"time"
+
+	"github.com/pkg/errors"
+	"github.com/projectdiscovery/gologger"
+)
+
+// UploadToChaosTimeoutNano is the upload timeout in seconds (600s = 10 minutes); the Nano suffix is misleading
+const UploadToChaosTimeoutNano = 600
+
+// UploadToChaos upload new data to Chaos dataset
+func (r *Runner) UploadToChaos(ctx context.Context, reader io.Reader) error {
+	httpClient := &http.Client{
+		Transport: &http.Transport{
+			MaxIdleConnsPerHost: 100,
+			MaxIdleConns:        100,
+			TLSClientConfig: &tls.Config{
+				InsecureSkipVerify: true,
+			},
+		},
+		Timeout: time.Duration(UploadToChaosTimeoutNano) * time.Second, // 10 minutes - uploads may take long
+	}
+
+	request, err := http.NewRequestWithContext(ctx, http.MethodPost, "https://dns.projectdiscovery.io/dns/add", reader)
+	if err != nil {
+		return errors.Wrap(err, "could not create request")
+	}
+	request.Header.Set("Authorization", r.options.YAMLConfig.GetKeys().Chaos)
+
+	resp, err := httpClient.Do(request)
+	if err != nil {
+		return errors.Wrap(err, "could not make request")
+	}
+	defer func() {
+		_, err := io.Copy(ioutil.Discard, resp.Body)
+		if err != nil {
+			gologger.Warningf("Could not discard response body: %s\n", err)
+			return
+		}
+		resp.Body.Close()
+	}()
+
+	if resp.StatusCode != http.StatusOK {
+		return fmt.Errorf("invalid status code received: %d", resp.StatusCode)
+	}
+	return nil
+}
diff --git a/pkg/runner/config.go b/v2/pkg/runner/config.go
similarity index 73%
rename from pkg/runner/config.go
rename to v2/pkg/runner/config.go
index fa693ff..81ebe40 100644
--- a/pkg/runner/config.go
+++ b/v2/pkg/runner/config.go
@@ -6,16 +6,26 @@ import (
 	"strings"
 	"time"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 	"gopkg.in/yaml.v3"
 )
 
+// MultipleKeyPartsLength is the number of colon-separated parts expected in two-part API keys (e.g. user:pass)
+const MultipleKeyPartsLength = 2
+
+// YAMLIndentCharLength is the number of chars used for indentation when writing YAML to file
+const YAMLIndentCharLength = 4
+
 // ConfigFile contains the fields stored in the configuration file
 type ConfigFile struct {
 	// Resolvers contains the list of resolvers to use while resolving
 	Resolvers []string `yaml:"resolvers,omitempty"`
 	// Sources contains a list of sources to use for enumeration
 	Sources []string `yaml:"sources,omitempty"`
+	// AllSources contains the list of all sources for enumeration (slow)
+	AllSources []string `yaml:"all-sources,omitempty"`
+	// Recursive contains the list of recursive subdomain enum sources
+	Recursive []string `yaml:"recursive,omitempty"`
 	// ExcludeSources contains the sources to not include in the enumeration process
 	ExcludeSources []string `yaml:"exclude-sources,omitempty"`
 	// API keys for different sources
@@ -27,12 +37,17 @@ type ConfigFile struct {
 	GitHub         []string `yaml:"github"`
 	IntelX         []string `yaml:"intelx"`
 	PassiveTotal   []string `yaml:"passivetotal"`
+	Recon          []string `yaml:"recon"`
+	Robtex         []string `yaml:"robtex"`
 	SecurityTrails []string `yaml:"securitytrails"`
 	Shodan         []string `yaml:"shodan"`
 	Spyse          []string `yaml:"spyse"`
+	ThreatBook     []string `yaml:"threatbook"`
 	URLScan        []string `yaml:"urlscan"`
 	Virustotal     []string `yaml:"virustotal"`
 	ZoomEye        []string `yaml:"zoomeye"`
+	// Version indicates the version of subfinder installed.
+	Version string `yaml:"subfinder-version"`
 }
 
 // GetConfigDirectory gets the subfinder config directory for a user
@@ -47,8 +62,12 @@ func GetConfigDirectory() (string, error) {
 		return config, err
 	}
 	config = directory + "/.config/subfinder"
+
 	// Create All directory for subfinder even if they exist
-	os.MkdirAll(config, os.ModePerm)
+	err = os.MkdirAll(config, os.ModePerm)
+	if err != nil {
+		return config, err
+	}
 
 	return config, nil
 }
@@ -63,16 +82,16 @@ func CheckConfigExists(configPath string) bool {
 	return false
 }
 
-// MarshalWrite writes the marshalled yaml config to disk
-func (c ConfigFile) MarshalWrite(file string) error {
-	f, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE, 0755)
+// MarshalWrite writes the marshaled yaml config to disk
+func (c *ConfigFile) MarshalWrite(file string) error {
+	f, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
 	if err != nil {
 		return err
 	}
 
 	// Indent the spaces too
 	enc := yaml.NewEncoder(f)
-	enc.SetIndent(4)
+	enc.SetIndent(YAMLIndentCharLength)
 	err = enc.Encode(&c)
 	f.Close()
 	return err
@@ -94,7 +113,7 @@ func UnmarshalRead(file string) (ConfigFile, error) {
 // GetKeys gets the API keys from config file and creates a Keys struct
 // We use random selection of api keys from the list of keys supplied.
 // Keys that require 2 options are separated by colon (:).
-func (c ConfigFile) GetKeys() subscraping.Keys {
+func (c *ConfigFile) GetKeys() subscraping.Keys {
 	keys := subscraping.Keys{}
 
 	if len(c.Binaryedge) > 0 {
@@ -104,7 +123,7 @@ func (c ConfigFile) GetKeys() subscraping.Keys {
 	if len(c.Censys) > 0 {
 		censysKeys := c.Censys[rand.Intn(len(c.Censys))]
 		parts := strings.Split(censysKeys, ":")
-		if len(parts) == 2 {
+		if len(parts) == MultipleKeyPartsLength {
 			keys.CensysToken = parts[0]
 			keys.CensysSecret = parts[1]
 		}
@@ -126,7 +145,7 @@ func (c ConfigFile) GetKeys() subscraping.Keys {
 	if len(c.IntelX) > 0 {
 		intelxKeys := c.IntelX[rand.Intn(len(c.IntelX))]
 		parts := strings.Split(intelxKeys, ":")
-		if len(parts) == 2 {
+		if len(parts) == MultipleKeyPartsLength {
 			keys.IntelXHost = parts[0]
 			keys.IntelXKey = parts[1]
 		}
@@ -135,12 +154,20 @@ func (c ConfigFile) GetKeys() subscraping.Keys {
 	if len(c.PassiveTotal) > 0 {
 		passiveTotalKeys := c.PassiveTotal[rand.Intn(len(c.PassiveTotal))]
 		parts := strings.Split(passiveTotalKeys, ":")
-		if len(parts) == 2 {
+		if len(parts) == MultipleKeyPartsLength {
 			keys.PassiveTotalUsername = parts[0]
 			keys.PassiveTotalPassword = parts[1]
 		}
 	}
 
+	if len(c.Recon) > 0 {
+		keys.Recon = c.Recon[rand.Intn(len(c.Recon))]
+	}
+
+	if len(c.Robtex) > 0 {
+		keys.Robtex = c.Robtex[rand.Intn(len(c.Robtex))]
+	}
+
 	if len(c.SecurityTrails) > 0 {
 		keys.Securitytrails = c.SecurityTrails[rand.Intn(len(c.SecurityTrails))]
 	}
@@ -150,6 +177,9 @@ func (c ConfigFile) GetKeys() subscraping.Keys {
 	if len(c.Spyse) > 0 {
 		keys.Spyse = c.Spyse[rand.Intn(len(c.Spyse))]
 	}
+	if len(c.ThreatBook) > 0 {
+		keys.ThreatBook = c.ThreatBook[rand.Intn(len(c.ThreatBook))]
+	}
 	if len(c.URLScan) > 0 {
 		keys.URLScan = c.URLScan[rand.Intn(len(c.URLScan))]
 	}
@@ -159,7 +189,7 @@ func (c ConfigFile) GetKeys() subscraping.Keys {
 	if len(c.ZoomEye) > 0 {
 		zoomEyeKeys := c.ZoomEye[rand.Intn(len(c.ZoomEye))]
 		parts := strings.Split(zoomEyeKeys, ":")
-		if len(parts) == 2 {
+		if len(parts) == MultipleKeyPartsLength {
 			keys.ZoomEyeUsername = parts[0]
 			keys.ZoomEyePassword = parts[1]
 		}
diff --git a/pkg/runner/config_test.go b/v2/pkg/runner/config_test.go
similarity index 100%
rename from pkg/runner/config_test.go
rename to v2/pkg/runner/config_test.go
diff --git a/pkg/runner/doc.go b/v2/pkg/runner/doc.go
similarity index 100%
rename from pkg/runner/doc.go
rename to v2/pkg/runner/doc.go
diff --git a/pkg/runner/enumerate.go b/v2/pkg/runner/enumerate.go
similarity index 68%
rename from pkg/runner/enumerate.go
rename to v2/pkg/runner/enumerate.go
index 91794c6..ffd2e32 100644
--- a/pkg/runner/enumerate.go
+++ b/v2/pkg/runner/enumerate.go
@@ -2,18 +2,22 @@ package runner
 
 import (
 	"bytes"
+	"context"
 	"os"
 	"strings"
 	"sync"
 	"time"
 
+	"github.com/hako/durafmt"
 	"github.com/projectdiscovery/gologger"
-	"github.com/projectdiscovery/subfinder/pkg/resolve"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
+const maxNumCount = 2
+
 // EnumerateSingleDomain performs subdomain enumeration against a single domain
-func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error {
+func (r *Runner) EnumerateSingleDomain(ctx context.Context, domain, output string, appendToFile bool) error {
 	gologger.Infof("Enumerating subdomains for %s\n", domain)
 
 	// Get the API keys for sources from the configuration
@@ -33,12 +37,13 @@ func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error
 	}
 
 	// Run the passive subdomain enumeration
-	passiveResults := r.passiveAgent.EnumerateSubdomains(domain, keys, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)
+	now := time.Now()
+	passiveResults := r.passiveAgent.EnumerateSubdomains(domain, &keys, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)
 
 	wg := &sync.WaitGroup{}
 	wg.Add(1)
 	// Create a unique map for filtering duplicate subdomains out
-	uniqueMap := make(map[string]struct{})
+	uniqueMap := make(map[string]resolve.HostEntry)
 	// Process the results in a separate goroutine
 	go func() {
 		for result := range passiveResults {
@@ -57,7 +62,10 @@ func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error
 				if _, ok := uniqueMap[subdomain]; ok {
 					continue
 				}
-				uniqueMap[subdomain] = struct{}{}
+
+				hostEntry := resolve.HostEntry{Host: subdomain, Source: result.Source}
+
+				uniqueMap[subdomain] = hostEntry
 
 				// Log the verbose message about the found subdomain and send the
 				// host for resolution to the resolution pool
@@ -67,11 +75,7 @@ func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error
 				// queue. Otherwise, if mode is not verbose print the results on
 				// the screen as they are discovered.
 				if r.options.RemoveWildcard {
-					resolutionPool.Tasks <- subdomain
-				}
-
-				if !r.options.Verbose {
-					gologger.Silentf("%s\n", subdomain)
+					resolutionPool.Tasks <- hostEntry
 				}
 			}
 		}
@@ -84,7 +88,7 @@ func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error
 
 	// If the user asked to remove wildcards, listen from the results
 	// queue and write to the map. At the end, print the found results to the screen
-	foundResults := make(map[string]string)
+	foundResults := make(map[string]resolve.Result)
 	if r.options.RemoveWildcard {
 		// Process the results coming from the resolutions pool
 		for result := range resolutionPool.Results {
@@ -94,87 +98,83 @@ func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error
 			case resolve.Subdomain:
 				// Add the found subdomain to a map.
 				if _, ok := foundResults[result.Host]; !ok {
-					foundResults[result.Host] = result.IP
+					foundResults[result.Host] = result
 				}
 			}
 		}
 	}
 	wg.Wait()
 
+	outputter := NewOutputter(r.options.JSON)
+
 	// If verbose mode was used, then now print all the
 	// found subdomains on the screen together.
-	if r.options.Verbose {
+	var err error
+	if r.options.HostIP {
+		err = outputter.WriteHostIP(foundResults, os.Stdout)
+	} else {
 		if r.options.RemoveWildcard {
-			for result := range foundResults {
-				gologger.Silentf("%s\n", result)
-			}
+			err = outputter.WriteHostNoWildcard(foundResults, os.Stdout)
 		} else {
-			for result := range uniqueMap {
-				gologger.Silentf("%s\n", result)
-			}
+			err = outputter.WriteHost(uniqueMap, os.Stdout)
 		}
 	}
+	if err != nil {
+		gologger.Errorf("Could not verbose results for %s: %s\n", domain, err)
+		return err
+	}
+
+	// Show found subdomain count in any case.
+	duration := durafmt.Parse(time.Since(now)).LimitFirstN(maxNumCount).String()
+	if r.options.RemoveWildcard {
+		gologger.Infof("Found %d subdomains for %s in %s\n", len(foundResults), domain, duration)
+	} else {
+		gologger.Infof("Found %d subdomains for %s in %s\n", len(uniqueMap), domain, duration)
+	}
+
 	// In case the user has specified to upload to chaos, write everything to a temporary buffer and upload
 	if r.options.ChaosUpload {
 		var buf = &bytes.Buffer{}
-		err := WriteHostOutput(uniqueMap, buf)
-		// If an error occurs, do not interrupt, continue to check if user specifed an output file
+		err := outputter.WriteForChaos(uniqueMap, buf)
+		// If an error occurs, do not interrupt, continue to check if user specified an output file
 		if err != nil {
 			gologger.Errorf("Could not prepare results for chaos %s\n", err)
 		} else {
 			// no error in writing host output, upload to chaos
-			err = r.UploadToChaos(buf)
+			err = r.UploadToChaos(ctx, buf)
 			if err != nil {
 				gologger.Errorf("Could not upload results to chaos %s\n", err)
 			} else {
 				gologger.Infof("Input processed successfully and subdomains with valid records will be updated to chaos dataset.\n")
 			}
 			// clear buffer
-			buf = nil
+			buf.Reset()
 		}
 	}
-	// In case the user has given an output file, write all the found
-	// subdomains to the output file.
-	if output != "" {
-		// If the output format is json, append .json
-		// else append .txt
-		if r.options.OutputDirectory != "" {
-			if r.options.JSON {
-				output = output + ".json"
-			} else {
-				output = output + ".txt"
-			}
-		}
 
-		var file *os.File
-		var err error
-		if append {
-			file, err = os.OpenFile(output, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
-		} else {
-			file, err = os.Create(output)
-		}
+	if output != "" {
+		file, err := outputter.createFile(output, appendToFile)
 		if err != nil {
 			gologger.Errorf("Could not create file %s for %s: %s\n", output, domain, err)
 			return err
 		}
 
-		// Write the output to the file depending upon user requirement
+		defer file.Close()
+
 		if r.options.HostIP {
-			err = WriteHostIPOutput(foundResults, file)
-		} else if r.options.JSON {
-			err = WriteJSONOutput(foundResults, file)
+			err = outputter.WriteHostIP(foundResults, file)
 		} else {
 			if r.options.RemoveWildcard {
-				err = WriteHostOutputNoWildcard(foundResults, file)
+				err = outputter.WriteHostNoWildcard(foundResults, file)
 			} else {
-				err = WriteHostOutput(uniqueMap, file)
+				err = outputter.WriteHost(uniqueMap, file)
 			}
 		}
 		if err != nil {
 			gologger.Errorf("Could not write results to file %s for %s: %s\n", output, domain, err)
+			return err
 		}
-		file.Close()
-		return err
 	}
+
 	return nil
 }
diff --git a/pkg/runner/initialize.go b/v2/pkg/runner/initialize.go
similarity index 62%
rename from pkg/runner/initialize.go
rename to v2/pkg/runner/initialize.go
index fbd693a..69a5a4d 100644
--- a/pkg/runner/initialize.go
+++ b/v2/pkg/runner/initialize.go
@@ -3,28 +3,39 @@ package runner
 import (
 	"strings"
 
-	"github.com/projectdiscovery/subfinder/pkg/passive"
-	"github.com/projectdiscovery/subfinder/pkg/resolve"
+	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
+	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
 )
 
 // initializePassiveEngine creates the passive engine and loads sources etc
 func (r *Runner) initializePassiveEngine() {
 	var sources, exclusions []string
 
-	// If there are any sources from CLI, only use them
-	// Otherwise, use the yaml file sources
-	if r.options.Sources != "" {
-		sources = append(sources, strings.Split(r.options.Sources, ",")...)
-	} else {
-		sources = append(sources, r.options.YAMLConfig.Sources...)
-	}
-
 	if r.options.ExcludeSources != "" {
 		exclusions = append(exclusions, strings.Split(r.options.ExcludeSources, ",")...)
 	} else {
 		exclusions = append(exclusions, r.options.YAMLConfig.ExcludeSources...)
 	}
 
+	// Use all sources if asked by the user
+	if r.options.All {
+		sources = append(sources, r.options.YAMLConfig.AllSources...)
+	}
+
+	// If only recursive sources are wanted, use them only.
+	if r.options.Recursive {
+		sources = append(sources, r.options.YAMLConfig.Recursive...)
+	}
+
+	// If there are any sources from CLI, only use them
+	// Otherwise, use the yaml file sources
+	if !r.options.All && !r.options.Recursive {
+		if r.options.Sources != "" {
+			sources = append(sources, strings.Split(r.options.Sources, ",")...)
+		} else {
+			sources = append(sources, r.options.YAMLConfig.Sources...)
+		}
+	}
 	r.passiveAgent = passive.New(sources, exclusions)
 }
 
@@ -44,9 +55,13 @@ func (r *Runner) initializeActiveEngine() error {
 
 	if r.options.Resolvers != "" {
 		resolvers = append(resolvers, strings.Split(r.options.Resolvers, ",")...)
-	} else {
+	} else if len(r.options.YAMLConfig.Resolvers) > 0 {
 		resolvers = append(resolvers, r.options.YAMLConfig.Resolvers...)
+	} else {
+		resolvers = append(resolvers, resolve.DefaultResolvers...)
 	}
+
 	r.resolverClient.AppendResolversFromSlice(resolvers)
+
 	return nil
 }
diff --git a/pkg/runner/options.go b/v2/pkg/runner/options.go
similarity index 90%
rename from pkg/runner/options.go
rename to v2/pkg/runner/options.go
index 8443008..2984e11 100644
--- a/pkg/runner/options.go
+++ b/v2/pkg/runner/options.go
@@ -15,26 +15,28 @@ import (
 type Options struct {
 	Verbose            bool   // Verbose flag indicates whether to show verbose output or not
 	NoColor            bool   // No-Color disables the colored output
+	ChaosUpload        bool   // ChaosUpload indicates whether to upload results to the Chaos API
+	JSON               bool   // JSON specifies whether to use json for output format or text file
+	HostIP             bool   // HostIP specifies whether to write subdomains in host:ip format
+	Silent             bool   // Silent suppresses any extra text and only writes subdomains to screen
+	ListSources        bool   // ListSources specifies whether to list all available sources
+	RemoveWildcard     bool   // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
+	Stdin              bool   // Stdin specifies whether stdin input was given to the process
+	Version            bool   // Version specifies if we should just show version and exit
+	Recursive          bool   // Recursive specifies whether to use only recursive subdomain enumeration sources
+	All                bool   // All specifies whether to use all (slow) sources.
 	Threads            int    // Thread controls the number of threads to use for active enumerations
 	Timeout            int    // Timeout is the seconds to wait for sources to respond
 	MaxEnumerationTime int    // MaxEnumerationTime is the maximum amount of time in mins to wait for enumeration
 	Domain             string // Domain is the domain to find subdomains for
 	DomainsFile        string // DomainsFile is the file containing list of domains to find subdomains for
-	ChaosUpload        bool   // ChaosUpload indicates whether to upload results to the Chaos API
 	Output             string // Output is the file to write found subdomains to.
 	OutputDirectory    string // OutputDirectory is the directory to write results to in case list of domains is given
-	JSON               bool   // JSON specifies whether to use json for output format or text file
-	HostIP             bool   // HostIP specifies whether to write subdomains in host:ip format
-	Silent             bool   // Silent suppresses any extra text and only writes subdomains to screen
 	Sources            string // Sources contains a comma-separated list of sources to use for enumeration
-	ListSources        bool   // ListSources specifies whether to list all available sources
 	ExcludeSources     string // ExcludeSources contains the comma-separated sources to not include in the enumeration process
 	Resolvers          string // Resolvers is the comma-separated resolvers to use for enumeration
 	ResolverList       string // ResolverList is a text file containing list of resolvers to use for enumeration
-	RemoveWildcard     bool   // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
 	ConfigFile         string // ConfigFile contains the location of the config file
-	Stdin              bool   // Stdin specifies whether stdin input was given to the process
-	Version            bool   // Version specifies if we should just show version and exit
 
 	YAMLConfig ConfigFile // YAMLConfig contains the unmarshalled yaml config file
 }
@@ -59,9 +61,12 @@ func ParseOptions() *Options {
 	flag.BoolVar(&options.ChaosUpload, "cd", false, "Upload results to the Chaos API (api-key required)")
 	flag.StringVar(&options.Output, "o", "", "File to write output to (optional)")
 	flag.StringVar(&options.OutputDirectory, "oD", "", "Directory to write enumeration results to (optional)")
+	flag.BoolVar(&options.JSON, "json", false, "Write output in JSON lines Format")
 	flag.BoolVar(&options.JSON, "oJ", false, "Write output in JSON lines Format")
 	flag.BoolVar(&options.HostIP, "oI", false, "Write output in Host,IP format")
 	flag.BoolVar(&options.Silent, "silent", false, "Show only subdomains in output")
+	flag.BoolVar(&options.Recursive, "recursive", false, "Use only recursive subdomain enumeration sources")
+	flag.BoolVar(&options.All, "all", false, "Use all sources (slow) for enumeration")
 	flag.StringVar(&options.Sources, "sources", "", "Comma separated list of sources to use")
 	flag.BoolVar(&options.ListSources, "ls", false, "List all available sources")
 	flag.StringVar(&options.ExcludeSources, "exclude-sources", "", "List of sources to exclude from enumeration")
@@ -122,7 +127,7 @@ func hasStdin() bool {
 }
 
 func listSources(options *Options) {
-	gologger.Infof("Current list of available sources. [%d]\n", len(options.YAMLConfig.Sources))
+	gologger.Infof("Current list of available sources. [%d]\n", len(options.YAMLConfig.AllSources))
 	gologger.Infof("Sources marked with an * needs key or token in order to work.\n")
 	gologger.Infof("You can modify %s to configure your keys / tokens.\n\n", options.ConfigFile)
 
@@ -130,13 +135,13 @@ func listSources(options *Options) {
 	needsKey := make(map[string]interface{})
 	keysElem := reflect.ValueOf(&keys).Elem()
 	for i := 0; i < keysElem.NumField(); i++ {
-			needsKey[strings.ToLower(keysElem.Type().Field(i).Name)] = keysElem.Field(i).Interface()
+		needsKey[strings.ToLower(keysElem.Type().Field(i).Name)] = keysElem.Field(i).Interface()
 	}
 
-	for _, source := range options.YAMLConfig.Sources {
+	for _, source := range options.YAMLConfig.AllSources {
 		message := "%s\n"
 		if _, ok := needsKey[source]; ok {
-				message = "%s *\n"
+			message = "%s *\n"
 		}
 		gologger.Silentf(message, source)
 	}
diff --git a/v2/pkg/runner/outputter.go b/v2/pkg/runner/outputter.go
new file mode 100644
index 0000000..a787f85
--- /dev/null
+++ b/v2/pkg/runner/outputter.go
@@ -0,0 +1,180 @@
+package runner
+
+import (
+	"bufio"
+	"errors"
+	"io"
+	"os"
+	"path/filepath"
+	"strings"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
+)
+
+// OutPutter outputs content to writers.
+type OutPutter struct {
+	JSON bool
+}
+
+type jsonResult struct {
+	Host   string `json:"host"`
+	IP     string `json:"ip"`
+	Source string `json:"source"`
+}
+
+// NewOutputter creates a new Outputter
+func NewOutputter(json bool) *OutPutter {
+	return &OutPutter{JSON: json}
+}
+
+func (o *OutPutter) createFile(filename string, appendtoFile bool) (*os.File, error) {
+	if filename == "" {
+		return nil, errors.New("empty filename")
+	}
+
+	dir := filepath.Dir(filename)
+
+	if dir != "" {
+		if _, err := os.Stat(dir); os.IsNotExist(err) {
+			err := os.MkdirAll(dir, os.ModePerm)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+
+	var file *os.File
+	var err error
+	if appendtoFile {
+		file, err = os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+	} else {
+		file, err = os.Create(filename)
+	}
+	if err != nil {
+		return nil, err
+	}
+
+	return file, nil
+}
+
+// WriteForChaos prepares the buffer to upload to Chaos
+func (o *OutPutter) WriteForChaos(results map[string]resolve.HostEntry, writer io.Writer) error {
+	bufwriter := bufio.NewWriter(writer)
+	sb := &strings.Builder{}
+
+	for _, result := range results {
+		sb.WriteString(result.Host)
+		sb.WriteString("\n")
+
+		_, err := bufwriter.WriteString(sb.String())
+		if err != nil {
+			bufwriter.Flush()
+			return err
+		}
+		sb.Reset()
+	}
+	return bufwriter.Flush()
+}
+
+// WriteHostIP writes the output list of subdomain to an io.Writer
+func (o *OutPutter) WriteHostIP(results map[string]resolve.Result, writer io.Writer) error {
+	var err error
+	if o.JSON {
+		err = writeJSONHostIP(results, writer)
+	} else {
+		err = writePlainHostIP(results, writer)
+	}
+	return err
+}
+
+func writePlainHostIP(results map[string]resolve.Result, writer io.Writer) error {
+	bufwriter := bufio.NewWriter(writer)
+	sb := &strings.Builder{}
+
+	for _, result := range results {
+		sb.WriteString(result.Host)
+		sb.WriteString(",")
+		sb.WriteString(result.IP)
+		sb.WriteString(",")
+		sb.WriteString(result.Source)
+		sb.WriteString("\n")
+
+		_, err := bufwriter.WriteString(sb.String())
+		if err != nil {
+			bufwriter.Flush()
+			return err
+		}
+		sb.Reset()
+	}
+	return bufwriter.Flush()
+}
+
+func writeJSONHostIP(results map[string]resolve.Result, writer io.Writer) error {
+	encoder := jsoniter.NewEncoder(writer)
+
+	var data jsonResult
+
+	for _, result := range results {
+		data.Host = result.Host
+		data.IP = result.IP
+		data.Source = result.Source
+
+		err := encoder.Encode(&data)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// WriteHostNoWildcard writes the output list of subdomain with nW flag to an io.Writer
+func (o *OutPutter) WriteHostNoWildcard(results map[string]resolve.Result, writer io.Writer) error {
+	hosts := make(map[string]resolve.HostEntry)
+	for host, result := range results {
+		hosts[host] = resolve.HostEntry{Host: result.Host, Source: result.Source}
+	}
+
+	return o.WriteHost(hosts, writer)
+}
+
+// WriteHost writes the output list of subdomain to an io.Writer
+func (o *OutPutter) WriteHost(results map[string]resolve.HostEntry, writer io.Writer) error {
+	var err error
+	if o.JSON {
+		err = writeJSONHost(results, writer)
+	} else {
+		err = writePlainHost(results, writer)
+	}
+	return err
+}
+
+func writePlainHost(results map[string]resolve.HostEntry, writer io.Writer) error {
+	bufwriter := bufio.NewWriter(writer)
+	sb := &strings.Builder{}
+
+	for _, result := range results {
+		sb.WriteString(result.Host)
+		sb.WriteString("\n")
+
+		_, err := bufwriter.WriteString(sb.String())
+		if err != nil {
+			bufwriter.Flush()
+			return err
+		}
+		sb.Reset()
+	}
+	return bufwriter.Flush()
+}
+
+func writeJSONHost(results map[string]resolve.HostEntry, writer io.Writer) error {
+	encoder := jsoniter.NewEncoder(writer)
+
+	for _, result := range results {
+		err := encoder.Encode(result)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
diff --git a/pkg/runner/runner.go b/v2/pkg/runner/runner.go
similarity index 69%
rename from pkg/runner/runner.go
rename to v2/pkg/runner/runner.go
index 6b37277..e938b5f 100644
--- a/pkg/runner/runner.go
+++ b/v2/pkg/runner/runner.go
@@ -2,12 +2,13 @@ package runner
 
 import (
 	"bufio"
+	"context"
 	"io"
 	"os"
 	"path"
 
-	"github.com/projectdiscovery/subfinder/pkg/passive"
-	"github.com/projectdiscovery/subfinder/pkg/resolve"
+	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
+	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
 )
 
 // Runner is an instance of the subdomain enumeration
@@ -37,10 +38,10 @@ func NewRunner(options *Options) (*Runner, error) {
 }
 
 // RunEnumeration runs the subdomain enumeration flow on the targets specified
-func (r *Runner) RunEnumeration() error {
+func (r *Runner) RunEnumeration(ctx context.Context) error {
 	// Check if only a single domain is sent as input. Process the domain now.
 	if r.options.Domain != "" {
-		return r.EnumerateSingleDomain(r.options.Domain, r.options.Output, false)
+		return r.EnumerateSingleDomain(ctx, r.options.Domain, r.options.Output, false)
 	}
 
 	// If we have multiple domains as input,
@@ -49,21 +50,21 @@ func (r *Runner) RunEnumeration() error {
 		if err != nil {
 			return err
 		}
-		err = r.EnumerateMultipleDomains(f)
+		err = r.EnumerateMultipleDomains(ctx, f)
 		f.Close()
 		return err
 	}
 
 	// If we have STDIN input, treat it as multiple domains
 	if r.options.Stdin {
-		return r.EnumerateMultipleDomains(os.Stdin)
+		return r.EnumerateMultipleDomains(ctx, os.Stdin)
 	}
 	return nil
 }
 
 // EnumerateMultipleDomains enumerates subdomains for multiple domains
 // We keep enumerating subdomains for a given domain until we reach an error
-func (r *Runner) EnumerateMultipleDomains(reader io.Reader) error {
+func (r *Runner) EnumerateMultipleDomains(ctx context.Context, reader io.Reader) error {
 	scanner := bufio.NewScanner(reader)
 	for scanner.Scan() {
 		domain := scanner.Text()
@@ -72,16 +73,21 @@ func (r *Runner) EnumerateMultipleDomains(reader io.Reader) error {
 		}
 
 		var err error
-		// If the user has specifed an output file, use that output file instead
+		// If the user has specified an output file, use that output file instead
 		// of creating a new output file for each domain. Else create a new file
 		// for each domain in the directory.
 		if r.options.Output != "" {
-			err = r.EnumerateSingleDomain(domain, r.options.Output, true)
+			err = r.EnumerateSingleDomain(ctx, domain, r.options.Output, true)
 		} else if r.options.OutputDirectory != "" {
 			outputFile := path.Join(r.options.OutputDirectory, domain)
-			err = r.EnumerateSingleDomain(domain, outputFile, false)
+			if r.options.JSON {
+				outputFile += ".json"
+			} else {
+				outputFile += ".txt"
+			}
+			err = r.EnumerateSingleDomain(ctx, domain, outputFile, false)
 		} else {
-			err = r.EnumerateSingleDomain(domain, "", true)
+			err = r.EnumerateSingleDomain(ctx, domain, "", true)
 		}
 		if err != nil {
 			return err
diff --git a/pkg/runner/validate.go b/v2/pkg/runner/validate.go
similarity index 79%
rename from pkg/runner/validate.go
rename to v2/pkg/runner/validate.go
index 95bc230..25d58be 100644
--- a/pkg/runner/validate.go
+++ b/v2/pkg/runner/validate.go
@@ -27,18 +27,10 @@ func (options *Options) validateOptions() error {
 		return errors.New("timeout cannot be zero")
 	}
 
-	// JSON cannot be used with hostIP
-	if options.JSON && options.HostIP {
-		return errors.New("hostip flag cannot be used with json flag")
-	}
-
-	// Always remove wildcard with hostip and json
+	// Always remove wildcard with hostip
 	if options.HostIP && !options.RemoveWildcard {
 		return errors.New("hostip flag must be used with RemoveWildcard option")
 	}
-	if options.JSON && !options.RemoveWildcard {
-		return errors.New("JSON flag must be used with RemoveWildcard option")
-	}
 
 	return nil
 }
diff --git a/v2/pkg/subscraping/agent.go b/v2/pkg/subscraping/agent.go
new file mode 100755
index 0000000..31cff36
--- /dev/null
+++ b/v2/pkg/subscraping/agent.go
@@ -0,0 +1,111 @@
+package subscraping
+
+import (
+	"context"
+	"crypto/tls"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"time"
+
+	"github.com/projectdiscovery/gologger"
+)
+
+// NewSession creates a new session object for a domain
+func NewSession(domain string, keys *Keys, timeout int) (*Session, error) {
+	client := &http.Client{
+		Transport: &http.Transport{
+			MaxIdleConns:        100,
+			MaxIdleConnsPerHost: 100,
+			TLSClientConfig: &tls.Config{
+				InsecureSkipVerify: true,
+			},
+		},
+		Timeout: time.Duration(timeout) * time.Second,
+	}
+
+	session := &Session{
+		Client: client,
+		Keys:   keys,
+	}
+
+	// Create a new extractor object for the current domain
+	extractor, err := NewSubdomainExtractor(domain)
+	session.Extractor = extractor
+
+	return session, err
+}
+
+// Get makes a GET request to a URL with extended parameters
+func (s *Session) Get(ctx context.Context, getURL, cookies string, headers map[string]string) (*http.Response, error) {
+	return s.HTTPRequest(ctx, http.MethodGet, getURL, cookies, headers, nil, BasicAuth{})
+}
+
+// SimpleGet makes a simple GET request to a URL
+func (s *Session) SimpleGet(ctx context.Context, getURL string) (*http.Response, error) {
+	return s.HTTPRequest(ctx, http.MethodGet, getURL, "", map[string]string{}, nil, BasicAuth{})
+}
+
+// Post makes a POST request to a URL with extended parameters
+func (s *Session) Post(ctx context.Context, postURL, cookies string, headers map[string]string, body io.Reader) (*http.Response, error) {
+	return s.HTTPRequest(ctx, http.MethodPost, postURL, cookies, headers, body, BasicAuth{})
+}
+
+// SimplePost makes a simple POST request to a URL
+func (s *Session) SimplePost(ctx context.Context, postURL, contentType string, body io.Reader) (*http.Response, error) {
+	return s.HTTPRequest(ctx, http.MethodPost, postURL, "", map[string]string{"Content-Type": contentType}, body, BasicAuth{})
+}
+
+// HTTPRequest makes any HTTP request to a URL with extended parameters
+func (s *Session) HTTPRequest(ctx context.Context, method, requestURL, cookies string, headers map[string]string, body io.Reader, basicAuth BasicAuth) (*http.Response, error) {
+	req, err := http.NewRequestWithContext(ctx, method, requestURL, body)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
+	req.Header.Set("Accept", "*/*")
+	req.Header.Set("Accept-Language", "en")
+	req.Header.Set("Connection", "close")
+
+	if basicAuth.Username != "" || basicAuth.Password != "" {
+		req.SetBasicAuth(basicAuth.Username, basicAuth.Password)
+	}
+
+	if cookies != "" {
+		req.Header.Set("Cookie", cookies)
+	}
+
+	for key, value := range headers {
+		req.Header.Set(key, value)
+	}
+
+	return httpRequestWrapper(s.Client, req)
+}
+
+// DiscardHTTPResponse drains and closes the response body on demand
+func (s *Session) DiscardHTTPResponse(response *http.Response) {
+	if response != nil {
+		_, err := io.Copy(ioutil.Discard, response.Body)
+		if err != nil {
+			gologger.Warningf("Could not discard response body: %s\n", err)
+			return
+		}
+		response.Body.Close()
+	}
+}
+
+func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
+	resp, err := client.Do(request)
+	if err != nil {
+		return nil, err
+	}
+
+	if resp.StatusCode != http.StatusOK {
+		requestURL, _ := url.QueryUnescape(request.URL.String())
+		return resp, fmt.Errorf("unexpected status code %d received from %s", resp.StatusCode, requestURL)
+	}
+	return resp, nil
+}
diff --git a/pkg/subscraping/sources/alienvault/alienvault.go b/v2/pkg/subscraping/sources/alienvault/alienvault.go
similarity index 60%
rename from pkg/subscraping/sources/alienvault/alienvault.go
rename to v2/pkg/subscraping/sources/alienvault/alienvault.go
index 74dcb20..b249c5b 100644
--- a/pkg/subscraping/sources/alienvault/alienvault.go
+++ b/v2/pkg/subscraping/sources/alienvault/alienvault.go
@@ -5,10 +5,12 @@ import (
 	"encoding/json"
 	"fmt"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type alienvaultResponse struct {
+	Detail     string `json:"detail"`
+	Error      string `json:"error"`
 	PassiveDNS []struct {
 		Hostname string `json:"hostname"`
 	} `json:"passive_dns"`
@@ -22,28 +24,33 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
-		if err != nil {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
+		if err != nil && resp == nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		otxResp := &alienvaultResponse{}
+		var response alienvaultResponse
 		// Get the response body and decode
-		err = json.NewDecoder(resp.Body).Decode(&otxResp)
+		err = json.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
-		for _, record := range otxResp.PassiveDNS {
+
+		if response.Error != "" {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s, %s", response.Detail, response.Error)}
+			return
+		}
+
+		for _, record := range response.PassiveDNS {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record.Hostname}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/pkg/subscraping/sources/certspotterold/certspotterold.go b/v2/pkg/subscraping/sources/anubis/anubis.go
old mode 100755
new mode 100644
similarity index 61%
rename from pkg/subscraping/sources/certspotterold/certspotterold.go
rename to v2/pkg/subscraping/sources/anubis/anubis.go
index b7f8bfd..50a4ba3
--- a/pkg/subscraping/sources/certspotterold/certspotterold.go
+++ b/v2/pkg/subscraping/sources/anubis/anubis.go
@@ -1,11 +1,11 @@
-package certspotterold
+package anubis
 
 import (
 	"context"
 	"fmt"
-	"io/ioutil"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // Source is the passive scraping agent
@@ -16,29 +16,28 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://jldc.me/anubis/subdomains/%s", domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		body, err := ioutil.ReadAll(resp.Body)
+		var subdomains []string
+		err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
-		resp.Body.Close()
 
-		src := string(body)
+		resp.Body.Close()
 
-		for _, subdomain := range session.Extractor.FindAllString(src, -1) {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+		for _, record := range subdomains {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record}
 		}
-		close(results)
 	}()
 
 	return results
@@ -46,5 +45,5 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 // Name returns the name of the source
 func (s *Source) Name() string {
-	return "certspotterold"
+	return "anubis"
 }
diff --git a/pkg/subscraping/sources/archiveis/archiveis.go b/v2/pkg/subscraping/sources/archiveis/archiveis.go
similarity index 76%
rename from pkg/subscraping/sources/archiveis/archiveis.go
rename to v2/pkg/subscraping/sources/archiveis/archiveis.go
index be7c749..c4e9d3a 100755
--- a/pkg/subscraping/sources/archiveis/archiveis.go
+++ b/v2/pkg/subscraping/sources/archiveis/archiveis.go
@@ -3,44 +3,45 @@ package archiveis
 
 import (
 	"context"
+	"fmt"
 	"io/ioutil"
 	"regexp"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
-// ArchiveIs is a struct for archiveurlsagent
-type ArchiveIs struct {
+type agent struct {
 	Results chan subscraping.Result
 	Session *subscraping.Session
 }
 
 var reNext = regexp.MustCompile("<a id=\"next\" style=\".*\" href=\"(.*)\">&rarr;</a>")
 
-func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
+func (a *agent) enumerate(ctx context.Context, baseURL string) {
 	select {
 	case <-ctx.Done():
 		return
 	default:
 	}
 
-	resp, err := a.Session.NormalGetWithContext(ctx, baseURL)
+	resp, err := a.Session.SimpleGet(ctx, baseURL)
 	if err != nil {
 		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
-		a.Session.DiscardHttpResponse(resp)
+		a.Session.DiscardHTTPResponse(resp)
 		return
 	}
 
 	// Get the response body
 	body, err := ioutil.ReadAll(resp.Body)
-	resp.Body.Close()
 	if err != nil {
 		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
+		resp.Body.Close()
 		return
 	}
 
-	src := string(body)
+	resp.Body.Close()
 
+	src := string(body)
 	for _, subdomain := range a.Session.Extractor.FindAllString(src, -1) {
 		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Subdomain, Value: subdomain}
 	}
@@ -58,17 +59,17 @@ type Source struct{}
 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
 	results := make(chan subscraping.Result)
 
-	aInstance := ArchiveIs{
+	a := agent{
 		Session: session,
 		Results: results,
 	}
 
 	go func() {
-		aInstance.enumerate(ctx, "http://archive.is/*."+domain)
-		close(aInstance.Results)
+		a.enumerate(ctx, fmt.Sprintf("http://archive.is/*.%s", domain))
+		close(a.Results)
 	}()
 
-	return aInstance.Results
+	return a.Results
 }
 
 // Name returns the name of the source
diff --git a/v2/pkg/subscraping/sources/binaryedge/binaryedge.go b/v2/pkg/subscraping/sources/binaryedge/binaryedge.go
new file mode 100755
index 0000000..e19b4d7
--- /dev/null
+++ b/v2/pkg/subscraping/sources/binaryedge/binaryedge.go
@@ -0,0 +1,143 @@
+package binaryedge
+
+import (
+	"context"
+	"fmt"
+	"math"
+	"net/url"
+	"strconv"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+const (
+	v1                = "v1"
+	v2                = "v2"
+	baseAPIURLFmt     = "https://api.binaryedge.io/%s/query/domains/subdomain/%s"
+	v2SubscriptionURL = "https://api.binaryedge.io/v2/user/subscription"
+	v1PageSizeParam   = "pagesize"
+	pageParam         = "page"
+	firstPage         = 1
+	maxV1PageSize     = 10000
+)
+
+type subdomainsResponse struct {
+	Message    string      `json:"message"`
+	Title      string      `json:"title"`
+	Status     interface{} `json:"status"` // string for v1, int for v2
+	Subdomains []string    `json:"events"`
+	Page       int         `json:"page"`
+	PageSize   int         `json:"pagesize"`
+	Total      int         `json:"total"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.Binaryedge == "" {
+			return
+		}
+
+		var baseURL string
+
+		authHeader := map[string]string{"X-Key": session.Keys.Binaryedge}
+
+		if isV2(ctx, session, authHeader) {
+			baseURL = fmt.Sprintf(baseAPIURLFmt, v2, domain)
+		} else {
+			authHeader = map[string]string{"X-Token": session.Keys.Binaryedge}
+			v1URLWithPageSize, err := addURLParam(fmt.Sprintf(baseAPIURLFmt, v1, domain), v1PageSizeParam, strconv.Itoa(maxV1PageSize))
+			if err != nil {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				return
+			}
+			baseURL = v1URLWithPageSize.String()
+		}
+
+		if baseURL == "" {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("can't get API URL")}
+			return
+		}
+
+		s.enumerate(ctx, session, baseURL, firstPage, authHeader, results)
+	}()
+
+	return results
+}
+
+func (s *Source) enumerate(ctx context.Context, session *subscraping.Session, baseURL string, page int, authHeader map[string]string, results chan subscraping.Result) {
+	pageURL, err := addURLParam(baseURL, pageParam, strconv.Itoa(page))
+	if err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		return
+	}
+
+	resp, err := session.Get(ctx, pageURL.String(), "", authHeader)
+	if err != nil && resp == nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHTTPResponse(resp)
+		return
+	}
+
+	var response subdomainsResponse
+	err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+	if err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		resp.Body.Close()
+		return
+	}
+
+	// Surface API error messages; %s keeps any '%' in the message from being parsed as a verb
+	if response.Message != "" && response.Status != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message)}
+	}
+
+	resp.Body.Close()
+
+	for _, subdomain := range response.Subdomains {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+	}
+
+	nextPage := response.Page + 1
+	// Recurse into the immediate next page only: each recursive call advances one page,
+	// so every page is fetched exactly once (a loop here would refetch pages repeatedly).
+	if response.PageSize > 0 && nextPage <= int(math.Ceil(float64(response.Total)/float64(response.PageSize))) {
+		s.enumerate(ctx, session, baseURL, nextPage, authHeader, results)
+	}
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "binaryedge"
+}
+
+func isV2(ctx context.Context, session *subscraping.Session, authHeader map[string]string) bool {
+	resp, err := session.Get(ctx, v2SubscriptionURL, "", authHeader)
+	if err != nil {
+		session.DiscardHTTPResponse(resp)
+		return false
+	}
+
+	resp.Body.Close()
+
+	return true
+}
+
+func addURLParam(targetURL, name, value string) (*url.URL, error) {
+	u, err := url.Parse(targetURL)
+	if err != nil {
+		return u, err
+	}
+	q, _ := url.ParseQuery(u.RawQuery)
+	q.Add(name, value)
+	u.RawQuery = q.Encode()
+
+	return u, nil
+}
diff --git a/v2/pkg/subscraping/sources/bufferover/bufferover.go b/v2/pkg/subscraping/sources/bufferover/bufferover.go
new file mode 100755
index 0000000..b5672f8
--- /dev/null
+++ b/v2/pkg/subscraping/sources/bufferover/bufferover.go
@@ -0,0 +1,85 @@
+// Package bufferover provides a subdomain scraping engine for the bufferover.run service
+package bufferover
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+type response struct {
+	Meta struct {
+		Errors []string `json:"Errors"`
+	} `json:"Meta"`
+	FDNSA   []string `json:"FDNS_A"`
+	RDNS    []string `json:"RDNS"`
+	Results []string `json:"Results"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		// Run enumeration against both the DNS and TLS bufferover datasets (historical SONAR data)
+		s.getData(ctx, fmt.Sprintf("https://dns.bufferover.run/dns?q=.%s", domain), session, results)
+		s.getData(ctx, fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), session, results)
+
+		close(results)
+	}()
+
+	return results
+}
+
+func (s *Source) getData(ctx context.Context, sourceURL string, session *subscraping.Session, results chan subscraping.Result) {
+	resp, err := session.SimpleGet(ctx, sourceURL)
+	if err != nil && resp == nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHTTPResponse(resp)
+		return
+	}
+
+	var bufforesponse response
+	err = jsoniter.NewDecoder(resp.Body).Decode(&bufforesponse)
+	if err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		resp.Body.Close()
+		return
+	}
+
+	resp.Body.Close()
+
+	metaErrors := bufforesponse.Meta.Errors
+
+	if len(metaErrors) > 0 {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", strings.Join(metaErrors, ", "))}
+		return
+	}
+
+	var subdomains []string
+
+	if len(bufforesponse.FDNSA) > 0 {
+		subdomains = bufforesponse.FDNSA
+		subdomains = append(subdomains, bufforesponse.RDNS...)
+	} else if len(bufforesponse.Results) > 0 {
+		subdomains = bufforesponse.Results
+	}
+
+	for _, subdomain := range subdomains {
+		for _, value := range session.Extractor.FindAllString(subdomain, -1) {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: value}
+		}
+	}
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "bufferover"
+}
diff --git a/v2/pkg/subscraping/sources/cebaidu/cebaidu.go b/v2/pkg/subscraping/sources/cebaidu/cebaidu.go
new file mode 100644
index 0000000..1366cc7
--- /dev/null
+++ b/v2/pkg/subscraping/sources/cebaidu/cebaidu.go
@@ -0,0 +1,63 @@
+package cebaidu
+
+import (
+	"context"
+	"fmt"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+type domain struct {
+	Domain string `json:"domain"`
+}
+
+type cebaiduResponse struct {
+	Code    int64    `json:"code"`
+	Message string   `json:"message"`
+	Data    []domain `json:"data"`
+}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://ce.baidu.com/index/getRelatedSites?site_address=%s", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		var response cebaiduResponse
+		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			resp.Body.Close()
+			return
+		}
+		resp.Body.Close()
+
+		if response.Code > 0 {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%d, %s", response.Code, response.Message)}
+			return
+		}
+
+		for _, result := range response.Data {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: result.Domain}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "cebaidu"
+}
diff --git a/pkg/subscraping/sources/censys/censys.go b/v2/pkg/subscraping/sources/censys/censys.go
similarity index 67%
rename from pkg/subscraping/sources/censys/censys.go
rename to v2/pkg/subscraping/sources/censys/censys.go
index d54c79a..afbf1f8 100644
--- a/pkg/subscraping/sources/censys/censys.go
+++ b/v2/pkg/subscraping/sources/censys/censys.go
@@ -3,11 +3,10 @@ package censys
 import (
 	"bytes"
 	"context"
-	"net/http"
 	"strconv"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 const maxCensysPages = 10
@@ -32,48 +31,48 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.CensysToken == "" || session.Keys.CensysSecret == "" {
-			close(results)
 			return
 		}
-		var response response
 
 		currentPage := 1
 		for {
 			var request = []byte(`{"query":"` + domain + `", "page":` + strconv.Itoa(currentPage) + `, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`)
 
-			req, err := http.NewRequestWithContext(ctx, "POST", "https://www.censys.io/api/v1/search/certificates", bytes.NewReader(request))
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				close(results)
-				return
-			}
-			req.SetBasicAuth(session.Keys.CensysToken, session.Keys.CensysSecret)
-			req.Header.Set("Content-Type", "application/json")
-			req.Header.Set("Accept", "application/json")
+			resp, err := session.HTTPRequest(
+				ctx,
+				"POST",
+				"https://www.censys.io/api/v1/search/certificates",
+				"",
+				map[string]string{"Content-Type": "application/json", "Accept": "application/json"},
+				bytes.NewReader(request),
+				subscraping.BasicAuth{Username: session.Keys.CensysToken, Password: session.Keys.CensysSecret},
+			)
 
-			resp, err := session.Client.Do(req)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				close(results)
+				session.DiscardHTTPResponse(resp)
 				return
 			}
 
-			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+			var censysResponse response
+			err = jsoniter.NewDecoder(resp.Body).Decode(&censysResponse)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 				resp.Body.Close()
-				close(results)
 				return
 			}
+
 			resp.Body.Close()
 
 			// Exit the censys enumeration if max pages is reached
-			if currentPage >= response.Metadata.Pages || currentPage >= maxCensysPages {
+			if currentPage >= censysResponse.Metadata.Pages || currentPage >= maxCensysPages {
 				break
 			}
 
-			for _, res := range response.Results {
+			for _, res := range censysResponse.Results {
 				for _, part := range res.Data {
 					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
 				}
@@ -84,7 +83,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 			currentPage++
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/pkg/subscraping/sources/certspotter/certspotter.go b/v2/pkg/subscraping/sources/certspotter/certspotter.go
similarity index 89%
rename from pkg/subscraping/sources/certspotter/certspotter.go
rename to v2/pkg/subscraping/sources/certspotter/certspotter.go
index 0f6affc..0b6d6ac 100755
--- a/pkg/subscraping/sources/certspotter/certspotter.go
+++ b/v2/pkg/subscraping/sources/certspotter/certspotter.go
@@ -5,7 +5,7 @@ import (
 	"fmt"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type certspotterObject struct {
@@ -21,25 +21,24 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.Certspotter == "" {
-			close(results)
 			return
 		}
 
 		resp, err := session.Get(ctx, fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		response := []certspotterObject{}
+		var response []certspotterObject
 		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
@@ -52,7 +51,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 		// if the number of responses is zero, close the channel and return.
 		if len(response) == 0 {
-			close(results)
 			return
 		}
 
@@ -63,16 +61,14 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			resp, err := session.Get(ctx, reqURL, "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				close(results)
 				return
 			}
 
-			response := []certspotterObject{}
+			var response []certspotterObject
 			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 				resp.Body.Close()
-				close(results)
 				return
 			}
 			resp.Body.Close()
@@ -89,7 +85,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 			id = response[len(response)-1].ID
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/certspotterold/certspotterold.go b/v2/pkg/subscraping/sources/certspotterold/certspotterold.go
new file mode 100755
index 0000000..4abed9b
--- /dev/null
+++ b/v2/pkg/subscraping/sources/certspotterold/certspotterold.go
@@ -0,0 +1,75 @@
+package certspotterold
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+type errorResponse struct {
+	Code    string `json:"code"`
+	Message string `json:"Message"`
+}
+
+type subdomain struct {
+	DNSNames []string `json:"dns_names"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
+		if err != nil && resp == nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		if resp.StatusCode != http.StatusOK {
+			var errResponse errorResponse
+			err = jsoniter.NewDecoder(resp.Body).Decode(&errResponse)
+			if err != nil {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				resp.Body.Close()
+				return
+			}
+
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s: %s", errResponse.Code, errResponse.Message)}
+			resp.Body.Close()
+			return
+		}
+
+		var subdomains []subdomain
+		err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			resp.Body.Close()
+			return
+		}
+
+		resp.Body.Close()
+
+		for _, subdomain := range subdomains {
+			for _, dnsname := range subdomain.DNSNames {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: dnsname}
+			}
+		}
+	}()
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "certspotterold"
+}
diff --git a/v2/pkg/subscraping/sources/chaos/chaos.go b/v2/pkg/subscraping/sources/chaos/chaos.go
new file mode 100644
index 0000000..324050e
--- /dev/null
+++ b/v2/pkg/subscraping/sources/chaos/chaos.go
@@ -0,0 +1,43 @@
+package chaos
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/projectdiscovery/chaos-client/pkg/chaos"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.Chaos == "" {
+			return
+		}
+
+		chaosClient := chaos.New(session.Keys.Chaos)
+		for result := range chaosClient.GetSubdomains(&chaos.SubdomainsRequest{
+			Domain: domain,
+		}) {
+			if result.Error != nil {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: result.Error}
+				break
+			}
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: fmt.Sprintf("%s.%s", result.Subdomain, domain)}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "chaos"
+}
diff --git a/pkg/subscraping/sources/commoncrawl/commoncrawl.go b/v2/pkg/subscraping/sources/commoncrawl/commoncrawl.go
similarity index 61%
rename from pkg/subscraping/sources/commoncrawl/commoncrawl.go
rename to v2/pkg/subscraping/sources/commoncrawl/commoncrawl.go
index 3b021f5..9901aa8 100755
--- a/pkg/subscraping/sources/commoncrawl/commoncrawl.go
+++ b/v2/pkg/subscraping/sources/commoncrawl/commoncrawl.go
@@ -1,14 +1,14 @@
 package commoncrawl
 
 import (
+	"bufio"
 	"context"
 	"fmt"
-	"io/ioutil"
 	"net/url"
 	"strings"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 const indexURL = "https://index.commoncrawl.org/collinfo.json"
@@ -28,20 +28,20 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, indexURL)
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, indexURL)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		indexes := []indexResponse{}
+		var indexes []indexResponse
 		err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
@@ -64,7 +64,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 				break
 			}
 		}
-		close(results)
 	}()
 
 	return results
@@ -75,33 +74,38 @@ func (s *Source) Name() string {
 	return "commoncrawl"
 }
 
-func (s *Source) getSubdomains(ctx context.Context, searchURL string, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
+func (s *Source) getSubdomains(ctx context.Context, searchURL, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
 	for {
 		select {
 		case <-ctx.Done():
 			return false
 		default:
-			resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("%s?url=*.%s&output=json", searchURL, domain))
+			var headers = map[string]string{"Host": "index.commoncrawl.org"}
+			resp, err := session.Get(ctx, fmt.Sprintf("%s?url=*.%s", searchURL, domain), "", headers)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				session.DiscardHTTPResponse(resp)
 				return false
 			}
 
-			body, err := ioutil.ReadAll(resp.Body)
-			if err != nil {
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				resp.Body.Close()
-				return false
+			scanner := bufio.NewScanner(resp.Body)
+			for scanner.Scan() {
+				line := scanner.Text()
+				if line == "" {
+					continue
+				}
+				line, _ = url.QueryUnescape(line)
+				subdomain := session.Extractor.FindString(line)
+				if subdomain != "" {
+					// fix for triple encoded URL
+					subdomain = strings.ToLower(subdomain)
+					subdomain = strings.TrimPrefix(subdomain, "25")
+					subdomain = strings.TrimPrefix(subdomain, "2f")
+
+					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+				}
 			}
 			resp.Body.Close()
-
-			src, _ := url.QueryUnescape(string(body))
-
-			for _, subdomain := range session.Extractor.FindAllString(src, -1) {
-				subdomain = strings.TrimPrefix(subdomain, "25")
-
-				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
-			}
 			return true
 		}
 	}
diff --git a/pkg/subscraping/sources/crtsh/crtsh.go b/v2/pkg/subscraping/sources/crtsh/crtsh.go
similarity index 64%
rename from pkg/subscraping/sources/crtsh/crtsh.go
rename to v2/pkg/subscraping/sources/crtsh/crtsh.go
index 981a178..6e558f0 100755
--- a/pkg/subscraping/sources/crtsh/crtsh.go
+++ b/v2/pkg/subscraping/sources/crtsh/crtsh.go
@@ -4,14 +4,19 @@ import (
 	"context"
 	"database/sql"
 	"fmt"
-	"io/ioutil"
-	"strings"
+
+	jsoniter "github.com/json-iterator/go"
 
 	// postgres driver
 	_ "github.com/lib/pq"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
+type subdomain struct {
+	ID        int    `json:"id"`
+	NameValue string `json:"name_value"`
+}
+
 // Source is the passive scraping agent
 type Source struct{}
 
@@ -20,19 +25,18 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		found := s.getSubdomainsFromSQL(ctx, domain, session, results)
+		defer close(results)
+		found := s.getSubdomainsFromSQL(domain, results)
 		if found {
-			close(results)
 			return
 		}
 		_ = s.getSubdomainsFromHTTP(ctx, domain, session, results)
-		close(results)
 	}()
 
 	return results
 }
 
-func (s *Source) getSubdomainsFromSQL(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
+func (s *Source) getSubdomainsFromSQL(domain string, results chan subscraping.Result) bool {
 	db, err := sql.Open("postgres", "host=crt.sh user=guest dbname=certwatch sslmode=disable binary_parameters=yes")
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
@@ -40,14 +44,18 @@ func (s *Source) getSubdomainsFromSQL(ctx context.Context, domain string, sessio
 	}
 
 	pattern := "%." + domain
-	rows, err := db.Query(`SELECT DISTINCT ci.NAME_VALUE as domain
-	FROM certificate_identity ci
-	WHERE reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower($1))
-	ORDER BY ci.NAME_VALUE`, pattern)
+	query := `SELECT DISTINCT ci.NAME_VALUE as domain FROM certificate_identity ci
+					  WHERE reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower($1))
+					  ORDER BY ci.NAME_VALUE`
+	rows, err := db.Query(query, pattern)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 		return false
 	}
+	if err := rows.Err(); err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		return false
+	}
 
 	var data string
 	// Parse all the rows getting subdomains
@@ -63,27 +71,27 @@ func (s *Source) getSubdomainsFromSQL(ctx context.Context, domain string, sessio
 }
 
 func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
-	resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
+	resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-		session.DiscardHttpResponse(resp)
+		session.DiscardHTTPResponse(resp)
 		return false
 	}
 
-	body, err := ioutil.ReadAll(resp.Body)
+	var subdomains []subdomain
+	err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
 	if err != nil {
 		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 		resp.Body.Close()
 		return false
 	}
-	resp.Body.Close()
 
-	// Also replace all newlines
-	src := strings.Replace(string(body), "\\n", " ", -1)
+	resp.Body.Close()
 
-	for _, subdomain := range session.Extractor.FindAllString(src, -1) {
-		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+	for _, subdomain := range subdomains {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain.NameValue}
 	}
+
 	return true
 }
 
diff --git a/v2/pkg/subscraping/sources/dnsdb/dnsdb.go b/v2/pkg/subscraping/sources/dnsdb/dnsdb.go
new file mode 100644
index 0000000..120ace3
--- /dev/null
+++ b/v2/pkg/subscraping/sources/dnsdb/dnsdb.go
@@ -0,0 +1,67 @@
+package dnsdb
+
+import (
+	"bufio"
+	"bytes"
+	"context"
+	"fmt"
+	"strings"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+type dnsdbResponse struct {
+	Name string `json:"rrname"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.DNSDB == "" {
+			return
+		}
+
+		headers := map[string]string{
+			"X-API-KEY":    session.Keys.DNSDB,
+			"Accept":       "application/json",
+			"Content-Type": "application/json",
+		}
+
+		resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		scanner := bufio.NewScanner(resp.Body)
+		for scanner.Scan() {
+			line := scanner.Text()
+			if line == "" {
+				continue
+			}
+			var response dnsdbResponse
+			err = jsoniter.NewDecoder(bytes.NewBufferString(line)).Decode(&response)
+			if err != nil {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				return
+			}
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(response.Name, ".")}
+		}
+		resp.Body.Close()
+	}()
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "DNSDB"
+}
diff --git a/pkg/subscraping/sources/dnsdumpster/dnsdumpster.go b/v2/pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
similarity index 58%
rename from pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
rename to v2/pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
index 170d4f8..8a3182d 100755
--- a/pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
+++ b/v2/pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
@@ -2,63 +2,54 @@ package dnsdumpster
 
 import (
 	"context"
+	"fmt"
 	"io/ioutil"
-	"net"
-	"net/http"
 	"net/url"
 	"regexp"
 	"strings"
-	"time"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
+// CSRFSubMatchLength CSRF regex submatch length
+const CSRFSubMatchLength = 2
+
 var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")
 
 // getCSRFToken gets the CSRF Token from the page
 func getCSRFToken(page string) string {
-	if subs := re.FindStringSubmatch(page); len(subs) == 2 {
+	if subs := re.FindStringSubmatch(page); len(subs) == CSRFSubMatchLength {
 		return strings.TrimSpace(subs[1])
 	}
 	return ""
 }
 
 // postForm posts a form for a domain and returns the response
-func postForm(token, domain string) (string, error) {
-	dial := net.Dialer{}
-	client := &http.Client{
-		Transport: &http.Transport{
-			DialContext:         dial.DialContext,
-			TLSHandshakeTimeout: 10 * time.Second,
-		},
-	}
+func postForm(ctx context.Context, session *subscraping.Session, token, domain string) (string, error) {
 	params := url.Values{
 		"csrfmiddlewaretoken": {token},
 		"targetip":            {domain},
 	}
 
-	req, err := http.NewRequest("POST", "https://dnsdumpster.com/", strings.NewReader(params.Encode()))
-	if err != nil {
-		return "", err
-	}
-
-	// The CSRF token needs to be sent as a cookie
-	cookie := &http.Cookie{
-		Name:   "csrftoken",
-		Domain: "dnsdumpster.com",
-		Value:  token,
-	}
-	req.AddCookie(cookie)
-
-	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
-	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
-	req.Header.Set("Referer", "https://dnsdumpster.com")
-	req.Header.Set("X-CSRF-Token", token)
+	resp, err := session.HTTPRequest(
+		ctx,
+		"POST",
+		"https://dnsdumpster.com/",
+		fmt.Sprintf("csrftoken=%s; Domain=dnsdumpster.com", token),
+		map[string]string{
+			"Content-Type": "application/x-www-form-urlencoded",
+			"Referer":      "https://dnsdumpster.com",
+			"X-CSRF-Token": token,
+		},
+		strings.NewReader(params.Encode()),
+		subscraping.BasicAuth{},
+	)
 
-	resp, err := client.Do(req)
 	if err != nil {
+		session.DiscardHTTPResponse(resp)
 		return "", err
 	}
+
 	// Now, grab the entire page
 	in, err := ioutil.ReadAll(resp.Body)
 	resp.Body.Close()
@@ -73,11 +64,12 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, "https://dnsdumpster.com/")
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, "https://dnsdumpster.com/")
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
@@ -85,23 +77,20 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
-		csrfToken := getCSRFToken(string(body))
 
-		data, err := postForm(csrfToken, domain)
+		csrfToken := getCSRFToken(string(body))
+		data, err := postForm(ctx, session, csrfToken, domain)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
 			return
 		}
 
 		for _, subdomain := range session.Extractor.FindAllString(data, -1) {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/github/github.go b/v2/pkg/subscraping/sources/github/github.go
new file mode 100644
index 0000000..d384de8
--- /dev/null
+++ b/v2/pkg/subscraping/sources/github/github.go
@@ -0,0 +1,192 @@
+// Package github GitHub search package
+// Based on gwen001's https://github.com/gwen001/github-search github-subdomains
+package github
+
+import (
+	"bufio"
+	"context"
+	"fmt"
+	"net/http"
+	"net/url"
+	"regexp"
+	"strconv"
+	"strings"
+	"time"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/gologger"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+	"github.com/tomnomnom/linkheader"
+)
+
+type textMatch struct {
+	Fragment string `json:"fragment"`
+}
+
+type item struct {
+	Name        string      `json:"name"`
+	HTMLURL     string      `json:"html_url"`
+	TextMatches []textMatch `json:"text_matches"`
+}
+
+type response struct {
+	TotalCount int    `json:"total_count"`
+	Items      []item `json:"items"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if len(session.Keys.GitHub) == 0 {
+			return
+		}
+
+		tokens := NewTokenManager(session.Keys.GitHub)
+
+		searchURL := fmt.Sprintf("https://api.github.com/search/code?per_page=100&q=%s&sort=created&order=asc", domain)
+		s.enumerate(ctx, searchURL, domainRegexp(domain), tokens, session, results)
+	}()
+
+	return results
+}
+
+func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *regexp.Regexp, tokens *Tokens, session *subscraping.Session, results chan subscraping.Result) {
+	select {
+	case <-ctx.Done():
+		return
+	default:
+	}
+
+	token := tokens.Get()
+
+	if token.RetryAfter > 0 {
+		if len(tokens.pool) == 1 {
+			gologger.Verbosef("GitHub Search request rate limit exceeded, waiting for %d seconds before retry... \n", s.Name(), token.RetryAfter)
+			time.Sleep(time.Duration(token.RetryAfter) * time.Second)
+		} else {
+			token = tokens.Get()
+		}
+	}
+
+	headers := map[string]string{"Accept": "application/vnd.github.v3.text-match+json", "Authorization": "token " + token.Hash}
+
+	// Initial request to GitHub search
+	resp, err := session.Get(ctx, searchURL, "", headers)
+	isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
+	if err != nil && !isForbidden {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		session.DiscardHTTPResponse(resp)
+		return
+	}
+
+	// Retry enumeration after Retry-After seconds when rate limit abuse is detected
+	ratelimitRemaining, _ := strconv.ParseInt(resp.Header.Get("X-Ratelimit-Remaining"), 10, 64)
+	if isForbidden && ratelimitRemaining == 0 {
+		retryAfterSeconds, _ := strconv.ParseInt(resp.Header.Get("Retry-After"), 10, 64)
+		tokens.setCurrentTokenExceeded(retryAfterSeconds)
+		resp.Body.Close()
+
+		s.enumerate(ctx, searchURL, domainRegexp, tokens, session, results)
+	}
+
+	var data response
+
+	// Unmarshal JSON response
+	err = jsoniter.NewDecoder(resp.Body).Decode(&data)
+	if err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		resp.Body.Close()
+		return
+	}
+
+	resp.Body.Close()
+
+	err = proccesItems(ctx, data.Items, domainRegexp, s.Name(), session, results)
+	if err != nil {
+		results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+		return
+	}
+
+	// Links header, first, next, last...
+	linksHeader := linkheader.Parse(resp.Header.Get("Link"))
+	// Process the next link recursively
+	for _, link := range linksHeader {
+		if link.Rel == "next" {
+			nextURL, err := url.QueryUnescape(link.URL)
+			if err != nil {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				return
+			}
+			s.enumerate(ctx, nextURL, domainRegexp, tokens, session, results)
+		}
+	}
+}
+
+// proccesItems processes GitHub response items
+func proccesItems(ctx context.Context, items []item, domainRegexp *regexp.Regexp, name string, session *subscraping.Session, results chan subscraping.Result) error {
+	for _, item := range items {
+		// find subdomains in code
+		resp, err := session.SimpleGet(ctx, rawURL(item.HTMLURL))
+		if err != nil {
+			if resp != nil && resp.StatusCode != http.StatusNotFound {
+				session.DiscardHTTPResponse(resp)
+			}
+			return err
+		}
+
+		if resp.StatusCode == http.StatusOK {
+			scanner := bufio.NewScanner(resp.Body)
+			for scanner.Scan() {
+				line := scanner.Text()
+				if line == "" {
+					continue
+				}
+				for _, subdomain := range domainRegexp.FindAllString(normalizeContent(line), -1) {
+					results <- subscraping.Result{Source: name, Type: subscraping.Subdomain, Value: subdomain}
+				}
+			}
+			resp.Body.Close()
+		}
+
+		// find subdomains in text matches
+		for _, textMatch := range item.TextMatches {
+			for _, subdomain := range domainRegexp.FindAllString(normalizeContent(textMatch.Fragment), -1) {
+				results <- subscraping.Result{Source: name, Type: subscraping.Subdomain, Value: subdomain}
+			}
+		}
+	}
+	return nil
+}
+
+// Normalize content before matching: query unescape and strip escaped tab/newline sequences
+func normalizeContent(content string) string {
+	normalizedContent, _ := url.QueryUnescape(content)
+	normalizedContent = strings.ReplaceAll(normalizedContent, "\\t", "")
+	normalizedContent = strings.ReplaceAll(normalizedContent, "\\n", "")
+	return normalizedContent
+}
+
+// rawURL returns the raw content URL used to fetch file code and match for subdomains
+func rawURL(htmlURL string) string {
+	domain := strings.ReplaceAll(htmlURL, "https://github.com/", "https://raw.githubusercontent.com/")
+	return strings.ReplaceAll(domain, "/blob/", "/")
+}
+
+// domainRegexp returns a regular expression to match subdomains in GitHub file code
+func domainRegexp(domain string) *regexp.Regexp {
+	rdomain := strings.ReplaceAll(domain, ".", "\\.")
+	return regexp.MustCompile("(\\w+[.])*" + rdomain)
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "github"
+}
diff --git a/pkg/subscraping/sources/github/tokenmanager.go b/v2/pkg/subscraping/sources/github/tokenmanager.go
similarity index 67%
rename from pkg/subscraping/sources/github/tokenmanager.go
rename to v2/pkg/subscraping/sources/github/tokenmanager.go
index 298ea81..effdfd7 100644
--- a/pkg/subscraping/sources/github/tokenmanager.go
+++ b/v2/pkg/subscraping/sources/github/tokenmanager.go
@@ -2,21 +2,25 @@ package github
 
 import "time"
 
-type token struct {
+// Token struct
+type Token struct {
 	Hash         string
 	RetryAfter   int64
 	ExceededTime time.Time
 }
 
+// Tokens is the internal struct to manage the current token
+// and the pool
 type Tokens struct {
 	current int
-	pool    []token
+	pool    []Token
 }
 
+// NewTokenManager initializes the token pool
 func NewTokenManager(keys []string) *Tokens {
-	pool := []token{}
+	pool := []Token{}
 	for _, key := range keys {
-		t := token{Hash: key, ExceededTime: time.Time{}, RetryAfter: 0}
+		t := Token{Hash: key, ExceededTime: time.Time{}, RetryAfter: 0}
 		pool = append(pool, t)
 	}
 
@@ -28,7 +32,7 @@ func NewTokenManager(keys []string) *Tokens {
 
 func (r *Tokens) setCurrentTokenExceeded(retryAfter int64) {
 	if r.current >= len(r.pool) {
-		r.current = r.current % len(r.pool)
+		r.current %= len(r.pool)
 	}
 	if r.pool[r.current].RetryAfter == 0 {
 		r.pool[r.current].ExceededTime = time.Now()
@@ -36,14 +40,15 @@ func (r *Tokens) setCurrentTokenExceeded(retryAfter int64) {
 	}
 }
 
-func (r *Tokens) Get() token {
+// Get returns a new token from the token pool
+func (r *Tokens) Get() *Token {
 	resetExceededTokens(r)
 
 	if r.current >= len(r.pool) {
-		r.current = r.current % len(r.pool)
+		r.current %= len(r.pool)
 	}
 
-	result := r.pool[r.current]
+	result := &r.pool[r.current]
 	r.current++
 
 	return result
diff --git a/v2/pkg/subscraping/sources/hackertarget/hackertarget.go b/v2/pkg/subscraping/sources/hackertarget/hackertarget.go
new file mode 100755
index 0000000..b9bf03f
--- /dev/null
+++ b/v2/pkg/subscraping/sources/hackertarget/hackertarget.go
@@ -0,0 +1,49 @@
+package hackertarget
+
+import (
+	"bufio"
+	"context"
+	"fmt"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		defer resp.Body.Close()
+
+		scanner := bufio.NewScanner(resp.Body)
+		for scanner.Scan() {
+			line := scanner.Text()
+			if line == "" {
+				continue
+			}
+			match := session.Extractor.FindAllString(line, -1)
+			for _, subdomain := range match {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "hackertarget"
+}
diff --git a/pkg/subscraping/sources/intelx/intelx.go b/v2/pkg/subscraping/sources/intelx/intelx.go
similarity index 86%
rename from pkg/subscraping/sources/intelx/intelx.go
rename to v2/pkg/subscraping/sources/intelx/intelx.go
index c07bbef..3bb7c5c 100644
--- a/pkg/subscraping/sources/intelx/intelx.go
+++ b/v2/pkg/subscraping/sources/intelx/intelx.go
@@ -6,14 +6,13 @@ import (
 	"encoding/json"
 	"fmt"
 	"io/ioutil"
-	"net/http"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type searchResponseType struct {
-	Id     string `json:"id"`
+	ID     string `json:"id"`
 	Status int    `json:"status"`
 }
 
@@ -44,6 +43,7 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 	go func() {
 		defer close(results)
+
 		if session.Keys.IntelXKey == "" || session.Keys.IntelXHost == "" {
 			return
 		}
@@ -63,10 +63,10 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			return
 		}
 
-		resp, err := http.Post(searchURL, "application/json", bytes.NewBuffer(body))
+		resp, err := session.SimplePost(ctx, searchURL, "application/json", bytes.NewBuffer(body))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
@@ -74,30 +74,37 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			resp.Body.Close()
 			return
 		}
 
-		resultsURL := fmt.Sprintf("https://%s/phonebook/search/result?k=%s&id=%s&limit=10000", session.Keys.IntelXHost, session.Keys.IntelXKey, response.Id)
+		resp.Body.Close()
+
+		resultsURL := fmt.Sprintf("https://%s/phonebook/search/result?k=%s&id=%s&limit=10000", session.Keys.IntelXHost, session.Keys.IntelXKey, response.ID)
 		status := 0
 		for status == 0 || status == 3 {
 			resp, err = session.Get(ctx, resultsURL, "", nil)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				session.DiscardHTTPResponse(resp)
 				return
 			}
 			var response searchResultType
 			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				resp.Body.Close()
 				return
 			}
-			body, err = ioutil.ReadAll(resp.Body)
+
+			_, err = ioutil.ReadAll(resp.Body)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+				resp.Body.Close()
 				return
 			}
 			resp.Body.Close()
+
 			status = response.Status
 			for _, hostname := range response.Selectors {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Selectvalue}
diff --git a/pkg/subscraping/sources/ipv4info/ipv4info.go b/v2/pkg/subscraping/sources/ipv4info/ipv4info.go
similarity index 79%
rename from pkg/subscraping/sources/ipv4info/ipv4info.go
rename to v2/pkg/subscraping/sources/ipv4info/ipv4info.go
index ac94c2b..dfc4c65 100755
--- a/pkg/subscraping/sources/ipv4info/ipv4info.go
+++ b/v2/pkg/subscraping/sources/ipv4info/ipv4info.go
@@ -2,11 +2,13 @@ package ipv4info
 
 import (
 	"context"
+	"fmt"
 	"io/ioutil"
+	"net/http"
 	"regexp"
 	"strconv"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // Source is the passive scraping agent
@@ -17,11 +19,12 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com/search/"+domain)
-		if err != nil {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://ipv4info.com/search/%s", domain))
+		if err != nil && resp == nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
@@ -29,50 +32,53 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
+
 		src := string(body)
 
+		if resp.StatusCode != http.StatusOK {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", src)}
+			return
+		}
+
 		regxTokens := regexp.MustCompile("/ip-address/(.*)/" + domain)
 		matchTokens := regxTokens.FindAllString(src, -1)
 
-		if len(matchTokens) <= 0 {
-			close(results)
+		if len(matchTokens) == 0 {
 			return
 		}
+
 		token := matchTokens[0]
 
-		resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
+		resp, err = session.SimpleGet(ctx, "http://ipv4info.com"+token)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
 		body, err = ioutil.ReadAll(resp.Body)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			resp.Body.Close()
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 		resp.Body.Close()
-		src = string(body)
 
+		src = string(body)
 		regxTokens = regexp.MustCompile("/dns/(.*?)/" + domain)
 		matchTokens = regxTokens.FindAllString(src, -1)
-		if len(matchTokens) <= 0 {
-			close(results)
+		if len(matchTokens) == 0 {
 			return
 		}
-		token = matchTokens[0]
 
-		resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
+		token = matchTokens[0]
+		resp, err = session.SimpleGet(ctx, "http://ipv4info.com"+token)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
@@ -80,24 +86,22 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
-		src = string(body)
 
+		src = string(body)
 		regxTokens = regexp.MustCompile("/subdomains/(.*?)/" + domain)
 		matchTokens = regxTokens.FindAllString(src, -1)
-		if len(matchTokens) <= 0 {
-			close(results)
+		if len(matchTokens) == 0 {
 			return
 		}
-		token = matchTokens[0]
 
-		resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
+		token = matchTokens[0]
+		resp, err = session.SimpleGet(ctx, "http://ipv4info.com"+token)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
@@ -105,12 +109,11 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
-		src = string(body)
 
+		src = string(body)
 		for _, match := range session.Extractor.FindAllString(src, -1) {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
 		}
@@ -122,7 +125,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 				break
 			}
 		}
-		close(results)
 	}()
 
 	return results
@@ -146,7 +148,7 @@ func (s *Source) getSubdomains(ctx context.Context, domain string, nextPage *int
 			}
 			token := matchTokens[0]
 
-			resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
+			resp, err := session.SimpleGet(ctx, "http://ipv4info.com"+token)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 				return false
diff --git a/pkg/subscraping/sources/passivetotal/passivetotal.go b/v2/pkg/subscraping/sources/passivetotal/passivetotal.go
similarity index 67%
rename from pkg/subscraping/sources/passivetotal/passivetotal.go
rename to v2/pkg/subscraping/sources/passivetotal/passivetotal.go
index 03b5ff9..76495a3 100755
--- a/pkg/subscraping/sources/passivetotal/passivetotal.go
+++ b/v2/pkg/subscraping/sources/passivetotal/passivetotal.go
@@ -3,10 +3,9 @@ package passivetotal
 import (
 	"bytes"
 	"context"
-	"net/http"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type response struct {
@@ -21,37 +20,35 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.PassiveTotalUsername == "" || session.Keys.PassiveTotalPassword == "" {
-			close(results)
 			return
 		}
 
 		// Create JSON Get body
 		var request = []byte(`{"query":"` + domain + `"}`)
 
-		req, err := http.NewRequestWithContext(ctx, "GET", "https://api.passivetotal.org/v2/enrichment/subdomains", bytes.NewBuffer(request))
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
-			return
-		}
-
-		req.SetBasicAuth(session.Keys.PassiveTotalUsername, session.Keys.PassiveTotalPassword)
-		req.Header.Set("Content-Type", "application/json")
-
-		resp, err := session.Client.Do(req)
+		resp, err := session.HTTPRequest(
+			ctx,
+			"GET",
+			"https://api.passivetotal.org/v2/enrichment/subdomains",
+			"",
+			map[string]string{"Content-Type": "application/json"},
+			bytes.NewBuffer(request),
+			subscraping.BasicAuth{Username: session.Keys.PassiveTotalUsername, Password: session.Keys.PassiveTotalPassword},
+		)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		data := response{}
+		var data response
 		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
@@ -60,7 +57,6 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			finalSubdomain := subdomain + "." + domain
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: finalSubdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/pkg/subscraping/sources/rapiddns/rapiddns.go b/v2/pkg/subscraping/sources/rapiddns/rapiddns.go
similarity index 83%
rename from pkg/subscraping/sources/rapiddns/rapiddns.go
rename to v2/pkg/subscraping/sources/rapiddns/rapiddns.go
index abb2828..01754e4 100644
--- a/pkg/subscraping/sources/rapiddns/rapiddns.go
+++ b/v2/pkg/subscraping/sources/rapiddns/rapiddns.go
@@ -5,7 +5,7 @@ import (
 	"context"
 	"io/ioutil"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // Source is the passive scraping agent
@@ -17,20 +17,23 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 	go func() {
 		defer close(results)
-		resp, err := session.NormalGetWithContext(ctx, "https://rapiddns.io/subdomain/"+domain+"?full=1")
+
+		resp, err := session.SimpleGet(ctx, "https://rapiddns.io/subdomain/"+domain+"?full=1")
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
 		body, err := ioutil.ReadAll(resp.Body)
-		resp.Body.Close()
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			resp.Body.Close()
 			return
 		}
 
+		resp.Body.Close()
+
 		src := string(body)
 		for _, subdomain := range session.Extractor.FindAllString(src, -1) {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
diff --git a/pkg/subscraping/sources/threatminer/threatminer.go b/v2/pkg/subscraping/sources/recon/recon.go
old mode 100755
new mode 100644
similarity index 57%
rename from pkg/subscraping/sources/threatminer/threatminer.go
rename to v2/pkg/subscraping/sources/recon/recon.go
index 755e0c1..4e45ade
--- a/pkg/subscraping/sources/threatminer/threatminer.go
+++ b/v2/pkg/subscraping/sources/recon/recon.go
@@ -1,13 +1,17 @@
-package threatminer
+package recon
 
 import (
 	"context"
+	"encoding/json"
 	"fmt"
-	"io/ioutil"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
+type subdomain struct {
+	RawDomain string `json:"rawDomain"`
+}
+
 // Source is the passive scraping agent
 type Source struct{}
 
@@ -16,30 +20,31 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
+		defer close(results)
+
+		if session.Keys.Recon == "" {
+			return
+		}
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://recon.dev/api/search?key=%s&domain=%s", session.Keys.Recon, domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		// Get the response body
-		body, err := ioutil.ReadAll(resp.Body)
+		var subdomains []subdomain
+		err = json.NewDecoder(resp.Body).Decode(&subdomains)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 		resp.Body.Close()
 
-		src := string(body)
-
-		for _, match := range session.Extractor.FindAllString(src, -1) {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
+		for _, subdomain := range subdomains {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain.RawDomain}
 		}
-		close(results)
 	}()
 
 	return results
@@ -47,5 +52,5 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 // Name returns the name of the source
 func (s *Source) Name() string {
-	return "threatminer"
+	return "recon"
 }
diff --git a/v2/pkg/subscraping/sources/riddler/riddler.go b/v2/pkg/subscraping/sources/riddler/riddler.go
new file mode 100644
index 0000000..f369cf4
--- /dev/null
+++ b/v2/pkg/subscraping/sources/riddler/riddler.go
@@ -0,0 +1,48 @@
+package riddler
+
+import (
+	"bufio"
+	"context"
+	"fmt"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://riddler.io/search?q=pld:%s&view_type=data_table", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		scanner := bufio.NewScanner(resp.Body)
+		for scanner.Scan() {
+			line := scanner.Text()
+			if line == "" {
+				continue
+			}
+			subdomain := session.Extractor.FindString(line)
+			if subdomain != "" {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			}
+		}
+		resp.Body.Close()
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "riddler"
+}
diff --git a/v2/pkg/subscraping/sources/robtex/robtext.go b/v2/pkg/subscraping/sources/robtex/robtext.go
new file mode 100644
index 0000000..9d0ffaa
--- /dev/null
+++ b/v2/pkg/subscraping/sources/robtex/robtext.go
@@ -0,0 +1,95 @@
+package robtex
+
+import (
+	"bufio"
+	"bytes"
+	"context"
+	"fmt"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+const (
+	addrRecord     = "A"
+	iPv6AddrRecord = "AAAA"
+	baseURL        = "https://proapi.robtex.com/pdns"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+type result struct {
+	Rrname string `json:"rrname"`
+	Rrdata string `json:"rrdata"`
+	Rrtype string `json:"rrtype"`
+}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.Robtex == "" {
+			return
+		}
+
+		headers := map[string]string{"Content-Type": "application/x-ndjson"}
+
+		ips, err := enumerate(ctx, session, fmt.Sprintf("%s/forward/%s?key=%s", baseURL, domain, session.Keys.Robtex), headers)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			return
+		}
+
+		for _, result := range ips {
+			if result.Rrtype == addrRecord || result.Rrtype == iPv6AddrRecord {
+				domains, err := enumerate(ctx, session, fmt.Sprintf("%s/reverse/%s?key=%s", baseURL, result.Rrdata, session.Keys.Robtex), headers)
+				if err != nil {
+					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+					return
+				}
+				for _, result := range domains {
+					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: result.Rrdata}
+				}
+			}
+		}
+	}()
+	return results
+}
+
+func enumerate(ctx context.Context, session *subscraping.Session, targetURL string, headers map[string]string) ([]result, error) {
+	var results []result
+
+	resp, err := session.Get(ctx, targetURL, "", headers)
+	if err != nil {
+		session.DiscardHTTPResponse(resp)
+		return results, err
+	}
+
+	scanner := bufio.NewScanner(resp.Body)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if line == "" {
+			continue
+		}
+		var response result
+		err = jsoniter.NewDecoder(bytes.NewBufferString(line)).Decode(&response)
+		if err != nil {
+			return results, err
+		}
+
+		results = append(results, response)
+	}
+
+	resp.Body.Close()
+
+	return results, nil
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "robtex"
+}
diff --git a/pkg/subscraping/sources/securitytrails/securitytrails.go b/v2/pkg/subscraping/sources/securitytrails/securitytrails.go
similarity index 79%
rename from pkg/subscraping/sources/securitytrails/securitytrails.go
rename to v2/pkg/subscraping/sources/securitytrails/securitytrails.go
index 71f2276..2e525e0 100755
--- a/pkg/subscraping/sources/securitytrails/securitytrails.go
+++ b/v2/pkg/subscraping/sources/securitytrails/securitytrails.go
@@ -6,7 +6,7 @@ import (
 	"strings"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type response struct {
@@ -21,39 +21,38 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.Securitytrails == "" {
-			close(results)
 			return
 		}
 
 		resp, err := session.Get(ctx, fmt.Sprintf("https://api.securitytrails.com/v1/domain/%s/subdomains", domain), "", map[string]string{"APIKEY": session.Keys.Securitytrails})
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		response := response{}
-		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+		var securityTrailsResponse response
+		err = jsoniter.NewDecoder(resp.Body).Decode(&securityTrailsResponse)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
+
 		resp.Body.Close()
 
-		for _, subdomain := range response.Subdomains {
+		for _, subdomain := range securityTrailsResponse.Subdomains {
 			if strings.HasSuffix(subdomain, ".") {
-				subdomain = subdomain + domain
+				subdomain += domain
 			} else {
 				subdomain = subdomain + "." + domain
 			}
 
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/shodan/shodan.go b/v2/pkg/subscraping/sources/shodan/shodan.go
new file mode 100644
index 0000000..31c68f7
--- /dev/null
+++ b/v2/pkg/subscraping/sources/shodan/shodan.go
@@ -0,0 +1,74 @@
+package shodan
+
+import (
+	"context"
+	"fmt"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+type dnsdbLookupResponse struct {
+	Domain string `json:"domain"`
+	Data   []struct {
+		Subdomain string `json:"subdomain"`
+		Type      string `json:"type"`
+		Value     string `json:"value"`
+	} `json:"data"`
+	Result int    `json:"result"`
+	Error  string `json:"error"`
+}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.Shodan == "" {
+			return
+		}
+
+		searchURL := fmt.Sprintf("https://api.shodan.io/dns/domain/%s?key=%s", domain, session.Keys.Shodan)
+		resp, err := session.SimpleGet(ctx, searchURL)
+		if err != nil {
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		defer resp.Body.Close()
+
+		var response dnsdbLookupResponse
+		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			return
+		}
+
+		if response.Error != "" {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%v", response.Error)}
+			return
+		}
+
+		for _, data := range response.Data {
+			if data.Subdomain != "" {
+				if data.Type == "CNAME" {
+					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: data.Value}
+				} else if data.Type == "A" {
+					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: fmt.Sprintf("%s.%s", data.Subdomain, domain)}
+				}
+			}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "shodan"
+}
diff --git a/v2/pkg/subscraping/sources/sitedossier/sitedossier.go b/v2/pkg/subscraping/sources/sitedossier/sitedossier.go
new file mode 100755
index 0000000..c91c438
--- /dev/null
+++ b/v2/pkg/subscraping/sources/sitedossier/sitedossier.go
@@ -0,0 +1,83 @@
+package sitedossier
+
+import (
+	"context"
+	"fmt"
+	"io/ioutil"
+	"math/rand"
+	"net/http"
+	"regexp"
+	"time"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// SleepRandIntn is the integer value to get the pseudo-random number
+// to sleep before find the next match
+const SleepRandIntn = 5
+
+var reNext = regexp.MustCompile(`<a href="([A-Za-z0-9/.]+)"><b>`)
+
+type agent struct {
+	results chan subscraping.Result
+	session *subscraping.Session
+}
+
+func (a *agent) enumerate(ctx context.Context, baseURL string) {
+	select {
+	case <-ctx.Done():
+		return
+	default:
+	}
+
+	resp, err := a.session.SimpleGet(ctx, baseURL)
+	isnotfound := resp != nil && resp.StatusCode == http.StatusNotFound
+	if err != nil && !isnotfound {
+		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
+		a.session.DiscardHTTPResponse(resp)
+	}
+
+	body, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
+		resp.Body.Close()
+	}
+	resp.Body.Close()
+
+	src := string(body)
+	for _, match := range a.session.Extractor.FindAllString(src, -1) {
+		a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
+	}
+
+	match1 := reNext.FindStringSubmatch(src)
+	time.Sleep(time.Duration((3 + rand.Intn(SleepRandIntn))) * time.Second)
+
+	if len(match1) > 0 {
+		a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
+	}
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	a := agent{
+		session: session,
+		results: results,
+	}
+
+	go func() {
+		a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain))
+		close(a.results)
+	}()
+
+	return a.results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "sitedossier"
+}
diff --git a/pkg/subscraping/sources/spyse/spyse.go b/v2/pkg/subscraping/sources/spyse/spyse.go
similarity index 74%
rename from pkg/subscraping/sources/spyse/spyse.go
rename to v2/pkg/subscraping/sources/spyse/spyse.go
index 8dc3983..347f2a6 100644
--- a/pkg/subscraping/sources/spyse/spyse.go
+++ b/v2/pkg/subscraping/sources/spyse/spyse.go
@@ -2,88 +2,80 @@ package spyse
 
 import (
 	"context"
-	"strconv"
 	"fmt"
+	"strconv"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
-
 type resultObject struct {
 	Name string `json:"name"`
 }
 
 type dataObject struct {
-	Items []resultObject `json:"items"`
-	Total_Count int `json:"total_count"`
+	Items      []resultObject `json:"items"`
+	TotalCount int            `json:"total_count"`
 }
 
 type errorObject struct {
-	Code string `json:"code"`
+	Code    string `json:"code"`
 	Message string `json:"message"`
 }
 
-
 type spyseResult struct {
-	Data dataObject `json:"data"`
+	Data  dataObject    `json:"data"`
 	Error []errorObject `json:"error"`
 }
 
-
+// Source is the passive scraping agent
 type Source struct{}
 
+// Run function returns all subdomains found with the service
 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.Spyse == "" {
-			close(results)
 			return
 		}
 
-		maxCount := 100;
+		maxCount := 100
 
 		for offSet := 0; offSet <= maxCount; offSet += 100 {
 			resp, err := session.Get(ctx, fmt.Sprintf("https://api.spyse.com/v3/data/domain/subdomain?domain=%s&limit=100&offset=%s", domain, strconv.Itoa(offSet)), "", map[string]string{"Authorization": "Bearer " + session.Keys.Spyse})
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-				session.DiscardHttpResponse(resp)
-				close(results)
+				session.DiscardHTTPResponse(resp)
 				return
 			}
 
-
-			var response spyseResult;
-
+			var response spyseResult
 			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
-
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 				resp.Body.Close()
-				close(results)
 				return
 			}
 			resp.Body.Close()
 
-			if response.Data.Total_Count == 0 {
-				close(results)
+			if response.Data.TotalCount == 0 {
 				return
 			}
 
-			maxCount = response.Data.Total_Count;
+			maxCount = response.Data.TotalCount
 
 			for _, hostname := range response.Data.Items {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Name}
 			}
 		}
-		close(results)
 	}()
 
 	return results
 }
 
-
 // Name returns the name of the source
 func (s *Source) Name() string {
 	return "spyse"
diff --git a/pkg/subscraping/sources/sublist3r/subllist3r.go b/v2/pkg/subscraping/sources/sublist3r/subllist3r.go
similarity index 74%
rename from pkg/subscraping/sources/sublist3r/subllist3r.go
rename to v2/pkg/subscraping/sources/sublist3r/subllist3r.go
index 2ea9c74..1b89656 100644
--- a/pkg/subscraping/sources/sublist3r/subllist3r.go
+++ b/v2/pkg/subscraping/sources/sublist3r/subllist3r.go
@@ -5,7 +5,7 @@ import (
 	"encoding/json"
 	"fmt"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // Source is the passive scraping agent
@@ -16,28 +16,28 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.sublist3r.com/search.php?domain=%s", domain))
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://api.sublist3r.com/search.php?domain=%s", domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
-		defer resp.Body.Close()
+
 		var subdomains []string
-		// Get the response body and unmarshal
 		err = json.NewDecoder(resp.Body).Decode(&subdomains)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
 
+		resp.Body.Close()
+
 		for _, subdomain := range subdomains {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/threatbook/threatbook.go b/v2/pkg/subscraping/sources/threatbook/threatbook.go
new file mode 100644
index 0000000..f7de108
--- /dev/null
+++ b/v2/pkg/subscraping/sources/threatbook/threatbook.go
@@ -0,0 +1,78 @@
+package threatbook
+
+import (
+	"context"
+	"fmt"
+	"strconv"
+
+	jsoniter "github.com/json-iterator/go"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+type threatBookResponse struct {
+	ResponseCode int64  `json:"response_code"`
+	VerboseMsg   string `json:"verbose_msg"`
+	Data         struct {
+		Domain     string `json:"domain"`
+		SubDomains struct {
+			Total string   `json:"total"`
+			Data  []string `json:"data"`
+		} `json:"sub_domains"`
+	} `json:"data"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		if session.Keys.ThreatBook == "" {
+			return
+		}
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://api.threatbook.cn/v3/domain/sub_domains?apikey=%s&resource=%s", session.Keys.ThreatBook, domain))
+		if err != nil && resp == nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		var response threatBookResponse
+		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			resp.Body.Close()
+			return
+		}
+		resp.Body.Close()
+
+		if response.ResponseCode != 0 {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("code %d, %s", response.ResponseCode, response.VerboseMsg)}
+			return
+		}
+
+		total, err := strconv.ParseInt(response.Data.SubDomains.Total, 10, 64)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			return
+		}
+
+		if total > 0 {
+			for _, subdomain := range response.Data.SubDomains.Data {
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "threatbook"
+}
diff --git a/pkg/subscraping/sources/threatcrowd/threatcrowd.go b/v2/pkg/subscraping/sources/threatcrowd/threatcrowd.go
similarity index 59%
rename from pkg/subscraping/sources/threatcrowd/threatcrowd.go
rename to v2/pkg/subscraping/sources/threatcrowd/threatcrowd.go
index a27ada3..1de0d63 100755
--- a/pkg/subscraping/sources/threatcrowd/threatcrowd.go
+++ b/v2/pkg/subscraping/sources/threatcrowd/threatcrowd.go
@@ -3,11 +3,16 @@ package threatcrowd
 import (
 	"context"
 	"fmt"
-	"io/ioutil"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
+type response struct {
+	Subdomains []string `json:"subdomains"`
+}
+
 // Source is the passive scraping agent
 type Source struct{}
 
@@ -16,30 +21,27 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=%s", domain))
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=%s", domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		// Get the response body
-		body, err := ioutil.ReadAll(resp.Body)
+		defer resp.Body.Close()
+
+		var data response
+		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			resp.Body.Close()
-			close(results)
 			return
 		}
-		resp.Body.Close()
-
-		src := string(body)
 
-		for _, match := range session.Extractor.FindAllString(src, -1) {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
+		for _, subdomain := range data.Subdomains {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/threatminer/threatminer.go b/v2/pkg/subscraping/sources/threatminer/threatminer.go
new file mode 100755
index 0000000..20c41b8
--- /dev/null
+++ b/v2/pkg/subscraping/sources/threatminer/threatminer.go
@@ -0,0 +1,55 @@
+package threatminer
+
+import (
+	"context"
+	"fmt"
+
+	jsoniter "github.com/json-iterator/go"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+type response struct {
+	StatusCode    string   `json:"status_code"`
+	StatusMessage string   `json:"status_message"`
+	Results       []string `json:"results"`
+}
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		defer resp.Body.Close()
+
+		var data response
+		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			return
+		}
+
+		for _, subdomain := range data.Results {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "threatminer"
+}
diff --git a/pkg/subscraping/sources/virustotal/virustotal.go b/v2/pkg/subscraping/sources/virustotal/virustotal.go
similarity index 75%
rename from pkg/subscraping/sources/virustotal/virustotal.go
rename to v2/pkg/subscraping/sources/virustotal/virustotal.go
index 6442e44..640698f 100755
--- a/pkg/subscraping/sources/virustotal/virustotal.go
+++ b/v2/pkg/subscraping/sources/virustotal/virustotal.go
@@ -5,7 +5,7 @@ import (
 	"fmt"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 type response struct {
@@ -20,33 +20,32 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.Virustotal == "" {
-			close(results)
 			return
 		}
 
-		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", session.Keys.Virustotal, domain))
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", session.Keys.Virustotal, domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			session.DiscardHttpResponse(resp)
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		data := response{}
+		var data response
 		err = jsoniter.NewDecoder(resp.Body).Decode(&data)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 			resp.Body.Close()
-			close(results)
 			return
 		}
+
 		resp.Body.Close()
 
 		for _, subdomain := range data.Subdomains {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
 		}
-		close(results)
 	}()
 
 	return results
diff --git a/v2/pkg/subscraping/sources/waybackarchive/waybackarchive.go b/v2/pkg/subscraping/sources/waybackarchive/waybackarchive.go
new file mode 100755
index 0000000..8c36346
--- /dev/null
+++ b/v2/pkg/subscraping/sources/waybackarchive/waybackarchive.go
@@ -0,0 +1,57 @@
+package waybackarchive
+
+import (
+	"bufio"
+	"context"
+	"fmt"
+	"net/url"
+	"strings"
+
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
+)
+
+// Source is the passive scraping agent
+type Source struct{}
+
+// Run function returns all subdomains found with the service
+func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
+	results := make(chan subscraping.Result)
+
+	go func() {
+		defer close(results)
+
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=txt&fl=original&collapse=urlkey", domain))
+		if err != nil {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
+			session.DiscardHTTPResponse(resp)
+			return
+		}
+
+		defer resp.Body.Close()
+
+		scanner := bufio.NewScanner(resp.Body)
+		for scanner.Scan() {
+			line := scanner.Text()
+			if line == "" {
+				continue
+			}
+			line, _ = url.QueryUnescape(line)
+			subdomain := session.Extractor.FindString(line)
+			if subdomain != "" {
+				// fix for triple encoded URL
+				subdomain = strings.ToLower(subdomain)
+				subdomain = strings.TrimPrefix(subdomain, "25")
+				subdomain = strings.TrimPrefix(subdomain, "2f")
+
+				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
+			}
+		}
+	}()
+
+	return results
+}
+
+// Name returns the name of the source
+func (s *Source) Name() string {
+	return "waybackarchive"
+}
diff --git a/pkg/subscraping/sources/urlscan/urlscan.go b/v2/pkg/subscraping/sources/ximcx/ximcx.go
old mode 100755
new mode 100644
similarity index 53%
rename from pkg/subscraping/sources/urlscan/urlscan.go
rename to v2/pkg/subscraping/sources/ximcx/ximcx.go
index ddb61d9..7bee37d
--- a/pkg/subscraping/sources/urlscan/urlscan.go
+++ b/v2/pkg/subscraping/sources/ximcx/ximcx.go
@@ -1,54 +1,57 @@
-package urlscan
+package ximcx
 
 import (
 	"context"
 	"fmt"
 
 	jsoniter "github.com/json-iterator/go"
-	"github.com/m-mizutani/urlscan-go/urlscan"
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // Source is the passive scraping agent
 type Source struct{}
 
+type domain struct {
+	Domain string `json:"domain"`
+}
+
+type ximcxResponse struct {
+	Code    int64    `json:"code"`
+	Message string   `json:"message"`
+	Data    []domain `json:"data"`
+}
+
 // Run function returns all subdomains found with the service
 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
 	results := make(chan subscraping.Result)
 
 	go func() {
-		if session.Keys.URLScan == "" {
-			close(results)
-			return
-		}
+		defer close(results)
 
-		client := urlscan.NewClient(session.Keys.URLScan)
-		task, err := client.Submit(urlscan.SubmitArguments{URL: fmt.Sprintf("https://%s", domain)})
+		resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://sbd.ximcx.cn/DomainServlet?domain=%s", domain))
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			session.DiscardHTTPResponse(resp)
 			return
 		}
 
-		err = task.Wait()
+		var response ximcxResponse
+		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+			resp.Body.Close()
 			return
 		}
+		resp.Body.Close()
 
-		data, err := jsoniter.Marshal(task.Result.Data)
-		if err != nil {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
+		if response.Code > 0 {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%d, %s", response.Code, response.Message)}
 			return
 		}
 
-		match := session.Extractor.FindAllString(string(data), -1)
-		for _, m := range match {
-			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: m}
+		for _, result := range response.Data {
+			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: result.Domain}
 		}
-		close(results)
 	}()
 
 	return results
@@ -56,5 +59,5 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 
 // Name returns the name of the source
 func (s *Source) Name() string {
-	return "urlscan"
+	return "ximcx"
 }
diff --git a/pkg/subscraping/sources/zoomeye/zoomeye.go b/v2/pkg/subscraping/sources/zoomeye/zoomeye.go
similarity index 74%
rename from pkg/subscraping/sources/zoomeye/zoomeye.go
rename to v2/pkg/subscraping/sources/zoomeye/zoomeye.go
index 9a2c92c..4955fda 100644
--- a/pkg/subscraping/sources/zoomeye/zoomeye.go
+++ b/v2/pkg/subscraping/sources/zoomeye/zoomeye.go
@@ -6,11 +6,9 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
-	"io"
-	"io/ioutil"
 	"net/http"
 
-	"github.com/projectdiscovery/subfinder/pkg/subscraping"
+	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
 )
 
 // zoomAuth holds the ZoomEye credentials
@@ -39,22 +37,23 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 	results := make(chan subscraping.Result)
 
 	go func() {
+		defer close(results)
+
 		if session.Keys.ZoomEyeUsername == "" || session.Keys.ZoomEyePassword == "" {
-			close(results)
 			return
 		}
-		jwt, err := doLogin(session)
+
+		jwt, err := doLogin(ctx, session)
 		if err != nil {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-			close(results)
 			return
 		}
 		// check if jwt is null
 		if jwt == "" {
 			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("could not log into zoomeye")}
-			close(results)
 			return
 		}
+
 		headers := map[string]string{
 			"Authorization": fmt.Sprintf("JWT %s", jwt),
 			"Accept":        "application/json",
@@ -67,22 +66,20 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			if err != nil {
 				if !isForbidden && currentPage == 0 {
 					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
-					session.DiscardHttpResponse(resp)
+					session.DiscardHTTPResponse(resp)
 				}
-				close(results)
 				return
 			}
 
-			defer resp.Body.Close()
-			res := &zoomeyeResults{}
-			err = json.NewDecoder(resp.Body).Decode(res)
+			var res zoomeyeResults
+			err = json.NewDecoder(resp.Body).Decode(&res)
 			if err != nil {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
 				resp.Body.Close()
-				close(results)
 				return
 			}
 			resp.Body.Close()
+
 			for _, r := range res.Matches {
 				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: r.Site}
 				for _, domain := range r.Domains {
@@ -91,14 +88,13 @@ func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Se
 			}
 			currentPage++
 		}
-		close(results)
 	}()
 
 	return results
 }
 
 // doLogin performs authentication on the ZoomEye API
-func doLogin(session *subscraping.Session) (string, error) {
+func doLogin(ctx context.Context, session *subscraping.Session) (string, error) {
 	creds := &zoomAuth{
 		User: session.Keys.ZoomEyeUsername,
 		Pass: session.Keys.ZoomEyePassword,
@@ -107,25 +103,16 @@ func doLogin(session *subscraping.Session) (string, error) {
 	if err != nil {
 		return "", err
 	}
-	req, err := http.NewRequest("POST", "https://api.zoomeye.org/user/login", bytes.NewBuffer(body))
+	resp, err := session.SimplePost(ctx, "https://api.zoomeye.org/user/login", "application/json", bytes.NewBuffer(body))
 	if err != nil {
+		session.DiscardHTTPResponse(resp)
 		return "", err
 	}
-	req.Header.Add("Content-Type", "application/json")
-	resp, err := session.Client.Do(req)
-	if err != nil {
-		return "", err
-	}
-	// if not 200, bad credentials
-	if resp.StatusCode != 200 {
-		io.Copy(ioutil.Discard, resp.Body)
-		resp.Body.Close()
-		return "", fmt.Errorf("login failed, non-200 response from zoomeye")
-	}
 
 	defer resp.Body.Close()
-	login := &loginResp{}
-	err = json.NewDecoder(resp.Body).Decode(login)
+
+	var login loginResp
+	err = json.NewDecoder(resp.Body).Decode(&login)
 	if err != nil {
 		return "", err
 	}
diff --git a/v2/pkg/subscraping/types.go b/v2/pkg/subscraping/types.go
new file mode 100755
index 0000000..8a0c445
--- /dev/null
+++ b/v2/pkg/subscraping/types.go
@@ -0,0 +1,76 @@
+package subscraping
+
+import (
+	"context"
+	"net/http"
+	"regexp"
+)
+
+// BasicAuth request's Authorization header
+type BasicAuth struct {
+	Username string
+	Password string
+}
+
+// Source is an interface inherited by each passive source
+type Source interface {
+	// Run takes a domain as argument and a session object
+	// which contains the extractor for subdomains, http client
+	// and other stuff.
+	Run(context.Context, string, *Session) <-chan Result
+	// Name returns the name of the source
+	Name() string
+}
+
+// Session is the option passed to the source, an option is created
+// uniquely for each source.
+type Session struct {
+	// Extractor is the regex for subdomains created for each domain
+	Extractor *regexp.Regexp
+	// Keys is the API keys for the application
+	Keys *Keys
+	// Client is the current http client
+	Client *http.Client
+}
+
+// Keys contains the current API Keys we have in store
+type Keys struct {
+	Binaryedge           string   `json:"binaryedge"`
+	CensysToken          string   `json:"censysUsername"`
+	CensysSecret         string   `json:"censysPassword"`
+	Certspotter          string   `json:"certspotter"`
+	Chaos                string   `json:"chaos"`
+	DNSDB                string   `json:"dnsdb"`
+	GitHub               []string `json:"github"`
+	IntelXHost           string   `json:"intelXHost"`
+	IntelXKey            string   `json:"intelXKey"`
+	PassiveTotalUsername string   `json:"passivetotal_username"`
+	PassiveTotalPassword string   `json:"passivetotal_password"`
+	Recon                string   `json:"recon"`
+	Robtex               string   `json:"robtex"`
+	Securitytrails       string   `json:"securitytrails"`
+	Shodan               string   `json:"shodan"`
+	Spyse                string   `json:"spyse"`
+	ThreatBook           string   `json:"threatbook"`
+	URLScan              string   `json:"urlscan"`
+	Virustotal           string   `json:"virustotal"`
+	ZoomEyeUsername      string   `json:"zoomeye_username"`
+	ZoomEyePassword      string   `json:"zoomeye_password"`
+}
+
+// Result is a result structure returned by a source
+type Result struct {
+	Type   ResultType
+	Source string
+	Value  string
+	Error  error
+}
+
+// ResultType is the type of result returned by the source
+type ResultType int
+
+// Types of results returned by the source
+const (
+	Subdomain ResultType = iota
+	Error
+)
diff --git a/pkg/subscraping/utils.go b/v2/pkg/subscraping/utils.go
similarity index 100%
rename from pkg/subscraping/utils.go
rename to v2/pkg/subscraping/utils.go