Codebase list subfinder / upstream/2.3.8
New upstream version 2.3.8 Sophie Brun 3 years ago
66 changed file(s) with 4708 addition(s) and 0 deletion(s). Raw diff Collapse all Expand all
0 name: Build
1 on:
2 push:
3 branches:
4 - master
5 pull_request:
6
7 jobs:
8 build:
9 name: Build
10 runs-on: ubuntu-latest
11 steps:
12 - name: Set up Go
13 uses: actions/setup-go@v2
14 with:
15 go-version: 1.13
16
17 - name: Check out code
18 uses: actions/checkout@v2
19
20 - name: Test
21 run: go test .
22 working-directory: cmd/subfinder/
23
24 - name: Build
25 run: go build .
26 working-directory: cmd/subfinder/
0 # dockerhub-push pushes docker build to dockerhub automatically
1 # on the creation of a new release
2 name: Publish to Dockerhub on creation of a new release
3 on:
4 release:
5 types: [published]
6 jobs:
7 build:
8 runs-on: ubuntu-latest
9 steps:
10 - uses: actions/checkout@master
11 - name: Publish to Dockerhub Registry
12 uses: elgohr/Publish-Docker-Github-Action@master
13 with:
14 name: projectdiscovery/subfinder
15 username: ${{ secrets.DOCKER_USERNAME }}
16 password: ${{ secrets.DOCKER_PASSWORD }}
0 name: Release
1 on:
2 create:
3 tags:
4 - v*
5
6 jobs:
7 release:
8 runs-on: ubuntu-latest
9 steps:
10 -
11 name: "Check out code"
12 uses: actions/checkout@v2
13 with:
14 fetch-depth: 0
15 -
16 name: "Set up Go"
17 uses: actions/setup-go@v2
18 with:
19 go-version: 1.14
20 -
21 env:
22 GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
23 name: "Create release on GitHub"
24 uses: goreleaser/goreleaser-action@v2
25 with:
26 args: "release --rm-dist"
27 version: latest
0 .DS_Store
1 cmd/subfinder/subfinder
2 vendor/
3 .idea
0 builds:
1 - binary: subfinder
2 main: cmd/subfinder/main.go
3 goos:
4 - linux
5 - windows
6 - darwin
7 goarch:
8 - amd64
9 - 386
10 - arm
11 - arm64
12
13 archives:
14 - id: tgz
15 format: tar.gz
16 replacements:
17 darwin: macOS
18 format_overrides:
19 - goos: windows
20 format: zip
0 ## Disclaimer
1
2 Subfinder leverages multiple open APIs, it is developed for individuals to help them for research or internal work. If you wish to incorporate this tool into a commercial offering or purposes, you must agree to the Terms of the leveraged services:
3
4 - Project Sonar / Bufferover: https://opendata.rapid7.com/about
5 - CommonCrawl: https://commoncrawl.org/terms-of-use/full
6 - certspotter: https://sslmate.com/terms
7 - dnsdumpster: https://hackertarget.com/terms
8 - entrust: https://www.entrustdatacard.com/pages/terms-of-use
9 - Google Transparency: https://policies.google.com/terms
10 - Threatcrowd: https://www.alienvault.com/terms/website-terms-of-use07may2018
11
12 ---
13
14 You expressly understand and agree that Subfinder (creators and contributors) shall not be liable for any damages or losses resulting from your use of this tool or third-party products that use it.
15
16 Creators are not in charge of, and bear no responsibility for, any kind of:
17
18 - Unlawful or illegal use of the tool.
19 - Legal or Law infringement (acted in any country, state, municipality, place) by third parties and users.
20 - Act against ethical and / or human moral, ethic, and peoples and cultures of the world.
21 - Malicious act, capable of causing damage to third parties, promoted or distributed by third parties or the user through this tool.
22
23
24 ### Contact
25
26 Please contact at [email protected] for any questions.
0 # Build Container
1 FROM golang:1.13.4-alpine3.10 AS build-env
2 MAINTAINER Ice3man ([email protected])
3 RUN apk add --no-cache --upgrade git openssh-client ca-certificates
4 RUN go get -u github.com/golang/dep/cmd/dep
5 WORKDIR /go/src/app
6
7 # Install
8 RUN go get -u github.com/projectdiscovery/subfinder/cmd/subfinder
9
10 ENTRYPOINT ["subfinder"]
0 ## What's the problem (or question)?
1 <!--- If describing a bug, tell us what happens instead of the expected behavior -->
2 <!--- If suggesting a change/improvement, explain the difference from current behavior -->
3
4 ## Do you have an idea for a solution?
5 <!--- Not obligatory, but suggest a fix/reason for the bug, -->
6 <!--- or ideas how to implement the addition or change -->
7
8 ## How can we reproduce the issue?
9 <!--- Provide unambiguous set of steps to reproduce this bug. Include command to reproduce, if relevant (you can mask the sensitive data) -->
10 1.
11 2.
12 3.
13 4.
14
15 ## What are the running context details?
16 <!--- Include as many relevant details about the running context you experienced the bug/problem in -->
17 * Installation method (e.g. `pip`, `apt-get`, `git clone` or `zip`/`tar.gz`):
18 * Client OS (e.g. `Microsoft Windows 10`)
19 * Program version (see banner):
20 * Relevant console output (if any):
21 * Exception traceback (if any):
0 MIT License
1
2 Copyright (c) Exposed Atoms Pvt Ltd
3
4 Permission is hereby granted, free of charge, to any person obtaining a copy
5 of this software and associated documentation files (the "Software"), to deal
6 in the Software without restriction, including without limitation the rights
7 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
8 copies of the Software, and to permit persons to whom the Software is
9 furnished to do so, subject to the following conditions:
10
11 The above copyright notice and this permission notice shall be included in all
12 copies or substantial portions of the Software.
13
14 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
19 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
20 SOFTWARE.
0 <h1 align="left">
1 <img src="static/subfinder-logo.png" alt="subfinder" width="170px"></a>
2 <br>
3 </h1>
4
5
6 [![License](https://img.shields.io/badge/license-MIT-_red.svg)](https://opensource.org/licenses/MIT)
7 [![Go Report Card](https://goreportcard.com/badge/github.com/projectdiscovery/subfinder)](https://goreportcard.com/report/github.com/projectdiscovery/subfinder)
8 [![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/projectdiscovery/subfinder/issues)
9
10
11 subfinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources. It has a simple modular architecture and is optimized for speed. subfinder is built for doing one thing only - passive subdomain enumeration, and it does that very well.
12
13 We have designed subfinder to comply with all passive sources licenses, and usage restrictions, as well as maintained a consistently passive model to make it useful to both penetration testers and bug bounty hunters alike.
14
15
16 # Resources
17 - [Features](#features)
18 - [Usage](#usage)
19 - [Installation Instructions (direct)](#direct-installation)
20 - [Installation Instructions](#installation-instructions)
21 - [From Binary](#from-binary)
22 - [From Source](#from-source)
23 - [From Github](#from-github)
24 - [Upgrading](#upgrading)
25 - [Post Installation Instructions](#post-installation-instructions)
26 - [Running subfinder](#running-subfinder)
27 - [Running in a Docker Container](#running-in-a-docker-container)
28
29
30 # Features
31
32 <h1 align="left">
33 <img src="static/subfinder-run.png" alt="subfinder" width="700px"></a>
34 <br>
35 </h1>
36
37
38 - Simple and modular code base making it easy to contribute.
39 - Fast And Powerful Resolution and wildcard elimination module
40 - **Curated** passive sources to maximize results (26 Sources as of now)
41 - Multiple Output formats supported (Json, File, Stdout)
42 - Optimized for speed, very fast and **lightweight** on resources
43 - **Stdin** and **stdout** support for integrating in workflows
44
45
46 # Usage
47
48 ```bash
49 subfinder -h
50 ```
51 This will display help for the tool. Here are all the switches it supports.
52
53 | Flag | Description | Example |
54 |------|-------------|---------|
55 | -cd | Upload results to the Chaos API (api-key required) | subfinder -d uber.com -cd |
56 | -config string | Configuration file for API Keys, etc | subfinder -config config.yaml |
57 | -d | Domain to find subdomains for | subfinder -d uber.com |
58 | -dL | File containing list of domains to enumerate | subfinder -dL hackerone-hosts.txt |
59 | -exclude-sources | List of sources to exclude from enumeration | subfinder -exclude-sources archiveis |
60 | -max-time | Minutes to wait for enumeration results (default 10) | subfinder -max-time 1 |
61 | -nC | Don't Use colors in output | subfinder -nC |
62 | -nW | Remove Wildcard & Dead Subdomains from output | subfinder -nW |
63 | -ls | List all available sources | subfinder -ls |
64 | -o | File to write output to (optional) | subfinder -o output.txt |
65 | -oD | Directory to write enumeration results to (optional) | subfinder -oD ~/outputs |
66 | -oI | Write output in Host,IP format | subfinder -oI |
67 | -oJ | Write output in JSON lines Format | subfinder -oJ |
68 | -r | Comma-separated list of resolvers to use | subfinder -r 1.1.1.1,1.0.0.1 |
69 | -rL | Text file containing list of resolvers to use | subfinder -rL resolvers.txt
70 | -silent | Show only subdomains in output | subfinder -silent |
71 | -sources | Comma separated list of sources to use | subfinder -sources shodan,censys |
72 | -t | Number of concurrent goroutines for resolving (default 10) | subfinder -t 100 |
73 | -timeout | Seconds to wait before timing out (default 30) | subfinder -timeout 30 |
74 | -v | Show Verbose output | subfinder -v |
75 | -version | Show current program version | subfinder -version |
76
77
78 # Installation Instructions
79
80 ### From Binary
81
82 The installation is easy. You can download the pre-built binaries for different platforms from the [releases](https://github.com/projectdiscovery/subfinder/releases/) page. Extract them using tar, move it to your `$PATH` and you're ready to go.
83
84 ```bash
85 > tar -xzvf subfinder-linux-amd64.tar.gz
86 > mv subfinder /usr/local/bin/
87 > subfinder -h
88 ```
89
90 ### From Source
91
92 subfinder requires go1.13+ to install successfully. Run the following command to get the repo -
93
94 ```bash
95 GO111MODULE=on go get -v github.com/projectdiscovery/subfinder/cmd/subfinder
96 ```
97
98 ### From Github
99
100 ```bash
101 git clone https://github.com/projectdiscovery/subfinder.git
102 cd subfinder/cmd/subfinder
103 go build .
104 mv subfinder /usr/local/bin/
105 subfinder -h
106 ```
107
108 ### Upgrading
109 If you wish to upgrade the package you can use:
110
111 ```bash
112 GO111MODULE=on go get -u -v github.com/projectdiscovery/subfinder/cmd/subfinder
113 ```
114
115 ## Post Installation Instructions
116
117 Subfinder will work after using the installation instructions however to configure Subfinder to work with certain services, you will need to have setup API keys. The following services do not work without an API key:
118
119 - [Virustotal](https://www.virustotal.com)
120 - [Passivetotal](http://passivetotal.org)
121 - [SecurityTrails](http://securitytrails.com)
122 - [Censys](https://censys.io)
123 - [Binaryedge](https://binaryedge.io)
124 - [Shodan](https://shodan.io)
125 - [URLScan](https://urlscan.io)
126 - [Chaos](https://chaos.projectdiscovery.io)
127 - [Spyse](https://spyse.com)
128 - [DnsDB](https://api.dnsdb.info)
129 - [Zoomeye](https://www.zoomeye.org)
130 - [Github](https://github.com)
131 - [Intelx](https://intelx.io)
132
133 These values are stored in the `$HOME/.config/subfinder/config.yaml` file which will be created when you run the tool for the first time. The configuration file uses the YAML format. Multiple API keys can be specified for each of these services from which one of them will be used for enumeration.
134
135 For sources that require multiple keys, namely `Censys`, `Passivetotal`, they can be added by separating them via a colon (:).
136
137 An example config file -
138
139 ```yaml
140 resolvers:
141 - 1.1.1.1
142 - 1.0.0.1
143 sources:
144 - binaryedge
145 - bufferover
146 - censys
147 - passivetotal
148 - sitedossier
149 binaryedge:
150 - 0bf8919b-aab9-42e4-9574-d3b639324597
151 - ac244e2f-b635-4581-878a-33f4e79a2c13
152 censys:
153 - ac244e2f-b635-4581-878a-33f4e79a2c13:dd510d6e-1b6e-4655-83f6-f347b363def9
154 certspotter: []
155 passivetotal:
156 - [email protected]:sample_password
157 securitytrails: []
158 shodan:
159 - AAAAClP1bJJSRMEYJazgwhJKrggRwKA
160 github:
161 - d23a554bbc1aabb208c9acfbd2dd41ce7fc9db39
162 - asdsd54bbc1aabb208c9acfbd2dd41ce7fc9db39
163 ```
164
165 # Running Subfinder
166
167 To run the tool on a target, just use the following command.
168 ```bash
169 > subfinder -d freelancer.com
170 ```
171
172 This will run the tool against freelancer.com. There are a number of configuration options that you can pass along with this command. The verbose switch (-v) can be used to display verbose information.
173
174 ```bash
175 [CERTSPOTTER] www.fi.freelancer.com
176 [DNSDUMPSTER] hosting.freelancer.com
177 [DNSDUMPSTER] support.freelancer.com
178 [DNSDUMPSTER] accounts.freelancer.com
179 [DNSDUMPSTER] phabricator.freelancer.com
180 [DNSDUMPSTER] cdn1.freelancer.com
181 [DNSDUMPSTER] t1.freelancer.com
182 [DNSDUMPSTER] wdc.t1.freelancer.com
183 [DNSDUMPSTER] dal.t1.freelancer.com
184 ```
185
186 The `-silent` switch can be used to show only subdomains found without any other info.
187
188
189 The `-o` command can be used to specify an output file.
190
191 ```bash
192 > subfinder -d freelancer.com -o output.txt
193 ```
194
195 To run the tool on a list of domains, `-dL` option can be used. This requires a directory to write the output files. Subdomains for each domain from the list are written in a text file in the directory specified by the `-oD` flag with their name being the domain name.
196
197 ```bash
198 > cat domains.txt
199 hackerone.com
200 google.com
201
202 > subfinder -dL domains.txt -oD ~/path/to/output
203 > ls ~/path/to/output
204
205 hackerone.com.txt
206 google.com.txt
207 ```
208
209 If you want to save results to a single file while using a domain list, specify the `-o` flag with the name of the output file.
210
211
212 ```bash
213 > cat domains.txt
214 hackerone.com
215 google.com
216
217 > subfinder -dL domains.txt -o ~/path/to/output.txt
218 > ls ~/path/to/
219
220 output.txt
221 ```
222
223 If you want to upload your data to the Chaos dataset, you can use the `-cd` flag with your scan. Chaos will resolve all the input and add valid subdomains to the public dataset, which you can access on the go using [chaos-client](https://github.com/projectdiscovery/chaos-client)
224
225 ```bash
226 > subfinder -d hackerone.com -cd
227
228 root@b0x:~# subfinder -d hackerone.com -cd
229
230 www.hackerone.com
231 api.hackerone.com
232 go.hackerone.com
233 hackerone.com
234 staging.hackerone.com
235 [INF] Input processed successfully and subdomains with valid records will be updated to chaos dataset.
236 ```
237
238 You can also get output in json format using `-oJ` switch. This switch saves the output in the JSON lines format.
239
240 If you use the JSON format, or the `Host:IP` format, then it becomes mandatory for you to use the **-nW** flag, as resolving is essential for these output formats. By default, resolving the found subdomains is disabled.
241
242 ```bash
243 > subfinder -d hackerone.com -o output.json -oJ -nW
244 > cat output.json
245
246 {"host":"www.hackerone.com","ip":"104.16.99.52"}
247 {"host":"mta-sts.hackerone.com","ip":"185.199.108.153"}
248 {"host":"hackerone.com","ip":"104.16.100.52"}
249 {"host":"mta-sts.managed.hackerone.com","ip":"185.199.110.153"}
250 ```
251
252 You can specify custom resolvers too.
253 ```bash
254 > subfinder -d freelancer.com -o result.txt -nW -v -r 8.8.8.8,1.1.1.1
255 > subfinder -d freelancer.com -o result.txt -nW -v -rL resolvers.txt
256 ```
257
258 **The new highlight of this release is the addition of stdin/stdout features.** Now, domains can be piped to subfinder and enumeration can be run on them. For example -
259
260 ```bash
261 > echo hackerone.com | subfinder -v
262 > cat targets.txt | subfinder -v
263 ```
264
265 The subdomains discovered can be piped to other tools too. For example, you can pipe the subdomains discovered by subfinder to [httpx](https://github.com/projectdiscovery/httpx), which will then find running http servers on the host.
266
267 ```bash
268 > echo hackerone.com | subfinder -silent | httpx -silent
269
270 http://hackerone.com
271 http://www.hackerone.com
272 http://docs.hackerone.com
273 http://api.hackerone.com
274 https://docs.hackerone.com
275 http://mta-sts.managed.hackerone.com
276 ```
277
278 ## Running in a Docker Container
279
280 You can use the official dockerhub image at [subfinder](https://hub.docker.com/r/projectdiscovery/subfinder). Simply run -
281
282 ```bash
283 > docker pull projectdiscovery/subfinder
284 ```
285
286 The above command will pull the latest tagged release from the dockerhub repository.
287
288 If you want to build the container yourself manually, git clone the repo, then build and run the following commands
289
290 - Clone the repo using `git clone https://github.com/projectdiscovery/subfinder.git`
291 - Build your docker container
292 ```bash
293 docker build -t projectdiscovery/subfinder .
294 ```
295
296 - After building the container using either way, run the following -
297 ```bash
298 docker run -it projectdiscovery/subfinder
299 ```
300 > The above command is the same as running `-h`
301
302 If you are using docker, you need to first create your directory structure holding subfinder configuration file. After modifying the default config.yaml file, you can run:
303
304 ```bash
305 > mkdir -p $HOME/.config/subfinder
306 > cp config.yaml $HOME/.config/subfinder/config.yaml
307 > nano $HOME/.config/subfinder/config.yaml
308 ```
309
310 After that, you can pass it as a volume using the following sample command.
311 ```bash
312 > docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d freelancer.com
313 ```
314
315 For example, this runs the tool against uber.com and output the results to your host file system:
316 ```bash
317 docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d uber.com > uber.com.txt
318 ```
319
320 # License
321
322 subfinder is made with 🖤 by the [projectdiscovery](https://projectdiscovery.io) team. Community contributions have made the project what it is. See the **[Thanks.md](https://github.com/projectdiscovery/subfinder/blob/master/THANKS.md)** file for more details.
323
324 Read the disclaimer for usage at [DISCLAIMER.md](https://github.com/projectdiscovery/subfinder/blob/master/DISCLAIMER.md) and [contact us](mailto:[email protected]) for any API removal.
0 ### Thanks
1
2 Many people have contributed to subfinder, making it a wonderful tool, either by making a pull request fixing some stuff or by giving generous donations to support the further development of this tool. Here, we recognize these people and thank them.
3
4 - All the contributors at [CONTRIBUTORS](https://github.com/projectdiscovery/subfinder/graphs/contributors) who made subfinder what it is.
5
6 We'd like to thank some additional amazing people, who contributed a lot to subfinder's journey -
7
8 - @infosec-au - Donating to the project
9 - @codingo - Initial work on the project, managing it, lot of work!
10 - @picatz - Improving the structure of the project a lot. New ideas!
0 package main
1
2 import (
3 "github.com/projectdiscovery/gologger"
4 "github.com/projectdiscovery/subfinder/pkg/runner"
5 )
6
7 func main() {
8 // Parse the command line flags and read config files
9 options := runner.ParseOptions()
10
11 runner, err := runner.NewRunner(options)
12 if err != nil {
13 gologger.Fatalf("Could not create runner: %s\n", err)
14 }
15
16 err = runner.RunEnumeration()
17 if err != nil {
18 gologger.Fatalf("Could not run enumeration: %s\n", err)
19 }
20 }
0 resolvers:
1 - 1.1.1.1
2 - 1.0.0.1
3 - 8.8.8.8
4 - 8.8.4.4
5 - 9.9.9.9
6 - 9.9.9.10
7 - 77.88.8.8
8 - 77.88.8.1
9 - 208.67.222.222
10 - 208.67.220.220
11 sources:
12 - alienvault
13 - archiveis
14 - binaryedge
15 - bufferover
16 - censys
17 - certspotter
18 - certspotterold
19 - commoncrawl
20 - crtsh
21 - dnsdumpster
22 - dnsdb
23 - entrust
24 - github
25 - googleter
26 - hackertarget
27 - intelx
28 - ipv4info
29 - passivetotal
30 - rapiddns
31 - securitytrails
32 - shodan
33 - sitedossier
34 - sublist3r
35 - spyse
36 - threatcrowd
37 - threatminer
38 - urlscan
39 - virustotal
40 - waybackarchive
41 - zoomeye
42 censys:
43 - <key-here>
44 binaryedge:
45 - <key-here>
46 certspotter:
47 - <key-here>
48 github:
49 - <token-here>
50 intelx:
51 - <public.intelx.io:key-here>
52 passivetotal:
53 - <email:key-here>
54 securitytrails:
55 - <key-here>
56 virustotal:
57 - <key-here>
58 urlscan:
59 - <key-here>
60 chaos:
61 - <key-here>
62 spyse:
63 - <key-here>
64 shodan:
65 - <key-here>
66 dnsdb:
67 - <key-here>
0 module github.com/projectdiscovery/subfinder
1
2 go 1.14
3
4 require (
5 github.com/json-iterator/go v1.1.9
6 github.com/lib/pq v1.6.0
7 github.com/m-mizutani/urlscan-go v1.0.0
8 github.com/miekg/dns v1.1.29
9 github.com/pkg/errors v0.9.1
10 github.com/projectdiscovery/gologger v1.0.0
11 github.com/rs/xid v1.2.1
12 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
13 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c
14 )
0 github.com/alexbrainman/sspi v0.0.0-20180613141037-e580b900e9f5/go.mod h1:976q2ETgjT2snVCf2ZaBnyBbVoPERGjUz+0sofzEfro=
1 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
2 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
3 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
4 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
5 github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
6 github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE=
7 github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
8 github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
9 github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
10 github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
11 github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
12 github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=
13 github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
14 github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
15 github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
16 github.com/jcmturner/gokrb5/v8 v8.2.0 h1:lzPl/30ZLkTveYsYZPKMcgXc8MbnE6RsTd4F9KgiLtk=
17 github.com/jcmturner/gokrb5/v8 v8.2.0/go.mod h1:T1hnNppQsBtxW0tCHMHTkAt8n/sABdzZgZdoFrZaZNM=
18 github.com/jcmturner/rpc/v2 v2.0.2 h1:gMB4IwRXYsWw4Bc6o/az2HJgFUA1ffSh90i26ZJ6Xl0=
19 github.com/jcmturner/rpc/v2 v2.0.2/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
20 github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
21 github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
22 github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
23 github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
24 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
25 github.com/lib/pq v1.6.0 h1:I5DPxhYJChW9KYc66se+oKFFQX6VuQrKiprsX6ivRZc=
26 github.com/lib/pq v1.6.0/go.mod h1:4vXEAYvW1fRQ2/FhZ78H73A60MHw1geSm145z2mdY1g=
27 github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 h1:bqDmpDG49ZRnB5PcgP0RXtQvnMSgIF14M7CBd2shtXs=
28 github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
29 github.com/m-mizutani/urlscan-go v1.0.0 h1:+fTiSRCQXdy3EM1BgO5gmAHFWbccTDdoEKy9Fa7m9xo=
30 github.com/m-mizutani/urlscan-go v1.0.0/go.mod h1:ppEBT0e/xv0bPcVWKev4cYG7Ey8933JsOzEzovxGMjI=
31 github.com/mattn/go-colorable v0.1.0/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
32 github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
33 github.com/miekg/dns v1.1.29 h1:xHBEhR+t5RzcFJjBLJlax2daXOrTYtr9z4WdKEfWFzg=
34 github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
35 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
36 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
37 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
38 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
39 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
40 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
41 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
42 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
43 github.com/projectdiscovery/gologger v1.0.0 h1:XAQ8kHeVKXMjY4rLGh7eT5+oHU077BNEvs7X6n+vu1s=
44 github.com/projectdiscovery/gologger v1.0.0/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
45 github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
46 github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
47 github.com/sirupsen/logrus v1.3.0 h1:hI/7Q+DtNZ2kINb6qt/lS+IyXnHQe9e90POfeewL/ME=
48 github.com/sirupsen/logrus v1.3.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
49 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
50 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
51 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
52 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
53 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
54 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
55 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
56 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
57 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
58 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
59 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
60 golang.org/x/crypto v0.0.0-20200117160349-530e935923ad/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
61 golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4 h1:QmwruyY+bKbDDL0BaglrbZABEali68eoMFhTZpCjYVA=
62 golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
63 golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
64 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
65 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
66 golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
67 golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa h1:F+8P+gmewFQYRk6JoLQLwjBCTu3mcIURZfNkVweuRKA=
68 golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
69 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
70 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
71 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
72 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
73 golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe h1:6fAMxZRR6sl1Uq8U61gxU+kPTs2tR8uOySCbBP7BN/M=
74 golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
75 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
76 golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
77 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
78 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
79 gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=
80 gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=
81 gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4=
82 gopkg.in/jcmturner/gokrb5.v7 v7.5.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM=
83 gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8=
84 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
85 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c h1:grhR+C34yXImVGp7EzNk+DTIk+323eIUWOmEevy6bDo=
86 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
0 // Package passive provides capability for doing passive subdomain
1 // enumeration on targets.
2 package passive
3
0 package passive
1
2 import (
3 "context"
4 "fmt"
5 "sync"
6 "time"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
12 // EnumerateSubdomains enumerates all the subdomains for a given domain
13 func (a *Agent) EnumerateSubdomains(domain string, keys subscraping.Keys, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
14 results := make(chan subscraping.Result)
15
16 go func() {
17 session, err := subscraping.NewSession(domain, keys, timeout)
18 if err != nil {
19 results <- subscraping.Result{Type: subscraping.Error, Error: fmt.Errorf("could not init passive session for %s: %s", domain, err)}
20 }
21
22 ctx, cancel := context.WithTimeout(context.Background(), maxEnumTime)
23
24 timeTaken := make(map[string]string)
25 timeTakenMutex := &sync.Mutex{}
26
27 wg := &sync.WaitGroup{}
28 // Run each source in parallel on the target domain
29 for source, runner := range a.sources {
30 wg.Add(1)
31
32 now := time.Now()
33 go func(source string, runner subscraping.Source) {
34 for resp := range runner.Run(ctx, domain, session) {
35 results <- resp
36 }
37
38 duration := time.Now().Sub(now)
39 timeTakenMutex.Lock()
40 timeTaken[source] = fmt.Sprintf("Source took %s for enumeration\n", duration)
41 timeTakenMutex.Unlock()
42
43 wg.Done()
44 }(source, runner)
45 }
46 wg.Wait()
47
48 for source, data := range timeTaken {
49 gologger.Verbosef(data, source)
50 }
51
52 close(results)
53 cancel()
54 }()
55
56 return results
57 }
0 package passive
1
2 import (
3 "github.com/projectdiscovery/subfinder/pkg/subscraping"
4 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/alienvault"
5 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/archiveis"
6 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/binaryedge"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/bufferover"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/censys"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotter"
10 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotterold"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/commoncrawl"
12 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/crtsh"
13 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdb"
14 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdumpster"
15 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/entrust"
16 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/github"
17 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/hackertarget"
18 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/intelx"
19 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/ipv4info"
20 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/passivetotal"
21 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/rapiddns"
22 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/securitytrails"
23 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/shodan"
24 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sitedossier"
25 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/spyse"
26 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sublist3r"
27 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatcrowd"
28 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatminer"
29 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/urlscan"
30 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/virustotal"
31 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/waybackarchive"
32 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/zoomeye"
33 )
34
// DefaultSources contains the list of sources used by default.
// Every name here must match a case handled in Agent.addSources;
// names with no matching case are silently ignored there. The order
// is preserved in the generated config file and in -ls output.
var DefaultSources = []string{
	"alienvault",
	"archiveis",
	"binaryedge",
	"bufferover",
	"censys",
	"certspotter",
	"certspotterold",
	"commoncrawl",
	"crtsh",
	"dnsdumpster",
	"dnsdb",
	"entrust",
	"github",
	"hackertarget",
	"ipv4info",
	"intelx",
	"passivetotal",
	"rapiddns",
	"securitytrails",
	"shodan",
	"sitedossier",
	"spyse",
	"sublist3r",
	"threatcrowd",
	"threatminer",
	"urlscan",
	"virustotal",
	"waybackarchive",
	"zoomeye",
}
67
// Agent is a struct for running passive subdomain enumeration
// against a given host. It wraps subscraping package and provides
// a layer to build upon.
type Agent struct {
	// sources maps a source name (as listed in DefaultSources) to its
	// subscraping implementation; populated by addSources.
	sources map[string]subscraping.Source
}
74
75 // New creates a new agent for passive subdomain discovery
76 func New(sources []string, exclusions []string) *Agent {
77 // Create the agent, insert the sources and remove the excluded sources
78 agent := &Agent{sources: make(map[string]subscraping.Source)}
79
80 agent.addSources(sources)
81 agent.removeSources(exclusions)
82
83 return agent
84 }
85
86 // addSources adds the given list of sources to the source array
87 func (a *Agent) addSources(sources []string) {
88 for _, source := range sources {
89 switch source {
90 case "alienvault":
91 a.sources[source] = &alienvault.Source{}
92 case "archiveis":
93 a.sources[source] = &archiveis.Source{}
94 case "binaryedge":
95 a.sources[source] = &binaryedge.Source{}
96 case "bufferover":
97 a.sources[source] = &bufferover.Source{}
98 case "censys":
99 a.sources[source] = &censys.Source{}
100 case "certspotter":
101 a.sources[source] = &certspotter.Source{}
102 case "certspotterold":
103 a.sources[source] = &certspotterold.Source{}
104 case "commoncrawl":
105 a.sources[source] = &commoncrawl.Source{}
106 case "crtsh":
107 a.sources[source] = &crtsh.Source{}
108 case "dnsdumpster":
109 a.sources[source] = &dnsdumpster.Source{}
110 case "dnsdb":
111 a.sources[source] = &dnsdb.Source{}
112 case "entrust":
113 a.sources[source] = &entrust.Source{}
114 case "github":
115 a.sources[source] = &github.Source{}
116 case "hackertarget":
117 a.sources[source] = &hackertarget.Source{}
118 case "ipv4info":
119 a.sources[source] = &ipv4info.Source{}
120 case "intelx":
121 a.sources[source] = &intelx.Source{}
122 case "passivetotal":
123 a.sources[source] = &passivetotal.Source{}
124 case "rapiddns":
125 a.sources[source] = &rapiddns.Source{}
126 case "securitytrails":
127 a.sources[source] = &securitytrails.Source{}
128 case "shodan":
129 a.sources[source] = &shodan.Source{}
130 case "sitedossier":
131 a.sources[source] = &sitedossier.Source{}
132 case "spyse":
133 a.sources[source] = &spyse.Source{}
134 case "sublist3r":
135 a.sources[source] = &sublist3r.Source{}
136 case "threatcrowd":
137 a.sources[source] = &threatcrowd.Source{}
138 case "threatminer":
139 a.sources[source] = &threatminer.Source{}
140 case "urlscan":
141 a.sources[source] = &urlscan.Source{}
142 case "virustotal":
143 a.sources[source] = &virustotal.Source{}
144 case "waybackarchive":
145 a.sources[source] = &waybackarchive.Source{}
146 case "zoomeye":
147 a.sources[source] = &zoomeye.Source{}
148 }
149 }
150 }
151
152 // removeSources deletes the given sources from the source map
153 func (a *Agent) removeSources(sources []string) {
154 for _, source := range sources {
155 delete(a.sources, source)
156 }
157 }
0 package resolve
1
2 import (
3 "bufio"
4 "math/rand"
5 "os"
6 "time"
7 )
8
// DefaultResolvers contains the default list of resolvers known to be good.
// Written into the config file on first run (see firstRunTasks).
var DefaultResolvers = []string{
	"1.1.1.1", // Cloudflare primary
	"1.0.0.1", // Cloudflare secondary
	"8.8.8.8", // Google primary
	"8.8.4.4", // Google secondary
	"9.9.9.9", // Quad9 Primary
	"9.9.9.10", // Quad9 Secondary
	"77.88.8.8", // Yandex Primary
	"77.88.8.1", // Yandex Secondary
	"208.67.222.222", // OpenDNS Primary
	"208.67.220.220", // OpenDNS Secondary
}
22
// Resolver is a struct for resolving DNS names
type Resolver struct {
	// resolvers holds resolver IPs without a port; ":53" is appended
	// at query time (see getARecords).
	resolvers []string
	// rand picks a random resolver per query to spread load.
	rand *rand.Rand
}
28
29 // New creates a new resolver struct with the default resolvers
30 func New() *Resolver {
31 return &Resolver{
32 resolvers: []string{},
33 rand: rand.New(rand.NewSource(time.Now().UnixNano())),
34 }
35 }
36
37 // AppendResolversFromFile appends the resolvers read from a file to the list of resolvers
38 func (r *Resolver) AppendResolversFromFile(file string) error {
39 f, err := os.Open(file)
40 if err != nil {
41 return err
42 }
43 scanner := bufio.NewScanner(f)
44 for scanner.Scan() {
45 text := scanner.Text()
46 if text == "" {
47 continue
48 }
49 r.resolvers = append(r.resolvers, text)
50 }
51 f.Close()
52 return scanner.Err()
53 }
54
55 // AppendResolversFromSlice appends the slice to the list of resolvers
56 func (r *Resolver) AppendResolversFromSlice(list []string) {
57 r.resolvers = append(r.resolvers, list...)
58 }
0 // Package resolve is used to handle resolving records
1 // It also handles wildcard subdomains and rotating resolvers.
2 package resolve
0 package resolve
1
2 import (
3 "sync"
4
5 "github.com/miekg/dns"
6 "github.com/rs/xid"
7 )
8
const (
	// maxResolveRetries bounds the additional DNS exchange attempts made
	// after an I/O error before giving up (see getARecords).
	maxResolveRetries = 5
	// maxWildcardChecks is the number of random-label probes used to
	// collect a domain's wildcard IPs (see InitWildcards).
	maxWildcardChecks = 3
)
13
// ResolutionPool is a pool of resolvers created for resolving subdomains
// for a given host.
type ResolutionPool struct {
	*Resolver
	// Tasks receives hostnames to resolve; closed by the producer.
	Tasks chan string
	// Results delivers resolution outcomes; closed once all workers exit.
	Results chan Result
	// wg tracks the worker goroutines.
	wg *sync.WaitGroup
	// removeWildcard toggles real DNS resolution and wildcard filtering.
	removeWildcard bool

	// wildcardIPs holds IPs seen while probing random labels of the
	// domain; populated by InitWildcards.
	wildcardIPs map[string]struct{}
}
25
// Result contains the result for a host resolution
type Result struct {
	// Type discriminates which of the other fields are meaningful.
	Type ResultType
	// Host is the subdomain (set on Subdomain results).
	Host string
	// IP is the first A record for Host; empty when wildcard removal is off.
	IP string
	// Error is set on Error results.
	Error error
}
33
// ResultType is the type of result found
type ResultType int

// Types of data result can return
const (
	// Subdomain indicates a successfully resolved (or passed-through) host.
	Subdomain ResultType = iota
	// Error indicates a resolution failure carried in Result.Error.
	Error
)
42
43 // NewResolutionPool creates a pool of resolvers for resolving subdomains of a given domain
44 func (r *Resolver) NewResolutionPool(workers int, removeWildcard bool) *ResolutionPool {
45 resolutionPool := &ResolutionPool{
46 Resolver: r,
47 Tasks: make(chan string),
48 Results: make(chan Result),
49 wg: &sync.WaitGroup{},
50 removeWildcard: removeWildcard,
51 wildcardIPs: make(map[string]struct{}),
52 }
53
54 go func() {
55 for i := 0; i < workers; i++ {
56 resolutionPool.wg.Add(1)
57 go resolutionPool.resolveWorker()
58 }
59 resolutionPool.wg.Wait()
60 close(resolutionPool.Results)
61 }()
62
63 return resolutionPool
64 }
65
66 // InitWildcards inits the wildcard ips array
67 func (r *ResolutionPool) InitWildcards(domain string) error {
68 for i := 0; i < maxWildcardChecks; i++ {
69 uid := xid.New().String()
70
71 hosts, err := r.getARecords(uid + "." + domain)
72 if err != nil {
73 return err
74 }
75
76 // Append all wildcard ips found for domains
77 for _, host := range hosts {
78 r.wildcardIPs[host] = struct{}{}
79 }
80 }
81 return nil
82 }
83
84 func (r *ResolutionPool) resolveWorker() {
85 for task := range r.Tasks {
86 if !r.removeWildcard {
87 r.Results <- Result{Type: Subdomain, Host: task, IP: ""}
88 continue
89 }
90
91 hosts, err := r.getARecords(task)
92 if err != nil {
93 r.Results <- Result{Type: Error, Error: err}
94 continue
95 }
96
97 if len(hosts) == 0 {
98 continue
99 }
100
101 for _, host := range hosts {
102 // Ignore the host if it exists in wildcard ips map
103 if _, ok := r.wildcardIPs[host]; ok {
104 continue
105 }
106 }
107
108 r.Results <- Result{Type: Subdomain, Host: task, IP: hosts[0]}
109 }
110 r.wg.Done()
111 }
112
113 // getARecords gets all the A records for a given host
114 func (r *ResolutionPool) getARecords(host string) ([]string, error) {
115 var iteration int
116
117 m := new(dns.Msg)
118 m.Id = dns.Id()
119 m.RecursionDesired = true
120 m.Question = make([]dns.Question, 1)
121 m.Question[0] = dns.Question{
122 Name: dns.Fqdn(host),
123 Qtype: dns.TypeA,
124 Qclass: dns.ClassINET,
125 }
126 exchange:
127 iteration++
128 in, err := dns.Exchange(m, r.resolvers[r.rand.Intn(len(r.resolvers))]+":53")
129 if err != nil {
130 // Retry in case of I/O error
131 if iteration <= maxResolveRetries {
132 goto exchange
133 }
134 return nil, err
135 }
136 // Ignore the error in case we have bad result
137 if in != nil && in.Rcode != dns.RcodeSuccess {
138 return nil, nil
139 }
140
141 var hosts []string
142 for _, record := range in.Answer {
143 if t, ok := record.(*dns.A); ok {
144 hosts = append(hosts, t.A.String())
145 }
146 }
147
148 return hosts, nil
149 }
0 package runner
1
2 import (
3 "github.com/projectdiscovery/gologger"
4 "github.com/projectdiscovery/subfinder/pkg/passive"
5 "github.com/projectdiscovery/subfinder/pkg/resolve"
6 )
7
8 const banner = `
9 _ __ _ _
10 ____ _| |__ / _(_)_ _ __| |___ _ _
11 (_-< || | '_ \ _| | ' \/ _ / -_) '_|
12 /__/\_,_|_.__/_| |_|_||_\__,_\___|_| v2
13 `
14
// Version is the current version of subfinder, shown by the -version
// flag and logged at startup.
const Version = `2.3.8`
17
18 // showBanner is used to show the banner to the user
19 func showBanner() {
20 gologger.Printf("%s\n", banner)
21 gologger.Printf("\t\tprojectdiscovery.io\n\n")
22
23 gologger.Labelf("Use with caution. You are responsible for your actions\n")
24 gologger.Labelf("Developers assume no liability and are not responsible for any misuse or damage.\n")
25 gologger.Labelf("By using subfinder, you also agree to the terms of the APIs used.\n\n")
26 }
27
28 // normalRunTasks runs the normal startup tasks
29 func (options *Options) normalRunTasks() {
30 configFile, err := UnmarshalRead(options.ConfigFile)
31 if err != nil {
32 gologger.Fatalf("Could not read configuration file %s: %s\n", options.ConfigFile, err)
33 }
34 options.YAMLConfig = configFile
35 }
36
37 // firstRunTasks runs some housekeeping tasks done
38 // when the program is ran for the first time
39 func (options *Options) firstRunTasks() {
40 // Create the configuration file and display information
41 // about it to the user.
42 config := ConfigFile{
43 // Use the default list of resolvers by marshalling it to the config
44 Resolvers: resolve.DefaultResolvers,
45 // Use the default list of passive sources
46 Sources: passive.DefaultSources,
47 }
48
49 err := config.MarshalWrite(options.ConfigFile)
50 if err != nil {
51 gologger.Fatalf("Could not write configuration file to %s: %s\n", options.ConfigFile, err)
52 }
53 options.YAMLConfig = config
54
55 gologger.Infof("Configuration file saved to %s\n", options.ConfigFile)
56 }
0 package runner
1
2 import (
3 "math/rand"
4 "os"
5 "strings"
6 "time"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 "gopkg.in/yaml.v3"
10 )
11
// ConfigFile contains the fields stored in the configuration file
type ConfigFile struct {
	// Resolvers contains the list of resolvers to use while resolving
	Resolvers []string `yaml:"resolvers,omitempty"`
	// Sources contains a list of sources to use for enumeration
	Sources []string `yaml:"sources,omitempty"`
	// ExcludeSources contains the sources to not include in the enumeration process
	ExcludeSources []string `yaml:"exclude-sources,omitempty"`
	// API keys for different sources. Each field is a list so that one
	// entry can be picked at random per run (see GetKeys). Censys, IntelX,
	// PassiveTotal and ZoomEye entries hold two values joined by a colon.
	Binaryedge []string `yaml:"binaryedge"`
	Censys []string `yaml:"censys"`
	Certspotter []string `yaml:"certspotter"`
	Chaos []string `yaml:"chaos"`
	DNSDB []string `yaml:"dnsdb"`
	GitHub []string `yaml:"github"`
	IntelX []string `yaml:"intelx"`
	PassiveTotal []string `yaml:"passivetotal"`
	SecurityTrails []string `yaml:"securitytrails"`
	Shodan []string `yaml:"shodan"`
	Spyse []string `yaml:"spyse"`
	URLScan []string `yaml:"urlscan"`
	Virustotal []string `yaml:"virustotal"`
	ZoomEye []string `yaml:"zoomeye"`
}
36
// GetConfigDirectory gets the subfinder config directory for a user
// ($HOME/.config/subfinder), creating it if necessary.
//
// It also seeds the global math/rand generator, which GetKeys relies on
// for random API-key selection.
func GetConfigDirectory() (string, error) {
	// Seed the random number generator
	rand.Seed(time.Now().UnixNano())

	var config string

	directory, err := os.UserHomeDir()
	if err != nil {
		return config, err
	}
	config = directory + "/.config/subfinder"
	// Create the directory tree for subfinder even if it exists already,
	// and surface the error instead of silently ignoring it.
	if err := os.MkdirAll(config, os.ModePerm); err != nil {
		return config, err
	}

	return config, nil
}
54
// CheckConfigExists checks if the config file exists in the given path.
// Any stat failure (missing file, permission error, ...) reports false,
// exactly as the original branch structure did.
func CheckConfigExists(configPath string) bool {
	_, err := os.Stat(configPath)
	return err == nil
}
64
65 // MarshalWrite writes the marshalled yaml config to disk
66 func (c ConfigFile) MarshalWrite(file string) error {
67 f, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE, 0755)
68 if err != nil {
69 return err
70 }
71
72 // Indent the spaces too
73 enc := yaml.NewEncoder(f)
74 enc.SetIndent(4)
75 err = enc.Encode(&c)
76 f.Close()
77 return err
78 }
79
80 // UnmarshalRead reads the unmarshalled config yaml file from disk
81 func UnmarshalRead(file string) (ConfigFile, error) {
82 config := ConfigFile{}
83
84 f, err := os.Open(file)
85 if err != nil {
86 return config, err
87 }
88 err = yaml.NewDecoder(f).Decode(&config)
89 f.Close()
90 return config, err
91 }
92
93 // GetKeys gets the API keys from config file and creates a Keys struct
94 // We use random selection of api keys from the list of keys supplied.
95 // Keys that require 2 options are separated by colon (:).
96 func (c ConfigFile) GetKeys() subscraping.Keys {
97 keys := subscraping.Keys{}
98
99 if len(c.Binaryedge) > 0 {
100 keys.Binaryedge = c.Binaryedge[rand.Intn(len(c.Binaryedge))]
101 }
102
103 if len(c.Censys) > 0 {
104 censysKeys := c.Censys[rand.Intn(len(c.Censys))]
105 parts := strings.Split(censysKeys, ":")
106 if len(parts) == 2 {
107 keys.CensysToken = parts[0]
108 keys.CensysSecret = parts[1]
109 }
110 }
111
112 if len(c.Certspotter) > 0 {
113 keys.Certspotter = c.Certspotter[rand.Intn(len(c.Certspotter))]
114 }
115 if len(c.Chaos) > 0 {
116 keys.Chaos = c.Chaos[rand.Intn(len(c.Chaos))]
117 }
118 if (len(c.DNSDB)) > 0 {
119 keys.DNSDB = c.DNSDB[rand.Intn(len(c.DNSDB))]
120 }
121 if (len(c.GitHub)) > 0 {
122 keys.GitHub = c.GitHub
123 }
124
125 if len(c.IntelX) > 0 {
126 intelxKeys := c.IntelX[rand.Intn(len(c.IntelX))]
127 parts := strings.Split(intelxKeys, ":")
128 if len(parts) == 2 {
129 keys.IntelXHost = parts[0]
130 keys.IntelXKey = parts[1]
131 }
132 }
133
134 if len(c.PassiveTotal) > 0 {
135 passiveTotalKeys := c.PassiveTotal[rand.Intn(len(c.PassiveTotal))]
136 parts := strings.Split(passiveTotalKeys, ":")
137 if len(parts) == 2 {
138 keys.PassiveTotalUsername = parts[0]
139 keys.PassiveTotalPassword = parts[1]
140 }
141 }
142
143 if len(c.SecurityTrails) > 0 {
144 keys.Securitytrails = c.SecurityTrails[rand.Intn(len(c.SecurityTrails))]
145 }
146 if len(c.Shodan) > 0 {
147 keys.Shodan = c.Shodan[rand.Intn(len(c.Shodan))]
148 }
149 if len(c.Spyse) > 0 {
150 keys.Spyse = c.Spyse[rand.Intn(len(c.Spyse))]
151 }
152 if len(c.URLScan) > 0 {
153 keys.URLScan = c.URLScan[rand.Intn(len(c.URLScan))]
154 }
155 if len(c.Virustotal) > 0 {
156 keys.Virustotal = c.Virustotal[rand.Intn(len(c.Virustotal))]
157 }
158 if len(c.ZoomEye) > 0 {
159 zoomEyeKeys := c.ZoomEye[rand.Intn(len(c.ZoomEye))]
160 parts := strings.Split(zoomEyeKeys, ":")
161 if len(parts) == 2 {
162 keys.ZoomEyeUsername = parts[0]
163 keys.ZoomEyePassword = parts[1]
164 }
165 }
166
167 return keys
168 }
0 package runner
1
2 import (
3 "os"
4 "testing"
5
6 "github.com/stretchr/testify/assert"
7 )
8
9 func TestConfigGetDirectory(t *testing.T) {
10 directory, err := GetConfigDirectory()
11 if err != nil {
12 t.Fatalf("Expected nil got %v while getting home\n", err)
13 }
14 home, err := os.UserHomeDir()
15 if err != nil {
16 t.Fatalf("Expected nil got %v while getting dir\n", err)
17 }
18 config := home + "/.config/subfinder"
19
20 assert.Equal(t, directory, config, "Directory and config should be equal")
21 }
0 // Package runner implements the mechanism to drive the
1 // subdomain enumeration process
2 package runner
0 package runner
1
2 import (
3 "bytes"
4 "os"
5 "strings"
6 "sync"
7 "time"
8
9 "github.com/projectdiscovery/gologger"
10 "github.com/projectdiscovery/subfinder/pkg/resolve"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
// EnumerateSingleDomain performs subdomain enumeration against a single domain.
// Found subdomains are printed to the screen and, when output is non-empty,
// written to that file (appended when append is true). The passive results
// are consumed by one goroutine which feeds the optional wildcard-resolution
// pool, while this goroutine drains the pool's results.
//
// NOTE(review): the parameter name `append` shadows the builtin append. The
// builtin is not used in this function so behavior is unaffected, but a
// rename (e.g. appendToFile) would be clearer.
func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error {
	gologger.Infof("Enumerating subdomains for %s\n", domain)

	// Get the API keys for sources from the configuration
	// and also create the active resolving engine for the domain.
	keys := r.options.YAMLConfig.GetKeys()

	// Check if the user has asked to remove wildcards explicitly.
	// If yes, create the resolution pool and get the wildcards for the current domain
	var resolutionPool *resolve.ResolutionPool
	if r.options.RemoveWildcard {
		resolutionPool = r.resolverClient.NewResolutionPool(r.options.Threads, r.options.RemoveWildcard)
		err := resolutionPool.InitWildcards(domain)
		if err != nil {
			// Log the error but don't quit.
			gologger.Warningf("Could not get wildcards for domain %s: %s\n", domain, err)
		}
	}

	// Run the passive subdomain enumeration
	passiveResults := r.passiveAgent.EnumerateSubdomains(domain, keys, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)

	// The consumer goroutine below signals completion through this WaitGroup.
	wg := &sync.WaitGroup{}
	wg.Add(1)
	// Create a unique map for filtering duplicate subdomains out
	uniqueMap := make(map[string]struct{})
	// Process the results in a separate goroutine
	go func() {
		for result := range passiveResults {
			switch result.Type {
			case subscraping.Error:
				gologger.Warningf("Could not run source %s: %s\n", result.Source, result.Error)
			case subscraping.Subdomain:
				// Validate the subdomain found and remove wildcards from
				// it: only hosts under the target domain are kept.
				if !strings.HasSuffix(result.Value, "."+domain) {
					continue
				}
				subdomain := strings.ReplaceAll(strings.ToLower(result.Value), "*.", "")

				// Check if the subdomain is a duplicate. If not,
				// send the subdomain for resolution.
				if _, ok := uniqueMap[subdomain]; ok {
					continue
				}
				uniqueMap[subdomain] = struct{}{}

				// Log the verbose message about the found subdomain and send the
				// host for resolution to the resolution pool
				gologger.Verbosef("%s\n", result.Source, subdomain)

				// If the user asked to remove wildcard then send on the resolve
				// queue. Otherwise, if mode is not verbose print the results on
				// the screen as they are discovered.
				if r.options.RemoveWildcard {
					resolutionPool.Tasks <- subdomain
				}

				if !r.options.Verbose {
					gologger.Silentf("%s\n", subdomain)
				}
			}
		}
		// Close the task channel only if wildcards are asked to be removed;
		// this lets the pool workers drain and close resolutionPool.Results.
		if r.options.RemoveWildcard {
			close(resolutionPool.Tasks)
		}
		wg.Done()
	}()

	// If the user asked to remove wildcards, listen from the results
	// queue and write to the map. At the end, print the found results to the screen
	foundResults := make(map[string]string)
	if r.options.RemoveWildcard {
		// Process the results coming from the resolutions pool
		for result := range resolutionPool.Results {
			switch result.Type {
			case resolve.Error:
				gologger.Warningf("Could not resolve host: %s\n", result.Error)
			case resolve.Subdomain:
				// Add the found subdomain to a map.
				if _, ok := foundResults[result.Host]; !ok {
					foundResults[result.Host] = result.IP
				}
			}
		}
	}
	wg.Wait()

	// If verbose mode was used, then now print all the
	// found subdomains on the screen together.
	if r.options.Verbose {
		if r.options.RemoveWildcard {
			for result := range foundResults {
				gologger.Silentf("%s\n", result)
			}
		} else {
			for result := range uniqueMap {
				gologger.Silentf("%s\n", result)
			}
		}
	}
	// In case the user has specified to upload to chaos, write everything to a temporary buffer and upload
	if r.options.ChaosUpload {
		var buf = &bytes.Buffer{}
		err := WriteHostOutput(uniqueMap, buf)
		// If an error occurs, do not interrupt, continue to check if user specifed an output file
		if err != nil {
			gologger.Errorf("Could not prepare results for chaos %s\n", err)
		} else {
			// no error in writing host output, upload to chaos
			err = r.UploadToChaos(buf)
			if err != nil {
				gologger.Errorf("Could not upload results to chaos %s\n", err)
			} else {
				gologger.Infof("Input processed successfully and subdomains with valid records will be updated to chaos dataset.\n")
			}
			// clear buffer
			buf = nil
		}
	}
	// In case the user has given an output file, write all the found
	// subdomains to the output file.
	if output != "" {
		// If the output format is json, append .json
		// else append .txt
		if r.options.OutputDirectory != "" {
			if r.options.JSON {
				output = output + ".json"
			} else {
				output = output + ".txt"
			}
		}

		var file *os.File
		var err error
		if append {
			file, err = os.OpenFile(output, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
		} else {
			file, err = os.Create(output)
		}
		if err != nil {
			gologger.Errorf("Could not create file %s for %s: %s\n", output, domain, err)
			return err
		}

		// Write the output to the file depending upon user requirement
		if r.options.HostIP {
			err = WriteHostIPOutput(foundResults, file)
		} else if r.options.JSON {
			err = WriteJSONOutput(foundResults, file)
		} else {
			if r.options.RemoveWildcard {
				err = WriteHostOutputNoWildcard(foundResults, file)
			} else {
				err = WriteHostOutput(uniqueMap, file)
			}
		}
		if err != nil {
			gologger.Errorf("Could not write results to file %s for %s: %s\n", output, domain, err)
		}
		file.Close()
		return err
	}
	return nil
}
0 package runner
1
2 import (
3 "strings"
4
5 "github.com/projectdiscovery/subfinder/pkg/passive"
6 "github.com/projectdiscovery/subfinder/pkg/resolve"
7 )
8
9 // initializePassiveEngine creates the passive engine and loads sources etc
10 func (r *Runner) initializePassiveEngine() {
11 var sources, exclusions []string
12
13 // If there are any sources from CLI, only use them
14 // Otherwise, use the yaml file sources
15 if r.options.Sources != "" {
16 sources = append(sources, strings.Split(r.options.Sources, ",")...)
17 } else {
18 sources = append(sources, r.options.YAMLConfig.Sources...)
19 }
20
21 if r.options.ExcludeSources != "" {
22 exclusions = append(exclusions, strings.Split(r.options.ExcludeSources, ",")...)
23 } else {
24 exclusions = append(exclusions, r.options.YAMLConfig.ExcludeSources...)
25 }
26
27 r.passiveAgent = passive.New(sources, exclusions)
28 }
29
30 // initializeActiveEngine creates the resolver used to resolve the found subdomains
31 func (r *Runner) initializeActiveEngine() error {
32 r.resolverClient = resolve.New()
33
34 // If the file has been provided, read resolvers from the file
35 if r.options.ResolverList != "" {
36 err := r.resolverClient.AppendResolversFromFile(r.options.ResolverList)
37 if err != nil {
38 return err
39 }
40 }
41
42 var resolvers []string
43
44 if r.options.Resolvers != "" {
45 resolvers = append(resolvers, strings.Split(r.options.Resolvers, ",")...)
46 } else {
47 resolvers = append(resolvers, r.options.YAMLConfig.Resolvers...)
48 }
49 r.resolverClient.AppendResolversFromSlice(resolvers)
50 return nil
51 }
0 package runner
1
2 import (
3 "flag"
4 "os"
5 "path"
6 "reflect"
7 "strings"
8
9 "github.com/projectdiscovery/gologger"
10 )
11
// Options contains the configuration options for tuning
// the subdomain enumeration process. It is populated by ParseOptions
// from command-line flags, and YAMLConfig from the config file.
type Options struct {
	Verbose bool // Verbose flag indicates whether to show verbose output or not
	NoColor bool // No-Color disables the colored output
	Threads int // Thread controls the number of threads to use for active enumerations
	Timeout int // Timeout is the seconds to wait for sources to respond
	MaxEnumerationTime int // MaxEnumerationTime is the maximum amount of time in mins to wait for enumeration
	Domain string // Domain is the domain to find subdomains for
	DomainsFile string // DomainsFile is the file containing list of domains to find subdomains for
	ChaosUpload bool // ChaosUpload indicates whether to upload results to the Chaos API
	Output string // Output is the file to write found subdomains to.
	OutputDirectory string // OutputDirectory is the directory to write results to in case list of domains is given
	JSON bool // JSON specifies whether to use json for output format or text file
	HostIP bool // HostIP specifies whether to write subdomains in host:ip format
	Silent bool // Silent suppresses any extra text and only writes subdomains to screen
	Sources string // Sources contains a comma-separated list of sources to use for enumeration
	ListSources bool // ListSources specifies whether to list all available sources
	ExcludeSources string // ExcludeSources contains the comma-separated sources to not include in the enumeration process
	Resolvers string // Resolvers is the comma-separated resolvers to use for enumeration
	ResolverList string // ResolverList is a text file containing list of resolvers to use for enumeration
	RemoveWildcard bool // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
	ConfigFile string // ConfigFile contains the location of the config file
	Stdin bool // Stdin specifies whether stdin input was given to the process
	Version bool // Version specifies if we should just show version and exit

	YAMLConfig ConfigFile // YAMLConfig contains the unmarshalled yaml config file
}
40
// ParseOptions parses the command line flags provided by a user.
// It may terminate the process: -version and -ls print and exit(0),
// and invalid option combinations trigger a fatal log.
func ParseOptions() *Options {
	options := &Options{}

	config, err := GetConfigDirectory()
	if err != nil {
		// This should never be reached
		gologger.Fatalf("Could not get user home: %s\n", err)
	}

	flag.BoolVar(&options.Verbose, "v", false, "Show Verbose output")
	flag.BoolVar(&options.NoColor, "nC", false, "Don't Use colors in output")
	flag.IntVar(&options.Threads, "t", 10, "Number of concurrent goroutines for resolving")
	flag.IntVar(&options.Timeout, "timeout", 30, "Seconds to wait before timing out")
	flag.IntVar(&options.MaxEnumerationTime, "max-time", 10, "Minutes to wait for enumeration results")
	flag.StringVar(&options.Domain, "d", "", "Domain to find subdomains for")
	flag.StringVar(&options.DomainsFile, "dL", "", "File containing list of domains to enumerate")
	flag.BoolVar(&options.ChaosUpload, "cd", false, "Upload results to the Chaos API (api-key required)")
	flag.StringVar(&options.Output, "o", "", "File to write output to (optional)")
	flag.StringVar(&options.OutputDirectory, "oD", "", "Directory to write enumeration results to (optional)")
	flag.BoolVar(&options.JSON, "oJ", false, "Write output in JSON lines Format")
	flag.BoolVar(&options.HostIP, "oI", false, "Write output in Host,IP format")
	flag.BoolVar(&options.Silent, "silent", false, "Show only subdomains in output")
	flag.StringVar(&options.Sources, "sources", "", "Comma separated list of sources to use")
	flag.BoolVar(&options.ListSources, "ls", false, "List all available sources")
	flag.StringVar(&options.ExcludeSources, "exclude-sources", "", "List of sources to exclude from enumeration")
	flag.StringVar(&options.Resolvers, "r", "", "Comma-separated list of resolvers to use")
	flag.StringVar(&options.ResolverList, "rL", "", "Text file containing list of resolvers to use")
	flag.BoolVar(&options.RemoveWildcard, "nW", false, "Remove Wildcard & Dead Subdomains from output")
	flag.StringVar(&options.ConfigFile, "config", path.Join(config, "config.yaml"), "Configuration file for API Keys, etc")
	flag.BoolVar(&options.Version, "version", false, "Show version of subfinder")
	flag.Parse()

	// Check if stdin pipe was given
	options.Stdin = hasStdin()

	// Read the inputs and configure the logging
	options.configureOutput()

	// Show the user the banner
	showBanner()

	if options.Version {
		gologger.Infof("Current Version: %s\n", Version)
		os.Exit(0)
	}

	// Check if the config file exists. If not, it means this is the
	// first run of the program. Show the first run notices and initialize the config file.
	// Else show the normal banners and read the yaml file to the config
	if !CheckConfigExists(options.ConfigFile) {
		options.firstRunTasks()
	} else {
		options.normalRunTasks()
	}

	if options.ListSources {
		listSources(options)
		os.Exit(0)
	}

	// Validate the options passed by the user and if any
	// invalid options have been used, exit.
	err = options.validateOptions()
	if err != nil {
		gologger.Fatalf("Program exiting: %s\n", err)
	}

	return options
}
111
// hasStdin reports whether data is being piped into the process through
// a named pipe on standard input.
func hasStdin() bool {
	stat, err := os.Stdin.Stat()
	if err != nil {
		return false
	}
	return stat.Mode()&os.ModeNamedPipe != 0
}
122
123 func listSources(options *Options) {
124 gologger.Infof("Current list of available sources. [%d]\n", len(options.YAMLConfig.Sources))
125 gologger.Infof("Sources marked with an * needs key or token in order to work.\n")
126 gologger.Infof("You can modify %s to configure your keys / tokens.\n\n", options.ConfigFile)
127
128 keys := options.YAMLConfig.GetKeys()
129 needsKey := make(map[string]interface{})
130 keysElem := reflect.ValueOf(&keys).Elem()
131 for i := 0; i < keysElem.NumField(); i++ {
132 needsKey[strings.ToLower(keysElem.Type().Field(i).Name)] = keysElem.Field(i).Interface()
133 }
134
135 for _, source := range options.YAMLConfig.Sources {
136 message := "%s\n"
137 if _, ok := needsKey[source]; ok {
138 message = "%s *\n"
139 }
140 gologger.Silentf(message, source)
141 }
142 }
0 package runner
1
2 import (
3 "bufio"
4 "io"
5 "os"
6 "path"
7
8 "github.com/projectdiscovery/subfinder/pkg/passive"
9 "github.com/projectdiscovery/subfinder/pkg/resolve"
10 )
11
// Runner is an instance of the subdomain enumeration
// client used to orchestrate the whole process.
type Runner struct {
	options        *Options          // parsed command-line and configuration options
	passiveAgent   *passive.Agent    // passive (API-scraping) enumeration engine
	resolverClient *resolve.Resolver // resolver used for active verification / wildcard removal
}
19
20 // NewRunner creates a new runner struct instance by parsing
21 // the configuration options, configuring sources, reading lists
22 // and setting up loggers, etc.
23 func NewRunner(options *Options) (*Runner, error) {
24 runner := &Runner{options: options}
25
26 // Initialize the passive subdomain enumeration engine
27 runner.initializePassiveEngine()
28
29 // Initialize the active subdomain enumeration engine
30 err := runner.initializeActiveEngine()
31 if err != nil {
32 return nil, err
33 }
34
35 return runner, nil
36 }
37
38 // RunEnumeration runs the subdomain enumeration flow on the targets specified
39 func (r *Runner) RunEnumeration() error {
40 // Check if only a single domain is sent as input. Process the domain now.
41 if r.options.Domain != "" {
42 return r.EnumerateSingleDomain(r.options.Domain, r.options.Output, false)
43 }
44
45 // If we have multiple domains as input,
46 if r.options.DomainsFile != "" {
47 f, err := os.Open(r.options.DomainsFile)
48 if err != nil {
49 return err
50 }
51 err = r.EnumerateMultipleDomains(f)
52 f.Close()
53 return err
54 }
55
56 // If we have STDIN input, treat it as multiple domains
57 if r.options.Stdin {
58 return r.EnumerateMultipleDomains(os.Stdin)
59 }
60 return nil
61 }
62
63 // EnumerateMultipleDomains enumerates subdomains for multiple domains
64 // We keep enumerating subdomains for a given domain until we reach an error
65 func (r *Runner) EnumerateMultipleDomains(reader io.Reader) error {
66 scanner := bufio.NewScanner(reader)
67 for scanner.Scan() {
68 domain := scanner.Text()
69 if domain == "" {
70 continue
71 }
72
73 var err error
74 // If the user has specifed an output file, use that output file instead
75 // of creating a new output file for each domain. Else create a new file
76 // for each domain in the directory.
77 if r.options.Output != "" {
78 err = r.EnumerateSingleDomain(domain, r.options.Output, true)
79 } else if r.options.OutputDirectory != "" {
80 outputFile := path.Join(r.options.OutputDirectory, domain)
81 err = r.EnumerateSingleDomain(domain, outputFile, false)
82 } else {
83 err = r.EnumerateSingleDomain(domain, "", true)
84 }
85 if err != nil {
86 return err
87 }
88 }
89 return nil
90 }
0 package runner
1
2 import (
3 "bufio"
4 "crypto/tls"
5 "fmt"
6 "io"
7 "io/ioutil"
8 "net/http"
9 "strings"
10 "time"
11
12 jsoniter "github.com/json-iterator/go"
13 "github.com/pkg/errors"
14 )
15
// JSONResult contains the result for a host in JSON format
type JSONResult struct {
	Host string `json:"host"` // discovered subdomain name
	IP   string `json:"ip"`   // IP address the subdomain resolved to
}
21
22 func (r *Runner) UploadToChaos(reader io.Reader) error {
23 httpClient := &http.Client{
24 Transport: &http.Transport{
25 MaxIdleConnsPerHost: 100,
26 MaxIdleConns: 100,
27 TLSClientConfig: &tls.Config{
28 InsecureSkipVerify: true,
29 },
30 },
31 Timeout: time.Duration(600) * time.Second, // 10 minutes - uploads may take long
32 }
33
34 request, err := http.NewRequest("POST", "https://dns.projectdiscovery.io/dns/add", reader)
35 if err != nil {
36 return errors.Wrap(err, "could not create request")
37 }
38 request.Header.Set("Authorization", r.options.YAMLConfig.GetKeys().Chaos)
39
40 resp, err := httpClient.Do(request)
41 if err != nil {
42 return errors.Wrap(err, "could not make request")
43 }
44 defer func() {
45 io.Copy(ioutil.Discard, resp.Body)
46 resp.Body.Close()
47 }()
48
49 if resp.StatusCode != 200 {
50 return fmt.Errorf("invalid status code received: %d", resp.StatusCode)
51 }
52 return nil
53 }
54
// WriteHostOutput writes the output list of subdomains, one host per line,
// to an io.Writer. Writes are buffered and flushed before returning.
func WriteHostOutput(results map[string]struct{}, writer io.Writer) error {
	bufwriter := bufio.NewWriter(writer)

	for host := range results {
		// Write the line directly; the per-iteration strings.Builder the
		// original used added allocations without any benefit.
		if _, err := bufwriter.WriteString(host + "\n"); err != nil {
			bufwriter.Flush()
			return err
		}
	}
	return bufwriter.Flush()
}
73
// WriteHostOutputNoWildcard writes the output list of subdomain with nW flag
// (wildcards already removed, hence the host->IP map input) to an io.Writer.
// Only the keys (hosts) are written, one per line; writes are buffered.
func WriteHostOutputNoWildcard(results map[string]string, writer io.Writer) error {
	bufwriter := bufio.NewWriter(writer)

	for host := range results {
		// Write the line directly; a per-line strings.Builder is unnecessary.
		if _, err := bufwriter.WriteString(host + "\n"); err != nil {
			bufwriter.Flush()
			return err
		}
	}
	return bufwriter.Flush()
}
92
93 // WriteJSONOutput writes the output list of subdomain in JSON to an io.Writer
94 func WriteJSONOutput(results map[string]string, writer io.Writer) error {
95 encoder := jsoniter.NewEncoder(writer)
96
97 data := JSONResult{}
98
99 for host, ip := range results {
100 data.Host = host
101 data.IP = ip
102
103 err := encoder.Encode(&data)
104 if err != nil {
105 return err
106 }
107 }
108 return nil
109 }
110
// WriteHostIPOutput writes "host,ip" pairs, one per line, to an io.Writer.
// Writes are buffered and flushed before returning.
func WriteHostIPOutput(results map[string]string, writer io.Writer) error {
	bufwriter := bufio.NewWriter(writer)

	for host, ip := range results {
		// Emit the CSV-style pair directly; the per-line strings.Builder
		// the original used only added allocations.
		if _, err := bufwriter.WriteString(host + "," + ip + "\n"); err != nil {
			bufwriter.Flush()
			return err
		}
	}
	return bufwriter.Flush()
}
0 package runner
1
2 import (
3 "errors"
4
5 "github.com/projectdiscovery/gologger"
6 )
7
8 // validateOptions validates the configuration options passed
9 func (options *Options) validateOptions() error {
10 // Check if domain, list of domains, or stdin info was provided.
11 // If none was provided, then return.
12 if options.Domain == "" && options.DomainsFile == "" && !options.Stdin {
13 return errors.New("no input list provided")
14 }
15
16 // Both verbose and silent flags were used
17 if options.Verbose && options.Silent {
18 return errors.New("both verbose and silent mode specified")
19 }
20
21 // Validate threads and options
22 if options.Threads == 0 {
23 return errors.New("threads cannot be zero")
24 }
25 if options.Timeout == 0 {
26 return errors.New("timeout cannot be zero")
27 }
28
29 // JSON cannot be used with hostIP
30 if options.JSON && options.HostIP {
31 return errors.New("hostip flag cannot be used with json flag")
32 }
33
34 // Always remove wildcard with hostip and json
35 if options.HostIP && !options.RemoveWildcard {
36 return errors.New("hostip flag must be used with RemoveWildcard option")
37 }
38 if options.JSON && !options.RemoveWildcard {
39 return errors.New("JSON flag must be used with RemoveWildcard option")
40 }
41
42 return nil
43 }
44
// configureOutput configures the output on the screen
func (options *Options) configureOutput() {
	// If the user desires verbose output, show verbose output
	if options.Verbose {
		gologger.MaxLevel = gologger.Verbose
	}
	if options.NoColor {
		gologger.UseColors = false
	}
	// Applied last on purpose: if both verbose and silent were somehow set,
	// silent wins because this assignment overwrites the verbose level.
	if options.Silent {
		gologger.MaxLevel = gologger.Silent
	}
}
0 package subscraping
1
2 import (
3 "context"
4 "crypto/tls"
5 "fmt"
6 "io"
7 "io/ioutil"
8 "net/http"
9 "net/url"
10 "time"
11 )
12
13 // NewSession creates a new session object for a domain
14 func NewSession(domain string, keys Keys, timeout int) (*Session, error) {
15 client := &http.Client{
16 Transport: &http.Transport{
17 MaxIdleConns: 100,
18 MaxIdleConnsPerHost: 100,
19 TLSClientConfig: &tls.Config{
20 InsecureSkipVerify: true,
21 },
22 },
23 Timeout: time.Duration(timeout) * time.Second,
24 }
25
26 session := &Session{
27 Client: client,
28 Keys: keys,
29 }
30
31 // Create a new extractor object for the current domain
32 extractor, err := NewSubdomainExtractor(domain)
33 session.Extractor = extractor
34
35 return session, err
36 }
37
38 // NormalGetWithContext makes a normal GET request to a URL with context
39 func (s *Session) NormalGetWithContext(ctx context.Context, url string) (*http.Response, error) {
40 req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
41 if err != nil {
42 return nil, err
43 }
44
45 // Don't randomize user agents, as they cause issues sometimes
46 req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
47 req.Header.Set("Accept", "*/*")
48 req.Header.Set("Accept-Language", "en")
49
50 return httpRequestWrapper(s.Client, req)
51 }
52
53 // Get makes a GET request to a URL
54 func (s *Session) Get(ctx context.Context, url string, cookies string, headers map[string]string) (*http.Response, error) {
55 req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
56 if err != nil {
57 return nil, err
58 }
59
60 req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
61 req.Header.Set("Accept", "*/*")
62 req.Header.Set("Accept-Language", "en")
63
64 if cookies != "" {
65 req.Header.Set("Cookie", cookies)
66 }
67
68 if headers != nil {
69 for key, value := range headers {
70 req.Header.Set(key, value)
71 }
72 }
73
74 return httpRequestWrapper(s.Client, req)
75 }
76
77 func (s *Session) DiscardHttpResponse(response *http.Response) {
78 if response != nil {
79 io.Copy(ioutil.Discard, response.Body)
80 response.Body.Close()
81 }
82 }
83
// httpRequestWrapper executes the request with the given client and treats
// any non-200 status as an error. On a bad status the response is still
// returned so callers can drain and close the body.
func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
	resp, err := client.Do(request)
	if err != nil {
		return nil, err
	}

	if resp.StatusCode != http.StatusOK {
		// Unescape the URL for readable log output.
		requestURL, _ := url.QueryUnescape(request.URL.String())
		// Fix: error strings are lowercase per Go convention (staticcheck ST1005).
		return resp, fmt.Errorf("unexpected status code %d received from %s", resp.StatusCode, requestURL)
	}
	return resp, nil
}
0 package alienvault
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// alienvaultResponse models the subset of the AlienVault OTX passive-DNS
// API response consumed here (only the hostnames are used).
type alienvaultResponse struct {
	PassiveDNS []struct {
		Hostname string `json:"hostname"`
	} `json:"passive_dns"`
}

// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service.
// Subdomains and errors are streamed on the returned channel, which is
// closed when enumeration finishes.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			// Drain/close any partial response so the connection is reusable.
			session.DiscardHttpResponse(resp)
			close(results)
			return
		}

		otxResp := &alienvaultResponse{}
		// Get the response body and decode
		err = json.NewDecoder(resp.Body).Decode(&otxResp)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			resp.Body.Close()
			close(results)
			return
		}
		resp.Body.Close()
		for _, record := range otxResp.PassiveDNS {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record.Hostname}
		}
		close(results)
	}()

	return results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "alienvault"
}
0 // Package archiveis is a Archiveis Scraping Engine in Golang
1 package archiveis
2
3 import (
4 "context"
5 "io/ioutil"
6 "regexp"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// ArchiveIs is a struct for archiveurlsagent
type ArchiveIs struct {
	Results chan subscraping.Result // stream of found subdomains / errors
	Session *subscraping.Session    // shared HTTP client and extractor
}

// reNext matches the "next page" link in archive.is result pages; the
// capture group is the URL of the following page.
var reNext = regexp.MustCompile("<a id=\"next\" style=\".*\" href=\"(.*)\">&rarr;</a>")

// enumerate fetches baseURL, emits every subdomain found in the body, and
// recurses into the "next page" link if one is present. It stops early if
// ctx is cancelled.
func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
	select {
	case <-ctx.Done():
		return
	default:
	}

	resp, err := a.Session.NormalGetWithContext(ctx, baseURL)
	if err != nil {
		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
		a.Session.DiscardHttpResponse(resp)
		return
	}

	// Get the response body
	body, err := ioutil.ReadAll(resp.Body)
	resp.Body.Close()
	if err != nil {
		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
		return
	}

	src := string(body)

	for _, subdomain := range a.Session.Extractor.FindAllString(src, -1) {
		a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Subdomain, Value: subdomain}
	}

	// Follow pagination recursively; recursion depth equals the number of
	// result pages. NOTE(review): unbounded for very large result sets.
	match1 := reNext.FindStringSubmatch(src)
	if len(match1) > 0 {
		a.enumerate(ctx, match1[1])
	}
}

// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	aInstance := ArchiveIs{
		Session: session,
		Results: results,
	}

	go func() {
		aInstance.enumerate(ctx, "http://archive.is/*."+domain)
		close(aInstance.Results)
	}()

	return aInstance.Results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "archiveis"
}
0 package binaryedge
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// binaryedgeResponse models the BinaryEdge subdomains API response.
type binaryedgeResponse struct {
	Subdomains []string `json:"events"`
	Total      int      `json:"total"` // total results across all pages (100 per page)
}

// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service.
// Requires a BinaryEdge API key; without one the channel is closed immediately.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		if session.Keys.Binaryedge == "" {
			close(results)
			return
		}

		resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s", domain), "", map[string]string{"X-Key": session.Keys.Binaryedge})
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			session.DiscardHttpResponse(resp)
			close(results)
			return
		}

		response := new(binaryedgeResponse)
		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			resp.Body.Close()
			close(results)
			return
		}
		resp.Body.Close()

		for _, subdomain := range response.Subdomains {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
		}

		// Page 1 is consumed above; track how many results remain and fetch
		// one page per getSubdomains call until exhausted or cancelled.
		remaining := response.Total - 100
		currentPage := 2

		for {
			further := s.getSubdomains(ctx, domain, &remaining, &currentPage, session, results)
			if !further {
				break
			}
		}
		close(results)
	}()

	return results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "binaryedge"
}
69
70 func (s *Source) getSubdomains(ctx context.Context, domain string, remaining, currentPage *int, session *subscraping.Session, results chan subscraping.Result) bool {
71 for {
72 select {
73 case <-ctx.Done():
74 return false
75 default:
76 resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s?page=%d", domain, *currentPage), "", map[string]string{"X-Key": session.Keys.Binaryedge})
77 if err != nil {
78 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
79 return false
80 }
81
82 response := binaryedgeResponse{}
83 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 return false
88 }
89 resp.Body.Close()
90
91 for _, subdomain := range response.Subdomains {
92 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
93 }
94
95 *remaining = *remaining - 100
96 if *remaining <= 0 {
97 return false
98 }
99 *currentPage++
100 return true
101 }
102 }
103 }
0 // Package bufferover is a bufferover Scraping Engine in Golang
1 package bufferover
2
3 import (
4 "context"
5 "fmt"
6 "io/ioutil"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service.
// The channel is closed after both datasets have been queried.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		// Run enumeration on subdomain dataset for historical SONAR datasets
		// (dns.bufferover.run), then the TLS dataset (tls.bufferover.run).
		s.getData(ctx, fmt.Sprintf("https://dns.bufferover.run/dns?q=.%s", domain), session, results)
		s.getData(ctx, fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), session, results)

		close(results)
	}()

	return results
}
28
29 func (s *Source) getData(ctx context.Context, URL string, session *subscraping.Session, results chan subscraping.Result) {
30 resp, err := session.NormalGetWithContext(ctx, URL)
31 if err != nil {
32 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
33 session.DiscardHttpResponse(resp)
34 return
35 }
36
37 body, err := ioutil.ReadAll(resp.Body)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 return
42 }
43 resp.Body.Close()
44
45 src := string(body)
46
47 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
49 }
50 return
51 }
52
53 // Name returns the name of the source
54 func (s *Source) Name() string {
55 return "bufferover"
56 }
0 package censys
1
2 import (
3 "bytes"
4 "context"
5 "net/http"
6 "strconv"
7
8 jsoniter "github.com/json-iterator/go"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
// maxCensysPages caps how many result pages are fetched from Censys.
const maxCensysPages = 10

// resultsq holds the certificate name fields returned for one search hit.
type resultsq struct {
	Data  []string `json:"parsed.extensions.subject_alt_name.dns_names"` // SAN dns_names
	Data1 []string `json:"parsed.names"`                                 // parsed certificate names
}

// response models the Censys certificate-search API response envelope.
type response struct {
	Results  []resultsq `json:"results"`
	Metadata struct {
		Pages int `json:"pages"` // total number of result pages available
	} `json:"metadata"`
}

// Source is the passive scraping agent
type Source struct{}
28
29 // Run function returns all subdomains found with the service
30 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
31 results := make(chan subscraping.Result)
32
33 go func() {
34 if session.Keys.CensysToken == "" || session.Keys.CensysSecret == "" {
35 close(results)
36 return
37 }
38 var response response
39
40 currentPage := 1
41 for {
42 var request = []byte(`{"query":"` + domain + `", "page":` + strconv.Itoa(currentPage) + `, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`)
43
44 req, err := http.NewRequestWithContext(ctx, "POST", "https://www.censys.io/api/v1/search/certificates", bytes.NewReader(request))
45 if err != nil {
46 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
47 close(results)
48 return
49 }
50 req.SetBasicAuth(session.Keys.CensysToken, session.Keys.CensysSecret)
51 req.Header.Set("Content-Type", "application/json")
52 req.Header.Set("Accept", "application/json")
53
54 resp, err := session.Client.Do(req)
55 if err != nil {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
57 close(results)
58 return
59 }
60
61 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
62 if err != nil {
63 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
64 resp.Body.Close()
65 close(results)
66 return
67 }
68 resp.Body.Close()
69
70 // Exit the censys enumeration if max pages is reached
71 if currentPage >= response.Metadata.Pages || currentPage >= maxCensysPages {
72 break
73 }
74
75 for _, res := range response.Results {
76 for _, part := range res.Data {
77 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
78 }
79 for _, part := range res.Data1 {
80 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
81 }
82 }
83
84 currentPage++
85 }
86 close(results)
87 }()
88
89 return results
90 }
91
92 // Name returns the name of the source
93 func (s *Source) Name() string {
94 return "censys"
95 }
0 package certspotter
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// certspotterObject is a single issuance record from the CertSpotter API;
// ID is used as a pagination cursor via the `after` query parameter.
type certspotterObject struct {
	ID       string   `json:"id"`
	DNSNames []string `json:"dns_names"`
}

// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service.
// Requires a CertSpotter API key; pagination uses the last record's ID as
// an `after` cursor until an empty page is returned.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		if session.Keys.Certspotter == "" {
			close(results)
			return
		}

		resp, err := session.Get(ctx, fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			session.DiscardHttpResponse(resp)
			close(results)
			return
		}

		response := []certspotterObject{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&response)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			resp.Body.Close()
			close(results)
			return
		}
		resp.Body.Close()

		for _, cert := range response {
			for _, subdomain := range cert.DNSNames {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
			}
		}

		// if the number of responses is zero, close the channel and return.
		if len(response) == 0 {
			close(results)
			return
		}

		// Page through the remaining issuances using the last ID as cursor.
		id := response[len(response)-1].ID
		for {
			reqURL := fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names&after=%s", domain, id)

			resp, err := session.Get(ctx, reqURL, "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				close(results)
				return
			}

			// Intentionally shadows the outer `response` for this page only.
			response := []certspotterObject{}
			err = jsoniter.NewDecoder(resp.Body).Decode(&response)
			if err != nil {
				results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
				resp.Body.Close()
				close(results)
				return
			}
			resp.Body.Close()

			// An empty page signals the end of the result set.
			if len(response) == 0 {
				break
			}

			for _, cert := range response {
				for _, subdomain := range cert.DNSNames {
					results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
				}
			}

			id = response[len(response)-1].ID
		}
		close(results)
	}()

	return results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "certspotter"
}
0 package certspotterold
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 body, err := ioutil.ReadAll(resp.Body)
27 if err != nil {
28 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
29 resp.Body.Close()
30 close(results)
31 return
32 }
33 resp.Body.Close()
34
35 src := string(body)
36
37 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
39 }
40 close(results)
41 }()
42
43 return results
44 }
45
46 // Name returns the name of the source
47 func (s *Source) Name() string {
48 return "certspotterold"
49 }
0 package commoncrawl
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "net/url"
7 "strings"
8
9 jsoniter "github.com/json-iterator/go"
10 "github.com/projectdiscovery/subfinder/pkg/subscraping"
11 )
12
// indexURL lists all available CommonCrawl index collections.
const indexURL = "https://index.commoncrawl.org/collinfo.json"

// indexResponse is a single collection entry from collinfo.json.
type indexResponse struct {
	ID     string `json:"id"`
	APIURL string `json:"cdx-api"` // CDX search endpoint for this collection
}

// Source is the passive scraping agent
type Source struct{}

// years limits the search to at most one index per listed year.
var years = [...]string{"2020", "2019", "2018", "2017"}

// Run function returns all subdomains found with the service.
// It picks one CommonCrawl index per year and queries each until one
// reports cancellation or failure.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		resp, err := session.NormalGetWithContext(ctx, indexURL)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			session.DiscardHttpResponse(resp)
			close(results)
			return
		}

		indexes := []indexResponse{}
		err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
		if err != nil {
			results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
			resp.Body.Close()
			close(results)
			return
		}
		resp.Body.Close()

		// Select the first index whose ID mentions each target year.
		searchIndexes := make(map[string]string)
		for _, year := range years {
			for _, index := range indexes {
				if strings.Contains(index.ID, year) {
					if _, ok := searchIndexes[year]; !ok {
						searchIndexes[year] = index.APIURL
						break
					}
				}
			}
		}

		for _, apiURL := range searchIndexes {
			further := s.getSubdomains(ctx, apiURL, domain, session, results)
			if !further {
				break
			}
		}
		close(results)
	}()

	return results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "commoncrawl"
}
76
77 func (s *Source) getSubdomains(ctx context.Context, searchURL string, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
78 for {
79 select {
80 case <-ctx.Done():
81 return false
82 default:
83 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("%s?url=*.%s&output=json", searchURL, domain))
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 return false
87 }
88
89 body, err := ioutil.ReadAll(resp.Body)
90 if err != nil {
91 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
92 resp.Body.Close()
93 return false
94 }
95 resp.Body.Close()
96
97 src, _ := url.QueryUnescape(string(body))
98
99 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
100 subdomain = strings.TrimPrefix(subdomain, "25")
101
102 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
103 }
104 return true
105 }
106 }
107 }
0 package crtsh
1
2 import (
3 "context"
4 "database/sql"
5 "fmt"
6 "io/ioutil"
7 "strings"
8
9 // postgres driver
10 _ "github.com/lib/pq"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
// Source is the passive scraping agent
type Source struct{}

// Run function returns all subdomains found with the service.
// The crt.sh PostgreSQL interface is preferred; the HTTP JSON endpoint is
// used only as a fallback when the SQL path fails.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		found := s.getSubdomainsFromSQL(ctx, domain, session, results)
		if found {
			close(results)
			return
		}
		// SQL path failed; best-effort fallback over HTTP.
		_ = s.getSubdomainsFromHTTP(ctx, domain, session, results)
		close(results)
	}()

	return results
}
33
34 func (s *Source) getSubdomainsFromSQL(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
35 db, err := sql.Open("postgres", "host=crt.sh user=guest dbname=certwatch sslmode=disable binary_parameters=yes")
36 if err != nil {
37 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
38 return false
39 }
40
41 pattern := "%." + domain
42 rows, err := db.Query(`SELECT DISTINCT ci.NAME_VALUE as domain
43 FROM certificate_identity ci
44 WHERE reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower($1))
45 ORDER BY ci.NAME_VALUE`, pattern)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 return false
49 }
50
51 var data string
52 // Parse all the rows getting subdomains
53 for rows.Next() {
54 err := rows.Scan(&data)
55 if err != nil {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
57 return false
58 }
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: data}
60 }
61 return true
62 }
63
64 func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
65 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
66 if err != nil {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
68 session.DiscardHttpResponse(resp)
69 return false
70 }
71
72 body, err := ioutil.ReadAll(resp.Body)
73 if err != nil {
74 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
75 resp.Body.Close()
76 return false
77 }
78 resp.Body.Close()
79
80 // Also replace all newlines
81 src := strings.Replace(string(body), "\\n", " ", -1)
82
83 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
84 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
85 }
86 return true
87 }
88
89 // Name returns the name of the source
90 func (s *Source) Name() string {
91 return "crtsh"
92 }
0 package dnsdb
1
2 import (
3 "bufio"
4 "context"
5 "encoding/json"
6 "fmt"
7 "strings"
8
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
// dnsdbResponse models one JSON line of the DNSDB rrset lookup output.
type dnsdbResponse struct {
	Name string `json:"rrname"` // fully-qualified record name (has a trailing dot)
}

// Source is the passive scraping agent
type Source struct{}
18
19 // Run function returns all subdomains found with the service
20 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
21 results := make(chan subscraping.Result)
22
23 if session.Keys.DNSDB == "" {
24 close(results)
25 } else {
26 headers := map[string]string{
27 "X-API-KEY": session.Keys.DNSDB,
28 "Accept": "application/json",
29 "Content-Type": "application/json",
30 }
31
32 go func() {
33 resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
34 if err != nil {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
36 session.DiscardHttpResponse(resp)
37 close(results)
38 return
39 }
40
41 defer resp.Body.Close()
42 // Get the response body
43 scanner := bufio.NewScanner(resp.Body)
44 for scanner.Scan() {
45 line := scanner.Text()
46 if line == "" {
47 continue
48 }
49 out := &dnsdbResponse{}
50 err := json.Unmarshal([]byte(line), out)
51 if err != nil {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
53 resp.Body.Close()
54 close(results)
55 return
56 }
57 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(out.Name, ".")}
58 out = nil
59 }
60 close(results)
61 }()
62 }
63 return results
64 }
65
66 // Name returns the name of the source
67 func (s *Source) Name() string {
68 return "DNSDB"
69 }
0 package dnsdumpster
1
2 import (
3 "context"
4 "io/ioutil"
5 "net"
6 "net/http"
7 "net/url"
8 "regexp"
9 "strings"
10 "time"
11
12 "github.com/projectdiscovery/subfinder/pkg/subscraping"
13 )
14
// re captures the value of the hidden csrfmiddlewaretoken input field on
// the dnsdumpster landing page.
var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")

// getCSRFToken extracts the CSRF token embedded in the landing page, or
// returns the empty string when no token is present.
func getCSRFToken(page string) string {
	subs := re.FindStringSubmatch(page)
	if len(subs) != 2 {
		return ""
	}
	return strings.TrimSpace(subs[1])
}
24
// postForm submits the dnsdumpster search form for a domain using the
// previously scraped CSRF token and returns the resulting HTML page.
func postForm(token, domain string) (string, error) {
	dial := net.Dialer{}
	client := &http.Client{
		// Bound the whole exchange so a stalled server cannot hang the
		// source forever (previously only the TLS handshake was bounded).
		Timeout: 30 * time.Second,
		Transport: &http.Transport{
			DialContext:         dial.DialContext,
			TLSHandshakeTimeout: 10 * time.Second,
		},
	}
	params := url.Values{
		"csrfmiddlewaretoken": {token},
		"targetip":            {domain},
	}

	req, err := http.NewRequest("POST", "https://dnsdumpster.com/", strings.NewReader(params.Encode()))
	if err != nil {
		return "", err
	}

	// The CSRF token needs to be sent as a cookie in addition to the form field.
	req.AddCookie(&http.Cookie{
		Name:   "csrftoken",
		Domain: "dnsdumpster.com",
		Value:  token,
	})

	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.Header.Set("Referer", "https://dnsdumpster.com")
	req.Header.Set("X-CSRF-Token", token)

	resp, err := client.Do(req)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// Grab the entire page; the caller scrapes subdomains out of the HTML.
	in, err := ioutil.ReadAll(resp.Body)
	return string(in), err
}
66
// Source is the passive scraping agent for dnsdumpster.com.
type Source struct{}
69
70 // Run function returns all subdomains found with the service
71 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
72 results := make(chan subscraping.Result)
73
74 go func() {
75 resp, err := session.NormalGetWithContext(ctx, "https://dnsdumpster.com/")
76 if err != nil {
77 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
78 session.DiscardHttpResponse(resp)
79 close(results)
80 return
81 }
82
83 body, err := ioutil.ReadAll(resp.Body)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 close(results)
88 return
89 }
90 resp.Body.Close()
91 csrfToken := getCSRFToken(string(body))
92
93 data, err := postForm(csrfToken, domain)
94 if err != nil {
95 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
96 close(results)
97 return
98 }
99
100 for _, subdomain := range session.Extractor.FindAllString(data, -1) {
101 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
102 }
103 close(results)
104 }()
105
106 return results
107 }
108
109 // Name returns the name of the source
110 func (s *Source) Name() string {
111 return "dnsdumpster"
112 }
0 package entrust
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "strings"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// Source is the passive scraping agent for the Entrust CT search API.
type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain=%s&includeExpired=true&exactMatch=false&limit=5000", domain))
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
39 subdomain = strings.TrimPrefix(subdomain, "u003d")
40
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
42 }
43 close(results)
44 }()
45
46 return results
47 }
48
49 // Name returns the name of the source
50 func (s *Source) Name() string {
51 return "entrust"
52 }
0 // GitHub search package, based on gwen001's https://github.com/gwen001/github-search github-subdomains
1 package github
2
3 import (
4 "context"
5 "fmt"
6 "io/ioutil"
7 "net/http"
8 "net/url"
9 "regexp"
10 "strconv"
11 "strings"
12 "time"
13
14 jsoniter "github.com/json-iterator/go"
15
16 "github.com/projectdiscovery/gologger"
17 "github.com/projectdiscovery/subfinder/pkg/subscraping"
18 "github.com/tomnomnom/linkheader"
19 )
20
// textMatch is a single highlighted code fragment returned by the GitHub
// code-search text-match media type.
type textMatch struct {
	Fragment string `json:"fragment"`
}

// item is one code-search hit; HtmlUrl points at the file on github.com.
type item struct {
	Name string `json:"name"`
	HtmlUrl string `json:"html_url"`
	TextMatches []textMatch `json:"text_matches"`
}

// response is the top-level GitHub code-search response envelope.
type response struct {
	TotalCount int `json:"total_count"`
	Items []item `json:"items"`
}
35
// Source is the passive scraping agent for GitHub code search.
type Source struct{}
38
39 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
40 results := make(chan subscraping.Result)
41
42 go func() {
43 if len(session.Keys.GitHub) == 0 {
44 close(results)
45 return
46 }
47
48 tokens := NewTokenManager(session.Keys.GitHub)
49
50 // search on GitHub with exact match
51 searchURL := fmt.Sprintf("https://api.github.com/search/code?per_page=100&q=\"%s\"", domain)
52 s.enumerate(ctx, searchURL, s.DomainRegexp(domain), tokens, session, results)
53 close(results)
54 }()
55
56 return results
57 }
58
59 func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *regexp.Regexp, tokens *Tokens, session *subscraping.Session, results chan subscraping.Result) {
60 select {
61 case <-ctx.Done():
62 return
63 default:
64 }
65
66 token := tokens.Get()
67
68 if token.RetryAfter > 0 {
69 if len(tokens.pool) == 1 {
70 gologger.Verbosef("GitHub Search request rate limit exceeded, waiting for %d seconds before retry... \n", s.Name(), token.RetryAfter)
71 time.Sleep(time.Duration(token.RetryAfter) * time.Second)
72 } else {
73 token = tokens.Get()
74 }
75 }
76
77 headers := map[string]string{
78 "Accept": "application/vnd.github.v3.text-match+json",
79 "Authorization": "token " + token.Hash,
80 }
81
82 // Initial request to GitHub search
83 resp, err := session.Get(ctx, searchURL, "", headers)
84 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
85
86 if err != nil && !isForbidden {
87 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
88 session.DiscardHttpResponse(resp)
89 return
90 } else {
91 // Retry enumerarion after Retry-After seconds on rate limit abuse detected
92 ratelimitRemaining, _ := strconv.ParseInt(resp.Header.Get("X-Ratelimit-Remaining"), 10, 64)
93 if isForbidden && ratelimitRemaining == 0 {
94 retryAfterSeconds, _ := strconv.ParseInt(resp.Header.Get("Retry-After"), 10, 64)
95 tokens.setCurrentTokenExceeded(retryAfterSeconds)
96
97 s.enumerate(ctx, searchURL, domainRegexp, tokens, session, results)
98 } else {
99 // Links header, first, next, last...
100 linksHeader := linkheader.Parse(resp.Header.Get("Link"))
101
102 data := response{}
103
104 // Marshall json reponse
105 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
106 resp.Body.Close()
107 if err != nil {
108 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
109 return
110 }
111
112 // Response items iteration
113 for _, item := range data.Items {
114 resp, err := session.NormalGetWithContext(ctx, rawUrl(item.HtmlUrl))
115 if err != nil {
116 if resp != nil && resp.StatusCode != http.StatusNotFound {
117 session.DiscardHttpResponse(resp)
118 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
119 return
120 }
121 }
122
123 var subdomains []string
124
125 if resp.StatusCode == http.StatusOK {
126 // Get the item code from the raw file url
127 code, err := ioutil.ReadAll(resp.Body)
128 resp.Body.Close()
129 if err != nil {
130 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
131 return
132 }
133 // Search for domain matches in the code
134 subdomains = append(subdomains, matches(domainRegexp, normalizeContent(string(code)))...)
135 }
136
137 // Text matches iteration per item
138 for _, textMatch := range item.TextMatches {
139 // Search for domain matches in the text fragment
140 subdomains = append(subdomains, matches(domainRegexp, normalizeContent(textMatch.Fragment))...)
141 }
142
143 for _, subdomain := range unique(subdomains) {
144 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
145 }
146 }
147
148 // Proccess the next link recursively
149 for _, link := range linksHeader {
150 if link.Rel == "next" {
151 nextUrl, err := url.QueryUnescape(link.URL)
152 if err != nil {
153 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
154 return
155 }
156 s.enumerate(ctx, nextUrl, domainRegexp, tokens, session, results)
157 }
158 }
159 }
160 }
161
162 }
163
// normalizeContent prepares raw file content for matching: it
// query-unescapes the text and strips escaped tab/newline sequences.
func normalizeContent(content string) string {
	normalized, _ := url.QueryUnescape(content)
	for _, escape := range []string{"\\t", "\\n"} {
		normalized = strings.Replace(normalized, escape, "", -1)
	}
	return normalized
}
171
// unique returns arr with duplicate entries removed, preserving the
// first-seen order of the elements.
func unique(arr []string) []string {
	seen := make(map[string]bool, len(arr))
	result := []string{}
	for _, v := range arr {
		if seen[v] {
			continue
		}
		seen[v] = true
		result = append(result, v)
	}
	return result
}
184
185 // Find matches by regular expression in any content
186 func matches(regexp *regexp.Regexp, content string) []string {
187 var matches []string
188 match := regexp.FindAllString(content, -1)
189 if len(match) > 0 {
190 matches = unique(match)
191 }
192 return matches
193 }
194
// rawUrl converts a github.com HTML file URL into its
// raw.githubusercontent.com equivalent so the file body can be fetched.
func rawUrl(htmlUrl string) string {
	raw := strings.Replace(htmlUrl, "https://github.com/", "https://raw.githubusercontent.com/", -1)
	raw = strings.Replace(raw, "/blob/", "/", -1)
	return raw
}
200
201 // Domain regular expression to match subdomains in github files code
202 func (s *Source) DomainRegexp(domain string) *regexp.Regexp {
203 rdomain := strings.Replace(domain, ".", "\\.", -1)
204 return regexp.MustCompile("(\\w+[.])*" + rdomain)
205 }
206
207 // Name returns the name of the source
208 func (s *Source) Name() string {
209 return "github"
210 }
0 package github
1
2 import "time"
3
// token is a single GitHub API key together with its rate-limit state.
type token struct {
	Hash         string    // the API key itself
	RetryAfter   int64     // seconds until the key is usable again (0 = usable)
	ExceededTime time.Time // when the rate limit was hit
}

// Tokens is a round-robin pool of GitHub API keys.
type Tokens struct {
	current int
	pool    []token
}

// NewTokenManager wraps the given API keys in a round-robin pool with a
// clean rate-limit state.
func NewTokenManager(keys []string) *Tokens {
	pool := make([]token, 0, len(keys))
	for _, key := range keys {
		pool = append(pool, token{Hash: key, ExceededTime: time.Time{}, RetryAfter: 0})
	}

	return &Tokens{
		current: 0,
		pool:    pool,
	}
}

// setCurrentTokenExceeded marks the most recently returned token as
// rate-limited for retryAfter seconds.
func (r *Tokens) setCurrentTokenExceeded(retryAfter int64) {
	// Get() advances r.current after handing out a token, so the token the
	// caller just used lives one slot behind the cursor. BUGFIX: the old
	// code marked pool[r.current] — the NEXT token — whenever the pool had
	// more than one key.
	last := (r.current - 1 + len(r.pool)) % len(r.pool)
	if r.pool[last].RetryAfter == 0 {
		r.pool[last].ExceededTime = time.Now()
		r.pool[last].RetryAfter = retryAfter
	}
}

// Get returns the next token in round-robin order, clearing any expired
// rate-limit state first.
func (r *Tokens) Get() token {
	resetExceededTokens(r)

	if r.current >= len(r.pool) {
		r.current = r.current % len(r.pool)
	}

	result := r.pool[r.current]
	r.current++

	return result
}

// resetExceededTokens clears the rate-limit state of every token whose
// Retry-After window has elapsed.
func resetExceededTokens(r *Tokens) {
	for i, t := range r.pool {
		if t.RetryAfter > 0 && int64(time.Since(t.ExceededTime)/time.Second) > t.RetryAfter {
			r.pool[i].ExceededTime = time.Time{}
			r.pool[i].RetryAfter = 0
		}
	}
}
0 package hackertarget
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// Source is the passive scraping agent for the hackertarget.com hostsearch API.
type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35 src := string(body)
36
37 for _, match := range session.Extractor.FindAllString(src, -1) {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
39 }
40 close(results)
41 }()
42
43 return results
44 }
45
46 // Name returns the name of the source
47 func (s *Source) Name() string {
48 return "hackertarget"
49 }
0 package intelx
1
2 import (
3 "bytes"
4 "context"
5 "encoding/json"
6 "fmt"
7 "io/ioutil"
8 "net/http"
9
10 jsoniter "github.com/json-iterator/go"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
// searchResponseType is the reply to a phonebook search submission; Id
// identifies the asynchronous search job to poll for results.
type searchResponseType struct {
	Id string `json:"id"`
	Status int `json:"status"`
}

// selectorType is one selector (hostname) in a result batch.
type selectorType struct {
	Selectvalue string `json:"selectorvalue"`
}

// searchResultType is one batch of results for a search job; Run keeps
// polling while Status is 0 or 3.
type searchResultType struct {
	Selectors []selectorType `json:"selectors"`
	Status int `json:"status"`
}

// requestBody is the JSON payload submitted to the phonebook search endpoint.
type requestBody struct {
	Term string
	Maxresults int
	Media int
	Target int
	Terminate []int
	Timeout int
}
36
// Source is the passive scraping agent for the IntelX phonebook API.
type Source struct{}
39
40 // Run function returns all subdomains found with the service
41 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
42 results := make(chan subscraping.Result)
43
44 go func() {
45 defer close(results)
46 if session.Keys.IntelXKey == "" || session.Keys.IntelXHost == "" {
47 return
48 }
49
50 searchURL := fmt.Sprintf("https://%s/phonebook/search?k=%s", session.Keys.IntelXHost, session.Keys.IntelXKey)
51 reqBody := requestBody{
52 Term: domain,
53 Maxresults: 100000,
54 Media: 0,
55 Target: 1,
56 Timeout: 20,
57 }
58
59 body, err := json.Marshal(reqBody)
60 if err != nil {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
62 return
63 }
64
65 resp, err := http.Post(searchURL, "application/json", bytes.NewBuffer(body))
66 if err != nil {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
68 session.DiscardHttpResponse(resp)
69 return
70 }
71
72 var response searchResponseType
73 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
74 if err != nil {
75 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
76 close(results)
77 return
78 }
79
80 resultsURL := fmt.Sprintf("https://%s/phonebook/search/result?k=%s&id=%s&limit=10000", session.Keys.IntelXHost, session.Keys.IntelXKey, response.Id)
81 status := 0
82 for status == 0 || status == 3 {
83 resp, err = session.Get(ctx, resultsURL, "", nil)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 return
87 }
88 var response searchResultType
89 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
90 if err != nil {
91 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
92 return
93 }
94 body, err = ioutil.ReadAll(resp.Body)
95 if err != nil {
96 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
97 return
98 }
99 resp.Body.Close()
100 status = response.Status
101 for _, hostname := range response.Selectors {
102 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Selectvalue}
103 }
104 }
105 }()
106
107 return results
108 }
109
110 // Name returns the name of the source
111 func (s *Source) Name() string {
112 return "intelx"
113 }
0 package ipv4info
1
2 import (
3 "context"
4 "io/ioutil"
5 "regexp"
6 "strconv"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// Source is the passive scraping agent for ipv4info.com.
type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com/search/"+domain)
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35 src := string(body)
36
37 regxTokens := regexp.MustCompile("/ip-address/(.*)/" + domain)
38 matchTokens := regxTokens.FindAllString(src, -1)
39
40 if len(matchTokens) <= 0 {
41 close(results)
42 return
43 }
44 token := matchTokens[0]
45
46 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
47 if err != nil {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
49 close(results)
50 return
51 }
52
53 body, err = ioutil.ReadAll(resp.Body)
54 if err != nil {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
56 resp.Body.Close()
57 close(results)
58 return
59 }
60 resp.Body.Close()
61 src = string(body)
62
63 regxTokens = regexp.MustCompile("/dns/(.*?)/" + domain)
64 matchTokens = regxTokens.FindAllString(src, -1)
65 if len(matchTokens) <= 0 {
66 close(results)
67 return
68 }
69 token = matchTokens[0]
70
71 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
72 if err != nil {
73 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
74 close(results)
75 return
76 }
77
78 body, err = ioutil.ReadAll(resp.Body)
79 if err != nil {
80 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
81 resp.Body.Close()
82 close(results)
83 return
84 }
85 resp.Body.Close()
86 src = string(body)
87
88 regxTokens = regexp.MustCompile("/subdomains/(.*?)/" + domain)
89 matchTokens = regxTokens.FindAllString(src, -1)
90 if len(matchTokens) <= 0 {
91 close(results)
92 return
93 }
94 token = matchTokens[0]
95
96 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
97 if err != nil {
98 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
99 close(results)
100 return
101 }
102
103 body, err = ioutil.ReadAll(resp.Body)
104 if err != nil {
105 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
106 resp.Body.Close()
107 close(results)
108 return
109 }
110 resp.Body.Close()
111 src = string(body)
112
113 for _, match := range session.Extractor.FindAllString(src, -1) {
114 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
115 }
116 nextPage := 1
117
118 for {
119 further := s.getSubdomains(ctx, domain, &nextPage, src, session, results)
120 if !further {
121 break
122 }
123 }
124 close(results)
125 }()
126
127 return results
128 }
129
130 // Name returns the name of the source
131 func (s *Source) Name() string {
132 return "ipv4info"
133 }
134
135 func (s *Source) getSubdomains(ctx context.Context, domain string, nextPage *int, src string, session *subscraping.Session, results chan subscraping.Result) bool {
136 for {
137 select {
138 case <-ctx.Done():
139 return false
140 default:
141 regxTokens := regexp.MustCompile("/subdomains/.*/page" + strconv.Itoa(*nextPage) + "/" + domain + ".html")
142 matchTokens := regxTokens.FindAllString(src, -1)
143 if len(matchTokens) == 0 {
144 return false
145 }
146 token := matchTokens[0]
147
148 resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
149 if err != nil {
150 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
151 return false
152 }
153 body, err := ioutil.ReadAll(resp.Body)
154 if err != nil {
155 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
156 resp.Body.Close()
157 return false
158 }
159 resp.Body.Close()
160 src = string(body)
161 for _, match := range session.Extractor.FindAllString(src, -1) {
162 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
163 }
164 *nextPage++
165 return true
166 }
167 }
168 }
0 package passivetotal
1
2 import (
3 "bytes"
4 "context"
5 "net/http"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// response is the PassiveTotal enrichment reply; Subdomains holds bare
// labels that Run suffixes with the queried domain.
type response struct {
	Subdomains []string `json:"subdomains"`
}
14
// Source is the passive scraping agent for the PassiveTotal API.
type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.PassiveTotalUsername == "" || session.Keys.PassiveTotalPassword == "" {
24 close(results)
25 return
26 }
27
28 // Create JSON Get body
29 var request = []byte(`{"query":"` + domain + `"}`)
30
31 req, err := http.NewRequestWithContext(ctx, "GET", "https://api.passivetotal.org/v2/enrichment/subdomains", bytes.NewBuffer(request))
32 if err != nil {
33 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
34 close(results)
35 return
36 }
37
38 req.SetBasicAuth(session.Keys.PassiveTotalUsername, session.Keys.PassiveTotalPassword)
39 req.Header.Set("Content-Type", "application/json")
40
41 resp, err := session.Client.Do(req)
42 if err != nil {
43 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
44 close(results)
45 return
46 }
47
48 data := response{}
49 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
50 if err != nil {
51 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
52 resp.Body.Close()
53 close(results)
54 return
55 }
56 resp.Body.Close()
57
58 for _, subdomain := range data.Subdomains {
59 finalSubdomain := subdomain + "." + domain
60 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: finalSubdomain}
61 }
62 close(results)
63 }()
64
65 return results
66 }
67
68 // Name returns the name of the source
69 func (s *Source) Name() string {
70 return "passivetotal"
71 }
0 // Package rapiddns is a RapidDNS Scraping Engine in Golang
1 package rapiddns
2
3 import (
4 "context"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// Source is the passive scraping agent for rapiddns.io.
type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 defer close(results)
19 resp, err := session.NormalGetWithContext(ctx, "https://rapiddns.io/subdomain/"+domain+"?full=1")
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 return
24 }
25
26 body, err := ioutil.ReadAll(resp.Body)
27 resp.Body.Close()
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 return
31 }
32
33 src := string(body)
34 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
36 }
37 }()
38
39 return results
40 }
41
42 // Name returns the name of the source
43 func (s *Source) Name() string {
44 return "rapiddns"
45 }
0 package securitytrails
1
2 import (
3 "context"
4 "fmt"
5 "strings"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// response is the SecurityTrails subdomain listing; entries are labels
// relative to the queried domain (some already carry a trailing dot).
type response struct {
	Subdomains []string `json:"subdomains"`
}
14
// Source is the passive scraping agent for the SecurityTrails API.
type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.Securitytrails == "" {
24 close(results)
25 return
26 }
27
28 resp, err := session.Get(ctx, fmt.Sprintf("https://api.securitytrails.com/v1/domain/%s/subdomains", domain), "", map[string]string{"APIKEY": session.Keys.Securitytrails})
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 session.DiscardHttpResponse(resp)
32 close(results)
33 return
34 }
35
36 response := response{}
37 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 close(results)
42 return
43 }
44 resp.Body.Close()
45
46 for _, subdomain := range response.Subdomains {
47 if strings.HasSuffix(subdomain, ".") {
48 subdomain = subdomain + domain
49 } else {
50 subdomain = subdomain + "." + domain
51 }
52
53 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
54 }
55 close(results)
56 }()
57
58 return results
59 }
60
61 // Name returns the name of the source
62 func (s *Source) Name() string {
63 return "securitytrails"
64 }
0 package shodan
1
2 import (
3 "context"
4 "strconv"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// shodanResult is the host-search reply; Error is set by the API on failure.
type shodanResult struct {
	Matches []shodanObject `json:"matches"`
	Result int `json:"result"`
	Error string `json:"error"`
}

// shodanObject is one search hit carrying the hostnames attached to the host.
type shodanObject struct {
	Hostnames []string `json:"hostnames"`
}
19
// Source is the passive scraping agent for the Shodan host-search API.
type Source struct{}
22
23 // Run function returns all subdomains found with the service
24 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
25 results := make(chan subscraping.Result)
26
27 go func() {
28 if session.Keys.Shodan == "" {
29 close(results)
30 return
31 }
32
33 for currentPage := 0; currentPage <= 10; currentPage++ {
34 resp, err := session.NormalGetWithContext(ctx, "https://api.shodan.io/shodan/host/search?query=hostname:"+domain+"&page="+strconv.Itoa(currentPage)+"&key="+session.Keys.Shodan)
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 session.DiscardHttpResponse(resp)
38 close(results)
39 return
40 }
41
42 var response shodanResult
43 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
44 if err != nil {
45 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
46 resp.Body.Close()
47 close(results)
48 return
49 }
50 resp.Body.Close()
51
52 if response.Error != "" || len(response.Matches) == 0 {
53 close(results)
54 return
55 }
56
57 for _, block := range response.Matches {
58 for _, hostname := range block.Hostnames {
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname}
60 }
61 }
62 }
63 close(results)
64 }()
65
66 return results
67 }
68
69 // Name returns the name of the source
70 func (s *Source) Name() string {
71 return "shodan"
72 }
0 package sitedossier
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "math/rand"
7 "regexp"
8 "time"
9
10 "github.com/projectdiscovery/subfinder/pkg/subscraping"
11 )
12
// reNext matches the "next page" link on a sitedossier listing page.
var reNext = regexp.MustCompile("<a href=\"([A-Za-z0-9\\/.]+)\"><b>")

// agent bundles the session and result channel shared across the
// recursive page walk.
type agent struct {
	results chan subscraping.Result
	session *subscraping.Session
}
19
20 func (a *agent) enumerate(ctx context.Context, baseURL string) error {
21 for {
22 select {
23 case <-ctx.Done():
24 return nil
25 default:
26 resp, err := a.session.NormalGetWithContext(ctx, baseURL)
27 if err != nil {
28 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
29 a.session.DiscardHttpResponse(resp)
30 close(a.results)
31 return err
32 }
33
34 body, err := ioutil.ReadAll(resp.Body)
35 if err != nil {
36 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
37 resp.Body.Close()
38 close(a.results)
39 return err
40 }
41 resp.Body.Close()
42 src := string(body)
43
44 for _, match := range a.session.Extractor.FindAllString(src, -1) {
45 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
46 }
47
48 match1 := reNext.FindStringSubmatch(src)
49 time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
50
51 if len(match1) > 0 {
52 a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
53 }
54 return nil
55 }
56 }
57 }
58
// Source is the passive scraping agent for sitedossier.com.
type Source struct{}
61
62 // Run function returns all subdomains found with the service
63 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
64 results := make(chan subscraping.Result)
65
66 a := agent{
67 session: session,
68 results: results,
69 }
70
71 go func() {
72 err := a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain))
73 if err == nil {
74 close(a.results)
75 }
76 }()
77 return results
78 }
79
80 // Name returns the name of the source
81 func (s *Source) Name() string {
82 return "sitedossier"
83 }
0 package spyse
1
2 import (
3 "context"
4 "strconv"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11
// resultObject is a single subdomain entry.
type resultObject struct {
	Name string `json:"name"`
}

// dataObject holds one page of items plus the total result count that
// Run uses to drive pagination.
type dataObject struct {
	Items []resultObject `json:"items"`
	Total_Count int `json:"total_count"`
}

// errorObject is an API-level error description.
type errorObject struct {
	Code string `json:"code"`
	Message string `json:"message"`
}

// spyseResult is the top-level v3 API envelope.
type spyseResult struct {
	Data dataObject `json:"data"`
	Error []errorObject `json:"error"`
}
31
32
33 type Source struct{}
34
35 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
36 results := make(chan subscraping.Result)
37
38 go func() {
39 if session.Keys.Spyse == "" {
40 close(results)
41 return
42 }
43
44 maxCount := 100;
45
46 for offSet := 0; offSet <= maxCount; offSet += 100 {
47 resp, err := session.Get(ctx, fmt.Sprintf("https://api.spyse.com/v3/data/domain/subdomain?domain=%s&limit=100&offset=%s", domain, strconv.Itoa(offSet)), "", map[string]string{"Authorization": "Bearer " + session.Keys.Spyse})
48 if err != nil {
49 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
50 session.DiscardHttpResponse(resp)
51 close(results)
52 return
53 }
54
55
56 var response spyseResult;
57
58 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
59
60 if err != nil {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
62 resp.Body.Close()
63 close(results)
64 return
65 }
66 resp.Body.Close()
67
68 if response.Data.Total_Count == 0 {
69 close(results)
70 return
71 }
72
73 maxCount = response.Data.Total_Count;
74
75 for _, hostname := range response.Data.Items {
76 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Name}
77 }
78 }
79 close(results)
80 }()
81
82 return results
83 }
84
85
// Name returns the name of the source ("spyse"), used to tag results.
func (s *Source) Name() string {
	return "spyse"
}
0 package sublist3r
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// Source is the passive scraping agent for the sublist3r API.
type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.sublist3r.com/search.php?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25 defer resp.Body.Close()
26 var subdomains []string
27 // Get the response body and unmarshal
28 err = json.NewDecoder(resp.Body).Decode(&subdomains)
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 resp.Body.Close()
32 close(results)
33 return
34 }
35
36 for _, subdomain := range subdomains {
37 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
38 }
39 close(results)
40 }()
41
42 return results
43 }
44
// Name returns the name of the source ("sublist3r"), used to tag results.
func (s *Source) Name() string {
	return "sublist3r"
}
0 package threatcrowd
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// Source is the passive scraping agent for the ThreatCrowd API.
type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, match := range session.Extractor.FindAllString(src, -1) {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
40 }
41 close(results)
42 }()
43
44 return results
45 }
46
// Name returns the name of the source ("threatcrowd"), used to tag results.
func (s *Source) Name() string {
	return "threatcrowd"
}
0 package threatminer
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// Source is the passive scraping agent for the ThreatMiner API.
type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, match := range session.Extractor.FindAllString(src, -1) {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
40 }
41 close(results)
42 }()
43
44 return results
45 }
46
// Name returns the name of the source ("threatminer"), used to tag results.
func (s *Source) Name() string {
	return "threatminer"
}
0 package urlscan
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/m-mizutani/urlscan-go/urlscan"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// Source is the passive scraping agent for the urlscan.io service.
type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 if session.Keys.URLScan == "" {
20 close(results)
21 return
22 }
23
24 client := urlscan.NewClient(session.Keys.URLScan)
25 task, err := client.Submit(urlscan.SubmitArguments{URL: fmt.Sprintf("https://%s", domain)})
26 if err != nil {
27 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
28 close(results)
29 return
30 }
31
32 err = task.Wait()
33 if err != nil {
34 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
35 close(results)
36 return
37 }
38
39 data, err := jsoniter.Marshal(task.Result.Data)
40 if err != nil {
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
42 close(results)
43 return
44 }
45
46 match := session.Extractor.FindAllString(string(data), -1)
47 for _, m := range match {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: m}
49 }
50 close(results)
51 }()
52
53 return results
54 }
55
// Name returns the name of the source ("urlscan"), used to tag results.
func (s *Source) Name() string {
	return "urlscan"
}
0 package virustotal
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
// response is the JSON structure returned by the VirusTotal domain report API.
type response struct {
	Subdomains []string `json:"subdomains"`
}
13
// Source is the passive scraping agent for the VirusTotal API.
type Source struct{}
16
17 // Run function returns all subdomains found with the service
18 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
19 results := make(chan subscraping.Result)
20
21 go func() {
22 if session.Keys.Virustotal == "" {
23 close(results)
24 return
25 }
26
27 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", session.Keys.Virustotal, domain))
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 session.DiscardHttpResponse(resp)
31 close(results)
32 return
33 }
34
35 data := response{}
36 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
37 if err != nil {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
39 resp.Body.Close()
40 close(results)
41 return
42 }
43 resp.Body.Close()
44
45 for _, subdomain := range data.Subdomains {
46 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
47 }
48 close(results)
49 }()
50
51 return results
52 }
53
// Name returns the name of the source ("virustotal"), used to tag results.
func (s *Source) Name() string {
	return "virustotal"
}
0 package waybackarchive
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "strings"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
// Source is the passive scraping agent for the Wayback Machine CDX API.
type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 pagesResp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=json&fl=original&collapse=urlkey", domain))
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(pagesResp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(pagesResp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 pagesResp.Body.Close()
31 close(results)
32 return
33 }
34 pagesResp.Body.Close()
35
36 match := session.Extractor.FindAllString(string(body), -1)
37 for _, subdomain := range match {
38 subdomain = strings.TrimPrefix(subdomain, "25")
39 subdomain = strings.TrimPrefix(subdomain, "2F")
40
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
42 }
43 close(results)
44 }()
45
46 return results
47 }
48
// Name returns the name of the source ("waybackarchive"), used to tag results.
func (s *Source) Name() string {
	return "waybackarchive"
}
0 package zoomeye
1
2 import (
3 "bytes"
4 "context"
5 "encoding/json"
6 "errors"
7 "fmt"
8 "io"
9 "io/ioutil"
10 "net/http"
11
12 "github.com/projectdiscovery/subfinder/pkg/subscraping"
13 )
14
// zoomAuth holds the ZoomEye credentials sent as the login request body.
type zoomAuth struct {
	User string `json:"username"`
	Pass string `json:"password"`
}
20
// loginResp holds the JWT access token returned by a successful login.
type loginResp struct {
	JWT string `json:"access_token"`
}
24
// zoomeyeResults models one page of ZoomEye web search results.
type zoomeyeResults struct {
	Matches []struct {
		Site    string   `json:"site"`    // primary hostname of the match
		Domains []string `json:"domains"` // additional associated hostnames
	} `json:"matches"`
}
32
// Source is the passive scraping agent for the ZoomEye API.
type Source struct{}
35
36 // Run function returns all subdomains found with the service
37 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
38 results := make(chan subscraping.Result)
39
40 go func() {
41 if session.Keys.ZoomEyeUsername == "" || session.Keys.ZoomEyePassword == "" {
42 close(results)
43 return
44 }
45 jwt, err := doLogin(session)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 close(results)
49 return
50 }
51 // check if jwt is null
52 if jwt == "" {
53 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("could not log into zoomeye")}
54 close(results)
55 return
56 }
57 headers := map[string]string{
58 "Authorization": fmt.Sprintf("JWT %s", jwt),
59 "Accept": "application/json",
60 "Content-Type": "application/json",
61 }
62 for currentPage := 0; currentPage <= 100; currentPage++ {
63 api := fmt.Sprintf("https://api.zoomeye.org/web/search?query=hostname:%s&page=%d", domain, currentPage)
64 resp, err := session.Get(ctx, api, "", headers)
65 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
66 if err != nil {
67 if !isForbidden && currentPage == 0 {
68 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
69 session.DiscardHttpResponse(resp)
70 }
71 close(results)
72 return
73 }
74
75 defer resp.Body.Close()
76 res := &zoomeyeResults{}
77 err = json.NewDecoder(resp.Body).Decode(res)
78 if err != nil {
79 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
80 resp.Body.Close()
81 close(results)
82 return
83 }
84 resp.Body.Close()
85 for _, r := range res.Matches {
86 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: r.Site}
87 for _, domain := range r.Domains {
88 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: domain}
89 }
90 }
91 currentPage++
92 }
93 close(results)
94 }()
95
96 return results
97 }
98
99 // doLogin performs authentication on the ZoomEye API
100 func doLogin(session *subscraping.Session) (string, error) {
101 creds := &zoomAuth{
102 User: session.Keys.ZoomEyeUsername,
103 Pass: session.Keys.ZoomEyePassword,
104 }
105 body, err := json.Marshal(&creds)
106 if err != nil {
107 return "", err
108 }
109 req, err := http.NewRequest("POST", "https://api.zoomeye.org/user/login", bytes.NewBuffer(body))
110 if err != nil {
111 return "", err
112 }
113 req.Header.Add("Content-Type", "application/json")
114 resp, err := session.Client.Do(req)
115 if err != nil {
116 return "", err
117 }
118 // if not 200, bad credentials
119 if resp.StatusCode != 200 {
120 io.Copy(ioutil.Discard, resp.Body)
121 resp.Body.Close()
122 return "", fmt.Errorf("login failed, non-200 response from zoomeye")
123 }
124
125 defer resp.Body.Close()
126 login := &loginResp{}
127 err = json.NewDecoder(resp.Body).Decode(login)
128 if err != nil {
129 return "", err
130 }
131 return login.JWT, nil
132 }
133
// Name returns the name of the source ("zoomeye"), used to tag results.
func (s *Source) Name() string {
	return "zoomeye"
}
0 package subscraping
1
2 import (
3 "context"
4 "net/http"
5 "regexp"
6 )
7
// Source is an interface inherited by each passive source
type Source interface {
	// Run takes a domain as argument and a session object
	// which contains the extractor for subdomains, http client
	// and other stuff. It returns a channel on which the source
	// streams its results; the source closes the channel when done.
	Run(context.Context, string, *Session) <-chan Result
	// Name returns the name of the source
	Name() string
}
17
// Session is the option passed to the source, an option is created
// uniquely for each source.
type Session struct {
	// Extractor is the regex for subdomains created for each domain
	Extractor *regexp.Regexp
	// Keys is the API keys for the application
	Keys Keys
	// Client is the current http client
	Client *http.Client
}
28
// Keys contains the current API Keys we have in store.
// Field names map to entries in the user's provider configuration file
// via the json tags below.
type Keys struct {
	Binaryedge  string `json:"binaryedge"`
	// NOTE(review): the Censys token/secret fields intentionally read the
	// "censysUsername"/"censysPassword" JSON keys — confirm before renaming.
	CensysToken          string   `json:"censysUsername"`
	CensysSecret         string   `json:"censysPassword"`
	Certspotter          string   `json:"certspotter"`
	Chaos                string   `json:"chaos"`
	DNSDB                string   `json:"dnsdb"`
	GitHub               []string `json:"github"`
	IntelXHost           string   `json:"intelXHost"`
	IntelXKey            string   `json:"intelXKey"`
	PassiveTotalUsername string   `json:"passivetotal_username"`
	PassiveTotalPassword string   `json:"passivetotal_password"`
	Securitytrails       string   `json:"securitytrails"`
	Shodan               string   `json:"shodan"`
	Spyse                string   `json:"spyse"`
	URLScan              string   `json:"urlscan"`
	Virustotal           string   `json:"virustotal"`
	ZoomEyeUsername      string   `json:"zoomeye_username"`
	ZoomEyePassword      string   `json:"zoomeye_password"`
}
50
// Result is a result structure returned by a source
type Result struct {
	Type ResultType // Subdomain or Error
	Source string // name of the source that produced this result
	Value string // the discovered subdomain (meaningful when Type is Subdomain)
	Error error // the failure (meaningful when Type is Error)
}
58
// ResultType is the type of result returned by the source
type ResultType int

// Types of results returned by the source: a discovered subdomain,
// or an error encountered while scraping.
const (
	Subdomain ResultType = iota
	Error
)
0 package subscraping
1
2 import (
3 "regexp"
4 "sync"
5 )
6
var subdomainExtractorMutex = &sync.Mutex{}

// NewSubdomainExtractor creates a new regular expression to extract
// subdomains from text based on the given domain. The domain is
// quoted so its dots match literally rather than as regex wildcards
// (previously "example.com" would also match "exampleXcom").
func NewSubdomainExtractor(domain string) (*regexp.Regexp, error) {
	subdomainExtractorMutex.Lock()
	defer subdomainExtractorMutex.Unlock()
	extractor, err := regexp.Compile(`[a-zA-Z0-9\*_.-]+\.` + regexp.QuoteMeta(domain))
	if err != nil {
		return nil, err
	}
	return extractor, nil
}
20
// Exists reports whether key is present in the given slice of values.
func Exists(values []string, key string) bool {
	for i := range values {
		if values[i] == key {
			return true
		}
	}
	return false
}
Binary diff not shown
Binary diff not shown