Import upstream version 2.5.4 (Kali Janitor, 1 year, 6 months ago)
146 changed file(s) with 6903 addition(s) and 4586 deletion(s).
0 ---
1 name: Bug report
2 about: Create a report to help us improve
3 title: "[Issue] "
4 labels: ''
5 assignees: ''
6
7 ---
8
9 **Describe the bug**
10 A clear and concise description of what the bug is.
11
12 **Subfinder version**
13 Include the version of subfinder you are using, `subfinder -version`
14
15 **Complete command you used to reproduce this**
16
17
18 **Screenshots**
19 Add screenshots of the error for better context.
0 blank_issues_enabled: false
1
2 contact_links:
3 - name: Ask a question / seek advice on using subfinder
4 url: https://github.com/projectdiscovery/subfinder/discussions/categories/q-a
5 about: Ask a question or request support for using subfinder
6
7 - name: Share idea / feature to discuss for subfinder
8 url: https://github.com/projectdiscovery/subfinder/discussions/categories/ideas
9 about: Share idea / feature to discuss for subfinder
10
11 - name: Connect with PD Team (Discord)
12 url: https://discord.gg/projectdiscovery
13 about: Connect with PD Team for direct communication
0 ---
1 name: Feature request
2 about: Request a feature to implement in this project
3 labels: 'Type: Enhancement'
4 ---
5
6 <!--
7 1. Please make sure to provide a detailed description with all the relevant information that might be required to start working on this feature.
8 2. In case you are not sure about your request or whether the particular feature is already supported or not, please start a discussion instead.
9 3. GitHub Discussion: https://github.com/projectdiscovery/subfinder/discussions/categories/ideas
10 4. Join our discord server at https://discord.gg/projectdiscovery to discuss the idea on the #subfinder channel.
11 -->
12
13 ### Please describe your feature request:
14 <!-- A clear and concise description of the feature to implement -->
15
16 ### Describe the use case of this feature:
17 <!-- A clear and concise description of the feature request's motivation and the use-cases in which it could be useful. -->
0 ---
1 name: Issue report
2 about: Create a report to help us improve the project
3 labels: 'Type: Bug'
4
5 ---
6
7 <!--
8 1. Please search to see if an issue already exists for the bug you encountered.
9 2. For support requests, FAQs or "How to" questions, please use the GitHub Discussions section instead - https://github.com/projectdiscovery/subfinder/discussions or
10 3. Join our discord server at https://discord.gg/projectdiscovery and post the question on the #subfinder channel.
11 -->
12
13 <!-- ISSUES MISSING IMPORTANT INFORMATION MAY BE CLOSED WITHOUT INVESTIGATION. -->
14
15 ### Subfinder version:
16 <!-- You can find current version of subfinder with "subfinder -version" -->
17 <!-- We only accept issues that are reproducible on the latest version of subfinder. -->
18 <!-- You can find the latest version of project at https://github.com/projectdiscovery/subfinder/releases/ -->
19
20 ### Current Behavior:
21 <!-- A concise description of what you're experiencing. -->
22
23 ### Expected Behavior:
24 <!-- A concise description of what you expected to happen. -->
25
26 ### Steps To Reproduce:
27 <!--
28 Example: steps to reproduce the behavior:
29 1. Run 'subfinder ..'
30 2. See error...
31 -->
32
33
34 ### Anything else:
35 <!-- Links? References? Screenshots? Anything that will give us more context about the issue that you are encountering! -->
0 # To get started with Dependabot version updates, you'll need to specify which
1 # package ecosystems to update and where the package manifests are located.
2 # Please see the documentation for all configuration options:
3 # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
4
5 version: 2
6 updates:
7
8 # Maintain dependencies for GitHub Actions
9 - package-ecosystem: "github-actions"
10 directory: "/"
11 schedule:
12 interval: "weekly"
13 target-branch: "dev"
14 commit-message:
15 prefix: "chore"
16 include: "scope"
17 labels:
18 - "Type: Maintenance"
19
20 # Maintain dependencies for go modules
21 - package-ecosystem: "gomod"
22 directory: "v2/"
23 schedule:
24 interval: "daily"
25 target-branch: "dev"
26 commit-message:
27 prefix: "chore"
28 include: "scope"
29 labels:
30 - "Type: Maintenance"
31
32 # Maintain dependencies for docker
33 - package-ecosystem: "docker"
34 directory: "/"
35 schedule:
36 interval: "weekly"
37 target-branch: "dev"
38 commit-message:
39 prefix: "chore"
40 include: "scope"
41 labels:
42 - "Type: Maintenance"
0 name: 🔨 Build Test
1 on:
2 push:
3 pull_request:
4 workflow_dispatch:
5
6 jobs:
7 build:
8 name: Test Builds
9 runs-on: ${{ matrix.os }}
10 strategy:
11 matrix:
12 os: [ubuntu-latest, windows-latest, macOS-latest]
13 steps:
14 - name: Set up Go
15 uses: actions/setup-go@v3
16 with:
17 go-version: 1.18
18
19 - name: Check out code
20 uses: actions/checkout@v3
21
22 - name: Build
23 run: go build ./...
24 working-directory: v2/
25
26 - name: Test
27 run: go test ./...
28 working-directory: v2/
29
30 - name: Integration Tests
31 env:
32 GH_ACTION: true
33 DNSREPO_API_KEY: ${{secrets.DNSREPO_API}}
34 run: bash run.sh
35 working-directory: v2/cmd/integration-test/
36
37 - name: Race Condition Tests
38 run: go build -race ./...
39 working-directory: v2/
+0
-27
.github/workflows/build.yaml
0 name: Build
1 on:
2 push:
3 branches:
4 - master
5 pull_request:
6
7 jobs:
8 build:
9 name: Build
10 runs-on: ubuntu-latest
11 steps:
12 - name: Set up Go
13 uses: actions/setup-go@v2
14 with:
15 go-version: 1.13
16
17 - name: Check out code
18 uses: actions/checkout@v2
19
20 - name: Test
21 run: go test .
22 working-directory: cmd/subfinder/
23
24 - name: Build
25 run: go build .
26 working-directory: cmd/subfinder/
0 name: 🚨 CodeQL Analysis
1
2 on:
3 workflow_dispatch:
4 pull_request:
5 branches:
6 - dev
7
8 jobs:
9 analyze:
10 name: Analyze
11 runs-on: ubuntu-latest
12 permissions:
13 actions: read
14 contents: read
15 security-events: write
16
17 strategy:
18 fail-fast: false
19 matrix:
20 language: [ 'go' ]
21 # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
22
23 steps:
24 - name: Checkout repository
25 uses: actions/checkout@v3
26
27 # Initializes the CodeQL tools for scanning.
28 - name: Initialize CodeQL
29 uses: github/codeql-action/init@v2
30 with:
31 languages: ${{ matrix.language }}
32
33 - name: Autobuild
34 uses: github/codeql-action/autobuild@v2
35
36 - name: Perform CodeQL Analysis
37 uses: github/codeql-action/analyze@v2
+0
-17
.github/workflows/dockerhub-push-on-release.yml
0 # dockerhub-push pushes docker build to dockerhub automatically
1 # on the creation of a new release
2 name: Publish to Dockerhub on creation of a new release
3 on:
4 release:
5 types: [published]
6 jobs:
7 build:
8 runs-on: ubuntu-latest
9 steps:
10 - uses: actions/checkout@master
11 - name: Publish to Dockerhub Registry
12 uses: elgohr/Publish-Docker-Github-Action@master
13 with:
14 name: projectdiscovery/subfinder
15 username: ${{ secrets.DOCKER_USERNAME }}
16 password: ${{ secrets.DOCKER_PASSWORD }}
0 name: 🌥 Docker Push
1
2 on:
3 workflow_run:
4 workflows: ["🎉 Release Binary"]
5 types:
6 - completed
7 workflow_dispatch:
8
9 jobs:
10 docker:
11 runs-on: ubuntu-latest
12 steps:
13
14 - name: Checkout
15 uses: actions/checkout@v3
16
17 - name: Get Github tag
18 id: meta
19 run: |
20 echo "::set-output name=tag::$(curl --silent "https://api.github.com/repos/projectdiscovery/subfinder/releases/latest" | jq -r .tag_name)"
21
22 - name: Set up QEMU
23 uses: docker/setup-qemu-action@v2
24
25 - name: Set up Docker Buildx
26 uses: docker/setup-buildx-action@v2
27
28 - name: Login to DockerHub
29 uses: docker/login-action@v2
30 with:
31 username: ${{ secrets.DOCKER_USERNAME }}
32 password: ${{ secrets.DOCKER_TOKEN }}
33
34 - name: Build and push
35 uses: docker/build-push-action@v3
36 with:
37 context: .
38 platforms: linux/amd64,linux/arm64,linux/arm
39 push: true
40 tags: projectdiscovery/subfinder:latest,projectdiscovery/subfinder:${{ steps.meta.outputs.tag }}
0 name: 🙏🏻 Lint Test
1 on:
2 push:
3 pull_request:
4 workflow_dispatch:
5
6 jobs:
7 lint:
8 name: Lint Test
9 runs-on: ubuntu-latest
10 steps:
11 - uses: actions/checkout@v3
12 - uses: actions/setup-go@v3
13 with:
14 go-version: 1.18
15 - name: Run golangci-lint
16 uses: golangci/[email protected]
17 with:
18 version: latest
19 args: --timeout 5m
20 working-directory: v2/
0 name: 🎉 Release Binary
1
2 on:
3 create:
4 tags:
5 - v*
6 workflow_dispatch:
7
8 jobs:
9 release:
10 runs-on: ubuntu-latest
11 steps:
12 - name: "Check out code"
13 uses: actions/checkout@v3
14 with:
15 fetch-depth: 0
16
17 - name: "Set up Go"
18 uses: actions/setup-go@v3
19 with:
20 go-version: 1.18
21
22 - name: "Create release on GitHub"
23 uses: goreleaser/goreleaser-action@v3
24 with:
25 args: "release --rm-dist"
26 version: latest
27 workdir: v2/
28 env:
29 GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
30 SLACK_WEBHOOK: "${{ secrets.RELEASE_SLACK_WEBHOOK }}"
+0
-28
.github/workflows/release.yml
0 name: Release
1 on:
2 create:
3 tags:
4 - v*
5
6 jobs:
7 release:
8 runs-on: ubuntu-latest
9 steps:
10 -
11 name: "Check out code"
12 uses: actions/checkout@v2
13 with:
14 fetch-depth: 0
15 -
16 name: "Set up Go"
17 uses: actions/setup-go@v2
18 with:
19 go-version: 1.14
20 -
21 env:
22 GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
23 name: "Create release on GitHub"
24 uses: goreleaser/goreleaser-action@v2
25 with:
26 args: "release --rm-dist"
27 version: latest
0 name: 👮🏼‍♂️ Sonarcloud
1 on:
2 push:
3 branches:
4 - master
5 - dev
6 pull_request:
7 types: [opened, synchronize, reopened]
8 workflow_dispatch:
9
10 jobs:
11 sonarcloud:
12 name: SonarCloud
13 runs-on: ubuntu-latest
14 steps:
15 - uses: actions/checkout@v3
16 with:
17 fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
18
19 - name: "Set up Go"
20 uses: actions/setup-go@v3
21 with:
22 go-version: 1.18
23
24 - name: Run unit Tests
25 working-directory: v2/
26 run: |
27 go test -coverprofile=./cov.out ./...
28
29 - name: Run Gosec Security Scanner
30 working-directory: v2/
31 run: |
32 go install github.com/securego/gosec/cmd/gosec@latest
33 gosec -no-fail -fmt=sonarqube -out report.json ./...
34
35 - name: SonarCloud Scan
36 uses: SonarSource/sonarcloud-github-action@master
37 env:
38 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
39 SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
00 .DS_Store
11 cmd/subfinder/subfinder
2 # subfinder binary when built with `go build`
3 v2/cmd/subfinder/subfinder
4 # subfinder binary when built with `make`
5 v2/subfinder
26 vendor/
3 .idea
7 .idea
8 .devcontainer
+0
-21
.goreleaser.yml
0 builds:
1 - binary: subfinder
2 main: cmd/subfinder/main.go
3 goos:
4 - linux
5 - windows
6 - darwin
7 goarch:
8 - amd64
9 - 386
10 - arm
11 - arm64
12
13 archives:
14 - id: tgz
15 format: tar.gz
16 replacements:
17 darwin: macOS
18 format_overrides:
19 - goos: windows
20 format: zip
11
22 Subfinder leverages multiple open APIs; it is developed to help individuals with research or internal work. If you wish to incorporate this tool into a commercial offering or use it for commercial purposes, you must agree to the terms of the leveraged services:
33
4 - Project Sonar / Bufferover: https://opendata.rapid7.com/about
4 - Project Sonar / Bufferover: https://opendata.rapid7.com/about / [https://tls.bufferover.run](https://tls.bufferover.run/dns?q)
55 - CommonCrawl: https://commoncrawl.org/terms-of-use/full
66 - certspotter: https://sslmate.com/terms
77 - dnsdumpster: https://hackertarget.com/terms
8 - entrust: https://www.entrustdatacard.com/pages/terms-of-use
98 - Google Transparency: https://policies.google.com/terms
10 - Threatcrowd: https://www.alienvault.com/terms/website-terms-of-use07may2018
9 - Alienvault: https://www.alienvault.com/terms/website-terms-of-use07may2018
1110
1211 ---
1312
0 # Build Container
1 FROM golang:1.13.4-alpine3.10 AS build-env
2 MAINTAINER Ice3man ([email protected])
3 RUN apk add --no-cache --upgrade git openssh-client ca-certificates
4 RUN go get -u github.com/golang/dep/cmd/dep
5 WORKDIR /go/src/app
0 # Build
1 FROM golang:1.19.2-alpine AS build-env
2 RUN apk add build-base
3 RUN go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest
64
7 # Install
8 RUN go get -u github.com/projectdiscovery/subfinder/cmd/subfinder
5 # Release
6 FROM alpine:3.16.2
7 RUN apk -U upgrade --no-cache \
8 && apk add --no-cache bind-tools ca-certificates
9 COPY --from=build-env /go/bin/subfinder /usr/local/bin/subfinder
910
1011 ENTRYPOINT ["subfinder"]
+0
-22
ISSUE_TEMPLATE.md
0 ## What's the problem (or question)?
1 <!--- If describing a bug, tell us what happens instead of the expected behavior -->
2 <!--- If suggesting a change/improvement, explain the difference from current behavior -->
3
4 ## Do you have an idea for a solution?
5 <!--- Not obligatory, but suggest a fix/reason for the bug, -->
6 <!--- or ideas how to implement the addition or change -->
7
8 ## How can we reproduce the issue?
9 <!--- Provide unambiguous set of steps to reproduce this bug. Include command to reproduce, if relevant (you can mask the sensitive data) -->
10 1.
11 2.
12 3.
13 4.
14
15 ## What are the running context details?
16 <!--- Include as many relevant details about the running context you experienced the bug/problem in -->
17 * Installation method (e.g. `pip`, `apt-get`, `git clone` or `zip`/`tar.gz`):
18 * Client OS (e.g. `Microsoft Windows 10`)
19 * Program version (see banner):
20 * Relevant console output (if any):
21 * Exception traceback (if any):
00 MIT License
11
2 Copyright (c) Exposed Atoms Pvt Ltd
2 Copyright (c) 2021 ProjectDiscovery, Inc.
33
44 Permission is hereby granted, free of charge, to any person obtaining a copy
55 of this software and associated documentation files (the "Software"), to deal
0 <h1 align="left">
1 <img src="static/subfinder-logo.png" alt="subfinder" width="170px"></a>
0 <h1 align="center">
1 <img src="static/subfinder-logo.png" alt="subfinder" width="200px"></a>
22 <br>
33 </h1>
44
5
6 [![License](https://img.shields.io/badge/license-MIT-_red.svg)](https://opensource.org/licenses/MIT)
7 [![Go Report Card](https://goreportcard.com/badge/github.com/projectdiscovery/subfinder)](https://goreportcard.com/report/github.com/projectdiscovery/subfinder)
8 [![contributions welcome](https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat)](https://github.com/projectdiscovery/subfinder/issues)
9
10
11 subfinder is a subdomain discovery tool that discovers valid subdomains for websites by using passive online sources. It has a simple modular architecture and is optimized for speed. subfinder is built for doing one thing only - passive subdomain enumeration, and it does that very well.
12
13 We have designed subfinder to comply with all passive sources licenses, and usage restrictions, as well as maintained a consistently passive model to make it useful to both penetration testers and bug bounty hunters alike.
14
15
16 # Resources
17 - [Features](#features)
18 - [Usage](#usage)
19 - [Installation Instuctions (direct)](#direct-installation)
20 - [Installation Instructions](#installation-instructions)
21 - [From Binary](#from-binary)
22 - [From Source](#from-source)
23 - [From Github](#from-github)
24 - [Upgrading](#upgrading)
25 - [Post Installation Instructions](#post-installation-instructions)
26 - [Running subfinder](#running-subfinder)
27 - [Running in a Docker Container](#running-in-a-docker-container)
28
29
30 # Features
5 <h4 align="center">Fast passive subdomain enumeration tool.</h4>
6
7
8 <p align="center">
9 <a href="https://goreportcard.com/report/github.com/projectdiscovery/subfinder/v2"><img src="https://goreportcard.com/badge/github.com/projectdiscovery/subfinder"></a>
10 <a href="https://github.com/projectdiscovery/subfinder/issues"><img src="https://img.shields.io/badge/contributions-welcome-brightgreen.svg?style=flat"></a>
11 <a href="https://github.com/projectdiscovery/subfinder/releases"><img src="https://img.shields.io/github/release/projectdiscovery/subfinder"></a>
12 <a href="https://twitter.com/pdiscoveryio"><img src="https://img.shields.io/twitter/follow/pdiscoveryio.svg?logo=twitter"></a>
13 <a href="https://discord.gg/projectdiscovery"><img src="https://img.shields.io/discord/695645237418131507.svg?logo=discord"></a>
14 </p>
15
16 <p align="center">
17 <a href="#features">Features</a> •
18 <a href="#installation">Install</a> •
19 <a href="#running-subfinder">Usage</a> •
20 <a href="#post-installation-instructions">API Setup</a> •
21 <a href="#subfinder-go-library">Library</a> •
22 <a href="https://discord.gg/projectdiscovery">Join Discord</a>
23 </p>
24
25 ---
26
27
28 Subfinder is a subdomain discovery tool that returns valid subdomains for websites, using passive online sources. It has a simple, modular architecture and is optimized for speed. `subfinder` is built for doing one thing only - passive subdomain enumeration, and it does that very well.
29
30 We have designed `subfinder` to comply with all passive source licenses and usage restrictions, and to maintain a consistently passive model, making it useful to both penetration testers and bug bounty hunters alike.
31
32
33 # Features
3134
3235 <h1 align="left">
3336 <img src="static/subfinder-run.png" alt="subfinder" width="700px"></a>
3538 </h1>
3639
3740
38 - Simple and modular code base making it easy to contribute.
39 - Fast And Powerful Resolution and wildcard elimination module
40 - **Curated** passive sources to maximize results (26 Sources as of now)
41 - Fast and powerful resolution and wildcard elimination module
42 - **Curated** passive sources to maximize results
4143 - Multiple output formats supported (JSON, file, stdout)
4244 - Optimized for speed, very fast and **lightweight** on resources
43 - **Stdin** and **stdout** support for integrating in workflows
45 - **STDIN/OUT** support for integrating in workflows
4446
4547
4648 # Usage
4749
48 ```bash
50 ```sh
4951 subfinder -h
5052 ```
5153 This will display help for the tool. Here are all the switches it supports.
5254
53 | Flag | Description | Example |
54 |------|-------------|---------|
55 | -cd | Upload results to the Chaos API (api-key required) | subfinder -d uber.com -cd |
56 | -config string | Configuration file for API Keys, etc | subfinder -config config.yaml |
57 | -d | Domain to find subdomains for | subfinder -d uber.com |
58 | -dL | File containing list of domains to enumerate | subfinder -dL hackerone-hosts.txt |
59 | -exclude-sources | List of sources to exclude from enumeration | subfinder -exclude-sources archiveis |
60 | -max-time | Minutes to wait for enumeration results (default 10) | subfinder -max-time 1 |
61 | -nC | Don't Use colors in output | subfinder -nC |
62 | -nW | Remove Wildcard & Dead Subdomains from output | subfinder -nW |
63 | -ls | List all available sources | subfinder -ls |
64 | -o | File to write output to (optional) | subfinder -o output.txt |
65 | -oD | Directory to write enumeration results to (optional) | subfinder -oD ~/outputs |
66 | -oI | Write output in Host,IP format | subfinder -oI |
67 | -oJ | Write output in JSON lines Format | subfinder -oJ |
68 | -r | Comma-separated list of resolvers to use | subfinder -r 1.1.1.1,1.0.0.1 |
69 | -rL | Text file containing list of resolvers to use | subfinder -rL resolvers.txt
70 | -silent | Show only subdomains in output | subfinder -silent |
71 | -sources | Comma separated list of sources to use | subfinder -sources shodan,censys |
72 | -t | Number of concurrent goroutines for resolving (default 10) | subfinder -t 100 |
73 | -timeout | Seconds to wait before timing out (default 30) | subfinder -timeout 30 |
74 | -v | Show Verbose output | subfinder -v |
75 | -version | Show current program version | subfinder -version |
76
77
78 # Installation Instructions
79
80 ### From Binary
81
82 The installation is easy. You can download the pre-built binaries for different platforms from the [releases](https://github.com/projectdiscovery/subfinder/releases/) page. Extract them using tar, move it to your `$PATH` and you're ready to go.
83
84 ```bash
85 > tar -xzvf subfinder-linux-amd64.tar.gz
86 > mv subfinder /usr/local/local/bin/
87 > subfinder -h
88 ```
89
90 ### From Source
91
92 subfinder requires go1.13+ to install successfully. Run the following command to get the repo -
93
94 ```bash
95 GO111MODULE=on go get -v github.com/projectdiscovery/subfinder/cmd/subfinder
96 ```
97
98 ### From Github
99
100 ```bash
101 git clone https://github.com/projectdiscovery/subfinder.git
102 cd subfinder/cmd/subfinder
103 go build .
104 mv subfinder /usr/local/bin/
105 subfinder -h
106 ```
107
108 ### Upgrading
109 If you wish to upgrade the package you can use:
110
111 ```bash
112 GO111MODULE=on go get -u -v github.com/projectdiscovery/subfinder/cmd/subfinder
113 ```
55 ```yaml
56 Flags:
57 INPUT:
58 -d, -domain string[] domains to find subdomains for
59 -dL, -list string file containing list of domains for subdomain discovery
60
61 SOURCE:
62 -s, -sources string[] specific sources to use for discovery (-s crtsh,github). Use -ls to display all available sources.
63 -recursive use only sources that can handle subdomains recursively (e.g. subdomain.domain.tld vs domain.tld)
64 -all use all sources for enumeration (slow)
65 -es, -exclude-sources string[] sources to exclude from enumeration (-es alienvault,zoomeye)
66
67 FILTER:
68 -m, -match string[] subdomain or list of subdomain to match (file or comma separated)
69 -f, -filter string[] subdomain or list of subdomain to filter (file or comma separated)
70
71 RATE-LIMIT:
72 -rl, -rate-limit int maximum number of http requests to send per second
73 -t int number of concurrent goroutines for resolving (-active only) (default 10)
74
75 OUTPUT:
76 -o, -output string file to write output to
77 -oJ, -json write output in JSONL(ines) format
78 -oD, -output-dir string directory to write output (-dL only)
79 -cs, -collect-sources include all sources in the output (-json only)
80 -oI, -ip include host IP in output (-active only)
81
82 CONFIGURATION:
83 -config string flag config file (default "$HOME/.config/subfinder/config.yaml")
84 -pc, -provider-config string provider config file (default "$HOME/.config/subfinder/provider-config.yaml")
85 -r string[] comma separated list of resolvers to use
86 -rL, -rlist string file containing list of resolvers to use
87 -nW, -active display active subdomains only
88 -proxy string http proxy to use with subfinder
89 -ei, -exclude-ip exclude IPs from the list of domains
90
91 DEBUG:
92 -silent show only subdomains in output
93 -version show version of subfinder
94 -v show verbose output
95 -nc, -no-color disable color in output
96 -ls, -list-sources list all available sources
97
98 OPTIMIZATION:
99 -timeout int seconds to wait before timing out (default 30)
100 -max-time int minutes to wait for enumeration results (default 10)
101 ```
102
103 # Installation
104
105 Subfinder requires **go1.17** to install successfully. Run the following command to install the latest version:
106
107 ```sh
108 go install -v github.com/projectdiscovery/subfinder/v2/cmd/subfinder@latest
109 ```
110
114111
115112 ## Post Installation Instructions
116113
117114 Subfinder will work after following the installation instructions; however, to configure Subfinder to work with certain services, you will need to set up API keys. The following services do not work without an API key:
118115
119 - [Virustotal](https://www.virustotal.com)
120 - [Passivetotal](http://passivetotal.org)
121 - [SecurityTrails](http://securitytrails.com)
122 - [Censys](https://censys.io)
123 - [Binaryedge](https://binaryedge.io)
124 - [Shodan](https://shodan.io)
125 - [URLScan](https://urlscan.io)
126 - [Chaos](https://chaos.projectdiscovery.io)
127 - [Spyse](https://spyse.com)
128 - [DnsDB](https://api.dnsdb.info)
129 - [Zoomeye](https://www.zoomeye.org)
130 - [Github](https://github.com)
131 - [Intelx](https://intelx.io)
132
133 Theses values are stored in the `$HOME/.config/subfinder/config.yaml` file which will be created when you run the tool for the first time. The configuration file uses the YAML format. Multiple API keys can be specified for each of these services from which one of them will be used for enumeration.
116 [BeVigil](https://bevigil.com/osint-api), [Binaryedge](https://binaryedge.io), [C99](https://api.c99.nl/), [Certspotter](https://sslmate.com/certspotter/api/), [Chinaz](http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi), [Censys](https://censys.io), [Chaos](https://chaos.projectdiscovery.io), [DnsDB](https://api.dnsdb.info), [Fofa](https://fofa.info/static_pages/api_help), [Github](https://github.com), [Intelx](https://intelx.io), [Passivetotal](http://passivetotal.org), [Robtex](https://www.robtex.com/api/), [SecurityTrails](http://securitytrails.com), [Shodan](https://shodan.io), [Threatbook](https://x.threatbook.cn/en), [Virustotal](https://www.virustotal.com), [WhoisXML API](https://whoisxmlapi.com/), [Zoomeye](https://www.zoomeye.org)
117
118 These values are stored in the `$HOME/.config/subfinder/provider-config.yaml` file, which will be created when you run the tool for the first time. The configuration file uses the YAML format. Multiple API keys can be specified for each of these services, of which one will be used for enumeration.
134119
135120 For sources that require multiple keys, namely `Censys` and `Passivetotal`, the keys can be added by separating them with a colon (:).
136121
137 An example config file -
122 An example provider config file -
138123
139124 ```yaml
140 resolvers:
141 - 1.1.1.1
142 - 1.0.0.1
143 sources:
144 - binaryedge
145 - bufferover
146 - censys
147 - passivetotal
148 - sitedossier
149125 binaryedge:
150126 - 0bf8919b-aab9-42e4-9574-d3b639324597
151127 - ac244e2f-b635-4581-878a-33f4e79a2c13
152128 censys:
153129 - ac244e2f-b635-4581-878a-33f4e79a2c13:dd510d6e-1b6e-4655-83f6-f347b363def9
154130 certspotter: []
155 passivetotal:
131 passivetotal:
156132 - [email protected]:sample_password
157133 securitytrails: []
158134 shodan:
159135 - AAAAClP1bJJSRMEYJazgwhJKrggRwKA
160136 github:
161 - d23a554bbc1aabb208c9acfbd2dd41ce7fc9db39
162 - asdsd54bbc1aabb208c9acfbd2dd41ce7fc9db39
137 - ghp_lkyJGU3jv1xmwk4SDXavrLDJ4dl2pSJMzj4X
138 - ghp_gkUuhkIYdQPj13ifH4KA3cXRn8JD2lqir2d4
163139 ```
164140
165141 # Running Subfinder
166142
167143 To run the tool on a target, just use the following command.
168 ```bash
169 > subfinder -d freelancer.com
170 ```
171
172 This will run the tool against freelancer.com. There are a number of configuration options that you can pass along with this command. The verbose switch (-v) can be used to display verbose information.
173
174 ```bash
175 [CERTSPOTTER] www.fi.freelancer.com
176 [DNSDUMPSTER] hosting.freelancer.com
177 [DNSDUMPSTER] support.freelancer.com
178 [DNSDUMPSTER] accounts.freelancer.com
179 [DNSDUMPSTER] phabricator.freelancer.com
180 [DNSDUMPSTER] cdn1.freelancer.com
181 [DNSDUMPSTER] t1.freelancer.com
182 [DNSDUMPSTER] wdc.t1.freelancer.com
183 [DNSDUMPSTER] dal.t1.freelancer.com
184 ```
185
186 The `-silent` switch can be used to show only subdomains found without any other info.
187
188
189 The `-o` command can be used to specify an output file.
190
191 ```bash
192 > subfinder -d freelancer.com -o output.txt
193 ```
194
195 To run the tool on a list of domains, `-dL` option can be used. This requires a directory to write the output files. Subdomains for each domain from the list are written in a text file in the directory specified by the `-oD` flag with their name being the domain name.
196
197 ```bash
198 > cat domains.txt
144
145 ```console
146 subfinder -d hackerone.com
147
148 __ _____ __
149 _______ __/ /_ / __(_)___ ____/ /__ _____
150 / ___/ / / / __ \/ /_/ / __ \/ __ / _ \/ ___/
151 (__ ) /_/ / /_/ / __/ / / / / /_/ / __/ /
152 /____/\__,_/_.___/_/ /_/_/ /_/\__,_/\___/_/ v2.4.9
153
154 projectdiscovery.io
155
156 Use with caution. You are responsible for your actions
157 Developers assume no liability and are not responsible for any misuse or damage.
158 By using subfinder, you also agree to the terms of the APIs used.
159
160 [INF] Enumerating subdomains for hackerone.com
161
162 www.hackerone.com
163 support.hackerone.com
164 links.hackerone.com
165 api.hackerone.com
166 o1.email.hackerone.com
167 go.hackerone.com
168 3d.hackerone.com
169 resources.hackerone.com
170 a.ns.hackerone.com
171 b.ns.hackerone.com
172 mta-sts.hackerone.com
173 docs.hackerone.com
174 mta-sts.forwarding.hackerone.com
175 gslink.hackerone.com
199176 hackerone.com
200 google.com
201
202 > subfinder -dL domains.txt -oD ~/path/to/output
203 > ls ~/path/to/output
204
205 hackerone.com.txt
206 google.com.txt
207 ```
208
209 If you want to save results to a single file while using a domain list, specify the `-o` flag with the name of the output file.
210
211
212 ```bash
213 > cat domains.txt
214 hackerone.com
215 google.com
216
217 > subfinder -dL domains.txt -o ~/path/to/output.txt
218 > ls ~/path/to/
219
220 output.txt
221 ```
222
223 If you want upload your data to chaos dataset, you can use `-cd` flag with your scan, chaos will resolve all the input and add valid subdomains to public dataset, which you can access on the go using [chaos-client](https://github.com/projectdiscovery/chaos-client)
224
225 ```bash
226 > subfinder -d hackerone.com -cd
227
228 root@b0x:~# subfinder -d hackerone.com -cd
229
230 www.hackerone.com
231 api.hackerone.com
232 go.hackerone.com
233 hackerone.com
234 staging.hackerone.com
235 [INF] Input processed successfully and subdomains with valid records will be updated to chaos dataset.
236 ```
237
238 You can also get output in json format using `-oJ` switch. This switch saves the output in the JSON lines format.
239
240 If you use the JSON format, or the `Host:IP` format, then it becomes mandatory for you to use the **-nW** format as resolving is essential for these output format. By default, resolving the found subdomains is disabled.
241
242 ```bash
243 > subfinder -d hackerone.com -o output.json -oJ -nW
244 > cat output.json
245
246 {"host":"www.hackerone.com","ip":"104.16.99.52"}
247 {"host":"mta-sts.hackerone.com","ip":"185.199.108.153"}
248 {"host":"hackerone.com","ip":"104.16.100.52"}
249 {"host":"mta-sts.managed.hackerone.com","ip":"185.199.110.153"}
250 ```
251
252 You can specify custom resolvers too.
253 ```bash
254 > subfinder -d freelancer.com -o result.txt -nW -v -r 8.8.8.8,1.1.1.1
255 > subfinder -d freelancer.com -o result.txt -nW -v -rL resolvers.txt
256 ```
257
258 **The new highlight of this release is the addition of stdin/stdout features.** Now, domains can be piped to subfinder and enumeration can be ran on them. For example -
259
260 ```bash
261 > echo hackerone.com | subfinder -v
262 > cat targets.txt | subfinder -v
177 info.hackerone.com
178 mta-sts.managed.hackerone.com
179 events.hackerone.com
180
181 [INF] Found 18 subdomains for hackerone.com in 3 seconds 672 milliseconds
263182 ```
264183
265184 The subdomains discovered can be piped to other tools too. For example, you can pipe the subdomains discovered by subfinder to [httpx](https://github.com/projectdiscovery/httpx), which will then find running HTTP servers on the host.
266185
267 ```bash
268 > echo hackerone.com | subfinder -silent | httpx -silent
186 ```console
187 echo hackerone.com | subfinder -silent | httpx -silent
269188
270189 http://hackerone.com
271190 http://www.hackerone.com
275194 http://mta-sts.managed.hackerone.com
276195 ```
277196
278 ## Running in a Docker Container
279
280 You can use the official dockerhub image at [subfinder](https://hub.docker.com/r/projectdiscovery/subfinder). Simply run -
281
282 ```bash
283 > docker pull projectdiscovery/subfinder
284 ```
285
286 The above command will pull the latest tagged release from the dockerhub repository.
287
288 If you want to build the container yourself manually, git clone the repo, then build and run the following commands
289
290 - Clone the repo using `git clone https://github.com/projectdiscovery/subfinder.git`
291 - Build your docker container
292 ```bash
293 docker build -t projectdiscovery/subfinder .
294 ```
295
296 - After building the container using either way, run the following -
297 ```bash
298 docker run -it projectdiscovery/subfinder
299 ```
300 > The above command is the same as running `-h`
301
302 If you are using docker, you need to first create your directory structure holding subfinder configuration file. After modifying the default config.yaml file, you can run:
303
304 ```bash
305 > mkdir -p $HOME/.config/subfinder
306 > cp config.yaml $HOME/.config/subfinder/config.yaml
307 > nano $HOME/.config/subfinder/config.yaml
308 ```
309
310 After that, you can pass it as a volume using the following sample command.
311 ```bash
312 > docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d freelancer.com
313 ```
314
315 For example, this runs the tool against uber.com and output the results to your host file system:
316 ```bash
317 docker run -v $HOME/.config/subfinder:/root/.config/subfinder -it projectdiscovery/subfinder -d uber.com > uber.com.txt
318 ```
197 <table>
198 <tr>
199 <td>
200
201 ## Subfinder with docker
202
203 Pull the latest tagged [subfinder](https://hub.docker.com/r/projectdiscovery/subfinder) docker image:
204
205 ```sh
206 docker pull projectdiscovery/subfinder:latest
207 ```
208
209 Running subfinder using docker image:
210
211 ```sh
212 docker run projectdiscovery/subfinder:latest -d hackerone.com
213 ```
214
215 Running subfinder using docker image with local config file:
216
217 ```sh
218 docker run -v $HOME/.config/subfinder:/root/.config/subfinder -t projectdiscovery/subfinder -d hackerone.com
219 ```
220
221 </td>
222 </tr>
223 </table>
224
225 <table>
226 <tr>
227 <td>
228
229 ## Subfinder Go library
230
231 Usage example:
232
233 ```go
234 package main
235
236 import (
237 "bytes"
238 "context"
239 "fmt"
240 "io"
241 "log"
242
243 "github.com/projectdiscovery/subfinder/v2/pkg/passive"
244 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
245 "github.com/projectdiscovery/subfinder/v2/pkg/runner"
246 )
247
248 func main() {
249 runnerInstance, err := runner.NewRunner(&runner.Options{
250 Threads: 10, // Thread controls the number of threads to use for active enumerations
251 Timeout: 30, // Timeout is the seconds to wait for sources to respond
252 MaxEnumerationTime: 10, // MaxEnumerationTime is the maximum amount of time in mins to wait for enumeration
253 Resolvers: resolve.DefaultResolvers, // Use the default list of resolvers by marshaling it to the config
254 Sources: passive.DefaultSources, // Use the default list of passive sources
255 AllSources: passive.DefaultAllSources, // Use the default list of all passive sources
256 Recursive: passive.DefaultRecursiveSources, // Use the default list of recursive sources
257 Providers: &runner.Providers{}, // Use empty api keys for all providers
258 })
259
260 buf := bytes.Buffer{}
261 err = runnerInstance.EnumerateSingleDomain(context.Background(), "projectdiscovery.io", []io.Writer{&buf})
262 if err != nil {
263 log.Fatal(err)
264 }
265
266 data, err := io.ReadAll(&buf)
267 if err != nil {
268 log.Fatal(err)
269 }
270
271 fmt.Printf("%s", data)
272 }
273 ```
274
275 </td>
276 </tr>
277 </table>
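For readers who want to drive the library over more than one domain, here is a minimal self-contained sketch built only from the calls shown in the example above. The multi-domain loop and the reuse of a single runner instance are illustrative assumptions, not documented behaviour; the domain names are placeholders.

```go
package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"log"

	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
)

func main() {
	// Same options as the library example above.
	runnerInstance, err := runner.NewRunner(&runner.Options{
		Threads:            10,
		Timeout:            30,
		MaxEnumerationTime: 10,
		Resolvers:          resolve.DefaultResolvers,
		Sources:            passive.DefaultSources,
		AllSources:         passive.DefaultAllSources,
		Recursive:          passive.DefaultRecursiveSources,
		Providers:          &runner.Providers{},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Looping over several domains with one runner is an assumption made
	// for illustration; the example above only shows a single call.
	for _, domain := range []string{"projectdiscovery.io", "example.com"} {
		buf := bytes.Buffer{}
		if err := runnerInstance.EnumerateSingleDomain(context.Background(), domain, []io.Writer{&buf}); err != nil {
			log.Printf("enumeration failed for %s: %v", domain, err)
			continue
		}
		fmt.Printf("results for %s:\n%s", domain, buf.String())
	}
}
```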
278
279 ### Resources
280
281 - [Recon with Me !!!](https://dhiyaneshgeek.github.io/bug/bounty/2020/02/06/recon-with-me/)
319282
320283 # License
321284
322 subfinder is made with 🖤 by the [projectdiscovery](https://projectdiscovery.io) team. Community contributions have made the project what it is. See the **[Thanks.md](https://github.com/projectdiscovery/subfinder/blob/master/THANKS.md)** file for more details.
285 `subfinder` is made with 🖤 by the [projectdiscovery](https://projectdiscovery.io) team. Community contributions have made the project what it is. See the **[Thanks.md](https://github.com/projectdiscovery/subfinder/blob/master/THANKS.md)** file for more details.
323286
324287 Read the disclaimer for usage at [DISCLAIMER.md](https://github.com/projectdiscovery/subfinder/blob/master/DISCLAIMER.md) and [contact us](mailto:[email protected]) for any API removal.
33
44 - All the contributors at [CONTRIBUTORS](https://github.com/projectdiscovery/subfinder/graphs/contributors) who made subfinder what it is.
55
6 We'd like to thank some additional amazing people, wo contributed a lot in subfinder's journey -
6 We'd like to thank some additional amazing people, who contributed a lot in subfinder's journey -
77
8 - @infosec-au - Donating to the project
9 - @codingo - Initial work on the project, managing it, lot of work!
10 - @picatz - Improving the structure of the project a lot. New ideas!
8 - [@vzamanillo](https://github.com/vzamanillo) - For adding multiple features and overall project improvements.
9 - [@infosec-au](https://github.com/infosec-au) - Donating to the project.
10 - [@codingo](https://github.com/codingo) - Initial work on the project, managing it, lot of work!
11 - [@picatz](https://github.com/picatz) - Improving the structure of the project a lot. New ideas!
+0
-21
cmd/subfinder/main.go
0 package main
1
2 import (
3 "github.com/projectdiscovery/gologger"
4 "github.com/projectdiscovery/subfinder/pkg/runner"
5 )
6
7 func main() {
8 // Parse the command line flags and read config files
9 options := runner.ParseOptions()
10
11 runner, err := runner.NewRunner(options)
12 if err != nil {
13 gologger.Fatalf("Could not create runner: %s\n", err)
14 }
15
16 err = runner.RunEnumeration()
17 if err != nil {
18 gologger.Fatalf("Could not run enumeration: %s\n", err)
19 }
20 }
+0
-68
config.yaml
0 resolvers:
1 - 1.1.1.1
2 - 1.0.0.1
3 - 8.8.8.8
4 - 8.8.4.4
5 - 9.9.9.9
6 - 9.9.9.10
7 - 77.88.8.8
8 - 77.88.8.1
9 - 208.67.222.222
10 - 208.67.220.220
11 sources:
12 - alienvault
13 - archiveis
14 - binaryedge
15 - bufferover
16 - censys
17 - certspotter
18 - certspotterold
19 - commoncrawl
20 - crtsh
21 - dnsdumpster
22 - dnsdb
23 - entrust
24 - github
25 - googleter
26 - hackertarget
27 - intelx
28 - ipv4info
29 - passivetotal
30 - rapiddns
31 - securitytrails
32 - shodan
33 - sitedossier
34 - sublist3r
35 - spyse
36 - threatcrowd
37 - threatminer
38 - urlscan
39 - virustotal
40 - waybackarchive
41 - zoomeye
42 censys:
43 - <key-here>
44 binaryedge:
45 - <key-here>
46 certspotter:
47 - <key-here>
48 github:
49 - <token-here>
50 intelx:
51 - <public.intelx.io:key-here>
52 passivetotal:
53 - <email:key-here>
54 securitytrails:
55 - <key-here>
56 virustotal:
57 - <key-here>
58 urlscan:
59 - <key-here>
60 chaos:
61 - <key-here>
62 spyse:
63 - <key-here>
64 shodan:
65 - <key-here>
66 dnsdb:
67 - <key-here>
+0
-15
go.mod
0 module github.com/projectdiscovery/subfinder
1
2 go 1.14
3
4 require (
5 github.com/json-iterator/go v1.1.9
6 github.com/lib/pq v1.6.0
7 github.com/m-mizutani/urlscan-go v1.0.0
8 github.com/miekg/dns v1.1.29
9 github.com/pkg/errors v0.9.1
10 github.com/projectdiscovery/gologger v1.0.0
11 github.com/rs/xid v1.2.1
12 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
13 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c
14 )
+0
-87
go.sum
0 github.com/alexbrainman/sspi v0.0.0-20180613141037-e580b900e9f5/go.mod h1:976q2ETgjT2snVCf2ZaBnyBbVoPERGjUz+0sofzEfro=
1 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
2 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
3 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
4 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
5 github.com/gorilla/sessions v1.2.0/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
6 github.com/hashicorp/go-uuid v1.0.2 h1:cfejS+Tpcp13yd5nYHWDI6qVCny6wyX2Mt5SGur2IGE=
7 github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
8 github.com/jcmturner/aescts/v2 v2.0.0 h1:9YKLH6ey7H4eDBXW8khjYslgyqG2xZikXP0EQFKrle8=
9 github.com/jcmturner/aescts/v2 v2.0.0/go.mod h1:AiaICIRyfYg35RUkr8yESTqvSy7csK90qZ5xfvvsoNs=
10 github.com/jcmturner/dnsutils/v2 v2.0.0 h1:lltnkeZGL0wILNvrNiVCR6Ro5PGU/SeBvVO/8c/iPbo=
11 github.com/jcmturner/dnsutils/v2 v2.0.0/go.mod h1:b0TnjGOvI/n42bZa+hmXL+kFJZsFT7G4t3HTlQ184QM=
12 github.com/jcmturner/gofork v1.0.0 h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=
13 github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/UM3ncEo0o=
14 github.com/jcmturner/goidentity/v6 v6.0.1 h1:VKnZd2oEIMorCTsFBnJWbExfNN7yZr3EhJAxwOkZg6o=
15 github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg=
16 github.com/jcmturner/gokrb5/v8 v8.2.0 h1:lzPl/30ZLkTveYsYZPKMcgXc8MbnE6RsTd4F9KgiLtk=
17 github.com/jcmturner/gokrb5/v8 v8.2.0/go.mod h1:T1hnNppQsBtxW0tCHMHTkAt8n/sABdzZgZdoFrZaZNM=
18 github.com/jcmturner/rpc/v2 v2.0.2 h1:gMB4IwRXYsWw4Bc6o/az2HJgFUA1ffSh90i26ZJ6Xl0=
19 github.com/jcmturner/rpc/v2 v2.0.2/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
20 github.com/json-iterator/go v1.1.9 h1:9yzud/Ht36ygwatGx56VwCZtlI/2AD15T1X2sjSuGns=
21 github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
22 github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
23 github.com/k0kubun/pp v2.3.0+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
24 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
25 github.com/lib/pq v1.6.0 h1:I5DPxhYJChW9KYc66se+oKFFQX6VuQrKiprsX6ivRZc=
26 github.com/lib/pq v1.6.0/go.mod h1:4vXEAYvW1fRQ2/FhZ78H73A60MHw1geSm145z2mdY1g=
27 github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381 h1:bqDmpDG49ZRnB5PcgP0RXtQvnMSgIF14M7CBd2shtXs=
28 github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
29 github.com/m-mizutani/urlscan-go v1.0.0 h1:+fTiSRCQXdy3EM1BgO5gmAHFWbccTDdoEKy9Fa7m9xo=
30 github.com/m-mizutani/urlscan-go v1.0.0/go.mod h1:ppEBT0e/xv0bPcVWKev4cYG7Ey8933JsOzEzovxGMjI=
31 github.com/mattn/go-colorable v0.1.0/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
32 github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
33 github.com/miekg/dns v1.1.29 h1:xHBEhR+t5RzcFJjBLJlax2daXOrTYtr9z4WdKEfWFzg=
34 github.com/miekg/dns v1.1.29/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
35 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
36 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
37 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
38 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
39 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
40 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
41 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
42 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
43 github.com/projectdiscovery/gologger v1.0.0 h1:XAQ8kHeVKXMjY4rLGh7eT5+oHU077BNEvs7X6n+vu1s=
44 github.com/projectdiscovery/gologger v1.0.0/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
45 github.com/rs/xid v1.2.1 h1:mhH9Nq+C1fY2l1XIpgxIiUOfNpRBYH1kKcr+qfKgjRc=
46 github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
47 github.com/sirupsen/logrus v1.3.0 h1:hI/7Q+DtNZ2kINb6qt/lS+IyXnHQe9e90POfeewL/ME=
48 github.com/sirupsen/logrus v1.3.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
49 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
50 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
51 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
52 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
53 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
54 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
55 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
56 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
57 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
58 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
59 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
60 golang.org/x/crypto v0.0.0-20200117160349-530e935923ad/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
61 golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4 h1:QmwruyY+bKbDDL0BaglrbZABEali68eoMFhTZpCjYVA=
62 golang.org/x/crypto v0.0.0-20200311171314-f7b00557c8c4/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
63 golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
64 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
65 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
66 golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
67 golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa h1:F+8P+gmewFQYRk6JoLQLwjBCTu3mcIURZfNkVweuRKA=
68 golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
69 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
70 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
71 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
72 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
73 golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe h1:6fAMxZRR6sl1Uq8U61gxU+kPTs2tR8uOySCbBP7BN/M=
74 golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
75 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
76 golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
77 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
78 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
79 gopkg.in/jcmturner/aescts.v1 v1.0.1/go.mod h1:nsR8qBOg+OucoIW+WMhB3GspUQXq9XorLnQb9XtvcOo=
80 gopkg.in/jcmturner/dnsutils.v1 v1.0.1/go.mod h1:m3v+5svpVOhtFAP/wSz+yzh4Mc0Fg7eRhxkJMWSIz9Q=
81 gopkg.in/jcmturner/goidentity.v3 v3.0.0/go.mod h1:oG2kH0IvSYNIu80dVAyu/yoefjq1mNfM5bm88whjWx4=
82 gopkg.in/jcmturner/gokrb5.v7 v7.5.0/go.mod h1:l8VISx+WGYp+Fp7KRbsiUuXTTOnxIc3Tuvyavf11/WM=
83 gopkg.in/jcmturner/rpc.v1 v1.1.0/go.mod h1:YIdkC4XfD6GXbzje11McwsDuOlZQSb9W4vfLvuNnlv8=
84 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
85 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c h1:grhR+C34yXImVGp7EzNk+DTIk+323eIUWOmEevy6bDo=
86 gopkg.in/yaml.v3 v3.0.0-20200605160147-a5ece683394c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+0
-4
pkg/passive/doc.go
0 // Package passive provides capability for doing passive subdomain
1 // enumeration on targets.
2 package passive
3
+0
-58
pkg/passive/passive.go
0 package passive
1
2 import (
3 "context"
4 "fmt"
5 "sync"
6 "time"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
12 // EnumerateSubdomains enumerates all the subdomains for a given domain
13 func (a *Agent) EnumerateSubdomains(domain string, keys subscraping.Keys, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
14 results := make(chan subscraping.Result)
15
16 go func() {
17 session, err := subscraping.NewSession(domain, keys, timeout)
18 if err != nil {
19 results <- subscraping.Result{Type: subscraping.Error, Error: fmt.Errorf("could not init passive session for %s: %s", domain, err)}
20 }
21
22 ctx, cancel := context.WithTimeout(context.Background(), maxEnumTime)
23
24 timeTaken := make(map[string]string)
25 timeTakenMutex := &sync.Mutex{}
26
27 wg := &sync.WaitGroup{}
28 // Run each source in parallel on the target domain
29 for source, runner := range a.sources {
30 wg.Add(1)
31
32 now := time.Now()
33 go func(source string, runner subscraping.Source) {
34 for resp := range runner.Run(ctx, domain, session) {
35 results <- resp
36 }
37
38 duration := time.Now().Sub(now)
39 timeTakenMutex.Lock()
40 timeTaken[source] = fmt.Sprintf("Source took %s for enumeration\n", duration)
41 timeTakenMutex.Unlock()
42
43 wg.Done()
44 }(source, runner)
45 }
46 wg.Wait()
47
48 for source, data := range timeTaken {
49 gologger.Verbosef(data, source)
50 }
51
52 close(results)
53 cancel()
54 }()
55
56 return results
57 }
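For context on the channel-based API exposed by this (now removed) file, a minimal consumption sketch might look like the following. It assumes the `passive.New` constructor and `DefaultSources` list shown in pkg/passive/sources.go below; the empty `subscraping.Keys{}` value and the target domain are placeholders, and only `Result.Type` and `Result.Error` are taken from the code above.

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/projectdiscovery/subfinder/pkg/passive"
	"github.com/projectdiscovery/subfinder/pkg/subscraping"
)

func main() {
	// Build an agent from the default source list (no exclusions);
	// an empty Keys value is an illustrative assumption.
	agent := passive.New(passive.DefaultSources, nil)
	results := agent.EnumerateSubdomains("example.com", subscraping.Keys{}, 30, 10*time.Minute)

	// EnumerateSubdomains closes the channel once every source has finished,
	// so a plain range loop drains it.
	for result := range results {
		if result.Type == subscraping.Error {
			log.Printf("source error: %v", result.Error)
			continue
		}
		fmt.Printf("%+v\n", result)
	}
}
```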
+0
-158
pkg/passive/sources.go
0 package passive
1
2 import (
3 "github.com/projectdiscovery/subfinder/pkg/subscraping"
4 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/alienvault"
5 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/archiveis"
6 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/binaryedge"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/bufferover"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/censys"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotter"
10 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/certspotterold"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/commoncrawl"
12 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/crtsh"
13 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdb"
14 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/dnsdumpster"
15 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/entrust"
16 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/github"
17 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/hackertarget"
18 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/intelx"
19 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/ipv4info"
20 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/passivetotal"
21 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/rapiddns"
22 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/securitytrails"
23 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/shodan"
24 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sitedossier"
25 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/spyse"
26 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/sublist3r"
27 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatcrowd"
28 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/threatminer"
29 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/urlscan"
30 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/virustotal"
31 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/waybackarchive"
32 "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/zoomeye"
33 )
34
35 // DefaultSources contains the list of sources used by default
36 var DefaultSources = []string{
37 "alienvault",
38 "archiveis",
39 "binaryedge",
40 "bufferover",
41 "censys",
42 "certspotter",
43 "certspotterold",
44 "commoncrawl",
45 "crtsh",
46 "dnsdumpster",
47 "dnsdb",
48 "entrust",
49 "github",
50 "hackertarget",
51 "ipv4info",
52 "intelx",
53 "passivetotal",
54 "rapiddns",
55 "securitytrails",
56 "shodan",
57 "sitedossier",
58 "spyse",
59 "sublist3r",
60 "threatcrowd",
61 "threatminer",
62 "urlscan",
63 "virustotal",
64 "waybackarchive",
65 "zoomeye",
66 }
67
68 // Agent is a struct for running passive subdomain enumeration
69 // against a given host. It wraps subscraping package and provides
70 // a layer to build upon.
71 type Agent struct {
72 sources map[string]subscraping.Source
73 }
74
75 // New creates a new agent for passive subdomain discovery
76 func New(sources []string, exclusions []string) *Agent {
77 // Create the agent, insert the sources and remove the excluded sources
78 agent := &Agent{sources: make(map[string]subscraping.Source)}
79
80 agent.addSources(sources)
81 agent.removeSources(exclusions)
82
83 return agent
84 }
85
86 // addSources adds the given list of sources to the source array
87 func (a *Agent) addSources(sources []string) {
88 for _, source := range sources {
89 switch source {
90 case "alienvault":
91 a.sources[source] = &alienvault.Source{}
92 case "archiveis":
93 a.sources[source] = &archiveis.Source{}
94 case "binaryedge":
95 a.sources[source] = &binaryedge.Source{}
96 case "bufferover":
97 a.sources[source] = &bufferover.Source{}
98 case "censys":
99 a.sources[source] = &censys.Source{}
100 case "certspotter":
101 a.sources[source] = &certspotter.Source{}
102 case "certspotterold":
103 a.sources[source] = &certspotterold.Source{}
104 case "commoncrawl":
105 a.sources[source] = &commoncrawl.Source{}
106 case "crtsh":
107 a.sources[source] = &crtsh.Source{}
108 case "dnsdumpster":
109 a.sources[source] = &dnsdumpster.Source{}
110 case "dnsdb":
111 a.sources[source] = &dnsdb.Source{}
112 case "entrust":
113 a.sources[source] = &entrust.Source{}
114 case "github":
115 a.sources[source] = &github.Source{}
116 case "hackertarget":
117 a.sources[source] = &hackertarget.Source{}
118 case "ipv4info":
119 a.sources[source] = &ipv4info.Source{}
120 case "intelx":
121 a.sources[source] = &intelx.Source{}
122 case "passivetotal":
123 a.sources[source] = &passivetotal.Source{}
124 case "rapiddns":
125 a.sources[source] = &rapiddns.Source{}
126 case "securitytrails":
127 a.sources[source] = &securitytrails.Source{}
128 case "shodan":
129 a.sources[source] = &shodan.Source{}
130 case "sitedossier":
131 a.sources[source] = &sitedossier.Source{}
132 case "spyse":
133 a.sources[source] = &spyse.Source{}
134 case "sublist3r":
135 a.sources[source] = &sublist3r.Source{}
136 case "threatcrowd":
137 a.sources[source] = &threatcrowd.Source{}
138 case "threatminer":
139 a.sources[source] = &threatminer.Source{}
140 case "urlscan":
141 a.sources[source] = &urlscan.Source{}
142 case "virustotal":
143 a.sources[source] = &virustotal.Source{}
144 case "waybackarchive":
145 a.sources[source] = &waybackarchive.Source{}
146 case "zoomeye":
147 a.sources[source] = &zoomeye.Source{}
148 }
149 }
150 }
151
152 // removeSources deletes the given sources from the source map
153 func (a *Agent) removeSources(sources []string) {
154 for _, source := range sources {
155 delete(a.sources, source)
156 }
157 }
+0
-59
pkg/resolve/client.go
0 package resolve
1
2 import (
3 "bufio"
4 "math/rand"
5 "os"
6 "time"
7 )
8
9 // DefaultResolvers contains the default list of resolvers known to be good
10 var DefaultResolvers = []string{
11 "1.1.1.1", // Cloudflare primary
12 "1.0.0.1", // Cloudlfare secondary
13 "8.8.8.8", // Google primary
14 "8.8.4.4", // Google secondary
15 "9.9.9.9", // Quad9 Primary
16 "9.9.9.10", // Quad9 Secondary
17 "77.88.8.8", // Yandex Primary
18 "77.88.8.1", // Yandex Secondary
19 "208.67.222.222", // OpenDNS Primary
20 "208.67.220.220", // OpenDNS Secondary
21 }
22
23 // Resolver is a struct for resolving DNS names
24 type Resolver struct {
25 resolvers []string
26 rand *rand.Rand
27 }
28
29 // New creates a new resolver struct with the default resolvers
30 func New() *Resolver {
31 return &Resolver{
32 resolvers: []string{},
33 rand: rand.New(rand.NewSource(time.Now().UnixNano())),
34 }
35 }
36
37 // AppendResolversFromFile appends the resolvers read from a file to the list of resolvers
38 func (r *Resolver) AppendResolversFromFile(file string) error {
39 f, err := os.Open(file)
40 if err != nil {
41 return err
42 }
43 scanner := bufio.NewScanner(f)
44 for scanner.Scan() {
45 text := scanner.Text()
46 if text == "" {
47 continue
48 }
49 r.resolvers = append(r.resolvers, text)
50 }
51 f.Close()
52 return scanner.Err()
53 }
54
55 // AppendResolversFromSlice appends the slice to the list of resolvers
56 func (r *Resolver) AppendResolversFromSlice(list []string) {
57 r.resolvers = append(r.resolvers, list...)
58 }
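Because New starts with an empty resolver list, callers populate it themselves, as pkg/runner/initialize.go does later in this diff. A minimal sketch, with a placeholder resolver file path:

package main

import (
    "log"

    "github.com/projectdiscovery/subfinder/pkg/resolve"
)

func main() {
    resolver := resolve.New()

    // Fall back to the built-in list when no custom resolvers are supplied.
    resolver.AppendResolversFromSlice(resolve.DefaultResolvers)

    // Optionally merge resolvers from a user-supplied file, one address per line.
    if err := resolver.AppendResolversFromFile("/tmp/resolvers.txt"); err != nil {
        log.Printf("could not read resolver file: %s", err)
    }
}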
pkg/resolve/doc.go (0 additions, 3 deletions)
0 // Package resolve is used to handle resolving records
1 // It also handles wildcard subdomains and rotating resolvers.
2 package resolve
pkg/resolve/resolve.go (0 additions, 150 deletions)
0 package resolve
1
2 import (
3 "sync"
4
5 "github.com/miekg/dns"
6 "github.com/rs/xid"
7 )
8
9 const (
10 maxResolveRetries = 5
11 maxWildcardChecks = 3
12 )
13
14 // ResolutionPool is a pool of resolvers created for resolving subdomains
15 // for a given host.
16 type ResolutionPool struct {
17 *Resolver
18 Tasks chan string
19 Results chan Result
20 wg *sync.WaitGroup
21 removeWildcard bool
22
23 wildcardIPs map[string]struct{}
24 }
25
26 // Result contains the result for a host resolution
27 type Result struct {
28 Type ResultType
29 Host string
30 IP string
31 Error error
32 }
33
34 // ResultType is the type of result found
35 type ResultType int
36
37 // Types of data result can return
38 const (
39 Subdomain ResultType = iota
40 Error
41 )
42
43 // NewResolutionPool creates a pool of resolvers for resolving subdomains of a given domain
44 func (r *Resolver) NewResolutionPool(workers int, removeWildcard bool) *ResolutionPool {
45 resolutionPool := &ResolutionPool{
46 Resolver: r,
47 Tasks: make(chan string),
48 Results: make(chan Result),
49 wg: &sync.WaitGroup{},
50 removeWildcard: removeWildcard,
51 wildcardIPs: make(map[string]struct{}),
52 }
53
54 go func() {
55 for i := 0; i < workers; i++ {
56 resolutionPool.wg.Add(1)
57 go resolutionPool.resolveWorker()
58 }
59 resolutionPool.wg.Wait()
60 close(resolutionPool.Results)
61 }()
62
63 return resolutionPool
64 }
65
66 // InitWildcards initializes the wildcard ips map
67 func (r *ResolutionPool) InitWildcards(domain string) error {
68 for i := 0; i < maxWildcardChecks; i++ {
69 uid := xid.New().String()
70
71 hosts, err := r.getARecords(uid + "." + domain)
72 if err != nil {
73 return err
74 }
75
76 // Append all wildcard ips found for domains
77 for _, host := range hosts {
78 r.wildcardIPs[host] = struct{}{}
79 }
80 }
81 return nil
82 }
83
84 func (r *ResolutionPool) resolveWorker() {
85 tasks: for task := range r.Tasks {
86 if !r.removeWildcard {
87 r.Results <- Result{Type: Subdomain, Host: task, IP: ""}
88 continue
89 }
90
91 hosts, err := r.getARecords(task)
92 if err != nil {
93 r.Results <- Result{Type: Error, Error: err}
94 continue
95 }
96
97 if len(hosts) == 0 {
98 continue
99 }
100
101 for _, host := range hosts {
102 // Skip the whole task if any resolved ip exists in the wildcard ips map
103 if _, ok := r.wildcardIPs[host]; ok {
104 continue tasks
105 }
106 }
107
108 r.Results <- Result{Type: Subdomain, Host: task, IP: hosts[0]}
109 }
110 r.wg.Done()
111 }
112
113 // getARecords gets all the A records for a given host
114 func (r *ResolutionPool) getARecords(host string) ([]string, error) {
115 var iteration int
116
117 m := new(dns.Msg)
118 m.Id = dns.Id()
119 m.RecursionDesired = true
120 m.Question = make([]dns.Question, 1)
121 m.Question[0] = dns.Question{
122 Name: dns.Fqdn(host),
123 Qtype: dns.TypeA,
124 Qclass: dns.ClassINET,
125 }
126 exchange:
127 iteration++
128 in, err := dns.Exchange(m, r.resolvers[r.rand.Intn(len(r.resolvers))]+":53")
129 if err != nil {
130 // Retry in case of I/O error
131 if iteration <= maxResolveRetries {
132 goto exchange
133 }
134 return nil, err
135 }
136 // Treat a non-success response code as no records rather than an error
137 if in != nil && in.Rcode != dns.RcodeSuccess {
138 return nil, nil
139 }
140
141 var hosts []string
142 for _, record := range in.Answer {
143 if t, ok := record.(*dns.A); ok {
144 hosts = append(hosts, t.A.String())
145 }
146 }
147
148 return hosts, nil
149 }
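The intended flow for the pool, mirroring its use in pkg/runner/enumerate.go below, is: initialize the wildcard map, feed candidate hosts on Tasks, close Tasks, then drain Results (which is closed once every worker exits). A minimal sketch with placeholder hostnames:

package main

import (
    "fmt"

    "github.com/projectdiscovery/subfinder/pkg/resolve"
)

func main() {
    resolver := resolve.New()
    resolver.AppendResolversFromSlice(resolve.DefaultResolvers)

    // Ten workers, with wildcard filtering enabled.
    pool := resolver.NewResolutionPool(10, true)
    if err := pool.InitWildcards("example.com"); err != nil {
        fmt.Println("could not detect wildcards:", err)
    }

    // Feed candidate hosts and close the channel so the workers can exit.
    go func() {
        for _, host := range []string{"www.example.com", "mail.example.com"} {
            pool.Tasks <- host
        }
        close(pool.Tasks)
    }()

    // Results is closed once every worker has finished.
    for result := range pool.Results {
        if result.Type == resolve.Subdomain {
            fmt.Println(result.Host, result.IP)
        }
    }
}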
pkg/runner/banners.go (0 additions, 57 deletions)
0 package runner
1
2 import (
3 "github.com/projectdiscovery/gologger"
4 "github.com/projectdiscovery/subfinder/pkg/passive"
5 "github.com/projectdiscovery/subfinder/pkg/resolve"
6 )
7
8 const banner = `
9 _ __ _ _
10 ____ _| |__ / _(_)_ _ __| |___ _ _
11 (_-< || | '_ \ _| | ' \/ _ / -_) '_|
12 /__/\_,_|_.__/_| |_|_||_\__,_\___|_| v2
13 `
14
15 // Version is the current version of subfinder
16 const Version = `2.3.8`
17
18 // showBanner is used to show the banner to the user
19 func showBanner() {
20 gologger.Printf("%s\n", banner)
21 gologger.Printf("\t\tprojectdiscovery.io\n\n")
22
23 gologger.Labelf("Use with caution. You are responsible for your actions\n")
24 gologger.Labelf("Developers assume no liability and are not responsible for any misuse or damage.\n")
25 gologger.Labelf("By using subfinder, you also agree to the terms of the APIs used.\n\n")
26 }
27
28 // normalRunTasks runs the normal startup tasks
29 func (options *Options) normalRunTasks() {
30 configFile, err := UnmarshalRead(options.ConfigFile)
31 if err != nil {
32 gologger.Fatalf("Could not read configuration file %s: %s\n", options.ConfigFile, err)
33 }
34 options.YAMLConfig = configFile
35 }
36
37 // firstRunTasks runs the housekeeping tasks performed
38 // when the program is run for the first time
39 func (options *Options) firstRunTasks() {
40 // Create the configuration file and display information
41 // about it to the user.
42 config := ConfigFile{
43 // Use the default list of resolvers by marshalling it to the config
44 Resolvers: resolve.DefaultResolvers,
45 // Use the default list of passive sources
46 Sources: passive.DefaultSources,
47 }
48
49 err := config.MarshalWrite(options.ConfigFile)
50 if err != nil {
51 gologger.Fatalf("Could not write configuration file to %s: %s\n", options.ConfigFile, err)
52 }
53 options.YAMLConfig = config
54
55 gologger.Infof("Configuration file saved to %s\n", options.ConfigFile)
56 }
pkg/runner/config.go (0 additions, 169 deletions)
0 package runner
1
2 import (
3 "math/rand"
4 "os"
5 "strings"
6 "time"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 "gopkg.in/yaml.v3"
10 )
11
12 // ConfigFile contains the fields stored in the configuration file
13 type ConfigFile struct {
14 // Resolvers contains the list of resolvers to use while resolving
15 Resolvers []string `yaml:"resolvers,omitempty"`
16 // Sources contains a list of sources to use for enumeration
17 Sources []string `yaml:"sources,omitempty"`
18 // ExcludeSources contains the sources to not include in the enumeration process
19 ExcludeSources []string `yaml:"exclude-sources,omitempty"`
20 // API keys for different sources
21 Binaryedge []string `yaml:"binaryedge"`
22 Censys []string `yaml:"censys"`
23 Certspotter []string `yaml:"certspotter"`
24 Chaos []string `yaml:"chaos"`
25 DNSDB []string `yaml:"dnsdb"`
26 GitHub []string `yaml:"github"`
27 IntelX []string `yaml:"intelx"`
28 PassiveTotal []string `yaml:"passivetotal"`
29 SecurityTrails []string `yaml:"securitytrails"`
30 Shodan []string `yaml:"shodan"`
31 Spyse []string `yaml:"spyse"`
32 URLScan []string `yaml:"urlscan"`
33 Virustotal []string `yaml:"virustotal"`
34 ZoomEye []string `yaml:"zoomeye"`
35 }
36
37 // GetConfigDirectory gets the subfinder config directory for a user
38 func GetConfigDirectory() (string, error) {
39 // Seed the random number generator
40 rand.Seed(time.Now().UnixNano())
41
42 var config string
43
44 directory, err := os.UserHomeDir()
45 if err != nil {
46 return config, err
47 }
48 config = directory + "/.config/subfinder"
49 // Create the config directory for subfinder if it does not already exist
50 os.MkdirAll(config, os.ModePerm)
51
52 return config, nil
53 }
54
55 // CheckConfigExists checks if the config file exists in the given path
56 func CheckConfigExists(configPath string) bool {
57 if _, err := os.Stat(configPath); err == nil {
58 return true
59 } else if os.IsNotExist(err) {
60 return false
61 }
62 return false
63 }
64
65 // MarshalWrite writes the marshalled yaml config to disk
66 func (c ConfigFile) MarshalWrite(file string) error {
67 f, err := os.OpenFile(file, os.O_WRONLY|os.O_CREATE, 0755)
68 if err != nil {
69 return err
70 }
71
72 // Indent the spaces too
73 enc := yaml.NewEncoder(f)
74 enc.SetIndent(4)
75 err = enc.Encode(&c)
76 f.Close()
77 return err
78 }
79
80 // UnmarshalRead reads the yaml config file from disk and unmarshals it
81 func UnmarshalRead(file string) (ConfigFile, error) {
82 config := ConfigFile{}
83
84 f, err := os.Open(file)
85 if err != nil {
86 return config, err
87 }
88 err = yaml.NewDecoder(f).Decode(&config)
89 f.Close()
90 return config, err
91 }
92
93 // GetKeys gets the API keys from the config file and creates a Keys struct.
94 // One key is picked at random from the list supplied for each source.
95 // Keys that require two values are stored as a single colon-separated (:) entry.
96 func (c ConfigFile) GetKeys() subscraping.Keys {
97 keys := subscraping.Keys{}
98
99 if len(c.Binaryedge) > 0 {
100 keys.Binaryedge = c.Binaryedge[rand.Intn(len(c.Binaryedge))]
101 }
102
103 if len(c.Censys) > 0 {
104 censysKeys := c.Censys[rand.Intn(len(c.Censys))]
105 parts := strings.Split(censysKeys, ":")
106 if len(parts) == 2 {
107 keys.CensysToken = parts[0]
108 keys.CensysSecret = parts[1]
109 }
110 }
111
112 if len(c.Certspotter) > 0 {
113 keys.Certspotter = c.Certspotter[rand.Intn(len(c.Certspotter))]
114 }
115 if len(c.Chaos) > 0 {
116 keys.Chaos = c.Chaos[rand.Intn(len(c.Chaos))]
117 }
118 if len(c.DNSDB) > 0 {
119 keys.DNSDB = c.DNSDB[rand.Intn(len(c.DNSDB))]
120 }
121 if len(c.GitHub) > 0 {
122 keys.GitHub = c.GitHub
123 }
124
125 if len(c.IntelX) > 0 {
126 intelxKeys := c.IntelX[rand.Intn(len(c.IntelX))]
127 parts := strings.Split(intelxKeys, ":")
128 if len(parts) == 2 {
129 keys.IntelXHost = parts[0]
130 keys.IntelXKey = parts[1]
131 }
132 }
133
134 if len(c.PassiveTotal) > 0 {
135 passiveTotalKeys := c.PassiveTotal[rand.Intn(len(c.PassiveTotal))]
136 parts := strings.Split(passiveTotalKeys, ":")
137 if len(parts) == 2 {
138 keys.PassiveTotalUsername = parts[0]
139 keys.PassiveTotalPassword = parts[1]
140 }
141 }
142
143 if len(c.SecurityTrails) > 0 {
144 keys.Securitytrails = c.SecurityTrails[rand.Intn(len(c.SecurityTrails))]
145 }
146 if len(c.Shodan) > 0 {
147 keys.Shodan = c.Shodan[rand.Intn(len(c.Shodan))]
148 }
149 if len(c.Spyse) > 0 {
150 keys.Spyse = c.Spyse[rand.Intn(len(c.Spyse))]
151 }
152 if len(c.URLScan) > 0 {
153 keys.URLScan = c.URLScan[rand.Intn(len(c.URLScan))]
154 }
155 if len(c.Virustotal) > 0 {
156 keys.Virustotal = c.Virustotal[rand.Intn(len(c.Virustotal))]
157 }
158 if len(c.ZoomEye) > 0 {
159 zoomEyeKeys := c.ZoomEye[rand.Intn(len(c.ZoomEye))]
160 parts := strings.Split(zoomEyeKeys, ":")
161 if len(parts) == 2 {
162 keys.ZoomEyeUsername = parts[0]
163 keys.ZoomEyePassword = parts[1]
164 }
165 }
166
167 return keys
168 }
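To make the colon convention concrete, a minimal sketch of GetKeys with placeholder credentials: single-value keys are taken from their list, while two-part keys are split on the colon into their two components (entries that do not split into exactly two parts are ignored):

package main

import (
    "fmt"

    "github.com/projectdiscovery/subfinder/pkg/runner"
)

func main() {
    config := runner.ConfigFile{
        // Single-value keys are used as-is; one entry is picked from the list.
        Binaryedge: []string{"be-key-1", "be-key-2"},
        // Two-part keys are stored as a single colon-separated entry.
        Censys:       []string{"censys-token:censys-secret"},
        PassiveTotal: []string{"user@example.com:passivetotal-key"},
    }

    keys := config.GetKeys()
    fmt.Println(keys.Binaryedge)   // one of the two binaryedge entries
    fmt.Println(keys.CensysToken)  // "censys-token"
    fmt.Println(keys.CensysSecret) // "censys-secret"
}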
pkg/runner/config_test.go (0 additions, 22 deletions)
0 package runner
1
2 import (
3 "os"
4 "testing"
5
6 "github.com/stretchr/testify/assert"
7 )
8
9 func TestConfigGetDirectory(t *testing.T) {
10 directory, err := GetConfigDirectory()
11 if err != nil {
12 t.Fatalf("Expected nil got %v while getting home\n", err)
13 }
14 home, err := os.UserHomeDir()
15 if err != nil {
16 t.Fatalf("Expected nil got %v while getting dir\n", err)
17 }
18 config := home + "/.config/subfinder"
19
20 assert.Equal(t, directory, config, "Directory and config should be equal")
21 }
pkg/runner/doc.go (0 additions, 3 deletions)
0 // Package runner implements the mechanism to drive the
1 // subdomain enumeration process
2 package runner
pkg/runner/enumerate.go (0 additions, 180 deletions)
0 package runner
1
2 import (
3 "bytes"
4 "os"
5 "strings"
6 "sync"
7 "time"
8
9 "github.com/projectdiscovery/gologger"
10 "github.com/projectdiscovery/subfinder/pkg/resolve"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
14 // EnumerateSingleDomain performs subdomain enumeration against a single domain
15 func (r *Runner) EnumerateSingleDomain(domain, output string, append bool) error {
16 gologger.Infof("Enumerating subdomains for %s\n", domain)
17
18 // Get the API keys for sources from the configuration
19 // and also create the active resolving engine for the domain.
20 keys := r.options.YAMLConfig.GetKeys()
21
22 // Check if the user has asked to remove wildcards explicitly.
23 // If yes, create the resolution pool and get the wildcards for the current domain
24 var resolutionPool *resolve.ResolutionPool
25 if r.options.RemoveWildcard {
26 resolutionPool = r.resolverClient.NewResolutionPool(r.options.Threads, r.options.RemoveWildcard)
27 err := resolutionPool.InitWildcards(domain)
28 if err != nil {
29 // Log the error but don't quit.
30 gologger.Warningf("Could not get wildcards for domain %s: %s\n", domain, err)
31 }
32 }
33
34 // Run the passive subdomain enumeration
35 passiveResults := r.passiveAgent.EnumerateSubdomains(domain, keys, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)
36
37 wg := &sync.WaitGroup{}
38 wg.Add(1)
39 // Create a unique map for filtering duplicate subdomains out
40 uniqueMap := make(map[string]struct{})
41 // Process the results in a separate goroutine
42 go func() {
43 for result := range passiveResults {
44 switch result.Type {
45 case subscraping.Error:
46 gologger.Warningf("Could not run source %s: %s\n", result.Source, result.Error)
47 case subscraping.Subdomain:
48 // Validate the subdomain found and strip any wildcard prefix from it
49 if !strings.HasSuffix(result.Value, "."+domain) {
50 continue
51 }
52 subdomain := strings.ReplaceAll(strings.ToLower(result.Value), "*.", "")
53
54 // Check if the subdomain is a duplicate. If not,
55 // send the subdomain for resolution.
56 if _, ok := uniqueMap[subdomain]; ok {
57 continue
58 }
59 uniqueMap[subdomain] = struct{}{}
60
61 // Log the verbose message about the found subdomain and send the
62 // host for resolution to the resolution pool
63 gologger.Verbosef("%s\n", result.Source, subdomain)
64
65 // If the user asked to remove wildcard then send on the resolve
66 // queue. Otherwise, if mode is not verbose print the results on
67 // the screen as they are discovered.
68 if r.options.RemoveWildcard {
69 resolutionPool.Tasks <- subdomain
70 }
71
72 if !r.options.Verbose {
73 gologger.Silentf("%s\n", subdomain)
74 }
75 }
76 }
77 // Close the task channel only if wildcards are asked to be removed
78 if r.options.RemoveWildcard {
79 close(resolutionPool.Tasks)
80 }
81 wg.Done()
82 }()
83
84 // If the user asked to remove wildcards, listen from the results
85 // queue and write to the map. At the end, print the found results to the screen
86 foundResults := make(map[string]string)
87 if r.options.RemoveWildcard {
88 // Process the results coming from the resolutions pool
89 for result := range resolutionPool.Results {
90 switch result.Type {
91 case resolve.Error:
92 gologger.Warningf("Could not resolve host: %s\n", result.Error)
93 case resolve.Subdomain:
94 // Add the found subdomain to a map.
95 if _, ok := foundResults[result.Host]; !ok {
96 foundResults[result.Host] = result.IP
97 }
98 }
99 }
100 }
101 wg.Wait()
102
103 // If verbose mode was used, then now print all the
104 // found subdomains on the screen together.
105 if r.options.Verbose {
106 if r.options.RemoveWildcard {
107 for result := range foundResults {
108 gologger.Silentf("%s\n", result)
109 }
110 } else {
111 for result := range uniqueMap {
112 gologger.Silentf("%s\n", result)
113 }
114 }
115 }
116 // In case the user has specified to upload to chaos, write everything to a temporary buffer and upload
117 if r.options.ChaosUpload {
118 var buf = &bytes.Buffer{}
119 err := WriteHostOutput(uniqueMap, buf)
120 // If an error occurs, do not interrupt; continue and check whether the user specified an output file
121 if err != nil {
122 gologger.Errorf("Could not prepare results for chaos %s\n", err)
123 } else {
124 // no error in writing host output, upload to chaos
125 err = r.UploadToChaos(buf)
126 if err != nil {
127 gologger.Errorf("Could not upload results to chaos %s\n", err)
128 } else {
129 gologger.Infof("Input processed successfully and subdomains with valid records will be added to the chaos dataset.\n")
130 }
131 // clear buffer
132 buf = nil
133 }
134 }
135 // In case the user has given an output file, write all the found
136 // subdomains to the output file.
137 if output != "" {
138 // If the output format is json, append .json
139 // else append .txt
140 if r.options.OutputDirectory != "" {
141 if r.options.JSON {
142 output = output + ".json"
143 } else {
144 output = output + ".txt"
145 }
146 }
147
148 var file *os.File
149 var err error
150 if append {
151 file, err = os.OpenFile(output, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
152 } else {
153 file, err = os.Create(output)
154 }
155 if err != nil {
156 gologger.Errorf("Could not create file %s for %s: %s\n", output, domain, err)
157 return err
158 }
159
160 // Write the output to the file depending upon user requirement
161 if r.options.HostIP {
162 err = WriteHostIPOutput(foundResults, file)
163 } else if r.options.JSON {
164 err = WriteJSONOutput(foundResults, file)
165 } else {
166 if r.options.RemoveWildcard {
167 err = WriteHostOutputNoWildcard(foundResults, file)
168 } else {
169 err = WriteHostOutput(uniqueMap, file)
170 }
171 }
172 if err != nil {
173 gologger.Errorf("Could not write results to file %s for %s: %s\n", output, domain, err)
174 }
175 file.Close()
176 return err
177 }
178 return nil
179 }
pkg/runner/initialize.go (0 additions, 52 deletions)
0 package runner
1
2 import (
3 "strings"
4
5 "github.com/projectdiscovery/subfinder/pkg/passive"
6 "github.com/projectdiscovery/subfinder/pkg/resolve"
7 )
8
9 // initializePassiveEngine creates the passive engine and loads sources etc
10 func (r *Runner) initializePassiveEngine() {
11 var sources, exclusions []string
12
13 // If there are any sources from CLI, only use them
14 // Otherwise, use the yaml file sources
15 if r.options.Sources != "" {
16 sources = append(sources, strings.Split(r.options.Sources, ",")...)
17 } else {
18 sources = append(sources, r.options.YAMLConfig.Sources...)
19 }
20
21 if r.options.ExcludeSources != "" {
22 exclusions = append(exclusions, strings.Split(r.options.ExcludeSources, ",")...)
23 } else {
24 exclusions = append(exclusions, r.options.YAMLConfig.ExcludeSources...)
25 }
26
27 r.passiveAgent = passive.New(sources, exclusions)
28 }
29
30 // initializeActiveEngine creates the resolver used to resolve the found subdomains
31 func (r *Runner) initializeActiveEngine() error {
32 r.resolverClient = resolve.New()
33
34 // If the file has been provided, read resolvers from the file
35 if r.options.ResolverList != "" {
36 err := r.resolverClient.AppendResolversFromFile(r.options.ResolverList)
37 if err != nil {
38 return err
39 }
40 }
41
42 var resolvers []string
43
44 if r.options.Resolvers != "" {
45 resolvers = append(resolvers, strings.Split(r.options.Resolvers, ",")...)
46 } else {
47 resolvers = append(resolvers, r.options.YAMLConfig.Resolvers...)
48 }
49 r.resolverClient.AppendResolversFromSlice(resolvers)
50 return nil
51 }
pkg/runner/options.go (0 additions, 143 deletions)
0 package runner
1
2 import (
3 "flag"
4 "os"
5 "path"
6 "reflect"
7 "strings"
8
9 "github.com/projectdiscovery/gologger"
10 )
11
12 // Options contains the configuration options for tuning
13 // the subdomain enumeration process.
14 type Options struct {
15 Verbose bool // Verbose flag indicates whether to show verbose output or not
16 NoColor bool // No-Color disables the colored output
17 Threads int // Threads controls the number of threads to use for active enumerations
18 Timeout int // Timeout is the number of seconds to wait for sources to respond
19 MaxEnumerationTime int // MaxEnumerationTime is the maximum amount of time in minutes to wait for enumeration
20 Domain string // Domain is the domain to find subdomains for
21 DomainsFile string // DomainsFile is the file containing list of domains to find subdomains for
22 ChaosUpload bool // ChaosUpload indicates whether to upload results to the Chaos API
23 Output string // Output is the file to write found subdomains to.
24 OutputDirectory string // OutputDirectory is the directory to write results to in case list of domains is given
25 JSON bool // JSON specifies whether to use json for output format or text file
26 HostIP bool // HostIP specifies whether to write subdomains in host:ip format
27 Silent bool // Silent suppresses any extra text and only writes subdomains to screen
28 Sources string // Sources contains a comma-separated list of sources to use for enumeration
29 ListSources bool // ListSources specifies whether to list all available sources
30 ExcludeSources string // ExcludeSources contains the comma-separated sources to not include in the enumeration process
31 Resolvers string // Resolvers is the comma-separated resolvers to use for enumeration
32 ResolverList string // ResolverList is a text file containing list of resolvers to use for enumeration
33 RemoveWildcard bool // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
34 ConfigFile string // ConfigFile contains the location of the config file
35 Stdin bool // Stdin specifies whether stdin input was given to the process
36 Version bool // Version specifies if we should just show version and exit
37
38 YAMLConfig ConfigFile // YAMLConfig contains the unmarshalled yaml config file
39 }
40
41 // ParseOptions parses the command line flags provided by a user
42 func ParseOptions() *Options {
43 options := &Options{}
44
45 config, err := GetConfigDirectory()
46 if err != nil {
47 // This should never be reached
48 gologger.Fatalf("Could not get user home: %s\n", err)
49 }
50
51 flag.BoolVar(&options.Verbose, "v", false, "Show Verbose output")
52 flag.BoolVar(&options.NoColor, "nC", false, "Don't Use colors in output")
53 flag.IntVar(&options.Threads, "t", 10, "Number of concurrent goroutines for resolving")
54 flag.IntVar(&options.Timeout, "timeout", 30, "Seconds to wait before timing out")
55 flag.IntVar(&options.MaxEnumerationTime, "max-time", 10, "Minutes to wait for enumeration results")
56 flag.StringVar(&options.Domain, "d", "", "Domain to find subdomains for")
57 flag.StringVar(&options.DomainsFile, "dL", "", "File containing list of domains to enumerate")
58 flag.BoolVar(&options.ChaosUpload, "cd", false, "Upload results to the Chaos API (api-key required)")
59 flag.StringVar(&options.Output, "o", "", "File to write output to (optional)")
60 flag.StringVar(&options.OutputDirectory, "oD", "", "Directory to write enumeration results to (optional)")
61 flag.BoolVar(&options.JSON, "oJ", false, "Write output in JSON lines Format")
62 flag.BoolVar(&options.HostIP, "oI", false, "Write output in Host,IP format")
63 flag.BoolVar(&options.Silent, "silent", false, "Show only subdomains in output")
64 flag.StringVar(&options.Sources, "sources", "", "Comma separated list of sources to use")
65 flag.BoolVar(&options.ListSources, "ls", false, "List all available sources")
66 flag.StringVar(&options.ExcludeSources, "exclude-sources", "", "List of sources to exclude from enumeration")
67 flag.StringVar(&options.Resolvers, "r", "", "Comma-separated list of resolvers to use")
68 flag.StringVar(&options.ResolverList, "rL", "", "Text file containing list of resolvers to use")
69 flag.BoolVar(&options.RemoveWildcard, "nW", false, "Remove Wildcard & Dead Subdomains from output")
70 flag.StringVar(&options.ConfigFile, "config", path.Join(config, "config.yaml"), "Configuration file for API Keys, etc")
71 flag.BoolVar(&options.Version, "version", false, "Show version of subfinder")
72 flag.Parse()
73
74 // Check if stdin pipe was given
75 options.Stdin = hasStdin()
76
77 // Read the inputs and configure the logging
78 options.configureOutput()
79
80 // Show the user the banner
81 showBanner()
82
83 if options.Version {
84 gologger.Infof("Current Version: %s\n", Version)
85 os.Exit(0)
86 }
87
88 // Check if the config file exists. If not, it means this is the
89 // first run of the program. Show the first run notices and initialize the config file.
90 // Else show the normal banner and read the yaml file into the config
91 if !CheckConfigExists(options.ConfigFile) {
92 options.firstRunTasks()
93 } else {
94 options.normalRunTasks()
95 }
96
97 if options.ListSources {
98 listSources(options)
99 os.Exit(0)
100 }
101
102 // Validate the options passed by the user and if any
103 // invalid options have been used, exit.
104 err = options.validateOptions()
105 if err != nil {
106 gologger.Fatalf("Program exiting: %s\n", err)
107 }
108
109 return options
110 }
111
112 func hasStdin() bool {
113 fi, err := os.Stdin.Stat()
114 if err != nil {
115 return false
116 }
117 if fi.Mode()&os.ModeNamedPipe == 0 {
118 return false
119 }
120 return true
121 }
122
123 func listSources(options *Options) {
124 gologger.Infof("Current list of available sources. [%d]\n", len(options.YAMLConfig.Sources))
125 gologger.Infof("Sources marked with an * need a key or token in order to work.\n")
126 gologger.Infof("You can modify %s to configure your keys / tokens.\n\n", options.ConfigFile)
127
128 keys := options.YAMLConfig.GetKeys()
129 needsKey := make(map[string]interface{})
130 keysElem := reflect.ValueOf(&keys).Elem()
131 for i := 0; i < keysElem.NumField(); i++ {
132 needsKey[strings.ToLower(keysElem.Type().Field(i).Name)] = keysElem.Field(i).Interface()
133 }
134
135 for _, source := range options.YAMLConfig.Sources {
136 message := "%s\n"
137 if _, ok := needsKey[source]; ok {
138 message = "%s *\n"
139 }
140 gologger.Silentf(message, source)
141 }
142 }
pkg/runner/runner.go (0 additions, 91 deletions)
0 package runner
1
2 import (
3 "bufio"
4 "io"
5 "os"
6 "path"
7
8 "github.com/projectdiscovery/subfinder/pkg/passive"
9 "github.com/projectdiscovery/subfinder/pkg/resolve"
10 )
11
12 // Runner is an instance of the subdomain enumeration
13 // client used to orchestrate the whole process.
14 type Runner struct {
15 options *Options
16 passiveAgent *passive.Agent
17 resolverClient *resolve.Resolver
18 }
19
20 // NewRunner creates a new runner struct instance by parsing
21 // the configuration options, configuring sources, reading lists
22 // and setting up loggers, etc.
23 func NewRunner(options *Options) (*Runner, error) {
24 runner := &Runner{options: options}
25
26 // Initialize the passive subdomain enumeration engine
27 runner.initializePassiveEngine()
28
29 // Initialize the active subdomain enumeration engine
30 err := runner.initializeActiveEngine()
31 if err != nil {
32 return nil, err
33 }
34
35 return runner, nil
36 }
37
38 // RunEnumeration runs the subdomain enumeration flow on the targets specified
39 func (r *Runner) RunEnumeration() error {
40 // Check if only a single domain is sent as input. Process the domain now.
41 if r.options.Domain != "" {
42 return r.EnumerateSingleDomain(r.options.Domain, r.options.Output, false)
43 }
44
45 // If we have a file with multiple domains as input, enumerate each of them
46 if r.options.DomainsFile != "" {
47 f, err := os.Open(r.options.DomainsFile)
48 if err != nil {
49 return err
50 }
51 err = r.EnumerateMultipleDomains(f)
52 f.Close()
53 return err
54 }
55
56 // If we have STDIN input, treat it as multiple domains
57 if r.options.Stdin {
58 return r.EnumerateMultipleDomains(os.Stdin)
59 }
60 return nil
61 }
62
63 // EnumerateMultipleDomains enumerates subdomains for multiple domains
64 // It keeps enumerating subdomains for each domain read until an error occurs or the input is exhausted
65 func (r *Runner) EnumerateMultipleDomains(reader io.Reader) error {
66 scanner := bufio.NewScanner(reader)
67 for scanner.Scan() {
68 domain := scanner.Text()
69 if domain == "" {
70 continue
71 }
72
73 var err error
74 // If the user has specified an output file, use that output file instead
75 // of creating a new output file for each domain. Else create a new file
76 // for each domain in the directory.
77 if r.options.Output != "" {
78 err = r.EnumerateSingleDomain(domain, r.options.Output, true)
79 } else if r.options.OutputDirectory != "" {
80 outputFile := path.Join(r.options.OutputDirectory, domain)
81 err = r.EnumerateSingleDomain(domain, outputFile, false)
82 } else {
83 err = r.EnumerateSingleDomain(domain, "", true)
84 }
85 if err != nil {
86 return err
87 }
88 }
89 return nil
90 }
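Taken together, the runner package is driven from a small entry point: parse options, construct the runner, run the enumeration. A minimal sketch of that wiring (the actual cmd/subfinder entry point may differ in detail):

package main

import (
    "github.com/projectdiscovery/gologger"
    "github.com/projectdiscovery/subfinder/pkg/runner"
)

func main() {
    // ParseOptions reads the CLI flags, loads (or creates) the yaml config
    // and validates the combination of options.
    options := runner.ParseOptions()

    r, err := runner.NewRunner(options)
    if err != nil {
        gologger.Fatalf("Could not create runner: %s\n", err)
    }

    // RunEnumeration dispatches to single-domain, file or stdin input.
    if err := r.RunEnumeration(); err != nil {
        gologger.Fatalf("Could not run enumeration: %s\n", err)
    }
}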
pkg/runner/utils.go (0 additions, 131 deletions)
0 package runner
1
2 import (
3 "bufio"
4 "crypto/tls"
5 "fmt"
6 "io"
7 "io/ioutil"
8 "net/http"
9 "strings"
10 "time"
11
12 jsoniter "github.com/json-iterator/go"
13 "github.com/pkg/errors"
14 )
15
16 // JSONResult contains the result for a host in JSON format
17 type JSONResult struct {
18 Host string `json:"host"`
19 IP string `json:"ip"`
20 }
21 // UploadToChaos uploads the discovered subdomains to the Chaos dataset
22 func (r *Runner) UploadToChaos(reader io.Reader) error {
23 httpClient := &http.Client{
24 Transport: &http.Transport{
25 MaxIdleConnsPerHost: 100,
26 MaxIdleConns: 100,
27 TLSClientConfig: &tls.Config{
28 InsecureSkipVerify: true,
29 },
30 },
31 Timeout: time.Duration(600) * time.Second, // 10 minutes - uploads may take long
32 }
33
34 request, err := http.NewRequest("POST", "https://dns.projectdiscovery.io/dns/add", reader)
35 if err != nil {
36 return errors.Wrap(err, "could not create request")
37 }
38 request.Header.Set("Authorization", r.options.YAMLConfig.GetKeys().Chaos)
39
40 resp, err := httpClient.Do(request)
41 if err != nil {
42 return errors.Wrap(err, "could not make request")
43 }
44 defer func() {
45 io.Copy(ioutil.Discard, resp.Body)
46 resp.Body.Close()
47 }()
48
49 if resp.StatusCode != 200 {
50 return fmt.Errorf("invalid status code received: %d", resp.StatusCode)
51 }
52 return nil
53 }
54
55 // WriteHostOutput writes the output list of subdomains to an io.Writer
56 func WriteHostOutput(results map[string]struct{}, writer io.Writer) error {
57 bufwriter := bufio.NewWriter(writer)
58 sb := &strings.Builder{}
59
60 for host := range results {
61 sb.WriteString(host)
62 sb.WriteString("\n")
63
64 _, err := bufwriter.WriteString(sb.String())
65 if err != nil {
66 bufwriter.Flush()
67 return err
68 }
69 sb.Reset()
70 }
71 return bufwriter.Flush()
72 }
73
74 // WriteHostOutputNoWildcard writes the output list of subdomains (with the nW flag set) to an io.Writer
75 func WriteHostOutputNoWildcard(results map[string]string, writer io.Writer) error {
76 bufwriter := bufio.NewWriter(writer)
77 sb := &strings.Builder{}
78
79 for host := range results {
80 sb.WriteString(host)
81 sb.WriteString("\n")
82
83 _, err := bufwriter.WriteString(sb.String())
84 if err != nil {
85 bufwriter.Flush()
86 return err
87 }
88 sb.Reset()
89 }
90 return bufwriter.Flush()
91 }
92
93 // WriteJSONOutput writes the output list of subdomains in JSON to an io.Writer
94 func WriteJSONOutput(results map[string]string, writer io.Writer) error {
95 encoder := jsoniter.NewEncoder(writer)
96
97 data := JSONResult{}
98
99 for host, ip := range results {
100 data.Host = host
101 data.IP = ip
102
103 err := encoder.Encode(&data)
104 if err != nil {
105 return err
106 }
107 }
108 return nil
109 }
110
111 // WriteHostIPOutput writes the output list of subdomains in host,ip format to an io.Writer
112 func WriteHostIPOutput(results map[string]string, writer io.Writer) error {
113 bufwriter := bufio.NewWriter(writer)
114 sb := &strings.Builder{}
115
116 for host, ip := range results {
117 sb.WriteString(host)
118 sb.WriteString(",")
119 sb.WriteString(ip)
120 sb.WriteString("\n")
121
122 _, err := bufwriter.WriteString(sb.String())
123 if err != nil {
124 bufwriter.Flush()
125 return err
126 }
127 sb.Reset()
128 }
129 return bufwriter.Flush()
130 }
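Each writer only needs a results map and an io.Writer, so the same helpers work against files, in-memory buffers (as the chaos upload above does), or stdout. A minimal sketch with placeholder data, ignoring errors for brevity:

package main

import (
    "os"

    "github.com/projectdiscovery/subfinder/pkg/runner"
)

func main() {
    // Placeholder results as produced by the resolution pool (host -> ip).
    resolved := map[string]string{
        "www.example.com":  "192.0.2.10",
        "mail.example.com": "192.0.2.11",
    }

    // host,ip lines
    _ = runner.WriteHostIPOutput(resolved, os.Stdout)

    // one JSON object per line
    _ = runner.WriteJSONOutput(resolved, os.Stdout)
}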
pkg/runner/validate.go (0 additions, 58 deletions)
0 package runner
1
2 import (
3 "errors"
4
5 "github.com/projectdiscovery/gologger"
6 )
7
8 // validateOptions validates the configuration options passed
9 func (options *Options) validateOptions() error {
10 // Check if domain, list of domains, or stdin info was provided.
11 // If none was provided, then return.
12 if options.Domain == "" && options.DomainsFile == "" && !options.Stdin {
13 return errors.New("no input list provided")
14 }
15
16 // Both verbose and silent flags were used
17 if options.Verbose && options.Silent {
18 return errors.New("both verbose and silent mode specified")
19 }
20
21 // Validate threads and options
22 if options.Threads == 0 {
23 return errors.New("threads cannot be zero")
24 }
25 if options.Timeout == 0 {
26 return errors.New("timeout cannot be zero")
27 }
28
29 // JSON cannot be used with hostIP
30 if options.JSON && options.HostIP {
31 return errors.New("hostip flag cannot be used with json flag")
32 }
33
34 // Always remove wildcard with hostip and json
35 if options.HostIP && !options.RemoveWildcard {
36 return errors.New("hostip flag must be used with RemoveWildcard option")
37 }
38 if options.JSON && !options.RemoveWildcard {
39 return errors.New("JSON flag must be used with RemoveWildcard option")
40 }
41
42 return nil
43 }
44
45 // configureOutput configures the output on the screen
46 func (options *Options) configureOutput() {
47 // If the user desires verbose output, show verbose output
48 if options.Verbose {
49 gologger.MaxLevel = gologger.Verbose
50 }
51 if options.NoColor {
52 gologger.UseColors = false
53 }
54 if options.Silent {
55 gologger.MaxLevel = gologger.Silent
56 }
57 }
pkg/subscraping/agent.go (0 additions, 96 deletions)
0 package subscraping
1
2 import (
3 "context"
4 "crypto/tls"
5 "fmt"
6 "io"
7 "io/ioutil"
8 "net/http"
9 "net/url"
10 "time"
11 )
12
13 // NewSession creates a new session object for a domain
14 func NewSession(domain string, keys Keys, timeout int) (*Session, error) {
15 client := &http.Client{
16 Transport: &http.Transport{
17 MaxIdleConns: 100,
18 MaxIdleConnsPerHost: 100,
19 TLSClientConfig: &tls.Config{
20 InsecureSkipVerify: true,
21 },
22 },
23 Timeout: time.Duration(timeout) * time.Second,
24 }
25
26 session := &Session{
27 Client: client,
28 Keys: keys,
29 }
30
31 // Create a new extractor object for the current domain
32 extractor, err := NewSubdomainExtractor(domain)
33 session.Extractor = extractor
34
35 return session, err
36 }
37
38 // NormalGetWithContext makes a normal GET request to a URL with context
39 func (s *Session) NormalGetWithContext(ctx context.Context, url string) (*http.Response, error) {
40 req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
41 if err != nil {
42 return nil, err
43 }
44
45 // Use a fixed user agent; randomizing user agents sometimes causes issues with sources
46 req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
47 req.Header.Set("Accept", "*/*")
48 req.Header.Set("Accept-Language", "en")
49
50 return httpRequestWrapper(s.Client, req)
51 }
52
53 // Get makes a GET request to a URL
54 func (s *Session) Get(ctx context.Context, url string, cookies string, headers map[string]string) (*http.Response, error) {
55 req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
56 if err != nil {
57 return nil, err
58 }
59
60 req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
61 req.Header.Set("Accept", "*/*")
62 req.Header.Set("Accept-Language", "en")
63
64 if cookies != "" {
65 req.Header.Set("Cookie", cookies)
66 }
67
68 if headers != nil {
69 for key, value := range headers {
70 req.Header.Set(key, value)
71 }
72 }
73
74 return httpRequestWrapper(s.Client, req)
75 }
76 // DiscardHttpResponse drains and closes the response body so the underlying connection can be reused
77 func (s *Session) DiscardHttpResponse(response *http.Response) {
78 if response != nil {
79 io.Copy(ioutil.Discard, response.Body)
80 response.Body.Close()
81 }
82 }
83
84 func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
85 resp, err := client.Do(request)
86 if err != nil {
87 return nil, err
88 }
89
90 if resp.StatusCode != http.StatusOK {
91 requestUrl, _ := url.QueryUnescape(request.URL.String())
92 return resp, fmt.Errorf("Unexpected status code %d received from %s", resp.StatusCode, requestUrl)
93 }
94 return resp, nil
95 }
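A single source can also be consumed on its own once a Session has been built with NewSession. A minimal sketch using the alienvault source that follows (it needs no API key; the domain is a placeholder):

package main

import (
    "context"
    "fmt"

    "github.com/projectdiscovery/subfinder/pkg/subscraping"
    "github.com/projectdiscovery/subfinder/pkg/subscraping/sources/alienvault"
)

func main() {
    // Empty keys are fine for sources that do not require credentials.
    session, err := subscraping.NewSession("example.com", subscraping.Keys{}, 30)
    if err != nil {
        fmt.Println("could not create session:", err)
        return
    }

    source := &alienvault.Source{}
    for result := range source.Run(context.Background(), "example.com", session) {
        switch result.Type {
        case subscraping.Error:
            fmt.Println("error:", result.Error)
        case subscraping.Subdomain:
            fmt.Println(result.Value)
        }
    }
}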
pkg/subscraping/sources/alienvault/alienvault.go (0 additions, 55 deletions)
0 package alienvault
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 type alienvaultResponse struct {
11 PassiveDNS []struct {
12 Hostname string `json:"hostname"`
13 } `json:"passive_dns"`
14 }
15
16 // Source is the passive scraping agent
17 type Source struct{}
18
19 // Run function returns all subdomains found with the service
20 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
21 results := make(chan subscraping.Result)
22
23 go func() {
24 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
25 if err != nil {
26 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
27 session.DiscardHttpResponse(resp)
28 close(results)
29 return
30 }
31
32 otxResp := &alienvaultResponse{}
33 // Get the response body and decode
34 err = json.NewDecoder(resp.Body).Decode(&otxResp)
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 resp.Body.Close()
38 close(results)
39 return
40 }
41 resp.Body.Close()
42 for _, record := range otxResp.PassiveDNS {
43 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record.Hostname}
44 }
45 close(results)
46 }()
47
48 return results
49 }
50
51 // Name returns the name of the source
52 func (s *Source) Name() string {
53 return "alienvault"
54 }
pkg/subscraping/sources/archiveis/archiveis.go (0 additions, 77 deletions)
0 // Package archiveis is an archive.is scraping engine in Go
1 package archiveis
2
3 import (
4 "context"
5 "io/ioutil"
6 "regexp"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // ArchiveIs holds the state for the archive.is scraping agent
12 type ArchiveIs struct {
13 Results chan subscraping.Result
14 Session *subscraping.Session
15 }
16
17 var reNext = regexp.MustCompile("<a id=\"next\" style=\".*\" href=\"(.*)\">&rarr;</a>")
18
19 func (a *ArchiveIs) enumerate(ctx context.Context, baseURL string) {
20 select {
21 case <-ctx.Done():
22 return
23 default:
24 }
25
26 resp, err := a.Session.NormalGetWithContext(ctx, baseURL)
27 if err != nil {
28 a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
29 a.Session.DiscardHttpResponse(resp)
30 return
31 }
32
33 // Get the response body
34 body, err := ioutil.ReadAll(resp.Body)
35 resp.Body.Close()
36 if err != nil {
37 a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Error, Error: err}
38 return
39 }
40
41 src := string(body)
42
43 for _, subdomain := range a.Session.Extractor.FindAllString(src, -1) {
44 a.Results <- subscraping.Result{Source: "archiveis", Type: subscraping.Subdomain, Value: subdomain}
45 }
46
47 match1 := reNext.FindStringSubmatch(src)
48 if len(match1) > 0 {
49 a.enumerate(ctx, match1[1])
50 }
51 }
52
53 // Source is the passive scraping agent
54 type Source struct{}
55
56 // Run function returns all subdomains found with the service
57 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
58 results := make(chan subscraping.Result)
59
60 aInstance := ArchiveIs{
61 Session: session,
62 Results: results,
63 }
64
65 go func() {
66 aInstance.enumerate(ctx, "http://archive.is/*."+domain)
67 close(aInstance.Results)
68 }()
69
70 return aInstance.Results
71 }
72
73 // Name returns the name of the source
74 func (s *Source) Name() string {
75 return "archiveis"
76 }
pkg/subscraping/sources/binaryedge/binaryedge.go (0 additions, 104 deletions)
0 package binaryedge
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 type binaryedgeResponse struct {
11 Subdomains []string `json:"events"`
12 Total int `json:"total"`
13 }
14
15 // Source is the passive scraping agent
16 type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.Binaryedge == "" {
24 close(results)
25 return
26 }
27
28 resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s", domain), "", map[string]string{"X-Key": session.Keys.Binaryedge})
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 session.DiscardHttpResponse(resp)
32 close(results)
33 return
34 }
35
36 response := new(binaryedgeResponse)
37 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 close(results)
42 return
43 }
44 resp.Body.Close()
45
46 for _, subdomain := range response.Subdomains {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
48 }
49
50 remaining := response.Total - 100
51 currentPage := 2
52
53 for {
54 further := s.getSubdomains(ctx, domain, &remaining, &currentPage, session, results)
55 if !further {
56 break
57 }
58 }
59 close(results)
60 }()
61
62 return results
63 }
64
65 // Name returns the name of the source
66 func (s *Source) Name() string {
67 return "binaryedge"
68 }
69
70 func (s *Source) getSubdomains(ctx context.Context, domain string, remaining, currentPage *int, session *subscraping.Session, results chan subscraping.Result) bool {
71 for {
72 select {
73 case <-ctx.Done():
74 return false
75 default:
76 resp, err := session.Get(ctx, fmt.Sprintf("https://api.binaryedge.io/v2/query/domains/subdomain/%s?page=%d", domain, *currentPage), "", map[string]string{"X-Key": session.Keys.Binaryedge})
77 if err != nil {
78 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
79 return false
80 }
81
82 response := binaryedgeResponse{}
83 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 return false
88 }
89 resp.Body.Close()
90
91 for _, subdomain := range response.Subdomains {
92 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
93 }
94
95 *remaining = *remaining - 100
96 if *remaining <= 0 {
97 return false
98 }
99 *currentPage++
100 return true
101 }
102 }
103 }
pkg/subscraping/sources/bufferover/bufferover.go (0 additions, 57 deletions)
0 // Package bufferover is a bufferover.run scraping engine in Go
1 package bufferover
2
3 import (
4 "context"
5 "fmt"
6 "io/ioutil"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 // Run enumeration against both the dns and tls bufferover endpoints (historical SONAR datasets)
20 s.getData(ctx, fmt.Sprintf("https://dns.bufferover.run/dns?q=.%s", domain), session, results)
21 s.getData(ctx, fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), session, results)
22
23 close(results)
24 }()
25
26 return results
27 }
28
29 func (s *Source) getData(ctx context.Context, URL string, session *subscraping.Session, results chan subscraping.Result) {
30 resp, err := session.NormalGetWithContext(ctx, URL)
31 if err != nil {
32 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
33 session.DiscardHttpResponse(resp)
34 return
35 }
36
37 body, err := ioutil.ReadAll(resp.Body)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 return
42 }
43 resp.Body.Close()
44
45 src := string(body)
46
47 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
49 }
50 return
51 }
52
53 // Name returns the name of the source
54 func (s *Source) Name() string {
55 return "bufferover"
56 }
pkg/subscraping/sources/censys/censys.go (0 additions, 96 deletions)
0 package censys
1
2 import (
3 "bytes"
4 "context"
5 "net/http"
6 "strconv"
7
8 jsoniter "github.com/json-iterator/go"
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
12 const maxCensysPages = 10
13
14 type resultsq struct {
15 Data []string `json:"parsed.extensions.subject_alt_name.dns_names"`
16 Data1 []string `json:"parsed.names"`
17 }
18
19 type response struct {
20 Results []resultsq `json:"results"`
21 Metadata struct {
22 Pages int `json:"pages"`
23 } `json:"metadata"`
24 }
25
26 // Source is the passive scraping agent
27 type Source struct{}
28
29 // Run function returns all subdomains found with the service
30 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
31 results := make(chan subscraping.Result)
32
33 go func() {
34 if session.Keys.CensysToken == "" || session.Keys.CensysSecret == "" {
35 close(results)
36 return
37 }
38 var response response
39
40 currentPage := 1
41 for {
42 var request = []byte(`{"query":"` + domain + `", "page":` + strconv.Itoa(currentPage) + `, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`)
43
44 req, err := http.NewRequestWithContext(ctx, "POST", "https://www.censys.io/api/v1/search/certificates", bytes.NewReader(request))
45 if err != nil {
46 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
47 close(results)
48 return
49 }
50 req.SetBasicAuth(session.Keys.CensysToken, session.Keys.CensysSecret)
51 req.Header.Set("Content-Type", "application/json")
52 req.Header.Set("Accept", "application/json")
53
54 resp, err := session.Client.Do(req)
55 if err != nil {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
57 close(results)
58 return
59 }
60
61 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
62 if err != nil {
63 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
64 resp.Body.Close()
65 close(results)
66 return
67 }
68 resp.Body.Close()
69
70 for _, res := range response.Results {
71 for _, part := range res.Data {
72 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
73 }
74 for _, part := range res.Data1 {
75 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
76 }
77 }
78 
79 // Exit the censys enumeration once the current page has been processed and
80 // either the last page or the maximum page count has been reached
81 if currentPage >= response.Metadata.Pages || currentPage >= maxCensysPages {
82 break
83 }
84 currentPage++
85 }
86 close(results)
87 }()
88
89 return results
90 }
91
92 // Name returns the name of the source
93 func (s *Source) Name() string {
94 return "censys"
95 }
pkg/subscraping/sources/certspotter/certspotter.go (0 additions, 101 deletions)
0 package certspotter
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 type certspotterObject struct {
11 ID string `json:"id"`
12 DNSNames []string `json:"dns_names"`
13 }
14
15 // Source is the passive scraping agent
16 type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.Certspotter == "" {
24 close(results)
25 return
26 }
27
28 resp, err := session.Get(ctx, fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 session.DiscardHttpResponse(resp)
32 close(results)
33 return
34 }
35
36 response := []certspotterObject{}
37 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 close(results)
42 return
43 }
44 resp.Body.Close()
45
46 for _, cert := range response {
47 for _, subdomain := range cert.DNSNames {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
49 }
50 }
51
52 // if the number of responses is zero, close the channel and return.
53 if len(response) == 0 {
54 close(results)
55 return
56 }
57
58 id := response[len(response)-1].ID
59 for {
60 reqURL := fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names&after=%s", domain, id)
61
62 resp, err := session.Get(ctx, reqURL, "", map[string]string{"Authorization": "Bearer " + session.Keys.Certspotter})
63 if err != nil {
64 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
65 close(results)
66 return
67 }
68
69 response := []certspotterObject{}
70 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
71 if err != nil {
72 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
73 resp.Body.Close()
74 close(results)
75 return
76 }
77 resp.Body.Close()
78
79 if len(response) == 0 {
80 break
81 }
82
83 for _, cert := range response {
84 for _, subdomain := range cert.DNSNames {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
86 }
87 }
88
89 id = response[len(response)-1].ID
90 }
91 close(results)
92 }()
93
94 return results
95 }
96
97 // Name returns the name of the source
98 func (s *Source) Name() string {
99 return "certspotter"
100 }
pkg/subscraping/sources/certspotterold/certspotterold.go (0 additions, 50 deletions)
0 package certspotterold
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://certspotter.com/api/v0/certs?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 body, err := ioutil.ReadAll(resp.Body)
27 if err != nil {
28 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
29 resp.Body.Close()
30 close(results)
31 return
32 }
33 resp.Body.Close()
34
35 src := string(body)
36
37 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
39 }
40 close(results)
41 }()
42
43 return results
44 }
45
46 // Name returns the name of the source
47 func (s *Source) Name() string {
48 return "certspotterold"
49 }
pkg/subscraping/sources/commoncrawl/commoncrawl.go (0 additions, 108 deletions)
0 package commoncrawl
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "net/url"
7 "strings"
8
9 jsoniter "github.com/json-iterator/go"
10 "github.com/projectdiscovery/subfinder/pkg/subscraping"
11 )
12
13 const indexURL = "https://index.commoncrawl.org/collinfo.json"
14
15 type indexResponse struct {
16 ID string `json:"id"`
17 APIURL string `json:"cdx-api"`
18 }
19
20 // Source is the passive scraping agent
21 type Source struct{}
22
23 var years = [...]string{"2020", "2019", "2018", "2017"}
24
25 // Run function returns all subdomains found with the service
26 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
27 results := make(chan subscraping.Result)
28
29 go func() {
30 resp, err := session.NormalGetWithContext(ctx, indexURL)
31 if err != nil {
32 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
33 session.DiscardHttpResponse(resp)
34 close(results)
35 return
36 }
37
38 indexes := []indexResponse{}
39 err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
40 if err != nil {
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
42 resp.Body.Close()
43 close(results)
44 return
45 }
46 resp.Body.Close()
47
48 searchIndexes := make(map[string]string)
49 for _, year := range years {
50 for _, index := range indexes {
51 if strings.Contains(index.ID, year) {
52 if _, ok := searchIndexes[year]; !ok {
53 searchIndexes[year] = index.APIURL
54 break
55 }
56 }
57 }
58 }
59
60 for _, apiURL := range searchIndexes {
61 further := s.getSubdomains(ctx, apiURL, domain, session, results)
62 if !further {
63 break
64 }
65 }
66 close(results)
67 }()
68
69 return results
70 }
71
72 // Name returns the name of the source
73 func (s *Source) Name() string {
74 return "commoncrawl"
75 }
76
77 func (s *Source) getSubdomains(ctx context.Context, searchURL string, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
78 for {
79 select {
80 case <-ctx.Done():
81 return false
82 default:
83 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("%s?url=*.%s&output=json", searchURL, domain))
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 return false
87 }
88
89 body, err := ioutil.ReadAll(resp.Body)
90 if err != nil {
91 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
92 resp.Body.Close()
93 return false
94 }
95 resp.Body.Close()
96
97 src, _ := url.QueryUnescape(string(body))
98
99 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
100 subdomain = strings.TrimPrefix(subdomain, "25") // strip a leading "25" left over from url-encoded "%25"
101
102 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
103 }
104 return true
105 }
106 }
107 }
pkg/subscraping/sources/crtsh/crtsh.go (0 additions, 93 deletions)
0 package crtsh
1
2 import (
3 "context"
4 "database/sql"
5 "fmt"
6 "io/ioutil"
7 "strings"
8
9 // postgres driver
10 _ "github.com/lib/pq"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
14 // Source is the passive scraping agent
15 type Source struct{}
16
17 // Run function returns all subdomains found with the service
18 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
19 results := make(chan subscraping.Result)
20
21 go func() {
22 found := s.getSubdomainsFromSQL(ctx, domain, session, results)
23 if found {
24 close(results)
25 return
26 }
27 _ = s.getSubdomainsFromHTTP(ctx, domain, session, results)
28 close(results)
29 }()
30
31 return results
32 }
33
34 func (s *Source) getSubdomainsFromSQL(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
35 db, err := sql.Open("postgres", "host=crt.sh user=guest dbname=certwatch sslmode=disable binary_parameters=yes")
36 if err != nil {
37 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
38 return false
39 }
40
41 pattern := "%." + domain
42 rows, err := db.Query(`SELECT DISTINCT ci.NAME_VALUE as domain
43 FROM certificate_identity ci
44 WHERE reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower($1))
45 ORDER BY ci.NAME_VALUE`, pattern)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 return false
49 }
50
51 var data string
52 // Parse all the rows getting subdomains
53 for rows.Next() {
54 err := rows.Scan(&data)
55 if err != nil {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
57 return false
58 }
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: data}
60 }
61 return true
62 }
63
64 func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
65 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
66 if err != nil {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
68 session.DiscardHttpResponse(resp)
69 return false
70 }
71
72 body, err := ioutil.ReadAll(resp.Body)
73 if err != nil {
74 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
75 resp.Body.Close()
76 return false
77 }
78 resp.Body.Close()
79
80 // Also replace all newlines
81 src := strings.Replace(string(body), "\\n", " ", -1)
82
83 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
84 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
85 }
86 return true
87 }
88
89 // Name returns the name of the source
90 func (s *Source) Name() string {
91 return "crtsh"
92 }
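
Every source in this package follows the same contract: Run returns a channel of Result values that the caller drains until the source closes it, and failures are reported in-band as Error results rather than by aborting the stream. A minimal consumer sketch under that assumption (the collect helper and the example package are illustrative, not part of subfinder):

package example

import (
	"context"
	"fmt"

	"github.com/projectdiscovery/subfinder/pkg/subscraping"
)

// collect drains a single source and separates subdomains from in-band errors.
func collect(ctx context.Context, src subscraping.Source, domain string, session *subscraping.Session) []string {
	var subdomains []string
	for result := range src.Run(ctx, domain, session) {
		switch result.Type {
		case subscraping.Error:
			// Sources report failures as results instead of closing the stream early.
			fmt.Printf("%s error: %v\n", result.Source, result.Error)
		case subscraping.Subdomain:
			subdomains = append(subdomains, result.Value)
		}
	}
	return subdomains
}
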
+0
-70
pkg/subscraping/sources/dnsdb/dnsdb.go
0 package dnsdb
1
2 import (
3 "bufio"
4 "context"
5 "encoding/json"
6 "fmt"
7 "strings"
8
9 "github.com/projectdiscovery/subfinder/pkg/subscraping"
10 )
11
12 type dnsdbResponse struct {
13 Name string `json:"rrname"`
14 }
15
16 // Source is the passive scraping agent
17 type Source struct{}
18
19 // Run function returns all subdomains found with the service
20 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
21 results := make(chan subscraping.Result)
22
23 if session.Keys.DNSDB == "" {
24 close(results)
25 } else {
26 headers := map[string]string{
27 "X-API-KEY": session.Keys.DNSDB,
28 "Accept": "application/json",
29 "Content-Type": "application/json",
30 }
31
32 go func() {
33 resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
34 if err != nil {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
36 session.DiscardHttpResponse(resp)
37 close(results)
38 return
39 }
40
41 defer resp.Body.Close()
42 // Get the response body
43 scanner := bufio.NewScanner(resp.Body)
44 for scanner.Scan() {
45 line := scanner.Text()
46 if line == "" {
47 continue
48 }
49 out := &dnsdbResponse{}
50 err := json.Unmarshal([]byte(line), out)
51 if err != nil {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
53 resp.Body.Close()
54 close(results)
55 return
56 }
57 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(out.Name, ".")}
58 out = nil
59 }
60 close(results)
61 }()
62 }
63 return results
64 }
65
66 // Name returns the name of the source
67 func (s *Source) Name() string {
68 return "DNSDB"
69 }
+0
-113
pkg/subscraping/sources/dnsdumpster/dnsdumpster.go
0 package dnsdumpster
1
2 import (
3 "context"
4 "io/ioutil"
5 "net"
6 "net/http"
7 "net/url"
8 "regexp"
9 "strings"
10 "time"
11
12 "github.com/projectdiscovery/subfinder/pkg/subscraping"
13 )
14
15 var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")
16
17 // getCSRFToken gets the CSRF Token from the page
18 func getCSRFToken(page string) string {
19 if subs := re.FindStringSubmatch(page); len(subs) == 2 {
20 return strings.TrimSpace(subs[1])
21 }
22 return ""
23 }
24
25 // postForm posts a form for a domain and returns the response
26 func postForm(token, domain string) (string, error) {
27 dial := net.Dialer{}
28 client := &http.Client{
29 Transport: &http.Transport{
30 DialContext: dial.DialContext,
31 TLSHandshakeTimeout: 10 * time.Second,
32 },
33 }
34 params := url.Values{
35 "csrfmiddlewaretoken": {token},
36 "targetip": {domain},
37 }
38
39 req, err := http.NewRequest("POST", "https://dnsdumpster.com/", strings.NewReader(params.Encode()))
40 if err != nil {
41 return "", err
42 }
43
44 // The CSRF token needs to be sent as a cookie
45 cookie := &http.Cookie{
46 Name: "csrftoken",
47 Domain: "dnsdumpster.com",
48 Value: token,
49 }
50 req.AddCookie(cookie)
51
52 req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36")
53 req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
54 req.Header.Set("Referer", "https://dnsdumpster.com")
55 req.Header.Set("X-CSRF-Token", token)
56
57 resp, err := client.Do(req)
58 if err != nil {
59 return "", err
60 }
61 // Now, grab the entire page
62 in, err := ioutil.ReadAll(resp.Body)
63 resp.Body.Close()
64 return string(in), err
65 }
66
67 // Source is the passive scraping agent
68 type Source struct{}
69
70 // Run function returns all subdomains found with the service
71 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
72 results := make(chan subscraping.Result)
73
74 go func() {
75 resp, err := session.NormalGetWithContext(ctx, "https://dnsdumpster.com/")
76 if err != nil {
77 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
78 session.DiscardHttpResponse(resp)
79 close(results)
80 return
81 }
82
83 body, err := ioutil.ReadAll(resp.Body)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 close(results)
88 return
89 }
90 resp.Body.Close()
91 csrfToken := getCSRFToken(string(body))
92
93 data, err := postForm(csrfToken, domain)
94 if err != nil {
95 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
96 close(results)
97 return
98 }
99
100 for _, subdomain := range session.Extractor.FindAllString(data, -1) {
101 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
102 }
103 close(results)
104 }()
105
106 return results
107 }
108
109 // Name returns the name of the source
110 func (s *Source) Name() string {
111 return "dnsdumpster"
112 }
+0
-53
pkg/subscraping/sources/entrust/entrust.go
0 package entrust
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "strings"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://ctsearch.entrust.com/api/v1/certificates?fields=issuerCN,subjectO,issuerDN,issuerO,subjectDN,signAlg,san,publicKeyType,publicKeySize,validFrom,validTo,sn,ev,logEntries.logName,subjectCNReversed,cert&domain=%s&includeExpired=true&exactMatch=false&limit=5000", domain))
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
39 subdomain = strings.TrimPrefix(subdomain, "u003d")
40
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
42 }
43 close(results)
44 }()
45
46 return results
47 }
48
49 // Name returns the name of the source
50 func (s *Source) Name() string {
51 return "entrust"
52 }
+0
-211
pkg/subscraping/sources/github/github.go
0 // GitHub search package, based on gwen001's https://github.com/gwen001/github-search github-subdomains
1 package github
2
3 import (
4 "context"
5 "fmt"
6 "io/ioutil"
7 "net/http"
8 "net/url"
9 "regexp"
10 "strconv"
11 "strings"
12 "time"
13
14 jsoniter "github.com/json-iterator/go"
15
16 "github.com/projectdiscovery/gologger"
17 "github.com/projectdiscovery/subfinder/pkg/subscraping"
18 "github.com/tomnomnom/linkheader"
19 )
20
21 type textMatch struct {
22 Fragment string `json:"fragment"`
23 }
24
25 type item struct {
26 Name string `json:"name"`
27 HtmlUrl string `json:"html_url"`
28 TextMatches []textMatch `json:"text_matches"`
29 }
30
31 type response struct {
32 TotalCount int `json:"total_count"`
33 Items []item `json:"items"`
34 }
35
36 // Source is the passive scraping agent
37 type Source struct{}
38
39 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
40 results := make(chan subscraping.Result)
41
42 go func() {
43 if len(session.Keys.GitHub) == 0 {
44 close(results)
45 return
46 }
47
48 tokens := NewTokenManager(session.Keys.GitHub)
49
50 // search on GitHub with exact match
51 searchURL := fmt.Sprintf("https://api.github.com/search/code?per_page=100&q=\"%s\"", domain)
52 s.enumerate(ctx, searchURL, s.DomainRegexp(domain), tokens, session, results)
53 close(results)
54 }()
55
56 return results
57 }
58
59 func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *regexp.Regexp, tokens *Tokens, session *subscraping.Session, results chan subscraping.Result) {
60 select {
61 case <-ctx.Done():
62 return
63 default:
64 }
65
66 token := tokens.Get()
67
68 if token.RetryAfter > 0 {
69 if len(tokens.pool) == 1 {
70 gologger.Verbosef("GitHub Search request rate limit exceeded, waiting for %d seconds before retry... \n", s.Name(), token.RetryAfter)
71 time.Sleep(time.Duration(token.RetryAfter) * time.Second)
72 } else {
73 token = tokens.Get()
74 }
75 }
76
77 headers := map[string]string{
78 "Accept": "application/vnd.github.v3.text-match+json",
79 "Authorization": "token " + token.Hash,
80 }
81
82 // Initial request to GitHub search
83 resp, err := session.Get(ctx, searchURL, "", headers)
84 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
85
86 if err != nil && !isForbidden {
87 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
88 session.DiscardHttpResponse(resp)
89 return
90 } else {
91 // Retry enumeration after Retry-After seconds when rate limit abuse is detected
92 ratelimitRemaining, _ := strconv.ParseInt(resp.Header.Get("X-Ratelimit-Remaining"), 10, 64)
93 if isForbidden && ratelimitRemaining == 0 {
94 retryAfterSeconds, _ := strconv.ParseInt(resp.Header.Get("Retry-After"), 10, 64)
95 tokens.setCurrentTokenExceeded(retryAfterSeconds)
96
97 s.enumerate(ctx, searchURL, domainRegexp, tokens, session, results)
98 } else {
99 // Links header, first, next, last...
100 linksHeader := linkheader.Parse(resp.Header.Get("Link"))
101
102 data := response{}
103
104 // Unmarshal JSON response
105 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
106 resp.Body.Close()
107 if err != nil {
108 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
109 return
110 }
111
112 // Response items iteration
113 for _, item := range data.Items {
114 resp, err := session.NormalGetWithContext(ctx, rawUrl(item.HtmlUrl))
115 if err != nil {
116 if resp == nil || resp.StatusCode != http.StatusNotFound {
117 session.DiscardHttpResponse(resp)
118 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
119 return
120 }
121 }
122
123 var subdomains []string
124
125 if resp.StatusCode == http.StatusOK {
126 // Get the item code from the raw file url
127 code, err := ioutil.ReadAll(resp.Body)
128 resp.Body.Close()
129 if err != nil {
130 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
131 return
132 }
133 // Search for domain matches in the code
134 subdomains = append(subdomains, matches(domainRegexp, normalizeContent(string(code)))...)
135 }
136
137 // Text matches iteration per item
138 for _, textMatch := range item.TextMatches {
139 // Search for domain matches in the text fragment
140 subdomains = append(subdomains, matches(domainRegexp, normalizeContent(textMatch.Fragment))...)
141 }
142
143 for _, subdomain := range unique(subdomains) {
144 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
145 }
146 }
147
148 // Process the next link recursively
149 for _, link := range linksHeader {
150 if link.Rel == "next" {
151 nextUrl, err := url.QueryUnescape(link.URL)
152 if err != nil {
153 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
154 return
155 }
156 s.enumerate(ctx, nextUrl, domainRegexp, tokens, session, results)
157 }
158 }
159 }
160 }
161
162 }
163
164 // Normalize content before matching, query unescape, remove tabs and new line chars
165 func normalizeContent(content string) string {
166 normalizedContent, _ := url.QueryUnescape(content)
167 normalizedContent = strings.Replace(normalizedContent, "\\t", "", -1)
168 normalizedContent = strings.Replace(normalizedContent, "\\n", "", -1)
169 return normalizedContent
170 }
171
172 // Remove duplicates from string array
173 func unique(arr []string) []string {
174 occurred := map[string]bool{}
175 result := []string{}
176 for e := range arr {
177 if !occurred[arr[e]] {
178 occurred[arr[e]] = true
179 result = append(result, arr[e])
180 }
181 }
182 return result
183 }
184
185 // Find matches by regular expression in any content
186 func matches(regexp *regexp.Regexp, content string) []string {
187 var matches []string
188 match := regexp.FindAllString(content, -1)
189 if len(match) > 0 {
190 matches = unique(match)
191 }
192 return matches
193 }
194
195 // Raw URL to get the files code and match for subdomains
196 func rawUrl(htmlUrl string) string {
197 domain := strings.Replace(htmlUrl, "https://github.com/", "https://raw.githubusercontent.com/", -1)
198 return strings.Replace(domain, "/blob/", "/", -1)
199 }
200
201 // Domain regular expression to match subdomains in github files code
202 func (s *Source) DomainRegexp(domain string) *regexp.Regexp {
203 rdomain := strings.Replace(domain, ".", "\\.", -1)
204 return regexp.MustCompile("(\\w+[.])*" + rdomain)
205 }
206
207 // Name returns the name of the source
208 func (s *Source) Name() string {
209 return "github"
210 }
+0
-61
pkg/subscraping/sources/github/tokenmanager.go
0 package github
1
2 import "time"
3
4 type token struct {
5 Hash string
6 RetryAfter int64
7 ExceededTime time.Time
8 }
9
10 type Tokens struct {
11 current int
12 pool []token
13 }
14
15 func NewTokenManager(keys []string) *Tokens {
16 pool := []token{}
17 for _, key := range keys {
18 t := token{Hash: key, ExceededTime: time.Time{}, RetryAfter: 0}
19 pool = append(pool, t)
20 }
21
22 return &Tokens{
23 current: 0,
24 pool: pool,
25 }
26 }
27
28 func (r *Tokens) setCurrentTokenExceeded(retryAfter int64) {
29 if r.current >= len(r.pool) {
30 r.current = r.current % len(r.pool)
31 }
32 if r.pool[r.current].RetryAfter == 0 {
33 r.pool[r.current].ExceededTime = time.Now()
34 r.pool[r.current].RetryAfter = retryAfter
35 }
36 }
37
38 func (r *Tokens) Get() token {
39 resetExceededTokens(r)
40
41 if r.current >= len(r.pool) {
42 r.current = r.current % len(r.pool)
43 }
44
45 result := r.pool[r.current]
46 r.current++
47
48 return result
49 }
50
51 func resetExceededTokens(r *Tokens) {
52 for i, token := range r.pool {
53 if token.RetryAfter > 0 {
54 if int64(time.Since(token.ExceededTime)/time.Second) > token.RetryAfter {
55 r.pool[i].ExceededTime = time.Time{}
56 r.pool[i].RetryAfter = 0
57 }
58 }
59 }
60 }
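
The token manager above hands out GitHub API keys round-robin and parks a slot once it has been flagged as rate limited, clearing the flag again after the Retry-After window elapses. A small test-style sketch of that behaviour, written as if it sat next to tokenmanager.go (the key strings are invented):

package github

import "testing"

func TestTokenRotationSketch(t *testing.T) {
	tokens := NewTokenManager([]string{"ghp_keyA", "ghp_keyB"})

	first := tokens.Get() // keys are handed out round-robin
	if first.Hash != "ghp_keyA" {
		t.Fatalf("expected ghp_keyA, got %s", first.Hash)
	}

	// Mark the pool's current slot as exceeded with a 60 second Retry-After,
	// mirroring how enumerate() reacts to a 403 with X-Ratelimit-Remaining: 0.
	tokens.setCurrentTokenExceeded(60)

	// Rotation continues; resetExceededTokens clears the RetryAfter flag once
	// more than 60 seconds have passed since ExceededTime.
	second := tokens.Get()
	if second.Hash == "" {
		t.Fatal("expected another key from the pool")
	}
}
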
+0
-50
pkg/subscraping/sources/hackertarget/hackertarget.go
0 package hackertarget
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35 src := string(body)
36
37 for _, match := range session.Extractor.FindAllString(src, -1) {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
39 }
40 close(results)
41 }()
42
43 return results
44 }
45
46 // Name returns the name of the source
47 func (s *Source) Name() string {
48 return "hackertarget"
49 }
+0
-114
pkg/subscraping/sources/intelx/intelx.go
0 package intelx
1
2 import (
3 "bytes"
4 "context"
5 "encoding/json"
6 "fmt"
7 "io/ioutil"
8 "net/http"
9
10 jsoniter "github.com/json-iterator/go"
11 "github.com/projectdiscovery/subfinder/pkg/subscraping"
12 )
13
14 type searchResponseType struct {
15 Id string `json:"id"`
16 Status int `json:"status"`
17 }
18
19 type selectorType struct {
20 Selectvalue string `json:"selectorvalue"`
21 }
22
23 type searchResultType struct {
24 Selectors []selectorType `json:"selectors"`
25 Status int `json:"status"`
26 }
27
28 type requestBody struct {
29 Term string
30 Maxresults int
31 Media int
32 Target int
33 Terminate []int
34 Timeout int
35 }
36
37 // Source is the passive scraping agent
38 type Source struct{}
39
40 // Run function returns all subdomains found with the service
41 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
42 results := make(chan subscraping.Result)
43
44 go func() {
45 defer close(results)
46 if session.Keys.IntelXKey == "" || session.Keys.IntelXHost == "" {
47 return
48 }
49
50 searchURL := fmt.Sprintf("https://%s/phonebook/search?k=%s", session.Keys.IntelXHost, session.Keys.IntelXKey)
51 reqBody := requestBody{
52 Term: domain,
53 Maxresults: 100000,
54 Media: 0,
55 Target: 1,
56 Timeout: 20,
57 }
58
59 body, err := json.Marshal(reqBody)
60 if err != nil {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
62 return
63 }
64
65 resp, err := http.Post(searchURL, "application/json", bytes.NewBuffer(body))
66 if err != nil {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
68 session.DiscardHttpResponse(resp)
69 return
70 }
71
72 var response searchResponseType
73 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
74 if err != nil {
75 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
76 // results is already closed by the deferred close(results) above
77 return
78 }
79
80 resultsURL := fmt.Sprintf("https://%s/phonebook/search/result?k=%s&id=%s&limit=10000", session.Keys.IntelXHost, session.Keys.IntelXKey, response.Id)
81 status := 0
82 for status == 0 || status == 3 {
83 resp, err = session.Get(ctx, resultsURL, "", nil)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 return
87 }
88 var response searchResultType
89 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
90 if err != nil {
91 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
92 return
93 }
94 body, err = ioutil.ReadAll(resp.Body)
95 if err != nil {
96 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
97 return
98 }
99 resp.Body.Close()
100 status = response.Status
101 for _, hostname := range response.Selectors {
102 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Selectvalue}
103 }
104 }
105 }()
106
107 return results
108 }
109
110 // Name returns the name of the source
111 func (s *Source) Name() string {
112 return "intelx"
113 }
+0
-169
pkg/subscraping/sources/ipv4info/ipv4info.go
0 package ipv4info
1
2 import (
3 "context"
4 "io/ioutil"
5 "regexp"
6 "strconv"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com/search/"+domain)
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35 src := string(body)
36
37 regxTokens := regexp.MustCompile("/ip-address/(.*)/" + domain)
38 matchTokens := regxTokens.FindAllString(src, -1)
39
40 if len(matchTokens) <= 0 {
41 close(results)
42 return
43 }
44 token := matchTokens[0]
45
46 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
47 if err != nil {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
49 close(results)
50 return
51 }
52
53 body, err = ioutil.ReadAll(resp.Body)
54 if err != nil {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
56 resp.Body.Close()
57 close(results)
58 return
59 }
60 resp.Body.Close()
61 src = string(body)
62
63 regxTokens = regexp.MustCompile("/dns/(.*?)/" + domain)
64 matchTokens = regxTokens.FindAllString(src, -1)
65 if len(matchTokens) <= 0 {
66 close(results)
67 return
68 }
69 token = matchTokens[0]
70
71 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
72 if err != nil {
73 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
74 close(results)
75 return
76 }
77
78 body, err = ioutil.ReadAll(resp.Body)
79 if err != nil {
80 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
81 resp.Body.Close()
82 close(results)
83 return
84 }
85 resp.Body.Close()
86 src = string(body)
87
88 regxTokens = regexp.MustCompile("/subdomains/(.*?)/" + domain)
89 matchTokens = regxTokens.FindAllString(src, -1)
90 if len(matchTokens) <= 0 {
91 close(results)
92 return
93 }
94 token = matchTokens[0]
95
96 resp, err = session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
97 if err != nil {
98 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
99 close(results)
100 return
101 }
102
103 body, err = ioutil.ReadAll(resp.Body)
104 if err != nil {
105 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
106 resp.Body.Close()
107 close(results)
108 return
109 }
110 resp.Body.Close()
111 src = string(body)
112
113 for _, match := range session.Extractor.FindAllString(src, -1) {
114 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
115 }
116 nextPage := 1
117
118 for {
119 further := s.getSubdomains(ctx, domain, &nextPage, src, session, results)
120 if !further {
121 break
122 }
123 }
124 close(results)
125 }()
126
127 return results
128 }
129
130 // Name returns the name of the source
131 func (s *Source) Name() string {
132 return "ipv4info"
133 }
134
135 func (s *Source) getSubdomains(ctx context.Context, domain string, nextPage *int, src string, session *subscraping.Session, results chan subscraping.Result) bool {
136 for {
137 select {
138 case <-ctx.Done():
139 return false
140 default:
141 regxTokens := regexp.MustCompile("/subdomains/.*/page" + strconv.Itoa(*nextPage) + "/" + domain + ".html")
142 matchTokens := regxTokens.FindAllString(src, -1)
143 if len(matchTokens) == 0 {
144 return false
145 }
146 token := matchTokens[0]
147
148 resp, err := session.NormalGetWithContext(ctx, "http://ipv4info.com"+token)
149 if err != nil {
150 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
151 return false
152 }
153 body, err := ioutil.ReadAll(resp.Body)
154 if err != nil {
155 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
156 resp.Body.Close()
157 return false
158 }
159 resp.Body.Close()
160 src = string(body)
161 for _, match := range session.Extractor.FindAllString(src, -1) {
162 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
163 }
164 *nextPage++
165 return true
166 }
167 }
168 }
+0
-72
pkg/subscraping/sources/passivetotal/passivetotal.go
0 package passivetotal
1
2 import (
3 "bytes"
4 "context"
5 "net/http"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 type response struct {
12 Subdomains []string `json:"subdomains"`
13 }
14
15 // Source is the passive scraping agent
16 type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.PassiveTotalUsername == "" || session.Keys.PassiveTotalPassword == "" {
24 close(results)
25 return
26 }
27
28 // Create JSON Get body
29 var request = []byte(`{"query":"` + domain + `"}`)
30
31 req, err := http.NewRequestWithContext(ctx, "GET", "https://api.passivetotal.org/v2/enrichment/subdomains", bytes.NewBuffer(request))
32 if err != nil {
33 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
34 close(results)
35 return
36 }
37
38 req.SetBasicAuth(session.Keys.PassiveTotalUsername, session.Keys.PassiveTotalPassword)
39 req.Header.Set("Content-Type", "application/json")
40
41 resp, err := session.Client.Do(req)
42 if err != nil {
43 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
44 close(results)
45 return
46 }
47
48 data := response{}
49 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
50 if err != nil {
51 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
52 resp.Body.Close()
53 close(results)
54 return
55 }
56 resp.Body.Close()
57
58 for _, subdomain := range data.Subdomains {
59 finalSubdomain := subdomain + "." + domain
60 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: finalSubdomain}
61 }
62 close(results)
63 }()
64
65 return results
66 }
67
68 // Name returns the name of the source
69 func (s *Source) Name() string {
70 return "passivetotal"
71 }
+0
-46
pkg/subscraping/sources/rapiddns/rapiddns.go
0 // Package rapiddns is a RapidDNS Scraping Engine in Golang
1 package rapiddns
2
3 import (
4 "context"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 defer close(results)
19 resp, err := session.NormalGetWithContext(ctx, "https://rapiddns.io/subdomain/"+domain+"?full=1")
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(resp)
23 return
24 }
25
26 body, err := ioutil.ReadAll(resp.Body)
27 resp.Body.Close()
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 return
31 }
32
33 src := string(body)
34 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
36 }
37 }()
38
39 return results
40 }
41
42 // Name returns the name of the source
43 func (s *Source) Name() string {
44 return "rapiddns"
45 }
+0
-65
pkg/subscraping/sources/securitytrails/securitytrails.go
0 package securitytrails
1
2 import (
3 "context"
4 "fmt"
5 "strings"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 type response struct {
12 Subdomains []string `json:"subdomains"`
13 }
14
15 // Source is the passive scraping agent
16 type Source struct{}
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 if session.Keys.Securitytrails == "" {
24 close(results)
25 return
26 }
27
28 resp, err := session.Get(ctx, fmt.Sprintf("https://api.securitytrails.com/v1/domain/%s/subdomains", domain), "", map[string]string{"APIKEY": session.Keys.Securitytrails})
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 session.DiscardHttpResponse(resp)
32 close(results)
33 return
34 }
35
36 response := response{}
37 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 resp.Body.Close()
41 close(results)
42 return
43 }
44 resp.Body.Close()
45
46 for _, subdomain := range response.Subdomains {
47 if strings.HasSuffix(subdomain, ".") {
48 subdomain = subdomain + domain
49 } else {
50 subdomain = subdomain + "." + domain
51 }
52
53 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
54 }
55 close(results)
56 }()
57
58 return results
59 }
60
61 // Name returns the name of the source
62 func (s *Source) Name() string {
63 return "securitytrails"
64 }
+0
-73
pkg/subscraping/sources/shodan/shodan.go
0 package shodan
1
2 import (
3 "context"
4 "strconv"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 type shodanResult struct {
11 Matches []shodanObject `json:"matches"`
12 Result int `json:"result"`
13 Error string `json:"error"`
14 }
15
16 type shodanObject struct {
17 Hostnames []string `json:"hostnames"`
18 }
19
20 // Source is the passive scraping agent
21 type Source struct{}
22
23 // Run function returns all subdomains found with the service
24 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
25 results := make(chan subscraping.Result)
26
27 go func() {
28 if session.Keys.Shodan == "" {
29 close(results)
30 return
31 }
32
33 for currentPage := 0; currentPage <= 10; currentPage++ {
34 resp, err := session.NormalGetWithContext(ctx, "https://api.shodan.io/shodan/host/search?query=hostname:"+domain+"&page="+strconv.Itoa(currentPage)+"&key="+session.Keys.Shodan)
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 session.DiscardHttpResponse(resp)
38 close(results)
39 return
40 }
41
42 var response shodanResult
43 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
44 if err != nil {
45 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
46 resp.Body.Close()
47 close(results)
48 return
49 }
50 resp.Body.Close()
51
52 if response.Error != "" || len(response.Matches) == 0 {
53 close(results)
54 return
55 }
56
57 for _, block := range response.Matches {
58 for _, hostname := range block.Hostnames {
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname}
60 }
61 }
62 }
63 close(results)
64 }()
65
66 return results
67 }
68
69 // Name returns the name of the source
70 func (s *Source) Name() string {
71 return "shodan"
72 }
+0
-84
pkg/subscraping/sources/sitedossier/sitedossier.go
0 package sitedossier
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "math/rand"
7 "regexp"
8 "time"
9
10 "github.com/projectdiscovery/subfinder/pkg/subscraping"
11 )
12
13 var reNext = regexp.MustCompile("<a href=\"([A-Za-z0-9\\/.]+)\"><b>")
14
15 type agent struct {
16 results chan subscraping.Result
17 session *subscraping.Session
18 }
19
20 func (a *agent) enumerate(ctx context.Context, baseURL string) error {
21 for {
22 select {
23 case <-ctx.Done():
24 return nil
25 default:
26 resp, err := a.session.NormalGetWithContext(ctx, baseURL)
27 if err != nil {
28 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
29 a.session.DiscardHttpResponse(resp)
30 close(a.results)
31 return err
32 }
33
34 body, err := ioutil.ReadAll(resp.Body)
35 if err != nil {
36 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
37 resp.Body.Close()
38 close(a.results)
39 return err
40 }
41 resp.Body.Close()
42 src := string(body)
43
44 for _, match := range a.session.Extractor.FindAllString(src, -1) {
45 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
46 }
47
48 match1 := reNext.FindStringSubmatch(src)
49 time.Sleep(time.Duration((3 + rand.Intn(5))) * time.Second)
50
51 if len(match1) > 0 {
52 return a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
53 }
54 return nil
55 }
56 }
57 }
58
59 // Source is the passive scraping agent
60 type Source struct{}
61
62 // Run function returns all subdomains found with the service
63 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
64 results := make(chan subscraping.Result)
65
66 a := agent{
67 session: session,
68 results: results,
69 }
70
71 go func() {
72 err := a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain))
73 if err == nil {
74 close(a.results)
75 }
76 }()
77 return results
78 }
79
80 // Name returns the name of the source
81 func (s *Source) Name() string {
82 return "sitedossier"
83 }
+0
-90
pkg/subscraping/sources/spyse/spyse.go
0 package spyse
1
2 import (
3 "context"
4 "strconv"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11
12 type resultObject struct {
13 Name string `json:"name"`
14 }
15
16 type dataObject struct {
17 Items []resultObject `json:"items"`
18 Total_Count int `json:"total_count"`
19 }
20
21 type errorObject struct {
22 Code string `json:"code"`
23 Message string `json:"message"`
24 }
25
26
27 type spyseResult struct {
28 Data dataObject `json:"data"`
29 Error []errorObject `json:"error"`
30 }
31
32
33 type Source struct{}
34
35 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
36 results := make(chan subscraping.Result)
37
38 go func() {
39 if session.Keys.Spyse == "" {
40 close(results)
41 return
42 }
43
44 maxCount := 100
45
46 for offSet := 0; offSet <= maxCount; offSet += 100 {
47 resp, err := session.Get(ctx, fmt.Sprintf("https://api.spyse.com/v3/data/domain/subdomain?domain=%s&limit=100&offset=%s", domain, strconv.Itoa(offSet)), "", map[string]string{"Authorization": "Bearer " + session.Keys.Spyse})
48 if err != nil {
49 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
50 session.DiscardHttpResponse(resp)
51 close(results)
52 return
53 }
54
55
56 var response spyseResult
57
58 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
59
60 if err != nil {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
62 resp.Body.Close()
63 close(results)
64 return
65 }
66 resp.Body.Close()
67
68 if response.Data.Total_Count == 0 {
69 close(results)
70 return
71 }
72
73 maxCount = response.Data.Total_Count
74
75 for _, hostname := range response.Data.Items {
76 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Name}
77 }
78 }
79 close(results)
80 }()
81
82 return results
83 }
84
85
86 // Name returns the name of the source
87 func (s *Source) Name() string {
88 return "spyse"
89 }
+0
-49
pkg/subscraping/sources/sublist3r/subllist3r.go
0 package sublist3r
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.sublist3r.com/search.php?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25 defer resp.Body.Close()
26 var subdomains []string
27 // Get the response body and unmarshal
28 err = json.NewDecoder(resp.Body).Decode(&subdomains)
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 resp.Body.Close()
32 close(results)
33 return
34 }
35
36 for _, subdomain := range subdomains {
37 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
38 }
39 close(results)
40 }()
41
42 return results
43 }
44
45 // Name returns the name of the source
46 func (s *Source) Name() string {
47 return "sublist3r"
48 }
+0
-51
pkg/subscraping/sources/threatcrowd/threatcrowd.go
0 package threatcrowd
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.threatcrowd.org/searchApi/v2/domain/report/?domain=%s", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, match := range session.Extractor.FindAllString(src, -1) {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
40 }
41 close(results)
42 }()
43
44 return results
45 }
46
47 // Name returns the name of the source
48 func (s *Source) Name() string {
49 return "threatcrowd"
50 }
+0
-51
pkg/subscraping/sources/threatminer/threatminer.go
0 package threatminer
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
19 if err != nil {
20 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
21 session.DiscardHttpResponse(resp)
22 close(results)
23 return
24 }
25
26 // Get the response body
27 body, err := ioutil.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 close(results)
32 return
33 }
34 resp.Body.Close()
35
36 src := string(body)
37
38 for _, match := range session.Extractor.FindAllString(src, -1) {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}
40 }
41 close(results)
42 }()
43
44 return results
45 }
46
47 // Name returns the name of the source
48 func (s *Source) Name() string {
49 return "threatminer"
50 }
+0
-60
pkg/subscraping/sources/urlscan/urlscan.go
0 package urlscan
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/m-mizutani/urlscan-go/urlscan"
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 if session.Keys.URLScan == "" {
20 close(results)
21 return
22 }
23
24 client := urlscan.NewClient(session.Keys.URLScan)
25 task, err := client.Submit(urlscan.SubmitArguments{URL: fmt.Sprintf("https://%s", domain)})
26 if err != nil {
27 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
28 close(results)
29 return
30 }
31
32 err = task.Wait()
33 if err != nil {
34 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
35 close(results)
36 return
37 }
38
39 data, err := jsoniter.Marshal(task.Result.Data)
40 if err != nil {
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
42 close(results)
43 return
44 }
45
46 match := session.Extractor.FindAllString(string(data), -1)
47 for _, m := range match {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: m}
49 }
50 close(results)
51 }()
52
53 return results
54 }
55
56 // Name returns the name of the source
57 func (s *Source) Name() string {
58 return "urlscan"
59 }
+0
-58
pkg/subscraping/sources/virustotal/virustotal.go
0 package virustotal
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7 "github.com/projectdiscovery/subfinder/pkg/subscraping"
8 )
9
10 type response struct {
11 Subdomains []string `json:"subdomains"`
12 }
13
14 // Source is the passive scraping agent
15 type Source struct{}
16
17 // Run function returns all subdomains found with the service
18 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
19 results := make(chan subscraping.Result)
20
21 go func() {
22 if session.Keys.Virustotal == "" {
23 close(results)
24 return
25 }
26
27 resp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", session.Keys.Virustotal, domain))
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 session.DiscardHttpResponse(resp)
31 close(results)
32 return
33 }
34
35 data := response{}
36 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
37 if err != nil {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
39 resp.Body.Close()
40 close(results)
41 return
42 }
43 resp.Body.Close()
44
45 for _, subdomain := range data.Subdomains {
46 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
47 }
48 close(results)
49 }()
50
51 return results
52 }
53
54 // Name returns the name of the source
55 func (s *Source) Name() string {
56 return "virustotal"
57 }
+0
-53
pkg/subscraping/sources/waybackarchive/waybackarchive.go
0 package waybackarchive
1
2 import (
3 "context"
4 "fmt"
5 "io/ioutil"
6 "strings"
7
8 "github.com/projectdiscovery/subfinder/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 pagesResp, err := session.NormalGetWithContext(ctx, fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=json&fl=original&collapse=urlkey", domain))
20 if err != nil {
21 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
22 session.DiscardHttpResponse(pagesResp)
23 close(results)
24 return
25 }
26
27 body, err := ioutil.ReadAll(pagesResp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 pagesResp.Body.Close()
31 close(results)
32 return
33 }
34 pagesResp.Body.Close()
35
36 match := session.Extractor.FindAllString(string(body), -1)
37 for _, subdomain := range match {
38 subdomain = strings.TrimPrefix(subdomain, "25")
39 subdomain = strings.TrimPrefix(subdomain, "2F")
40
41 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
42 }
43 close(results)
44 }()
45
46 return results
47 }
48
49 // Name returns the name of the source
50 func (s *Source) Name() string {
51 return "waybackarchive"
52 }
+0
-138
pkg/subscraping/sources/zoomeye/zoomeye.go
0 package zoomeye
1
2 import (
3 "bytes"
4 "context"
5 "encoding/json"
6 "errors"
7 "fmt"
8 "io"
9 "io/ioutil"
10 "net/http"
11
12 "github.com/projectdiscovery/subfinder/pkg/subscraping"
13 )
14
15 // zoomAuth holds the ZoomEye credentials
16 type zoomAuth struct {
17 User string `json:"username"`
18 Pass string `json:"password"`
19 }
20
21 type loginResp struct {
22 JWT string `json:"access_token"`
23 }
24
25 // search results
26 type zoomeyeResults struct {
27 Matches []struct {
28 Site string `json:"site"`
29 Domains []string `json:"domains"`
30 } `json:"matches"`
31 }
32
33 // Source is the passive scraping agent
34 type Source struct{}
35
36 // Run function returns all subdomains found with the service
37 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
38 results := make(chan subscraping.Result)
39
40 go func() {
41 if session.Keys.ZoomEyeUsername == "" || session.Keys.ZoomEyePassword == "" {
42 close(results)
43 return
44 }
45 jwt, err := doLogin(session)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 close(results)
49 return
50 }
51 // check if jwt is null
52 if jwt == "" {
53 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("could not log into zoomeye")}
54 close(results)
55 return
56 }
57 headers := map[string]string{
58 "Authorization": fmt.Sprintf("JWT %s", jwt),
59 "Accept": "application/json",
60 "Content-Type": "application/json",
61 }
62 for currentPage := 0; currentPage <= 100; currentPage++ {
63 api := fmt.Sprintf("https://api.zoomeye.org/web/search?query=hostname:%s&page=%d", domain, currentPage)
64 resp, err := session.Get(ctx, api, "", headers)
65 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
66 if err != nil {
67 if !isForbidden && currentPage == 0 {
68 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
69 session.DiscardHttpResponse(resp)
70 }
71 close(results)
72 return
73 }
74
75 defer resp.Body.Close()
76 res := &zoomeyeResults{}
77 err = json.NewDecoder(resp.Body).Decode(res)
78 if err != nil {
79 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
80 resp.Body.Close()
81 close(results)
82 return
83 }
84 resp.Body.Close()
85 for _, r := range res.Matches {
86 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: r.Site}
87 for _, domain := range r.Domains {
88 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: domain}
89 }
90 }
91 // the loop's post statement already advances currentPage
92 }
93 close(results)
94 }()
95
96 return results
97 }
98
99 // doLogin performs authentication on the ZoomEye API
100 func doLogin(session *subscraping.Session) (string, error) {
101 creds := &zoomAuth{
102 User: session.Keys.ZoomEyeUsername,
103 Pass: session.Keys.ZoomEyePassword,
104 }
105 body, err := json.Marshal(&creds)
106 if err != nil {
107 return "", err
108 }
109 req, err := http.NewRequest("POST", "https://api.zoomeye.org/user/login", bytes.NewBuffer(body))
110 if err != nil {
111 return "", err
112 }
113 req.Header.Add("Content-Type", "application/json")
114 resp, err := session.Client.Do(req)
115 if err != nil {
116 return "", err
117 }
118 // if not 200, bad credentials
119 if resp.StatusCode != 200 {
120 io.Copy(ioutil.Discard, resp.Body)
121 resp.Body.Close()
122 return "", fmt.Errorf("login failed, non-200 response from zoomeye")
123 }
124
125 defer resp.Body.Close()
126 login := &loginResp{}
127 err = json.NewDecoder(resp.Body).Decode(login)
128 if err != nil {
129 return "", err
130 }
131 return login.JWT, nil
132 }
133
134 // Name returns the name of the source
135 func (s *Source) Name() string {
136 return "zoomeye"
137 }
+0
-67
pkg/subscraping/types.go
0 package subscraping
1
2 import (
3 "context"
4 "net/http"
5 "regexp"
6 )
7
8 // Source is an interface inherited by each passive source
9 type Source interface {
10 // Run takes a domain as argument and a session object
11 // which contains the extractor for subdomains, http client
12 // and other stuff.
13 Run(context.Context, string, *Session) <-chan Result
14 // Name returns the name of the source
15 Name() string
16 }
17
18 // Session is the option passed to the source, an option is created
19 // uniquely for each source.
20 type Session struct {
21 // Extractor is the regex for subdomains created for each domain
22 Extractor *regexp.Regexp
23 // Keys is the API keys for the application
24 Keys Keys
25 // Client is the current http client
26 Client *http.Client
27 }
28
29 // Keys contains the current API Keys we have in store
30 type Keys struct {
31 Binaryedge string `json:"binaryedge"`
32 CensysToken string `json:"censysUsername"`
33 CensysSecret string `json:"censysPassword"`
34 Certspotter string `json:"certspotter"`
35 Chaos string `json:"chaos"`
36 DNSDB string `json:"dnsdb"`
37 GitHub []string `json:"github"`
38 IntelXHost string `json:"intelXHost"`
39 IntelXKey string `json:"intelXKey"`
40 PassiveTotalUsername string `json:"passivetotal_username"`
41 PassiveTotalPassword string `json:"passivetotal_password"`
42 Securitytrails string `json:"securitytrails"`
43 Shodan string `json:"shodan"`
44 Spyse string `json:"spyse"`
45 URLScan string `json:"urlscan"`
46 Virustotal string `json:"virustotal"`
47 ZoomEyeUsername string `json:"zoomeye_username"`
48 ZoomEyePassword string `json:"zoomeye_password"`
49 }
50
51 // Result is a result structure returned by a source
52 type Result struct {
53 Type ResultType
54 Source string
55 Value string
56 Error error
57 }
58
59 // ResultType is the type of result returned by the source
60 type ResultType int
61
62 // Types of results returned by the source
63 const (
64 Subdomain ResultType = iota
65 Error
66 )
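
The Source interface and Result types above are the entire contract a scraper has to satisfy. For reference, a minimal hypothetical implementation (the example package and the fixed input string are made up for illustration):

package example

import (
	"context"

	"github.com/projectdiscovery/subfinder/pkg/subscraping"
)

// Source is a toy scraper that only demonstrates the contract:
// stream results on a channel and close it exactly once when done.
type Source struct{}

// Run emits whatever the session's extractor finds in a fixed string.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)

	go func() {
		defer close(results)
		for _, match := range session.Extractor.FindAllString("www."+domain+" api."+domain, -1) {
			select {
			case <-ctx.Done():
				return
			case results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: match}:
			}
		}
	}()

	return results
}

// Name returns the name of the source
func (s *Source) Name() string {
	return "example"
}
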
+0
-30
pkg/subscraping/utils.go
0 package subscraping
1
2 import (
3 "regexp"
4 "sync"
5 )
6
7 var subdomainExtractorMutex = &sync.Mutex{}
8
9 // NewSubdomainExtractor creates a new regular expression to extract
10 // subdomains from text based on the given domain.
11 func NewSubdomainExtractor(domain string) (*regexp.Regexp, error) {
12 subdomainExtractorMutex.Lock()
13 defer subdomainExtractorMutex.Unlock()
14 extractor, err := regexp.Compile(`[a-zA-Z0-9\*_.-]+\.` + domain)
15 if err != nil {
16 return nil, err
17 }
18 return extractor, nil
19 }
20
21 // Exists check if a key exist in a slice
22 func Exists(values []string, key string) bool {
23 for _, v := range values {
24 if v == key {
25 return true
26 }
27 }
28 return false
29 }
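
A short sketch of how these helpers behave, written as a test alongside utils.go (the sample text is invented):

package subscraping

import "testing"

func TestSubdomainExtractorSketch(t *testing.T) {
	extractor, err := NewSubdomainExtractor("example.com")
	if err != nil {
		t.Fatal(err)
	}

	// The pattern matches any dotted label chain ending in the target domain.
	found := extractor.FindAllString("see www.example.com and api.staging.example.com.", -1)
	if len(found) != 2 {
		t.Fatalf("expected 2 matches, got %v", found)
	}

	if !Exists(found, "www.example.com") {
		t.Fatal("expected www.example.com in the results")
	}
}
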
0 sonar.projectKey=projectdiscovery_subfinder
1 sonar.organization=projectdiscovery
2
3 # This is the name and version displayed in the SonarCloud UI.
4 #sonar.projectName=subfinder
5 #sonar.projectVersion=1.0
6
7 # Path is relative to the sonar-project.properties file. Replace "\" by "/" on Windows.
8 sonar.sources=v2/
9 sonar.tests=v2/
10 sonar.test.inclusions=**/*_test.go
11 sonar.go.coverage.reportPaths=v2/cov.out
12 sonar.externalIssuesReportPaths=v2/report.json
13
14 # Encoding of the source code. Default is default system encoding
15 #sonar.sourceEncoding=UTF-8
0 before:
1 hooks:
2 - go mod tidy
3
4 builds:
5 - env:
6 - CGO_ENABLED=0
7 goos:
8 - windows
9 - linux
10 - darwin
11 goarch:
12 - amd64
13 - 386
14 - arm
15 - arm64
16
17 ignore:
18 - goos: darwin
19 goarch: '386'
20 - goos: windows
21 goarch: 'arm'
22
23 binary: '{{ .ProjectName }}'
24 main: cmd/subfinder/main.go
25
26 archives:
27 - format: zip
28 replacements:
29 darwin: macOS
30
31 checksum:
32 algorithm: sha256
33
34 announce:
35 slack:
36 enabled: true
37 channel: '#release'
38 username: GoReleaser
39 message_template: '{{ .ProjectName }} {{ .Tag }} is out! Check it out at {{ .ReleaseURL }}'
0 # Go parameters
1 GOCMD=go
2 GOBUILD=$(GOCMD) build
3 GOMOD=$(GOCMD) mod
4 GOTEST=$(GOCMD) test
5 GOFLAGS := -v
6 LDFLAGS := -s -w
7
8 ifneq ($(shell go env GOOS),darwin)
9 LDFLAGS := -extldflags "-static"
10 endif
11
12 all: build
13 build:
14 $(GOBUILD) $(GOFLAGS) -ldflags '$(LDFLAGS)' -o "subfinder" cmd/subfinder/main.go
15 test:
16 $(GOTEST) $(GOFLAGS) ./...
17 tidy:
18 $(GOMOD) tidy
0 package main
1
2 import (
3 "fmt"
4 "os"
5 "strings"
6
7 "github.com/logrusorgru/aurora"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/testutils"
10 )
11
12 var (
13 debug = os.Getenv("DEBUG") == "true"
14 githubAction = os.Getenv("GH_ACTION") == "true"
15 customTests = os.Getenv("TESTS")
16
17 success = aurora.Green("[✓]").String()
18 failed = aurora.Red("[✘]").String()
19
20 sourceTests = map[string]testutils.TestCase{
21 "dnsrepo": dnsrepoTestcases{},
22 }
23 )
24
25 func main() {
26 failedTestCases := runTests(toMap(toSlice(customTests)))
27
28 if len(failedTestCases) > 0 {
29 if githubAction {
30 debug = true
31 fmt.Println("::group::Failed integration tests in debug mode")
32 _ = runTests(failedTestCases)
33 fmt.Println("::endgroup::")
34 }
35 os.Exit(1)
36 }
37 }
38
39 func runTests(customTestCases map[string]struct{}) map[string]struct{} {
40 failedTestCases := map[string]struct{}{}
41
42 for source, testCase := range sourceTests {
43 if len(customTestCases) == 0 {
44 fmt.Printf("Running test cases for %q source\n", aurora.Blue(source))
45 }
46 if err, failedTemplatePath := execute(source, testCase); err != nil {
47 failedTestCases[failedTemplatePath] = struct{}{}
48 }
49 }
50 return failedTestCases
51 }
52
53 func execute(source string, testCase testutils.TestCase) (error, string) {
54 if err := testCase.Execute(); err != nil {
55 _, _ = fmt.Fprintf(os.Stderr, "%s Test \"%s\" failed: %s\n", failed, source, err)
56 return err, source
57 }
58
59 fmt.Printf("%s Test \"%s\" passed!\n", success, source)
60 return nil, ""
61 }
62
63 func expectResultsGreaterThanCount(results []string, expectedNumber int) error {
64 if len(results) > expectedNumber {
65 return nil
66 }
67 return fmt.Errorf("incorrect number of results: expected a result greater than %d, but got %d", expectedNumber, len(results))
68 }
69 func toSlice(value string) []string {
70 if strings.TrimSpace(value) == "" {
71 return []string{}
72 }
73
74 return strings.Split(value, ",")
75 }
76
77 func toMap(slice []string) map[string]struct{} {
78 result := make(map[string]struct{}, len(slice))
79 for _, value := range slice {
80 if _, ok := result[value]; !ok {
81 result[value] = struct{}{}
82 }
83 }
84 return result
85 }
0 #!/bin/bash
1
2 echo "::task~> Clean up & Build binaries files"
3 rm integration-test subfinder 2>/dev/null
4 cd ../subfinder
5 go build
6 mv subfinder ../integration-test/subfinder
7 cd ../integration-test
8 go build
9 echo "::done::"
10 echo "::task~> Run integration test"
11 ./integration-test
12 status=$?; echo "::done::"
13 if [ $status -eq 0 ]
14 then
15 exit 0
16 else
17 exit 1
18 fi
0 package main
1
2 import (
3 "errors"
4 "fmt"
5 "os"
6
7 "github.com/projectdiscovery/subfinder/v2/pkg/testutils"
8 )
9
10 type dnsrepoTestcases struct{}
11
12 func (h dnsrepoTestcases) Execute() error {
13 token := os.Getenv("DNSREPO_API_KEY")
14 if token == "" {
15 return errors.New("missing dns repo api key")
16 }
17 dnsToken := fmt.Sprintf(`dnsrepo: [%s]`, token)
18 file, err := os.CreateTemp("", "provider.yaml")
19 if err != nil {
20 return err
21 }
22 defer os.RemoveAll(file.Name())
23 _, err = file.WriteString(dnsToken)
24 if err != nil {
25 return err
26 }
27 results, err := testutils.RunSubfinderAndGetResults(debug, "hackerone.com", "-s", "dnsrepo", "-provider-config", file.Name())
28 if err != nil {
29 return err
30 }
31 return expectResultsGreaterThanCount(results, 0)
32 }
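
Adding coverage for another source follows the same shape: implement testutils.TestCase and register it in the sourceTests map of the harness above. A hypothetical sketch (the source name "examplesource" and its details are invented):

package main

import "github.com/projectdiscovery/subfinder/v2/pkg/testutils"

type examplesourceTestcases struct{}

// Execute runs subfinder restricted to the hypothetical source and only
// checks that it returned at least one result.
func (e examplesourceTestcases) Execute() error {
	results, err := testutils.RunSubfinderAndGetResults(debug, "hackerone.com", "-s", "examplesource")
	if err != nil {
		return err
	}
	return expectResultsGreaterThanCount(results, 0)
}

// Registered in main.go as:
//   sourceTests["examplesource"] = examplesourceTestcases{}
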
0 package main
1
2 import (
3 // Attempts to increase the OS file descriptors - Fail silently
4 _ "github.com/projectdiscovery/fdmax/autofdmax"
5 "github.com/projectdiscovery/gologger"
6 "github.com/projectdiscovery/subfinder/v2/pkg/runner"
7 )
8
9 func main() {
10 // Parse the command line flags and read config files
11 options := runner.ParseOptions()
12
13 newRunner, err := runner.NewRunner(options)
14 if err != nil {
15 gologger.Fatal().Msgf("Could not create runner: %s\n", err)
16 }
17
18 err = newRunner.RunEnumeration()
19 if err != nil {
20 gologger.Fatal().Msgf("Could not run enumeration: %s\n", err)
21 }
22 }
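The entry point above only wires ParseOptions into NewRunner and RunEnumeration. The same wiring can be done without the CLI layer when embedding subfinder as a library; the sketch below is illustrative only, and the field values (domain, thread count, timeouts) are placeholder assumptions rather than required settings.

package main

import (
	"os"

	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
)

func main() {
	// Illustrative options only; real defaults normally come from runner.ParseOptions.
	options := &runner.Options{
		Domain:             []string{"example.com"},
		Threads:            10,
		Timeout:            30,
		MaxEnumerationTime: 10,
		Output:             os.Stdout,
	}

	newRunner, err := runner.NewRunner(options)
	if err != nil {
		gologger.Fatal().Msgf("Could not create runner: %s\n", err)
	}
	if err := newRunner.RunEnumeration(); err != nil {
		gologger.Fatal().Msgf("Could not run enumeration: %s\n", err)
	}
}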
0 module github.com/projectdiscovery/subfinder/v2
1
2 go 1.18
3
4 require (
5 github.com/corpix/uarand v0.2.0
6 github.com/hako/durafmt v0.0.0-20210316092057-3a2c319c1acd
7 github.com/json-iterator/go v1.1.12
8 github.com/lib/pq v1.10.7
9 github.com/projectdiscovery/chaos-client v0.3.0
10 github.com/projectdiscovery/dnsx v1.1.1
11 github.com/projectdiscovery/fdmax v0.0.3
12 github.com/projectdiscovery/fileutil v0.0.1
13 github.com/projectdiscovery/gologger v1.1.5-0.20220817095646-8663411b1b0b
14 github.com/projectdiscovery/ratelimit v0.0.1
15 github.com/rs/xid v1.4.0
16 github.com/stretchr/testify v1.8.0
17 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80
18 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e
19 gopkg.in/yaml.v3 v3.0.1
20 )
21
22 require (
23 github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
24 github.com/aymerick/douceur v0.2.0 // indirect
25 github.com/dsnet/compress v0.0.1 // indirect
26 github.com/golang/snappy v0.0.4 // indirect
27 github.com/gorilla/css v1.0.0 // indirect
28 github.com/mholt/archiver v3.1.1+incompatible // indirect
29 github.com/microcosm-cc/bluemonday v1.0.20 // indirect
30 github.com/nwaples/rardecode v1.1.0 // indirect
31 github.com/pierrec/lz4 v2.6.0+incompatible // indirect
32 github.com/projectdiscovery/blackrock v0.0.0-20220628111055-35616c71b2dc // indirect
33 github.com/projectdiscovery/cdncheck v0.0.3 // indirect
34 github.com/projectdiscovery/httputil v0.0.0-20210906072657-f3a099cb20bc // indirect
35 github.com/projectdiscovery/iputil v0.0.0-20220712175312-b9406f31cdd8 // indirect
36 github.com/projectdiscovery/mapcidr v1.0.1 // indirect
37 github.com/projectdiscovery/retryablehttp-go v1.0.2 // indirect
38 github.com/projectdiscovery/sliceutil v0.0.0-20220625085859-c3a4ecb669f4 // indirect
39 github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca // indirect
40 github.com/ulikunitz/xz v0.5.7 // indirect
41 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 // indirect
42 github.com/yl2chen/cidranger v1.0.2 // indirect
43 go.uber.org/atomic v1.9.0 // indirect
44 go.uber.org/multierr v1.8.0 // indirect
45 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
46 golang.org/x/text v0.3.7 // indirect
47 golang.org/x/tools v0.1.12 // indirect
48 gopkg.in/djherbis/times.v1 v1.2.0 // indirect
49 )
50
51 require (
52 github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 // indirect
53 github.com/davecgh/go-spew v1.1.1 // indirect
54 github.com/logrusorgru/aurora v2.0.3+incompatible
55 github.com/miekg/dns v1.1.50 // indirect
56 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
57 github.com/modern-go/reflect2 v1.0.2 // indirect
58 github.com/pkg/errors v0.9.1
59 github.com/pmezard/go-difflib v1.0.0 // indirect
60 github.com/projectdiscovery/goflags v0.1.1
61 github.com/projectdiscovery/retryabledns v1.0.15 // indirect
62 github.com/projectdiscovery/stringsutil v0.0.1 // indirect
63 golang.org/x/net v0.0.0-20220927171203-f486391704dc // indirect
64 golang.org/x/sys v0.0.0-20220731174439-a90be440212d // indirect
65 )
0 github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d h1:Byv0BzEl3/e6D5CLfI0j/7hiIEtvGVFPCZ7Ei2oq8iQ=
1 github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
2 github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
3 github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
4 github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08 h1:ox2F0PSMlrAAiAdknSRMDrAr8mfxPCfSZolH+/qQnyQ=
5 github.com/cnf/structhash v0.0.0-20201127153200-e1b16c1ebc08/go.mod h1:pCxVEbcm3AMg7ejXyorUXi6HQCzOIBf7zEDVPtw0/U4=
6 github.com/corpix/uarand v0.2.0 h1:U98xXwud/AVuCpkpgfPF7J5TQgr7R5tqT8VZP5KWbzE=
7 github.com/corpix/uarand v0.2.0/go.mod h1:/3Z1QIqWkDIhf6XWn/08/uMHoQ8JUoTIKc2iPchBOmM=
8 github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
9 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
10 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
11 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
12 github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
13 github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
14 github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
15 github.com/frankban/quicktest v1.11.3 h1:8sXhOn0uLys67V8EsXLc6eszDs8VXWxL3iRvebPhedY=
16 github.com/frankban/quicktest v1.11.3/go.mod h1:wRf/ReqHper53s+kmmSZizM8NamnL3IM0I9ntUbOk+k=
17 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
18 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
19 github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
20 github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
21 github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
22 github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
23 github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
24 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
25 github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
26 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
27 github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
28 github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
29 github.com/hako/durafmt v0.0.0-20210316092057-3a2c319c1acd h1:FsX+T6wA8spPe4c1K9vi7T0LvNCO1TTqiL8u7Wok2hw=
30 github.com/hako/durafmt v0.0.0-20210316092057-3a2c319c1acd/go.mod h1:VzxiSdG6j1pi7rwGm/xYI5RbtpBgM8sARDXlvEvxlu0=
31 github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
32 github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
33 github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
34 github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
35 github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
36 github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk=
37 github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
38 github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
39 github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
40 github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
41 github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
42 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
43 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
44 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
45 github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw=
46 github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
47 github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
48 github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczGlG91VSDkswnjF5A8=
49 github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4=
50 github.com/mholt/archiver v3.1.1+incompatible h1:1dCVxuqs0dJseYEhi5pl7MYPH9zDa1wBi7mF09cbNkU=
51 github.com/mholt/archiver v3.1.1+incompatible/go.mod h1:Dh2dOXnSdiLxRiPoVfIr/fI1TwETms9B8CTWfeh7ROU=
52 github.com/microcosm-cc/bluemonday v1.0.18/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM=
53 github.com/microcosm-cc/bluemonday v1.0.19/go.mod h1:QNzV2UbLK2/53oIIwTOyLUSABMkjZ4tqiyC1g/DyqxE=
54 github.com/microcosm-cc/bluemonday v1.0.20 h1:flpzsq4KU3QIYAYGV/szUat7H+GPOXR0B2JU5A1Wp8Y=
55 github.com/microcosm-cc/bluemonday v1.0.20/go.mod h1:yfBmMi8mxvaZut3Yytv+jTXRY8mxyjJ0/kQBTElld50=
56 github.com/miekg/dns v1.1.50 h1:DQUfb9uc6smULcREF09Uc+/Gd46YWqJd5DbpPE9xkcA=
57 github.com/miekg/dns v1.1.50/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME=
58 github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
59 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
60 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
61 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
62 github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
63 github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
64 github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
65 github.com/nwaples/rardecode v1.1.0 h1:vSxaY8vQhOcVr4mm5e8XllHWTiM4JF507A0Katqw7MQ=
66 github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
67 github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
68 github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
69 github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
70 github.com/pierrec/lz4 v2.6.0+incompatible h1:Ix9yFKn1nSPBLFl/yZknTp8TU5G4Ps0JDmguYK6iH1A=
71 github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
72 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
73 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
74 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
75 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
76 github.com/projectdiscovery/blackrock v0.0.0-20210415162320-b38689ae3a2e/go.mod h1:/IsapnEYiWG+yEDPXp0e8NWj3npzB9Ccy9lXEUJwMZs=
77 github.com/projectdiscovery/blackrock v0.0.0-20220628111055-35616c71b2dc h1:jqZK68yPOnNNRmwuXqytl+T9EbwneEUCvMDRjLe0J04=
78 github.com/projectdiscovery/blackrock v0.0.0-20220628111055-35616c71b2dc/go.mod h1:5tNGQP9kOfW+X5+40pZP8aqPYLHs45nJkFaSHLxdeH8=
79 github.com/projectdiscovery/cdncheck v0.0.3 h1:li2/rUJmhVXSqRFyhJMqi6pdBX6ZxMnwzBfE0Kifj/g=
80 github.com/projectdiscovery/cdncheck v0.0.3/go.mod h1:EevMeCG1ogBoUJYaa0Mv9R1VUboDm/DiynId7DboKy0=
81 github.com/projectdiscovery/chaos-client v0.3.0 h1:A4NgOYRCrlsSZUBTCT2HAT/uTEJly17+nWcXHRXR+Ko=
82 github.com/projectdiscovery/chaos-client v0.3.0/go.mod h1:AWx/KZgtBE5SULpsgyQLLfb+SQgVtCih83gvRtTpwl4=
83 github.com/projectdiscovery/dnsx v1.1.1 h1:yGYEH1vfVN7YdvdOtPzPvrc6JuHCi8wBTAkStP/f2QI=
84 github.com/projectdiscovery/dnsx v1.1.1/go.mod h1:DVvc+ePRCknahLpz4Y8nMppYOGUZhkEmYsTPuYx1a5w=
85 github.com/projectdiscovery/fdmax v0.0.3 h1:FM6lv9expZ/rEEBI9tkRh6tx3DV0gtpwzdc0h7bGPqg=
86 github.com/projectdiscovery/fdmax v0.0.3/go.mod h1:NWRcaR7JTO7fC27H4jCl9n7Z+KIredwpgw1fV+4KrKI=
87 github.com/projectdiscovery/fileutil v0.0.0-20210928100737-cab279c5d4b5/go.mod h1:U+QCpQnX8o2N2w0VUGyAzjM3yBAe4BKedVElxiImsx0=
88 github.com/projectdiscovery/fileutil v0.0.0-20220609150212-453ac591c36c/go.mod h1:g8wsrb0S5NtEN0JgVyyPeb3FQdArx+UMESmFX94bcGY=
89 github.com/projectdiscovery/fileutil v0.0.0-20220705195237-01becc2a8963/go.mod h1:DaY7wmLPMleyHDCD/14YApPCDtrARY4J8Eny2ZGsG/g=
90 github.com/projectdiscovery/fileutil v0.0.1 h1:3K3UqCDOan3LsvWhV0nyvVuMWSwCloNPUJIGcXsi1os=
91 github.com/projectdiscovery/fileutil v0.0.1/go.mod h1:Oo6ZEvXmQz/xPF0YukzmwpdW2LYinWCSEmzZOQsJCLg=
92 github.com/projectdiscovery/goflags v0.0.8/go.mod h1:GDSkWyXa6kfQjpJu10SO64DN8lXuKXVENlBMk8N7H80=
93 github.com/projectdiscovery/goflags v0.1.1 h1:AEtT14D9OC10HWyZwDQaSLjuK8ZKoBrSYlsLItvMKZI=
94 github.com/projectdiscovery/goflags v0.1.1/go.mod h1:/YBPA+1igSkQbwD7a91o0HUIwMDlsmQDRZL2oSYSyEQ=
95 github.com/projectdiscovery/gologger v1.0.1/go.mod h1:Ok+axMqK53bWNwDSU1nTNwITLYMXMdZtRc8/y1c7sWE=
96 github.com/projectdiscovery/gologger v1.1.4/go.mod h1:Bhb6Bdx2PV1nMaFLoXNBmHIU85iROS9y1tBuv7T5pMY=
97 github.com/projectdiscovery/gologger v1.1.5-0.20220817095646-8663411b1b0b h1:sncWNStu8+oT3vDvKKFncr5FxEui5Bs0ET2Qkj0AVBo=
98 github.com/projectdiscovery/gologger v1.1.5-0.20220817095646-8663411b1b0b/go.mod h1:6fC5JFfw/DPbkaNFb13402F4eha0Yntc2F87gHtIdkA=
99 github.com/projectdiscovery/hmap v0.0.1/go.mod h1:VDEfgzkKQdq7iGTKz8Ooul0NuYHQ8qiDs6r8bPD1Sb0=
100 github.com/projectdiscovery/httputil v0.0.0-20210906072657-f3a099cb20bc h1:C0L6pUvVI+sPJSBaPQJEG/HjPtg8Mgs2vEpsdrl064A=
101 github.com/projectdiscovery/httputil v0.0.0-20210906072657-f3a099cb20bc/go.mod h1:BueJPSPWAX11IFS6bdAqTkekiIz5Fgco5LVc1kqO9L4=
102 github.com/projectdiscovery/ipranger v0.0.2/go.mod h1:kcAIk/lo5rW+IzUrFkeYyXnFJ+dKwYooEOHGVPP/RWE=
103 github.com/projectdiscovery/iputil v0.0.0-20220712175312-b9406f31cdd8 h1:HRqev12wKvcwK1fe4pSlMfQdPHo9LfTxuFeRN4f3tS4=
104 github.com/projectdiscovery/iputil v0.0.0-20220712175312-b9406f31cdd8/go.mod h1:vHRC+9exsfSbEngMKDl0xiWqkxlLk3lHQZpbS2yFT8U=
105 github.com/projectdiscovery/mapcidr v0.0.4/go.mod h1:ALOIj6ptkWujNoX8RdQwB2mZ+kAmKuLJBq9T5gR5wG0=
106 github.com/projectdiscovery/mapcidr v1.0.1 h1:eaLBRrImwlYXv8vbXTwR4sxoQqIxR3Y5k/Sd7HhTIII=
107 github.com/projectdiscovery/mapcidr v1.0.1/go.mod h1:/qxlpxXZQFFjHynSc9u5O0kUPzH46VskECiwLiz7/vw=
108 github.com/projectdiscovery/ratelimit v0.0.0-20221004232058-7b82379157fa h1:XTRcow+zyZUYxOlLsSp1AdM4Jl+i/OpPbOZPsfU/IAI=
109 github.com/projectdiscovery/ratelimit v0.0.0-20221004232058-7b82379157fa/go.mod h1:zenrIElIcKg0Y9h7pMfTlw5vaI/kCl8uxXm+PfgbBSw=
110 github.com/projectdiscovery/ratelimit v0.0.1 h1:GnCfbKmkLdDLXT3QS4KS0zCsuDGkoRQE0YDbTqzQmS8=
111 github.com/projectdiscovery/ratelimit v0.0.1/go.mod h1:zenrIElIcKg0Y9h7pMfTlw5vaI/kCl8uxXm+PfgbBSw=
112 github.com/projectdiscovery/retryabledns v1.0.15 h1:3Nn119UwYsfUPC3g0q57ftz0Wb5Zl5ppvw8R0Xu0DEI=
113 github.com/projectdiscovery/retryabledns v1.0.15/go.mod h1:3YbsQVqP7jbQ3CDmarhyVtkJaJ8XcB7S19vMeyMxZxk=
114 github.com/projectdiscovery/retryablehttp-go v1.0.2 h1:LV1/KAQU+yeWhNVlvveaYFsjBYRwXlNEq0PvrezMV0U=
115 github.com/projectdiscovery/retryablehttp-go v1.0.2/go.mod h1:dx//aY9V247qHdsRf0vdWHTBZuBQ2vm6Dq5dagxrDYI=
116 github.com/projectdiscovery/sliceutil v0.0.0-20220617151003-15892688e1d6/go.mod h1:9YZb6LRjLYAvSOm65v787dwauurixSyjlqXyYa4rTTA=
117 github.com/projectdiscovery/sliceutil v0.0.0-20220625085859-c3a4ecb669f4 h1:C04j5gVVMXqFyBIetAz92SyPRYCpkFgIwZw0L/pps9Q=
118 github.com/projectdiscovery/sliceutil v0.0.0-20220625085859-c3a4ecb669f4/go.mod h1:RxDaccMjPzIuF7F8XbdGl1yOcqxN4YPiHr9xHpfCkGI=
119 github.com/projectdiscovery/stringsutil v0.0.0-20210804142656-fd3c28dbaafe/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
120 github.com/projectdiscovery/stringsutil v0.0.0-20220422150559-b54fb5dc6833/go.mod h1:oTRc18WBv9t6BpaN9XBY+QmG28PUpsyDzRht56Qf49I=
121 github.com/projectdiscovery/stringsutil v0.0.0-20220612082425-0037ce9f89f3/go.mod h1:mF5sh4jTghoGWwgUb9qWi5waTFklClDbtrqtJU93awc=
122 github.com/projectdiscovery/stringsutil v0.0.0-20220731064040-4b67f194751e/go.mod h1:32NYmKyHkKsmisAOAaWrR15lz2ysz2M8x3KMeeoRHoU=
123 github.com/projectdiscovery/stringsutil v0.0.1 h1:a6TCMT+D1aUsoZxNiYf9O30wiDOoLOHDwj89HBjr5BQ=
124 github.com/projectdiscovery/stringsutil v0.0.1/go.mod h1:TDi2LEqR3OML0BxGoMbbfAHSk5AdfHX762Oc302sgmM=
125 github.com/rs/xid v1.4.0 h1:qd7wPTDkN6KQx2VmMBLrpHkiyQwgFXRnkOLacUiaSNY=
126 github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
127 github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca h1:NugYot0LIVPxTvN8n+Kvkn6TrbMyxQiuvKdEwFdR9vI=
128 github.com/saintfish/chardet v0.0.0-20120816061221-3af4cd4741ca/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
129 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
130 github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
131 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
132 github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
133 github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
134 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
135 github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
136 github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
137 github.com/stretchr/testify v1.7.3/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
138 github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
139 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
140 github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
141 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 h1:nrZ3ySNYwJbSpD6ce9duiP+QkD3JuLCcWkdaehUS/3Y=
142 github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80/go.mod h1:iFyPdL66DjUD96XmzVL3ZntbzcflLnznH0fr99w5VqE=
143 github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
144 github.com/ulikunitz/xz v0.5.7 h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4=
145 github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
146 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8 h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=
147 github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
148 github.com/yl2chen/cidranger v1.0.2 h1:lbOWZVCG1tCRX4u24kuM1Tb4nHqWkDxwLdoS+SevawU=
149 github.com/yl2chen/cidranger v1.0.2/go.mod h1:9U1yz7WPYDwf0vpNWFaeRh0bjwz5RVgRy/9UEQfHl0g=
150 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
151 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
152 go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
153 go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
154 go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
155 go.uber.org/multierr v1.8.0 h1:dg6GjLku4EH+249NNmoIciG9N/jURbDG+pFlTkhzIC8=
156 go.uber.org/multierr v1.8.0/go.mod h1:7EAYxJLBy9rStEaz58O2t4Uvip6FSURkq8/ppBp95ak=
157 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
158 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
159 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
160 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e h1:+WEEuIdZHnUeJJmEUjyYC2gfUMj69yZXw17EnHg/otA=
161 golang.org/x/exp v0.0.0-20220722155223-a9213eeb770e/go.mod h1:Kr81I6Kryrl9sr8s2FK3vxD90NdsKWRuOIl2O4CvYbA=
162 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
163 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
164 golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
165 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
166 golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
167 golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
168 golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
169 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
170 golang.org/x/net v0.0.0-20210521195947-fe42d452be8f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
171 golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
172 golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
173 golang.org/x/net v0.0.0-20220617184016-355a448f1bc9/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
174 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
175 golang.org/x/net v0.0.0-20220728211354-c7608f3a8462/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
176 golang.org/x/net v0.0.0-20220826154423-83b083e8dc8b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
177 golang.org/x/net v0.0.0-20220927171203-f486391704dc h1:FxpXZdoBqT8RjqTy6i1E8nXHhW21wK7ptQ/EPIGxzPQ=
178 golang.org/x/net v0.0.0-20220927171203-f486391704dc/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
179 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
180 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
181 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
182 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
183 golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
184 golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
185 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
186 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
187 golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
188 golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
189 golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
190 golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
191 golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
192 golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
193 golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
194 golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
195 golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
196 golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
197 golang.org/x/sys v0.0.0-20220731174439-a90be440212d h1:Sv5ogFZatcgIMMtBSTTAgMYsicp25MXBubjXNDKwm80=
198 golang.org/x/sys v0.0.0-20220731174439-a90be440212d/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
199 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
200 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
201 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
202 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
203 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
204 golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
205 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
206 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
207 golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
208 golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
209 golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
210 golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
211 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
212 golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
213 golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
214 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
215 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
216 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
217 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
218 gopkg.in/djherbis/times.v1 v1.2.0 h1:UCvDKl1L/fmBygl2Y7hubXCnY7t4Yj46ZrBFNUipFbM=
219 gopkg.in/djherbis/times.v1 v1.2.0/go.mod h1:AQlg6unIsrsCEdQYhTzERy542dz6SFdQFZFv6mUY0P8=
220 gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
221 gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
222 gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
223 gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
224 gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
225 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
226 gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
227 gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
228 gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
0 // Package passive provides capability for doing passive subdomain
1 // enumeration on targets.
2 package passive
0 package passive
1
2 import (
3 "context"
4 "fmt"
5 "sync"
6 "time"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 // EnumerateSubdomains enumerates all the subdomains for a given domain
13 func (a *Agent) EnumerateSubdomains(domain string, proxy string, rateLimit, timeout int, maxEnumTime time.Duration) chan subscraping.Result {
14 results := make(chan subscraping.Result)
15 go func() {
16 session, err := subscraping.NewSession(domain, proxy, rateLimit, timeout)
17 if err != nil {
18 results <- subscraping.Result{Type: subscraping.Error, Error: fmt.Errorf("could not init passive session for %s: %s", domain, err)}
19 }
20
21 ctx, cancel := context.WithTimeout(context.Background(), maxEnumTime)
22
23 timeTaken := make(map[string]string)
24 timeTakenMutex := &sync.Mutex{}
25
26 wg := &sync.WaitGroup{}
27 // Run each source in parallel on the target domain
28 for _, runner := range a.sources {
29 wg.Add(1)
30
31 now := time.Now()
32 go func(source subscraping.Source) {
33 for resp := range source.Run(ctx, domain, session) {
34 results <- resp
35 }
36
37 duration := time.Since(now)
38 timeTakenMutex.Lock()
39 timeTaken[source.Name()] = fmt.Sprintf("Source took %s for enumeration\n", duration)
40 timeTakenMutex.Unlock()
41
42 wg.Done()
43 }(runner)
44 }
45 wg.Wait()
46
47 for source, data := range timeTaken {
48 gologger.Verbose().Label(source).Msg(data)
49 }
50
51 close(results)
52 cancel()
53 }()
54
55 return results
56 }
0 package passive
1
2 import (
3 "fmt"
4 "strings"
5
6 "golang.org/x/exp/maps"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/alienvault"
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/anubis"
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/bevigil"
13 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/binaryedge"
14 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/bufferover"
15 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/c99"
16 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/censys"
17 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/certspotter"
18 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/chaos"
19 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/chinaz"
20 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/commoncrawl"
21 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/crtsh"
22 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/dnsdb"
23 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/dnsdumpster"
24 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/dnsrepo"
25 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/fofa"
26 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/fullhunt"
27 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/github"
28 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/hackertarget"
29 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/intelx"
30 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/passivetotal"
31 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/quake"
32 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/rapiddns"
33 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/riddler"
34 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/robtex"
35 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/securitytrails"
36 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/shodan"
37 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/sitedossier"
38 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/sonarsearch"
39 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/threatbook"
40 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/threatminer"
41 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/virustotal"
42 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/waybackarchive"
43 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/whoisxmlapi"
44 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/zoomeye"
45 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/zoomeyeapi"
46 )
47
48 var AllSources = [...]subscraping.Source{
49 &alienvault.Source{},
50 &anubis.Source{},
51 &bevigil.Source{},
52 &binaryedge.Source{},
53 &bufferover.Source{},
54 &c99.Source{},
55 &censys.Source{},
56 &certspotter.Source{},
57 &chaos.Source{},
58 &chinaz.Source{},
59 &commoncrawl.Source{},
60 &crtsh.Source{},
61 &dnsdb.Source{},
62 &dnsdumpster.Source{},
63 &fofa.Source{},
64 &fullhunt.Source{},
65 &github.Source{},
66 &hackertarget.Source{},
67 &intelx.Source{},
68 &passivetotal.Source{},
69 &quake.Source{},
70 &rapiddns.Source{},
71 &riddler.Source{},
72 &robtex.Source{},
73 &securitytrails.Source{},
74 &shodan.Source{},
75 &sitedossier.Source{},
76 &sonarsearch.Source{},
77 &threatbook.Source{},
78 &threatminer.Source{},
79 &virustotal.Source{},
80 &waybackarchive.Source{},
81 &whoisxmlapi.Source{},
82 &zoomeye.Source{},
83 &zoomeyeapi.Source{},
84 &dnsrepo.Source{},
85 }
86
87 var NameSourceMap = make(map[string]subscraping.Source, len(AllSources))
88
89 func init() {
90 for _, currentSource := range AllSources {
91 NameSourceMap[strings.ToLower(currentSource.Name())] = currentSource
92 }
93 }
94
95 // Agent is a struct for running passive subdomain enumeration
96 // against a given host. It wraps the subscraping package and provides
97 // a layer to build upon.
98 type Agent struct {
99 sources []subscraping.Source
100 }
101
102 // New creates a new agent for passive subdomain discovery
103 func New(sourceNames, excludedSourceNames []string, useAllSources, useSourcesSupportingRecurse bool) *Agent {
104 sources := make(map[string]subscraping.Source, len(AllSources))
105
106 if useAllSources {
107 maps.Copy(sources, NameSourceMap)
108 } else {
109 if len(sourceNames) > 0 {
110 for _, source := range sourceNames {
111 if NameSourceMap[source] == nil {
112 gologger.Warning().Msgf("There is no source with the name: '%s'", source)
113 } else {
114 sources[source] = NameSourceMap[source]
115 }
116 }
117 } else {
118 for _, currentSource := range AllSources {
119 if currentSource.IsDefault() {
120 sources[currentSource.Name()] = currentSource
121 }
122 }
123 }
124 }
125
126 if len(excludedSourceNames) > 0 {
127 for _, sourceName := range excludedSourceNames {
128 delete(sources, sourceName)
129 }
130 }
131
132 if useSourcesSupportingRecurse {
133 for sourceName, source := range sources {
134 if !source.HasRecursiveSupport() {
135 delete(sources, sourceName)
136 }
137 }
138 }
139
140 gologger.Debug().Msgf(fmt.Sprintf("Selected source(s) for this search: %s", strings.Join(maps.Keys(sources), ", ")))
141
142 // Create the agent with the selected sources (exclusions already applied above)
143 agent := &Agent{sources: maps.Values(sources)}
144
145 return agent
146 }
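Taken together with EnumerateSubdomains from the previous file, the Agent returned by New can be driven directly. A minimal consumer sketch, assuming the module is importable; the domain, timeout and enumeration budget are placeholder values.

package main

import (
	"fmt"
	"time"

	"github.com/projectdiscovery/subfinder/v2/pkg/passive"
	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

func main() {
	// No explicit includes or excludes; all-sources and recursive-only disabled,
	// so only the default sources are selected.
	agent := passive.New(nil, nil, false, false)

	// Placeholder values: no proxy, no rate limit, 30s per-source timeout,
	// 10 minute overall enumeration budget.
	results := agent.EnumerateSubdomains("example.com", "", 0, 30, 10*time.Minute)

	// The channel is closed once every source has finished, so ranging is safe.
	for result := range results {
		switch result.Type {
		case subscraping.Error:
			fmt.Printf("source %s failed: %v\n", result.Source, result.Error)
		case subscraping.Subdomain:
			fmt.Printf("%s (via %s)\n", result.Value, result.Source)
		}
	}
}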
0 package passive
1
2 import (
3 "fmt"
4 "strconv"
5 "testing"
6
7 "github.com/stretchr/testify/assert"
8 "golang.org/x/exp/maps"
9 )
10
11 var (
12 expectedAllSources = []string{
13 "alienvault",
14 "anubis",
15 "bevigil",
16 "binaryedge",
17 "bufferover",
18 "c99",
19 "censys",
20 "certspotter",
21 "chaos",
22 "chinaz",
23 "commoncrawl",
24 "crtsh",
25 "dnsdumpster",
26 "dnsdb",
27 "dnsrepo",
28 "fofa",
29 "fullhunt",
30 "github",
31 "hackertarget",
32 "intelx",
33 "passivetotal",
34 "quake",
35 "rapiddns",
36 "riddler",
37 "robtex",
38 "securitytrails",
39 "shodan",
40 "sitedossier",
41 "sonarsearch",
42 "threatbook",
43 "threatminer",
44 "virustotal",
45 "waybackarchive",
46 "whoisxmlapi",
47 "zoomeye",
48 "zoomeyeapi",
49 }
50
51 expectedDefaultSources = []string{
52 "alienvault",
53 "anubis",
54 "bevigil",
55 "bufferover",
56 "c99",
57 "certspotter",
58 "censys",
59 "chaos",
60 "chinaz",
61 "crtsh",
62 "dnsdumpster",
63 "dnsrepo",
64 "fofa",
65 "fullhunt",
66 "hackertarget",
67 "intelx",
68 "passivetotal",
69 "quake",
70 "robtex",
71 "riddler",
72 "securitytrails",
73 "shodan",
74 "threatminer",
75 "virustotal",
76 "whoisxmlapi",
77 }
78
79 expectedDefaultRecursiveSources = []string{
80 "alienvault",
81 "binaryedge",
82 "bufferover",
83 "certspotter",
84 "crtsh",
85 "dnsdumpster",
86 "hackertarget",
87 "passivetotal",
88 "securitytrails",
89 "sonarsearch",
90 "virustotal",
91 }
92 )
93
94 func TestSourceCategorization(t *testing.T) {
95 defaultSources := make([]string, 0, len(AllSources))
96 recursiveSources := make([]string, 0, len(AllSources))
97 for _, source := range AllSources {
98 sourceName := source.Name()
99 if source.IsDefault() {
100 defaultSources = append(defaultSources, sourceName)
101 }
102
103 if source.HasRecursiveSupport() {
104 recursiveSources = append(recursiveSources, sourceName)
105 }
106 }
107
108 assert.ElementsMatch(t, expectedDefaultSources, defaultSources)
109 assert.ElementsMatch(t, expectedDefaultRecursiveSources, recursiveSources)
110 assert.ElementsMatch(t, expectedAllSources, maps.Keys(NameSourceMap))
111 }
112
113 func TestSourceFiltering(t *testing.T) {
114 someSources := []string{
115 "alienvault",
116 "sonarsearch",
117 "chaos",
118 "virustotal",
119 }
120
121 someExclusions := []string{
122 "alienvault",
123 "virustotal",
124 }
125
126 tests := []struct {
127 sources []string
128 exclusions []string
129 withAllSources bool
130 withRecursion bool
131 expectedLength int
132 }{
133 {someSources, someExclusions, false, false, len(someSources) - len(someExclusions)},
134 {someSources, someExclusions, false, true, 1},
135 {someSources, someExclusions, true, false, len(AllSources) - len(someExclusions)},
136 {someSources, someExclusions, true, true, 9},
137
138 {someSources, []string{}, false, false, len(someSources)},
139 {someSources, []string{}, true, false, len(AllSources)},
140
141 {[]string{}, []string{}, false, false, len(expectedDefaultSources)},
142 {[]string{}, []string{}, false, true, 9},
143 {[]string{}, []string{}, true, false, len(AllSources)},
144 {[]string{}, []string{}, true, true, len(expectedDefaultRecursiveSources)},
145 }
146 for index, test := range tests {
147 t.Run(strconv.Itoa(index+1), func(t *testing.T) {
148 agent := New(test.sources, test.exclusions, test.withAllSources, test.withRecursion)
149
150 for _, v := range agent.sources {
151 fmt.Println(v.Name())
152 }
153
154 assert.Equal(t, test.expectedLength, len(agent.sources))
155 agent = nil
156 })
157 }
158 }
0 package passive
1
2 import (
3 "context"
4 "reflect"
5 "strings"
6 "testing"
7
8 "github.com/stretchr/testify/assert"
9
10 "github.com/projectdiscovery/gologger"
11 "github.com/projectdiscovery/gologger/levels"
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 func TestSourcesWithoutKeys(t *testing.T) {
16 domain := "hackerone.com"
17 timeout := 60
18
19 gologger.DefaultLogger.SetMaxLevel(levels.LevelDebug)
20
21 ctx := context.Background()
22 session, err := subscraping.NewSession(domain, "", 0, timeout)
23 assert.Nil(t, err)
24
25 var expected = subscraping.Result{Type: subscraping.Subdomain, Value: domain, Error: nil}
26
27 for _, source := range AllSources {
28 if source.NeedsKey() {
29 continue
30 }
31
32 t.Run(source.Name(), func(t *testing.T) {
33 var results []subscraping.Result
34
35 for result := range source.Run(ctx, domain, session) {
36 results = append(results, result)
37
38 assert.Equal(t, source.Name(), result.Source)
39
40 assert.Equal(t, expected.Type, result.Type)
41 assert.Equal(t, reflect.TypeOf(expected.Error), reflect.TypeOf(result.Error), result.Error)
42
43 assert.True(t, strings.HasSuffix(strings.ToLower(result.Value), strings.ToLower(expected.Value)))
44 }
45
46 assert.GreaterOrEqual(t, len(results), 1)
47 })
48 }
49 }
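The test above drives every keyless implementation of subscraping.Source against a live domain. For orientation, here is a rough sketch of what a minimal keyless source could look like; the method set and channel direction are inferred from how sources are invoked in this package, and the authoritative interface lives in pkg/subscraping, so treat this as illustrative rather than exact.

package example

import (
	"context"

	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

// Source is a hypothetical keyless source, used purely for illustration.
type Source struct{}

// Run streams results for the given domain and closes the channel when done.
func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
	results := make(chan subscraping.Result)
	go func() {
		defer close(results)
		// A real source would query its upstream API through the session and
		// extract subdomains from the response body.
		results <- subscraping.Result{
			Source: s.Name(), Type: subscraping.Subdomain, Value: "www." + domain,
		}
	}()
	return results
}

func (s *Source) Name() string              { return "example" }
func (s *Source) IsDefault() bool           { return false }
func (s *Source) HasRecursiveSupport() bool { return false }
func (s *Source) NeedsKey() bool            { return false }
func (s *Source) AddApiKeys(_ []string)     {}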
0 package resolve
1
2 import (
3 "github.com/projectdiscovery/dnsx/libs/dnsx"
4 )
5
6 // DefaultResolvers contains the default list of resolvers known to be good
7 var DefaultResolvers = []string{
8 "1.1.1.1:53", // Cloudflare primary
9 "1.0.0.1:53", // Cloudflare secondary
10 "8.8.8.8:53", // Google primary
11 "8.8.4.4:53", // Google secondary
12 "9.9.9.9:53", // Quad9 Primary
13 "9.9.9.10:53", // Quad9 Secondary
14 "77.88.8.8:53", // Yandex Primary
15 "77.88.8.1:53", // Yandex Secondary
16 "208.67.222.222:53", // OpenDNS Primary
17 "208.67.220.220:53", // OpenDNS Secondary
18 }
19
20 // Resolver is a struct for resolving DNS names
21 type Resolver struct {
22 DNSClient *dnsx.DNSX
23 Resolvers []string
24 }
25
26 // New creates a new resolver struct with the default resolvers
27 func New() *Resolver {
28 return &Resolver{
29 Resolvers: []string{},
30 }
31 }
0 // Package resolve is used to handle resolving records
1 // It also handles wildcard subdomains and rotating resolvers.
2 package resolve
0 package resolve
1
2 import (
3 "fmt"
4 "sync"
5
6 "github.com/rs/xid"
7 )
8
9 const (
10 maxWildcardChecks = 3
11 )
12
13 // ResolutionPool is a pool of resolvers created for resolving subdomains
14 // for a given host.
15 type ResolutionPool struct {
16 *Resolver
17 Tasks chan HostEntry
18 Results chan Result
19 wg *sync.WaitGroup
20 removeWildcard bool
21
22 wildcardIPs map[string]struct{}
23 }
24
25 // HostEntry defines a host with the source
26 type HostEntry struct {
27 Host string
28 Source string
29 }
30
31 // Result contains the result for a host resolution
32 type Result struct {
33 Type ResultType
34 Host string
35 IP string
36 Error error
37 Source string
38 }
39
40 // ResultType is the type of result found
41 type ResultType int
42
43 // Types of data result can return
44 const (
45 Subdomain ResultType = iota
46 Error
47 )
48
49 // NewResolutionPool creates a pool of resolvers for resolving subdomains of a given domain
50 func (r *Resolver) NewResolutionPool(workers int, removeWildcard bool) *ResolutionPool {
51 resolutionPool := &ResolutionPool{
52 Resolver: r,
53 Tasks: make(chan HostEntry),
54 Results: make(chan Result),
55 wg: &sync.WaitGroup{},
56 removeWildcard: removeWildcard,
57 wildcardIPs: make(map[string]struct{}),
58 }
59
60 go func() {
61 for i := 0; i < workers; i++ {
62 resolutionPool.wg.Add(1)
63 go resolutionPool.resolveWorker()
64 }
65 resolutionPool.wg.Wait()
66 close(resolutionPool.Results)
67 }()
68
69 return resolutionPool
70 }
71
72 // InitWildcards initializes the map of wildcard IPs for the given domain
73 func (r *ResolutionPool) InitWildcards(domain string) error {
74 for i := 0; i < maxWildcardChecks; i++ {
75 uid := xid.New().String()
76
77 hosts, _ := r.DNSClient.Lookup(uid + "." + domain)
78 if len(hosts) == 0 {
79 return fmt.Errorf("%s is not a wildcard domain", domain)
80 }
81
82 // Record all wildcard IPs found for the domain
83 for _, host := range hosts {
84 r.wildcardIPs[host] = struct{}{}
85 }
86 }
87 return nil
88 }
89
90 func (r *ResolutionPool) resolveWorker() {
91 for task := range r.Tasks {
92 if !r.removeWildcard {
93 r.Results <- Result{Type: Subdomain, Host: task.Host, IP: "", Source: task.Source}
94 continue
95 }
96
97 hosts, err := r.DNSClient.Lookup(task.Host)
98 if err != nil {
99 r.Results <- Result{Type: Error, Host: task.Host, Source: task.Source, Error: err}
100 continue
101 }
102
103 if len(hosts) == 0 {
104 continue
105 }
106
107 var skip bool
108 for _, host := range hosts {
109 // Ignore the host if it exists in wildcard ips map
110 if _, ok := r.wildcardIPs[host]; ok {
111 skip = true
112 break
113 }
114 }
115
116 if !skip {
117 r.Results <- Result{Type: Subdomain, Host: task.Host, IP: hosts[0], Source: task.Source}
118 }
119 }
120 r.wg.Done()
121 }
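A short sketch of wiring the pool end to end, mirroring how runner/initialize.go builds the dnsx client; the domain, worker count and task values are placeholder assumptions.

package main

import (
	"fmt"

	"github.com/projectdiscovery/dnsx/libs/dnsx"
	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
)

func main() {
	resolver := resolve.New()
	resolver.Resolvers = resolve.DefaultResolvers

	// Same dnsx construction as in runner/initialize.go.
	client, err := dnsx.New(dnsx.Options{BaseResolvers: resolver.Resolvers, MaxRetries: 5})
	if err != nil {
		panic(err)
	}
	resolver.DNSClient = client

	// 10 workers, with wildcard filtering enabled.
	pool := resolver.NewResolutionPool(10, true)
	if err := pool.InitWildcards("example.com"); err != nil {
		// An error here means no wildcard responses were detected; the pool
		// can still be used as-is.
		fmt.Println(err)
	}

	// Feed tasks and close the channel so the workers (and Results) terminate.
	go func() {
		pool.Tasks <- resolve.HostEntry{Host: "www.example.com", Source: "manual"}
		close(pool.Tasks)
	}()

	for result := range pool.Results {
		if result.Type == resolve.Subdomain {
			fmt.Printf("%s -> %s\n", result.Host, result.IP)
		}
	}
}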
0 package runner
1
2 import (
3 "github.com/projectdiscovery/gologger"
4 )
5
6 const banner = `
7 __ _____ __
8 _______ __/ /_ / __(_)___ ____/ /__ _____
9 / ___/ / / / __ \/ /_/ / __ \/ __ / _ \/ ___/
10 (__ ) /_/ / /_/ / __/ / / / / /_/ / __/ /
11 /____/\__,_/_.___/_/ /_/_/ /_/\__,_/\___/_/ v2.5.4
12 `
13
14 // Version is the current version of subfinder
15 const Version = `v2.5.4`
16
17 // showBanner is used to show the banner to the user
18 func showBanner() {
19 gologger.Print().Msgf("%s\n", banner)
20 gologger.Print().Msgf("\t\tprojectdiscovery.io\n\n")
21
22 gologger.Print().Msgf("Use with caution. You are responsible for your actions\n")
23 gologger.Print().Msgf("Developers assume no liability and are not responsible for any misuse or damage.\n")
24 gologger.Print().Msgf("By using subfinder, you also agree to the terms of the APIs used.\n\n")
25 }
0 package runner
1
2 import (
3 "os"
4 "strings"
5
6 "gopkg.in/yaml.v3"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/subfinder/v2/pkg/passive"
10 )
11
12 // GetConfigDirectory gets the subfinder config directory for a user
13 func GetConfigDirectory() (string, error) {
14 var config string
15
16 directory, err := os.UserHomeDir()
17 if err != nil {
18 return config, err
19 }
20 config = directory + "/.config/subfinder"
21
22 // Create the subfinder config directory if it does not already exist
23 err = os.MkdirAll(config, os.ModePerm)
24 if err != nil {
25 return config, err
26 }
27
28 return config, nil
29 }
30
31 // CreateProviderConfigYAML marshals the input map to the given location on the disk
32 func CreateProviderConfigYAML(configFilePath string, sourcesRequiringApiKeysMap map[string][]string) error {
33 configFile, err := os.Create(configFilePath)
34 if err != nil {
35 return err
36 }
37 defer configFile.Close()
38
39 return yaml.NewEncoder(configFile).Encode(sourcesRequiringApiKeysMap)
40 }
41
42 // UnmarshalFrom reads the provider config YAML from disk and registers the API keys with the sources that need them
43 func UnmarshalFrom(file string) error {
44 f, err := os.Open(file)
45 if err != nil {
46 return err
47 }
48 defer f.Close()
49
50 sourceApiKeysMap := map[string][]string{}
51 err = yaml.NewDecoder(f).Decode(sourceApiKeysMap)
52 for _, source := range passive.AllSources {
53 sourceName := strings.ToLower(source.Name())
54 apiKeys := sourceApiKeysMap[sourceName]
55 if source.NeedsKey() && len(apiKeys) > 0 {
56 gologger.Debug().Msgf("API key(s) found for %s.", sourceName)
57 source.AddApiKeys(apiKeys)
58 }
59 }
60 return err
61 }
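A minimal round-trip sketch of these two helpers; the path and key values are hypothetical placeholders, and the real file normally lives at ~/.config/subfinder/provider-config.yaml.

package main

import (
	"fmt"

	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
)

func main() {
	// Hypothetical location and keys, for illustration only.
	path := "/tmp/provider-config.yaml"
	keys := map[string][]string{
		"dnsrepo":        {"YOUR_DNSREPO_KEY"},
		"securitytrails": {"YOUR_SECURITYTRAILS_KEY"},
	}

	// Write the provider config to disk...
	if err := runner.CreateProviderConfigYAML(path, keys); err != nil {
		panic(err)
	}

	// ...and read it back, which registers the keys with each source that needs one.
	if err := runner.UnmarshalFrom(path); err != nil {
		panic(err)
	}
	fmt.Println("provider keys loaded")
}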
0 package runner
1
2 import (
3 "github.com/stretchr/testify/require"
4 "os"
5 "testing"
6 )
7
8 func TestConfigGetDirectory(t *testing.T) {
9 directory, err := GetConfigDirectory()
10 if err != nil {
11 t.Fatalf("Expected nil got %v while getting home\n", err)
12 }
13 home, err := os.UserHomeDir()
14 if err != nil {
15 t.Fatalf("Expected nil got %v while getting dir\n", err)
16 }
17 config := home + "/.config/subfinder"
18
19 require.Equal(t, directory, config, "Directory and config should be equal")
20 }
0 // Package runner implements the mechanism to drive the
1 // subdomain enumeration process
2 package runner
0 package runner
1
2 import (
3 "io"
4 "strings"
5 "sync"
6 "time"
7
8 "github.com/hako/durafmt"
9
10 "github.com/projectdiscovery/gologger"
11 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 const maxNumCount = 2
16
17 // EnumerateSingleDomain performs subdomain enumeration against a single domain
18 func (r *Runner) EnumerateSingleDomain(domain string, writers []io.Writer) error {
19 gologger.Info().Msgf("Enumerating subdomains for '%s'\n", domain)
20
21 // Check if the user has asked to remove wildcards explicitly.
22 // If yes, create the resolution pool and get the wildcards for the current domain
23 var resolutionPool *resolve.ResolutionPool
24 if r.options.RemoveWildcard {
25 resolutionPool = r.resolverClient.NewResolutionPool(r.options.Threads, r.options.RemoveWildcard)
26 err := resolutionPool.InitWildcards(domain)
27 if err != nil {
28 // Log the error but don't quit.
29 gologger.Warning().Msgf("Could not get wildcards for domain '%s': %s\n", domain, err)
30 }
31 }
32
33 // Run the passive subdomain enumeration
34 now := time.Now()
35 passiveResults := r.passiveAgent.EnumerateSubdomains(domain, r.options.Proxy, r.options.RateLimit, r.options.Timeout, time.Duration(r.options.MaxEnumerationTime)*time.Minute)
36
37 wg := &sync.WaitGroup{}
38 wg.Add(1)
39 // Create a unique map for filtering duplicate subdomains out
40 uniqueMap := make(map[string]resolve.HostEntry)
41 // Create a map to track sources for each host
42 sourceMap := make(map[string]map[string]struct{})
43 // Process the results in a separate goroutine
44 go func() {
45 for result := range passiveResults {
46 switch result.Type {
47 case subscraping.Error:
48 gologger.Warning().Msgf("Could not run source '%s': %s\n", result.Source, result.Error)
49 case subscraping.Subdomain:
50 // Validate the found subdomain and strip any wildcard prefix from it
51 if !strings.HasSuffix(result.Value, "."+domain) {
52 continue
53 }
54 subdomain := strings.ReplaceAll(strings.ToLower(result.Value), "*.", "")
55
56 if matchSubdomain := r.filterAndMatchSubdomain(subdomain); matchSubdomain {
57 if _, ok := uniqueMap[subdomain]; !ok {
58 sourceMap[subdomain] = make(map[string]struct{})
59 }
60
61 // Log the verbose message about the found subdomain per source
62 if _, ok := sourceMap[subdomain][result.Source]; !ok {
63 gologger.Verbose().Label(result.Source).Msg(subdomain)
64 }
65
66 sourceMap[subdomain][result.Source] = struct{}{}
67
68 // Check if the subdomain is a duplicate. If not,
69 // send the subdomain for resolution.
70 if _, ok := uniqueMap[subdomain]; ok {
71 continue
72 }
73
74 hostEntry := resolve.HostEntry{Host: subdomain, Source: result.Source}
75
76 uniqueMap[subdomain] = hostEntry
77 // If the user asked to remove wildcard then send on the resolve
78 // queue. Otherwise, if mode is not verbose print the results on
79 // the screen as they are discovered.
80 if r.options.RemoveWildcard {
81 resolutionPool.Tasks <- hostEntry
82 }
83 }
84 }
85 }
86 // Close the task channel only if wildcards are asked to be removed
87 if r.options.RemoveWildcard {
88 close(resolutionPool.Tasks)
89 }
90 wg.Done()
91 }()
92
93 // If the user asked to remove wildcards, listen on the results
94 // queue and write to the map. At the end, print the found results to the screen
95 foundResults := make(map[string]resolve.Result)
96 if r.options.RemoveWildcard {
97 // Process the results coming from the resolutions pool
98 for result := range resolutionPool.Results {
99 switch result.Type {
100 case resolve.Error:
101 gologger.Warning().Msgf("Could not resolve host: '%s'\n", result.Error)
102 case resolve.Subdomain:
103 // Add the found subdomain to a map.
104 if _, ok := foundResults[result.Host]; !ok {
105 foundResults[result.Host] = result
106 }
107 }
108 }
109 }
110 wg.Wait()
111 outputWriter := NewOutputWriter(r.options.JSON)
112 // Now output all results in output writers
113 var err error
114 for _, writer := range writers {
115 if r.options.HostIP {
116 err = outputWriter.WriteHostIP(domain, foundResults, writer)
117 } else {
118 if r.options.RemoveWildcard {
119 err = outputWriter.WriteHostNoWildcard(domain, foundResults, writer)
120 } else {
121 if r.options.CaptureSources {
122 err = outputWriter.WriteSourceHost(domain, sourceMap, writer)
123 } else {
124 err = outputWriter.WriteHost(domain, uniqueMap, writer)
125 }
126 }
127 }
128 if err != nil {
129 gologger.Error().Msgf("Could not write results for '%s': %s\n", domain, err)
130 return err
131 }
132 }
133
134 // Show found subdomain count in any case.
135 duration := durafmt.Parse(time.Since(now)).LimitFirstN(maxNumCount).String()
136 var numberOfSubDomains int
137 if r.options.RemoveWildcard {
138 numberOfSubDomains = len(foundResults)
139 } else {
140 numberOfSubDomains = len(uniqueMap)
141 }
142
143 gologger.Info().Msgf("Found %d subdomains for '%s' in %s\n", numberOfSubDomains, domain, duration)
144
145 return nil
146 }
147
148 func (r *Runner) filterAndMatchSubdomain(subdomain string) bool {
149 if r.options.filterRegexes != nil {
150 for _, filter := range r.options.filterRegexes {
151 if m := filter.MatchString(subdomain); m {
152 return false
153 }
154 }
155 }
156 if r.options.matchRegexes != nil {
157 for _, match := range r.options.matchRegexes {
158 if m := match.MatchString(subdomain); m {
159 return true
160 }
161 }
162 return false
163 }
164 return true
165 }
0 package runner
1
2 import (
3 "os"
4 "testing"
5
6 "github.com/stretchr/testify/require"
7 )
8
9 func TestFilterAndMatchSubdomain(t *testing.T) {
10 options := &Options{}
11 options.Domain = []string{"example.com"}
12 options.Threads = 10
13 options.Timeout = 10
14 options.Output = os.Stdout
15 t.Run("Literal Match", func(t *testing.T) {
16 options.Match = []string{"req.example.com"}
17 err := options.validateOptions()
18 if err != nil {
19 t.Fatalf("Expected nil got %v while validation\n", err)
20 }
21 runner, err := NewRunner(options)
22 if err != nil {
23 t.Fatalf("Expected nil got %v while creating runner\n", err)
24 }
25 match := runner.filterAndMatchSubdomain("req.example.com")
26 require.True(t, match, "Expecting a boolean True value ")
27 })
28 t.Run("Multiple Wildcards Match", func(t *testing.T) {
29 options.Match = []string{"*.ns.*.com"}
30 err := options.validateOptions()
31 if err != nil {
32 t.Fatalf("Expected nil got %v while validation\n", err)
33 }
34 runner, err := NewRunner(options)
35 if err != nil {
36 t.Fatalf("Expected nil got %v while creating runner\n", err)
37 }
38 subdomain := []string{"a.ns.example.com", "b.ns.hackerone.com"}
39 for _, sub := range subdomain {
40 match := runner.filterAndMatchSubdomain(sub)
41 require.True(t, match, "Expecting a boolean True value ")
42 }
43 })
44 t.Run("Sequential Match", func(t *testing.T) {
45 options.Match = []string{"*.ns.example.com", "*.hackerone.com"}
46 err := options.validateOptions()
47 if err != nil {
48 t.Fatalf("Expected nil got %v while validation\n", err)
49 }
50 runner, err := NewRunner(options)
51 if err != nil {
52 t.Fatalf("Expected nil got %v while creating runner\n", err)
53 }
54 subdomain := []string{"a.ns.example.com", "b.hackerone.com"}
55 for _, sub := range subdomain {
56 match := runner.filterAndMatchSubdomain(sub)
57 require.True(t, match, "Expecting a boolean True value ")
58 }
59 })
60 t.Run("Literal Filter", func(t *testing.T) {
61 options.Filter = []string{"req.example.com"}
62 err := options.validateOptions()
63 if err != nil {
64 t.Fatalf("Expected nil got %v while validation\n", err)
65 }
66 runner, err := NewRunner(options)
67 if err != nil {
68 t.Fatalf("Expected nil got %v while creating runner\n", err)
69 }
70 match := runner.filterAndMatchSubdomain("req.example.com")
71 require.False(t, match, "Expecting a boolean False value ")
72 })
73 t.Run("Multiple Wildcards Filter", func(t *testing.T) {
74 options.Filter = []string{"*.ns.*.com"}
75 err := options.validateOptions()
76 if err != nil {
77 t.Fatalf("Expected nil got %v while validation\n", err)
78 }
79 runner, err := NewRunner(options)
80 if err != nil {
81 t.Fatalf("Expected nil got %v while creating runner\n", err)
82 }
83 subdomain := []string{"a.ns.example.com", "b.ns.hackerone.com"}
84 for _, sub := range subdomain {
85 match := runner.filterAndMatchSubdomain(sub)
86 require.False(t, match, "Expecting a boolean False value ")
87 }
88 })
89 t.Run("Sequential Filter", func(t *testing.T) {
90 options.Filter = []string{"*.ns.example.com", "*.hackerone.com"}
91 err := options.validateOptions()
92 if err != nil {
93 t.Fatalf("Expected nil got %v while validation\n", err)
94 }
95 runner, err := NewRunner(options)
96 if err != nil {
97 t.Fatalf("Expected nil got %v while creating runner\n", err)
98 }
99 subdomain := []string{"a.ns.example.com", "b.hackerone.com"}
100 for _, sub := range subdomain {
101 match := runner.filterAndMatchSubdomain(sub)
102 require.False(t, match, "Expecting a boolean False value ")
103 }
104 })
105 t.Run("Filter and Match", func(t *testing.T) {
106 options.Filter = []string{"example.com"}
107 options.Match = []string{"hackerone.com"}
108 err := options.validateOptions()
109 if err != nil {
110 t.Fatalf("Expected nil got %v while validation\n", err)
111 }
112 runner, err := NewRunner(options)
113 if err != nil {
114 t.Fatalf("Expected nil got %v while creating runner\n", err)
115 }
116 subdomain := []string{"example.com", "example.com"}
117 for _, sub := range subdomain {
118 match := runner.filterAndMatchSubdomain(sub)
119 require.False(t, match, "Expecting a boolean False value ")
120 }
121 })
122
123 t.Run("Filter and Match - Same Root Domain", func(t *testing.T) {
124 options.Filter = []string{"example.com"}
125 options.Match = []string{"www.example.com"}
126 err := options.validateOptions()
127 if err != nil {
128 t.Fatalf("Expected nil got %v while validation\n", err)
129 }
130 runner, err := NewRunner(options)
131 if err != nil {
132 t.Fatalf("Expected nil got %v while creating runner\n", err)
133 }
134 subdomain := map[string]string{"filter": "example.com", "match": "www.example.com"}
135 for key, sub := range subdomain {
136 result := runner.filterAndMatchSubdomain(sub)
137 if key == "filter" {
138 require.False(t, result, "Expecting a boolean False value ")
139 } else {
140 require.True(t, result, "Expecting a boolean True value ")
141 }
142 }
143 })
144 }
0 package runner
1
2 import (
3 "net"
4 "strings"
5
6 "github.com/projectdiscovery/dnsx/libs/dnsx"
7 "github.com/projectdiscovery/subfinder/v2/pkg/passive"
8 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
9 )
10
11 // initializePassiveEngine creates the passive engine and loads sources etc
12 func (r *Runner) initializePassiveEngine() {
13 r.passiveAgent = passive.New(r.options.Sources, r.options.ExcludeSources, r.options.All, r.options.OnlyRecursive)
14 }
15
16 // initializeResolver creates the resolver used to resolve the found subdomains
17 func (r *Runner) initializeResolver() error {
18 var resolvers []string
19
20 // If the file has been provided, read resolvers from the file
21 if r.options.ResolverList != "" {
22 var err error
23 resolvers, err = loadFromFile(r.options.ResolverList)
24 if err != nil {
25 return err
26 }
27 }
28
29 if len(r.options.Resolvers) > 0 {
30 resolvers = append(resolvers, r.options.Resolvers...)
31 } else {
32 resolvers = append(resolvers, resolve.DefaultResolvers...)
33 }
34
35 // Add default 53 UDP port if missing
36 for i, resolver := range resolvers {
37 if !strings.Contains(resolver, ":") {
38 resolvers[i] = net.JoinHostPort(resolver, "53")
39 }
40 }
41
42 r.resolverClient = resolve.New()
43 var err error
44 r.resolverClient.DNSClient, err = dnsx.New(dnsx.Options{BaseResolvers: resolvers, MaxRetries: 5})
45 if err != nil {
46 return err
47 }
48
49 return nil
50 }
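
The resolver normalization above assumes that any resolver given without an explicit port speaks DNS on 53/udp. A minimal standalone sketch of just that step (placeholder resolver IPs, not part of the package):

package main

import (
	"fmt"
	"net"
	"strings"
)

func main() {
	// Same normalization as initializeResolver: append the default
	// DNS port 53 when a resolver is given without one.
	resolvers := []string{"1.1.1.1", "8.8.8.8:53", "9.9.9.9"}
	for i, resolver := range resolvers {
		if !strings.Contains(resolver, ":") {
			resolvers[i] = net.JoinHostPort(resolver, "53")
		}
	}
	fmt.Println(resolvers) // [1.1.1.1:53 8.8.8.8:53 9.9.9.9:53]
}
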
0 package runner
1
2 import (
3 "errors"
4 "fmt"
5 "io"
6 "math/rand"
7 "os"
8 "os/user"
9 "path/filepath"
10 "regexp"
11 "strings"
12 "time"
13
14 "gopkg.in/yaml.v3"
15
16 "github.com/projectdiscovery/fileutil"
17 "github.com/projectdiscovery/goflags"
18 "github.com/projectdiscovery/gologger"
19 "github.com/projectdiscovery/subfinder/v2/pkg/passive"
20 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
21 )
22
23 var (
24 defaultConfigLocation = filepath.Join(userHomeDir(), ".config/subfinder/config.yaml")
25 defaultProviderConfigLocation = filepath.Join(userHomeDir(), ".config/subfinder/provider-config.yaml")
26 )
27
28 // Options contains the configuration options for tuning
29 // the subdomain enumeration process.
30 type Options struct {
31 Verbose bool // Verbose flag indicates whether to show verbose output or not
32 NoColor bool // NoColor disables the colored output
33 JSON bool // JSON specifies whether to use json for output format or text file
34 HostIP bool // HostIP specifies whether to write subdomains in host:ip format
35 Silent bool // Silent suppresses any extra text and only writes subdomains to screen
36 ListSources bool // ListSources specifies whether to list all available sources
37 RemoveWildcard bool // RemoveWildcard specifies whether to remove potential wildcard or dead subdomains from the results.
38 CaptureSources bool // CaptureSources specifies whether to save all sources that returned a specific domain or just the first source
39 Stdin bool // Stdin specifies whether stdin input was given to the process
40 Version bool // Version specifies if we should just show version and exit
41 OnlyRecursive bool // OnlyRecursive specifies whether to use only recursive subdomain enumeration sources
42 All bool // All specifies whether to use all (slow) sources.
43 Threads int // Threads controls the number of threads to use for active enumerations
44 Timeout int // Timeout is the seconds to wait for sources to respond
45 MaxEnumerationTime int // MaxEnumerationTime is the maximum amount of time in minutes to wait for enumeration
46 Domain goflags.StringSlice // Domain is the domain to find subdomains for
47 DomainsFile string // DomainsFile is the file containing list of domains to find subdomains for
48 Output io.Writer
49 OutputFile string // OutputFile is the file to write found subdomains to.
50 OutputDirectory string // OutputDirectory is the directory to write results to in case list of domains is given
51 Sources goflags.StringSlice `yaml:"sources,omitempty"` // Sources contains a comma-separated list of sources to use for enumeration
52 ExcludeSources goflags.StringSlice `yaml:"exclude-sources,omitempty"` // ExcludeSources contains the comma-separated sources to not include in the enumeration process
53 Resolvers goflags.StringSlice `yaml:"resolvers,omitempty"` // Resolvers is the comma-separated resolvers to use for enumeration
54 ResolverList string // ResolverList is a text file containing list of resolvers to use for enumeration
55 Config string // Config contains the location of the config file
56 ProviderConfig string // ProviderConfig contains the location of the provider config file
57 Proxy string // HTTP proxy
58 RateLimit int // Maximum number of HTTP requests to send per second
59 ExcludeIps bool
60 Match goflags.StringSlice
61 Filter goflags.StringSlice
62 matchRegexes []*regexp.Regexp
63 filterRegexes []*regexp.Regexp
64 }
65
66 // ParseOptions parses the command line flags provided by a user
67 func ParseOptions() *Options {
68 // Seed default random number generator
69 rand.Seed(time.Now().UnixNano())
70
71 // Migrate config to provider config
72 if fileutil.FileExists(defaultConfigLocation) && !fileutil.FileExists(defaultProviderConfigLocation) {
73 gologger.Info().Msgf("Detected old '%s' config file, trying to migrate providers to '%s'\n", defaultConfigLocation, defaultProviderConfigLocation)
74 if err := migrateToProviderConfig(defaultConfigLocation, defaultProviderConfigLocation); err != nil {
75 gologger.Warning().Msgf("Could not migrate providers from existing config '%s' to provider config '%s': %s\n", defaultConfigLocation, defaultProviderConfigLocation, err)
76 } else {
77 // cleanup the existing config file post migration
78 _ = os.Remove(defaultConfigLocation)
79 gologger.Info().Msgf("Migration successful from '%s' to '%s'.\n", defaultConfigLocation, defaultProviderConfigLocation)
80 }
81 }
82
83 options := &Options{}
84
85 var err error
86 flagSet := goflags.NewFlagSet()
87 flagSet.SetDescription(`Subfinder is a subdomain discovery tool that discovers subdomains for websites by using passive online sources.`)
88
89 createGroup(flagSet, "input", "Input",
90 flagSet.StringSliceVarP(&options.Domain, "domain", "d", []string{}, "domains to find subdomains for", goflags.NormalizedStringSliceOptions),
91 flagSet.StringVarP(&options.DomainsFile, "list", "dL", "", "file containing list of domains for subdomain discovery"),
92 )
93
94 createGroup(flagSet, "source", "Source",
95 flagSet.StringSliceVarP(&options.Sources, "sources", "s", []string{}, "specific sources to use for discovery (-s crtsh,github). Use -ls to display all available sources.", goflags.NormalizedStringSliceOptions),
96 flagSet.BoolVar(&options.OnlyRecursive, "recursive", false, "use only sources that can handle subdomains recursively (e.g. subdomain.domain.tld vs domain.tld)"),
97 flagSet.BoolVar(&options.All, "all", false, "use all sources for enumeration (slow)"),
98 flagSet.StringSliceVarP(&options.ExcludeSources, "exclude-sources", "es", []string{}, "sources to exclude from enumeration (-es alienvault,zoomeye)", goflags.NormalizedStringSliceOptions),
99 )
100
101 createGroup(flagSet, "filter", "Filter",
102 flagSet.StringSliceVarP(&options.Match, "match", "m", []string{}, "subdomain or list of subdomains to match (file or comma separated)", goflags.FileNormalizedStringSliceOptions),
103 flagSet.StringSliceVarP(&options.Filter, "filter", "f", []string{}, "subdomain or list of subdomains to filter (file or comma separated)", goflags.FileNormalizedStringSliceOptions),
104 )
105
106 createGroup(flagSet, "rate-limit", "Rate-limit",
107 flagSet.IntVarP(&options.RateLimit, "rate-limit", "rl", 0, "maximum number of http requests to send per second"),
108 flagSet.IntVar(&options.Threads, "t", 10, "number of concurrent goroutines for resolving (-active only)"),
109 )
110
111 createGroup(flagSet, "output", "Output",
112 flagSet.StringVarP(&options.OutputFile, "output", "o", "", "file to write output to"),
113 flagSet.BoolVarP(&options.JSON, "json", "oJ", false, "write output in JSONL(ines) format"),
114 flagSet.StringVarP(&options.OutputDirectory, "output-dir", "oD", "", "directory to write output (-dL only)"),
115 flagSet.BoolVarP(&options.CaptureSources, "collect-sources", "cs", false, "include all sources in the output (-json only)"),
116 flagSet.BoolVarP(&options.HostIP, "ip", "oI", false, "include host IP in output (-active only)"),
117 )
118
119 createGroup(flagSet, "configuration", "Configuration",
120 flagSet.StringVar(&options.Config, "config", defaultConfigLocation, "flag config file"),
121 flagSet.StringVarP(&options.ProviderConfig, "provider-config", "pc", defaultProviderConfigLocation, "provider config file"),
122 flagSet.StringSliceVar(&options.Resolvers, "r", []string{}, "comma separated list of resolvers to use", goflags.NormalizedStringSliceOptions),
123 flagSet.StringVarP(&options.ResolverList, "rlist", "rL", "", "file containing list of resolvers to use"),
124 flagSet.BoolVarP(&options.RemoveWildcard, "active", "nW", false, "display active subdomains only"),
125 flagSet.StringVar(&options.Proxy, "proxy", "", "http proxy to use with subfinder"),
126 flagSet.BoolVarP(&options.ExcludeIps, "exclude-ip", "ei", false, "exclude IPs from the list of domains"),
127 )
128
129 createGroup(flagSet, "debug", "Debug",
130 flagSet.BoolVar(&options.Silent, "silent", false, "show only subdomains in output"),
131 flagSet.BoolVar(&options.Version, "version", false, "show version of subfinder"),
132 flagSet.BoolVar(&options.Verbose, "v", false, "show verbose output"),
133 flagSet.BoolVarP(&options.NoColor, "no-color", "nc", false, "disable color in output"),
134 flagSet.BoolVarP(&options.ListSources, "list-sources", "ls", false, "list all available sources"),
135 )
136
137 createGroup(flagSet, "optimization", "Optimization",
138 flagSet.IntVar(&options.Timeout, "timeout", 30, "seconds to wait before timing out"),
139 flagSet.IntVar(&options.MaxEnumerationTime, "max-time", 10, "minutes to wait for enumeration results"),
140 )
141
142 if err := flagSet.Parse(); err != nil {
143 fmt.Println(err.Error())
144 os.Exit(1)
145 }
146
147 if options.Config != defaultConfigLocation {
148 // An empty source file is not a fatal error
149 if err := flagSet.MergeConfigFile(options.Config); err != nil && !errors.Is(err, io.EOF) {
150 gologger.Fatal().Msgf("Could not read config: %s\n", err)
151 }
152 }
153
154 // Default output is stdout
155 options.Output = os.Stdout
156
157 // Check if stdin pipe was given
158 options.Stdin = hasStdin()
159
160 // Read the inputs and configure the logging
161 options.configureOutput()
162
163 if options.Version {
164 gologger.Info().Msgf("Current Version: %s\n", Version)
165 os.Exit(0)
166 }
167
168 options.preProcessOptions()
169
170 if !options.Silent {
171 showBanner()
172 }
173
174 // If the user supplied a provider configuration file, load keys from it
175 // Otherwise load the default provider config
176 if fileutil.FileExists(options.ProviderConfig) {
177 gologger.Info().Msgf("Loading provider config from '%s'", options.ProviderConfig)
178 options.loadProvidersFrom(options.ProviderConfig)
179 } else {
180 gologger.Info().Msgf("Loading provider config from the default location: '%s'", defaultProviderConfigLocation)
181 options.loadProvidersFrom(defaultProviderConfigLocation)
182 }
183 if options.ListSources {
184 listSources(options)
185 os.Exit(0)
186 }
187
188 // Validate the options passed by the user and if any
189 // invalid options have been used, exit.
190 err = options.validateOptions()
191 if err != nil {
192 gologger.Fatal().Msgf("Program exiting: %s\n", err)
193 }
194
195 return options
196 }
197
198 // loadProvidersFrom loads the provider API keys from the given provider config file
199 func (options *Options) loadProvidersFrom(location string) {
200 // todo: move elsewhere
201 if len(options.Resolvers) == 0 {
202 options.Resolvers = resolve.DefaultResolvers
203 }
204
205 // We don't bail out if the file doesn't exist because it will be created
206 // from the defaults at the end of options parsing via goflags.
207 if err := UnmarshalFrom(location); isFatalErr(err) && !errors.Is(err, os.ErrNotExist) {
208 gologger.Fatal().Msgf("Could not read providers from '%s': %s\n", location, err)
209 }
210 }
211
212 func migrateToProviderConfig(defaultConfigLocation, defaultProviderLocation string) error {
213 configs, err := unMarshalToLowerCaseMap(defaultConfigLocation)
214 if err != nil {
215 return err
216 }
217
218 sourcesRequiringApiKeysMap := make(map[string][]string)
219 for _, source := range passive.AllSources {
220 if source.NeedsKey() {
221 sourceName := strings.ToLower(source.Name())
222 if sourceKeys, ok := configs[sourceName]; ok {
223 sourcesRequiringApiKeysMap[sourceName] = sourceKeys
224 } else {
225 sourcesRequiringApiKeysMap[sourceName] = []string{}
226 }
227 }
228 }
229
230 return CreateProviderConfigYAML(defaultProviderLocation, sourcesRequiringApiKeysMap)
231 }
232
233 func unMarshalToLowerCaseMap(defaultConfigLocation string) (map[string][]string, error) {
234 defaultConfigFile, err := os.Open(defaultConfigLocation)
235 if err != nil {
236 return nil, err
237 }
238 defer defaultConfigFile.Close()
239
240 configs := map[string][]string{}
241 if err := yaml.NewDecoder(defaultConfigFile).Decode(configs); isFatalErr(err) {
242 return nil, err
243 }
244
245 for k, v := range configs {
246 configs[strings.ToLower(k)] = v
247 }
248 return configs, nil
249 }
250
251 func isFatalErr(err error) bool {
252 return err != nil && !errors.Is(err, io.EOF)
253 }
254
255 func hasStdin() bool {
256 stat, err := os.Stdin.Stat()
257 if err != nil {
258 return false
259 }
260
261 isPipedFromChrDev := (stat.Mode() & os.ModeCharDevice) == 0
262 isPipedFromFIFO := (stat.Mode() & os.ModeNamedPipe) != 0
263
264 return isPipedFromChrDev || isPipedFromFIFO
265 }
266
267 func listSources(options *Options) {
268 gologger.Info().Msgf("Current list of available sources. [%d]\n", len(passive.AllSources))
269 gologger.Info().Msgf("Sources marked with an * need key(s) or token(s) to work.\n")
270 gologger.Info().Msgf("You can modify '%s' to configure your keys/tokens.\n\n", options.ProviderConfig)
271
272 for _, source := range passive.AllSources {
273 message := "%s\n"
274 sourceName := source.Name()
275 if source.NeedsKey() {
276 message = "%s *\n"
277 }
278 gologger.Silent().Msgf(message, sourceName)
279 }
280 }
281
282 func createGroup(flagSet *goflags.FlagSet, groupName, description string, flags ...*goflags.FlagData) {
283 flagSet.SetGroup(groupName, description)
284 for _, currentFlag := range flags {
285 currentFlag.Group(groupName)
286 }
287 }
288
289 func (options *Options) preProcessOptions() {
290 for i, domain := range options.Domain {
291 options.Domain[i], _ = sanitize(domain)
292 }
293 }
294
295 func userHomeDir() string {
296 usr, err := user.Current()
297 if err != nil {
298 gologger.Fatal().Msgf("Could not get user home directory: %s\n", err)
299 }
300 return usr.HomeDir
301 }
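
The provider config consumed by loadProvidersFrom is simply the YAML encoding of the map[string][]string that migrateToProviderConfig hands to CreateProviderConfigYAML: each lower-cased source name maps to a list of credentials. A minimal sketch with placeholder keys; the token:secret pairing shown for censys is an assumption based on that source's two-field apiKey type, not something stated in this file:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// Placeholder credentials only; real values go in provider-config.yaml.
	providers := map[string][]string{
		"bevigil":    {"bevigil-api-key"},
		"binaryedge": {"binaryedge-api-key"},
		"censys":     {"censys-token:censys-secret"}, // assumed token:secret pairing
	}
	out, _ := yaml.Marshal(providers)
	fmt.Print(string(out))
	// bevigil:
	//     - bevigil-api-key
	// binaryedge:
	//     - binaryedge-api-key
	// censys:
	//     - censys-token:censys-secret
}
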
0 package runner
1
2 import (
3 "bufio"
4 "errors"
5 "io"
6 "os"
7 "path/filepath"
8 "strings"
9
10 jsoniter "github.com/json-iterator/go"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
13 )
14
15 // OutputWriter outputs content to writers.
16 type OutputWriter struct {
17 JSON bool
18 }
19
20 type jsonSourceResult struct {
21 Host string `json:"host"`
22 Input string `json:"input"`
23 Source string `json:"source"`
24 }
25
26 type jsonSourceIPResult struct {
27 Host string `json:"host"`
28 IP string `json:"ip"`
29 Input string `json:"input"`
30 Source string `json:"source"`
31 }
32
33 type jsonSourcesResult struct {
34 Host string `json:"host"`
35 Input string `json:"input"`
36 Sources []string `json:"sources"`
37 }
38
39 // NewOutputWriter creates a new OutputWriter
40 func NewOutputWriter(json bool) *OutputWriter {
41 return &OutputWriter{JSON: json}
42 }
43
44 func (o *OutputWriter) createFile(filename string, appendToFile bool) (*os.File, error) {
45 if filename == "" {
46 return nil, errors.New("empty filename")
47 }
48
49 dir := filepath.Dir(filename)
50
51 if dir != "" {
52 if _, err := os.Stat(dir); os.IsNotExist(err) {
53 err := os.MkdirAll(dir, os.ModePerm)
54 if err != nil {
55 return nil, err
56 }
57 }
58 }
59
60 var file *os.File
61 var err error
62 if appendToFile {
63 file, err = os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
64 } else {
65 file, err = os.Create(filename)
66 }
67 if err != nil {
68 return nil, err
69 }
70
71 return file, nil
72 }
73
74 // WriteHostIP writes the output list of subdomains and their resolved IPs to an io.Writer
75 func (o *OutputWriter) WriteHostIP(input string, results map[string]resolve.Result, writer io.Writer) error {
76 var err error
77 if o.JSON {
78 err = writeJSONHostIP(input, results, writer)
79 } else {
80 err = writePlainHostIP(input, results, writer)
81 }
82 return err
83 }
84
85 func writePlainHostIP(_ string, results map[string]resolve.Result, writer io.Writer) error {
86 bufwriter := bufio.NewWriter(writer)
87 sb := &strings.Builder{}
88
89 for _, result := range results {
90 sb.WriteString(result.Host)
91 sb.WriteString(",")
92 sb.WriteString(result.IP)
93 sb.WriteString(",")
94 sb.WriteString(result.Source)
95 sb.WriteString("\n")
96
97 _, err := bufwriter.WriteString(sb.String())
98 if err != nil {
99 bufwriter.Flush()
100 return err
101 }
102 sb.Reset()
103 }
104 return bufwriter.Flush()
105 }
106
107 func writeJSONHostIP(input string, results map[string]resolve.Result, writer io.Writer) error {
108 encoder := jsoniter.NewEncoder(writer)
109
110 var data jsonSourceIPResult
111
112 for _, result := range results {
113 data.Host = result.Host
114 data.IP = result.IP
115 data.Input = input
116 data.Source = result.Source
117
118 err := encoder.Encode(&data)
119 if err != nil {
120 return err
121 }
122 }
123 return nil
124 }
125
126 // WriteHostNoWildcard writes the output list of subdomains (used with the -nW flag) to an io.Writer
127 func (o *OutputWriter) WriteHostNoWildcard(input string, results map[string]resolve.Result, writer io.Writer) error {
128 hosts := make(map[string]resolve.HostEntry)
129 for host, result := range results {
130 hosts[host] = resolve.HostEntry{Host: result.Host, Source: result.Source}
131 }
132
133 return o.WriteHost(input, hosts, writer)
134 }
135
136 // WriteHost writes the output list of subdomains to an io.Writer
137 func (o *OutputWriter) WriteHost(input string, results map[string]resolve.HostEntry, writer io.Writer) error {
138 var err error
139 if o.JSON {
140 err = writeJSONHost(input, results, writer)
141 } else {
142 err = writePlainHost(input, results, writer)
143 }
144 return err
145 }
146
147 func writePlainHost(_ string, results map[string]resolve.HostEntry, writer io.Writer) error {
148 bufwriter := bufio.NewWriter(writer)
149 sb := &strings.Builder{}
150
151 for _, result := range results {
152 sb.WriteString(result.Host)
153 sb.WriteString("\n")
154
155 _, err := bufwriter.WriteString(sb.String())
156 if err != nil {
157 bufwriter.Flush()
158 return err
159 }
160 sb.Reset()
161 }
162 return bufwriter.Flush()
163 }
164
165 func writeJSONHost(input string, results map[string]resolve.HostEntry, writer io.Writer) error {
166 encoder := jsoniter.NewEncoder(writer)
167
168 var data jsonSourceResult
169 for _, result := range results {
170 data.Host = result.Host
171 data.Input = input
172 data.Source = result.Source
173 err := encoder.Encode(data)
174 if err != nil {
175 return err
176 }
177 }
178 return nil
179 }
180
181 // WriteSourceHost writes the output list of subdomains along with their sources to an io.Writer
182 func (o *OutputWriter) WriteSourceHost(input string, sourceMap map[string]map[string]struct{}, writer io.Writer) error {
183 var err error
184 if o.JSON {
185 err = writeSourceJSONHost(input, sourceMap, writer)
186 } else {
187 err = writeSourcePlainHost(input, sourceMap, writer)
188 }
189 return err
190 }
191
192 func writeSourceJSONHost(input string, sourceMap map[string]map[string]struct{}, writer io.Writer) error {
193 encoder := jsoniter.NewEncoder(writer)
194
195 var data jsonSourcesResult
196
197 for host, sources := range sourceMap {
198 data.Host = host
199 data.Input = input
200 keys := make([]string, 0, len(sources))
201 for source := range sources {
202 keys = append(keys, source)
203 }
204 data.Sources = keys
205
206 err := encoder.Encode(&data)
207 if err != nil {
208 return err
209 }
210 }
211 return nil
212 }
213
214 func writeSourcePlainHost(_ string, sourceMap map[string]map[string]struct{}, writer io.Writer) error {
215 bufwriter := bufio.NewWriter(writer)
216 sb := &strings.Builder{}
217
218 for host, sources := range sourceMap {
219 sb.WriteString(host)
220 sb.WriteString(",[")
221 sourcesString := ""
222 for source := range sources {
223 sourcesString += source + ","
224 }
225 sb.WriteString(strings.Trim(sourcesString, ", "))
226 sb.WriteString("]\n")
227
228 _, err := bufwriter.WriteString(sb.String())
229 if err != nil {
230 bufwriter.Flush()
231 return err
232 }
233 sb.Reset()
234 }
235 return bufwriter.Flush()
236 }
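
For reference, the writer above can be driven directly outside the runner. A minimal sketch (placeholder hosts) that emits one JSON line per discovered subdomain, matching the jsonSourceResult shape defined in this file:

package main

import (
	"os"

	"github.com/projectdiscovery/subfinder/v2/pkg/resolve"
	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
)

func main() {
	writer := runner.NewOutputWriter(true) // true => JSONL output
	results := map[string]resolve.HostEntry{
		"www.example.com": {Host: "www.example.com", Source: "alienvault"}, // placeholder result
	}
	_ = writer.WriteHost("example.com", results, os.Stdout)
	// prints: {"host":"www.example.com","input":"example.com","source":"alienvault"}
}
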
0 package runner
1
2 import (
3 "bufio"
4 "io"
5 "os"
6 "path"
7 "regexp"
8 "strings"
9
10 "github.com/pkg/errors"
11
12 "github.com/projectdiscovery/gologger"
13 "github.com/projectdiscovery/subfinder/v2/pkg/passive"
14 "github.com/projectdiscovery/subfinder/v2/pkg/resolve"
15 )
16
17 // Runner is an instance of the subdomain enumeration
18 // client used to orchestrate the whole process.
19 type Runner struct {
20 options *Options
21 passiveAgent *passive.Agent
22 resolverClient *resolve.Resolver
23 }
24
25 // NewRunner creates a new runner struct instance by parsing
26 // the configuration options, configuring sources, reading lists
27 // and setting up loggers, etc.
28 func NewRunner(options *Options) (*Runner, error) {
29 runner := &Runner{options: options}
30
31 // Initialize the passive subdomain enumeration engine
32 runner.initializePassiveEngine()
33
34 // Initialize the subdomain resolver
35 err := runner.initializeResolver()
36 if err != nil {
37 return nil, err
38 }
39
40 return runner, nil
41 }
42
43 // RunEnumeration runs the subdomain enumeration flow on the targets specified
44 func (r *Runner) RunEnumeration() error {
45 outputs := []io.Writer{r.options.Output}
46
47 if len(r.options.Domain) > 0 {
48 domainsReader := strings.NewReader(strings.Join(r.options.Domain, "\n"))
49 return r.EnumerateMultipleDomains(domainsReader, outputs)
50 }
51
52 // If a file with a list of domains has been provided, enumerate each of them
53 if r.options.DomainsFile != "" {
54 f, err := os.Open(r.options.DomainsFile)
55 if err != nil {
56 return err
57 }
58 err = r.EnumerateMultipleDomains(f, outputs)
59 f.Close()
60 return err
61 }
62
63 // If we have STDIN input, treat it as multiple domains
64 if r.options.Stdin {
65 return r.EnumerateMultipleDomains(os.Stdin, outputs)
66 }
67 return nil
68 }
69
70 // EnumerateMultipleDomains enumerates subdomains for multiple domains
71 // read from the given reader; enumeration stops at the first error encountered
72 func (r *Runner) EnumerateMultipleDomains(reader io.Reader, writers []io.Writer) error {
73 scanner := bufio.NewScanner(reader)
74 ip, _ := regexp.Compile(`^([0-9\.]+$)`)
75 for scanner.Scan() {
76 domain, err := sanitize(scanner.Text())
77 isIp := ip.MatchString(domain)
78 if errors.Is(err, ErrEmptyInput) || (r.options.ExcludeIps && isIp) {
79 continue
80 }
81
82 var file *os.File
83 // If the user has specified an output file, use that output file instead
84 // of creating a new output file for each domain. Else create a new file
85 // for each domain in the directory.
86 if r.options.OutputFile != "" {
87 outputWriter := NewOutputWriter(r.options.JSON)
88 file, err = outputWriter.createFile(r.options.OutputFile, true)
89 if err != nil {
90 gologger.Error().Msgf("Could not create file %s for %s: %s\n", r.options.OutputFile, domain, err)
91 return err
92 }
93
94 err = r.EnumerateSingleDomain(domain, append(writers, file))
95
96 file.Close()
97 } else if r.options.OutputDirectory != "" {
98 outputFile := path.Join(r.options.OutputDirectory, domain)
99 if r.options.JSON {
100 outputFile += ".json"
101 } else {
102 outputFile += ".txt"
103 }
104
105 outputWriter := NewOutputWriter(r.options.JSON)
106 file, err = outputWriter.createFile(outputFile, false)
107 if err != nil {
108 gologger.Error().Msgf("Could not create file %s for %s: %s\n", outputFile, domain, err)
109 return err
110 }
111
112 err = r.EnumerateSingleDomain(domain, append(writers, file))
113
114 file.Close()
115 } else {
116 err = r.EnumerateSingleDomain(domain, writers)
117 }
118 if err != nil {
119 return err
120 }
121 }
122 return nil
123 }
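
Putting the pieces together, the enumeration flow is Options, then NewRunner, then RunEnumeration. A minimal library-style sketch under the assumption that the exported API behaves as shown in the files above and that goflags.StringSlice is a plain string slice; the CLI path normally goes through ParseOptions, which also validates options and loads provider keys, and this sketch skips that:

package main

import (
	"os"

	"github.com/projectdiscovery/goflags"
	"github.com/projectdiscovery/gologger"
	"github.com/projectdiscovery/subfinder/v2/pkg/runner"
)

func main() {
	options := &runner.Options{
		Domain:             goflags.StringSlice{"example.com"}, // placeholder target
		Threads:            10,
		Timeout:            30,
		MaxEnumerationTime: 10,
		Output:             os.Stdout,
	}

	r, err := runner.NewRunner(options)
	if err != nil {
		gologger.Fatal().Msgf("Could not create runner: %s\n", err)
	}
	if err := r.RunEnumeration(); err != nil {
		gologger.Fatal().Msgf("Enumeration failed: %s\n", err)
	}
}
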
0 package runner
1
2 import (
3 "strings"
4
5 "github.com/pkg/errors"
6
7 "github.com/projectdiscovery/fileutil"
8 )
9
10 var (
11 ErrEmptyInput = errors.New("empty data")
12 )
13
14 func loadFromFile(file string) ([]string, error) {
15 chanItems, err := fileutil.ReadFile(file)
16 if err != nil {
17 return nil, err
18 }
19 var items []string
20 for item := range chanItems {
21 var err error
22 item, err = sanitize(item)
23 if errors.Is(err, ErrEmptyInput) {
24 continue
25 }
26 items = append(items, item)
27 }
28 return items, nil
29 }
30
31 func sanitize(data string) (string, error) {
32 data = strings.Trim(data, "\n\t\"' ")
33 if data == "" {
34 return "", ErrEmptyInput
35 }
36 return data, nil
37 }
0 package runner
1
2 import (
3 "errors"
4 "fmt"
5 "regexp"
6 "strings"
7
8 "github.com/projectdiscovery/gologger"
9 "github.com/projectdiscovery/gologger/formatter"
10 "github.com/projectdiscovery/gologger/levels"
11 )
12
13 // validateOptions validates the configuration options passed
14 func (options *Options) validateOptions() error {
15 // Check if domain, list of domains, or stdin info was provided.
16 // If none was provided, then return.
17 if len(options.Domain) == 0 && options.DomainsFile == "" && !options.Stdin {
18 return errors.New("no input list provided")
19 }
20
21 // Both verbose and silent flags were used
22 if options.Verbose && options.Silent {
23 return errors.New("both verbose and silent mode specified")
24 }
25
26 // Validate threads and options
27 if options.Threads == 0 {
28 return errors.New("threads cannot be zero")
29 }
30 if options.Timeout == 0 {
31 return errors.New("timeout cannot be zero")
32 }
33
34 // Always remove wildcard with hostip
35 if options.HostIP && !options.RemoveWildcard {
36 return errors.New("hostip flag must be used with RemoveWildcard option")
37 }
38
39 if options.Match != nil {
40 options.matchRegexes = make([]*regexp.Regexp, len(options.Match))
41 var err error
42 for i, re := range options.Match {
43 if options.matchRegexes[i], err = regexp.Compile(stripRegexString(re)); err != nil {
44 return errors.New("invalid value for match regex option")
45 }
46 }
47 }
48 if options.Filter != nil {
49 options.filterRegexes = make([]*regexp.Regexp, len(options.Filter))
50 var err error
51 for i, re := range options.Filter {
52 if options.filterRegexes[i], err = regexp.Compile(stripRegexString(re)); err != nil {
53 return errors.New("invalid value for filter regex option")
54 }
55 }
56 }
57 return nil
58 }
59 func stripRegexString(val string) string {
60 val = strings.ReplaceAll(val, ".", "\\.")
61 val = strings.ReplaceAll(val, "*", ".*")
62 return fmt.Sprint("^", val, "$")
63 }
64
65 // configureOutput configures the output on the screen
66 func (options *Options) configureOutput() {
67 // If the user desires verbose output, show verbose output
68 if options.Verbose {
69 gologger.DefaultLogger.SetMaxLevel(levels.LevelVerbose)
70 }
71 if options.NoColor {
72 gologger.DefaultLogger.SetFormatter(formatter.NewCLI(true))
73 }
74 if options.Silent {
75 gologger.DefaultLogger.SetMaxLevel(levels.LevelSilent)
76 }
77 }
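
The wildcard-to-regex conversion above is easiest to see with a concrete input. A standalone sketch repeating the same transformation on one of the filters used in the tests:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// same transformation as stripRegexString in the options validation above
func stripRegexString(val string) string {
	val = strings.ReplaceAll(val, ".", "\\.")
	val = strings.ReplaceAll(val, "*", ".*")
	return fmt.Sprint("^", val, "$")
}

func main() {
	pattern := stripRegexString("*.ns.example.com")
	fmt.Println(pattern) // ^.*\.ns\.example\.com$

	re := regexp.MustCompile(pattern)
	fmt.Println(re.MatchString("a.ns.example.com")) // true
	fmt.Println(re.MatchString("ns.example.com"))   // false
}
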
0 package subscraping
1
2 import (
3 "bytes"
4 "context"
5 "crypto/tls"
6 "fmt"
7 "io"
8 "net/http"
9 "net/url"
10 "time"
11
12 "github.com/corpix/uarand"
13 "github.com/projectdiscovery/ratelimit"
14
15 "github.com/projectdiscovery/gologger"
16 )
17
18 // NewSession creates a new session object for a domain
19 func NewSession(domain string, proxy string, rateLimit, timeout int) (*Session, error) {
20 Transport := &http.Transport{
21 MaxIdleConns: 100,
22 MaxIdleConnsPerHost: 100,
23 TLSClientConfig: &tls.Config{
24 InsecureSkipVerify: true,
25 },
26 }
27
28 // Add proxy
29 if proxy != "" {
30 proxyURL, err := url.Parse(proxy)
31 if err != nil {
32 // Log warning but continue anyway
33 gologger.Warning().Msgf("Invalid proxy provided: '%s'", proxy)
34 } else {
35 Transport.Proxy = http.ProxyURL(proxyURL)
36 }
37 }
38
39 client := &http.Client{
40 Transport: Transport,
41 Timeout: time.Duration(timeout) * time.Second,
42 }
43
44 session := &Session{Client: client}
45
46 // Initiate rate limit instance
47 if rateLimit > 0 {
48 session.RateLimiter = ratelimit.New(context.Background(), int64(rateLimit), time.Second)
49 } else {
50 session.RateLimiter = ratelimit.NewUnlimited(context.Background())
51 }
52
53 // Create a new extractor object for the current domain
54 extractor, err := NewSubdomainExtractor(domain)
55 session.Extractor = extractor
56
57 return session, err
58 }
59
60 // Get makes a GET request to a URL with extended parameters
61 func (s *Session) Get(ctx context.Context, getURL, cookies string, headers map[string]string) (*http.Response, error) {
62 return s.HTTPRequest(ctx, http.MethodGet, getURL, cookies, headers, nil, BasicAuth{})
63 }
64
65 // SimpleGet makes a simple GET request to a URL
66 func (s *Session) SimpleGet(ctx context.Context, getURL string) (*http.Response, error) {
67 return s.HTTPRequest(ctx, http.MethodGet, getURL, "", map[string]string{}, nil, BasicAuth{})
68 }
69
70 // Post makes a POST request to a URL with extended parameters
71 func (s *Session) Post(ctx context.Context, postURL, cookies string, headers map[string]string, body io.Reader) (*http.Response, error) {
72 return s.HTTPRequest(ctx, http.MethodPost, postURL, cookies, headers, body, BasicAuth{})
73 }
74
75 // SimplePost makes a simple POST request to a URL
76 func (s *Session) SimplePost(ctx context.Context, postURL, contentType string, body io.Reader) (*http.Response, error) {
77 return s.HTTPRequest(ctx, http.MethodPost, postURL, "", map[string]string{"Content-Type": contentType}, body, BasicAuth{})
78 }
79
80 // HTTPRequest makes any HTTP request to a URL with extended parameters
81 func (s *Session) HTTPRequest(ctx context.Context, method, requestURL, cookies string, headers map[string]string, body io.Reader, basicAuth BasicAuth) (*http.Response, error) {
82 req, err := http.NewRequestWithContext(ctx, method, requestURL, body)
83 if err != nil {
84 return nil, err
85 }
86
87 req.Header.Set("User-Agent", uarand.GetRandom())
88 req.Header.Set("Accept", "*/*")
89 req.Header.Set("Accept-Language", "en")
90 req.Header.Set("Connection", "close")
91
92 if basicAuth.Username != "" || basicAuth.Password != "" {
93 req.SetBasicAuth(basicAuth.Username, basicAuth.Password)
94 }
95
96 if cookies != "" {
97 req.Header.Set("Cookie", cookies)
98 }
99
100 for key, value := range headers {
101 req.Header.Set(key, value)
102 }
103
104 s.RateLimiter.Take()
105
106 return httpRequestWrapper(s.Client, req)
107 }
108
109 // DiscardHTTPResponse discards the response content by demand
110 func (s *Session) DiscardHTTPResponse(response *http.Response) {
111 if response != nil {
112 defer response.Body.Close()
113 _, err := io.Copy(io.Discard, response.Body)
114 if err != nil {
115 gologger.Warning().Msgf("Could not discard response body: %s\n", err)
116 return
117 }
118 }
119 }
120
121 func httpRequestWrapper(client *http.Client, request *http.Request) (*http.Response, error) {
122 response, err := client.Do(request)
123 if err != nil {
124 return nil, err
125 }
126
127 if response.StatusCode != http.StatusOK {
128 requestURL, _ := url.QueryUnescape(request.URL.String())
129
130 gologger.Debug().MsgFunc(func() string {
131 buffer := new(bytes.Buffer)
132 _, _ = buffer.ReadFrom(response.Body)
133 return fmt.Sprintf("Response for failed request against '%s':\n%s", requestURL, buffer.String())
134 })
135 return response, fmt.Errorf("unexpected status code %d received from '%s'", response.StatusCode, requestURL)
136 }
137 return response, nil
138 }
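
A hedged sketch of how the sources below use this session: create it once per target domain, issue rate-limited requests, and run the per-domain extractor over the response body. The query URL here is only an example endpoint, not taken from this file:

package main

import (
	"context"
	"fmt"
	"io"

	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

func main() {
	// 10 requests/second, 30 second timeout, no proxy; domain is a placeholder.
	session, err := subscraping.NewSession("example.com", "", 10, 30)
	if err != nil {
		panic(err)
	}

	resp, err := session.SimpleGet(context.Background(), "https://crt.sh/?q=%25.example.com&output=json")
	if err != nil {
		session.DiscardHTTPResponse(resp)
		return
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// Extractor is the per-domain subdomain regex built by NewSubdomainExtractor.
	for _, match := range session.Extractor.FindAllString(string(body), -1) {
		fmt.Println(match)
	}
}
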
0 // Package subscraping contains the logic of scraping agents
1 package subscraping
0 // Package alienvault logic
1 package alienvault
2
3 import (
4 "context"
5 "encoding/json"
6 "fmt"
7
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 type alienvaultResponse struct {
12 Detail string `json:"detail"`
13 Error string `json:"error"`
14 PassiveDNS []struct {
15 Hostname string `json:"hostname"`
16 } `json:"passive_dns"`
17 }
18
19 // Source is the passive scraping agent
20 type Source struct{}
21
22 // Run function returns all subdomains found with the service
23 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
24 results := make(chan subscraping.Result)
25
26 go func() {
27 defer close(results)
28
29 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://otx.alienvault.com/api/v1/indicators/domain/%s/passive_dns", domain))
30 if err != nil && resp == nil {
31 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
32 session.DiscardHTTPResponse(resp)
33 return
34 }
35
36 var response alienvaultResponse
37 // Get the response body and decode
38 err = json.NewDecoder(resp.Body).Decode(&response)
39 if err != nil {
40 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
41 resp.Body.Close()
42 return
43 }
44 resp.Body.Close()
45
46 if response.Error != "" {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s, %s", response.Detail, response.Error)}
48 return
49 }
50
51 for _, record := range response.PassiveDNS {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record.Hostname}
53 }
54 }()
55
56 return results
57 }
58
59 // Name returns the name of the source
60 func (s *Source) Name() string {
61 return "alienvault"
62 }
63
64 func (s *Source) IsDefault() bool {
65 return true
66 }
67
68 func (s *Source) HasRecursiveSupport() bool {
69 return true
70 }
71
72 func (s *Source) NeedsKey() bool {
73 return false
74 }
75
76 func (s *Source) AddApiKeys(_ []string) {
77 // no key needed
78 }
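
Every source in this package follows the same contract: Run returns a channel of Results that is closed once the source finishes. A minimal sketch consuming the alienvault source directly; the import path assumes the repository's usual pkg/subscraping/sources layout:

package main

import (
	"context"
	"fmt"

	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping/sources/alienvault"
)

func main() {
	session, err := subscraping.NewSession("example.com", "", 0, 30) // placeholder domain, unlimited rate
	if err != nil {
		panic(err)
	}

	src := &alienvault.Source{}
	for result := range src.Run(context.Background(), "example.com", session) {
		switch result.Type {
		case subscraping.Subdomain:
			fmt.Println(result.Value)
		case subscraping.Error:
			fmt.Println("error:", result.Error)
		}
	}
}
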
0 // Package anubis logic
1 package anubis
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 // Source is the passive scraping agent
13 type Source struct{}
14
15 // Run function returns all subdomains found with the service
16 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
17 results := make(chan subscraping.Result)
18
19 go func() {
20 defer close(results)
21
22 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://jonlu.ca/anubis/subdomains/%s", domain))
23 if err != nil {
24 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
25 session.DiscardHTTPResponse(resp)
26 return
27 }
28
29 var subdomains []string
30 err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
31 if err != nil {
32 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
33 resp.Body.Close()
34 return
35 }
36
37 resp.Body.Close()
38
39 for _, record := range subdomains {
40 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record}
41 }
42 }()
43
44 return results
45 }
46
47 // Name returns the name of the source
48 func (s *Source) Name() string {
49 return "anubis"
50 }
51
52 func (s *Source) IsDefault() bool {
53 return true
54 }
55
56 func (s *Source) HasRecursiveSupport() bool {
57 return false
58 }
59
60 func (s *Source) NeedsKey() bool {
61 return false
62 }
63
64 func (s *Source) AddApiKeys(_ []string) {
65 // no key needed
66 }
0 // Package bevigil logic
1 package bevigil
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 type Response struct {
13 Domain string `json:"domain"`
14 Subdomains []string `json:"subdomains"`
15 }
16
17 type Source struct {
18 apiKeys []string
19 }
20
21 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
22 results := make(chan subscraping.Result)
23 go func() {
24 defer close(results)
25
26 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
27 if randomApiKey == "" {
28 return
29 }
30
31 getUrl := fmt.Sprintf("https://osint.bevigil.com/api/%s/subdomains/", domain)
32
33 resp, err := session.Get(ctx, getUrl, "", map[string]string{"X-Access-Token": randomApiKey, "User-Agent": "subfinder"})
34 if err != nil {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
36 session.DiscardHTTPResponse(resp)
37 return
38 }
39
40 var subdomains []string
41 var response Response
42 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
43 if err != nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 resp.Body.Close()
46 return
47 }
48
49 resp.Body.Close()
50
51 if len(response.Subdomains) > 0 {
52 subdomains = response.Subdomains
53 }
54
55 for _, subdomain := range subdomains {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
57 }
58 }()
59
60 return results
61 }
62
63 func (s *Source) Name() string {
64 return "bevigil"
65 }
66
67 func (s *Source) IsDefault() bool {
68 return true
69 }
70
71 func (s *Source) HasRecursiveSupport() bool {
72 return false
73 }
74
75 func (s *Source) NeedsKey() bool {
76 return true
77 }
78
79 func (s *Source) AddApiKeys(keys []string) {
80 s.apiKeys = keys
81 }
0 // Package binaryedge logic
1 package binaryedge
2
3 import (
4 "context"
5 "fmt"
6 "math"
7 "net/url"
8 "strconv"
9
10 jsoniter "github.com/json-iterator/go"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 const (
16 v1 = "v1"
17 v2 = "v2"
18 baseAPIURLFmt = "https://api.binaryedge.io/%s/query/domains/subdomain/%s"
19 v2SubscriptionURL = "https://api.binaryedge.io/v2/user/subscription"
20 v1PageSizeParam = "pagesize"
21 pageParam = "page"
22 firstPage = 1
23 maxV1PageSize = 10000
24 )
25
26 type subdomainsResponse struct {
27 Message string `json:"message"`
28 Title string `json:"title"`
29 Status interface{} `json:"status"` // string for v1, int for v2
30 Subdomains []string `json:"events"`
31 Page int `json:"page"`
32 PageSize int `json:"pagesize"`
33 Total int `json:"total"`
34 }
35
36 // Source is the passive scraping agent
37 type Source struct {
38 apiKeys []string
39 }
40
41 // Run function returns all subdomains found with the service
42 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
43 results := make(chan subscraping.Result)
44
45 go func() {
46 defer close(results)
47
48 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
49 if randomApiKey == "" {
50 return
51 }
52
53 var baseURL string
54
55 authHeader := map[string]string{"X-Key": randomApiKey}
56
57 if isV2(ctx, session, authHeader) {
58 baseURL = fmt.Sprintf(baseAPIURLFmt, v2, domain)
59 } else {
60 authHeader = map[string]string{"X-Token": randomApiKey}
61 v1URLWithPageSize, err := addURLParam(fmt.Sprintf(baseAPIURLFmt, v1, domain), v1PageSizeParam, strconv.Itoa(maxV1PageSize))
62 if err != nil {
63 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
64 return
65 }
66 baseURL = v1URLWithPageSize.String()
67 }
68
69 if baseURL == "" {
70 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("can't get API URL")}
71 return
72 }
73
74 s.enumerate(ctx, session, baseURL, firstPage, authHeader, results)
75 }()
76
77 return results
78 }
79
80 func (s *Source) enumerate(ctx context.Context, session *subscraping.Session, baseURL string, page int, authHeader map[string]string, results chan subscraping.Result) {
81 pageURL, err := addURLParam(baseURL, pageParam, strconv.Itoa(page))
82 if err != nil {
83 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
84 return
85 }
86
87 resp, err := session.Get(ctx, pageURL.String(), "", authHeader)
88 if err != nil && resp == nil {
89 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
90 session.DiscardHTTPResponse(resp)
91 return
92 }
93
94 var response subdomainsResponse
95 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
96 if err != nil {
97 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
98 resp.Body.Close()
99 return
100 }
101
102 // Check error messages
103 if response.Message != "" && response.Status != nil {
104 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message)}
105 }
106
107 resp.Body.Close()
108
109 for _, subdomain := range response.Subdomains {
110 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
111 }
112
113 totalPages := int(math.Ceil(float64(response.Total) / float64(response.PageSize)))
114 nextPage := response.Page + 1
115 for currentPage := nextPage; currentPage <= totalPages; currentPage++ {
116 s.enumerate(ctx, session, baseURL, currentPage, authHeader, results)
117 }
118 }
119
120 // Name returns the name of the source
121 func (s *Source) Name() string {
122 return "binaryedge"
123 }
124
125 func (s *Source) IsDefault() bool {
126 return false
127 }
128
129 func (s *Source) HasRecursiveSupport() bool {
130 return true
131 }
132
133 func (s *Source) NeedsKey() bool {
134 return true
135 }
136
137 func (s *Source) AddApiKeys(keys []string) {
138 s.apiKeys = keys
139 }
140
141 func isV2(ctx context.Context, session *subscraping.Session, authHeader map[string]string) bool {
142 resp, err := session.Get(ctx, v2SubscriptionURL, "", authHeader)
143 if err != nil {
144 session.DiscardHTTPResponse(resp)
145 return false
146 }
147
148 resp.Body.Close()
149
150 return true
151 }
152
153 func addURLParam(targetURL, name, value string) (*url.URL, error) {
154 u, err := url.Parse(targetURL)
155 if err != nil {
156 return u, err
157 }
158 q, _ := url.ParseQuery(u.RawQuery)
159 q.Add(name, value)
160 u.RawQuery = q.Encode()
161
162 return u, nil
163 }
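
The paging in enumerate above is a plain ceiling division over the total reported by the API. A small worked example with assumed numbers:

package main

import (
	"fmt"
	"math"
)

func main() {
	// Same arithmetic as the enumerate method: with total = 25000 results
	// and pagesize = 10000, three pages must be fetched in all.
	total, pageSize := 25000, 10000
	totalPages := int(math.Ceil(float64(total) / float64(pageSize)))
	fmt.Println(totalPages) // 3
}
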
0 // Package bufferover contains the logic for scraping the bufferover.run service
1 package bufferover
2
3 import (
4 "context"
5 "fmt"
6 "strings"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 type response struct {
14 Meta struct {
15 Errors []string `json:"Errors"`
16 } `json:"Meta"`
17 FDNSA []string `json:"FDNS_A"`
18 RDNS []string `json:"RDNS"`
19 Results []string `json:"Results"`
20 }
21
22 // Source is the passive scraping agent
23 type Source struct {
24 apiKeys []string
25 }
26
27 // Run function returns all subdomains found with the service
28 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
29 results := make(chan subscraping.Result)
30
31 go func() {
32 defer close(results)
33
34 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
35 if randomApiKey == "" {
36 return
37 }
38
39 s.getData(ctx, fmt.Sprintf("https://tls.bufferover.run/dns?q=.%s", domain), randomApiKey, session, results)
40 }()
41
42 return results
43 }
44
45 func (s *Source) getData(ctx context.Context, sourceURL string, apiKey string, session *subscraping.Session, results chan subscraping.Result) {
46 resp, err := session.Get(ctx, sourceURL, "", map[string]string{"x-api-key": apiKey})
47
48 if err != nil && resp == nil {
49 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
50 session.DiscardHTTPResponse(resp)
51 return
52 }
53
54 var bufforesponse response
55 err = jsoniter.NewDecoder(resp.Body).Decode(&bufforesponse)
56 if err != nil {
57 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
58 resp.Body.Close()
59 return
60 }
61
62 resp.Body.Close()
63
64 metaErrors := bufforesponse.Meta.Errors
65
66 if len(metaErrors) > 0 {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", strings.Join(metaErrors, ", "))}
68 return
69 }
70
71 var subdomains []string
72
73 if len(bufforesponse.FDNSA) > 0 {
74 subdomains = bufforesponse.FDNSA
75 subdomains = append(subdomains, bufforesponse.RDNS...)
76 } else if len(bufforesponse.Results) > 0 {
77 subdomains = bufforesponse.Results
78 }
79
80 for _, subdomain := range subdomains {
81 for _, value := range session.Extractor.FindAllString(subdomain, -1) {
82 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: value}
83 }
84 }
85 }
86
87 // Name returns the name of the source
88 func (s *Source) Name() string {
89 return "bufferover"
90 }
91
92 func (s *Source) IsDefault() bool {
93 return true
94 }
95
96 func (s *Source) HasRecursiveSupport() bool {
97 return true
98 }
99
100 func (s *Source) NeedsKey() bool {
101 return true
102 }
103
104 func (s *Source) AddApiKeys(keys []string) {
105 s.apiKeys = keys
106 }
0 // Package c99 logic
1 package c99
2
3 import (
4 "context"
5 "fmt"
6 "strings"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 // Source is the passive scraping agent
14 type Source struct {
15 apiKeys []string
16 }
17
18 type dnsdbLookupResponse struct {
19 Success bool `json:"success"`
20 Subdomains []struct {
21 Subdomain string `json:"subdomain"`
22 IP string `json:"ip"`
23 Cloudflare bool `json:"cloudflare"`
24 } `json:"subdomains"`
25 Error string `json:"error"`
26 }
27
28 // Run function returns all subdomains found with the service
29 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
30 results := make(chan subscraping.Result)
31
32 go func() {
33 defer close(results)
34
35 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
36 if randomApiKey == "" {
37 return
38 }
39
40 searchURL := fmt.Sprintf("https://api.c99.nl/subdomainfinder?key=%s&domain=%s&json", randomApiKey, domain)
41 resp, err := session.SimpleGet(ctx, searchURL)
42 if err != nil {
43 session.DiscardHTTPResponse(resp)
44 return
45 }
46
47 defer resp.Body.Close()
48
49 var response dnsdbLookupResponse
50 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
51 if err != nil {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
53 return
54 }
55
56 if response.Error != "" {
57 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%v", response.Error)}
58 return
59 }
60
61 for _, data := range response.Subdomains {
62 if !strings.HasPrefix(data.Subdomain, ".") {
63 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: data.Subdomain}
64 }
65 }
66 }()
67
68 return results
69 }
70
71 // Name returns the name of the source
72 func (s *Source) Name() string {
73 return "c99"
74 }
75
76 func (s *Source) IsDefault() bool {
77 return true
78 }
79
80 func (s *Source) HasRecursiveSupport() bool {
81 return false
82 }
83
84 func (s *Source) NeedsKey() bool {
85 return true
86 }
87
88 func (s *Source) AddApiKeys(keys []string) {
89 s.apiKeys = keys
90 }
0 // Package censys logic
1 package censys
2
3 import (
4 "bytes"
5 "context"
6 "strconv"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 const maxCensysPages = 10
14
15 type resultsq struct {
16 Data []string `json:"parsed.extensions.subject_alt_name.dns_names"`
17 Data1 []string `json:"parsed.names"`
18 }
19
20 type response struct {
21 Results []resultsq `json:"results"`
22 Metadata struct {
23 Pages int `json:"pages"`
24 } `json:"metadata"`
25 }
26
27 // Source is the passive scraping agent
28 type Source struct {
29 apiKeys []apiKey
30 }
31
32 type apiKey struct {
33 token string
34 secret string
35 }
36
37 // Run function returns all subdomains found with the service
38 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
39 results := make(chan subscraping.Result)
40
41 go func() {
42 defer close(results)
43
44 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
45 if randomApiKey.token == "" || randomApiKey.secret == "" {
46 return
47 }
48
49 currentPage := 1
50 for {
51 var request = []byte(`{"query":"` + domain + `", "page":` + strconv.Itoa(currentPage) + `, "fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"], "flatten":true}`)
52
53 resp, err := session.HTTPRequest(
54 ctx,
55 "POST",
56 "https://search.censys.io/api/v1/search/certificates",
57 "",
58 map[string]string{"Content-Type": "application/json", "Accept": "application/json"},
59 bytes.NewReader(request),
60 subscraping.BasicAuth{Username: randomApiKey.token, Password: randomApiKey.secret},
61 )
62
63 if err != nil {
64 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
65 session.DiscardHTTPResponse(resp)
66 return
67 }
68
69 var censysResponse response
70 err = jsoniter.NewDecoder(resp.Body).Decode(&censysResponse)
71 if err != nil {
72 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
73 resp.Body.Close()
74 return
75 }
76
77 resp.Body.Close()
78
79 for _, res := range censysResponse.Results {
80 for _, part := range res.Data {
81 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
82 }
83 for _, part := range res.Data1 {
84 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: part}
85 }
86 }
87
88 // Exit the censys enumeration if max pages is reached
89 if currentPage >= censysResponse.Metadata.Pages || currentPage >= maxCensysPages {
90 break
91 }
92
93 currentPage++
94 }
95 }()
96
97 return results
98 }
99
100 // Name returns the name of the source
101 func (s *Source) Name() string {
102 return "censys"
103 }
104
105 func (s *Source) IsDefault() bool {
106 return true
107 }
108
109 func (s *Source) HasRecursiveSupport() bool {
110 return false
111 }
112
113 func (s *Source) NeedsKey() bool {
114 return true
115 }
116
117 func (s *Source) AddApiKeys(keys []string) {
118 s.apiKeys = subscraping.CreateApiKeys(keys, func(k, v string) apiKey {
119 return apiKey{k, v}
120 })
121 }
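
The search body above is assembled by string concatenation. An equivalent sketch built with encoding/json (field names copied from the literal above, values are placeholders) produces the same payload; this is only an illustration of the request shape, not how the source itself builds it:

package main

import (
	"encoding/json"
	"fmt"
)

// searchRequest mirrors the JSON body built by string concatenation above.
type searchRequest struct {
	Query   string   `json:"query"`
	Page    int      `json:"page"`
	Fields  []string `json:"fields"`
	Flatten bool     `json:"flatten"`
}

func main() {
	body, _ := json.Marshal(searchRequest{
		Query:   "example.com", // placeholder domain
		Page:    1,
		Fields:  []string{"parsed.names", "parsed.extensions.subject_alt_name.dns_names"},
		Flatten: true,
	})
	fmt.Println(string(body))
	// {"query":"example.com","page":1,"fields":["parsed.names","parsed.extensions.subject_alt_name.dns_names"],"flatten":true}
}
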
0 // Package certspotter logic
1 package certspotter
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 type certspotterObject struct {
13 ID string `json:"id"`
14 DNSNames []string `json:"dns_names"`
15 }
16
17 // Source is the passive scraping agent
18 type Source struct {
19 apiKeys []string
20 }
21
22 // Run function returns all subdomains found with the service
23 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
24 results := make(chan subscraping.Result)
25
26 go func() {
27 defer close(results)
28
29 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
30 if randomApiKey == "" {
31 return
32 }
33
34 headers := map[string]string{"Authorization": "Bearer " + randomApiKey}
35 cookies := ""
36
37 resp, err := session.Get(ctx, fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names", domain), cookies, headers)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 session.DiscardHTTPResponse(resp)
41 return
42 }
43
44 var response []certspotterObject
45 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 resp.Body.Close()
49 return
50 }
51 resp.Body.Close()
52
53 for _, cert := range response {
54 for _, subdomain := range cert.DNSNames {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
56 }
57 }
58
59 // if the number of responses is zero, close the channel and return.
60 if len(response) == 0 {
61 return
62 }
63
64 id := response[len(response)-1].ID
65 for {
66 reqURL := fmt.Sprintf("https://api.certspotter.com/v1/issuances?domain=%s&include_subdomains=true&expand=dns_names&after=%s", domain, id)
67
68 resp, err := session.Get(ctx, reqURL, cookies, headers)
69 if err != nil {
70 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
71 return
72 }
73
74 var response []certspotterObject
75 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
76 if err != nil {
77 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
78 resp.Body.Close()
79 return
80 }
81 resp.Body.Close()
82
83 if len(response) == 0 {
84 break
85 }
86
87 for _, cert := range response {
88 for _, subdomain := range cert.DNSNames {
89 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
90 }
91 }
92
93 id = response[len(response)-1].ID
94 }
95 }()
96
97 return results
98 }
99
100 // Name returns the name of the source
101 func (s *Source) Name() string {
102 return "certspotter"
103 }
104
105 func (s *Source) IsDefault() bool {
106 return true
107 }
108
109 func (s *Source) HasRecursiveSupport() bool {
110 return true
111 }
112
113 func (s *Source) NeedsKey() bool {
114 return true
115 }
116
117 func (s *Source) AddApiKeys(keys []string) {
118 s.apiKeys = keys
119 }
0 // Package chaos logic
1 package chaos
2
3 import (
4 "context"
5 "fmt"
6
7 "github.com/projectdiscovery/chaos-client/pkg/chaos"
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct {
13 apiKeys []string
14 }
15
16 // Run function returns all subdomains found with the service
17 func (s *Source) Run(_ context.Context, domain string, _ *subscraping.Session) <-chan subscraping.Result {
18 results := make(chan subscraping.Result)
19
20 go func() {
21 defer close(results)
22
23 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
24 if randomApiKey == "" {
25 return
26 }
27
28 chaosClient := chaos.New(randomApiKey)
29 for result := range chaosClient.GetSubdomains(&chaos.SubdomainsRequest{
30 Domain: domain,
31 }) {
32 if result.Error != nil {
33 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: result.Error}
34 break
35 }
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: fmt.Sprintf("%s.%s", result.Subdomain, domain)}
37 }
38 }()
39
40 return results
41 }
42
43 // Name returns the name of the source
44 func (s *Source) Name() string {
45 return "chaos"
46 }
47
48 func (s *Source) IsDefault() bool {
49 return true
50 }
51
52 func (s *Source) HasRecursiveSupport() bool {
53 return false
54 }
55
56 func (s *Source) NeedsKey() bool {
57 return true
58 }
59
60 func (s *Source) AddApiKeys(keys []string) {
61 s.apiKeys = keys
62 }
0 package chinaz
1
2 // chinaz http://my.chinaz.com/ChinazAPI/DataCenter/MyDataApi
3 import (
4 "context"
5 "fmt"
6 "io"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 // Source is the passive scraping agent
14 type Source struct {
15 apiKeys []string
16 }
17
18 // Run function returns all subdomains found with the service
19 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
20 results := make(chan subscraping.Result)
21
22 go func() {
23 defer close(results)
24
25 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
26 if randomApiKey == "" {
27 return
28 }
29
30 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://apidatav2.chinaz.com/single/alexa?key=%s&domain=%s", randomApiKey, domain))
31 if err != nil {
32 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
33 session.DiscardHTTPResponse(resp)
34 return
35 }
36
37 body, err := io.ReadAll(resp.Body)
38
39 resp.Body.Close()
40
41 SubdomainList := jsoniter.Get(body, "Result").Get("ContributingSubdomainList")
42
43 if SubdomainList.ToBool() {
44 _data := []byte(SubdomainList.ToString())
45 for i := 0; i < SubdomainList.Size(); i++ {
46 subdomain := jsoniter.Get(_data, i, "DataUrl").ToString()
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
48 }
49 } else {
50 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
51 return
52 }
53 }()
54
55 return results
56 }
57
58 // Name returns the name of the source
59 func (s *Source) Name() string {
60 return "chinaz"
61 }
62
63 func (s *Source) IsDefault() bool {
64 return true
65 }
66
67 func (s *Source) HasRecursiveSupport() bool {
68 return false
69 }
70
71 func (s *Source) NeedsKey() bool {
72 return true
73 }
74
75 func (s *Source) AddApiKeys(keys []string) {
76 s.apiKeys = keys
77 }
0 // Package commoncrawl logic
1 package commoncrawl
2
3 import (
4 "bufio"
5 "context"
6 "fmt"
7 "net/url"
8 "strings"
9
10 jsoniter "github.com/json-iterator/go"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 const indexURL = "https://index.commoncrawl.org/collinfo.json"
16
17 type indexResponse struct {
18 ID string `json:"id"`
19 APIURL string `json:"cdx-api"`
20 }
21
22 // Source is the passive scraping agent
23 type Source struct{}
24
25 var years = [...]string{"2020", "2019", "2018", "2017"}
26
27 // Run function returns all subdomains found with the service
28 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
29 results := make(chan subscraping.Result)
30
31 go func() {
32 defer close(results)
33
34 resp, err := session.SimpleGet(ctx, indexURL)
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 session.DiscardHTTPResponse(resp)
38 return
39 }
40
41 var indexes []indexResponse
42 err = jsoniter.NewDecoder(resp.Body).Decode(&indexes)
43 if err != nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 resp.Body.Close()
46 return
47 }
48 resp.Body.Close()
49
50 searchIndexes := make(map[string]string)
51 for _, year := range years {
52 for _, index := range indexes {
53 if strings.Contains(index.ID, year) {
54 if _, ok := searchIndexes[year]; !ok {
55 searchIndexes[year] = index.APIURL
56 break
57 }
58 }
59 }
60 }
61
62 for _, apiURL := range searchIndexes {
63 further := s.getSubdomains(ctx, apiURL, domain, session, results)
64 if !further {
65 break
66 }
67 }
68 }()
69
70 return results
71 }
72
73 // Name returns the name of the source
74 func (s *Source) Name() string {
75 return "commoncrawl"
76 }
77
78 func (s *Source) IsDefault() bool {
79 return false
80 }
81
82 func (s *Source) HasRecursiveSupport() bool {
83 return false
84 }
85
86 func (s *Source) NeedsKey() bool {
87 return false
88 }
89
90 func (s *Source) AddApiKeys(_ []string) {
91 // no key needed
92 }
93
94 func (s *Source) getSubdomains(ctx context.Context, searchURL, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
95 for {
96 select {
97 case <-ctx.Done():
98 return false
99 default:
100 var headers = map[string]string{"Host": "index.commoncrawl.org"}
101 resp, err := session.Get(ctx, fmt.Sprintf("%s?url=*.%s", searchURL, domain), "", headers)
102 if err != nil {
103 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
104 session.DiscardHTTPResponse(resp)
105 return false
106 }
107
108 scanner := bufio.NewScanner(resp.Body)
109 for scanner.Scan() {
110 line := scanner.Text()
111 if line == "" {
112 continue
113 }
114 line, _ = url.QueryUnescape(line)
115 subdomain := session.Extractor.FindString(line)
116 if subdomain != "" {
117 // fix for triple encoded URL
118 subdomain = strings.ToLower(subdomain)
119 subdomain = strings.TrimPrefix(subdomain, "25")
120 subdomain = strings.TrimPrefix(subdomain, "2f")
121
122 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
123 }
124 }
125 resp.Body.Close()
126 return true
127 }
128 }
129 }
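The "25"/"2f" trimming in getSubdomains compensates for URL-encoding residue: %25 encodes "%" and %2F encodes "/", so a hostname pulled out of a multiply-encoded CDX line can carry those digits as a bogus prefix. A standalone sketch of the cleanup, using a hypothetical input line and a simplified stand-in for session.Extractor:

package main

import (
	"fmt"
	"net/url"
	"regexp"
	"strings"
)

func main() {
	// Hypothetical CDX line whose original URL was encoded more than once.
	line := "https://archive.example.net/?u=%25252Fmail.example.com"
	line, _ = url.QueryUnescape(line) // one level of decoding still leaves "%252F..."

	// Stand-in for session.Extractor (the real regex lives in the subscraping session).
	extractor := regexp.MustCompile(`[a-zA-Z0-9.\-]+\.example\.com`)

	subdomain := strings.ToLower(extractor.FindString(line)) // "252fmail.example.com"
	subdomain = strings.TrimPrefix(subdomain, "25")
	subdomain = strings.TrimPrefix(subdomain, "2f")
	fmt.Println(subdomain) // mail.example.com
}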
0 // Package crtsh logic
1 package crtsh
2
3 import (
4 "context"
5 "database/sql"
6 "fmt"
7 "strings"
8
9 jsoniter "github.com/json-iterator/go"
10
11 // postgres driver
12 _ "github.com/lib/pq"
13
14 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
15 )
16
17 type subdomain struct {
18 ID int `json:"id"`
19 NameValue string `json:"name_value"`
20 }
21
22 // Source is the passive scraping agent
23 type Source struct{}
24
25 // Run function returns all subdomains found with the service
26 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
27 results := make(chan subscraping.Result)
28
29 go func() {
30 defer close(results)
31
32 count := s.getSubdomainsFromSQL(domain, results)
33 if count > 0 {
34 return
35 }
36 _ = s.getSubdomainsFromHTTP(ctx, domain, session, results)
37 }()
38
39 return results
40 }
41
42 func (s *Source) getSubdomainsFromSQL(domain string, results chan subscraping.Result) int {
43 db, err := sql.Open("postgres", "host=crt.sh user=guest dbname=certwatch sslmode=disable binary_parameters=yes")
44 if err != nil {
45 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
46 return 0
47 }
48
49 defer db.Close()
50
51 pattern := "%." + domain
52 query := `SELECT DISTINCT ci.NAME_VALUE as domain FROM certificate_identity ci
53 WHERE reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower($1))
54 ORDER BY ci.NAME_VALUE`
55 rows, err := db.Query(query, pattern)
56 if err != nil {
57 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
58 return 0
59 }
60 if err := rows.Err(); err != nil {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
62 return 0
63 }
64
65 var count int
66 var data string
67 // Parse all the rows getting subdomains
68 for rows.Next() {
69 err := rows.Scan(&data)
70 if err != nil {
71 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
72 return count
73 }
74 count++
75 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: data}
76 }
77 return count
78 }
79
80 func (s *Source) getSubdomainsFromHTTP(ctx context.Context, domain string, session *subscraping.Session, results chan subscraping.Result) bool {
81 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://crt.sh/?q=%%25.%s&output=json", domain))
82 if err != nil {
83 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
84 session.DiscardHTTPResponse(resp)
85 return false
86 }
87
88 var subdomains []subdomain
89 err = jsoniter.NewDecoder(resp.Body).Decode(&subdomains)
90 if err != nil {
91 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
92 resp.Body.Close()
93 return false
94 }
95
96 resp.Body.Close()
97
98 for _, subdomain := range subdomains {
99 for _, sub := range strings.Split(subdomain.NameValue, "\n") {
100 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: sub}
101 }
102 }
103
104 return true
105 }
106
107 // Name returns the name of the source
108 func (s *Source) Name() string {
109 return "crtsh"
110 }
111
112 func (s *Source) IsDefault() bool {
113 return true
114 }
115
116 func (s *Source) HasRecursiveSupport() bool {
117 return true
118 }
119
120 func (s *Source) NeedsKey() bool {
121 return false
122 }
123
124 func (s *Source) AddApiKeys(_ []string) {
125 // no key needed
126 }
0 // Package dnsdb logic
1 package dnsdb
2
3 import (
4 "bufio"
5 "bytes"
6 "context"
7 "fmt"
8 "strings"
9
10 jsoniter "github.com/json-iterator/go"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 type dnsdbResponse struct {
16 Name string `json:"rrname"`
17 }
18
19 // Source is the passive scraping agent
20 type Source struct {
21 apiKeys []string
22 }
23
24 // Run function returns all subdomains found with the service
25 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
26 results := make(chan subscraping.Result)
27
28 go func() {
29 defer close(results)
30
31 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
32 if randomApiKey == "" {
33 return
34 }
35
36 headers := map[string]string{
37 "X-API-KEY": randomApiKey,
38 "Accept": "application/json",
39 "Content-Type": "application/json",
40 }
41
42 resp, err := session.Get(ctx, fmt.Sprintf("https://api.dnsdb.info/lookup/rrset/name/*.%s?limit=1000000000000", domain), "", headers)
43 if err != nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 session.DiscardHTTPResponse(resp)
46 return
47 }
48
49 scanner := bufio.NewScanner(resp.Body)
50 for scanner.Scan() {
51 line := scanner.Text()
52 if line == "" {
53 continue
54 }
55 var response dnsdbResponse
56 err = jsoniter.NewDecoder(bytes.NewBufferString(line)).Decode(&response)
57 if err != nil {
58 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
59 return
60 }
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(response.Name, ".")}
62 }
63 resp.Body.Close()
64 }()
65 return results
66 }
67
68 // Name returns the name of the source
69 func (s *Source) Name() string {
70 return "dnsdb"
71 }
72
73 func (s *Source) IsDefault() bool {
74 return false
75 }
76
77 func (s *Source) HasRecursiveSupport() bool {
78 return false
79 }
80
81 func (s *Source) NeedsKey() bool {
82 return true
83 }
84
85 func (s *Source) AddApiKeys(keys []string) {
86 s.apiKeys = keys
87 }
0 // Package dnsdumpster logic
1 package dnsdumpster
2
3 import (
4 "context"
5 "fmt"
6 "io"
7 "net/url"
8 "regexp"
9 "strings"
10
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
12 )
13
14 // CSRFSubMatchLength CSRF regex submatch length
15 const CSRFSubMatchLength = 2
16
17 var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")
18
19 // getCSRFToken gets the CSRF Token from the page
20 func getCSRFToken(page string) string {
21 if subs := re.FindStringSubmatch(page); len(subs) == CSRFSubMatchLength {
22 return strings.TrimSpace(subs[1])
23 }
24 return ""
25 }
26
27 // postForm posts a form for a domain and returns the response
28 func postForm(ctx context.Context, session *subscraping.Session, token, domain string) (string, error) {
29 params := url.Values{
30 "csrfmiddlewaretoken": {token},
31 "targetip": {domain},
32 "user": {"free"},
33 }
34
35 resp, err := session.HTTPRequest(
36 ctx,
37 "POST",
38 "https://dnsdumpster.com/",
39 fmt.Sprintf("csrftoken=%s; Domain=dnsdumpster.com", token),
40 map[string]string{
41 "Content-Type": "application/x-www-form-urlencoded",
42 "Referer": "https://dnsdumpster.com",
43 "X-CSRF-Token": token,
44 },
45 strings.NewReader(params.Encode()),
46 subscraping.BasicAuth{},
47 )
48
49 if err != nil {
50 session.DiscardHTTPResponse(resp)
51 return "", err
52 }
53
54 // Now, grab the entire page
55 in, err := io.ReadAll(resp.Body)
56 resp.Body.Close()
57 return string(in), err
58 }
59
60 // Source is the passive scraping agent
61 type Source struct{}
62
63 // Run function returns all subdomains found with the service
64 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
65 results := make(chan subscraping.Result)
66
67 go func() {
68 defer close(results)
69
70 resp, err := session.SimpleGet(ctx, "https://dnsdumpster.com/")
71 if err != nil {
72 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
73 session.DiscardHTTPResponse(resp)
74 return
75 }
76
77 body, err := io.ReadAll(resp.Body)
78 if err != nil {
79 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
80 resp.Body.Close()
81 return
82 }
83 resp.Body.Close()
84
85 csrfToken := getCSRFToken(string(body))
86 data, err := postForm(ctx, session, csrfToken, domain)
87 if err != nil {
88 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
89 return
90 }
91
92 for _, subdomain := range session.Extractor.FindAllString(data, -1) {
93 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
94 }
95 }()
96
97 return results
98 }
99
100 // Name returns the name of the source
101 func (s *Source) Name() string {
102 return "dnsdumpster"
103 }
104
105 func (s *Source) IsDefault() bool {
106 return true
107 }
108
109 func (s *Source) HasRecursiveSupport() bool {
110 return true
111 }
112
113 func (s *Source) NeedsKey() bool {
114 return false
115 }
116
117 func (s *Source) AddApiKeys(_ []string) {
118 // no key needed
119 }
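To make the CSRF handshake concrete, the token regex can be exercised on its own; the HTML fragment below is a hypothetical stand-in for the dnsdumpster.com landing page:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Same pattern as the dnsdumpster source above.
var re = regexp.MustCompile("<input type=\"hidden\" name=\"csrfmiddlewaretoken\" value=\"(.*)\">")

func main() {
	page := `<form method="post">
<input type="hidden" name="csrfmiddlewaretoken" value="abc123token">
</form>`
	if subs := re.FindStringSubmatch(page); len(subs) == 2 {
		fmt.Println(strings.TrimSpace(subs[1])) // abc123token
	}
}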
0 package dnsrepo
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6 "io"
7 "strings"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 // Source is the passive scraping agent
13 type Source struct {
14 apiKeys []string
15 }
16
17 type DnsRepoResponse []struct {
18 Domain string
19 }
20
21 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
22 results := make(chan subscraping.Result)
23
24 go func() {
25 defer close(results)
26
27 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
28 if randomApiKey == "" {
29 return
30 }
31 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://dnsrepo.noc.org/api/?apikey=%s&search=%s", randomApiKey, domain))
32 if err != nil {
33 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
34 session.DiscardHTTPResponse(resp)
35 return
36 }
37 responseData, err := io.ReadAll(resp.Body)
38 if err != nil {
39 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
40 session.DiscardHTTPResponse(resp)
41 return
42 }
43 resp.Body.Close()
44 var result DnsRepoResponse
45 err = json.Unmarshal(responseData, &result)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 session.DiscardHTTPResponse(resp)
49 return
50 }
51 for _, sub := range result {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: strings.TrimSuffix(sub.Domain, ".")}
53 }
54
55 }()
56 return results
57 }
58
59 // Name returns the name of the source
60 func (s *Source) Name() string {
61 return "dnsrepo"
62 }
63
64 func (s *Source) IsDefault() bool {
65 return true
66 }
67
68 func (s *Source) HasRecursiveSupport() bool {
69 return false
70 }
71
72 func (s *Source) NeedsKey() bool {
73 return true
74 }
75
76 func (s *Source) AddApiKeys(keys []string) {
77 s.apiKeys = keys
78 }
0 // Package fofa logic
1 package fofa
2
3 import (
4 "context"
5 "encoding/base64"
6 "fmt"
7 "strings"
8
9 jsoniter "github.com/json-iterator/go"
10
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
12 )
13
14 type fofaResponse struct {
15 Error bool `json:"error"`
16 ErrMsg string `json:"errmsg"`
17 Size int `json:"size"`
18 Results []string `json:"results"`
19 }
20
21 // Source is the passive scraping agent
22 type Source struct {
23 apiKeys []apiKey
24 }
25
26 type apiKey struct {
27 username string
28 secret string
29 }
30
31 // Run function returns all subdomains found with the service
32 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
33 results := make(chan subscraping.Result)
34
35 go func() {
36 defer close(results)
37
38 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
39 if randomApiKey.username == "" || randomApiKey.secret == "" {
40 return
41 }
42
43 // fofa api doc https://fofa.info/static_pages/api_help
44 qbase64 := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("domain=\"%s\"", domain)))
45 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://fofa.info/api/v1/search/all?full=true&fields=host&page=1&size=10000&email=%s&key=%s&qbase64=%s", randomApiKey.username, randomApiKey.secret, qbase64))
46 if err != nil && resp == nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 session.DiscardHTTPResponse(resp)
49 return
50 }
51
52 var response fofaResponse
53 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
54 if err != nil {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
56 resp.Body.Close()
57 return
58 }
59 resp.Body.Close()
60
61 if response.Error {
62 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.ErrMsg)}
63 return
64 }
65
66 if response.Size > 0 {
67 for _, subdomain := range response.Results {
68 if strings.HasPrefix(strings.ToLower(subdomain), "http://") || strings.HasPrefix(strings.ToLower(subdomain), "https://") {
69 subdomain = subdomain[strings.Index(subdomain, "//")+2:]
70 }
71 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
72 }
73 }
74 }()
75
76 return results
77 }
78
79 // Name returns the name of the source
80 func (s *Source) Name() string {
81 return "fofa"
82 }
83
84 func (s *Source) IsDefault() bool {
85 return true
86 }
87
88 func (s *Source) HasRecursiveSupport() bool {
89 return false
90 }
91
92 func (s *Source) NeedsKey() bool {
93 return true
94 }
95
96 func (s *Source) AddApiKeys(keys []string) {
97 s.apiKeys = subscraping.CreateApiKeys(keys, func(k, v string) apiKey {
98 return apiKey{k, v}
99 })
100 }
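The qbase64 parameter is simply the FOFA search expression, base64-encoded. For a placeholder domain the encoding step looks like this standalone sketch:

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Same query construction as the fofa source above, for a placeholder domain.
	domain := "example.com"
	qbase64 := base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("domain=\"%s\"", domain)))
	fmt.Println(qbase64) // ZG9tYWluPSJleGFtcGxlLmNvbSI=
}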
0 package fullhunt
1
2 import (
3 "context"
4 "fmt"
5
6 jsoniter "github.com/json-iterator/go"
7
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 // fullHuntResponse is the response returned by the fullhunt API
12 type fullHuntResponse struct {
13 Hosts []string `json:"hosts"`
14 Message string `json:"message"`
15 Status int `json:"status"`
16 }
17
18 // Source is the passive scraping agent
19 type Source struct {
20 apiKeys []string
21 }
22
23 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
24 results := make(chan subscraping.Result)
25
26 go func() {
27 defer close(results)
28
29 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
30 if randomApiKey == "" {
31 return
32 }
33
34 resp, err := session.Get(ctx, fmt.Sprintf("https://fullhunt.io/api/v1/domain/%s/subdomains", domain), "", map[string]string{"X-API-KEY": randomApiKey})
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 session.DiscardHTTPResponse(resp)
38 return
39 }
40
41 var response fullHuntResponse
42 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
43 if err != nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 resp.Body.Close()
46 return
47 }
48 resp.Body.Close()
49 for _, record := range response.Hosts {
50 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record}
51 }
52 }()
53 return results
54 }
55
56 // Name returns the name of the source
57 func (s *Source) Name() string {
58 return "fullhunt"
59 }
60
61 func (s *Source) IsDefault() bool {
62 return true
63 }
64
65 func (s *Source) HasRecursiveSupport() bool {
66 return false
67 }
68
69 func (s *Source) NeedsKey() bool {
70 return true
71 }
72
73 func (s *Source) AddApiKeys(keys []string) {
74 s.apiKeys = keys
75 }
0 // Package github GitHub search package
1 // Based on gwen001's https://github.com/gwen001/github-search github-subdomains
2 package github
3
4 import (
5 "bufio"
6 "context"
7 "fmt"
8 "net/http"
9 "net/url"
10 "regexp"
11 "strconv"
12 "strings"
13 "time"
14
15 jsoniter "github.com/json-iterator/go"
16
17 "github.com/tomnomnom/linkheader"
18
19 "github.com/projectdiscovery/gologger"
20 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
21 )
22
23 type textMatch struct {
24 Fragment string `json:"fragment"`
25 }
26
27 type item struct {
28 Name string `json:"name"`
29 HTMLURL string `json:"html_url"`
30 TextMatches []textMatch `json:"text_matches"`
31 }
32
33 type response struct {
34 TotalCount int `json:"total_count"`
35 Items []item `json:"items"`
36 }
37
38 // Source is the passive scraping agent
39 type Source struct {
40 apiKeys []string
41 }
42
43 // Run function returns all subdomains found with the service
44 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
45 results := make(chan subscraping.Result)
46
47 go func() {
48 defer close(results)
49
50 if len(s.apiKeys) == 0 {
51 gologger.Debug().Msgf("Cannot use the '%s' source because there was no key defined for it.", s.Name())
52 return
53 }
54
55 tokens := NewTokenManager(s.apiKeys)
56
57 searchURL := fmt.Sprintf("https://api.github.com/search/code?per_page=100&q=%s&sort=created&order=asc", domain)
58 s.enumerate(ctx, searchURL, domainRegexp(domain), tokens, session, results)
59 }()
60
61 return results
62 }
63
64 func (s *Source) enumerate(ctx context.Context, searchURL string, domainRegexp *regexp.Regexp, tokens *Tokens, session *subscraping.Session, results chan subscraping.Result) {
65 select {
66 case <-ctx.Done():
67 return
68 default:
69 }
70
71 token := tokens.Get()
72
73 if token.RetryAfter > 0 {
74 if len(tokens.pool) == 1 {
75 gologger.Verbose().Label(s.Name()).Msgf("GitHub Search request rate limit exceeded, waiting for %d seconds before retry... \n", token.RetryAfter)
76 time.Sleep(time.Duration(token.RetryAfter) * time.Second)
77 } else {
78 token = tokens.Get()
79 }
80 }
81
82 headers := map[string]string{"Accept": "application/vnd.github.v3.text-match+json", "Authorization": "token " + token.Hash}
83
84 // Initial request to GitHub search
85 resp, err := session.Get(ctx, searchURL, "", headers)
86 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
87 if err != nil && !isForbidden {
88 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
89 session.DiscardHTTPResponse(resp)
90 return
91 }
92
93 // Retry enumeration after Retry-After seconds when rate-limit abuse is detected
94 ratelimitRemaining, _ := strconv.ParseInt(resp.Header.Get("X-Ratelimit-Remaining"), 10, 64)
95 if isForbidden && ratelimitRemaining == 0 {
96 retryAfterSeconds, _ := strconv.ParseInt(resp.Header.Get("Retry-After"), 10, 64)
97 tokens.setCurrentTokenExceeded(retryAfterSeconds)
98 resp.Body.Close()
99 s.enumerate(ctx, searchURL, domainRegexp, tokens, session, results)
100 return
101 }
102
103 var data response
104
105 // Unmarshal JSON response
106 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
107 if err != nil {
108 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
109 resp.Body.Close()
110 return
111 }
112
113 resp.Body.Close()
114
115 err = proccesItems(ctx, data.Items, domainRegexp, s.Name(), session, results)
116 if err != nil {
117 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
118 return
119 }
120
121 // Links header, first, next, last...
122 linksHeader := linkheader.Parse(resp.Header.Get("Link"))
123 // Process the next link recursively
124 for _, link := range linksHeader {
125 if link.Rel == "next" {
126 nextURL, err := url.QueryUnescape(link.URL)
127 if err != nil {
128 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
129 return
130 }
131 s.enumerate(ctx, nextURL, domainRegexp, tokens, session, results)
132 }
133 }
134 }
135
136 // proccesItems processes GitHub response items
137 func proccesItems(ctx context.Context, items []item, domainRegexp *regexp.Regexp, name string, session *subscraping.Session, results chan subscraping.Result) error {
138 for _, item := range items {
139 // find subdomains in code
140 resp, err := session.SimpleGet(ctx, rawURL(item.HTMLURL))
141 if err != nil {
142 if resp != nil && resp.StatusCode != http.StatusNotFound {
143 session.DiscardHTTPResponse(resp)
144 }
145 return err
146 }
147
148 if resp.StatusCode == http.StatusOK {
149 scanner := bufio.NewScanner(resp.Body)
150 for scanner.Scan() {
151 line := scanner.Text()
152 if line == "" {
153 continue
154 }
155 for _, subdomain := range domainRegexp.FindAllString(normalizeContent(line), -1) {
156 results <- subscraping.Result{Source: name, Type: subscraping.Subdomain, Value: subdomain}
157 }
158 }
159 resp.Body.Close()
160 }
161
162 // find subdomains in text matches
163 for _, textMatch := range item.TextMatches {
164 for _, subdomain := range domainRegexp.FindAllString(normalizeContent(textMatch.Fragment), -1) {
165 results <- subscraping.Result{Source: name, Type: subscraping.Subdomain, Value: subdomain}
166 }
167 }
168 }
169 return nil
170 }
171
172 // normalizeContent prepares content for matching: query-unescapes it and removes escaped tab and newline sequences
173 func normalizeContent(content string) string {
174 normalizedContent, _ := url.QueryUnescape(content)
175 normalizedContent = strings.ReplaceAll(normalizedContent, "\\t", "")
176 normalizedContent = strings.ReplaceAll(normalizedContent, "\\n", "")
177 return normalizedContent
178 }
179
180 // rawURL converts a GitHub HTML URL into its raw.githubusercontent.com form so the file contents can be fetched and matched for subdomains
181 func rawURL(htmlURL string) string {
182 domain := strings.ReplaceAll(htmlURL, "https://github.com/", "https://raw.githubusercontent.com/")
183 return strings.ReplaceAll(domain, "/blob/", "/")
184 }
185
186 // domainRegexp builds a regular expression that matches subdomains of the given domain in GitHub file contents
187 func domainRegexp(domain string) *regexp.Regexp {
188 rdomain := strings.ReplaceAll(domain, ".", "\\.")
189 return regexp.MustCompile("(\\w[a-zA-Z0-9][a-zA-Z0-9-\\.]*)" + rdomain)
190 }
191
192 // Name returns the name of the source
193 func (s *Source) Name() string {
194 return "github"
195 }
196
197 func (s *Source) IsDefault() bool {
198 return false
199 }
200
201 func (s *Source) HasRecursiveSupport() bool {
202 return false
203 }
204
205 func (s *Source) NeedsKey() bool {
206 return true
207 }
208
209 func (s *Source) AddApiKeys(keys []string) {
210 s.apiKeys = keys
211 }
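To see what the two helpers above produce, here is a standalone sketch that copies them verbatim and runs them on hypothetical inputs:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// Copies of the helpers from the github source above.
func rawURL(htmlURL string) string {
	domain := strings.ReplaceAll(htmlURL, "https://github.com/", "https://raw.githubusercontent.com/")
	return strings.ReplaceAll(domain, "/blob/", "/")
}

func domainRegexp(domain string) *regexp.Regexp {
	rdomain := strings.ReplaceAll(domain, ".", "\\.")
	return regexp.MustCompile("(\\w[a-zA-Z0-9][a-zA-Z0-9-\\.]*)" + rdomain)
}

func main() {
	// Hypothetical search hit: convert the HTML URL to its raw counterpart.
	fmt.Println(rawURL("https://github.com/owner/repo/blob/main/config.yml"))
	// https://raw.githubusercontent.com/owner/repo/main/config.yml

	// Extract subdomains of example.com from a line of file content.
	re := domainRegexp("example.com")
	fmt.Println(re.FindAllString("api.example.com and cdn.example.com", -1))
	// [api.example.com cdn.example.com]
}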
0 package github
1
2 import "time"
3
4 // Token struct
5 type Token struct {
6 Hash string
7 RetryAfter int64
8 ExceededTime time.Time
9 }
10
11 // Tokens is the internal struct to manage the current token
12 // and the pool
13 type Tokens struct {
14 current int
15 pool []Token
16 }
17
18 // NewTokenManager initializes the token pool
19 func NewTokenManager(keys []string) *Tokens {
20 pool := []Token{}
21 for _, key := range keys {
22 t := Token{Hash: key, ExceededTime: time.Time{}, RetryAfter: 0}
23 pool = append(pool, t)
24 }
25
26 return &Tokens{
27 current: 0,
28 pool: pool,
29 }
30 }
31
32 func (r *Tokens) setCurrentTokenExceeded(retryAfter int64) {
33 if r.current >= len(r.pool) {
34 r.current %= len(r.pool)
35 }
36 if r.pool[r.current].RetryAfter == 0 {
37 r.pool[r.current].ExceededTime = time.Now()
38 r.pool[r.current].RetryAfter = retryAfter
39 }
40 }
41
42 // Get returns a new token from the token pool
43 func (r *Tokens) Get() *Token {
44 resetExceededTokens(r)
45
46 if r.current >= len(r.pool) {
47 r.current %= len(r.pool)
48 }
49
50 result := &r.pool[r.current]
51 r.current++
52
53 return result
54 }
55
56 func resetExceededTokens(r *Tokens) {
57 for i, token := range r.pool {
58 if token.RetryAfter > 0 {
59 if int64(time.Since(token.ExceededTime)/time.Second) > token.RetryAfter {
60 r.pool[i].ExceededTime = time.Time{}
61 r.pool[i].RetryAfter = 0
62 }
63 }
64 }
65 }
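A short usage sketch of the token pool; since setCurrentTokenExceeded is unexported it would have to live inside the github package (for example in a test file), and the key strings are placeholders:

// exampleTokenRotation is an illustrative sketch of how enumerate() drives the pool.
func exampleTokenRotation() {
	tokens := NewTokenManager([]string{"ghp_key_one", "ghp_key_two"}) // placeholder keys

	first := tokens.Get()  // round-robin: hands out the first key
	second := tokens.Get() // then the second, wrapping around afterwards

	// On a 403 with X-Ratelimit-Remaining: 0, enumerate() records the
	// Retry-After window on the pool entry the manager currently points at;
	// resetExceededTokens clears it again once the window has elapsed.
	tokens.setCurrentTokenExceeded(60)

	// Get itself never blocks: enumerate() inspects token.RetryAfter and
	// either sleeps (single-key pool) or asks for another token.
	_, _ = first, second
}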
0 // Package hackertarget logic
1 package hackertarget
2
3 import (
4 "bufio"
5 "context"
6 "fmt"
7
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 defer close(results)
20
21 resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://api.hackertarget.com/hostsearch/?q=%s", domain))
22 if err != nil {
23 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
24 session.DiscardHTTPResponse(resp)
25 return
26 }
27
28 defer resp.Body.Close()
29
30 scanner := bufio.NewScanner(resp.Body)
31 for scanner.Scan() {
32 line := scanner.Text()
33 if line == "" {
34 continue
35 }
36 match := session.Extractor.FindAllString(line, -1)
37 for _, subdomain := range match {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
39 }
40 }
41 }()
42
43 return results
44 }
45
46 // Name returns the name of the source
47 func (s *Source) Name() string {
48 return "hackertarget"
49 }
50
51 func (s *Source) IsDefault() bool {
52 return true
53 }
54
55 func (s *Source) HasRecursiveSupport() bool {
56 return true
57 }
58
59 func (s *Source) NeedsKey() bool {
60 return false
61 }
62
63 func (s *Source) AddApiKeys(_ []string) {
64 // no key needed
65 }
0 // Package intelx logic
1 package intelx
2
3 import (
4 "bytes"
5 "context"
6 "encoding/json"
7 "fmt"
8 "io"
9
10 jsoniter "github.com/json-iterator/go"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 type searchResponseType struct {
16 ID string `json:"id"`
17 Status int `json:"status"`
18 }
19
20 type selectorType struct {
21 Selectvalue string `json:"selectorvalue"`
22 }
23
24 type searchResultType struct {
25 Selectors []selectorType `json:"selectors"`
26 Status int `json:"status"`
27 }
28
29 type requestBody struct {
30 Term string
31 Maxresults int
32 Media int
33 Target int
34 Terminate []int
35 Timeout int
36 }
37
38 // Source is the passive scraping agent
39 type Source struct {
40 apiKeys []apiKey
41 }
42
43 type apiKey struct {
44 host string
45 key string
46 }
47
48 // Run function returns all subdomains found with the service
49 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
50 results := make(chan subscraping.Result)
51
52 go func() {
53 defer close(results)
54
55 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
56 if randomApiKey.host == "" || randomApiKey.key == "" {
57 return
58 }
59
60 searchURL := fmt.Sprintf("https://%s/phonebook/search?k=%s", randomApiKey.host, randomApiKey.key)
61 reqBody := requestBody{
62 Term: domain,
63 Maxresults: 100000,
64 Media: 0,
65 Target: 1,
66 Timeout: 20,
67 }
68
69 body, err := json.Marshal(reqBody)
70 if err != nil {
71 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
72 return
73 }
74
75 resp, err := session.SimplePost(ctx, searchURL, "application/json", bytes.NewBuffer(body))
76 if err != nil {
77 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
78 session.DiscardHTTPResponse(resp)
79 return
80 }
81
82 var response searchResponseType
83 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 return
88 }
89
90 resp.Body.Close()
91
92 resultsURL := fmt.Sprintf("https://%s/phonebook/search/result?k=%s&id=%s&limit=10000", randomApiKey.host, randomApiKey.key, response.ID)
93 status := 0
94 for status == 0 || status == 3 {
95 resp, err = session.Get(ctx, resultsURL, "", nil)
96 if err != nil {
97 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
98 session.DiscardHTTPResponse(resp)
99 return
100 }
101 var response searchResultType
102 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
103 if err != nil {
104 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
105 resp.Body.Close()
106 return
107 }
108
109 _, err = io.ReadAll(resp.Body)
110 if err != nil {
111 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
112 resp.Body.Close()
113 return
114 }
115 resp.Body.Close()
116
117 status = response.Status
118 for _, hostname := range response.Selectors {
119 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: hostname.Selectvalue}
120 }
121 }
122 }()
123
124 return results
125 }
126
127 // Name returns the name of the source
128 func (s *Source) Name() string {
129 return "intelx"
130 }
131
132 func (s *Source) IsDefault() bool {
133 return true
134 }
135
136 func (s *Source) HasRecursiveSupport() bool {
137 return false
138 }
139
140 func (s *Source) NeedsKey() bool {
141 return true
142 }
143
144 func (s *Source) AddApiKeys(keys []string) {
145 s.apiKeys = subscraping.CreateApiKeys(keys, func(k, v string) apiKey {
146 return apiKey{k, v}
147 })
148 }
0 // Package passivetotal logic
1 package passivetotal
2
3 import (
4 "bytes"
5 "context"
6 "regexp"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 var passiveTotalFilterRegex = regexp.MustCompile(`^(?:\d{1,3}\.){3}\d{1,3}\\032`)
14
15 type response struct {
16 Subdomains []string `json:"subdomains"`
17 }
18
19 // Source is the passive scraping agent
20 type Source struct {
21 apiKeys []apiKey
22 }
23
24 type apiKey struct {
25 username string
26 password string
27 }
28
29 // Run function returns all subdomains found with the service
30 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
31 results := make(chan subscraping.Result)
32
33 go func() {
34 defer close(results)
35
36 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
37 if randomApiKey.username == "" || randomApiKey.password == "" {
38 return
39 }
40
41 // Create JSON Get body
42 var request = []byte(`{"query":"` + domain + `"}`)
43
44 resp, err := session.HTTPRequest(
45 ctx,
46 "GET",
47 "https://api.passivetotal.org/v2/enrichment/subdomains",
48 "",
49 map[string]string{"Content-Type": "application/json"},
50 bytes.NewBuffer(request),
51 subscraping.BasicAuth{Username: randomApiKey.username, Password: randomApiKey.password},
52 )
53 if err != nil {
54 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
55 session.DiscardHTTPResponse(resp)
56 return
57 }
58
59 var data response
60 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
61 if err != nil {
62 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
63 resp.Body.Close()
64 return
65 }
66 resp.Body.Close()
67
68 for _, subdomain := range data.Subdomains {
69 // skip entries like xxx.xxx.xxx.xxx\032domain.tld
70 if passiveTotalFilterRegex.MatchString(subdomain) {
71 continue
72 }
73 finalSubdomain := subdomain + "." + domain
74 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: finalSubdomain}
75 }
76 }()
77
78 return results
79 }
80
81 // Name returns the name of the source
82 func (s *Source) Name() string {
83 return "passivetotal"
84 }
85
86 func (s *Source) IsDefault() bool {
87 return true
88 }
89
90 func (s *Source) HasRecursiveSupport() bool {
91 return true
92 }
93
94 func (s *Source) NeedsKey() bool {
95 return true
96 }
97
98 func (s *Source) AddApiKeys(keys []string) {
99 s.apiKeys = subscraping.CreateApiKeys(keys, func(k, v string) apiKey {
100 return apiKey{k, v}
101 })
102 }
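The filter regex is easiest to read with an example; the entries below are hypothetical but mirror the xxx.xxx.xxx.xxx\032domain.tld artifacts mentioned in the comment above:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same filter as the passivetotal source above: entries shaped like
	// "<ipv4>\032<label>" are API artifacts, not real subdomain labels.
	re := regexp.MustCompile(`^(?:\d{1,3}\.){3}\d{1,3}\\032`)
	fmt.Println(re.MatchString(`10.1.2.3\032mail`)) // true  -> the entry is skipped
	fmt.Println(re.MatchString("mail"))             // false -> kept and suffixed with "." + domain
}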
0 // Package quake logic
1 package quake
2
3 import (
4 "bytes"
5 "context"
6 "fmt"
7 "strings"
8
9 jsoniter "github.com/json-iterator/go"
10
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
12 )
13
14 type quakeResults struct {
15 Code int `json:"code"`
16 Message string `json:"message"`
17 Data []struct {
18 Service struct {
19 HTTP struct {
20 Host string `json:"host"`
21 } `json:"http"`
22 }
23 } `json:"data"`
24 Meta struct {
25 Pagination struct {
26 Total int `json:"total"`
27 } `json:"pagination"`
28 } `json:"meta"`
29 }
30
31 // Source is the passive scraping agent
32 type Source struct {
33 apiKeys []string
34 }
35
36 // Run function returns all subdomains found with the service
37 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
38 results := make(chan subscraping.Result)
39 go func() {
40 defer close(results)
41
42 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
43 if randomApiKey == "" {
44 return
45 }
46
47 // quake api doc https://quake.360.cn/quake/#/help
48 var requestBody = []byte(fmt.Sprintf(`{"query":"domain: *.%s", "start":0, "size":500}`, domain))
49 resp, err := session.Post(ctx, "https://quake.360.cn/api/v3/search/quake_service", "", map[string]string{"Content-Type": "application/json", "X-QuakeToken": randomApiKey}, bytes.NewReader(requestBody))
50 if err != nil {
51 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
52 session.DiscardHTTPResponse(resp)
53 return
54 }
55
56 var response quakeResults
57 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
58 if err != nil {
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
60 resp.Body.Close()
61 return
62 }
63 resp.Body.Close()
64
65 if response.Code != 0 {
66 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%s", response.Message)}
67 return
68 }
69
70 if response.Meta.Pagination.Total > 0 {
71 for _, quakeDomain := range response.Data {
72 subdomain := quakeDomain.Service.HTTP.Host
73 if strings.Contains(subdomain, "暂无权限") {
74 continue
75 }
76 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
77 }
78 }
79 }()
80
81 return results
82 }
83
84 // Name returns the name of the source
85 func (s *Source) Name() string {
86 return "quake"
87 }
88
89 func (s *Source) IsDefault() bool {
90 return true
91 }
92
93 func (s *Source) HasRecursiveSupport() bool {
94 return false
95 }
96
97 func (s *Source) NeedsKey() bool {
98 return true
99 }
100
101 func (s *Source) AddApiKeys(keys []string) {
102 s.apiKeys = keys
103 }
0 // Package rapiddns is a RapidDNS Scraping Engine in Golang
1 package rapiddns
2
3 import (
4 "context"
5 "io"
6
7 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
8 )
9
10 // Source is the passive scraping agent
11 type Source struct{}
12
13 // Run function returns all subdomains found with the service
14 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
15 results := make(chan subscraping.Result)
16
17 go func() {
18 defer close(results)
19
20 resp, err := session.SimpleGet(ctx, "https://rapiddns.io/subdomain/"+domain+"?full=1")
21 if err != nil {
22 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
23 session.DiscardHTTPResponse(resp)
24 return
25 }
26
27 body, err := io.ReadAll(resp.Body)
28 if err != nil {
29 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
30 resp.Body.Close()
31 return
32 }
33
34 resp.Body.Close()
35
36 src := string(body)
37 for _, subdomain := range session.Extractor.FindAllString(src, -1) {
38 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
39 }
40 }()
41
42 return results
43 }
44
45 // Name returns the name of the source
46 func (s *Source) Name() string {
47 return "rapiddns"
48 }
49
50 func (s *Source) IsDefault() bool {
51 return false
52 }
53
54 func (s *Source) HasRecursiveSupport() bool {
55 return false
56 }
57
58 func (s *Source) NeedsKey() bool {
59 return false
60 }
61
62 func (s *Source) AddApiKeys(_ []string) {
63 // no key needed
64 }
0 // Package riddler logic
1 package riddler
2
3 import (
4 "bufio"
5 "context"
6 "fmt"
7
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 // Source is the passive scraping agent
12 type Source struct{}
13
14 // Run function returns all subdomains found with the service
15 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
16 results := make(chan subscraping.Result)
17
18 go func() {
19 defer close(results)
20
21 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://riddler.io/search?q=pld:%s&view_type=data_table", domain))
22 if err != nil {
23 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
24 session.DiscardHTTPResponse(resp)
25 return
26 }
27
28 scanner := bufio.NewScanner(resp.Body)
29 for scanner.Scan() {
30 line := scanner.Text()
31 if line == "" {
32 continue
33 }
34 subdomain := session.Extractor.FindString(line)
35 if subdomain != "" {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
37 }
38 }
39 resp.Body.Close()
40 }()
41
42 return results
43 }
44
45 // Name returns the name of the source
46 func (s *Source) Name() string {
47 return "riddler"
48 }
49
50 func (s *Source) IsDefault() bool {
51 return true
52 }
53
54 func (s *Source) HasRecursiveSupport() bool {
55 return false
56 }
57
58 func (s *Source) NeedsKey() bool {
59 return false
60 }
61
62 func (s *Source) AddApiKeys(_ []string) {
63 // no key needed
64 }
0 // Package robtex logic
1 package robtex
2
3 import (
4 "bufio"
5 "bytes"
6 "context"
7 "fmt"
8
9 jsoniter "github.com/json-iterator/go"
10
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
12 )
13
14 const (
15 addrRecord = "A"
16 iPv6AddrRecord = "AAAA"
17 baseURL = "https://proapi.robtex.com/pdns"
18 )
19
20 // Source is the passive scraping agent
21 type Source struct {
22 apiKeys []string
23 }
24
25 type result struct {
26 Rrname string `json:"rrname"`
27 Rrdata string `json:"rrdata"`
28 Rrtype string `json:"rrtype"`
29 }
30
31 // Run function returns all subdomains found with the service
32 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
33 results := make(chan subscraping.Result)
34
35 go func() {
36 defer close(results)
37
38 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
39 if randomApiKey == "" {
40 return
41 }
42
43 headers := map[string]string{"Content-Type": "application/x-ndjson"}
44
45 ips, err := enumerate(ctx, session, fmt.Sprintf("%s/forward/%s?key=%s", baseURL, domain, randomApiKey), headers)
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 return
49 }
50
51 for _, result := range ips {
52 if result.Rrtype == addrRecord || result.Rrtype == iPv6AddrRecord {
53 domains, err := enumerate(ctx, session, fmt.Sprintf("%s/reverse/%s?key=%s", baseURL, result.Rrdata, randomApiKey), headers)
54 if err != nil {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
56 return
57 }
58 for _, result := range domains {
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: result.Rrdata}
60 }
61 }
62 }
63 }()
64 return results
65 }
66
67 func enumerate(ctx context.Context, session *subscraping.Session, targetURL string, headers map[string]string) ([]result, error) {
68 var results []result
69
70 resp, err := session.Get(ctx, targetURL, "", headers)
71 if err != nil {
72 session.DiscardHTTPResponse(resp)
73 return results, err
74 }
75
76 scanner := bufio.NewScanner(resp.Body)
77 for scanner.Scan() {
78 line := scanner.Text()
79 if line == "" {
80 continue
81 }
82 var response result
83 err = jsoniter.NewDecoder(bytes.NewBufferString(line)).Decode(&response)
84 if err != nil {
85 return results, err
86 }
87
88 results = append(results, response)
89 }
90
91 resp.Body.Close()
92
93 return results, nil
94 }
95
96 // Name returns the name of the source
97 func (s *Source) Name() string {
98 return "robtex"
99 }
100
101 func (s *Source) IsDefault() bool {
102 return true
103 }
104
105 func (s *Source) HasRecursiveSupport() bool {
106 return false
107 }
108
109 func (s *Source) NeedsKey() bool {
110 return true
111 }
112
113 func (s *Source) AddApiKeys(keys []string) {
114 s.apiKeys = keys
115 }
0 // Package securitytrails logic
1 package securitytrails
2
3 import (
4 "context"
5 "fmt"
6 "strings"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 type response struct {
14 Subdomains []string `json:"subdomains"`
15 }
16
17 // Source is the passive scraping agent
18 type Source struct {
19 apiKeys []string
20 }
21
22 // Run function returns all subdomains found with the service
23 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
24 results := make(chan subscraping.Result)
25
26 go func() {
27 defer close(results)
28
29 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
30 if randomApiKey == "" {
31 return
32 }
33
34 resp, err := session.Get(ctx, fmt.Sprintf("https://api.securitytrails.com/v1/domain/%s/subdomains", domain), "", map[string]string{"APIKEY": randomApiKey})
35 if err != nil {
36 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
37 session.DiscardHTTPResponse(resp)
38 return
39 }
40
41 var securityTrailsResponse response
42 err = jsoniter.NewDecoder(resp.Body).Decode(&securityTrailsResponse)
43 if err != nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 resp.Body.Close()
46 return
47 }
48
49 resp.Body.Close()
50
51 for _, subdomain := range securityTrailsResponse.Subdomains {
52 if strings.HasSuffix(subdomain, ".") {
53 subdomain += domain
54 } else {
55 subdomain = subdomain + "." + domain
56 }
57
58 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
59 }
60 }()
61
62 return results
63 }
64
65 // Name returns the name of the source
66 func (s *Source) Name() string {
67 return "securitytrails"
68 }
69
70 func (s *Source) IsDefault() bool {
71 return true
72 }
73
74 func (s *Source) HasRecursiveSupport() bool {
75 return true
76 }
77
78 func (s *Source) NeedsKey() bool {
79 return true
80 }
81
82 func (s *Source) AddApiKeys(keys []string) {
83 s.apiKeys = keys
84 }
0 // Package shodan logic
1 package shodan
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 // Source is the passive scraping agent
13 type Source struct {
14 apiKeys []string
15 }
16
17 type dnsdbLookupResponse struct {
18 Domain string `json:"domain"`
19 Subdomains []string `json:"subdomains"`
20 Result int `json:"result"`
21 Error string `json:"error"`
22 }
23
24 // Run function returns all subdomains found with the service
25 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
26 results := make(chan subscraping.Result)
27
28 go func() {
29 defer close(results)
30
31 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
32 if randomApiKey == "" {
33 return
34 }
35
36 searchURL := fmt.Sprintf("https://api.shodan.io/dns/domain/%s?key=%s", domain, randomApiKey)
37 resp, err := session.SimpleGet(ctx, searchURL)
38 if err != nil {
39 session.DiscardHTTPResponse(resp)
40 return
41 }
42
43 defer resp.Body.Close()
44
45 var response dnsdbLookupResponse
46 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
47 if err != nil {
48 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
49 return
50 }
51
52 if response.Error != "" {
53 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("%v", response.Error)}
54 return
55 }
56
57 for _, data := range response.Subdomains {
58 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: fmt.Sprintf("%s.%s", data, domain)}
59 }
60 }()
61
62 return results
63 }
64
65 // Name returns the name of the source
66 func (s *Source) Name() string {
67 return "shodan"
68 }
69
70 func (s *Source) IsDefault() bool {
71 return true
72 }
73
74 func (s *Source) HasRecursiveSupport() bool {
75 return false
76 }
77
78 func (s *Source) NeedsKey() bool {
79 return true
80 }
81
82 func (s *Source) AddApiKeys(keys []string) {
83 s.apiKeys = keys
84 }
0 // Package sitedossier logic
1 package sitedossier
2
3 import (
4 "context"
5 "fmt"
6 "io"
7 "math/rand"
8 "net/http"
9 "regexp"
10 "time"
11
12 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
13 )
14
15 // SleepRandIntn is the upper bound of the pseudo-random number of seconds
16 // added to the base delay before requesting the next page of results
17 const SleepRandIntn = 5
18
19 var reNext = regexp.MustCompile(`<a href="([A-Za-z0-9/.]+)"><b>`)
20
21 type agent struct {
22 results chan subscraping.Result
23 session *subscraping.Session
24 }
25
26 func (a *agent) enumerate(ctx context.Context, baseURL string) {
27 select {
28 case <-ctx.Done():
29 return
30 default:
31 }
32
33 resp, err := a.session.SimpleGet(ctx, baseURL)
34 isnotfound := resp != nil && resp.StatusCode == http.StatusNotFound
35 if err != nil && !isnotfound {
36 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
37 a.session.DiscardHTTPResponse(resp)
38 return
39 }
40
41 body, err := io.ReadAll(resp.Body)
42 if err != nil {
43 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Error, Error: err}
44 resp.Body.Close()
45 return
46 }
47 resp.Body.Close()
48
49 src := string(body)
50 for _, match := range a.session.Extractor.FindAllString(src, -1) {
51 a.results <- subscraping.Result{Source: "sitedossier", Type: subscraping.Subdomain, Value: match}
52 }
53
54 match1 := reNext.FindStringSubmatch(src)
55 time.Sleep(time.Duration((3 + rand.Intn(SleepRandIntn))) * time.Second)
56
57 if len(match1) > 0 {
58 a.enumerate(ctx, "http://www.sitedossier.com"+match1[1])
59 }
60 }
61
62 // Source is the passive scraping agent
63 type Source struct{}
64
65 // Run function returns all subdomains found with the service
66 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
67 results := make(chan subscraping.Result)
68
69 a := agent{
70 session: session,
71 results: results,
72 }
73
74 go func() {
75 a.enumerate(ctx, fmt.Sprintf("http://www.sitedossier.com/parentdomain/%s", domain))
76 close(a.results)
77 }()
78
79 return a.results
80 }
81
82 // Name returns the name of the source
83 func (s *Source) Name() string {
84 return "sitedossier"
85 }
86
87 func (s *Source) IsDefault() bool {
88 return false
89 }
90
91 func (s *Source) HasRecursiveSupport() bool {
92 return false
93 }
94
95 func (s *Source) NeedsKey() bool {
96 return false
97 }
98
99 func (s *Source) AddApiKeys(_ []string) {
100 // no key needed
101 }
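Pagination works by following the link matched by reNext; the fragment below is a hypothetical snippet of a sitedossier parentdomain page showing how the next URL is assembled:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same "next page" pattern as the sitedossier source above.
	reNext := regexp.MustCompile(`<a href="([A-Za-z0-9/.]+)"><b>`)

	// Hypothetical pagination fragment from a parentdomain listing page.
	page := `<a href="/parentdomain/example.com/101"><b>Show next 100 items</b></a>`
	if m := reNext.FindStringSubmatch(page); len(m) > 0 {
		fmt.Println("http://www.sitedossier.com" + m[1])
		// http://www.sitedossier.com/parentdomain/example.com/101
	}
}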
0 // Package sonarsearch logic
1 package sonarsearch
2
3 import (
4 "context"
5 "encoding/json"
6 "fmt"
7 "strconv"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 // Source is the passive scraping agent
13 type Source struct{}
14
15 // Run function returns all subdomains found with the service
16 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
17 results := make(chan subscraping.Result)
18 go func() {
19 defer close(results)
20
21 getURL := fmt.Sprintf("https://sonar.omnisint.io/subdomains/%s?page=", domain)
22 page := 0
23 var subdomains []string
24 for {
25 resp, err := session.SimpleGet(ctx, getURL+strconv.Itoa(page))
26 if err != nil {
27 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
28 session.DiscardHTTPResponse(resp)
29 return
30 }
31
32 if err := json.NewDecoder(resp.Body).Decode(&subdomains); err != nil {
33 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
34 resp.Body.Close()
35 return
36 }
37 resp.Body.Close()
38
39 if len(subdomains) == 0 {
40 return
41 }
42
43 for _, subdomain := range subdomains {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
45 }
46
47 page++
48 }
49 }()
50
51 return results
52 }
53
54 // Name returns the name of the source
55 func (s *Source) Name() string {
56 return "sonarsearch"
57 }
58
59 func (s *Source) IsDefault() bool {
60 return false
61 }
62
63 func (s *Source) HasRecursiveSupport() bool {
64 return true
65 }
66
67 func (s *Source) NeedsKey() bool {
68 return false
69 }
70
71 func (s *Source) AddApiKeys(_ []string) {
72 // no key needed
73 }
0 // Package threatbook logic
1 package threatbook
2
3 import (
4 "context"
5 "fmt"
6 "strconv"
7
8 jsoniter "github.com/json-iterator/go"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 type threatBookResponse struct {
14 ResponseCode int64 `json:"response_code"`
15 VerboseMsg string `json:"verbose_msg"`
16 Data struct {
17 Domain string `json:"domain"`
18 SubDomains struct {
19 Total string `json:"total"`
20 Data []string `json:"data"`
21 } `json:"sub_domains"`
22 } `json:"data"`
23 }
24
25 // Source is the passive scraping agent
26 type Source struct {
27 apiKeys []string
28 }
29
30 // Run function returns all subdomains found with the service
31 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
32 results := make(chan subscraping.Result)
33
34 go func() {
35 defer close(results)
36
37 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
38 if randomApiKey == "" {
39 return
40 }
41
42 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://api.threatbook.cn/v3/domain/sub_domains?apikey=%s&resource=%s", randomApiKey, domain))
43 if err != nil && resp == nil {
44 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
45 session.DiscardHTTPResponse(resp)
46 return
47 }
48
49 var response threatBookResponse
50 err = jsoniter.NewDecoder(resp.Body).Decode(&response)
51 if err != nil {
52 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
53 resp.Body.Close()
54 return
55 }
56 resp.Body.Close()
57
58 if response.ResponseCode != 0 {
59 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: fmt.Errorf("code %d, %s", response.ResponseCode, response.VerboseMsg)}
60 return
61 }
62
63 total, err := strconv.ParseInt(response.Data.SubDomains.Total, 10, 64)
64 if err != nil {
65 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
66 return
67 }
68
69 if total > 0 {
70 for _, subdomain := range response.Data.SubDomains.Data {
71 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
72 }
73 }
74 }()
75
76 return results
77 }
78
79 // Name returns the name of the source
80 func (s *Source) Name() string {
81 return "threatbook"
82 }
83
84 func (s *Source) IsDefault() bool {
85 return false
86 }
87
88 func (s *Source) HasRecursiveSupport() bool {
89 return false
90 }
91
92 func (s *Source) NeedsKey() bool {
93 return true
94 }
95
96 func (s *Source) AddApiKeys(keys []string) {
97 s.apiKeys = keys
98 }
0 // Package threatminer logic
1 package threatminer
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 type response struct {
13 StatusCode string `json:"status_code"`
14 StatusMessage string `json:"status_message"`
15 Results []string `json:"results"`
16 }
17
18 // Source is the passive scraping agent
19 type Source struct{}
20
21 // Run function returns all subdomains found with the service
22 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
23 results := make(chan subscraping.Result)
24
25 go func() {
26 defer close(results)
27
28 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://api.threatminer.org/v2/domain.php?q=%s&rt=5", domain))
29 if err != nil {
30 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
31 session.DiscardHTTPResponse(resp)
32 return
33 }
34
35 defer resp.Body.Close()
36
37 var data response
38 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
39 if err != nil {
40 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
41 return
42 }
43
44 for _, subdomain := range data.Results {
45 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
46 }
47 }()
48
49 return results
50 }
51
52 // Name returns the name of the source
53 func (s *Source) Name() string {
54 return "threatminer"
55 }
56
57 func (s *Source) IsDefault() bool {
58 return true
59 }
60
61 func (s *Source) HasRecursiveSupport() bool {
62 return false
63 }
64
65 func (s *Source) NeedsKey() bool {
66 return false
67 }
68
69 func (s *Source) AddApiKeys(_ []string) {
70 // no key needed
71 }
0 // Package virustotal logic
1 package virustotal
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 type response struct {
13 Subdomains []string `json:"subdomains"`
14 }
15
16 // Source is the passive scraping agent
17 type Source struct {
18 apiKeys []string
19 }
20
21 // Run function returns all subdomains found with the service
22 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
23 results := make(chan subscraping.Result)
24
25 go func() {
26 defer close(results)
27
28 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
29 if randomApiKey == "" {
30 return
31 }
32
33 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://www.virustotal.com/vtapi/v2/domain/report?apikey=%s&domain=%s", randomApiKey, domain))
34 if err != nil {
35 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
36 session.DiscardHTTPResponse(resp)
37 return
38 }
39
40 var data response
41 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
42 if err != nil {
43 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
44 resp.Body.Close()
45 return
46 }
47
48 resp.Body.Close()
49
50 for _, subdomain := range data.Subdomains {
51 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
52 }
53 }()
54
55 return results
56 }
57
58 // Name returns the name of the source
59 func (s *Source) Name() string {
60 return "virustotal"
61 }
62
63 func (s *Source) IsDefault() bool {
64 return true
65 }
66
67 func (s *Source) HasRecursiveSupport() bool {
68 return true
69 }
70
71 func (s *Source) NeedsKey() bool {
72 return true
73 }
74
75 func (s *Source) AddApiKeys(keys []string) {
76 s.apiKeys = keys
77 }
0 // Package waybackarchive logic
1 package waybackarchive
2
3 import (
4 "bufio"
5 "context"
6 "fmt"
7 "net/url"
8 "strings"
9
10 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
11 )
12
13 // Source is the passive scraping agent
14 type Source struct{}
15
16 // Run function returns all subdomains found with the service
17 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
18 results := make(chan subscraping.Result)
19
20 go func() {
21 defer close(results)
22
23 resp, err := session.SimpleGet(ctx, fmt.Sprintf("http://web.archive.org/cdx/search/cdx?url=*.%s/*&output=txt&fl=original&collapse=urlkey", domain))
24 if err != nil {
25 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
26 session.DiscardHTTPResponse(resp)
27 return
28 }
29
30 defer resp.Body.Close()
31
32 scanner := bufio.NewScanner(resp.Body)
33 for scanner.Scan() {
34 line := scanner.Text()
35 if line == "" {
36 continue
37 }
38 line, _ = url.QueryUnescape(line)
39 subdomain := session.Extractor.FindString(line)
40 if subdomain != "" {
42 // fix for double/triple-encoded URLs: a single unescape leaves "25"/"2f" remnants in front of the match
42 subdomain = strings.ToLower(subdomain)
43 subdomain = strings.TrimPrefix(subdomain, "25")
44 subdomain = strings.TrimPrefix(subdomain, "2f")
45
46 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: subdomain}
47 }
48 }
49 }()
50
51 return results
52 }
53
54 // Name returns the name of the source
55 func (s *Source) Name() string {
56 return "waybackarchive"
57 }
58
59 func (s *Source) IsDefault() bool {
60 return false
61 }
62
63 func (s *Source) HasRecursiveSupport() bool {
64 return false
65 }
66
67 func (s *Source) NeedsKey() bool {
68 return false
69 }
70
71 func (s *Source) AddApiKeys(_ []string) {
72 // no key needed
73 }
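
An editorial sketch (not part of the upstream diff) of why the "25"/"2f" prefix trimming above helps: a CDX "original" URL that was percent-encoded more than once still carries "%25..." remnants after a single url.QueryUnescape, and those digits end up glued to the front of the extracted hostname. The input line below is hypothetical.

package main

import (
	"fmt"
	"net/url"
	"regexp"
	"strings"
)

func main() {
	domain := "example.com"
	// Same extractor pattern the session builds for each domain.
	extractor := regexp.MustCompile(`[a-zA-Z0-9\*_.-]+\.` + domain)

	// Triple-encoded slash ("%25252F") in front of the hostname.
	line := "http://redirect.test/?u=https%25252Fsub.example.com%25252Fpath"

	line, _ = url.QueryUnescape(line) // one unescape still leaves "%252Fsub.example.com..."
	subdomain := extractor.FindString(line)

	subdomain = strings.ToLower(subdomain)
	subdomain = strings.TrimPrefix(subdomain, "25")
	subdomain = strings.TrimPrefix(subdomain, "2f")

	fmt.Println(subdomain) // sub.example.com
}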
0 // Package whoisxmlapi logic
1 package whoisxmlapi
2
3 import (
4 "context"
5 "fmt"
6
7 jsoniter "github.com/json-iterator/go"
8
9 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
10 )
11
12 type response struct {
13 Search string `json:"search"`
14 Result Result `json:"result"`
15 }
16
17 type Result struct {
18 Count int `json:"count"`
19 Records []Record `json:"records"`
20 }
21
22 type Record struct {
23 Domain string `json:"domain"`
24 FirstSeen int `json:"firstSeen"`
25 LastSeen int `json:"lastSeen"`
26 }
27
28 // Source is the passive scraping agent
29 type Source struct {
30 apiKeys []string
31 }
32
33 // Run function returns all subdomains found with the service
34 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
35 results := make(chan subscraping.Result)
36
37 go func() {
38 defer close(results)
39
40 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
41 if randomApiKey == "" {
42 return
43 }
44
45 resp, err := session.SimpleGet(ctx, fmt.Sprintf("https://subdomains.whoisxmlapi.com/api/v1?apiKey=%s&domainName=%s", randomApiKey, domain))
46 if err != nil {
47 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
48 session.DiscardHTTPResponse(resp)
49 return
50 }
51
52 var data response
53 err = jsoniter.NewDecoder(resp.Body).Decode(&data)
54 if err != nil {
55 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
56 resp.Body.Close()
57 return
58 }
59
60 resp.Body.Close()
61
62 for _, record := range data.Result.Records {
63 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: record.Domain}
64 }
65 }()
66
67 return results
68 }
69
70 // Name returns the name of the source
71 func (s *Source) Name() string {
72 return "whoisxmlapi"
73 }
74
75 func (s *Source) IsDefault() bool {
76 return true
77 }
78
79 func (s *Source) HasRecursiveSupport() bool {
80 return false
81 }
82
83 func (s *Source) NeedsKey() bool {
84 return true
85 }
86
87 func (s *Source) AddApiKeys(keys []string) {
88 s.apiKeys = keys
89 }
0 // Package zoomeye logic
1 package zoomeye
2
3 import (
4 "bytes"
5 "context"
6 "encoding/json"
7 "errors"
8 "fmt"
9 "net/http"
10
11 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
12 )
13
14 // zoomAuth holds the ZoomEye credentials
15 type zoomAuth struct {
16 User string `json:"username"`
17 Pass string `json:"password"`
18 }
19
20 type loginResp struct {
21 JWT string `json:"access_token"`
22 }
23
24 // search results
25 type zoomeyeResults struct {
26 Matches []struct {
27 Site string `json:"site"`
28 Domains []string `json:"domains"`
29 } `json:"matches"`
30 }
31
32 // Source is the passive scraping agent
33 type Source struct {
34 apiKeys []apiKey
35 }
36
37 type apiKey struct {
38 username string
39 password string
40 }
41
42 // Run function returns all subdomains found with the service
43 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
44 results := make(chan subscraping.Result)
45
46 go func() {
47 defer close(results)
48
49 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
50 if randomApiKey.username == "" || randomApiKey.password == "" {
51 return
52 }
53
54 jwt, err := doLogin(ctx, session, randomApiKey)
55 if err != nil {
56 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
57 return
58 }
59 // check if the returned JWT is empty
60 if jwt == "" {
61 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: errors.New("could not log into zoomeye")}
62 return
63 }
64
65 headers := map[string]string{
66 "Authorization": fmt.Sprintf("JWT %s", jwt),
67 "Accept": "application/json",
68 "Content-Type": "application/json",
69 }
70 for currentPage := 0; currentPage <= 100; currentPage++ {
71 api := fmt.Sprintf("https://api.zoomeye.org/web/search?query=hostname:%s&page=%d", domain, currentPage)
72 resp, err := session.Get(ctx, api, "", headers)
73 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
74 if err != nil {
75 if !isForbidden && currentPage == 0 {
76 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
77 session.DiscardHTTPResponse(resp)
78 }
79 return
80 }
81
82 var res zoomeyeResults
83 err = json.NewDecoder(resp.Body).Decode(&res)
84 if err != nil {
85 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
86 resp.Body.Close()
87 return
88 }
89 resp.Body.Close()
90
91 for _, r := range res.Matches {
92 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: r.Site}
93 for _, domain := range r.Domains {
94 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: domain}
95 }
96 }
97 }
98 }()
99
100 return results
101 }
102
103 // doLogin performs authentication on the ZoomEye API
104 func doLogin(ctx context.Context, session *subscraping.Session, randomApiKey apiKey) (string, error) {
105 creds := &zoomAuth{
106 User: randomApiKey.username,
107 Pass: randomApiKey.password,
108 }
109 body, err := json.Marshal(&creds)
110 if err != nil {
111 return "", err
112 }
113 resp, err := session.SimplePost(ctx, "https://api.zoomeye.org/user/login", "application/json", bytes.NewBuffer(body))
114 if err != nil {
115 session.DiscardHTTPResponse(resp)
116 return "", err
117 }
118
119 defer resp.Body.Close()
120
121 var login loginResp
122 err = json.NewDecoder(resp.Body).Decode(&login)
123 if err != nil {
124 return "", err
125 }
126 return login.JWT, nil
127 }
128
129 // Name returns the name of the source
130 func (s *Source) Name() string {
131 return "zoomeye"
132 }
133
134 func (s *Source) IsDefault() bool {
135 return false
136 }
137
138 func (s *Source) HasRecursiveSupport() bool {
139 return false
140 }
141
142 func (s *Source) NeedsKey() bool {
143 return true
144 }
145
146 func (s *Source) AddApiKeys(keys []string) {
147 s.apiKeys = subscraping.CreateApiKeys(keys, func(k, v string) apiKey {
148 return apiKey{k, v}
149 })
150 }
0 package zoomeyeapi
1
2 import (
3 "context"
4 "encoding/json"
5 "fmt"
6 "net/http"
7
8 "github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
9 )
10
11 // search results
12 type zoomeyeResults struct {
13 Status int `json:"status"`
14 Total int `json:"total"`
15 List []struct {
16 Name string `json:"name"`
17 Ip []string `json:"ip"`
18 } `json:"list"`
19 }
20
21 // Source is the passive scraping agent
22 type Source struct {
23 apiKeys []string
24 }
25
26 // Run function returns all subdomains found with the service
27 func (s *Source) Run(ctx context.Context, domain string, session *subscraping.Session) <-chan subscraping.Result {
28 results := make(chan subscraping.Result)
29
30 go func() {
31 defer close(results)
32
33 randomApiKey := subscraping.PickRandom(s.apiKeys, s.Name())
34 if randomApiKey == "" {
35 return
36 }
37
38 headers := map[string]string{
39 "API-KEY": randomApiKey,
40 "Accept": "application/json",
41 "Content-Type": "application/json",
42 }
43 var pages = 1
44 for currentPage := 1; currentPage <= pages; currentPage++ {
45 api := fmt.Sprintf("https://api.zoomeye.org/domain/search?q=%s&type=1&s=1000&page=%d", domain, currentPage)
46 resp, err := session.Get(ctx, api, "", headers)
47 isForbidden := resp != nil && resp.StatusCode == http.StatusForbidden
48 if err != nil {
49 if !isForbidden {
50 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
51 session.DiscardHTTPResponse(resp)
52 }
53 return
54 }
55
56 var res zoomeyeResults
57 err = json.NewDecoder(resp.Body).Decode(&res)
58
59 if err != nil {
60 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Error, Error: err}
61 _ = resp.Body.Close()
62 return
63 }
64 _ = resp.Body.Close()
65 pages = int(res.Total/1000) + 1
66 for _, r := range res.List {
67 results <- subscraping.Result{Source: s.Name(), Type: subscraping.Subdomain, Value: r.Name}
68 }
69 }
70 }()
71
72 return results
73 }
74
75 // Name returns the name of the source
76 func (s *Source) Name() string {
77 return "zoomeyeapi"
78 }
79
80 func (s *Source) IsDefault() bool {
81 return false
82 }
83
84 func (s *Source) HasRecursiveSupport() bool {
85 return false
86 }
87
88 func (s *Source) NeedsKey() bool {
89 return true
90 }
91
92 func (s *Source) AddApiKeys(keys []string) {
93 s.apiKeys = keys
94 }
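
An editorial sketch (not part of the upstream diff) of the page-count arithmetic the loop above uses: the total reported by the API is divided by the fixed page size of 1000 and one page is always added, so an exact multiple of 1000 requests one extra (empty) page. The totals below are made up.

package main

import "fmt"

// pagesFor mirrors the `pages = int(res.Total/1000) + 1` computation above.
func pagesFor(total int) int {
	return total/1000 + 1
}

func main() {
	for _, total := range []int{0, 999, 1000, 2500} {
		fmt.Printf("total=%d -> pages=%d\n", total, pagesFor(total))
	}
	// total=0    -> 1 page
	// total=999  -> 1 page
	// total=1000 -> 2 pages (second page comes back empty)
	// total=2500 -> 3 pages
}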
0 package subscraping
1
2 import (
3 "context"
4 "net/http"
5 "regexp"
6
7 "github.com/projectdiscovery/ratelimit"
8 )
9
10 // BasicAuth holds the credentials for a request's basic Authorization header
11 type BasicAuth struct {
12 Username string
13 Password string
14 }
15
16 // Source is an interface inherited by each passive source
17 type Source interface {
18 // Run takes a domain as argument and a session object
19 // which contains the extractor for subdomains, http client
20 // and other stuff.
21 Run(context.Context, string, *Session) <-chan Result
22 // Name returns the name of the source. It is preferred to use lower case names.
23 Name() string
24
25 // IsDefault returns true if the current source should be
26 // used as part of the default execution.
27 IsDefault() bool
28
29 // HasRecursiveSupport returns true if the current source
30 // accepts subdomains (e.g. subdomain.domain.tld),
31 // not just root domains.
32 HasRecursiveSupport() bool
33
34 // NeedsKey returns true if the source requires an API key
35 NeedsKey() bool
36
37 AddApiKeys([]string)
38 }
39
40 // Session is the option passed to the source; an option is created
41 // uniquely for each source.
42 type Session struct {
43 // Extractor is the regex for subdomains created for each domain
44 Extractor *regexp.Regexp
45 // Client is the current http client
46 Client *http.Client
47 // Rate limit instance
48 RateLimiter *ratelimit.Limiter
49 }
50
51 // Result is a result structure returned by a source
52 type Result struct {
53 Type ResultType
54 Source string
55 Value string
56 Error error
57 }
58
59 // ResultType is the type of result returned by the source
60 type ResultType int
61
62 // Types of results returned by the source
63 const (
64 Subdomain ResultType = iota
65 Error
66 )
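
An editorial sketch (not part of the upstream diff) of how a caller might consume the contract defined above: range over the channel returned by Run and switch on the ResultType. It assumes an already-constructed *subscraping.Session and any concrete Source; session construction is not part of this file.

package consumer

import (
	"context"
	"fmt"

	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

// drain runs a single source against a domain and prints what comes back,
// separating subdomain values from errors via the ResultType switch.
func drain(ctx context.Context, src subscraping.Source, domain string, session *subscraping.Session) {
	for result := range src.Run(ctx, domain, session) {
		switch result.Type {
		case subscraping.Subdomain:
			fmt.Printf("[%s] %s\n", result.Source, result.Value)
		case subscraping.Error:
			fmt.Printf("[%s] error: %v\n", result.Source, result.Error)
		}
	}
}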
0 package subscraping
1
2 import (
3 "math/rand"
4 "regexp"
5 "strings"
6 "sync"
7 "time"
8
9 "github.com/projectdiscovery/gologger"
10 )
11
12 const MultipleKeyPartsLength = 2
13
14 var subdomainExtractorMutex = &sync.Mutex{}
15
16 func init() {
17 rand.Seed(time.Now().UnixNano())
18 }
19
20 // NewSubdomainExtractor creates a new regular expression to extract
21 // subdomains from text based on the given domain.
22 func NewSubdomainExtractor(domain string) (*regexp.Regexp, error) {
23 subdomainExtractorMutex.Lock()
24 defer subdomainExtractorMutex.Unlock()
25 extractor, err := regexp.Compile(`[a-zA-Z0-9\*_.-]+\.` + domain)
26 if err != nil {
27 return nil, err
28 }
29 return extractor, nil
30 }
31
32 func PickRandom[T any](v []T, sourceName string) T {
33 var result T
34 length := len(v)
35 if length == 0 {
36 gologger.Debug().Msgf("Cannot use the '%s' source because there was no API key/secret defined for it.", sourceName)
37 return result
38 }
39 return v[rand.Intn(length)]
40 }
41
42 func CreateApiKeys[T any](keys []string, provider func(k, v string) T) []T {
43 var result []T
44 for _, key := range keys {
45 if keyPartA, keyPartB, ok := createMultiPartKey(key); ok {
46 result = append(result, provider(keyPartA, keyPartB))
47 }
48 }
49 return result
50 }
51
52 func createMultiPartKey(key string) (keyPartA, keyPartB string, ok bool) {
53 parts := strings.Split(key, ":")
54 ok = len(parts) == MultipleKeyPartsLength
55
56 if ok {
57 keyPartA = parts[0]
58 keyPartB = parts[1]
59 }
60
61 return
62 }
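
An editorial sketch (not part of the upstream diff) of how CreateApiKeys and PickRandom fit together: keys supplied as "partA:partB" strings are split into a typed value per key, and one of them is chosen at random per run (the zoomeye source uses this pattern for username:password pairs). The credential type and key strings below are hypothetical.

package example

import (
	"github.com/projectdiscovery/subfinder/v2/pkg/subscraping"
)

type credential struct {
	user string
	pass string
}

func pickCredential() credential {
	raw := []string{"alice:s3cret", "bob:hunter2"} // "partA:partB" format expected by CreateApiKeys
	creds := subscraping.CreateApiKeys(raw, func(k, v string) credential {
		return credential{user: k, pass: v}
	})
	// PickRandom returns the zero value (and logs a debug message)
	// when no keys were configured for the named source.
	return subscraping.PickRandom(creds, "example-source")
}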
0 package testutils
1
2 import (
3 "fmt"
4 "os"
5 "os/exec"
6 "strings"
7 )
8
9 func RunSubfinderAndGetResults(debug bool, domain string, extra ...string) ([]string, error) {
10 cmd := exec.Command("bash", "-c")
11 cmdLine := fmt.Sprintf("echo %s | %s", domain, "./subfinder ")
12 cmdLine += strings.Join(extra, " ")
13 cmd.Args = append(cmd.Args, cmdLine)
14 if debug {
15 cmd.Args = append(cmd.Args, "-v")
16 cmd.Stderr = os.Stderr
17 fmt.Println(cmd.String())
18 } else {
19 cmd.Args = append(cmd.Args, "-silent")
20 }
21 data, err := cmd.Output()
22 if debug {
23 fmt.Println(string(data))
24 }
25 if err != nil {
26 return nil, err
27 }
28 var parts []string
29 items := strings.Split(string(data), "\n")
30 for _, i := range items {
31 if i != "" {
32 parts = append(parts, i)
33 }
34 }
35 return parts, nil
36 }
37
38 // TestCase is a single integration test case
39 type TestCase interface {
40 Execute() error
41 }
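
An editorial sketch (not part of the upstream diff) of one way to satisfy the TestCase interface above with RunSubfinderAndGetResults, if placed alongside these helpers. The source name, domain, and expectation are hypothetical, and it assumes the -s flag of this subfinder version to restrict discovery to a single source.

package testutils

import "fmt"

type singleSourceTest struct {
	domain string
	source string
}

// Execute runs subfinder restricted to one source and fails if nothing is found.
func (t singleSourceTest) Execute() error {
	results, err := RunSubfinderAndGetResults(false, t.domain, "-s", t.source)
	if err != nil {
		return err
	}
	if len(results) == 0 {
		return fmt.Errorf("no subdomains returned by %s for %s", t.source, t.domain)
	}
	return nil
}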