Codebase list finalrecon / upstream/1.1.2
New upstream version 1.1.2 Sophie Brun 3 years ago
5 changed file(s) with 224 addition(s) and 115 deletion(s). Raw diff Collapse all Expand all
1818 ## Available In
1919
2020 <p align="center">
21 <a href="https://www.kali.org/news/kali-linux-2020-4-release/">
22 <img width="150px" hspace="10px" src="https://i.imgur.com/yQRrCtC.png" alt="kali linux finalrecon">
23 </a>
2124 <a href="https://blackarch.org/">
2225 <img width="150px" hspace="10px" src="https://i.imgur.com/YZ5KDL1.png" alt="blackarch finalrecon">
2326 </a>
8083 * ThreatMiner
8184 * Facebook Certificate Transparency API
8285 * Auth Token is Required for this source, read Configuration below
86 * VirusTotal
87 * API Key is Required
88 * CertSpotter
8389
8490 * Traceroute
8591 * Protocols
109115 Some modules use API keys to fetch data from different resources; these are optional — if you are not using an API key, they will simply be skipped.
110116 If you are interested in using these resources you can store your API key in **keys.json** file.
111117
112 `Path --> finalrecon/conf/keys.json`
118 `Path --> $HOME/.config/finalrecon/conf/keys.json`
113119
114120 If you don't want to use a key for a certain data source just set its value to `null`, by default values of all available data sources are null.
115121
129135
130136 Read More : https://developers.facebook.com/docs/facebook-login/access-tokens
131137
138 #### VirusTotal API
139
140 This data source is used to fetch **Sub Domains** which are used in **Sub Domain Enumeration**
141
142 Key Format : `KEY`
143
144 Example :
145
146 ```
147 {
148 "virustotal": "eu4zc5f0skv15fnw54nkhj4m26zbteh9409aklpxhfpp68s8d4l63pn13rsojt9y"
149 }
150 ```
151
132152 ## Tested on
133153
134154 * Kali Linux
138158
139159 ## Installation
140160
161 ### Kali Linux
162
163 ```
164 sudo apt install finalrecon
165 ```
166
141167 ### BlackArch Linux
142168
143169 ```
144 pacman -S finalrecon
170 sudo pacman -S finalrecon
145171 ```
146172
147173 ### SecBSD
150176 doas pkg_add finalrecon
151177 ```
152178
153 ### Kali Linux
179 ### Other Linux
154180
155181 ```bash
156182 git clone https://github.com/thewhiteh4t/FinalRecon.git
00 {
1 "facebook": null
1 "facebook": null,
2 "virustotal": null
23 }
99 C = '\033[36m' # cyan
1010 W = '\033[0m' # white
1111
12 pid_path = '/tmp/finalrecon.pid'
12 home = os.getenv('HOME')
13 pid_path = home + '/.local/share/finalrecon/finalrecon.pid'
14 usr_data = home + '/.local/share/finalrecon/dumps/'
15 conf_path = home + '/.config/finalrecon'
16 path_to_script = os.path.dirname(os.path.realpath(__file__))
17 src_conf_path = path_to_script + '/conf/'
1318 fail = False
1419
15 if os.path.exists(pid_path):
20 if os.path.isfile(pid_path):
1621 print(R + '[-]' + C + ' One instance of FinalRecon is already running!' + W)
1722 with open(pid_path, 'r') as pidfile:
1823 pid = pidfile.read()
1924 print(G + '[+]' + C + ' PID : ' + W + str(pid))
20 print(G + '[>]' + C + ' If FinalRecon crashed, execute : ' + W + 'sudo rm {}'.format(pid_path))
25 print(G + '[>]' + C + ' If FinalRecon crashed, execute : ' + W + 'rm {}'.format(pid_path))
2126 sys.exit()
2227 else:
28 os.makedirs(os.path.dirname(pid_path), exist_ok=True)
2329 with open(pid_path, 'w') as pidfile:
2430 pidfile.write(str(os.getpid()))
2531
26 path_to_script = os.path.dirname(os.path.realpath(__file__))
32 if os.path.exists(conf_path):
33 pass
34 else:
35 import shutil
36 shutil.copytree(src_conf_path, conf_path, dirs_exist_ok=True)
2737
2838 with open(path_to_script + '/requirements.txt', 'r') as rqr:
2939 pkg_list = rqr.read().strip().split('\n')
4454
4555 import argparse
4656
47 version = '1.1.0'
57 version = '1.1.2'
4858 gh_version = ''
4959 twitter_url = ''
5060 discord_url = ''
7686 ext_help.add_argument('-tt', type=float, help='Traceroute Timeout [ Default : 1.0 ]')
7787 ext_help.add_argument('-o', help='Export Output [ Default : txt ] [ Available : xml, csv ]')
7888 ext_help.set_defaults(
79 t=30,
80 T=30.0,
81 w='wordlists/dirb_common.txt',
82 r=False,
83 s=True,
84 sp=443,
85 d='1.1.1.1',
86 e='',
87 m='UDP',
88 p=33434,
89 tt=1.0,
90 o='txt')
89 t = 30,
90 T = 30.0,
91 w = path_to_script + '/wordlists/dirb_common.txt',
92 r = False,
93 s = True,
94 sp = 443,
95 d = '1.1.1.1',
96 e = '',
97 m = 'UDP',
98 p = 33434,
99 tt = 1.0,
100 o = 'txt')
91101
92102 try:
93103 args = parser.parse_args()
195205 whois_lookup(ip, output, data)
196206 dnsrec(domain, output, data)
197207 if type_ip == False:
198 subdomains(domain, tout, output, data)
208 subdomains(domain, tout, output, data, conf_path)
199209 else:
200210 pass
201211 troute(ip, mode, port, tr_tout, output, data)
248258 data['module-FinalRecon'] = meta
249259
250260 if output != 'None':
251 fpath = os.getenv('HOME') + '/.local/share/finalrecon/dumps/'
261 fpath = usr_data
252262 fname = fpath + hostname + '.' + output
253263 if not os.path.exists(fpath):
254264 os.makedirs(fpath)
285295
286296 if subd == True and type_ip == False:
287297 from modules.subdom import subdomains
288 subdomains(domain, tout, output, data)
298 subdomains(domain, tout, output, data, conf_path)
289299 elif subd == True and type_ip == True:
290300 print(R + '[-]' + C + ' Sub-Domain Enumeration is Not Supported for IP Addresses' + W + '\n')
291301 os.remove(pid_path)
22 {
33 "name": "FinalRecon",
44 "author": "thewhiteh4t",
5 "version": "1.1.0",
5 "version": "1.1.2",
66 "twitter": "https://twitter.com/thewhiteh4t",
77 "discord": "https://discord.gg/UM92zUn"
88 }
00 #!/usr/bin/env python3
11
22 import json
3 import aiohttp
34 import asyncio
4 import requests
55 import psycopg2
66
77 R = '\033[31m' # red
1212
1313 found = []
1414
15 async def buffover(hostname, tout):
15 async def buffover(hostname, session):
1616 global found
1717 print(Y + '[!]' + C + ' Requesting ' + G + 'BuffOver' + W)
1818 url = 'https://dns.bufferover.run/dns'
19 data = {
20 'q': '.{}'.format(hostname)
21 }
22 try:
23 r = requests.get(url, params=data, timeout=tout)
24 sc = r.status_code
25 if sc == 200:
26 output = r.content.decode()
27 json_out = json.loads(output)
28 subds = json_out['FDNS_A']
29 if subds == None:
30 pass
31 else:
32 for subd in subds:
33 subd = subd.split(',')
34 for sub in subd:
35 found.append(sub)
36 else:
37 print(R + '[-]' + C + ' BuffOver Status : ' + W + str(sc))
19 bo_params = {
20 'q': '.{}'.format(hostname)
21 }
22 try:
23 async with session.get(url, params=bo_params) as resp:
24 sc = resp.status
25 if sc == 200:
26 output = await resp.text()
27 json_out = json.loads(output)
28 subds = json_out['FDNS_A']
29 if subds == None:
30 pass
31 else:
32 for subd in subds:
33 subd = subd.split(',')
34 for sub in subd:
35 found.append(sub)
36 else:
37 print(R + '[-]' + C + ' BuffOver Status : ' + W + str(sc))
3838 except Exception as e:
3939 print(R + '[-]' + C + ' BuffOver Exception : ' + W + str(e))
4040
5555 except Exception as e:
5656 print(R + '[-]' + C + ' crtsh Exception : ' + W + str(e))
5757
58 async def thcrowd(hostname, tout):
59 global found
60 print(Y + '[!]' + C + ' Requesting ' + G + 'ThreadCrowd' + W)
58 async def thcrowd(hostname, session):
59 global found
60 print(Y + '[!]' + C + ' Requesting ' + G + 'ThreatCrowd' + W)
6161 url = 'https://www.threatcrowd.org/searchApi/v2/domain/report/'
62 data = {
63 'domain': hostname
64 }
65 try:
66 r = requests.get(url, params=data, timeout=tout)
67 sc = r.status_code
68 if sc == 200:
69 output = r.content.decode()
70 json_out = json.loads(output)
71 if json_out['response_code'] == '0':
72 pass
73 else:
74 subd = json_out['subdomains']
75 found.extend(subd)
76 else:
77 print(R + '[-]' + C + ' ThreatCrowd Status : ' + W + str(sc))
62 thc_params = {
63 'domain': hostname
64 }
65 try:
66 async with session.get(url, params=thc_params) as resp:
67 sc = resp.status
68 if sc == 200:
69 output = await resp.text()
70 json_out = json.loads(output)
71 if json_out['response_code'] == '0':
72 pass
73 else:
74 subd = json_out['subdomains']
75 found.extend(subd)
76 else:
77 print(R + '[-]' + C + ' ThreatCrowd Status : ' + W + str(sc))
7878 except Exception as e:
7979 print(R + '[-]' + C + ' ThreatCrowd Exception : ' + W + str(e))
8080
81 async def anubisdb(hostname, tout):
81 async def anubisdb(hostname, session):
8282 global found
8383 print(Y + '[!]' + C + ' Requesting ' + G + 'AnubisDB' + W)
8484 url = 'https://jldc.me/anubis/subdomains/{}'.format(hostname)
8585 try:
86 r = requests.get(url, timeout=tout)
87 sc = r.status_code
88 if sc == 200:
89 output = r.content.decode()
90 json_out = json.loads(output)
91 found.extend(json_out)
92 elif sc == 300:
93 pass
94 else:
95 print(R + '[-]' + C + ' AnubisDB Status : ' + W + str(sc))
86 async with session.get(url) as resp:
87 sc = resp.status
88 if sc == 200:
89 output = await resp.text()
90 json_out = json.loads(output)
91 found.extend(json_out)
92 elif sc == 300:
93 pass
94 else:
95 print(R + '[-]' + C + ' AnubisDB Status : ' + W + str(sc))
9696 except Exception as e:
9797 print(R + '[-]' + C + 'AnubisDB Exception : ' + W + str(e))
9898
99 async def thminer(hostname, tout):
99 async def thminer(hostname, session):
100100 global found
101101 print(Y + '[!]' + C + ' Requesting ' + G + 'ThreatMiner' + W)
102 url = 'https://api.threatminer.org/v2/domain.php?q=instagram.com&rt=5'
103 data = {
104 'q': hostname,
105 'rt': '5'
106 }
107 try:
108 r = requests.get(url, params=data, timeout=tout)
109 sc = r.status_code
110 if sc == 200:
111 output = r.content.decode()
112 json_out = json.loads(output)
113 subd = json_out['results']
114 found.extend(subd)
115 else:
116 print(R + '[-]' + C + ' ThreatMiner Status : ' + W + str(sc))
102 url = 'https://api.threatminer.org/v2/domain.php'
103 thm_params = {
104 'q': hostname,
105 'rt': '5'
106 }
107 try:
108 async with session.get(url, params=thm_params) as resp:
109 sc = resp.status
110 if sc == 200:
111 output = await resp.text()
112 json_out = json.loads(output)
113 subd = json_out['results']
114 found.extend(subd)
115 else:
116 print(R + '[-]' + C + ' ThreatMiner Status : ' + W + str(sc))
117117 except Exception as e:
118118 print(R + '[-]' + C + ' ThreatMiner Exception : ' + W + str(e))
119119
120 async def fb_cert(hostname, tout):
121 global found
122 with open('conf/keys.json', 'r') as keyfile:
120 async def fb_cert(hostname, conf_path, session):
121 global found
122 with open('{}/keys.json'.format(conf_path), 'r') as keyfile:
123123 json_read = keyfile.read()
124124
125125 json_load = json.loads(json_read)
128128 if fb_key != None:
129129 print(Y + '[!]' + C + ' Requesting ' + G + 'Facebook' + W)
130130 url = 'https://graph.facebook.com/certificates'
131 data = {
131 fb_params = {
132132 'query': hostname,
133133 'fields': 'domains',
134134 'access_token': fb_key
135135 }
136
137 r = requests.get(url, params=data, timeout=tout)
138 json_data = r.text
139 json_read = json.loads(json_data)
140 domains = json_read['data']
141
142 for i in range (0, len(domains)):
143 found.extend(json_read['data'][i]['domains'])
136 try:
137 async with session.get(url, params=fb_params) as resp:
138 sc = resp.status
139 if sc == 200:
140 json_data = await resp.text()
141 json_read = json.loads(json_data)
142 domains = json_read['data']
143 for i in range (0, len(domains)):
144 found.extend(json_read['data'][i]['domains'])
145 else:
146 print(R + '[-]' + C + ' Facebook Status : ' + W + str(sc))
147 except Exception as e:
148 print(R + '[-]' + C + ' Facebook Exception : ' + W + str(e))
144149 else:
145150 pass
146151
147 async def query(hostname, tout):
148 await asyncio.gather(
149 buffover(hostname, tout),
150 thcrowd(hostname, tout),
151 crtsh(hostname),
152 anubisdb(hostname, tout),
153 thminer(hostname, tout),
154 fb_cert(hostname, tout)
async def virust(hostname, conf_path, session):
    """Fetch sub-domains of *hostname* from the VirusTotal v3 API.

    Reads the API key from ``<conf_path>/keys.json`` (the ``virustotal``
    entry) and silently skips this source when no key is configured.
    Discovered sub-domain names are appended to the module-global
    ``found`` list; errors are reported to stdout, never raised.
    """
    global found
    with open('{}/keys.json'.format(conf_path), 'r') as keyfile:
        json_read = keyfile.read()

    json_load = json.loads(json_read)
    vt_key = json_load['virustotal']

    # The key is optional — a null value in keys.json disables this source.
    if vt_key is not None:
        print(Y + '[!]' + C + ' Requesting ' + G + 'VirusTotal' + W)
        url = 'https://www.virustotal.com/api/v3/domains/{}/subdomains'.format(hostname)
        vt_headers = {
            'x-apikey': vt_key
        }
        try:
            async with session.get(url, headers=vt_headers) as resp:
                sc = resp.status
                if sc == 200:
                    json_data = await resp.text()
                    json_read = json.loads(json_data)
                    domains = json_read['data']
                    # Each entry's 'id' field is the sub-domain name.
                    found.extend(entry['id'] for entry in domains)
                else:
                    print(R + '[-]' + C + ' VirusTotal Status : ' + W + str(sc))
        except Exception as e:
            print(R + '[-]' + C + ' VirusTotal Exception : ' + W + str(e))
183
async def certspot(hostname, session):
    """Collect sub-domains of *hostname* from the CertSpotter issuance API.

    The ``dns_names`` of every matching certificate issuance are appended
    to the module-global ``found`` list; failures are printed, not raised.
    """
    global found

    print(Y + '[!]' + C + ' Requesting ' + G + 'CertSpotter' + W)
    url = 'https://api.certspotter.com/v1/issuances'
    cs_params = {
        'domain': hostname,
        'expand': 'dns_names',
        'include_subdomains': 'true'
    }

    try:
        async with session.get(url, params=cs_params) as resp:
            sc = resp.status
            if sc == 200:
                json_data = await resp.text()
                issuances = json.loads(json_data)
                # One issuance record per certificate; harvest all its names.
                for issuance in issuances:
                    found.extend(issuance['dns_names'])
            else:
                print(R + '[-]' + C + ' CertSpotter Status : ' + W + str(sc))
    except Exception as e:
        print(R + '[-]' + C + ' CertSpotter Exception : ' + W + str(e))
208
209 async def query(hostname, tout, conf_path):
210 timeout = aiohttp.ClientTimeout(total=tout)
211 async with aiohttp.ClientSession(timeout=timeout) as session:
212 await asyncio.gather(
213 buffover(hostname, session),
214 thcrowd(hostname, session),
215 anubisdb(hostname, session),
216 thminer(hostname, session),
217 fb_cert(hostname, conf_path, session),
218 virust(hostname, conf_path, session),
219 certspot(hostname, session),
220 crtsh(hostname)
155221 )
156
157 def subdomains(hostname, tout, output, data):
222 await session.close()
223
224 def subdomains(hostname, tout, output, data, conf_path):
158225 global found
159226 result = {}
160227
162229
163230 loop = asyncio.new_event_loop()
164231 asyncio.set_event_loop(loop)
165 loop.run_until_complete(query(hostname, tout))
232 loop.run_until_complete(query(hostname, tout, conf_path))
166233 loop.close()
167234
235 from urllib.parse import urlparse
236 found = [item for item in found if item.endswith(hostname)]
237 valid = r"^[A-Za-z0-9._~()'!*:@,;+?-]*$"
238 import re
239 found = [item for item in found if re.match(valid, item)]
168240 found = set(found)
169241 total = len(found)
170242