finalrecon / 0b468b3
New upstream version 1.1.5 (Sophie Brun, 1 year, 8 months ago)
30 changed file(s) with 1134 addition(s) and 1197 deletion(s).
0 # Changelog
1
2 ## v1.1.5
3
4 * fixed some url issues in crawler
5 * threads added in port scanner
6 * fixed status code issue in directory enumeration module
7 * more sources added for subdomain enumeration
8 * wayback
9 * sonar
10 * hackertarget
11
12 ---
13
14 ## v1.1.4
15
16 * CHANGELOG.md added
17 * export
18 * output format changed
19 * csv and xml export removed
20 * subdomain enum
21 * bufferover removed
22 * shodan integrated
23 * directory enum
24 * module optimized
25 * results are printed as they are found
26 * port scanner
27 * module optimized
28 * dedicated wayback module added
8585 * Auth Token is Required for this source, read Configuration below
8686 * VirusTotal
8787 * API Key is Required
88 * Shodan
89 * API Key is Required
8890 * CertSpotter
89
90 * Traceroute
91 * Protocols
92 * UDP
93 * TCP
94 * ICMP
9591
9692 * Directory Searching
9793 * Support for File Extensions
98 * Directories from Wayback Machine from Last 1 Year
94
95 * Wayback Machine
96 * URLs from Last 5 Years
9997
10098 * Port Scan
10199 * Fast
102100 * Top 1000 Ports
103 * Open Ports with Standard Services
104101
105102 * Export
106103 * Formats
107104 * txt
108 * xml
109 * csv
105 * json [Coming Soon]
110106
111107 ## Configuration
112108
149145 }
150146 ```
151147
148 #### Shodan API
149
150 This data source is used to fetch **Sub Domains**, which are used in **Sub Domain Enumeration**
151
152 Key Format : `KEY`
153
154 Example :
155
156 ```
157 {
158 "shodan": "eu4zc5f0skv15fnw54nkhj"
159 }
160 ```
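
As an aside for reviewers: a minimal sketch of how a module could read this key and pull subdomains from Shodan's DNS API. The keys.json path under `~/.config/finalrecon` and the response fields are assumptions based on the surrounding code and Shodan's public API, not taken from this diff:

```python
# Sketch only: read the Shodan key and fetch subdomains (path/fields assumed).
import json
import os

import requests

keys_file = os.path.join(os.getenv('HOME'), '.config', 'finalrecon', 'keys.json')

with open(keys_file) as file:
    key = json.load(file)['shodan']

if key is not None:
    # Shodan's DNS API returns the subdomain labels discovered for a domain
    resp = requests.get(f'https://api.shodan.io/dns/domain/example.com?key={key}', timeout=10)
    if resp.status_code == 200:
        for sub in resp.json().get('subdomains', []):
            print(f'{sub}.example.com')
```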
161
152162 ## Tested on
153163
154164 * Kali Linux
186196
187197 ### Docker
188198
189 ```
199 ``` bash
190200 docker pull thewhiteh4t/finalrecon
191201 docker run -it --entrypoint /bin/sh thewhiteh4t/finalrecon
192202 ```
193203
204 Docker users can also use this alias to run finalrecon like the normal CLI:
205
206 ``` bash
207 alias finalrecon="docker run -it --rm --name finalrecon --entrypoint 'python3' thewhiteh4t/finalrecon finalrecon.py"
208 ```
209
210 And then use `finalrecon` to start your scan.
211
212 > Note
213 >
214 > If you have added any API keys, you can commit the image on your local machine to keep them.
215 >
216 > Running these docker commands requires root privileges.
217
194218 ## Usage
195219
196220 ```bash
197 python3 finalrecon.py -h
198
199221 usage: finalrecon.py [-h] [--headers] [--sslinfo] [--whois] [--crawl] [--dns] [--sub]
200 [--trace] [--dir] [--ps] [--full] [-t T] [-T T] [-w W] [-r] [-s]
201 [-sp SP] [-d D] [-e E] [-m M] [-p P] [-tt TT] [-o O]
222 [--dir] [--wayback] [--ps] [--full] [-t T] [-T T] [-w W] [-r] [-s]
223 [-sp SP] [-d D] [-e E] [-o O]
202224 url
203225
204 FinalRecon - The Last Web Recon Tool You Will Need | v1.1.0
226 FinalRecon - The Last Web Recon Tool You Will Need | v1.1.4
205227
206228 positional arguments:
207229 url Target URL
208230
209 optional arguments:
231 options:
210232 -h, --help show this help message and exit
211233 --headers Header Information
212234 --sslinfo SSL Certificate Information
214236 --crawl Crawl Target
215237 --dns DNS Enumeration
216238 --sub Sub-Domain Enumeration
217 --trace Traceroute
218239 --dir Directory Search
240 --wayback Wayback URLs
219241 --ps Fast Port Scan
220242 --full Full Recon
221243
228250 -sp SP Specify SSL Port [ Default : 443 ]
229251 -d D Custom DNS Servers [ Default : 1.1.1.1 ]
230252 -e E File Extensions [ Example : txt, xml, php ]
231 -m M Traceroute Mode [ Default : UDP ] [ Available : TCP, ICMP ]
232 -p P Port for Traceroute [ Default : 80 / 33434 ]
233 -tt TT Traceroute Timeout [ Default : 1.0 ]
234 -o O Export Output [ Default : txt ] [ Available : xml, csv ]
253 -o O Export Output [ Default : txt ]
235254 ```
236255
237256 ```bash
0 {
1 "common": {
2 "timeout": 30
3 },
4 "ssl_cert": {
5 "ssl_port": 443
6 },
7 "port_scan": {
8 "threads": 50
9 },
10 "dir_enum": {
11 "threads": 50,
12 "redirect": false,
13 "verify_ssl": false,
14 "dns_server": "8.8.8.8, 8.8.4.4, 1.1.1.1, 1.0.0.1",
15 "extension": ""
16 },
17 "export": {
18 "format": "txt"
19 }
20 }
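
Note: `finalrecon.py` below switches to `import settings as config` and reads attributes such as `config.dir_enum_th` and `config.export_fmt`. A minimal sketch of what that settings module presumably does with the JSON above; the attribute names are inferred from the call sites in this diff, and the wordlist default is carried over from the old `set_defaults`:

```python
# settings.py sketch: map config.json values to module attributes.
# Paths and attribute names are inferred from finalrecon.py's usage.
import json
import os

home = os.getenv('HOME')
conf_path = home + '/.config/finalrecon'
usr_data = home + '/.local/share/finalrecon/dumps/'
path_to_script = os.path.dirname(os.path.realpath(__file__))
src_conf_path = path_to_script + '/conf/'
meta_file_path = path_to_script + '/metadata.json'

with open(conf_path + '/config.json') as file:
    _conf = json.load(file)

timeout = _conf['common']['timeout']
ssl_port = _conf['ssl_cert']['ssl_port']
port_scan_th = _conf['port_scan']['threads']
dir_enum_th = _conf['dir_enum']['threads']
dir_enum_redirect = _conf['dir_enum']['redirect']
dir_enum_sslv = _conf['dir_enum']['verify_ssl']
dir_enum_dns = _conf['dir_enum']['dns_server']
dir_enum_ext = _conf['dir_enum']['extension']
dir_enum_wlist = path_to_script + '/wordlists/dirb_common.txt'
export_fmt = _conf['export']['format']
```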
00 {
11 "facebook": null,
2 "virustotal": null
2 "virustotal": null,
3 "shodan": null
34 }
11
22 import os
33 import sys
4 import atexit
54
65 R = '\033[31m' # red
76 G = '\033[32m' # green
87 C = '\033[36m' # cyan
98 W = '\033[0m' # white
109
11 home = os.getenv('HOME')
12 pid_path = home + '/.local/share/finalrecon/finalrecon.pid'
13 usr_data = home + '/.local/share/finalrecon/dumps/'
14 conf_path = home + '/.config/finalrecon'
15 path_to_script = os.path.dirname(os.path.realpath(__file__))
16 src_conf_path = path_to_script + '/conf/'
17 meta_file_path = path_to_script + '/metadata.json'
18 fail = False
19
20 if os.path.isfile(pid_path):
21 print(f'{R}[-] {C}One instance of FinalRecon is already running!{W}')
22 with open(pid_path, 'r') as pidfile:
23 pid = pidfile.read()
24 print(f'{G}[+] {C}PID :{W} {str(pid)}')
25 print(f'{G}[>] {C}If FinalRecon crashed, execute :{W} rm {pid_path}')
26 sys.exit(1)
27 else:
28 os.makedirs(os.path.dirname(pid_path), exist_ok=True)
29 with open(pid_path, 'w') as pidfile:
30 pidfile.write(str(os.getpid()))
31
32 if os.path.exists(conf_path):
33 pass
34 else:
35 import shutil
36 shutil.copytree(src_conf_path, conf_path, dirs_exist_ok=True)
10 import settings as config
11
12 home = config.home
13 usr_data = config.usr_data
14 conf_path = config.conf_path
15 path_to_script = config.path_to_script
16 src_conf_path = config.src_conf_path
17 meta_file_path = config.meta_file_path
3718
3819 import argparse
3920
40 version = '1.1.3'
21 version = '1.1.5'
4122 gh_version = ''
4223 twitter_url = ''
4324 discord_url = ''
5031 parser.add_argument('--crawl', help='Crawl Target', action='store_true')
5132 parser.add_argument('--dns', help='DNS Enumeration', action='store_true')
5233 parser.add_argument('--sub', help='Sub-Domain Enumeration', action='store_true')
53 parser.add_argument('--trace', help='Traceroute', action='store_true')
5434 parser.add_argument('--dir', help='Directory Search', action='store_true')
35 parser.add_argument('--wayback', help='Wayback URLs', action='store_true')
5536 parser.add_argument('--ps', help='Fast Port Scan', action='store_true')
5637 parser.add_argument('--full', help='Full Recon', action='store_true')
5738
5839 ext_help = parser.add_argument_group('Extra Options')
59 ext_help.add_argument('-t', type=int, help='Number of Threads [ Default : 30 ]')
40 ext_help.add_argument('-dt', type=int, help='Number of threads for directory enum [ Default : 30 ]')
41 ext_help.add_argument('-pt', type=int, help='Number of threads for port scan [ Default : 50 ]')
6042 ext_help.add_argument('-T', type=float, help='Request Timeout [ Default : 30.0 ]')
6143 ext_help.add_argument('-w', help='Path to Wordlist [ Default : wordlists/dirb_common.txt ]')
6244 ext_help.add_argument('-r', action='store_true', help='Allow Redirect [ Default : False ]')
6446 ext_help.add_argument('-sp', type=int, help='Specify SSL Port [ Default : 443 ]')
6547 ext_help.add_argument('-d', help='Custom DNS Servers [ Default : 1.1.1.1 ]')
6648 ext_help.add_argument('-e', help='File Extensions [ Example : txt, xml, php ]')
67 ext_help.add_argument('-m', help='Traceroute Mode [ Default : UDP ] [ Available : TCP, ICMP ]')
68 ext_help.add_argument('-p', type=int, help='Port for Traceroute [ Default : 80 / 33434 ]')
69 ext_help.add_argument('-tt', type=float, help='Traceroute Timeout [ Default : 1.0 ]')
70 ext_help.add_argument('-o', help='Export Output [ Default : txt ] [ Available : xml, csv ]')
49 ext_help.add_argument('-o', help='Export Format [ Default : txt ]')
7150 ext_help.set_defaults(
72 t=30,
73 T=30.0,
74 w=path_to_script + '/wordlists/dirb_common.txt',
75 r=False,
76 s=True,
77 sp=443,
78 d='1.1.1.1',
79 e='',
80 m='UDP',
81 p=33434,
82 tt=1.0,
83 o='txt')
51 dt=config.dir_enum_th,
52 pt=config.port_scan_th,
53 T=config.timeout,
54 w=config.dir_enum_wlist,
55 r=config.dir_enum_redirect,
56 s=config.dir_enum_sslv,
57 sp=config.ssl_port,
58 d=config.dir_enum_dns,
59 e=config.dir_enum_ext,
60 o=config.export_fmt
61 )
8462
8563 try:
8664 args = parser.parse_args()
8765 except SystemExit:
88 os.remove(pid_path)
8966 sys.exit()
9067
9168 target = args.url
9471 whois = args.whois
9572 crawl = args.crawl
9673 dns = args.dns
97 trace = args.trace
9874 dirrec = args.dir
75 wback = args.wayback
9976 pscan = args.ps
10077 full = args.full
101 threads = args.t
78 threads = args.dt
79 pscan_threads = args.pt
10280 tout = args.T
10381 wdlist = args.w
10482 redir = args.r
10785 dserv = args.d
10886 filext = args.e
10987 subd = args.sub
110 mode = args.m
111 port = args.p
112 tr_tout = args.tt
11388 output = args.o
11489
11590 import socket
116 import requests
11791 import datetime
11892 import ipaddress
11993 import tldextract
12195
12296 type_ip = False
12397 data = {}
124 meta = {}
12598
12699
127100 def banner():
153126 from modules.crawler import crawler
154127 from modules.headers import headers
155128 from modules.dns import dnsrec
156 from modules.traceroute import troute
157129 from modules.whois import whois_lookup
158130 from modules.dirrec import hammer
159131 from modules.portscan import ps
160132 from modules.subdom import subdomains
133 from modules.wayback import timetravel
161134 headers(target, output, data)
162135 cert(hostname, sslp, output, data)
163136 whois_lookup(ip, output, data)
166139 subdomains(domain, tout, output, data, conf_path)
167140 else:
168141 pass
169 troute(ip, mode, port, tr_tout, output, data)
170 ps(ip, output, data)
142 ps(ip, output, data, pscan_threads)
171143 crawler(target, output, data)
172144 hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext)
145 timetravel(target, data, output)
173146
174147
175148 try:
177150
178151 if target.startswith(('http', 'https')) is False:
179152 print(f'{R}[-] {C}Protocol Missing, Include {W}http:// {C}or{W} https:// \n')
180 os.remove(pid_path)
181153 sys.exit(1)
182154 else:
183155 pass
202174 print(f'\n{G}[+] {C}IP Address : {W}{str(ip)}')
203175 except Exception as e:
204176 print(f'\n{R}[-] {C}Unable to Get IP : {W}{str(e)}')
205 os.remove(pid_path)
206177 sys.exit(1)
207178
208179 start_time = datetime.datetime.now()
209
210 meta.update({'Version': str(version)})
211 meta.update({'Date': str(datetime.date.today())})
212 meta.update({'Target': str(target)})
213 meta.update({'IP Address': str(ip)})
214 meta.update({'Start Time': str(start_time.strftime('%I:%M:%S %p'))})
215 data['module-FinalRecon'] = meta
216180
217181 if output != 'None':
218182 fpath = usr_data
219 fname = fpath + hostname + '.' + output
220 if not os.path.exists(fpath):
221 os.makedirs(fpath)
183 dt_now = str(datetime.datetime.now().strftime('%d-%m-%Y_%H:%M:%S'))
184 fname = f'{fpath}fr_{hostname}_{dt_now}.{output}'
185 respath = f'{fpath}fr_{hostname}_{dt_now}'
186 if not os.path.exists(respath):
187 os.makedirs(respath)
222188 output = {
223189 'format': output,
224 'file': fname,
225 'export': False
190 'directory': respath,
191 'file': fname
226192 }
227
228 from modules.export import export
229193
230194 if full is True:
231195 full_recon()
255219 subdomains(domain, tout, output, data, conf_path)
256220 elif subd is True and type_ip is True:
257221 print(f'{R}[-] {C}Sub-Domain Enumeration is Not Supported for IP Addresses{W}\n')
258 os.remove(pid_path)
259222 sys.exit(1)
260223 else:
261224 pass
262225
263 if trace is True:
264 from modules.traceroute import troute
265 if mode == 'TCP' and port == 33434:
266 port = 80
267 troute(ip, mode, port, tr_tout, output, data)
268 else:
269 troute(ip, mode, port, tr_tout, output, data)
226 if wback is True:
227 from modules.wayback import timetravel
228 timetravel(hostname, data, output)
270229
271230 if pscan is True:
272231 from modules.portscan import ps
273 ps(ip, output, data)
232 ps(ip, output, data, threads)
274233
275234 if dirrec is True:
276235 from modules.dirrec import hammer
277236 hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext)
278237
279 if any([full, headinfo, sslinfo, whois, crawl, dns, subd, trace, pscan, dirrec]) is not True:
238 if any([full, headinfo, sslinfo, whois, crawl, dns, subd, wback, pscan, dirrec]) is not True:
280239 print(f'\n{R}[-] Error : {C}At least One Argument is Required with URL{W}')
281240 output = 'None'
282 os.remove(pid_path)
283241 sys.exit(1)
284242
285243 end_time = datetime.datetime.now() - start_time
286244 print(f'\n{G}[+] {C}Completed in {W}{str(end_time)}\n')
287
288 @atexit.register
289 def call_export():
290 meta.update({'End Time': str(datetime.datetime.now().strftime('%I:%M:%S %p'))})
291 meta.update({'Completion Time': str(end_time)})
292 if output != 'None':
293 output['export'] = True
294 export(output, data)
295
296 os.remove(pid_path)
245 print(f'{G}[+] {C}Exported : {W}{respath}')
297246 sys.exit()
298247 except KeyboardInterrupt:
299248 print(f'{R}[-] {C}Keyboard Interrupt.{W}\n')
300 os.remove(pid_path)
301249 sys.exit(130)
00 {
11 "name": "FinalRecon",
22 "author": "thewhiteh4t",
3 "version": "1.1.3",
3 "version": "1.1.5",
44 "twitter": "https://twitter.com/thewhiteh4t",
55 "comms": "https://twc1rcle.com/"
66 }
88 import threading
99 import tldextract
1010 from datetime import date
11 from modules.export import export
1112 requests.packages.urllib3.disable_warnings()
1213
1314 R = '\033[31m' # red
1617 W = '\033[0m' # white
1718 Y = '\033[33m' # yellow
1819
19 user_agent = {
20 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0'
21 }
20 user_agent = {'User-Agent': 'FinalRecon'}
2221
2322 soup = ''
24 r_url = ''
25 sm_url = ''
2623 total = []
2724 r_total = []
2825 sm_total = []
3330 img_total = []
3431 js_crawl_total = []
3532 sm_crawl_total = []
36 wayback_total = []
3733
3834
3935 def crawler(target, output, data):
4036 global soup, r_url, sm_url
41 print('\n' + Y + '[!]' + Y + ' Starting Crawler...' + W + '\n')
37 print(f'\n{Y}[!] Starting Crawler...{W}\n')
4238
4339 try:
4440 rqst = requests.get(target, headers=user_agent, verify=False, timeout=10)
4541 except Exception as e:
46 print(R + '[-] Exception : ' + C + str(e) + W)
42 print(f'{R} [-] Exception : {C}{e}{W}')
4743 return
4844
4945 sc = rqst.status_code
5753 pattern = r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}'
5854 custom = bool(re.match(pattern, temp_tgt))
5955 if custom is True:
60 r_url = protocol + '://' + temp_tgt + '/robots.txt'
61 sm_url = protocol + '://' + temp_tgt + '/sitemap.xml'
56 r_url = f'{protocol}://{temp_tgt}/robots.txt'
57 sm_url = f'{protocol}://{temp_tgt}/sitemap.xml'
6258 else:
6359 ext = tldextract.extract(target)
6460 hostname = '.'.join(part for part in ext if part)
65 r_url = protocol + '://' + hostname + '/robots.txt'
66 sm_url = protocol + '://' + hostname + '/sitemap.xml'
61 base_url = f'{protocol}://{hostname}'
62 r_url = f'{base_url}/robots.txt'
63 sm_url = f'{base_url}/sitemap.xml'
6764
6865 loop = asyncio.new_event_loop()
6966 asyncio.set_event_loop(loop)
7067 tasks = asyncio.gather(
71 robots(target),
72 sitemap(),
73 css(target),
74 js(target),
75 internal_links(target),
76 external_links(target),
77 images(target),
78 sm_crawl(),
79 js_crawl(),
80 wayback(target))
68 robots(r_url, base_url, data, output),
69 sitemap(sm_url, data, output),
70 css(target, data, output),
71 js(target, data, output),
72 internal_links(target, data, output),
73 external_links(target, data, output),
74 images(target, data, output),
75 sm_crawl(data, output),
76 js_crawl(data, output))
8177 loop.run_until_complete(tasks)
8278 loop.close()
83
84 out(target, output, data)
79 stats(output, data)
8580 else:
86 print(R + '[-]' + C + ' Status : ' + W + str(sc))
87
88
89 def url_filter(target):
90 global url
91
92 if all([url.startswith('/') is True, url.startswith('//') is False]):
93 url = target + url
81 print(f'{R}[-] {C}Status : {W}{sc}')
82
83
84 def url_filter(target, link):
85 if all([link.startswith('/') is True, link.startswith('//') is False]):
86 ret_url = target + link
87 return ret_url
9488 else:
9589 pass
9690
97 if all([
98 url.find('http://') == -1,
99 url.find('https://') == -1]):
100 url = url.replace('//', 'http://')
101 url = url.replace('../', target + '/')
102 url = url.replace('./', target + '/')
91 if link.startswith('//') is True:
92 ret_url = link.replace('//', 'http://')
93 return ret_url
10394 else:
10495 pass
10596
10697 if all([
107 url.find('//') == -1,
108 url.find('../') == -1,
109 url.find('./') == -1,
110 url.find('http://') == -1,
111 url.find('https://') == -1]):
112
113 url = target + '/' + url
98 link.find('//') == -1,
99 link.find('../') == -1,
100 link.find('./') == -1,
101 link.find('http://') == -1,
102 link.find('https://') == -1]
103 ):
104 ret_url = f'{target}/{link}'
105 return ret_url
114106 else:
115107 pass
116108
117
118 async def wayback(target):
119 global wayback_total
120 is_avail = False
121 ext = tldextract.extract(target)
122 domain = ext.registered_domain
123 if len(domain) < 2:
124 domain = ext.domain
125 domain_query = domain + '/*'
126
127 curr_yr = date.today().year
128 last_yr = curr_yr - 1
129
130 print(Y + '[!]' + C + ' Checking Availability on Wayback Machine' + W, end='')
131 wm_avail = 'http://archive.org/wayback/available'
132 avail_data = {'url': domain}
109 if all([
110 link.find('http://') == -1,
111 link.find('https://') == -1]
112 ):
113 ret_url = link.replace('//', 'http://')
114 ret_url = link.replace('../', f'{target}/')
115 ret_url = link.replace('./', f'{target}/')
116 return ret_url
117 else:
118 pass
119 return link
120
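
The rewritten `url_filter` takes the link as a parameter and returns the normalized URL (or `None`) instead of mutating a global. Illustrative expectations for its first three branches:

```python
# Illustrative expectations for the rewritten helper:
from modules.crawler import url_filter

assert url_filter('http://example.com', '/about') == 'http://example.com/about'              # root-relative
assert url_filter('http://example.com', '//cdn.example.com/a.js') == 'http://cdn.example.com/a.js'  # protocol-relative
assert url_filter('http://example.com', 'page.html') == 'http://example.com/page.html'       # bare path
```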
121 async def robots(robo_url, base_url, data, output):
122 global r_total
123 print(f'{G}[+] {C}Looking for robots.txt{W}', end='', flush=True)
133124
134125 try:
135 check_rqst = requests.get(wm_avail, params=avail_data, timeout=10)
136 check_sc = check_rqst.status_code
137 if check_sc == 200:
138 check_data = check_rqst.text
139 json_chk_data = json.loads(check_data)
140 avail_data = json_chk_data['archived_snapshots']
141 if len(avail_data) != 0:
142 is_avail = True
143 print(G + '['.rjust(5, '.') + ' Available ]')
144 else:
145 print(R + '['.rjust(5, '.') + ' N/A ]')
146 else:
147 print('\n' + R + '[-] Status : ' + C + str(check_sc) + W)
148 except Exception as e:
149 print('\n' + R + '[-] Exception : ' + C + str(e) + W)
150
151 if is_avail is True:
152 print(Y + '[!]' + C + ' Requesting Wayback Machine' + W, end='')
153 wm_url = 'http://web.archive.org/cdx/search/cdx'
154
155 data = {
156 'url': domain_query,
157 'fl': 'original',
158 'fastLatest': 'true',
159 'from': '{}'.format(str(last_yr)),
160 'to': '{}'.format(str(curr_yr)),
161 'filter': 'statuscode:200'
162 }
163
164 try:
165 r = requests.get(wm_url, params=data)
166 r_sc = r.status_code
167 if r_sc == 200:
168 r_data = r.text
169 if len(r_data) != 0:
170 r_data = r_data.split('\n')
171 r_data = set(r_data)
172 print(G + '['.rjust(5, '.') + ' {} ]'.format(str(len(r_data))))
173 wayback_total.extend(r_data)
174 else:
175 print(R + '['.rjust(5, '.') + ' Not Found ]' + W)
176 else:
177 print(R + '['.rjust(5, '.') + ' {} ]'.format(r_sc) + W)
178 except Exception as e:
179 print('\n' + R + '[-] Exception : ' + C + str(e) + W)
180
181
182 async def robots(target):
183 global url, r_url, r_total
184 print(G + '[+]' + C + ' Looking for robots.txt' + W, end='')
185
186 try:
187 r_rqst = requests.get(r_url, headers=user_agent, verify=False, timeout=10)
126 r_rqst = requests.get(robo_url, headers=user_agent, verify=False, timeout=10)
188127 r_sc = r_rqst.status_code
189128 if r_sc == 200:
190129 print(G + '['.rjust(9, '.') + ' Found ]' + W)
191 print(G + '[+]' + C + ' Extracting robots Links', end='')
130 print(f'{G}[+] {C}Extracting robots Links{W}', end='', flush=True)
192131 r_page = r_rqst.text
193132 r_scrape = r_page.split('\n')
194133 for entry in r_scrape:
201140 try:
202141 url = url[1]
203142 url = url.strip()
204 url_filter(target)
205 r_total.append(url)
143 tmp_url = url_filter(base_url, url)
144 if tmp_url is not None:
145 r_total.append(url_filter(base_url, url))
206146 if url.endswith('xml') is True:
207147 sm_total.append(url)
208148 except Exception:
209149 pass
210150
211151 r_total = set(r_total)
212
213152 print(G + '['.rjust(8, '.') + ' {} ]'.format(str(len(r_total))))
214
153 exporter(data, output, r_total, 'robots')
215154 elif r_sc == 404:
216155 print(R + '['.rjust(9, '.') + ' Not Found ]' + W)
217156 else:
218157 print(R + '['.rjust(9, '.') + ' {} ]'.format(r_sc) + W)
219158 except Exception as e:
220 print('\n' + R + '[-] Exception : ' + C + str(e) + W)
221
222
223 async def sitemap():
224 global url, sm_url, total, sm_total
225 print(G + '[+]' + C + ' Looking for sitemap.xml' + W, end='')
159 print(f'\n{R}[-] Exception : {C}{e}{W}')
160
161
162 async def sitemap(sm_url, data, output):
163 global sm_total
164 print(f'{G}[+] {C}Looking for sitemap.xml{W}', end='', flush=True)
226165 try:
227166 sm_rqst = requests.get(sm_url, headers=user_agent, verify=False, timeout=10)
228167 sm_sc = sm_rqst.status_code
229168 if sm_sc == 200:
230169 print(G + '['.rjust(8, '.') + ' Found ]' + W)
231 print(G + '[+]' + C + ' Extracting sitemap Links', end='')
170 print(f'{G}[+] {C}Extracting sitemap Links{W}', end='', flush=True)
232171 sm_page = sm_rqst.content
233172 sm_soup = bs4.BeautifulSoup(sm_page, 'xml')
234173 links = sm_soup.find_all('loc')
238177 sm_total.append(url)
239178
240179 sm_total = set(sm_total)
241
242180 print(G + '['.rjust(7, '.') + ' {} ]'.format(str(len(sm_total))))
181 exporter(data, output, sm_total, 'sitemap')
243182 elif sm_sc == 404:
244183 print(R + '['.rjust(8, '.') + ' Not Found ]' + W)
245184 else:
246 print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W)
185 print(f'{R}{"[".rjust(8, ".")} Status Code : {sm_sc} ]{W}')
247186 except Exception as e:
248 print('\n' + R + '[-] Exception : ' + C + str(e))
249
250
251 async def css(target):
252 global url, soup, total, css_total
253 print(G + '[+]' + C + ' Extracting CSS Links' + W, end='')
254 css = soup.find_all('link')
187 print(f'\n{R}[-] Exception : {C}{e}{W}')
188
189
190 async def css(target, data, output):
191 global css_total
192 print(f'{G}[+] {C}Extracting CSS Links{W}', end='', flush=True)
193 css = soup.find_all('link', href=True)
255194
256195 for link in css:
257196 url = link.get('href')
258197 if url is not None and '.css' in url:
259 url_filter(target)
260 css_total.append(url)
198 css_total.append(url_filter(target, url))
261199
262200 css_total = set(css_total)
263201 print(G + '['.rjust(11, '.') + ' {} ]'.format(str(len(css_total))) + W)
264
265
266 async def js(target):
267 global url, total, js_total
268 print(G + '[+]' + C + ' Extracting Javascript Links' + W, end='')
269 js = soup.find_all('script')
270
271 for link in js:
202 exporter(data, output, css_total, 'css')
203
204
205 async def js(target, data, output):
206 global total, js_total
207 print(f'{G}[+] {C}Extracting Javascript Links{W}', end='', flush=True)
208 scr_tags = soup.find_all('script', src=True)
209
210 for link in scr_tags:
272211 url = link.get('src')
273212 if url is not None and '.js' in url:
274 url_filter(target)
275 js_total.append(url)
213 tmp_url = url_filter(target, url)
214 if tmp_url is not None:
215 js_total.append(tmp_url)
276216
277217 js_total = set(js_total)
278218 print(G + '['.rjust(4, '.') + ' {} ]'.format(str(len(js_total))))
279
280
281 async def internal_links(target):
219 exporter(data, output, js_total, 'javascripts')
220
221
222 async def internal_links(target, data, output):
282223 global total, int_total
283 print(G + '[+]' + C + ' Extracting Internal Links' + W, end='')
224 print(f'{G}[+] {C}Extracting Internal Links{W}', end='', flush=True)
284225
285226 ext = tldextract.extract(target)
286227 domain = ext.registered_domain
294235
295236 int_total = set(int_total)
296237 print(G + '['.rjust(6, '.') + ' {} ]'.format(str(len(int_total))))
297
298
299 async def external_links(target):
238 exporter(data, output, int_total, 'internal_urls')
239
240
241 async def external_links(target, data, output):
300242 global total, ext_total
301 print(G + '[+]' + C + ' Extracting External Links' + W, end='')
243 print(f'{G}[+] {C}Extracting External Links{W}', end='', flush=True)
302244
303245 ext = tldextract.extract(target)
304246 domain = ext.registered_domain
312254
313255 ext_total = set(ext_total)
314256 print(G + '['.rjust(6, '.') + ' {} ]'.format(str(len(ext_total))))
315
316
317 async def images(target):
318 global url, total, img_total
319 print(G + '[+]' + C + ' Extracting Images' + W, end='')
320 images = soup.find_all('img')
321
322 for link in images:
257 exporter(data, output, ext_total, 'external_urls')
258
259
260 async def images(target, data, output):
261 global total, img_total
262 print(f'{G}[+] {C}Extracting Images{W}', end='', flush=True)
263 image_tags = soup.find_all('img')
264
265 for link in image_tags:
323266 url = link.get('src')
324267 if url is not None and len(url) > 1:
325 url_filter(target)
326 img_total.append(url)
268 img_total.append(url_filter(target, url))
327269
328270 img_total = set(img_total)
329271 print(G + '['.rjust(14, '.') + ' {} ]'.format(str(len(img_total))))
330
331
332 async def sm_crawl():
272 exporter(data, output, img_total, 'images')
273
274
275 async def sm_crawl(data, output):
333276 global sm_crawl_total
334 print(G + '[+]' + C + ' Crawling Sitemaps' + W, end='')
277 print(f'{G}[+] {C}Crawling Sitemaps{W}', end='', flush=True)
335278
336279 threads = []
337
280
338281 def fetch(site_url):
339282 try:
340283 sm_rqst = requests.get(site_url, headers=user_agent, verify=False, timeout=10)
348291 if url is not None:
349292 sm_crawl_total.append(url)
350293 elif sm_sc == 404:
351 print(R + '['.rjust(8, '.') + ' Not Found ]' + W)
294 # print(R + '['.rjust(8, '.') + ' Not Found ]' + W)
295 pass
352296 else:
353 print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W)
354 except Exception as e:
355 print('\n' + R + '[-] Exception : ' + C + str(e))
297 # print(R + '['.rjust(8, '.') + ' {} ]'.format(sm_sc) + W)
298 pass
299 except Exception:
300 # print(f'\n{R}[-] Exception : {C}{e}{W}')
301 pass
356302
357303 for site_url in sm_total:
358304 if site_url != sm_url:
367313
368314 sm_crawl_total = set(sm_crawl_total)
369315 print(G + '['.rjust(14, '.') + ' {} ]'.format(str(len(sm_crawl_total))))
370
371
372 async def js_crawl():
316 exporter(data, output, sm_crawl_total, 'urls_inside_sitemap')
317
318
319 async def js_crawl(data, output):
373320 global js_crawl_total
374 print(G + '[+]' + C + ' Crawling Javascripts' + W, end='')
321 print(f'{G}[+] {C}Crawling Javascripts{W}', end='', flush=True)
375322
376323 threads = []
377324
389336 if len(item) > 8:
390337 js_crawl_total.append(item)
391338 except Exception as e:
392 print('\n' + R + '[-] Exception : ' + C + str(e))
339 print(f'\n{R}[-] Exception : {C}{e}{W}')
393340
394341 for js_url in js_total:
395342 t = threading.Thread(target=fetch, args=[js_url])
402349
403350 js_crawl_total = set(js_crawl_total)
404351 print(G + '['.rjust(11, '.') + ' {} ]'.format(str(len(js_crawl_total))))
405
406
407 def out(target, output, data):
352 exporter(data, output, js_crawl_total, 'urls_inside_js')
353
354
355 def exporter(data, output, list_name, file_name):
356 data[f'module-crawler-{file_name}'] = ({'links': list(list_name)})
357 data[f'module-crawler-{file_name}'].update({'exported': False})
358 fname = f'{output["directory"]}/{file_name}.{output["format"]}'
359 output['file'] = fname
360 export(output, data)
361
362
363 def stats(output, data):
408364 global total
409365
410366 total.extend(r_total)
416372 total.extend(int_total)
417373 total.extend(ext_total)
418374 total.extend(img_total)
419 total.extend(wayback_total)
420375 total = set(total)
421376
422 print('\n' + G + '[+]' + C + ' Total Unique Links Extracted : ' + W + str(len(total)))
377 print(f'\n{G}[+] {C}Total Unique Links Extracted : {W}{len(total)}')
423378
424379 if output != 'None':
425380 if len(total) != 0:
426 data['module-Crawler'] = {'Total Unique Links Extracted': str(len(total))}
381 data['module-crawler-stats'] = {'Total Unique Links Extracted': str(len(total))}
427382 try:
428383 target_title = soup.title.string
429384 except AttributeError:
430385 target_title = 'None'
431 data['module-Crawler'].update({'Title ': str(target_title)})
432
433 data['module-Crawler'].update(
386 data['module-crawler-stats'].update({'Title ': str(target_title)})
387
388 data['module-crawler-stats'].update(
434389 {
435 'Count ( Robots )': str(len(r_total)),
436 'Count ( Sitemap )': str(len(sm_total)),
437 'Count ( CSS )': str(len(css_total)),
438 'Count ( JS )': str(len(js_total)),
439 'Count ( Links in JS )': str(len(js_crawl_total)),
440 'Count ( Links in Sitemaps )': str(len(sm_crawl_total)),
441 'Count ( Internal )': str(len(int_total)),
442 'Count ( External )': str(len(ext_total)),
443 'Count ( Images )': str(len(img_total)),
444 'count ( Wayback Machine )': str(len(wayback_total)),
445 'Count ( Total )': str(len(total))
390 'total_urls_robots': len(r_total),
391 'total_urls_sitemap': len(sm_total),
392 'total_urls_css': len(css_total),
393 'total_urls_js': len(js_total),
394 'total_urls_in_js': len(js_crawl_total),
395 'total_urls_in_sitemaps': len(sm_crawl_total),
396 'total_urls_internal': len(int_total),
397 'total_urls_external': len(ext_total),
398 'total_urls_images': len(img_total),
399 'total_urls': len(total)
446400 })
447
448 if len(r_total) != 0:
449 data['module-Crawler'].update({'Robots': list(r_total)})
450
451 if len(sm_total) != 0:
452 data['module-Crawler'].update({'Sitemaps': list(sm_total)})
453
454 if len(css_total) != 0:
455 data['module-Crawler'].update({'CSS': list(css_total)})
456
457 if len(js_total) != 0:
458 data['module-Crawler'].update({'Javascripts': list(js_total)})
459
460 if len(js_crawl_total) != 0:
461 data['module-Crawler'].update({'Links inside Javascripts': list(js_crawl_total)})
462
463 if len(sm_crawl_total) != 0:
464 data['module-Crawler'].update({'Links Inside Sitemaps': list(sm_crawl_total)})
465
466 if len(int_total) != 0:
467 data['module-Crawler'].update({'Internal Links': list(int_total)})
468
469 if len(ext_total) != 0:
470 data['module-Crawler'].update({'External Links': list(ext_total)})
471
472 if len(img_total) != 0:
473 data['module-Crawler'].update({'Images': list(img_total)})
474
475 if len(wayback_total) != 0:
476 data['module-Crawler'].update({'Wayback Machine': list(wayback_total)})
401 data['module-crawler-stats'].update({'exported': False})
00 #!/usr/bin/env python3
11
2 import json
2
33 import socket
44 import aiohttp
55 import asyncio
6 import requests
7 import tldextract
86 from datetime import date
7 from modules.export import export
98
10 R = '\033[31m' # red
11 G = '\033[32m' # green
12 C = '\033[36m' # cyan
13 W = '\033[0m' # white
14 Y = '\033[33m' # yellow
9 R = '\033[31m' # red
10 G = '\033[32m' # green
11 C = '\033[36m' # cyan
12 W = '\033[0m' # white
13 Y = '\033[33m' # yellow
1514
1615 header = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0'}
1716 count = 0
1817 wm_count = 0
1918 found = []
20 skipped = []
2119 responses = []
22 wayback_found = []
2320 curr_yr = date.today().year
2421 last_yr = curr_yr - 1
2522
26 async def fetch(url, session, redir, sslv):
27 global count
23
24 async def fetch(url, session, redir):
25 global responses
2826 try:
2927 async with session.get(url, headers=header, allow_redirects=redir) as response:
30 count += 1
31 print(Y + '[!]' + C + ' Requests : ' + W + str(count), end='\r')
32 return response.url, response.status
28 responses.append((url, response.status))
29 return response.status
3330 except Exception as e:
34 print(R + '[-]' + C + ' Exception : ' + W + str(e).strip('\n'))
31 print(f'{R}[-] {C}Exception : {W}' + str(e).strip('\n'))
3532
36 async def run(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext):
37 global responses
38 tasks = []
39 resolver = aiohttp.AsyncResolver(nameservers=[dserv])
33
34 async def insert(queue, filext, target, wdlist, redir):
35 if len(filext) == 0:
36 url = target + '/{}'
37 with open(wdlist, 'r') as wordlist:
38 for word in wordlist:
39 word = word.strip()
40 await queue.put([url.format(word), redir])
41 await asyncio.sleep(0)
42 else:
43 filext = ',' + filext
44 filext = filext.split(',')
45 with open(wdlist, 'r') as wordlist:
46 for word in wordlist:
47 for ext in filext:
48 ext = ext.strip()
49 if len(ext) == 0:
50 url = target + '/{}'
51 else:
52 url = target + '/{}.' + ext
53 word = word.strip()
54 await queue.put([url.format(word), redir])
55 await asyncio.sleep(0)
56
57
58 async def consumer(queue, target, session, redir, total_num_words):
59 global count
60 while True:
61 values = await queue.get()
62 url = values[0]
63 redir = values[1]
64 status = await fetch(url, session, redir)
65 await filter_out(target, url, status)
66 queue.task_done()
67 count += 1
68 print(f'{Y}[!] {C}Requests : {W}{count}/{total_num_words}', end='\r')
69
70
71 async def run(target, threads, tout, wdlist, redir, sslv, dserv, filext, total_num_words):
72 queue = asyncio.Queue(maxsize=threads)
73
74 resolver = aiohttp.AsyncResolver(nameservers=dserv.split(', '))
4075 conn = aiohttp.TCPConnector(limit=threads, resolver=resolver, family=socket.AF_INET, verify_ssl=sslv)
4176 timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
4277 async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
43 if len(filext) == 0:
44 url = target + '/{}'
45 with open(wdlist, 'r') as wordlist:
46 for word in wordlist:
47 word = word.strip()
48 task = asyncio.create_task(fetch(url.format(word), session, redir, sslv))
49 tasks.append(task)
50 await asyncio.sleep(0)
51 responses = await asyncio.gather(*tasks)
52 else:
53 filext = ',' + filext
54 filext = filext.split(',')
55 with open(wdlist, 'r') as wordlist:
56 for word in wordlist:
57 for ext in filext:
58 ext = ext.strip()
59 if len(ext) == 0:
60 url = target + '/{}'
61 else:
62 url = target + '/{}.' + ext
63 word = word.strip()
64 task = asyncio.create_task(fetch(url.format(word), session, redir, sslv))
65 tasks.append(task)
66 await asyncio.sleep(0)
67 responses = await asyncio.gather(*tasks)
78 distrib = asyncio.create_task(insert(queue, filext, target, wdlist, redir))
79 workers = [
80 asyncio.create_task(
81 consumer(queue, target, session, redir, total_num_words)
82 ) for _ in range(threads)]
6883
69 async def wayback(target, dserv, tout):
84 await asyncio.gather(distrib)
85 await queue.join()
86
87 for worker in workers:
88 worker.cancel()
89
90
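
Both this module and the new port scanner swap the old create-every-task-up-front approach for a bounded `asyncio.Queue`: one producer inserts work while `threads` consumer tasks drain it, so queued items and in-flight requests are capped. A self-contained sketch of the pattern with generic names:

```python
import asyncio

async def produce(queue, items):
    for item in items:
        await queue.put(item)        # blocks while the queue is full

async def consume(queue):
    while True:
        item = await queue.get()
        await asyncio.sleep(0)       # stand-in for the real request
        queue.task_done()

async def main(items, threads=5):
    queue = asyncio.Queue(maxsize=threads)
    workers = [asyncio.create_task(consume(queue)) for _ in range(threads)]
    await produce(queue, items)      # feed every item
    await queue.join()               # wait until all items are processed
    for worker in workers:           # consumers loop forever, so cancel them
        worker.cancel()

asyncio.run(main(range(100)))
```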
91 async def filter_out(target, url, status):
7092 global found
71 is_avail = False
72 ext = tldextract.extract(target)
73 domain = ext.registered_domain
74 if len(domain) < 2:
75 domain = ext.domain
93 if status in {200}:
94 if str(url) != target + '/':
95 found.append(url)
96 print(f'{G}{status} {C}|{W} {url}')
97 elif status in {301, 302, 303, 307, 308}:
98 found.append(url)
99 print(f'{Y}{status} {C}|{W} {url}')
100 elif status in {403}:
101 found.append(url)
102 print(f'{R}{status} {C}|{W} {url}')
103 else:
104 pass
76105
77 print('\n' + Y + '[!]' + C + ' Checking Availability on Wayback Machine' + W, end = '')
78 wm_avail = 'http://archive.org/wayback/available'
79 avail_data = { 'url': domain }
80
81 try:
82 check_rqst = requests.get(wm_avail, params=avail_data, timeout=10)
83 check_sc = check_rqst.status_code
84 if check_sc == 200:
85 check_data = check_rqst.text
86 json_chk_data = json.loads(check_data)
87 avail_data = json_chk_data['archived_snapshots']
88 if len(avail_data) != 0:
89 is_avail = True
90 print(G + '['.rjust(5, '.') + ' Available ]')
91 else:
92 print(R + '['.rjust(5, '.') + ' N/A ]')
93 else:
94 print('\n' + R + '[-] Status : ' + C + str(check_sc) + W)
95 except Exception as e:
96 print('\n' + R + '[-] Exception : ' + C + str(e) + W)
97
98 if is_avail == True:
99 print('\n' + Y + '[!]' + C + ' Requesting Wayback Machine...' + W + '\n')
100 tasks = []
101 resolver = aiohttp.AsyncResolver(nameservers=[dserv])
102 conn = aiohttp.TCPConnector(limit=10)
103 timeout = aiohttp.ClientTimeout(total=None, sock_connect=tout, sock_read=tout)
104 async with aiohttp.ClientSession(connector=conn, timeout=timeout) as session:
105 for f_url in found:
106 tasks.append(asyncio.create_task(wm_fetch(f_url, session)))
107 await asyncio.gather(*tasks)
108
109 async def wm_fetch(f_url, session):
110 global wayback_found, wm_count
111 wm_url = 'http://web.archive.org/cdx/search/cdx'
112 domain = str(f_url)
113 data= {
114 'url': domain,
115 'matchType': 'prefix',
116 'fl': 'original',
117 'fastLatest': 'true',
118 'filter': 'statuscode:200',
119 'from': '{}'.format(str(last_yr)),
120 'to': '{}'.format(str(curr_yr)),
121 'output': 'json'
122 }
123 try:
124 async with session.get(wm_url, params=data) as resp:
125 wm_count += 1
126 print(Y + '[!]' + C + ' Requests : ' + W + str(wm_count), end='\r')
127 answer = await resp.text()
128 if resp.status == 200:
129 json_ans = json.loads(answer)
130 if len(json_ans) != 0:
131 json_ans.pop(0)
132 if len(json_ans) != 0:
133 for item in json_ans:
134 addr = item[0]
135 addr = addr.replace(':80', '')
136 wayback_found.append(addr)
137 except Exception as e:
138 print(R + '[-]' + C + ' Exception : ' + W + str(e))
139
140 def filter_out(target):
141 global responses, found, skipped, wayback_found
142 for entry in responses:
143 if entry != None:
144 if entry[1] in {200}:
145 if str(entry[0]) != target + '/':
146 found.append(entry[0])
147 print(G + '[+]' + G + ' {}'.format(str(entry[1]) + C + ' | ' + W + '{}'.format(entry[0])))
148 elif entry[1] in {301, 302, 303, 307, 308}:
149 found.append(entry[0])
150 print(G + '[+]' + Y + ' {}'.format(str(entry[1]) + C + ' | ' + W + '{}'.format(entry[0])))
151 elif entry[1] in {403}:
152 found.append(entry[0])
153 print(G + '[+]' + R + ' {}'.format(str(entry[1]) + C + ' | ' + W + '{}'.format(entry[0])))
154 else:
155 skipped.append(entry[0])
156
157 def wm_filter():
158 global wayback_found
159
160 for entry in wayback_found:
161 if len(entry) == 0:
162 wayback_found.pop(wayback_found.index(entry))
163 wayback_found = list(set(wayback_found))
164
165 count = 0
166 for entry in wayback_found:
167 mod_entry = entry.split('/')
168 last = mod_entry[-1]
169 if '.' in last and last.startswith('.') == False:
170 mod_entry.pop(mod_entry.index(last))
171 mod_entry = '/'.join(mod_entry)
172 loc = wayback_found.index(entry)
173 wayback_found.remove(entry)
174 wayback_found.insert(loc, mod_entry)
175 count += 1
176 print(G + '[+]' + C + ' Filtering Results : ' + W + str(count), end='\r')
177 wayback_found = set(wayback_found)
178106
179107 def dir_output(output, data):
180 global responses, found, skipped, wayback_found
108 global responses, found
181109 result = {}
182110
183111 for entry in responses:
184 if entry != None:
112 if entry is not None:
185113 if entry[1] in {200}:
186114 if output != 'None':
187 result.setdefault('Status 200', []).append(entry[0])
115 result.setdefault('Status 200', []).append(f'200, {entry[0]}')
188116 elif entry[1] in {301, 302, 303, 307, 308}:
189117 if output != 'None':
190 result.setdefault('Status {}'.format(str(entry[1])), []).append(entry[0])
118 result.setdefault(f'Status {entry[1]}', []).append(f'{entry[1]}, {entry[0]}')
191119 elif entry[1] in {403}:
192120 if output != 'None':
193 result.setdefault('Status 403', []).append(entry[0])
121 result.setdefault('Status 403', []).append(f'{entry[1]}, {entry[0]}')
194122 else:
195123 pass
196
197 for entry in wayback_found:
198 if len(entry) != 0:
199 result.setdefault('Wayback Machine', []).append(entry)
200
201 print(G + '[+]' + C + ' Directories Found : ' + W + str(len(found)))
202 print(G + '[+]' + C + ' Directories Skipped : ' + W + str(len(skipped)))
203 print(G + '[+]' + C + ' Total Requests : ' + W + str(len(found) + len(skipped)))
204 print(G + '[+]' + C + ' Directories Found on Wayback Machine : ' + W + str(len(wayback_found)))
124
125 print(f'\n\n{G}[+] {C}Directories Found : {W}{len(found)}')
205126
206127 if output != 'None':
207 result['Directories Found'] = str(len(found))
208 result['Directories Skipped'] = str(len(skipped))
209 result['Total Requests'] = str(len(found) + len(skipped))
210 result['Directories Found on Wayback Machine'] = str(len(wayback_found))
128 result.update({'exported': False})
211129 data['module-Directory Search'] = result
130 fname = f'{output["directory"]}/directory_enum.{output["format"]}'
131 output['file'] = fname
132 export(output, data)
133
212134
213135 def hammer(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext):
214 print('\n' + Y + '[!]' + Y + ' Starting Directory Search...' + W + '\n')
215 print(G + '[+]' + C + ' Threads : ' + W + str(threads))
216 print(G + '[+]' + C + ' Timeout : ' + W + str(tout))
217 print(G + '[+]' + C + ' Wordlist : ' + W + wdlist)
218 print(G + '[+]' + C + ' Allow Redirects : ' + W + str(redir))
219 print(G + '[+]' + C + ' SSL Verification : ' + W + str(sslv))
220 print(G + '[+]' + C + ' DNS Servers : ' + W + dserv)
136 print(f'\n{Y}[!] Starting Directory Enum...{W}\n')
137 print(f'{G}[+] {C}Threads : {W}{threads}')
138 print(f'{G}[+] {C}Timeout : {W}{tout}')
139 print(f'{G}[+] {C}Wordlist : {W}{wdlist}')
140 print(f'{G}[+] {C}Allow Redirects : {W}{redir}')
141 print(f'{G}[+] {C}SSL Verification : {W}{sslv}')
142 print(f'{G}[+] {C}DNS Servers : {W}{dserv}')
221143 with open(wdlist, 'r') as wordlist:
222144 num_words = sum(1 for i in wordlist)
223 print(G + '[+]' + C + ' Wordlist Size : ' + W + str(num_words))
224 print(G + '[+]' + C + ' File Extensions : ' + W + str(filext) + '\n')
145 print(f'{G}[+] {C}Wordlist Size : {W}{num_words}')
146 print(f'{G}[+] {C}File Extensions : {W}{filext}\n')
147 if len(filext) != 0:
148 total_num_words = num_words * (len(filext.split(',')) + 1)
149 else:
150 total_num_words = num_words
151
225152 loop = asyncio.new_event_loop()
226153 asyncio.set_event_loop(loop)
227 loop.run_until_complete(run(target, threads, tout, wdlist, redir, sslv, dserv, output, data, filext))
228 filter_out(target)
229 loop.run_until_complete(wayback(target, dserv, tout))
230 wm_filter()
154 loop.run_until_complete(run(target, threads, tout, wdlist, redir, sslv, dserv, filext, total_num_words))
231155 dir_output(output, data)
232156 loop.close()
00 #!/usr/bin/env python3
11
2 import os
32 import dnslib
3 from modules.export import export
44
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
10
1011
1112 def dnsrec(domain, output, data):
1213 result = {}
13 print('\n' + Y + '[!]' + Y + ' Starting DNS Enumeration...' + W + '\n')
14 print(f'\n{Y}[!] Starting DNS Enumeration...{W}\n')
1415 types = ['A', 'AAAA', 'ANY', 'CAA', 'CNAME', 'MX', 'NS', 'TXT']
1516 full_ans = []
1617 for Type in types:
2425 dns_found = []
2526
2627 for entry in full_ans:
27 if entry.startswith(';') == False:
28 if entry.startswith(';') is False:
2829 dns_found.append(entry)
2930 else:
3031 pass
31
32
3233 if len(dns_found) != 0:
3334 for entry in dns_found:
34 print(G + '[+]' + C + ' {}'.format(entry) + W)
35 print(f'{C}{entry}{W}')
3536 if output != 'None':
3637 result.setdefault('dns', []).append(entry)
3738 else:
38 print(R + '[-]' + C + ' DNS Records Not Found!' + W)
39 print(f'{R}[-] {C}DNS Records Not Found!{W}')
3940 if output != 'None':
4041 result.setdefault('dns', ['DNS Records Not Found'])
41
42 dmarc_target = '_dmarc.' + domain
42
43 dmarc_target = f'_dmarc.{domain}'
4344 q = dnslib.DNSRecord.question(dmarc_target, 'TXT')
4445 pkt = q.send('8.8.8.8', 53, tcp='UDP')
4546 dmarc_ans = dnslib.DNSRecord.parse(pkt)
4849 dmarc_found = []
4950
5051 for entry in dmarc_ans:
51 if entry.startswith('_dmarc') == True:
52 if entry.startswith('_dmarc') is True:
5253 dmarc_found.append(entry)
5354 else:
5455 pass
5556 if len(dmarc_found) != 0:
5657 for entry in dmarc_found:
57 print(G + '[+]' + C + ' {}'.format(entry) + W)
58 print(f'{C}{entry}{W}')
5859 if output != 'None':
5960 result.setdefault('dmarc', []).append(entry)
6061 else:
61 print('\n' + R + '[-]' + C + ' DMARC Record Not Found!' + W)
62 print(f'\n{R}[-] {C}DMARC Record Not Found!{W}')
6263 if output != 'None':
6364 result.setdefault('dmarc', ['DMARC Record Not Found!'])
65 result.update({'exported': False})
6466
6567 if output != 'None':
66 dns_export(output, data, result)
67
68 def dns_export(output, data, result):
69 data['module-DNS Enumeration'] = result
68 data['module-DNS Enumeration'] = result
69 fname = f'{output["directory"]}/dns_records.{output["format"]}'
70 output['file'] = fname
71 export(output, data)
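
For context, `dnsrec` drives dnslib directly: `DNSRecord.question` builds the query, `send` ships it to a resolver, and `DNSRecord.parse` decodes the reply. A standalone example of that API; note that dnslib's `tcp` parameter is a boolean, so any truthy value (including the string `'UDP'` used above) actually selects TCP:

```python
import dnslib

# Build a TXT query, send it over UDP, and parse the reply.
q = dnslib.DNSRecord.question('_dmarc.example.com', 'TXT')
pkt = q.send('8.8.8.8', 53, tcp=False)   # tcp=False selects UDP
ans = dnslib.DNSRecord.parse(pkt)
for line in str(ans).splitlines():
    if not line.startswith(';'):         # skip dig-style comment lines
        print(line)
```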
00 #!/usr/bin/env python3
11
2 import os
3 import csv
4 import lxml.etree
5 import xml.etree.ElementTree as ET
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
67
7 R = '\033[31m' # red
8 G = '\033[32m' # green
9 C = '\033[36m' # cyan
10 W = '\033[0m' # white
11 Y = '\033[33m' # yellow
12
13 root = ''
148
159 def export(output, data):
1610 if output['format'] != 'txt':
17 if output['export'] == True:
11 if output['export'] is True:
1812 fname = output['file']
1913 with open(fname, 'w') as outfile:
20 if output['format'] == 'xml':
21 print(Y + '[!]' + C + ' Exporting to ' + W + fname + '\n')
22 xml_export(output, data, outfile)
23 if output['format'] == 'csv':
24 print(Y + '[!]' + C + ' Exporting to ' + W + fname + '\n')
25 csv_export(output, data, outfile)
26 if all([output['format'] != 'xml', output['format'] != 'csv']):
27 print(R + '[-]' + C + ' Invalid Output Format, Valid Formats : ' + W + 'txt, xml, csv')
14 if output['format'] != 'txt':
15 print(f'{R}[-] {C}Invalid Output Format, Valid Formats : {W}txt')
2816 exit()
2917 else:
3018 pass
3119 elif output['format'] == 'txt':
3220 fname = output['file']
33 print(Y + '[!]' + C + ' Exporting to ' + W + fname + '\n')
3421 with open(fname, 'w') as outfile:
3522 txt_export(data, outfile)
3623 else:
3724 pass
38
39 def txt_unpack(outfile, k, v):
40 if isinstance(v, list):
41 for item in v:
25
26
27 def txt_unpack(outfile, key, val):
28 if isinstance(val, list):
29 for item in val:
4230 if isinstance(item, list):
4331 outfile.write('{}\t{}\t\t{}\n'.format(*item))
4432 else:
4533 outfile.write(str(item) + '\n')
46
47 elif isinstance(v, dict):
48 for key, val in v.items():
49 if isinstance(val, list):
50 outfile.write('\n' + str(key) + '\n')
51 outfile.write('='*len(key) + '\n\n')
52 txt_unpack(outfile, key, val)
53 else:
54 outfile.write('\n' + str(key))
55 outfile.write(' : ')
56 outfile.write(str(val) + '\n')
34
35 elif isinstance(val, dict):
36 for key, val in val.items():
37 if key != 'exported':
38 if isinstance(val, list):
39 txt_unpack(outfile, key, val)
40 else:
41 outfile.write(f'{key}: {val}\n')
5742 else:
5843 pass
5944
45
6046 def txt_export(data, outfile):
61 for k, v in data.items():
62 if k.startswith('module'):
63 k = k.split('-')
64 k = k[1]
65 outfile.write('\n' + '#'*len(k) + '\n')
66 outfile.write(k)
67 outfile.write('\n' + '#'*len(k) + '\n')
68 txt_unpack(outfile, k, v)
69
70 elif k.startswith('Type'):
71 outfile.write('\n' + data[k] + '\n')
72 outfile.write('='*len(data[k]) + '\n\n')
73
47 for key, val in data.items():
48 if key.startswith('module'):
49 if val['exported'] is False:
50 txt_unpack(outfile, key, val)
51 val['exported'] = True
52 elif key.startswith('Type'):
53 outfile.write('\n' + data[key] + '\n')
54 outfile.write('=' * len(data[key]) + '\n\n')
7455 else:
75 outfile.write(str(k))
56 outfile.write(str(key))
7657 outfile.write(' : ')
77 outfile.write(str(v) + '\n')
78
79 def xml_export(output, data, outfile):
80 global root
81 root = ET.Element('finalrecon')
82 modules = ET.Element('modules')
83
84 for k, v in data.items():
85 if k.startswith('module'):
86 module = k.split('module-')
87 module = module[1]
88 module_name = ET.Element('moduleName')
89 module_name.text = module
90 modules.append(module_name)
91 if isinstance(v, dict):
92 for key, val in v.items():
93 data_pair = ET.Element('dataPair')
94 data_key = ET.Element('dataKey')
95 data_key.text = key
96 data_pair.append(data_key)
97 if isinstance(val, list):
98 for item in val:
99 if isinstance(item, list):
100 data_val = ET.Element('dataVal')
101 data_val.text = '{},{},{}'.format(*item)
102 data_pair.append(data_val)
103 else:
104 data_val = ET.Element('dataVal')
105 data_val.text = str(item)
106 data_pair.append(data_val)
107 module_name.append(data_pair)
108 else:
109 data_val = ET.Element('dataVal')
110 data_val.text = str(val)
111 data_pair.append(data_val)
112 module_name.append(data_pair)
113
114 root.append(modules)
115 if output['format'] == 'xml':
116 tree = ET.ElementTree(root)
117 tree.write(outfile.name,
118 encoding='utf8',
119 xml_declaration=True,
120 default_namespace=None,
121 method='xml')
122 else:
123 pass
124
125 def csv_export(output, data, outfile):
126 global root
127 key_list = []
128 val_list = []
129
130 xml_export(output, data, outfile)
131
132 root_str = ET.tostring(root, method='xml').decode()
133 xml_data = lxml.etree.fromstring(root_str)
134 modules = xml_data.find('modules')
135 module_names = modules.findall('moduleName')
136
137 for module_name in module_names:
138 module_name_str = module_name.text
139 dataPairs = module_name.findall('dataPair')
140
141 for dataPair in dataPairs:
142 dataKey = dataPair.find('dataKey')
143 dataKey = dataKey.text
144 key_list.append(dataKey)
145 dataVals = dataPair.findall('dataVal')
146 if len(dataVals) == 1:
147 dataVals = dataVals[0].text
148 dataVals = dataVals.replace(',', '/').replace(';', '/')
149 val_list.append(dataVals)
150 else:
151 data_str_list = []
152 for item in dataVals:
153 item = item.text
154 item = item.replace(',', '/').replace(';', '/')
155 data_str_list.append(item)
156 val_list.append(data_str_list)
157
158 with open(outfile.name, 'a') as outfile:
159 writer = csv.writer(outfile, delimiter=';')
160 key_list.insert(0,'Module')
161 writer.writerow(key_list)
162 val_list.insert(0, module_name_str)
163
164 val_str_list = []
165
166 for item in val_list:
167 if isinstance(item, str) == False and isinstance(item, list) == False:
168 item = item.text
169 if isinstance(item, list) == True:
170 item = '\n'.join(item)
171 else:
172 pass
173 val_str_list.append(item)
174 writer.writerow(val_str_list)
175
176 for i in range(1,5):
177 writer.writerow([])
178 key_list = []
179 val_list = []
58 outfile.write(str(val) + '\n')
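
The new export flow is incremental: each module stores its results under a `module-*` key with an `'exported': False` flag, points `output['file']` at its own file inside the scan directory, and calls `export()`; `txt_export` then writes only the not-yet-exported modules and flips the flag. A small sketch of that contract with hypothetical data:

```python
# Sketch of the incremental export contract (hypothetical data).
import os
from modules.export import export

output = {
    'format': 'txt',
    'directory': '/tmp/fr_example',
    'file': '/tmp/fr_example/headers.txt',
}
os.makedirs(output['directory'], exist_ok=True)

data = {'module-headers': {'Server': 'nginx', 'exported': False}}
export(output, data)                          # writes /tmp/fr_example/headers.txt
assert data['module-headers']['exported'] is True
```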
00 #!/usr/bin/env python3
11
22 import requests
3 from modules.export import export
34 requests.packages.urllib3.disable_warnings()
45
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
6 R = '\033[31m' # red
7 G = '\033[32m' # green
8 C = '\033[36m' # cyan
9 W = '\033[0m' # white
10 Y = '\033[33m' # yellow
11
1012
1113 def headers(target, output, data):
1214 result = {}
13 print ('\n' + Y + '[!] Headers :' + W + '\n')
15 print(f'\n{Y}[!] Headers :{W}\n')
1416 try:
1517 rqst = requests.get(target, verify=False, timeout=10)
16 for k, v in rqst.headers.items():
17 print (G + '[+]' + C + ' {} : '.format(k) + W + v)
18 for key, val in rqst.headers.items():
19 print(f'{C}{key} : {W}{val}')
1820 if output != 'None':
19 result.update({k:v})
21 result.update({key: val})
2022 except Exception as e:
21 print('\n' + R + '[-]' + C + ' Exception : ' + W + str(e) + '\n')
23 print(f'\n{R}[-] {C}Exception : {W}{e}\n')
2224 if output != 'None':
23 result.update({'Exception':str(e)})
25 result.update({'Exception': str(e)})
26 result.update({'exported': False})
2427
2528 if output != 'None':
26 header_output(output, data, result)
27
28 def header_output(output, data, result):
29 data['module-Headers'] = result
29 fname = f'{output["directory"]}/headers.{output["format"]}'
30 output['file'] = fname
31 data['module-headers'] = result
32 export(output, data)
00 #!/usr/bin/env python3
11
2 import socket
3 import threading
2 import asyncio
3 from modules.export import export
44
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
1010
11 def ps(ip, output, data):
12 threads = []
11 counter = 0
12 port_list = [1, 3, 4, 6, 7, 9, 13, 17, 19, 20, 21, 22, 23, 24, 25, 26, 30, 32, 33, 37, 42, 43, 49, 53, 70, 79, 80, 81, 82, 83, 84, 85, 88, 89, 90, 99, 100, 106, 109, 110, 111, 113, 119, 125, 135, 139, 143, 144, 146, 161, 163, 179, 199, 211, 212, 222, 254, 255, 256, 259, 264, 280, 301, 306, 311, 340, 366, 389, 406, 407, 416, 417, 425, 427, 443, 444, 445, 458, 464, 465, 481, 497, 500, 512, 513, 514, 515, 524, 541, 543, 544, 545, 548, 554, 555, 563, 587, 593, 616, 617, 625, 631, 636, 646, 648, 666, 667, 668, 683, 687, 691, 700, 705, 711, 714, 720, 722, 726, 749, 765, 777, 783, 787, 800, 801, 808, 843, 873, 880, 888, 898, 900, 901, 902, 903, 911, 912, 981, 987, 990, 992, 993, 995, 999, 1000, 1001, 1002, 1007, 1009, 1010, 1011, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033, 1034, 1035, 1036, 1037, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1061, 1062, 1063, 1064, 1065, 1066, 1067, 1068, 1069, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1102, 1104, 1105, 1106, 1107, 1108, 1110, 1111, 1112, 1113, 1114, 1117, 1119, 1121, 1122, 1123, 1124, 1126, 1130, 1131, 1132, 1137, 1138, 1141, 1145, 1147, 1148, 1149, 1151, 1152, 1154, 1163, 1164, 1165, 1166, 1169, 1174, 1175, 1183, 1185, 1186, 1187, 1192, 1198, 1199, 1201, 1213, 1216, 1217, 1218, 1233, 1234, 1236, 1244, 1247, 1248, 1259, 1271, 1272, 1277, 1287, 1296, 1300, 1301, 1309, 1310, 1311, 1322, 1328, 1334, 1352, 1417, 1433, 1434, 1443, 1455, 1461, 1494, 1500, 1501, 1503, 1521, 1524, 1533, 1556, 1580, 1583, 1594, 1600, 1641, 1658, 1666, 1687, 1688, 1700, 1717, 1718, 1719, 1720, 1721, 1723, 1755, 1761, 1782, 1783, 1801, 1805, 1812, 1839, 1840, 1862, 1863, 1864, 1875, 1900, 1914, 1935, 1947, 1971, 1972, 1974, 1984, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2013, 2020, 2021, 2022, 2030, 2033, 2034, 2035, 2038, 2040, 2041, 2042, 2043, 2045, 2046, 2047, 2048, 2049, 2065, 2068, 2099, 2100, 2103, 2105, 2106, 2107, 2111, 2119, 2121, 2126, 2135, 2144, 2160, 2161, 2170, 2179, 2190, 2191, 2196, 2200, 2222, 2251, 2260, 2288, 2301, 2323, 2366, 2381, 2382, 2383, 2393, 2394, 2399, 2401, 2492, 2500, 2522, 2525, 2557, 2601, 2602, 2604, 2605, 2607, 2608, 2638, 2701, 2702, 2710, 2717, 2718, 2725, 2800, 2809, 2811, 2869, 2875, 2909, 2910, 2920, 2967, 2968, 2998, 3000, 3001, 3003, 3005, 3006, 3007, 3011, 3013, 3017, 3030, 3031, 3052, 3071, 3077, 3128, 3168, 3211, 3221, 3260, 3261, 3268, 3269, 3283, 3300, 3301, 3306, 3322, 3323, 3324, 3325, 3333, 3351, 3367, 3369, 3370, 3371, 3372, 3389, 3390, 3404, 3476, 3493, 3517, 3527, 3546, 3551, 3580, 3659, 3689, 3690, 3703, 3737, 3766, 3784, 3800, 3801, 3809, 3814, 3826, 3827, 3828, 3851, 3869, 3871, 3878, 3880, 3889, 3905, 3914, 3918, 3920, 3945, 3971, 3986, 3995, 3998, 4000, 4001, 4002, 4003, 4004, 4005, 4006, 4045, 4111, 4125, 4126, 4129, 4224, 4242, 4279, 4321, 4343, 4443, 4444, 4445, 4446, 4449, 4550, 4567, 4662, 4848, 4899, 4900, 4998, 5000, 5001, 5002, 5003, 5004, 5009, 5030, 5033, 5050, 5051, 5054, 5060, 5061, 5080, 5087, 5100, 5101, 5102, 5120, 5190, 5200, 5214, 5221, 5222, 5225, 5226, 5269, 5280, 5298, 5357, 5405, 5414, 5431, 5432, 5440, 5500, 5510, 5544, 5550, 5555, 5560, 5566, 5631, 5633, 5666, 5678, 5679, 5718, 5730, 5800, 5801, 5802, 5810, 5811, 5815, 5822, 5825, 5850, 5859, 5862, 5877, 5900, 5901, 5902, 5903, 5904, 
5906, 5907, 5910, 5911, 5915, 5922, 5925, 5950, 5952, 5959, 5960, 5961, 5962, 5963, 5987, 5988, 5989, 5998, 5999, 6000, 6001, 6002, 6003, 6004, 6005, 6006, 6007, 6009, 6025, 6059, 6100, 6101, 6106, 6112, 6123, 6129, 6156, 6346, 6389, 6502, 6510, 6543, 6547, 6565, 6566, 6567, 6580, 6646, 6666, 6667, 6668, 6669, 6689, 6692, 6699, 6779, 6788, 6789, 6792, 6839, 6881, 6901, 6969, 7000, 7001, 7002, 7004, 7007, 7019, 7025, 7070, 7100, 7103, 7106, 7200, 7201, 7402, 7435, 7443, 7496, 7512, 7625, 7627, 7676, 7741, 7777, 7778, 7800, 7911, 7920, 7921, 7937, 7938, 7999, 8000, 8001, 8002, 8007, 8008, 8009, 8010, 8011, 8021, 8022, 8031, 8042, 8045, 8080, 8081, 8082, 8083, 8084, 8085, 8086, 8087, 8088, 8089, 8090, 8093, 8099, 8100, 8180, 8181, 8192, 8193, 8194, 8200, 8222, 8254, 8290, 8291, 8292, 8300, 8333, 8383, 8400, 8402, 8443, 8500, 8600, 8649, 8651, 8652, 8654, 8701, 8800, 8873, 8888, 8899, 8994, 9000, 9001, 9002, 9003, 9009, 9010, 9011, 9040, 9050, 9071, 9080, 9081, 9090, 9091, 9099, 9100, 9101, 9102, 9103, 9110, 9111, 9200, 9207, 9220, 9290, 9415, 9418, 9485, 9500, 9502, 9503, 9535, 9575, 9593, 9594, 9595, 9618, 9666, 9876, 9877, 9878, 9898, 9900, 9917, 9929, 9943, 9944, 9968, 9998, 9999, 10000, 10001, 10002, 10003, 10004, 10009, 10010, 10012, 10024, 10025, 10082, 10180, 10215, 10243, 10566, 10616, 10617, 10621, 10626, 10628, 10629, 10778, 11110, 11111, 11967, 12000, 12174, 12265, 12345, 13456, 13722, 13782, 13783, 14000, 14238, 14441, 14442, 15000, 15002, 15003, 15004, 15660, 15742, 16000, 16001, 16012, 16016, 16018, 16080, 16113, 16992, 16993, 17877, 17988, 18040, 18101, 18988, 19101, 19283, 19315, 19350, 19780, 19801, 19842, 20000, 20005, 20031, 20221, 20222, 20828, 21571, 22939, 23502, 24444, 24800, 25734, 25735, 26214, 27000, 27352, 27353, 27355, 27356, 27715, 28201, 30000, 30718, 30951, 31038, 31337, 32768, 32769, 32770, 32771, 32772, 32773, 32774, 32775, 32776, 32777, 32778, 32779, 32780, 32781, 32782, 32783, 32784, 32785, 33354, 33899, 34571, 34572, 34573, 35500, 38292, 40193, 40911, 41511, 42510, 44176, 44442, 44443, 44501, 45100, 48080, 49152, 49153, 49154, 49155, 49156, 49157, 49158, 49159, 49160, 49161, 49163, 49165, 49167, 49175, 49176, 49400, 49999, 50000, 50001, 50002, 50003, 50006, 50300, 50389, 50500, 50636, 50800, 51103, 51493, 52673, 52822, 52848, 52869, 54045, 54328, 55055, 55056, 55555, 55600, 56737, 56738, 57294, 57797, 58080, 60020, 60443, 61532, 61900, 62078, 63331, 64623, 64680, 65000, 65129, 65389]
13
14
15 async def insert(queue):
16 for port in port_list:
17 await queue.put(port)
18
19
20 async def consumer(queue, ip, result):
21 global counter
22 while True:
23 port = await queue.get()
24 await sock_conn(ip, port, result)
25 queue.task_done()
26 counter += 1
27 print(f'{Y}[!] {C}Scanning : {W}{counter}/{len(port_list)}', end='\r')
28
29
30 async def run(ip, result, threads):
31 queue = asyncio.Queue(maxsize=threads)
32
33
34 distrib = asyncio.create_task(insert(queue))
35 workers = [
36 asyncio.create_task(
37 consumer(queue, ip, result)
38 ) for _ in range(threads)]
39
40 await asyncio.gather(distrib)
41 await queue.join()
42 for worker in workers:
43 worker.cancel()
44
45
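The rewritten scanner drives `sock_conn` through a bounded `asyncio.Queue` instead of one thread per port. A self-contained sketch of the same producer/consumer pattern follows; `host`, `ports`, and `workers` are illustrative values, not FinalRecon's interface, and the sketch additionally closes the writer that `open_connection` hands back:

``` python
# Self-contained sketch of the bounded-queue producer/consumer pattern above.
# 'host', 'ports' and 'workers' are illustrative, not FinalRecon's interface.
import asyncio


async def probe(host, port, open_ports):
    try:
        # bound each connection attempt to 1 second, as the module does
        _, writer = await asyncio.wait_for(asyncio.open_connection(host, port), 1)
        writer.close()
        open_ports.append(port)
    except (asyncio.TimeoutError, OSError):
        pass


async def scan(host, ports, workers=50):
    queue = asyncio.Queue(maxsize=workers)
    open_ports = []

    async def worker():
        while True:
            port = await queue.get()
            await probe(host, port, open_ports)
            queue.task_done()

    tasks = [asyncio.create_task(worker()) for _ in range(workers)]
    for port in ports:
        await queue.put(port)   # blocks once the queue is full
    await queue.join()          # wait until every queued port is processed
    for task in tasks:
        task.cancel()
    return open_ports


if __name__ == '__main__':
    print(asyncio.run(scan('127.0.0.1', [22, 80, 443, 8080])))
```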
46 def ps(ip, output, data, threads):
1347 result = {}
48 result['ports'] = []
49 print(f'\n{Y}[!] Starting Port Scan...{W}\n')
50 print(f'{G}[+] {C}Scanning Top 1000 Ports With {threads} Threads...{W}\n')
1451
15 print('\n' + Y + '[!]' + Y + ' Starting Port Scan...' + W + '\n')
16 print(G + '[+]' + C + ' Testing Top 1000 Ports...' + W + '\n')
17 port_list = [...] # identical Top-1000 port list as shown above; verbatim duplicate elided
18
19 for port in port_list:
20 t = threading.Thread(target=sock_conn, args=[ip, port, output, result])
21 t.daemon = True
22 t.start()
23 threads.append(t)
24
25 for thread in threads:
26 thread.join()
52 loop = asyncio.new_event_loop()
53 asyncio.set_event_loop(loop)
54 loop.run_until_complete(run(ip, result, threads))
55 loop.close()
2756
2857 if output != 'None':
2958 ps_output(output, data, result)
3059
31 def sock_conn(ip, port, output, result):
60
61 async def sock_conn(ip, port, result):
3262 try:
33 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
34 s.settimeout(3)
35 s.connect((ip, port))
36 s.close()
37 service = socket.getservbyport(port, 'tcp')
38 print(G + '[+] ' + C + str(port).ljust(7) + W + service.ljust(9))
39
40 if output != 'None':
41 result.update({str(port):service})
42 except:
43 s.close()
63 connector = asyncio.open_connection(ip, port)
64 await asyncio.wait_for(connector, 1)
65 print(f'\x1b[K{G}[+] {C}{port}{W}')
66 result['ports'].append(str(port))
67 return True
68 except asyncio.TimeoutError:
69 return False
70 except Exception:
4471 pass
4572
73
4674 def ps_output(output, data, result):
47 data['module-Port Scan'] = result
75 data['module-Port Scan'] = result
76 result.update({'exported': False})
77 fname = f'{output["directory"]}/ports.{output["format"]}'
78 output['file'] = fname
79 export(output, data)
22 import os
33 import ssl
44 import socket
5 from modules.export import export
56
6 R = '\033[31m' # red
7 G = '\033[32m' # green
8 C = '\033[36m' # cyan
9 W = '\033[0m' # white
10 Y = '\033[33m' # yellow
7 R = '\033[31m' # red
8 G = '\033[32m' # green
9 C = '\033[36m' # cyan
10 W = '\033[0m' # white
11 Y = '\033[33m' # yellow
12
1113
1214 def cert(hostname, sslp, output, data):
1315 result = {}
1416 pair = {}
15 print ('\n' + Y + '[!]' + Y + ' SSL Certificate Information : ' + W + '\n')
17 print(f'\n{Y}[!] SSL Certificate Information : {W}\n')
1618
1719 pt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
1820 pt.settimeout(5)
2830 try:
2931 s.connect((hostname, sslp))
3032 info = s.getpeercert()
31 except:
33 except Exception:
3234 info = ssl.get_server_certificate((hostname, sslp))
33 f = open('{}.pem'.format(hostname), 'w')
35 f = open(f'{hostname}.pem', 'w')
3436 f.write(info)
3537 f.close()
36 cert_dict = ssl._ssl._test_decode_cert('{}.pem'.format(hostname))
38 cert_dict = ssl._ssl._test_decode_cert(f'{hostname}.pem')
3739 info = cert_dict
38 os.remove('{}.pem'.format(hostname))
40 os.remove(f'{hostname}.pem')
3941
4042 def unpack(v, pair):
4143 convert = False
4850 unpack(elem)
4951 else:
5052 convert = True
51 pass
52 if convert == True:
53 if convert is True:
5354 pair.update(dict([subitem]))
5455 else:
5556 pass
5657 else:
57 print(G + '[+]' + C + ' {} : '.format(str(k)) + W + str(item))
58 print(f'{G}[+] {C}{k}: {W}{item}')
5859 if output != 'None':
59 result.update({k:v})
60 result.update({k: v})
6061
6162 for k, v in info.items():
6263 if isinstance(v, tuple):
6364 unpack(v, pair)
64 for k,v in pair.items():
65 print(G + '[+]' + C + ' {} : '.format(str(k)) + W + str(v))
65 for k, v in pair.items():
66 print(f'{G}[+] {C}{k}: {W}{v}')
6667 if output != 'None':
67 result.update({k:v})
68 result.update({k: v})
6869 pair.clear()
6970 else:
70 print(G + '[+]' + C + ' {} : '.format(str(k)) + W + str(v))
71 print(f'{G}[+] {C}{k}: {W}{v}')
7172 if output != 'None':
72 result.update({k:v})
73 result.update({k: v})
7374
74 except:
75 except Exception:
7576 pt.close()
76 print (R + '[-]' + C + ' SSL is not Present on Target URL...Skipping...' + W)
77 print(f'{R}[-] {C}SSL is not Present on Target URL...Skipping...{W}')
7778 if output != 'None':
78 result.update({'Error':'SSL is not Present on Target URL'})
79
79 result.update({'Error': 'SSL is not Present on Target URL'})
80 result.update({'exported': False})
8081 if output != 'None':
81 cert_output(output, data, result)
82
83 def cert_output(output, data, result):
84 data['module-SSL Certificate Information'] = result
82 fname = f'{output["directory"]}/ssl.{output["format"]}'
83 output['file'] = fname
84 data['module-SSL Certificate Information'] = result
85 export(output, data)
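When the handshake validates, `getpeercert()` already returns the decoded certificate, and the module only falls back to `ssl.get_server_certificate()` plus the private `ssl._ssl._test_decode_cert` helper when that fails. A minimal sketch of the validated path using only public APIs; `example.com` is an illustrative host and a valid certificate chain is assumed:

``` python
# Minimal sketch of the validated path with public APIs only; 'example.com'
# is an illustrative host and a valid certificate chain is assumed.
import socket
import ssl


def fetch_cert(hostname, port=443):
    ctx = ssl.create_default_context()
    with socket.create_connection((hostname, port), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=hostname) as tls:
            # on a validated connection this is already a decoded dict
            return tls.getpeercert()


if __name__ == '__main__':
    for key, value in fetch_cert('example.com').items():
        print(key, ':', value)
```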
00 #!/usr/bin/env python3
11
2 import json
32 import aiohttp
43 import asyncio
5 import psycopg2
4 from modules.export import export
5 from modules.subdomain_modules.thcrowd_subs import thcrowd
6 from modules.subdomain_modules.anubis_subs import anubisdb
7 from modules.subdomain_modules.thminer_subs import thminer
8 from modules.subdomain_modules.fb_subs import fb_cert
9 from modules.subdomain_modules.virustotal_subs import virust
10 from modules.subdomain_modules.shodan_subs import shodan
11 from modules.subdomain_modules.certspot_subs import certspot
12 from modules.subdomain_modules.wayback_subs import machine
13 from modules.subdomain_modules.sonar_subs import sonar
14 from modules.subdomain_modules.crtsh_subs import crtsh
15 from modules.subdomain_modules.htarget_subs import hackertgt
616
7 R = '\033[31m' # red
8 G = '\033[32m' # green
9 C = '\033[36m' # cyan
10 W = '\033[0m' # white
11 Y = '\033[33m' # yellow
17 R = '\033[31m' # red
18 G = '\033[32m' # green
19 C = '\033[36m' # cyan
20 W = '\033[0m' # white
21 Y = '\033[33m' # yellow
1222
1323 found = []
1424
15 async def buffover(hostname, session):
16 global found
17 print(Y + '[!]' + C + ' Requesting ' + G + 'BuffOver' + W)
18 url = 'https://dns.bufferover.run/dns'
19 bo_params = {
20 'q': '.{}'.format(hostname)
21 }
22 try:
23 async with session.get(url, params=bo_params) as resp:
24 sc = resp.status
25 if sc == 200:
26 output = await resp.text()
27 json_out = json.loads(output)
28 subds = json_out['FDNS_A']
29 if subds == None:
30 pass
31 else:
32 for subd in subds:
33 subd = subd.split(',')
34 for sub in subd:
35 found.append(sub)
36 else:
37 print(R + '[-]' + C + ' BuffOver Status : ' + W + str(sc))
38 except Exception as e:
39 print(R + '[-]' + C + ' BuffOver Exception : ' + W + str(e))
40
41 async def crtsh(hostname):
42 global found
43 print(Y + '[!]' + C + ' Requesting ' + G + 'crt.sh' + W)
44 try:
45 conn = psycopg2.connect(host="crt.sh",database="certwatch", user="guest", port="5432")
46 conn.autocommit = True
47 cur = conn.cursor()
48 query = "SELECT ci.NAME_VALUE NAME_VALUE FROM certificate_identity ci WHERE ci.NAME_TYPE = 'dNSName' AND reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower('%.{}'))".format(hostname)
49 cur.execute(query)
50 result = cur.fetchall()
51 cur.close()
52 conn.close()
53 for url in result:
54 found.append(url[0])
55 except Exception as e:
56 print(R + '[-]' + C + ' crtsh Exception : ' + W + str(e))
57
58 async def thcrowd(hostname, session):
59 global found
60 print(Y + '[!]' + C + ' Requesting ' + G + 'ThreatCrowd' + W)
61 url = 'https://www.threatcrowd.org/searchApi/v2/domain/report/'
62 thc_params = {
63 'domain': hostname
64 }
65 try:
66 async with session.get(url, params=thc_params) as resp:
67 sc = resp.status
68 if sc == 200:
69 output = await resp.text()
70 json_out = json.loads(output)
71 if json_out['response_code'] == '0':
72 pass
73 else:
74 subd = json_out['subdomains']
75 found.extend(subd)
76 else:
77 print(R + '[-]' + C + ' ThreatCrowd Status : ' + W + str(sc))
78 except Exception as e:
79 print(R + '[-]' + C + ' ThreatCrowd Exception : ' + W + str(e))
80
81 async def anubisdb(hostname, session):
82 global found
83 print(Y + '[!]' + C + ' Requesting ' + G + 'AnubisDB' + W)
84 url = 'https://jldc.me/anubis/subdomains/{}'.format(hostname)
85 try:
86 async with session.get(url) as resp:
87 sc = resp.status
88 if sc == 200:
89 output = await resp.text()
90 json_out = json.loads(output)
91 found.extend(json_out)
92 elif sc == 300:
93 pass
94 else:
95 print(R + '[-]' + C + ' AnubisDB Status : ' + W + str(sc))
96 except Exception as e:
97 print(R + '[-]' + C + 'AnubisDB Exception : ' + W + str(e))
98
99 async def thminer(hostname, session):
100 global found
101 print(Y + '[!]' + C + ' Requesting ' + G + 'ThreatMiner' + W)
102 url = 'https://api.threatminer.org/v2/domain.php'
103 thm_params = {
104 'q': hostname,
105 'rt': '5'
106 }
107 try:
108 async with session.get(url, params=thm_params) as resp:
109 sc = resp.status
110 if sc == 200:
111 output = await resp.text()
112 json_out = json.loads(output)
113 subd = json_out['results']
114 found.extend(subd)
115 else:
116 print(R + '[-]' + C + ' ThreatMiner Status : ' + W + str(sc))
117 except Exception as e:
118 print(R + '[-]' + C + ' ThreatMiner Exception : ' + W + str(e))
119
120 async def fb_cert(hostname, conf_path, session):
121 global found
122 with open('{}/keys.json'.format(conf_path), 'r') as keyfile:
123 json_read = keyfile.read()
124
125 json_load = json.loads(json_read)
126 fb_key = json_load['facebook']
127
128 if fb_key != None:
129 print(Y + '[!]' + C + ' Requesting ' + G + 'Facebook' + W)
130 url = 'https://graph.facebook.com/certificates'
131 fb_params = {
132 'query': hostname,
133 'fields': 'domains',
134 'access_token': fb_key
135 }
136 try:
137 async with session.get(url, params=fb_params) as resp:
138 sc = resp.status
139 if sc == 200:
140 json_data = await resp.text()
141 json_read = json.loads(json_data)
142 domains = json_read['data']
143 for i in range (0, len(domains)):
144 found.extend(json_read['data'][i]['domains'])
145 else:
146 print(R + '[-]' + C + ' Facebook Status : ' + W + str(sc))
147 except Exception as e:
148 print(R + '[-]' + C + ' Facebook Exception : ' + W + str(e))
149 else:
150 pass
151
152 async def virust(hostname, conf_path, session):
153 global found
154 with open('{}/keys.json'.format(conf_path), 'r') as keyfile:
155 json_read = keyfile.read()
156
157 json_load = json.loads(json_read)
158 vt_key = json_load['virustotal']
159
160 if vt_key != None:
161 print(Y + '[!]' + C + ' Requesting ' + G + 'VirusTotal' + W)
162 url = 'https://www.virustotal.com/api/v3/domains/{}/subdomains'.format(hostname)
163 vt_headers = {
164 'x-apikey': vt_key
165 }
166 try:
167 async with session.get(url, headers=vt_headers) as resp:
168 sc = resp.status
169 if sc == 200:
170 json_data = await resp.text()
171 json_read = json.loads(json_data)
172 domains = json_read['data']
173 tmp_list = []
174 for i in range (0, len(domains)):
175 tmp_list.append(domains[i]['id'])
176 found.extend(tmp_list)
177 else:
178 print(R + '[-]' + C + ' VirusTotal Status : ' + W + str(sc))
179 except Exception as e:
180 print(R + '[-]' + C + ' VirusTotal Exception : ' + W + str(e))
181 else:
182 pass
183
184 async def certspot(hostname, session):
185 global found
186
187 print(Y + '[!]' + C + ' Requesting ' + G + 'CertSpotter' + W)
188 url = 'https://api.certspotter.com/v1/issuances'
189 cs_params = {
190 'domain': hostname,
191 'expand': 'dns_names',
192 'include_subdomains': 'true'
193 }
194
195 try:
196 async with session.get(url, params=cs_params) as resp:
197 sc = resp.status
198 if sc == 200:
199 json_data = await resp.text()
200 json_read = json.loads(json_data)
201 for i in range (0, len(json_read)):
202 domains = json_read[i]['dns_names']
203 found.extend(domains)
204 else:
205 print(R + '[-]' + C + ' CertSpotter Status : ' + W + str(sc))
206 except Exception as e:
207 print(R + '[-]' + C + ' CertSpotter Exception : ' + W + str(e))
20825
20926 async def query(hostname, tout, conf_path):
21027 timeout = aiohttp.ClientTimeout(total=tout)
21128 async with aiohttp.ClientSession(timeout=timeout) as session:
21229 await asyncio.gather(
213 buffover(hostname, session),
21430 thcrowd(hostname, session),
21531 anubisdb(hostname, session),
21632 thminer(hostname, session),
21733 fb_cert(hostname, conf_path, session),
21834 virust(hostname, conf_path, session),
35 shodan(hostname, conf_path, session),
21936 certspot(hostname, session),
37 machine(hostname, session),
38 sonar(hostname, session),
39 hackertgt(hostname, session),
22040 crtsh(hostname)
22141 )
22242 await session.close()
43
22344
22445 def subdomains(hostname, tout, output, data, conf_path):
22546 global found
22647 result = {}
22748
228 print('\n' + Y + '[!]' + Y + ' Starting Sub-Domain Enumeration...' + W + '\n')
49 print(f'\n{Y}[!] Starting Sub-Domain Enumeration...{W}\n')
22950
23051 loop = asyncio.new_event_loop()
23152 asyncio.set_event_loop(loop)
23253 loop.run_until_complete(query(hostname, tout, conf_path))
23354 loop.close()
23455
235 from urllib.parse import urlparse
23656 found = [item for item in found if item.endswith(hostname)]
23757 valid = r"^[A-Za-z0-9._~()'!*:@,;+?-]*$"
238 import re
239 found = [item for item in found if re.match(valid, item)]
58 from re import match
59 found = [item for item in found if match(valid, item)]
24060 found = set(found)
24161 total = len(found)
24262
24363 if len(found) != 0:
244 print('\n' + G + '[+]' + C + ' Results : ' + W + '\n')
64 print(f'\n{G}[+] {C}Results : {W}\n')
24565 for url in found:
246 print(G + '[+] ' + C + url)
66 print(url)
24767
248 print('\n' + G + '[+]' + C + ' Total Unique Sub Domains Found : ' + W + str(total))
68 print(f'\n{G}[+] {C}Total Unique Sub Domains Found : {W}{total}')
24969
25070 if output != 'None':
25171 result['Links'] = list(found)
252 subd_output(output, data, result, total)
253
254 def subd_output(output, data, result, total):
255 data['module-Subdomain Enumeration'] = result
256 data['module-Subdomain Enumeration'].update({'Total Unique Sub Domains Found': str(total)})
72 result.update({'exported': False})
73 data['module-Subdomain Enumeration'] = result
74 fname = f'{output["directory"]}/subdomains.{output["format"]}'
75 output['file'] = fname
76 export(output, data)
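Sub-domain sources now live one file each under `modules/subdomain_modules/`, and every module reports back by extending the shared `found` list imported from `modules.subdom`. A hypothetical new source would follow the same shape; `exampledb` and its endpoint are invented here purely for illustration. After that, the coroutine only needs an import in `subdom.py` and an entry in the `asyncio.gather(...)` call inside `query()`:

``` python
#!/usr/bin/env python3

# Hypothetical source module: 'exampledb' and its endpoint are invented here
# purely to illustrate the plugin shape; they are not a real data source.
from json import loads
import modules.subdom as parent

R = '\033[31m'  # red
G = '\033[32m'  # green
C = '\033[36m'  # cyan
W = '\033[0m'   # white
Y = '\033[33m'  # yellow


async def exampledb(hostname, session):
    print(f'{Y}[!] {C}Requesting {G}ExampleDB{W}')
    url = f'https://api.example.invalid/subdomains/{hostname}'
    try:
        async with session.get(url) as resp:
            if resp.status == 200:
                json_out = loads(await resp.text())
                # extend the shared list; subdom.py dedupes later with set()
                parent.found.extend(json_out)
            else:
                print(f'{R}[-] {C}ExampleDB Status : {W}{resp.status}')
    except Exception as e:
        print(f'{R}[-] {C}ExampleDB Exception : {W}{e}')
```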
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def anubisdb(hostname, session):
13 print(f'{Y}[!] {C}Requesting {G}AnubisDB{W}')
14 url = f'https://jldc.me/anubis/subdomains/{hostname}'
15 try:
16 async with session.get(url) as resp:
17 sc = resp.status
18 if sc == 200:
19 output = await resp.text()
20 json_out = loads(output)
21 parent.found.extend(json_out)
22 print(f'{G}[+] {Y}AnubisDB {W}found {C}{len(json_out)} {W}subdomains!')
23 elif sc == 300:
24 pass
25 else:
26 print(f'{R}[-] {C}AnubisDB Status : {W}{sc}')
27 except Exception as e:
28 print(f'{R}[-] {C}AnubisDB Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def certspot(hostname, session):
13 print(f'{Y}[!] {C}Requesting {G}CertSpotter{W}')
14 url = 'https://api.certspotter.com/v1/issuances'
15 cs_params = {
16 'domain': hostname,
17 'expand': 'dns_names',
18 'include_subdomains': 'true'
19 }
20
21 try:
22 async with session.get(url, params=cs_params) as resp:
23 sc = resp.status
24 if sc == 200:
25 json_data = await resp.text()
26 json_read = loads(json_data)
27 print(f'{G}[+] {Y}CertSpotter {W}found {C}{len(json_read)} {W}subdomains!')
28 for i in range(0, len(json_read)):
29 domains = json_read[i]['dns_names']
30 parent.found.extend(domains)
31 else:
32 print(f'{R}[-] {C}CertSpotter Status : {W}{sc}')
33 except Exception as e:
34 print(f'{R}[-] {C}CertSpotter Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 import psycopg2
9 import modules.subdom as parent
10
11
12 async def crtsh(hostname):
13 print(f'{Y}[!] {C}Requesting {G}crt.sh{W}')
14 try:
15 conn = psycopg2.connect(
16 host="crt.sh",
17 database="certwatch",
18 user="guest",
19 port="5432"
20 )
21 conn.autocommit = True
22 cur = conn.cursor()
23 query = f"SELECT ci.NAME_VALUE NAME_VALUE FROM certificate_identity ci WHERE ci.NAME_TYPE = 'dNSName' AND reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower('%.{hostname}'))"
24 cur.execute(query)
25 result = cur.fetchall()
26 cur.close()
27 conn.close()
28 tmp_list = []
29 for url in result:
30 tmp_list.append(url[0])
31 print(f'{G}[+] {Y}CRT.sh {W}found {C}{len(tmp_list)} {W}subdomains!')
32 parent.found.extend(tmp_list)
33 except Exception as e:
34 print(f'{R}[-] {C}crtsh Exception : {W}{e}')
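The query above interpolates `hostname` directly into the SQL string; since psycopg2 supports `%s` placeholders, the same lookup can be written with parameter binding. A sketch against an illustrative target:

``` python
# Same crt.sh lookup with a bound parameter instead of an f-string;
# 'example.com' is an illustrative target.
import psycopg2

conn = psycopg2.connect(host='crt.sh', database='certwatch', user='guest', port='5432')
conn.autocommit = True
cur = conn.cursor()
cur.execute(
    "SELECT ci.NAME_VALUE FROM certificate_identity ci "
    "WHERE ci.NAME_TYPE = 'dNSName' "
    "AND reverse(lower(ci.NAME_VALUE)) LIKE reverse(lower(%s))",
    ('%.example.com',),
)
print([row[0] for row in cur.fetchall()])
cur.close()
conn.close()
```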
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def fb_cert(hostname, conf_path, session):
13 with open(f'{conf_path}/keys.json', 'r') as keyfile:
14 json_read = keyfile.read()
15
16 json_load = loads(json_read)
17 fb_key = json_load['facebook']
18
19 if fb_key is not None:
20 print(f'{Y}[!] {C}Requesting {G}Facebook{W}')
21 url = 'https://graph.facebook.com/certificates'
22 fb_params = {
23 'query': hostname,
24 'fields': 'domains',
25 'access_token': fb_key
26 }
27 try:
28 async with session.get(url, params=fb_params) as resp:
29 sc = resp.status
30 if sc == 200:
31 json_data = await resp.text()
32 json_read = loads(json_data)
33 domains = json_read['data']
34 print(f'{G}[+] {Y}Facebook {W}found {C}{len(domains)} {W}subdomains!')
35 for i in range(0, len(domains)):
36 parent.found.extend(json_read['data'][i]['domains'])
37 else:
38 print(f'{R}[-] {C}Facebook Status : {W}{sc}')
39 except Exception as e:
40 print(f'{R}[-] {C}Facebook Exception : {W}{e}')
41 else:
42 print(f'{Y}[!] Skipping Facebook : {W}API key not found!')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 import modules.subdom as parent
9
10
11 async def hackertgt(hostname, session):
12 print(f'{Y}[!] {C}Requesting {G}HackerTarget{W}')
13 url = f'https://api.hackertarget.com/hostsearch/?q={hostname}'
14 try:
15 async with session.get(url) as resp:
16 sc = resp.status
17 if sc == 200:
18 data = await resp.text()
19 data_list = data.split('\n')
20 tmp_list = []
21 for line in data_list:
22 if line:  # skip the empty trailing element left by split('\n')
23 tmp_list.append(line.split(',')[0])
24 print(f'{G}[+] {Y}HackerTarget {W}found {C}{len(tmp_list)} {W}subdomains!')
25 parent.found.extend(tmp_list)
26 else:
27 print(f'{R}[-] {C}HackerTarget Status : {W}{sc}')
28 except Exception as e:
29 print(f'{R}[-] {C}HackerTarget Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def shodan(hostname, conf_path, session):
13 with open(f'{conf_path}/keys.json', 'r') as keyfile:
14 json_read = keyfile.read()
15
16 json_load = loads(json_read)
17 sho_key = json_load['shodan']
18
19 if sho_key is not None:
20 print(f'{Y}[!] {C}Requesting {G}Shodan{W}')
21 url = f'https://api.shodan.io/dns/domain/{hostname}?key={sho_key}'
22
23 try:
24 async with session.get(url) as resp:
25 sc = resp.status
26 if sc == 200:
27 json_data = await resp.text()
28 json_read = loads(json_data)
29 domains = json_read['subdomains']
30 tmp_list = []
31 for i in range(0, len(domains)):
32 tmp_list.append(f'{domains[i]}.{hostname}')
33 print(f'{G}[+] {Y}Shodan {W}found {C}{len(tmp_list)} {W}subdomains!')
34 parent.found.extend(tmp_list)
35 else:
36 print(f'{R}[-] {C}Shodan Status : {W}{sc}')
37 except Exception as e:
38 print(f'{R}[-] {C}Shodan Exception : {W}{e}')
39 else:
40 print(f'{Y}[!] Skipping Shodan : {W}API key not found!')
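Shodan's DNS endpoint returns bare labels rather than full hostnames, which is why the module re-appends the apex domain. An abridged, illustrative view of the only field the module reads:

``` python
# Abridged, illustrative response shape; the module reads only 'subdomains',
# which holds bare labels that must be re-joined with the apex domain.
resp = {'subdomains': ['www', 'mail', 'vpn']}
print([f'{label}.example.com' for label in resp['subdomains']])
# ['www.example.com', 'mail.example.com', 'vpn.example.com']
```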
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def sonar(hostname, session):
13 print(f'{Y}[!] {C}Requesting {G}Sonar{W}')
14 url = f'https://sonar.omnisint.io/subdomains/{hostname}'
15 try:
16 async with session.get(url) as resp:
17 sc = resp.status
18 if sc == 200:
19 json_data = await resp.text()
20 json_read = loads(json_data)
21 print(f'{G}[+] {Y}Sonar {W}found {C}{len(json_read)} {W}subdomains!')
22 parent.found.extend(json_read)
23 else:
24 print(f'{R}[-] {C}Sonar Status : {W}{sc}')
25 except Exception as e:
26 print(f'{R}[-] {C}Sonar Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def thcrowd(hostname, session):
13 print(f'{Y}[!] {C}Requesting {G}ThreatCrowd{W}')
14 url = 'https://www.threatcrowd.org/searchApi/v2/domain/report/'
15 thc_params = {
16 'domain': hostname
17 }
18 try:
19 async with session.get(url, params=thc_params) as resp:
20 sc = resp.status
21 if sc == 200:
22 output = await resp.text()
23 json_out = loads(output)
24 if json_out['response_code'] == '0':
25 pass
26 else:
27 subd = json_out['subdomains']
28 print(f'{G}[+] {Y}ThreatCrowd {W}found {C}{len(subd)} {W}subdomains!')
29 parent.found.extend(subd)
30 else:
31 print(f'{R}[-] {C}ThreatCrowd Status : {W}{sc}')
32 except Exception as e:
33 print(f'{R}[-] {C}ThreatCrowd Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def thminer(hostname, session):
13 print(f'{Y}[!] {C}Requesting {G}ThreatMiner{W}')
14 url = 'https://api.threatminer.org/v2/domain.php'
15 thm_params = {
16 'q': hostname,
17 'rt': '5'
18 }
19 try:
20 async with session.get(url, params=thm_params) as resp:
21 sc = resp.status
22 if sc == 200:
23 output = await resp.text()
24 json_out = loads(output)
25 subd = json_out['results']
26 print(f'{G}[+] {Y}ThreatMiner {W}found {C}{len(subd)} {W}subdomains!')
27 parent.found.extend(subd)
28 else:
29 print(f'{R}[-] {C}ThreatMiner Status : {W}{sc}')
30 except Exception as e:
31 print(f'{R}[-] {C}ThreatMiner Exception : {W}{e}')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 from json import loads
9 import modules.subdom as parent
10
11
12 async def virust(hostname, conf_path, session):
13 with open(f'{conf_path}/keys.json', 'r') as keyfile:
14 json_read = keyfile.read()
15
16 json_load = loads(json_read)
17 vt_key = json_load['virustotal']
18
19 if vt_key is not None:
20 print(f'{Y}[!] {C}Requesting {G}VirusTotal{W}')
21 url = f'https://www.virustotal.com/api/v3/domains/{hostname}/subdomains'
22 vt_headers = {
23 'x-apikey': vt_key
24 }
25 try:
26 async with session.get(url, headers=vt_headers) as resp:
27 sc = resp.status
28 if sc == 200:
29 json_data = await resp.text()
30 json_read = loads(json_data)
31 domains = json_read['data']
32 tmp_list = []
33 for i in range(0, len(domains)):
34 tmp_list.append(domains[i]['id'])
35 print(f'{G}[+] {Y}VirusTotal {W}found {C}{len(tmp_list)} {W}subdomains!')
36 parent.found.extend(tmp_list)
37 else:
38 print(f'{R}[-] {C}VirusTotal Status : {W}{sc}')
39 except Exception as e:
40 print(f'{R}[-] {C}VirusTotal Exception : {W}{e}')
41 else:
42 print(f'{Y}[!] Skipping VirusTotal : {W}API key not found!')
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 import modules.subdom as parent
9
10
11 async def machine(hostname, session):
12 print(f'{Y}[!] {C}Requesting {G}Wayback{W}')
13 url = f'http://web.archive.org/cdx/search/cdx?url=*.{hostname}/*&output=txt&fl=original&collapse=urlkey'
14 try:
15 async with session.get(url) as resp:
16 sc = resp.status
17 if sc == 200:
18 raw_data = await resp.text()
19 lines = raw_data.split('\n')
20 tmp_list = []
21 for line in lines:
22 subdomain = line.replace('http://', '').replace('https://', '').split('/')[0].split(':')[0]
23 if len(subdomain) > len(hostname):
24 tmp_list.append(subdomain)
25 print(f'{G}[+] {Y}Wayback {W}found {C}{len(tmp_list)} {W}subdomains!')
26 parent.found.extend(tmp_list)
27 else:
28 print(f'{R}[-] {C}Wayback Status : {W}{sc}')
29 except Exception as e:
30 print(f'{R}[-] {C}Wayback Exception : {W}{e}')
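The hostname extraction above chains `replace()` and `split()`; an equivalent and arguably tidier form with `urllib.parse` (the URL is illustrative):

``` python
# Equivalent hostname extraction with urllib.parse instead of chained
# replace()/split(); the URL is illustrative.
from urllib.parse import urlparse

line = 'https://blog.example.com:8080/post/1'
print(urlparse(line).hostname)  # blog.example.com
```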
+0 -175 modules/traceroute.py (file deleted)
0 #!/usr/bin/env python3
1
2 import os
3 import socket
4 import struct
5 import icmplib
6 import platform
7 import threading
8
9 R = '\033[31m' # red
10 G = '\033[32m' # green
11 C = '\033[36m' # cyan
12 W = '\033[0m' # white
13 Y = '\033[33m' # yellow
14
15 def icmp_trace(ip, tr_tout, output, collect):
16 result = icmplib.traceroute(ip, count=1, interval=0.05, timeout=tr_tout, id=icmplib.PID, max_hops=30, fast_mode=True)
17 print('\n' + R + 'HOPS'.ljust(7) + 'IP'.ljust(17) + 'HOST' + W + '\n')
18 for entry in result:
19 hop_index = str(entry._distance)
20 hop_addr = entry._address
21 try:
22 hop_host = socket.gethostbyaddr(hop_addr)[0]
23 except socket.herror:
24 hop_host = 'Unknown'
25 print(G + hop_index.ljust(7) + C + hop_addr.ljust(17) + W + hop_host)
26
27 if output != 'None':
28 collect.setdefault('Result', []).append([str(hop_index), str(hop_addr), str(hop_host)])
29
30 def udp_trace(ip, port, tr_tout, output, collect):
31 status = {'end': False}
32 rx = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
33 rx.setblocking(0)
34 rx.settimeout(tr_tout)
35 rx.bind(('', port))
36
37 print('\n' + R + 'HOPS'.ljust(7) + 'IP'.ljust(17) + 'HOST' + W + '\n')
38
39 for ttl in range(1, 31):
40 udp_send(ip, port, ttl, rx, status, tr_tout, output, collect)
41 if status['end'] == True:
42 break
43 rx.close()
44
45 def udp_send(ip, port, ttl, rx, status, tr_tout, output, collect):
46 tx = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
47 tx.setsockopt(socket.SOL_IP, socket.IP_TTL, ttl)
48 tx.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
49 tx.setblocking(0)
50 tx.settimeout(tr_tout)
51 tx.sendto(''.encode(), (ip, port))
52
53 try:
54 data, curr_addr = rx.recvfrom(512)
55 curr_addr = curr_addr[0]
56 except socket.error as e:
57 curr_addr = '* * *'
58 finally:
59 tx.close()
60
61 hop_index = str(ttl)
62 hop_addr = curr_addr
63 if hop_addr != '* * *':
64 try:
65 hop_host = socket.gethostbyaddr(hop_addr)[0]
66 except socket.herror:
67 hop_host = 'Unknown'
68 else:
69 hop_addr = '* * *'
70 hop_host = ''
71
72 print(G + hop_index.ljust(7) + C + hop_addr.ljust(17) + W + hop_host)
73 if output != 'None':
74 collect.setdefault('Result', []).append([str(hop_index), str(hop_addr), str(hop_host)])
75
76 if curr_addr == ip:
77 status['end'] = True
78
79 def tcp_trace(ip, port, tr_tout, output, collect):
80 status = {'end': False}
81 rx = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
82 rx.setblocking(0)
83 rx.settimeout(tr_tout)
84 rx.bind(('', 0))
85
86 print('\n' + R + 'HOPS'.ljust(7) + 'IP'.ljust(17) + 'HOST' + W + '\n')
87
88 for ttl in range(1,31):
89 t = threading.Thread(target=tcp_send(ip, port, ttl, rx, status, tr_tout, output, collect), daemon=True)
90 t = t.start()
91
92 if status['end'] == True:
93 break
94 rx.close()
95
96 def tcp_send(ip, port, ttl, rx, status, tr_tout, output, collect):
97 tx = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
98 tx.setsockopt(socket.IPPROTO_IP, socket.IP_TTL, struct.pack('I', ttl))
99 tx.setblocking(0)
100 tx.settimeout(tr_tout)
101
102 while True:
103 try:
104 try:
105 tx.connect((ip, port))
106 hop_index = str(ttl)
107 try:
108 hop_host = socket.gethostbyaddr(ip)[0]
109 except socket.herror:
110 hop_host = 'Unknown'
111 print(G + hop_index.ljust(7) + C + ip.ljust(17) + W + hop_host)
112 status['end'] = True
113 if output != 'None':
114 collect.setdefault('Result', []).append([str(hop_index), str(ip), str(hop_host)])
115 except (socket.error, socket.timeout) as err:
116 try:
117 data, curr_addr = rx.recvfrom(512)
118 curr_addr = curr_addr[0]
119 except socket.timeout:
120 curr_addr = '* * *'
121 hop_index = str(ttl)
122 hop_addr = curr_addr
123 if hop_addr != '* * *':
124 try:
125 hop_host = socket.gethostbyaddr(hop_addr)[0]
126 except socket.herror:
127 hop_host = 'Unknown'
128 else:
129 hop_addr = '* * *'
130 hop_host = ''
131 print(G + hop_index.ljust(7) + C + hop_addr.ljust(17) + W + hop_host)
132 if output != 'None':
133 collect.setdefault('Result', []).append([str(hop_index), str(hop_addr), str(hop_host)])
134 continue
135 finally:
136 tx.close()
137 break
138
139 def troute(ip, mode, port, tr_tout, output, data):
140
141 if platform.system() == 'Linux':
142 if os.geteuid() != 0:
143 print('\n' + R + '[-]' + C + ' Root privileges are required for Traceroute, skipping...' + W)
144 return
145 else:
146 pass
147 else:
148 pass
149
150 collect = {}
151
152 print('\n' + G + '[+]' + C + ' Port : ' + W + str(port))
153 print(G + '[+]' + C + ' Timeout : ' + W + str(tr_tout))
154
155 if mode == 'ICMP':
156 print('\n' + Y + '[!]' + Y + ' Starting ICMP Traceroute...' + W)
157 icmp_trace(ip, tr_tout, output, collect)
158 elif mode == 'UDP':
159 print('\n' + Y + '[!]' + Y + ' Starting UDP Traceroute...' + W)
160 udp_trace(ip, port, tr_tout, output, collect)
161 elif mode == 'TCP':
162 print('\n' + Y + '[!]' + Y + ' Starting TCP Traceroute...' + W)
163 tcp_trace(ip, port, tr_tout, output, collect)
164 else:
165 print('\n' + R + '[-]' + C + ' Invalid Mode Selected!' + W)
166
167 if output != 'None':
168 collect['Protocol'] = mode
169 collect['Port'] = str(port)
170 collect['Timeout'] = str(tr_tout)
171 trace_output(output, data, collect)
172
173 def trace_output(output, data, collect):
174 data['module-Traceroute'] = collect
0 #!/usr/bin/env python3
1
2 R = '\033[31m' # red
3 G = '\033[32m' # green
4 C = '\033[36m' # cyan
5 W = '\033[0m' # white
6 Y = '\033[33m' # yellow
7
8 import json
9 import requests
10 from datetime import date
11 from modules.export import export
12
13
14 def timetravel(target, data, output):
15 wayback_total = []
16 result = {}
17 is_avail = False
18 domain_query = f'{target}/*'
19
20 curr_yr = date.today().year
21 last_yr = curr_yr - 5
22
23 print(f'\n{Y}[!] Starting WayBack Machine...{W}\n')
24 print(f'{Y}[!] {C}Checking Availability on Wayback Machine{W}', end='', flush=True)
25 wm_avail = 'http://archive.org/wayback/available'
26 avail_data = {'url': target}
27
28 try:
29 check_rqst = requests.get(wm_avail, params=avail_data, timeout=10)
30 check_sc = check_rqst.status_code
31 if check_sc == 200:
32 check_data = check_rqst.text
33 json_chk_data = json.loads(check_data)
34 avail_data = json_chk_data['archived_snapshots']
35 if len(avail_data) != 0:
36 is_avail = True
37 print(G + '['.rjust(5, '.') + ' Available ]')
38 else:
39 print(R + '['.rjust(5, '.') + ' N/A ]')
40 else:
41 print(f'\n{R}[-] Status : {C}{check_sc}{W}')
42 except Exception as e:
43 print(f'\n{R}[-] Exception : {C}{e}{W}')
44
45 if is_avail is True:
46 print(f'{Y}[!] {C}Fetching URLs{W}', end='', flush=True)
47 wm_url = 'http://web.archive.org/cdx/search/cdx'
48
49 payload = {
50 'url': domain_query,
51 'fl': 'original',
52 'fastLatest': 'true',
53 'from': str(last_yr),
54 'to': str(curr_yr)
55 }
56
57 try:
58 r = requests.get(wm_url, params=payload)
59 r_sc = r.status_code
60 if r_sc == 200:
61 r_data = r.text
62 if len(r_data) != 0:
63 r_data = r_data.split('\n')
64 r_data = set(r_data)
65 print(G + '['.rjust(5, '.') + ' {} ]'.format(str(len(r_data))))
66 wayback_total.extend(r_data)
67
68 if output != 'None':
69 result.update({'links': list(r_data)})
70 result.update({'exported': False})
71 data['module-wayback_urls'] = result
72 fname = f'{output["directory"]}/wayback_urls.{output["format"]}'
73 output['file'] = fname
74 export(output, data)
75 else:
76 print(R + '['.rjust(5, '.') + ' Not Found ]' + W)
77 else:
78 print(R + '['.rjust(5, '.') + ' {} ]'.format(r_sc) + W)
79 except Exception as e:
80 print(f'\n{R}[-] Exception : {C}{e}{W}')
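The availability probe can be exercised on its own; a minimal sketch against the same endpoint, with `example.com` as an illustrative target:

``` python
# Standalone availability check against the same endpoint;
# 'example.com' is an illustrative target.
import requests

resp = requests.get('http://archive.org/wayback/available',
                    params={'url': 'example.com'}, timeout=10)
print(resp.json().get('archived_snapshots', {}))
```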
00 #!/usr/bin/env python3
11
22 import ipwhois
3 from modules.export import export
34
4 R = '\033[31m' # red
5 G = '\033[32m' # green
6 C = '\033[36m' # cyan
7 W = '\033[0m' # white
8 Y = '\033[33m' # yellow
5 R = '\033[31m' # red
6 G = '\033[32m' # green
7 C = '\033[36m' # cyan
8 W = '\033[0m' # white
9 Y = '\033[33m' # yellow
910
10 def whois_lookup(ip, output, data):
11 collect = {}
12 print ('\n' + Y + '[!]' + Y + ' Whois Lookup : ' + W + '\n')
11
12 def whois_lookup(ip_addr, output, data):
13 result = {}
14 print(f'\n{Y}[!] Whois Lookup : {W}\n')
1315 try:
14 lookup = ipwhois.IPWhois(ip)
16 lookup = ipwhois.IPWhois(ip_addr)
1517 results = lookup.lookup_whois()
1618
17 for k,v in results.items():
18 if v != None:
19 if isinstance(v, list):
20 for item in v:
21 for k, v in item.items():
22 if v != None:
23 print (G + '[+]' + C + ' {} : '.format(str(k)) + W + str(v).replace(',', ' ').replace('\r', ' ').replace('\n', ' '))
24 if output != 'None':
25 collect.update({str(k):str(v).replace(',', ' ').replace('\r', ' ').replace('\n', ' ')})
19 for key, val in results.items():
20 if val is not None:
21 if isinstance(val, list):
22 for item in val:
23 for key, value in item.items():
24 if value is not None:
25 if not isinstance(value, list):
26 temp_val = value.replace(',', ' ').replace('\r', ' ').replace('\n', ' ')
27 print(f'{G}[+] {C}{key}: {W}{temp_val}')
28 if output != 'None':
29 result.update({str(key): str(temp_val)})
30 else:
31 temp_val = ', '.join(value)
32 print(f'{G}[+] {C}{key}: {W}{temp_val}')
33 if output != 'None':
34 result.update({str(key): str(temp_val)})
2635 else:
2736 pass
2837 else:
29 print (G + '[+]' + C + ' {} : '.format(str(k)) + W + str(v).replace(',', ' ').replace('\r', ' ').replace('\n', ' '))
38 temp_val = val.replace(',', ' ').replace('\r', ' ').replace('\n', ' ')
39 print(f'{G}[+] {C}{key}: {W}{temp_val}')
3040 if output != 'None':
31 collect.update({str(k):str(v).replace(',', ' ').replace('\r', ' ').replace('\n', ' ')})
41 result.update({str(key): str(temp_val)})
3242 else:
3343 pass
34
3544 except Exception as e:
36 print (R + '[-] Error : ' + C + str(e) + W)
45 print(f'{R}[-] Error : {C}{e}{W}')
3746 if output != 'None':
38 collect.update({'Error':str(e)})
39 pass
40
47 result.update({'Error': str(e)})
48
49 result.update({'exported': False})
50
4151 if output != 'None':
42 whois_output(output, data, collect)
43
44 def whois_output(output, data, collect):
45 data['module-Whois Lookup'] = collect
52 fname = f'{output["directory"]}/whois.{output["format"]}'
53 output['file'] = fname
54 data['module-whois'] = result
55 export(output, data)
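For reference, the lookup itself boils down to a single `ipwhois` call; a minimal standalone sketch, with an illustrative address:

``` python
# Minimal standalone lookup mirroring whois_lookup() above;
# 8.8.8.8 is an illustrative address.
import ipwhois

results = ipwhois.IPWhois('8.8.8.8').lookup_whois()
print(results.get('asn'), '-', results.get('asn_description'))
```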
0 #!/usr/bin/env python3
1
2 from os import getenv, path
3 from json import loads
4
5 home = getenv('HOME')
6 usr_data = f'{home}/.local/share/finalrecon/dumps/'
7 conf_path = f'{home}/.config/finalrecon'
8 path_to_script = path.dirname(path.realpath(__file__))
9 src_conf_path = f'{path_to_script}/conf/'
10 meta_file_path = f'{path_to_script}/metadata.json'
11 keys_file_path = f'{conf_path}/keys.json'
12 conf_file_path = f'{conf_path}/config.json'
13
14 if path.exists(conf_path):
15 pass
16 else:
17 from shutil import copytree
18 copytree(src_conf_path, conf_path, dirs_exist_ok=True)
19
20 with open(conf_file_path, 'r') as config_file:
21 config_read = config_file.read()
22 config_json = loads(config_read)
23 timeout = config_json['common']['timeout']
24
25 ssl_port = config_json['ssl_cert']['ssl_port']
26
27 port_scan_th = config_json['port_scan']['threads']
28
29 dir_enum_th = config_json['dir_enum']['threads']
30 dir_enum_redirect = config_json['dir_enum']['redirect']
31 dir_enum_sslv = config_json['dir_enum']['verify_ssl']
32 dir_enum_dns = config_json['dir_enum']['dns_server']
33 dir_enum_ext = config_json['dir_enum']['extension']
34 dir_enum_wlist = f'{path_to_script}/wordlists/dirb_common.txt'
35
36 export_fmt = config_json['export']['format']
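Pulling together the keys the settings module reads, a `config.json` with the following shape satisfies every lookup above; the keys mirror exactly what the module accesses, while the values are illustrative rather than the shipped defaults:

```
{
  "common": { "timeout": 30 },
  "ssl_cert": { "ssl_port": 443 },
  "port_scan": { "threads": 50 },
  "dir_enum": {
    "threads": 50,
    "redirect": false,
    "verify_ssl": false,
    "dns_server": "1.1.1.1",
    "extension": "txt"
  },
  "export": { "format": "txt" }
}
```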