New upstream version 3.9.3
Sophie Brun
4 years ago
0 | Nov 12th, 2019 |
0 | * Fix unicode error when exporting vulns to CSV | |
1 | * Add vuln attributes to CSV | |
2 | * Fix hostname parsing and add external ID to Qualys plugin |
7 | 7 | New features in the latest update |
8 | 8 | ===================================== |
9 | 9 | |
10 | ||
11 | 3.9.3 [Nov 12th, 2019]: | |
12 | --- | |
13 | * Fix unicode error when exporting vulns to CSV | |
14 | * Add vuln attributes to CSV | |
15 | * Fix hostname parsing and add external ID to Qualys plugin | |
10 | 16 | |
11 | 17 | 3.9 [Oct 3rd, 2019]: |
12 | 18 | --- |
7 | 7 | New features in the latest update |
8 | 8 | ===================================== |
9 | 9 | |
10 | ||
11 | 3.9.3 [Nov 12th, 2019]: | |
12 | --- | |
13 | * Fix unicode error when exporting vulns to CSV | |
14 | * Add vuln attributes to CSV | |
15 | * Fix hostname parsing and add external ID to Qualys plugin | |
10 | 16 | |
11 | 17 | 3.9 [Oct 3rd, 2019]: |
12 | 18 | --- |
1 | 1 | # Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | __version__ = '3.9.2' | |
4 | __version__ = '3.9.3' | |
5 | 5 | __license_version__ = __version__ |
136 | 136 | |
137 | 137 | self.node = item_node |
138 | 138 | self.ip = self.get_text_from_subnode('IP') |
139 | ||
139 | self.hostname = self.get_text_from_subnode('DNS') or '' | |
140 | 140 | self.os = self.get_text_from_subnode('OPERATING_SYSTEM') |
141 | 141 | self.vulns = self.getResults(tree) |
142 | 142 | |
172 | 172 | self.port = self.get_text_from_subnode(self.node, 'PORT') |
173 | 173 | self.protocol = self.get_text_from_subnode(self.node, 'PROTOCOL') |
174 | 174 | self.name = self.get_text_from_subnode(self.node, 'QID') |
175 | self.external_id = self.name | |
175 | 176 | self.result = self.get_text_from_subnode(self.node, 'RESULT') |
176 | 177 | |
177 | 178 | self.severity_dict = { |
204 | 205 | |
205 | 206 | # References |
206 | 207 | self.ref = [] |
207 | self.ref.append(self.get_text_from_glossary('CVE_ID_LIST/CVE_ID/ID')) | |
208 | ||
209 | cve_id = self.get_text_from_glossary('CVE_ID_LIST/CVE_ID/ID') | |
210 | if cve_id: | |
211 | self.ref.append(cve_id) | |
208 | 212 | |
209 | 213 | if self.cvss: |
210 | 214 | self.ref.append('CVSS SCORE: ' + self.cvss) |
307 | 311 | self.protocol = parent.get('protocol') |
308 | 312 | self.name = self.node.get('number') |
309 | 313 | self.external_id = self.node.get('number') |
310 | self.severity = self.node.get('severity') | |
311 | 314 | self.title = self.get_text_from_subnode('TITLE') |
312 | 315 | self.cvss = self.get_text_from_subnode('CVSS_BASE') |
313 | 316 | self.diagnosis = self.get_text_from_subnode('DIAGNOSIS') |
314 | 317 | self.solution = self.get_text_from_subnode('SOLUTION') |
315 | 318 | self.result = self.get_text_from_subnode('RESULT') |
316 | 319 | self.consequence = self.get_text_from_subnode('CONSEQUENCE') |
320 | ||
321 | self.severity_dict = { | |
322 | '1': 'info', | |
323 | '2': 'info', | |
324 | '3': 'med', | |
325 | '4': 'high', | |
326 | '5': 'critical'} | |
327 | ||
328 | self.severity = self.severity_dict.get(self.node.get('severity'), 'info') | |
317 | 329 | |
318 | 330 | self.desc = cleaner_results(self.diagnosis) |
319 | 331 | if self.result: |
389 | 401 | h_id, |
390 | 402 | v.title if v.title else v.name, |
391 | 403 | ref=v.ref, |
392 | severity=str(int(v.severity) - 1), | |
404 | severity=v.severity, | |
393 | 405 | resolution=v.solution if v.solution else '', |
394 | 406 | desc=v.desc, |
395 | 407 | external_id=v.external_id) |
416 | 428 | v.title if v.title else v.name, |
417 | 429 | ref=v.ref, |
418 | 430 | website=item.ip, |
419 | severity=str(int(v.severity) - 1), | |
431 | severity=v.severity, | |
420 | 432 | desc=v.desc, |
421 | 433 | resolution=v.solution if v.solution else '', |
422 | 434 | external_id=v.external_id) |
427 | 439 | s_id, |
428 | 440 | v.title if v.title else v.name, |
429 | 441 | ref=v.ref, |
430 | severity=str(int(v.severity) - 1), | |
442 | severity=v.severity, | |
431 | 443 | desc=v.desc, |
432 | 444 | resolution=v.solution if v.solution else '', |
433 | 445 | external_id=v.external_id) |
1 | 1 | <faraday> |
2 | 2 | |
3 | 3 | <appname>Faraday - Penetration Test IDE</appname> |
4 | <version>3.9.2</version> | |
4 | <version>3.9.3</version> | |
5 | 5 | <debug_status>0</debug_status> |
6 | 6 | <font>-Misc-Fixed-medium-r-normal-*-12-100-100-100-c-70-iso8859-1</font> |
7 | 7 | <home_path></home_path> |
47 | 47 | VulnerabilityGeneric, |
48 | 48 | ) |
49 | 49 | from faraday.server.utils.database import get_or_create |
50 | from faraday.server.utils.export import export_vulns_to_csv | |
50 | 51 | |
51 | 52 | from faraday.server.api.modules.services import ServiceSchema |
52 | 53 | from faraday.server.schemas import ( |
808 | 809 | @route('export_csv/', methods=['GET']) |
809 | 810 | def export_csv(self, workspace_name): |
810 | 811 | confirmed = bool(request.args.get('confirmed')) |
811 | filters = request.args.get('q') or '{}' | |
812 | workspace = self._get_workspace(workspace_name) | |
813 | memory_file = cStringIO.StringIO() | |
812 | filters = request.args.get('q', '{}') | |
814 | 813 | custom_fields_columns = [] |
815 | 814 | for custom_field in db.session.query(CustomFieldsSchema).order_by(CustomFieldsSchema.field_order): |
816 | 815 | custom_fields_columns.append(custom_field.field_name) |
817 | headers = ["confirmed", "id", "date", "name", "severity", "service", "target", "desc", "status", "hostnames"] | |
818 | headers += custom_fields_columns | |
819 | writer = csv.DictWriter(memory_file, fieldnames=headers) | |
820 | writer.writeheader() | |
821 | 816 | vulns_query = self._filter(filters, workspace_name, confirmed) |
822 | for vuln in vulns_query: | |
823 | vuln_description = re.sub(' +', ' ', vuln['description'].strip().replace("\n", "")) | |
824 | vuln_date = vuln['metadata']['create_time'] | |
825 | if vuln['service']: | |
826 | service_fields = ["status", "protocol", "name", "summary", "version", "ports"] | |
827 | service_fields_values = map(lambda field: "%s:%s" % (field, vuln['service'][field]), service_fields) | |
828 | vuln_service = " - ".join(service_fields_values) | |
829 | else: | |
830 | vuln_service = "" | |
831 | ||
832 | if all(isinstance(hostname, (str, unicode)) for hostname in vuln['hostnames']): | |
833 | vuln_hostnames = vuln['hostnames'] | |
834 | else: | |
835 | vuln_hostnames = [str(hostname['name']) for hostname in vuln['hostnames']] | |
836 | ||
837 | vuln_dict = {"confirmed": vuln['confirmed'], "id": vuln['_id'], "date": vuln_date, | |
838 | "severity": vuln['severity'], "target": vuln['target'], "status": vuln['status'], "hostnames": vuln_hostnames, | |
839 | "desc": vuln_description, "name": vuln['name'], "service": vuln_service} | |
840 | if vuln['custom_fields']: | |
841 | for field_name, value in vuln['custom_fields'].items(): | |
842 | if field_name in custom_fields_columns: | |
843 | vuln_dict.update({field_name: value}) | |
844 | writer.writerow(vuln_dict) | |
845 | memory_file.seek(0) | |
817 | memory_file = export_vulns_to_csv(vulns_query, custom_fields_columns) | |
846 | 818 | return send_file(memory_file, |
847 | 819 | attachment_filename="Faraday-SR-%s.csv" % workspace_name, |
848 | 820 | as_attachment=True, |
0 | import csv | |
1 | from StringIO import StringIO | |
2 | from io import BytesIO | |
3 | import re | |
4 | import logging | |
5 | ||
6 | from faraday.server.models import ( | |
7 | db, | |
8 | Comment | |
9 | ) | |
10 | ||
11 | logger = logging.getLogger(__name__) | |
12 | ||
def export_vulns_to_csv(vulns, custom_fields_columns=None):
    """Serialize an iterable of vulnerability dicts into an in-memory CSV.

    Each entry of *vulns* is expected to be a dict-like serialized
    vulnerability carrying keys such as '_id', 'description', 'metadata',
    'service', 'hostnames', 'custom_fields', 'impact' and 'type'
    (TODO confirm exact schema against the vulns API serializer).
    *custom_fields_columns* is an optional list of extra column names
    appended after the fixed header set; custom-field values are only
    exported for names present in this list.

    Returns a BytesIO holding the UTF-8 encoded CSV, rewound to offset 0
    so callers can hand it straight to e.g. Flask's send_file().

    NOTE(review): this relies on the Python 2 builtin `unicode` and the
    old `StringIO` module, so it is Python-2-only code.
    """
    if custom_fields_columns is None:
        # Mutable-default-argument-safe: default to "no extra columns".
        custom_fields_columns = []
    buffer = StringIO()
    # Fixed column set; declared custom field columns are appended after.
    headers = [
        "confirmed", "id", "date", "name", "severity", "service",
        "target", "desc", "status", "hostnames", "comments", "owner", "os", "resolution", "easeofresolution", "web_vulnerability",
        "data", "website", "path", "status_code", "request", "method", "params", "pname", "query",
        "policyviolations", "external_id", "impact_confidentiality", "impact_integrity", "impact_availability",
        "impact_accountability"
    ]
    headers += custom_fields_columns
    writer = csv.DictWriter(buffer, fieldnames=headers)
    writer.writeheader()
    for vuln in vulns:
        # Gather the text of every comment attached to this vulnerability.
        # NOTE(review): one DB query per vulnerability — N+1 pattern.
        comments = []
        for comment in db.session.query(Comment).filter_by(object_type='vulnerability', object_id=vuln['_id']).all():
            comments.append(comment.text)
        # Collapse the description onto one line with single spaces so it
        # fits a single CSV cell cleanly.
        vuln_description = re.sub(' +', ' ', vuln['description'].strip().replace("\n", ""))
        vuln_date = vuln['metadata']['create_time']
        if vuln['service']:
            # Flatten the service sub-dict to "field:value - field:value - ...".
            service_fields = ["status", "protocol", "name", "summary", "version", "ports"]
            service_fields_values = ["%s:%s" % (field, vuln['service'][field]) for field in service_fields]
            vuln_service = " - ".join(service_fields_values)
        else:
            vuln_service = ""
        # 'hostnames' entries may be plain strings or dicts with a 'name'
        # key; normalize to a flat list of strings either way.
        if all(isinstance(hostname, (str, unicode)) for hostname in vuln['hostnames']):
            vuln_hostnames = vuln['hostnames']
        else:
            vuln_hostnames = [str(hostname['name']) for hostname in vuln['hostnames']]

        vuln_dict = {"confirmed": vuln['confirmed'],
                     "id": vuln.get('_id', None),
                     "date": vuln_date,
                     "severity": vuln.get('severity', None),
                     "target": vuln.get('target', None),
                     "status": vuln.get('status', None),
                     "hostnames": vuln_hostnames,
                     "desc": vuln_description,
                     "name": vuln.get('name', None),
                     "service": vuln_service,
                     "comments": comments,
                     "owner": vuln.get('owner', None),
                     "os": vuln.get('host_os', None),
                     "resolution": vuln.get('resolution', None),
                     "easeofresolution": vuln.get('easeofresolution', None),
                     "data": vuln.get('data', None),
                     "website": vuln.get('website', None),
                     "path": vuln.get('path', None),
                     "status_code": vuln.get('status_code', None),
                     "request": vuln.get('request', None),
                     "method": vuln.get('method', None),
                     "params": vuln.get('params', None),
                     "pname": vuln.get('pname', None),
                     "query": vuln.get('query', None),
                     "policyviolations": vuln.get('policyviolations', None),
                     "external_id": vuln.get('external_id', None),
                     "impact_confidentiality": vuln["impact"]["confidentiality"],
                     "impact_integrity": vuln["impact"]["integrity"],
                     "impact_availability": vuln["impact"]["availability"],
                     "impact_accountability": vuln["impact"]["accountability"],
                     "web_vulnerability": vuln['type'] == "VulnerabilityWeb"
                     }
        # Only export custom fields that were declared as columns, so the
        # row never contains keys unknown to the DictWriter.
        if vuln['custom_fields']:
            for field_name, value in vuln['custom_fields'].items():
                if field_name in custom_fields_columns:
                    vuln_dict.update({field_name: value})
        # Encode every text value to UTF-8 bytes before writing: Python 2's
        # csv module cannot write unicode objects directly (this is the
        # "unicode error when exporting vulns to CSV" fixed in 3.9.3).
        res = {}
        for key, value in vuln_dict.items():
            if isinstance(value, (str, unicode)):
                res[key] = value.encode('utf8')
            else:
                res[key] = value
        writer.writerow(res)
    # Copy the accumulated CSV text into a BytesIO rewound to the start,
    # ready for the caller to stream as an attachment.
    memory_file = BytesIO()
    memory_file.write(buffer.getvalue())
    memory_file.seek(0)
    return memory_file
91 | ||
92 |
1889 | 1889 | def test_export_vuln_csv_empty_workspace(self, test_client): |
1890 | 1890 | Vulnerability.query.delete() |
1891 | 1891 | res = test_client.get(self.url() + 'export_csv/') |
1892 | expected_headers = "confirmed,id,date,name,severity,service,target,desc,status,hostnames".split(',') | |
1892 | expected_headers = [ | |
1893 | "confirmed", "id", "date", "name", "severity", "service", | |
1894 | "target", "desc", "status", "hostnames", "comments", "owner", "os", "resolution", "easeofresolution", "web_vulnerability", | |
1895 | "data", "website", "path", "status_code", "request", "method", "params", "pname", "query", | |
1896 | "policyviolations", "external_id", "impact_confidentiality", "impact_integrity", "impact_availability", | |
1897 | "impact_accountability" | |
1898 | ] | |
1893 | 1899 | assert res.status_code == 200 |
1894 | 1900 | assert res.data.strip('\r\n').split(',') == expected_headers |
1895 | 1901 | |
1899 | 1905 | confirmed_vulns = VulnerabilityFactory.create(confirmed=True, workspace=workspace) |
1900 | 1906 | session.add(confirmed_vulns) |
1901 | 1907 | session.commit() |
1902 | res = test_client.get(self.url(workspace=workspace) + 'export_csv/?filter?q={"filters":[{"name":"confirmed","op":"==","val":"true"}]}') | |
1903 | expected_headers = "confirmed,id,date,name,severity,service,target,desc,status,hostnames".split(',') | |
1908 | res = test_client.get(self.url(workspace=workspace) + 'export_csv/?q={"filters":[{"name":"confirmed","op":"==","val":"true"}]}') | |
1909 | assert res.status_code == 200 | |
1910 | assert self._verify_csv(res.data, confirmed=True) | |
1911 | ||
1912 | @pytest.mark.usefixtures('ignore_nplusone') | |
1913 | def test_export_vuln_csv_unicode_bug(self, test_client, session): | |
1914 | workspace = WorkspaceFactory.create() | |
1915 | desc = u'Latin-1 Supplement \xa1 \xa2 \xa3 \xa4 \xa5 \xa6 \xa7 \xa8' | |
1916 | confirmed_vulns = VulnerabilityFactory.create( | |
1917 | confirmed=True, | |
1918 | description=desc, | |
1919 | workspace=workspace) | |
1920 | session.add(confirmed_vulns) | |
1921 | session.commit() | |
1922 | res = test_client.get(self.url(workspace=workspace) + 'export_csv/') | |
1904 | 1923 | assert res.status_code == 200 |
1905 | 1924 | assert self._verify_csv(res.data, confirmed=True) |
1906 | 1925 | |
1910 | 1929 | confirmed_vulns = VulnerabilityFactory.create(confirmed=True, severity='critical', workspace=workspace) |
1911 | 1930 | session.add(confirmed_vulns) |
1912 | 1931 | session.commit() |
1913 | res = test_client.get(self.url(workspace=workspace) + 'export_csv/?filter?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}') | |
1914 | expected_headers = "confirmed,id,date,name,severity,service,target,desc,status,hostnames".split(',') | |
1932 | res = test_client.get(self.url(workspace=workspace) + 'export_csv/?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}') | |
1915 | 1933 | assert res.status_code == 200 |
1916 | 1934 | assert self._verify_csv(res.data, confirmed=True, severity='critical') |
1917 | 1935 | |
1920 | 1938 | self.first_object.confirmed = True |
1921 | 1939 | session.add(self.first_object) |
1922 | 1940 | session.commit() |
1923 | ||
1924 | 1941 | res = test_client.get(self.url() + 'export_csv/?confirmed=true') |
1925 | expected_headers = "confirmed,id,date,name,severity,service,target,desc,status,hostnames".split(',') | |
1926 | 1942 | assert res.status_code == 200 |
1927 | 1943 | self._verify_csv(res.data, confirmed=True) |
1928 | 1944 | |
1954 | 1970 | |
1955 | 1971 | def _verify_csv(self, raw_csv_data, confirmed=False, severity=None): |
1956 | 1972 | custom_fields = [custom_field.field_name for custom_field in CustomFieldsSchema.query.all()] |
1957 | expected_headers = "confirmed,id,date,name,severity,service,target,desc,status,hostnames".split(',') | |
1973 | expected_headers = [ | |
1974 | "confirmed", "id", "date", "name", "severity", "service", | |
1975 | "target", "desc", "status", "hostnames", "comments", "owner", "os", "resolution", "easeofresolution", "web_vulnerability", | |
1976 | "data", "website", "path", "status_code", "request", "method", "params", "pname", "query", | |
1977 | "policyviolations", "external_id", "impact_confidentiality", "impact_integrity", "impact_availability", | |
1978 | "impact_accountability" | |
1979 | ] | |
1958 | 1980 | final_expected_headers = expected_headers + custom_fields |
1959 | 1981 | csv_data = csv.reader(BytesIO(raw_csv_data), delimiter=',') |
1960 | 1982 | for index, line in enumerate(csv_data): |
2183 | 2205 | [{"name":"hostnames","op":"eq","val":"pepe"}] |
2184 | 2206 | } |
2185 | 2207 | res = test_client.get( |
2186 | '/v2/ws/{}/vulns/filter?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2208 | '/v2/ws/{}/vulns/?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2187 | 2209 | assert res.status_code == 200 |
2188 | 2210 | assert res.json['count'] == 1 |
2189 | 2211 | assert res.json['vulnerabilities'][0]['id'] == vuln.id |
2204 | 2226 | [{"name":"hostnames","op":"eq","val":"pepe"}] |
2205 | 2227 | } |
2206 | 2228 | res = test_client.get( |
2207 | '/v2/ws/{}/vulns/filter?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2229 | '/v2/ws/{}/vulns/?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2208 | 2230 | assert res.status_code == 200 |
2209 | 2231 | assert res.json['count'] == 1 |
2210 | 2232 | assert res.json['vulnerabilities'][0]['id'] == vuln.id |
2225 | 2247 | [{"name": "hostnames", "op": "eq", "val": "pepe"}] |
2226 | 2248 | } |
2227 | 2249 | res = test_client.get( |
2228 | '/v2/ws/{}/vulns/filter?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2250 | '/v2/ws/{}/vulns/?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2229 | 2251 | assert res.status_code == 200 |
2230 | 2252 | assert res.json['count'] == 1 |
2231 | 2253 | assert res.json['vulnerabilities'][0]['id'] == vuln.id |
2245 | 2267 | {"and": [{"name": "hostnames","op": "eq", "val": "pepe"}]} |
2246 | 2268 | ]} |
2247 | 2269 | res = test_client.get( |
2248 | '/v2/ws/{}/vulns/filter?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2270 | '/v2/ws/{}/vulns/?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2249 | 2271 | assert res.status_code == 200 |
2250 | 2272 | assert res.json['count'] == 1 |
2251 | 2273 | assert res.json['vulnerabilities'][0]['id'] == vuln.id |
2273 | 2295 | ]} |
2274 | 2296 | ]} |
2275 | 2297 | res = test_client.get( |
2276 | '/v2/ws/{}/vulns/filter?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2298 | '/v2/ws/{}/vulns/?q={}'.format(workspace.name, json.dumps(query_filter))) | |
2277 | 2299 | assert res.status_code == 200 |
2278 | 2300 | assert res.json['count'] == 1 |
2279 | 2301 | assert res.json['vulnerabilities'][0]['id'] == vuln.id |