New upstream version 3.15.0
Sophie Brun
3 years ago
104 | 104 | - mkdir -p ~/.config/cachix |
105 | 105 | - export USER=$(whoami) |
106 | 106 | - echo "$CACHIX_CONFG" >~/.config/cachix/cachix.dhall |
107 | - !reference [ .clone_and_replace_www, script ] | |
107 | 108 | - cachix use faradaysec |
108 | - nix-build ./release.nix -A dockerImage --argstr dockerName $CI_REGISTRY_IMAGE --argstr dockerTag latest | |
109 | - nix-build ./release.nix -A dockerImage --argstr dockerName $CI_REGISTRY_IMAGE --argstr dockerTag latest --arg useLastCommit false | |
109 | 110 | - cp $(readlink result) faraday-server-docker.tar.gz |
110 | 111 | artifacts: |
111 | 112 | paths: |
20 | 20 | image: python:3 |
21 | 21 | stage: publish |
22 | 22 | script: |
23 | - !reference [ .clone_and_replace_www, script ] | |
23 | 24 | - apt-get update -qy |
24 | 25 | - apt-get install twine -y |
25 | 26 | - python setup.py sdist bdist_wheel |
0 | .qa_integration: | |
0 | qa_integration: | |
1 | 1 | stage: upload_testing |
2 | 2 | variables: |
3 | 3 | REMOTE_BRANCH: $CI_COMMIT_REF_NAME |
0 | * Use Python 3 instead of Python 2 in the Faraday Server | |
1 | * Add ability to manage agents with multiple executors | |
2 | * Agents can be run with custom arguments | |
3 | * Improved processing of uploaded reports. Now it is much faster! | |
4 | * Add custom fields of type `choice` | |
5 | * Fix vuln status transition in bulk create API (mark closed vulns as re-opened when they are triggered again) | |
6 | * Fix bug when using non-existent workspaces in Faraday GTK Client | |
7 | * Set service name as required in the Web UI | |
8 | * Validate the start date of a workspace is not greater than the end date | |
9 | * Fix command API when year is invalid | |
10 | * When SSL misconfigurations cause websockets to fail, the server is no longer blocked from starting | |
11 | * Check for invalid service port number in the Web UI | |
12 | * Fix dashboard tooltips for vulnerabilities | |
13 | * Fix bug when GTK client lost connection to the server | |
14 | * Fix style issues in "Hosts by Service" modal of the dashboard | |
15 | * Add API for bulk delete of vulnerabilities | |
16 | * Add missing vuln attributes to exported CSV | |
17 | * `faraday-manage support` now displays the Operating System version | |
18 | * Notify when `faraday-manage` can't run because of a PostgreSQL HBA config error
0 | * Fix installation with `pip install --no-binary :all: faradaysec` | |
1 | * Force usage of webargs 5 (webargs 6 broke backwards compatibility) | |
2 | * Use latest version of faraday-plugins | |
3 | * Fix broken "Faraday Plugin" menu entry in the GTK client | |
4 | * Extract CSV export logic to reuse it for reports |
0 | * Fix Cross-Site Request Forgery (CSRF) vulnerability in all JSON API endpoints. | |
1 | This was possible because a third-party library does not implement proper | |
2 | Content-Type header validation. To mitigate the vulnerability, we set the | |
3 | session cookie with the `SameSite: Lax` property (a configuration sketch follows this list). | |
4 | * Fix Faraday Server logs always being at debug level | |
5 | * Add update date column when exporting vulnerabilities to CSV | |
6 | * Fix unicode error when exporting vulnerabilities to CSV |
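
The `SameSite: Lax` mitigation above corresponds to a standard Flask session-cookie setting. A minimal sketch, assuming a plain Flask app rather than Faraday's actual application factory:

```python
from flask import Flask

app = Flask(__name__)

# Tell browsers not to attach the session cookie to cross-site requests,
# which blocks the CSRF vector described in the entry above.
app.config["SESSION_COOKIE_SAMESITE"] = "Lax"

# Hardening that commonly accompanies SameSite (not stated in the changelog):
app.config["SESSION_COOKIE_HTTPONLY"] = True
```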
0 | * Move GTK client to [another repository](https://github.com/infobyte/faraday-client) to improve release times. | |
1 | * Fix formula injection vulnerability when exporting vulnerability data to CSV. This was considered a low impact vulnerability. | |
2 | * Remove "--ssl" parameter. Read SSL information from the config file. | |
3 | * Add OpenAPI autogenerated documentation support | |
4 | * Show agent information in command history | |
5 | * Add bulk delete endpoint for hosts API | |
6 | * Add column with information to track agent execution data | |
7 | * Add tool attribute to vulnerability to avoid incorrectly showing "Web UI" as creator tool | |
8 | * Add sorting by target in credentials view | |
9 | * Add creator information when uploading reports or using the bulk create API | |
10 | * Add feature to disable rules in the searcher | |
11 | * Add API endpoint to export Faraday data to Metasploit XML format | |
12 | * Change websocket URL route from / to /websockets (a client sketch follows this changelog entry) | |
13 | * Use run date instead of creation date when plugins report specifies it | |
14 | * Improve knowledge base UX | |
15 | * Improve workspace table and status report table UX. | |
16 | * Improve format of exported CSV to include more fields | |
17 | * Sort results in count API endpoint | |
18 | * Limit description width in knowledge base | |
19 | * Change log date format to ISO 8601 | |
20 | * Fix parsing server port config in server.ini | |
21 | * Fix bug when \_rev was sent to the hosts API | |
22 | * Send JSON response when you get a 500 or 404 error | |
23 | * Fix bug parsing invalid data in NullToBlankString | |
24 | ||
25 | Changes in plugins (only available through Web UI, not in GTK client yet): | |
26 | ||
27 | New plugins: | |
28 | ||
29 | * Checkmarx | |
30 | * Faraday\_csv (output of exported Faraday csv) | |
31 | * Qualyswebapp | |
32 | * Whitesource | |
33 | ||
34 | Updated plugins: | |
35 | ||
36 | * Acunetix | |
37 | * AppScan | |
38 | * Arachni | |
39 | * Nessus | |
40 | * Netsparker | |
41 | * Netsparker cloud | |
42 | * Nexpose | |
43 | * Openvas | |
44 | * QualysGuard | |
45 | * Retina | |
46 | * W3af | |
47 | * WPScan | |
48 | * Webinspect | |
49 | * Zap |
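
Clients written against pre-3.11 releases need their connection URLs updated for the `/websockets` route change noted above. A minimal sketch using the third-party `websockets` package; the host, port, and payload are assumptions for illustration, not Faraday's documented client protocol:

```python
import asyncio
import websockets  # third-party: pip install websockets

async def connect():
    # Before 3.11 the server listened on "/"; the route is now "/websockets".
    uri = "ws://localhost:9000/websockets"  # host/port are assumptions
    async with websockets.connect(uri) as ws:
        await ws.send('{"action": "JOIN_WORKSPACE"}')  # illustrative payload
        print(await ws.recv())

asyncio.run(connect())
```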
0 | * Fix missing shodan icon and invalid link in dashboard and hosts list | |
1 | * Upgrade marshmallow, webargs, werkzeug and flask-login dependencies to | |
2 | latest versions in order to make packaging for distros easier |
0 | * Now agents can upload data to multiple workspaces | |
1 | * Add agent and executor data to Activity Feed | |
2 | * Add session timeout configuration to the server.ini configuration file (a config-reading sketch follows this list) | |
3 | * Add hostnames to already existing hosts when importing a report | |
4 | * Add new faraday background image | |
5 | * Display an error when uploading an invalid report | |
6 | * Use minimized JS libraries to improve page load time | |
7 | * Fix aspect ratio distortion in evidence tab of vulnerability preview | |
8 | * Fix broken Knowledge Base upload modal | |
9 | * Fix closing of websocket connections when communicating with Agents | |
10 | * Change Custom Fields names in exported CSV to make columns compatible with | |
11 | `faraday_csv` plugin | |
12 | * Fix import CSV for vuln template: some values were overwritten with default values. | |
13 | * Catch errors in faraday-manage commands when the connection string is not | |
14 | specified in the server.ini file | |
15 | * Fix bug that generated a session when using Token authentication | |
16 | * Fix bug in API requests when an invalid filter is used | |
17 | * Cleanup old sessions when a user logs in | |
18 | * Remove unmaintained Flask-Restless dependency | |
19 | * Remove pbkdf2\_sha1 and plain password schemes. We only support bcrypt |
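
The session timeout mentioned above is configured in `server.ini`. A sketch of reading it with `configparser`; the section and option names here are assumptions for illustration and should be checked against the shipped `server.ini`:

```python
import configparser

config = configparser.ConfigParser()
config.read("server.ini")

# Hypothetical section/option names -- verify against your server.ini.
timeout = config.getint("faraday_server", "session_timeout", fallback=12)
print(f"Sessions expire after {timeout} hours")
```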
0 | * ADD RESTless filter to multiple views, improving searches (a query sketch follows this list) | |
1 | * ADD "extras" modal in options menu, linking to other Faraday resources | |
2 | * ADD `import vulnerability templates` command to faraday-manage | |
3 | * ADD `generate nginx config` command to faraday-manage | |
4 | * ADD vulnerabilities severities count to host | |
5 | * ADD Active Agent columns to workspace | |
6 | * ADD critical vulns count to workspace | |
7 | * ADD `Remember me` login option | |
8 | * ADD distinguish host flag | |
9 | * ADD a create_date field to comments | |
10 | * FIX to use new webargs version | |
11 | * FIX Custom Fields view in KB (Vulnerability Templates) | |
12 | * FIX bug on filter endpoint for vulnerabilities with offset and limit parameters | |
13 | * FIX bug raising `403 Forbidden` HTTP error when the first workspace was not active | |
14 | * FIX bug when changing the token expiration time | |
15 | * FIX bug in Custom Fields type Choice when choice name is too long. | |
16 | * FIX performance of the Vulnerability Filter endpoint using joinedload; removed several n+1 query patterns | |
17 | * MOD Update the template.ini for new installations | |
18 | * MOD Improve SMTP configuration | |
19 | * MOD The agent now indicates how long it has been running (faraday-agent-dispatcher v1.4.0) | |
20 | * MOD Type "Vulnerability Web" cannot have "Host" type as a parent when creating data in bulk | |
21 | * MOD Default expiration time changed from 1 month to 12 hours | |
22 | * MOD Improve data reference when uploading a new report | |
23 | * MOD Refactor Knowledge Base's bulk create to also accept multiple creations from vulns in the status report. | |
24 | * MOD All HTTP OPTIONS endpoints are now public | |
25 | * MOD Change documentation and what's new links in about | |
26 | * REMOVE Flask static endpoint | |
27 | * REMOVE of our custom logger |
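
The RESTless filters noted at the top of this list follow the Flask-Restless query style: a JSON document passed in a `q` parameter. A hedged sketch with the third-party `requests` package; the base URL, workspace name, and authentication are placeholders:

```python
import json
import requests  # third-party: pip install requests

# Flask-Restless style filter: vulns whose severity equals "critical".
query = {"filters": [{"name": "severity", "op": "eq", "val": "critical"}]}

resp = requests.get(
    "http://localhost:5985/_api/v2/ws/demo/vulns/filter",  # path is an assumption
    params={"q": json.dumps(query)},
    cookies={"session": "<session-cookie>"},  # placeholder auth
)
print(resp.status_code)
```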
0 | * ADD forgot password | |
1 | * ADD update services by bulk_create | |
2 | * ADD FARADAY_DISABLE_LOGS variable to disable logging to the filesystem | |
3 | * ADD security logs in `audit.log` file | |
4 | * UPD security dependency Flask-Security-Too v3.4.4 | |
5 | * MOD rename total_rows field in filter host response | |
6 | * MOD improved CSV export performance by reducing the number of queries | |
7 | * MOD sanitize the content of vulns' request and response | |
8 | * MOD don't strip newlines in description when exporting CSV | |
9 | * MOD improved threads management on exception | |
10 | * MOD improved performance on vulnerability filter | |
11 | * MOD improved [API documentation](www.api.faradaysec.com) | |
12 | * FIX upload a report with invalid custom fields | |
13 | * ADD v3 API (a request sketch follows this list), which includes: | |
14 | * All endpoints end without `/` | |
15 | * `PATCH {model}/id` endpoints | |
16 | * ~~Bulk update via PATCH `{model}` endpoints~~ In a future release | |
17 | * ~~Bulk delete via DELETE `{model}` endpoints~~ In a future release | |
18 | * Endpoints removed: | |
19 | * `/v2/ws/<workspace_id>/activate/` | |
20 | * `/v2/ws/<workspace_id>/change_readonly/` | |
21 | * `/v2/ws/<workspace_id>/deactivate/` | |
22 | * `/v2/ws/<workspace_name>/hosts/bulk_delete/` | |
23 | * `/v2/ws/<workspace_name>/vulns/bulk_delete/` | |
24 | * Endpoints updated: | |
25 | * `/v2/ws/<workspace_name>/vulns/<int:vuln_id>/attachments/` => \ | |
26 | `/v3/ws/<workspace_name>/vulns/<int:vuln_id>/attachment` |
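
The v3 rules above (no trailing slash, `PATCH {model}/id` for partial updates) change how clients call the API. A hedged `requests` sketch; host, prefix, workspace, and payload are placeholders, not a verified schema:

```python
import requests  # third-party: pip install requests

BASE = "http://localhost:5985/_api/v3"  # host and prefix are assumptions

# v3 endpoints end without "/" and accept PATCH on a single object.
resp = requests.patch(
    f"{BASE}/ws/demo/vulns/42",
    json={"severity": "high"},  # partial update: only the changed field
    cookies={"session": "<session-cookie>"},  # placeholder auth
)
resp.raise_for_status()
```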
0 | * ADD New plugins: | |
1 | * microsoft baseline security analyzer | |
2 | * nextnet | |
3 | * openscap | |
4 | * FIX old versions of Nessus plugins bugs |
0 | * MOD MAJOR breaking change: use the frontend from another repository | |
1 | * ADD `last_run` to executors and agents | |
2 | * ADD ignore info vulns option (from faraday-plugins 1.4.3) | |
3 | * ADD invalid logins are registered in `audit.log` | |
4 | * ADD agent registration tokens are now 6-digit codes, automatically regenerated every 30 seconds (see the illustrative sketch after this list) | |
5 | * MOD Fix logout redirect loop | |
6 | * REMOVE support for native SSL |
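
The short-lived registration tokens described above behave like time-based one-time passwords: 6 digits, rotating every 30 seconds. An illustrative sketch with the third-party `pyotp` package — this shows the mechanism only and is not Faraday's actual implementation:

```python
import pyotp  # third-party: pip install pyotp

# A 6-digit code regenerated every 30 seconds, analogous to the agent
# registration tokens above (illustrative, not Faraday's code).
totp = pyotp.TOTP(pyotp.random_base32(), digits=6, interval=30)

code = totp.now()               # current 6-digit code
print(code, totp.verify(code))  # verify() is True while the code is fresh
```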
0 | * Updated plugins package, which updates the AppScan plugin |
0 | * ADD `Basic Auth` support (a request sketch follows this list) | |
1 | * ADD support for GET method in websocket_tokens, POST will be deprecated in the future | |
2 | * ADD CVSS(String), CWE(String), CVE(relationship) columns to vulnerability model and API | |
3 | * ADD agent token's API now reports the renewal cycle duration | |
4 | * MOD Improve database model to be able to delete workspaces quickly | |
5 | * MOD Improve code style and uses (less flake8 exceptions, py3 `super` style, Flask app as singleton, etc) | |
6 | * MOD workspaces' name regex to verify they cannot contain a forward slash (`/`) | |
7 | * MOD Improve bulk create logs | |
8 | * FIX Own schema breaking Marshmallow 3.11.0+ | |
9 | * UPD flask_security_too to version 4.0.0+ |
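
The `Basic Auth` support and the new GET method on `websocket_tokens` can be exercised together. A hedged `requests` sketch; the exact path, port, and response shape are assumptions based on the changelog:

```python
import requests  # third-party: pip install requests

# 3.15 accepts GET here (POST still works but is slated for deprecation)
# and supports HTTP Basic Auth. The path is an assumption.
resp = requests.get(
    "http://localhost:5985/_api/v3/ws/demo/websocket_tokens",
    auth=("faraday", "<password>"),  # Basic Auth, new in 3.15
)
print(resp.status_code, resp.text)
```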
0 | May 18th, 2021 |
0 | * Added logical operator AND to status report search | |
1 | * Restkit dependency removed. | |
2 | * Improvement on manage.py change-password | |
3 | * Add feature to show only unconfirmed vulns. | |
4 | * Add ssl information to manage.py status-check | |
5 | * Update wpscan plugin to support latest version. | |
6 | * Allow workspace names starting with numbers. |
0 | * Add workspace disable feature | |
1 | * Add mac vendor to host and services | |
2 | * Fix typos and add sorting in workspace name (workspace list view) | |
3 | * Improve warning when you try to select hosts instead of services as targets of a Vulnerability Web | |
4 | * Deleted old Nexpose plugin. Now Faraday uses Nexpose-Full. | |
5 | * Update sqlmap plugin | |
6 | * Add updated zap plugin | |
7 | * Add hostnames to nessus plugin | |
8 | * Python interpreter in SSLCheck plugin is not hardcoded anymore. | |
9 | * Fix importer key error when some data from couchdb didn't contain the "type" key | |
10 | * Fix AttributeError when importing vulns without exploitation from CouchDB | |
11 | * Fix KeyError in importer.py. This issue occurred during the import of Vulnerability Templates | |
12 | * Fix error when file config.xml doesn't exist at the moment of executing initdb | |
13 | * Improve invalid credentials warning by telling the user to run Faraday GTK with the --login option | |
14 | * Fix typos in VulnDB and add two new vulnerabilities (Default Credentials, Privilege Escalation) | |
15 | * Improved tests performance with new versions of the Faker library | |
16 | * `abort()` calls were checked and changed to `flask.abort()` |
0 | * In GTK, check that active_workspace is not null | |
1 | * Add fbruteforce services fplugin | |
2 | * Attachments can be added to a vulnerability through the API. | |
3 | * Catch gaierror error on lynis plugin | |
4 | * Add OR and NOT with parenthesis support on status report search | |
5 | * Info API is now public | |
6 | * Web UI now detects Appscan plugin | |
7 | * Improve performance on the workspace using a custom query | |
8 | * Workspaces can be set as active/disabled in the welcome page. | |
9 | * Change Nmap plugin, response field in VulnWeb now goes to Data field. | |
10 | * Update code to support latest SQLAlchemy version | |
11 | * Fix `create_vuln` fplugin bug that incorrectly reported duplicated vulns | |
12 | * Attachments on a vulnerability can be deleted through the API. | |
13 | * Improvement in the coverage of the tests. |
0 | * Redesign of new/edit vulnerability forms | |
1 | * Add new custom fields feature to vulnerabilities | |
2 | * Add ./manage.py migrate to perform alembic migrations | |
3 | * Faraday will use webargs==4.4.1 because webargs==5.0.0 fails with Python2 | |
4 | * New system for online plugins using Threads, a few fixes for metasploit plugin online also. | |
5 | * Fix: "python manage.py process-reports" now stops once all reports have been processed | |
6 | * Fix bug in query when it checks if a vulnerability or a workspace exists | |
7 | * Fix: once a workspace is created through the Web UI, a folder with its name is created inside ~/.faraday/report/ | |
8 | * The manage.py now has a new support functionality that creates a .zip file with all the information Faraday's support team will need to troubleshoot your issue | |
9 | * Status-check checks PostgreSQL encoding | |
10 | * Fix a bug where, when report importing failed, the command duration said "In Progress" forever. | |
11 | * Fix confirmed bug in vulns API | |
12 | * Update websockets code to use latest lib version | |
13 | * bootstrap updated to v3.4.0 | |
14 | * Manage.py support now prints a message once it finishes the process. | |
15 | * Update Lynis to its version 2.7.1 | |
16 | * Updated arp-scan plugin, added support in the Host class for mac address which was deprecated before v3.0 | |
17 | * OpenVAS Plugin now supports OpenVAS v-9.0.3 |
0 | * Fix CSRF (Cross-Site Request Forgery) vulnerability in the vulnerability attachments API. | |
1 | This allowed an attacker to upload evidence to vulns. Exploiting it required knowing the | |
2 | target workspace name and vulnerability ID, which made it harder to abuse. We | |
3 | classified this as a low-impact vulnerability. | |
4 | * Readonly and disabled workspaces | |
5 | * Add fields 'impact', 'easeofresolution' and 'policyviolations' to vulnerability_template | |
6 | * Add pagination in 'Command history', 'Last Vulnerabilities', 'Activity logs' into dashboard | |
7 | * Add status_code field to web vulnerability | |
8 | * Preserve selection after bulk edition of vulnerabilities in the Web UI | |
9 | * Faraday's database will be created using UTF-8 encoding | |
10 | * Fix loop in the "select a different workspace" dialog when the workspace list was empty | |
11 | * Fix bug when creating duplicate custom fields | |
12 | * Fix bug when loading a server.ini with extra configs | |
13 | * Fix `./manage.py command`. It wasn't working since the last schema migration | |
14 | * `./manage.py createsuperuser` command renamed to `./manage.py create-superuser` | |
15 | * Fix bug when non-numeric vulnerability IDs were passed to the attachments API | |
16 | * Fix logic in search exploits | |
17 | * Add ability to 'Searcher' to execute rules in loop with dynamic variables | |
18 | * Send searcher alert with custom mail | |
19 | * Add gitlab-ci.yml file to execute test and pylint on gitlab runner | |
20 | * Fix 500 error when updating services and vulns with specific read-only parameters set | |
21 | * Fix SQLMap plugin to support newer versions of the tool | |
22 | * Improve service's parser for Lynis plugin | |
23 | * Fix bug when parsing URLs in Acunetix reports | |
24 | * Fix and update NetSparker Plugin | |
25 | * Fix bug in nessus plugin. It was trying to create a host without IP. Enabled logs on the server for plugin processing (use --debug) | |
26 | * Fix bug when parsing hostnames in Nessus reports | |
27 | * Fix SSLyze report automatic detection, so reports can be imported from the web ui | |
28 | * Update Dnsmap Plugin |
0 | * Add vulnerability preview to status report | |
1 | * Update Fierce Plugin. Import can be done from GTK console. | |
2 | * Update Goohost plugin and now Faraday imports Goohost .txt report. | |
3 | * Update plugin to support WPScan v3.4.5 | |
4 | * Update Qualysguard plugin to version 8.17.1.0.2 | |
5 | * Update custom fields with Searcher | |
6 | * Update Recon-ng Plugin so that it accepts XML reports | |
7 | * Add PostgreSQL version to status-check command | |
8 | * Couchdb configuration section will not be added anymore | |
9 | * Add unit test for config/default.xml |
0 | * Add parser for connection string at PGCli connection | |
1 | * Fix bug when using custom fields: use the field_name instead of the display_name | |
2 | * Fix user's menu visibility when vuln detail is open. | |
3 | * Fix bug in status report that incorrectly showed standard vulns as if they were vulnwebs
0 | * Refactor the project to use absolute imports to make the installation easier | |
1 | (with a setup.py file). This also was a first step to make our codebase | |
2 | compatible with python 3. | |
3 | * Change the commands used to run faraday. `./faraday-server.py`, | |
4 | `./manage.py`, `./faraday.py` and `bin/flugin` are replaced by `faraday-server`, `faraday-manage`, | |
5 | `faraday-client` and `fplugin` respectively | |
6 | * Changed suggested installation method. Now we provide binary executables with all python dependencies | |
7 | embedded into them | |
8 | * Add admin panel to the Web UI to manage custom fields | |
9 | * Fix slow host list when creating vulns in a workspace with many hosts | |
10 | * Usability improvements in status report: change the way vulns are selected and confirmed | |
11 | * Improve workspace creation from the Web UI | |
12 | * Fix attachment api when file was not found in .faraday/storage | |
13 | * Fix visualization of the fields Policy Violations and References. | |
14 | * Add a setting in server.ini to display the Vulnerability Cost widget of the Dashboard | |
15 | * Fix status report resize when the browser console closes. | |
16 | * Fix severity dropdown when creating vulnerability templates | |
17 | * Update OS icons in the Web UI. | |
18 | * Fix bug when using custom fields: use the field\_name instead of the display\_name | |
19 | * Prevent creation of custom fields with the same name | |
20 | * Add custom fields to vuln templates. | |
21 | * Fix user's menu visibility when vuln detail is open | |
22 | * Remove "show all" option in the status report pagination | |
23 | * The activity feed widget of the dashboard now displays the hostname of the | |
24 | machine that ran each command | |
25 | * Add loading spinner in hosts report. | |
26 | * Fix "invalid dsn" bug in sql-shell | |
27 | * Fix hostnames bug in Nikto and Core Impact plugins | |
28 | * Change Openvas plugin: Low and Debug threats are not taken as vulnerabilities. | |
29 | * Add fplugin command to close vulns created after a certain time | |
30 | * Add list-plugins command to faraday-manage to see all available plugins | |
31 | * Fix a logging error in PluginBase class | |
32 | * Fix an error when using NexposePlugin from command line. | |
33 | * Add CSV parser to Dnsmap Plugin | |
34 | * Fix bug when creating web vulnerabilities in dirb plugin | |
35 | * Change Nexpose Severity Mappings. |
0 | * Add configurations for websocket ssl |
0 | * Add agents feature for distributed plugin execution | |
1 | * Add an API endpoint to perform a bulk create of many objects (hosts, | |
2 | services, vulns, commands and credentials). This is used to avoid doing a lot | |
3 | of API requests to upload data. Now one request should be enough (a request sketch follows this list) | |
4 | * Major style and color changes to the Web UI | |
5 | * Add API token authentication method | |
6 | * Use server side stored sessions to properly invalidate cookies of logged out users | |
7 | * Add "New" button to create credentials without host or service assigned yet | |
8 | * Allow filtering hosts by their services' ports in the Web UI | |
9 | * Performance improvements in vulnerabilities and vulnerability templates API (they | |
10 | were doing a lot of SQL queries because of a programming bug) | |
11 | * Require being in the faraday-manage group when running faraday from a .deb or .rpm package | |
12 | * Change the first page shown after the user logs in. Now it displays a workspace | |
13 | selection dialog | |
14 | * Add API endpoint to import Vuln Templates from a CSV file | |
15 | * Create the exported CSV of the status report in the backend instead of in the | |
16 | frontend, which was much slower | |
17 | * Add API endpoint to import hosts from a CSV file | |
18 | * Add `faraday-manage rename-user` command to change a user's username | |
19 | * Allow resizing columns in Vulnerability Templates view | |
20 | * Avoid copying technical details when a vuln template is generated from the status report | |
21 | * Use exact matches when searching vulns by target | |
22 | * Add API endpoint to get which tools impacted a host | |
23 | * Add pagination to activity feed | |
24 | * Add ordering for date and creator to vuln templates view | |
25 | * Modify tabs in vuln template, add Details tab | |
26 | * Add copy IP to clipboard button in hosts view | |
27 | * Add creator and create date columns to vuln template view | |
28 | * When a plugin creates a host with its IP set to a domain name, | |
29 | resolve the IP address of that domain | |
30 | * Add support for logging in RFC 5424 (syslog) format | |
31 | * Add active filter in workspaces view. Only show active workspaces | |
32 | in other parts of the Web UI | |
33 | * Enforce end date to be greater than start date in workspaces API | |
34 | * Fix bug in `faraday-manage create-tables` that incorrectly marked schema | |
35 | migrations as applied | |
36 | * Fix bug in many plugins that loaded hostnames incorrectly (one hostname per character) | |
37 | * Improve references parsing in OpenVAS plugin | |
38 | * Fix a bug in Nessus plugin when parsing reports without host\_start | |
39 | * Fix hostname search in the status report | |
40 | * Fix showing of services with large names in the Web UI | |
41 | * Fix broken select all hosts checkbox | |
42 | * Fix bug viewing an attachment/evidence when its filename contained whitespaces | |
43 | * Fix "Are you sure you want to quit Faraday?" dialog showing twice in GTK |
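
The bulk-create endpoint and the token authentication introduced above combine naturally in upload automation. A minimal sketch, assuming a `Token` authorization header and a payload shape inferred from the changelog (hosts with nested services); neither is a verified schema:

```python
import requests  # third-party: pip install requests

TOKEN = "<api-token>"  # as issued by the token endpoint after login

payload = {
    "hosts": [
        {
            "ip": "192.168.0.10",
            "description": "created via bulk_create",
            "services": [{"name": "http", "port": 80, "protocol": "tcp"}],
        }
    ],
}

# One request uploads hosts, services, vulns, commands and credentials together.
resp = requests.post(
    "http://localhost:5985/_api/v2/ws/demo/bulk_create/",  # path is an assumption
    json=payload,
    headers={"Authorization": f"Token {TOKEN}"},
)
print(resp.status_code)
```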
0 | * Fix unicode error when exporting vulns to CSV | |
1 | * Add vuln attributes to CSV | |
2 | * Fix hostname parsing and add external ID to Qualys plugin |
1 | 1 | ===================================== |
2 | 2 | |
3 | 3 | |
4 | 3.15.0 [May 18th, 2021]: | |
5 | --- | |
6 | ||
7 | * ADD `Basic Auth` support | |
8 | * ADD support for GET method in websocket_tokens, POST will be deprecated in the future | |
9 | * ADD CVSS(String), CWE(String), CVE(relationship) columns to vulnerability model and API | |
10 | * ADD agent token's API now reports the renewal cycle duration | |
11 | * MOD Improve database model to be able to delete workspaces quickly | |
12 | * MOD Improve code style and uses (less flake8 exceptions, py3 `super` style, Flask app as singleton, etc) | |
13 | * MOD workspaces' name regex to verify they cannot contain a forward slash (`/`) | |
14 | * MOD Improve bulk create logs | |
15 | * FIX Own schema breaking Marshmallow 3.11.0+ | |
16 | * UPD flask_security_too to version 4.0.0+ | |
17 | ||
4 | 18 | 3.14.4 [Apr 15th, 2021]: |
5 | 19 | --- |
6 | 20 | * Updated plugins package, which update appscan plugin |
7 | ||
8 | 21 | |
9 | 22 | 3.14.3 [Mar 30th, 2021]: |
10 | 23 | --- |
107 | 120 | * Cleanup old sessions when a user logs in |
108 | 121 | * Remove unmaintained Flask-Restless dependency |
109 | 122 | * Remove pbkdf2\_sha1 and plain password schemes. We only support bcrypt |
123 | ||
124 | 3.11.2: | |
125 | --- | |
110 | 126 | |
111 | 127 | 3.11.1 [Jun 3rd, 2020]: |
112 | 128 | --- |
310 | 326 | * Fix user's menu visibility when vuln detail is open.
311 | 327 | * Fix bug in status report that incorrectly showed standard vulns like if they were vulnwebs |
312 | 328 | |
313 | 3.7.2: | |
314 | --- | |
315 | ||
316 | 329 | 3.7: |
317 | 330 | --- |
318 | 331 | * Add vulnerability preview to status report |
0 | import json | |
1 | from pathlib import Path | |
2 | from typing import Dict | |
0 | 3 | |
1 | import os | |
2 | 4 | import packaging.version |
3 | 5 | |
4 | LEVEL = "white" | |
6 | LEVEL = "community" | |
7 | LEVELS = ["community", "prof", "corp"] if LEVEL == "corp" else ["community"] | |
8 | MD_FILES = ["community.md", "prof.md", "corp.md", "date.md"] if LEVEL == "corp" else ["community.md", "date.md"] | |
5 | 9 | |
6 | def match(elem): | |
10 | ||
11 | def match(elem: str): | |
7 | 12 | try: |
8 | 13 | ans = packaging.version.Version(elem) |
9 | 14 | except packaging.version.InvalidVersion as e: |
11 | 16 | return False |
12 | 17 | return ans |
13 | 18 | |
14 | IGNORED_FILES = ["white.md", "pink.md", "black.md", "date.md"] | |
15 | 19 | |
16 | def addFile(filename,changelog_file,to=None): | |
17 | with open(filename, "r") as date_file: | |
18 | if to: | |
19 | changelog_file.write(date_file.readline()[:to]) | |
20 | else: | |
21 | changelog_file.writelines(date_file.readlines()) | |
20 | def add_md_file(filename, changelog_file): | |
21 | with filename.open("r") as date_file: | |
22 | changelog_file.write(date_file.readline()[:-1]) | |
22 | 23 | |
23 | def main(level): | |
24 | 24 | |
25 | ls_ans = os.listdir(".") | |
26 | folders = list(sorted(filter(lambda el: el, map(lambda elem: match(elem),ls_ans)),reverse=True)) | |
27 | with open("RELEASE.md","w") as changelog_file: | |
25 | def get_md_text_from_json_file(filepath: Path, level_dict): | |
26 | with filepath.open("r") as file: | |
27 | file_json: Dict = json.loads(file.read()) | |
28 | level = file_json.get("level") | |
29 | level_dict[level] += f" * {file_json.get('md')}\n" | |
30 | ||
31 | ||
32 | def main(): | |
33 | ls_ans = [path.name for path in Path(__file__).parent.iterdir()] | |
34 | folders = list(sorted(filter(lambda el: el, map(lambda elem: match(elem), ls_ans)), reverse=True)) | |
35 | with (Path(__file__).parent / "RELEASE.md").open("w") as changelog_file: | |
28 | 36 | if "header.md" in ls_ans: |
29 | with open("header.md", "r") as header_file: | |
37 | with (Path(__file__).parent / "header.md").open("r") as header_file: | |
30 | 38 | changelog_file.writelines(header_file.readlines()) |
31 | 39 | changelog_file.writelines("\n\n") |
32 | 40 | for folder in folders: |
33 | 41 | changelog_file.write(str(folder)) |
34 | inner_files = list(filter(lambda elem: elem.endswith(".md") ,os.listdir("./" + str(folder)))) | |
35 | if "date.md" in inner_files: | |
42 | inner_files = list(filter(lambda elem: elem.suffix == ".json" or elem.name in MD_FILES, | |
43 | (Path(__file__).parent / str(folder)).iterdir())) | |
44 | if any([file.name == "date.md" for file in inner_files]): | |
36 | 45 | changelog_file.write(" [") |
37 | addFile("./" + str(folder) + "/date.md",changelog_file,-1) | |
46 | add_md_file(Path(__file__).parent / str(folder) / "date.md", changelog_file) | |
38 | 47 | changelog_file.write("]") |
39 | 48 | changelog_file.writelines(":\n---\n") |
40 | if level != "white": | |
41 | addFile("./" + str(folder) + "/white.md",changelog_file) | |
42 | if level == "black": | |
43 | addFile("./" + str(folder) + "/pink.md",changelog_file) | |
44 | level_filename = "./" + str(folder) + "/" + level + ".md" | |
45 | 49 | |
46 | previous = [""] | |
47 | if level + ".md" in os.listdir("./" + str(folder)): | |
48 | with open(level_filename, "r") as level_file: | |
49 | previous = level_file.readlines() | |
50 | ||
51 | with open(level_filename, "w") as level_file: | |
52 | level_file.writelines(previous) | |
53 | for inner_file_name in inner_files: | |
54 | if inner_file_name not in IGNORED_FILES: | |
55 | level_file.write(" * ") | |
56 | addFile("./" + str(folder) + "/" + inner_file_name, level_file) | |
57 | level_file.write("\n") | |
58 | os.remove("./" + str(folder) + "/" + inner_file_name) | |
59 | addFile(level_filename, changelog_file) | |
50 | level_dicts = {level: "" for level in LEVELS} | |
51 | for level in LEVELS: | |
52 | if any([file.name == f"{level}.md" for file in inner_files]): | |
53 | with (Path(__file__).parent / str(folder) / f"{level}.md").open("r") as level_file: | |
54 | level_dicts[level] = level_file.read() | |
55 | for inner_file in filter(lambda elem: elem.suffix == ".json", inner_files): | |
56 | get_md_text_from_json_file(inner_file, level_dicts) | |
57 | inner_file.unlink() | |
58 | for level in LEVELS: | |
59 | with (Path(__file__).parent / str(folder) / f"{level}.md").open("w") as level_file: | |
60 | level_file.write(level_dicts[level]) | |
61 | changelog_file.write(level_dicts[level]) | |
60 | 62 | changelog_file.writelines("\n") |
61 | 63 | |
62 | 64 | if "footer.md" in ls_ans: |
63 | with open("footer.md", "r") as footer_file: | |
65 | with (Path(__file__).parent / "footer.md").open("r") as footer_file: | |
64 | 66 | changelog_file.writelines(footer_file.readlines()) |
65 | 67 | |
66 | 68 | |
67 | 69 | if __name__ == '__main__': |
68 | level = LEVEL # if not level_passed else level_pased | |
69 | main(level) | |
70 | ||
71 | # I'm Py3 | |
70 | main() |
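
Under the rewritten script above, each changelog entry lives as a small JSON file inside a version folder: a `level` key selecting the tier (`community`, `prof`, `corp`) and an `md` key with the bullet text, which `get_md_text_from_json_file` merges into that tier's `.md` file. A sketch of creating one such entry (folder name and wording are examples only):

```python
import json
from pathlib import Path

# One entry per file; the release script consumes and deletes these.
entry = {"level": "community", "md": "Fix example bug in the vulns API"}

version_folder = Path("3.15.1")  # example version folder
version_folder.mkdir(exist_ok=True)
(version_folder / "fix_example_bug.json").write_text(json.dumps(entry))
```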
14 | 14 | assert file == "keep", file |
15 | 15 | version_folder = changelog_folder / version_id |
16 | 16 | for file in os.listdir(version_folder): |
17 | assert file in ["date.md", "white.md", "pink.md", "black.md"], file | |
17 | assert file in ["date.md", "community.md", "prof.md", "corp.md"], file | |
18 | 18 | |
19 | 19 | |
20 | 20 | if __name__ == '__main__': |
29 | 29 | $ source faraday_env/bin/activate |
30 | 30 | $ git clone [email protected]:infobyte/faraday.git |
31 | 31 | $ cd faraday |
32 | $ git clone [email protected]:infobyte/faraday-angular.git faraday/frontend | |
32 | $ git clone https://github.com/infobyte/faraday_angular_frontend.git faraday/frontend | |
33 | 33 | $ pip install . |
34 | 34 | ``` |
35 | 35 |
1 | 1 | # Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | __version__ = '3.14.4' | |
4 | __version__ = '3.15.0' | |
5 | 5 | __license_version__ = __version__ |
9 | 9 | import sys |
10 | 10 | import platform |
11 | 11 | import logging |
12 | ||
12 | 13 | # If is linux and its installed with deb or rpm, it must run with a user in the faraday group |
13 | 14 | if platform.system() == "Linux": |
14 | 15 | import grp |
15 | 16 | from getpass import getuser |
17 | ||
16 | 18 | try: |
17 | 19 | FARADAY_GROUP = "faraday" |
18 | 20 | faraday_group = grp.getgrnam(FARADAY_GROUP) |
19 | #The current user may be different from the logged user | |
21 | # The current user may be different from the logged user | |
20 | 22 | current_user = getuser() |
21 | 23 | if current_user != 'root' and faraday_group.gr_gid not in os.getgroups(): |
22 | 24 | print(f"\n\nUser ({os.getlogin()}) must be in the '{FARADAY_GROUP}' group.") |
47 | 49 | from faraday.server.commands import nginx_config |
48 | 50 | from faraday.server.commands import import_vulnerability_template |
49 | 51 | from faraday.server.models import db, User |
50 | from faraday.server.web import app | |
52 | from faraday.server.web import get_app | |
51 | 53 | from faraday_plugins.plugins.manager import PluginsManager |
52 | 54 | from flask_security.utils import hash_password |
53 | 55 | |
54 | ||
55 | 56 | CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help']) |
56 | 57 | |
57 | #logger = logging.getLogger(__name__) | |
58 | ||
59 | # logger = logging.getLogger(__name__) | |
60 | ||
58 | 61 | |
59 | 62 | @click.group(context_settings=CONTEXT_SETTINGS) |
60 | 63 | def cli(): |
79 | 82 | openapi_format(format="yaml", server=server, no_servers=no_servers) |
80 | 83 | |
81 | 84 | |
82 | ||
83 | 85 | @click.command(help="Import Vulnerability templates") |
84 | 86 | @click.option('--language', required=False, default='en') |
85 | 87 | @click.option('--list-languages', is_flag=True) |
89 | 91 | |
90 | 92 | @click.command(help="Create Faraday DB in Postgresql, also tables and indexes") |
91 | 93 | @click.option( |
92 | '--choose-password', is_flag=True, default=False, | |
93 | help=('Instead of using a random password for the user "faraday", ' | |
94 | 'ask for the desired one') | |
95 | ) | |
94 | '--choose-password', is_flag=True, default=False, | |
95 | help=('Instead of using a random password for the user "faraday", ' | |
96 | 'ask for the desired one') | |
97 | ) | |
96 | 98 | @click.option( |
97 | '--password', type=str, default=False, | |
98 | help=('Instead of using a random password for the user "faraday", ' | |
99 | 'use the one provided') | |
100 | ) | |
99 | '--password', type=str, default=False, | |
100 | help=('Instead of using a random password for the user "faraday", ' | |
101 | 'use the one provided') | |
102 | ) | |
101 | 103 | def initdb(choose_password, password): |
102 | with app.app_context(): | |
104 | with get_app().app_context(): | |
103 | 105 | InitDB().run(choose_password=choose_password, faraday_user_password=password) |
104 | 106 | |
105 | 107 | |
128 | 130 | @click.option('--check_dependencies', default=False, is_flag=True) |
129 | 131 | @click.option('--check_config', default=False, is_flag=True) |
130 | 132 | def status_check(check_postgresql, check_faraday, check_dependencies, check_config): |
131 | ||
132 | 133 | selected = False |
133 | 134 | exit_code = 0 |
134 | 135 | if check_postgresql: |
167 | 168 | |
168 | 169 | |
169 | 170 | def validate_user_unique_field(ctx, param, value): |
170 | with app.app_context(): | |
171 | with get_app().app_context(): | |
171 | 172 | try: |
172 | 173 | if User.query.filter_by(**{param.name: value}).count(): |
173 | 174 | raise click.ClickException("User already exists") |
189 | 190 | # Also validate that the email doesn't exist in the database |
190 | 191 | return validate_user_unique_field(ctx, param, value) |
191 | 192 | |
193 | ||
192 | 194 | @click.command(help="List Available Plugins") |
193 | 195 | def list_plugins(): |
194 | 196 | plugins_manager = PluginsManager() |
195 | 197 | for _, plugin in plugins_manager.get_plugins(): |
196 | 198 | click.echo(f"{plugin.id}") |
199 | ||
197 | 200 | |
198 | 201 | @click.command(help="Create ADMIN user for Faraday application") |
199 | 202 | @click.option('--username', prompt=True, callback=validate_user_unique_field) |
201 | 204 | @click.option('--password', prompt=True, hide_input=True, |
202 | 205 | confirmation_prompt=True) |
203 | 206 | def create_superuser(username, email, password): |
204 | with app.app_context(): | |
207 | with get_app().app_context(): | |
205 | 208 | if db.session.query(User).filter_by(active=True).count() > 0: |
206 | print("Can't create more users. The comumunity edition only allows one user. Please contact support for further information.") | |
209 | print( | |
210 | "Can't create more users. The comumunity edition only allows one user. Please contact support for further information.") | |
207 | 211 | sys.exit(1) |
208 | 212 | |
209 | app.user_datastore.create_user(username=username, | |
213 | get_app().user_datastore.create_user(username=username, | |
210 | 214 | email=email, |
211 | 215 | password=hash_password(password), |
212 | 216 | role='admin', |
218 | 222 | |
219 | 223 | |
220 | 224 | @click.command(help="Create database tables. Requires a functional " |
221 | "PostgreSQL database configured in the server.ini") | |
225 | "PostgreSQL database configured in the server.ini") | |
222 | 226 | def create_tables(): |
223 | with app.app_context(): | |
227 | with get_app().app_context(): | |
224 | 228 | # Ugly hack to create tables while also setting the alembic revision
225 | 229 | conn_string = faraday.server.config.database.connection_string |
226 | 230 | if not conn_string: |
236 | 240 | 'Tables created successfully!', |
237 | 241 | fg='green', bold=True)) |
238 | 242 | |
243 | ||
239 | 244 | @click.command(help="Generates a .zip file with technical information") |
240 | 245 | def support(): |
241 | 246 | support_zip.all_for_support() |
242 | 247 | |
243 | 248 | |
244 | 249 | @click.command( |
245 | context_settings={"ignore_unknown_options": True}, | |
246 | help='Migrates database schema. If the target revision ' | |
247 | 'is not specified, use "head" when upgrading and "-1" when ' | |
248 | 'downgrading') | |
250 | context_settings={"ignore_unknown_options": True}, | |
251 | help='Migrates database schema. If the target revision ' | |
252 | 'is not specified, use "head" when upgrading and "-1" when ' | |
253 | 'downgrading') | |
249 | 254 | @click.option( |
250 | '--downgrade', | |
251 | help="Perform a downgrade migration instead of an upgrade one", | |
252 | is_flag=True) | |
255 | '--downgrade', | |
256 | help="Perform a downgrade migration instead of an upgrade one", | |
257 | is_flag=True) | |
253 | 258 | @click.argument( |
254 | 'revision', | |
255 | required=False, | |
256 | ) | |
259 | 'revision', | |
260 | required=False, | |
261 | ) | |
257 | 262 | def migrate(downgrade, revision): |
258 | 263 | try: |
259 | 264 | revision = revision or ("-1" if downgrade else "head") |
290 | 295 | @click.option('--current_username', required=True, prompt=True) |
291 | 296 | @click.option('--new_username', required=True, prompt=True) |
292 | 297 | def rename_user(current_username, new_username): |
293 | if(current_username == new_username): | |
298 | if (current_username == new_username): | |
294 | 299 | print("\nERROR: Usernames must be different.") |
295 | 300 | sys.exit(1) |
296 | 301 | else: |
297 | 302 | change_username.change_username(current_username, new_username) |
303 | ||
298 | 304 | |
299 | 305 | @click.command(help="Generate nginx config") |
300 | 306 | @click.option('--fqdn', prompt='Server FQDN', help='The FQDN of your faraday server', type=str, show_default=True) |
301 | 307 | @click.option('--port', prompt='Faraday port', help='Faraday listening port', type=int, default=5985) |
302 | 308 | @click.option('--ws-port', prompt='Faraday Websocket port', help='Faraday websocket listening port', type=int, |
303 | 309 | default=9000, show_default=True) |
304 | @click.option('--ssl-certificate', prompt='SSL Certificate Path', help='SSL Certificate Path', type=click.Path(exists=True)) | |
310 | @click.option('--ssl-certificate', prompt='SSL Certificate Path', help='SSL Certificate Path', | |
311 | type=click.Path(exists=True)) | |
305 | 312 | @click.option('--ssl-key', prompt='SSL Key Path', help='SSL Key Path', type=click.Path(exists=True)) |
306 | 313 | @click.option('--multitenant-url', help='URL for multitenant config', type=str) |
307 | 314 | def generate_nginx_config(fqdn, port, ws_port, ssl_certificate, ssl_key, multitenant_url): |
308 | 315 | nginx_config.generate_nginx_config(fqdn, port, ws_port, ssl_certificate, ssl_key, multitenant_url) |
316 | ||
309 | 317 | |
310 | 318 | cli.add_command(show_urls) |
311 | 319 | cli.add_command(initdb) |
325 | 333 | cli.add_command(generate_nginx_config) |
326 | 334 | cli.add_command(import_vulnerability_templates) |
327 | 335 | |
328 | ||
329 | 336 | if __name__ == '__main__': |
330 | ||
331 | 337 | cli() |
332 | 338 | |
333 | ||
334 | 339 | # I'm Py3 |
0 | 0 | |
1 | 1 | import logging |
2 | 2 | import faraday.server.config |
3 | from faraday.server.web import app | |
3 | from faraday.server.web import get_app | |
4 | 4 | from faraday.server.models import db |
5 | 5 | |
6 | 6 | from alembic import context |
7 | from sqlalchemy import engine_from_config, pool | |
8 | 7 | from logging.config import fileConfig |
9 | 8 | |
10 | 9 | # this is the Alembic Config object, which provides |
61 | 60 | and associate a connection with the context. |
62 | 61 | |
63 | 62 | """ |
64 | with app.app_context(): | |
63 | with get_app().app_context(): | |
65 | 64 | connectable = db.engine |
66 | 65 | |
67 | 66 | with connectable.connect() as connection: |
68 | 67 | context.configure( |
69 | 68 | connection=connection, |
70 | target_metadata=target_metadata | |
69 | target_metadata=target_metadata, | |
70 | compare_type=True | |
71 | 71 | ) |
72 | 72 | |
73 | 73 | with context.begin_transaction(): |
74 | 74 | context.run_migrations() |
75 | ||
75 | 76 | |
76 | 77 | if context.is_offline_mode(): |
77 | 78 | run_migrations_offline() |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | ||
10 | 9 | |
11 | 10 | # revision identifiers, used by Alembic. |
12 | 11 | revision = '0d216660da28' |
24 | 23 | 'executive_report', |
25 | 24 | 'workspace', |
26 | 25 | 'task' |
27 | ) | |
26 | ) | |
28 | 27 | |
29 | 28 | |
30 | 29 | def upgrade(): |
43 | 42 | # the syntax of the SQL is invalid for postgresql and it also tries to
44 | 43 | # create the enum when it already exists. |
45 | 44 | op.add_column('notification', |
46 | sa.Column( | |
47 | 'object_type', | |
48 | sa.Enum(OBJECT_TYPES, name='object_types'), | |
49 | nullable=False | |
50 | ) | |
51 | ) | |
52 | ||
45 | sa.Column( | |
46 | 'object_type', | |
47 | sa.Enum(OBJECT_TYPES, name='object_types'), | |
48 | nullable=False | |
49 | ) | |
50 | ) | |
53 | 51 | |
54 | 52 | op.create_foreign_key( |
55 | 53 | 'notification_user_id_fkey', |
66 | 64 | |
67 | 65 | def downgrade(): |
68 | 66 | op.drop_table('notification') |
69 | #op.drop_constraint(None, 'notification_user_id_fkey', type_='foreignkey') | |
70 | #op.drop_constraint(None, 'notification_workspace_id_fkey', type_='foreignkey') | |
67 | # op.drop_constraint(None, 'notification_user_id_fkey', type_='foreignkey') | |
68 | # op.drop_constraint(None, 'notification_workspace_id_fkey', type_='foreignkey') | |
71 | 69 | # I'm Py3 |
0 | """add cascade delete from workspace | |
1 | ||
2 | Revision ID: 18891ca61db6 | |
3 | Revises: aa56852fa76d | |
4 | Create Date: 2021-04-08 12:09:04.182543+00:00 | |
5 | ||
6 | """ | |
7 | from alembic import op | |
8 | ||
9 | # revision identifiers, used by Alembic. | |
10 | revision = '18891ca61db6' | |
11 | down_revision = 'aa56852fa76d' | |
12 | branch_labels = None | |
13 | depends_on = None | |
14 | ||
15 | ||
16 | def upgrade(): | |
17 | # ### commands auto generated by Alembic - please adjust! ### | |
18 | op.drop_constraint('credential_workspace_id_fkey', 'credential', type_='foreignkey') | |
19 | op.create_foreign_key('credential_workspace_id_fkey', 'credential', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE') | |
20 | op.drop_constraint('host_workspace_id_fkey', 'host', type_='foreignkey') | |
21 | op.create_foreign_key('host_workspace_id_fkey', 'host', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE') | |
22 | op.drop_constraint('hostname_workspace_id_fkey', 'hostname', type_='foreignkey') | |
23 | op.create_foreign_key('hostname_workspace_id_fkey', 'hostname', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE') | |
24 | op.drop_constraint('rule_action_uc', 'rule_action', type_='unique') | |
25 | op.drop_constraint('service_workspace_id_fkey', 'service', type_='foreignkey') | |
26 | op.create_foreign_key('service_workspace_id_fkey', 'service', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE') | |
27 | op.drop_constraint('vulnerability_workspace_id_fkey', 'vulnerability', type_='foreignkey') | |
28 | op.create_foreign_key('vulnerability_workspace_id_fkey', 'vulnerability', 'workspace', ['workspace_id'], ['id'], ondelete='CASCADE') | |
29 | # ### end Alembic commands ### | |
30 | ||
31 | ||
32 | def downgrade(): | |
33 | # ### commands auto generated by Alembic - please adjust! ### | |
34 | op.drop_constraint('vulnerability_workspace_id_fkey', 'vulnerability', type_='foreignkey') | |
35 | op.create_foreign_key('vulnerability_workspace_id_fkey', 'vulnerability', 'workspace', ['workspace_id'], ['id']) | |
36 | op.drop_constraint('service_workspace_id_fkey', 'service', type_='foreignkey') | |
37 | op.create_foreign_key('service_workspace_id_fkey', 'service', 'workspace', ['workspace_id'], ['id']) | |
38 | op.create_unique_constraint('rule_action_uc', 'rule_action', ['rule_id', 'action_id']) | |
39 | op.drop_constraint('hostname_workspace_id_fkey', 'hostname', type_='foreignkey') | |
40 | op.create_foreign_key('hostname_workspace_id_fkey', 'hostname', 'workspace', ['workspace_id'], ['id']) | |
41 | op.drop_constraint('host_workspace_id_fkey', 'host', type_='foreignkey') | |
42 | op.create_foreign_key('host_workspace_id_fkey', 'host', 'workspace', ['workspace_id'], ['id']) | |
43 | op.drop_constraint('credential_workspace_id_fkey', 'credential', type_='foreignkey') | |
44 | op.create_foreign_key('credential_workspace_id_fkey', 'credential', 'workspace', ['workspace_id'], ['id']) | |
45 | # ### end Alembic commands ### |
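As a side note on what this migration buys: with every workspace foreign key rebuilt as `ON DELETE CASCADE`, dropping a workspace row lets PostgreSQL remove the dependent rows itself. A minimal sketch, assuming a local PostgreSQL and illustrative credentials:

```python
# Hedged sketch: after this migration, one DELETE on workspace cascades to
# credential, host, hostname, service and vulnerability rows automatically.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://faraday:secret@localhost/faraday")  # assumed DSN
with engine.begin() as conn:
    conn.execute(text("DELETE FROM workspace WHERE name = :name"),
                 {"name": "old-workspace"})  # dependents go with it via CASCADE
```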
6 | 6 | """ |
7 | 7 | import json |
8 | 8 | from alembic import op |
9 | import sqlalchemy as sa | |
10 | 9 | from sqlalchemy.sql import text |
11 | 10 | |
12 | 11 |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | 9 | |
10 | ||
11 | 10 | # revision identifiers, used by Alembic. |
12 | 11 | revision = '1dbe9e8e4247' |
13 | 12 | down_revision = 'f8a44acd0e41' |
17 | 16 | |
18 | 17 | def upgrade(): |
19 | 18 | op.add_column('rule_execution', |
20 | sa.Column( | |
21 | 'start', | |
22 | sa.DateTime(), nullable=True | |
23 | ) | |
24 | ) | |
19 | sa.Column( | |
20 | 'start', | |
21 | sa.DateTime(), nullable=True | |
22 | ) | |
23 | ) | |
25 | 24 | op.add_column('rule_execution', |
26 | sa.Column( | |
27 | 'end', | |
28 | sa.DateTime(), nullable=True | |
29 | ) | |
30 | ) | |
25 | sa.Column( | |
26 | 'end', | |
27 | sa.DateTime(), nullable=True | |
28 | ) | |
29 | ) | |
31 | 30 | |
32 | 31 | |
33 | 32 | def downgrade(): |
34 | op.drop_column('rule_execution','start') | |
35 | op.drop_column('rule_execution','end') | |
33 | op.drop_column('rule_execution', 'start') | |
34 | op.drop_column('rule_execution', 'end') |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | ||
10 | 9 | |
11 | 10 | # revision identifiers, used by Alembic. |
12 | 11 | revision = '20f3d0c2f71f' |
20 | 19 | sa.Column( |
21 | 20 | 'advanced_filter_parsed', |
22 | 21 | sa.String(255), |
23 | nullable = False, | |
24 | server_default = "" | |
22 | nullable=False, | |
23 | server_default="" | |
25 | 24 | ) |
26 | ) | |
25 | ) | |
27 | 26 | |
28 | 27 | |
29 | 28 | def downgrade(): |
30 | op.drop_column('executive_report','advanced_filter_parsed') | |
29 | op.drop_column('executive_report', 'advanced_filter_parsed') |
5 | 5 | |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | import sqlalchemy as sa | |
9 | 8 | |
10 | 9 | |
11 | 10 | # revision identifiers, used by Alembic. |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | 9 | |
10 | ||
11 | 10 | # revision identifiers, used by Alembic. |
12 | 11 | revision = '526aa91cac98' |
13 | 12 | down_revision = '085188e0a016' |
14 | 13 | branch_labels = None |
15 | 14 | depends_on = None |
16 | 15 | |
17 | #TODO Verify that the foreign keys are dropped | |
16 | ||
17 | # TODO Verify that the foreign keys are dropped | |
18 | ||
18 | 19 | |
19 | 20 | def upgrade(): |
20 | 21 | conn = op.get_bind() |
29 | 30 | sa.Column('tool_name', sa.Text), |
30 | 31 | sa.Column('false_positive', sa.Integer, nullable=False, default=0), |
31 | 32 | sa.Column('verified', sa.Integer, nullable=False, default=0), |
32 | sa.UniqueConstraint('external_identifier', 'tool_name', 'reference_id', name='uix_externalidentifier_toolname_referenceid') | |
33 | ) | |
33 | sa.UniqueConstraint('external_identifier', 'tool_name', 'reference_id', | |
34 | name='uix_externalidentifier_toolname_referenceid') | |
35 | ) | |
34 | 36 | |
35 | 37 | op.create_foreign_key( |
36 | 38 | 'knowledge_base_vulnerability_template_id_fkey', 'knowledge_base', |
38 | 40 | ) |
39 | 41 | |
40 | 42 | op.add_column('vulnerability', |
41 | sa.Column( | |
42 | 'association_date', | |
43 | sa.DateTime(), nullable=True | |
44 | ) | |
45 | ) | |
43 | sa.Column( | |
44 | 'association_date', | |
45 | sa.DateTime(), nullable=True | |
46 | ) | |
47 | ) | |
46 | 48 | |
47 | 49 | op.add_column('vulnerability', |
48 | sa.Column( | |
49 | 'vulnerability_template_id', | |
50 | sa.Integer(), | |
51 | nullable=True | |
52 | ) | |
53 | ) | |
50 | sa.Column( | |
51 | 'vulnerability_template_id', | |
52 | sa.Integer(), | |
53 | nullable=True | |
54 | ) | |
55 | ) | |
54 | 56 | |
55 | 57 | op.create_foreign_key( |
56 | 58 | 'vulnerability_vulnerability_template_id_fkey', |
60 | 62 | ) |
61 | 63 | |
62 | 64 | op.add_column('vulnerability', |
63 | sa.Column( | |
64 | 'vulnerability_duplicate_id', | |
65 | sa.Integer(), | |
66 | nullable=True | |
67 | ) | |
68 | ) | |
65 | sa.Column( | |
66 | 'vulnerability_duplicate_id', | |
67 | sa.Integer(), | |
68 | nullable=True | |
69 | ) | |
70 | ) | |
69 | 71 | |
70 | 72 | op.add_column('vulnerability', |
71 | sa.Column( | |
72 | 'disassociated_manually', | |
73 | sa.Boolean(), | |
74 | nullable=False, | |
75 | server_default='false', | |
76 | ) | |
77 | ) | |
73 | sa.Column( | |
74 | 'disassociated_manually', | |
75 | sa.Boolean(), | |
76 | nullable=False, | |
77 | server_default='false', | |
78 | ) | |
79 | ) | |
78 | 80 | |
79 | 81 | op.create_foreign_key( |
80 | 82 | 'vulnerability_vulnerability_duplicate_id_fkey', |
83 | 85 | ) |
84 | 86 | |
85 | 87 | op.add_column('vulnerability_template', |
86 | sa.Column( | |
87 | 'shipped', | |
88 | sa.Boolean(), | |
89 | nullable=False, | |
90 | server_default='false', | |
91 | ) | |
92 | ) | |
88 | sa.Column( | |
89 | 'shipped', | |
90 | sa.Boolean(), | |
91 | nullable=False, | |
92 | server_default='false', | |
93 | ) | |
94 | ) | |
93 | 95 | |
94 | 96 | conn.execute('ALTER TABLE vulnerability_template DROP CONSTRAINT uix_vulnerability_template_name') |
95 | conn.execute('ALTER TABLE vulnerability_template ADD CONSTRAINT uix_vulnerability_template_name UNIQUE (name, shipped)') | |
97 | conn.execute( | |
98 | 'ALTER TABLE vulnerability_template ADD CONSTRAINT uix_vulnerability_template_name UNIQUE (name, shipped)') | |
99 | ||
96 | 100 | |
97 | 101 | def downgrade(): |
98 | ||
99 | 102 | conn = op.get_bind() |
100 | 103 | |
104 | conn.execute('ALTER TABLE vulnerability_template DROP CONSTRAINT uix_vulnerability_template_name') | |
105 | op.drop_column('vulnerability_template', 'shipped') | |
106 | conn.execute('ALTER TABLE vulnerability_template ADD CONSTRAINT uix_vulnerability_template_name UNIQUE (name)') | |
101 | 107 | |
102 | conn.execute('ALTER TABLE vulnerability_template DROP CONSTRAINT uix_vulnerability_template_name') | |
103 | op.drop_column('vulnerability_template','shipped') | |
104 | conn.execute('ALTER TABLE vulnerability_template ADD CONSTRAINT uix_vulnerability_template_name UNIQUE (name)') | |
105 | ||
106 | 108 | op.drop_table('knowledge_base') |
107 | 109 | |
108 | op.drop_column('vulnerability','vulnerability_duplicate_id') | |
109 | op.drop_column('vulnerability','vulnerability_template_id') | |
110 | op.drop_column('vulnerability','association_date') | |
111 | op.drop_column('vulnerability','disassociated_manually') | |
110 | op.drop_column('vulnerability', 'vulnerability_duplicate_id') | |
111 | op.drop_column('vulnerability', 'vulnerability_template_id') | |
112 | op.drop_column('vulnerability', 'association_date') | |
113 | op.drop_column('vulnerability', 'disassociated_manually') |
6 | 6 | """ |
7 | 7 | |
8 | 8 | from alembic import op |
9 | import sqlalchemy as sa | |
10 | 9 | |
11 | 10 | |
12 | 11 | # revision identifiers, used by Alembic. |
5 | 5 | |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | import sqlalchemy as sa | |
9 | 8 | |
10 | 9 | |
11 | 10 | # revision identifiers, used by Alembic. |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | 9 | |
10 | ||
11 | 10 | # revision identifiers, used by Alembic. |
12 | 11 | revision = '84f266a05be3' |
13 | 12 | down_revision = 'a39a3a6e3f99' |
17 | 16 | |
18 | 17 | def upgrade(): |
19 | 18 | op.add_column('vulnerability', sa.Column( |
20 | 'tool', | |
21 | sa.Text(), | |
22 | nullable=False, | |
23 | server_default="" | |
24 | ) | |
19 | 'tool', | |
20 | sa.Text(), | |
21 | nullable=False, | |
22 | server_default="" | |
25 | 23 | ) |
24 | ) | |
26 | 25 | conn = op.get_bind() |
27 | 26 | conn.execute("""UPDATE vulnerability |
28 | SET tool=SUBQUERY.tool | |
27 | SET tool=SUBQUERY.tool | |
29 | 28 | FROM (select v.id, c.tool from vulnerability v, command_object co, command c where v.id = co.object_id and co.object_type = 'vulnerability' and co.command_id = c.id) AS SUBQUERY |
30 | 29 | WHERE vulnerability.id=SUBQUERY.id""") |
31 | 30 | conn.execute("UPDATE vulnerability set tool='Web UI' where tool=''") |
32 | ||
31 | ||
33 | 32 | |
34 | 33 | def downgrade(): |
35 | op.drop_column('vulnerability','tool') | |
36 | ||
34 | op.drop_column('vulnerability', 'tool') |
5 | 5 | |
6 | 6 | """ |
7 | 7 | |
8 | import uuid | |
9 | 8 | from alembic import op |
10 | 9 | import sqlalchemy as sa |
11 | ||
12 | # revision identifiers, used by Alembic. | |
13 | from sqlalchemy.dialects import postgresql | |
14 | 10 | |
15 | 11 | revision = '9c4091d1a09b' |
16 | 12 | down_revision = 'be89aa03e35e' |
0 | """update rule fields | |
1 | ||
2 | Revision ID: aa56852fa76d | |
3 | Revises: f0a507afabd4 | |
4 | Create Date: 2021-04-12 19:53:48.615218+00:00 | |
5 | ||
6 | """ | |
7 | from alembic import op | |
8 | import sqlalchemy as sa | |
9 | from sqlalchemy.sql import text | |
10 | ||
11 | # revision identifiers, used by Alembic. | |
12 | revision = 'aa56852fa76d' | |
13 | down_revision = 'f0a507afabd4' | |
14 | branch_labels = None | |
15 | depends_on = None | |
16 | ||
17 | ||
18 | def constraint_exists(constraint_name): | |
19 | connection = op.get_bind() | |
20 | result = connection.execute( | |
21 | text(""" | |
22 | SELECT exists( | |
23 | SELECT 1 | |
24 | from pg_catalog.pg_constraint | |
25 | where conname = :constraint_name | |
26 | ) as exists """ | |
27 | ), **{ | |
28 | 'constraint_name': constraint_name, | |
29 | } | |
30 | ).first() | |
31 | ||
32 | return result.exists | |
33 | ||
34 | ||
35 | def column_exists(table_name, column_name): | |
36 | connection = op.get_bind() | |
37 | result = connection.execute( | |
38 | text(""" | |
39 | SELECT exists( | |
40 | SELECT 1 | |
41 | FROM information_schema.columns | |
42 | WHERE table_name = :table_name | |
43 | AND column_name = :column_name | |
44 | ) as exists """ | |
45 | ), **{ | |
46 | 'table_name': table_name, | |
47 | 'column_name': column_name, | |
48 | } | |
49 | ).first() | |
50 | return result.exists | |
51 | ||
52 | ||
53 | def upgrade(): | |
54 | # ### commands auto generated by Alembic - please adjust! ### | |
55 | if not column_exists('action', 'description'): | |
56 | op.add_column('action', sa.Column('description', sa.String(), nullable=False)) | |
57 | op.drop_constraint('condition_rule_id_fkey', 'condition', type_='foreignkey') | |
58 | op.create_foreign_key('condition_rule_id_fkey', 'condition', 'rule', ['rule_id'], ['id'], ondelete='CASCADE') | |
59 | if not column_exists('rule', 'description'): | |
60 | op.add_column('rule', sa.Column('description', sa.String(), nullable=False)) | |
61 | if not column_exists('rule', 'name'): | |
62 | op.add_column('rule', sa.Column('name', sa.String(), nullable=False)) | |
63 | if not constraint_exists('ux_rule_name'): | |
64 | op.create_unique_constraint('ux_rule_name', 'rule', ['name']) | |
65 | if column_exists('rule', 'object'): | |
66 | op.drop_column('rule', 'object') | |
67 | if not constraint_exists('rule_action_uc'): | |
68 | op.create_unique_constraint('rule_action_uc', 'rule_action', ['rule_id', 'action_id']) | |
69 | # ### end Alembic commands ### | |
70 | ||
71 | ||
72 | def downgrade(): | |
73 | # ### commands auto generated by Alembic - please adjust! ### | |
74 | ||
75 | # several items could have been created 'conditionally' by upgrade(), | |
76 | # so their presence here doesn't tell whether they were created by this script or not; | |
77 | # therefore we don't do anything to reverse them: | |
78 | # a reversal only applies to items this script actually created or deleted | |
79 | ||
80 | # op.drop_constraint('rule_action_uc', 'rule_action', type_='unique') | |
81 | # op.add_column('rule', sa.Column('object', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False)) | |
82 | # op.drop_constraint('ux_rule_name', 'rule', type_='unique') | |
83 | # op.drop_column('rule', 'name') | |
84 | # op.drop_column('rule', 'description') | |
85 | op.drop_constraint('condition_rule_id_fkey', 'condition', type_='foreignkey') | |
86 | op.create_foreign_key('condition_rule_id_fkey', 'condition', 'rule', ['rule_id'], ['id']) | |
87 | # op.drop_column('action', 'description') | |
88 | # ### end Alembic commands ### |
4 | 4 | Create Date: 2020-04-02 20:41:41.083048+00:00 |
5 | 5 | |
6 | 6 | """ |
7 | from alembic import op | |
8 | import sqlalchemy as sa | |
9 | 7 | |
10 | 8 | from faraday.server.config import LOCAL_CONFIG_FILE |
11 | 9 | from configparser import ConfigParser, NoSectionError |
6 | 6 | """ |
7 | 7 | |
8 | 8 | from alembic import op |
9 | import sqlalchemy as sa | |
10 | 9 | |
11 | 10 | |
12 | 11 | # revision identifiers, used by Alembic. |
21 | 20 | conn.execute('ALTER TABLE executive_report ADD COLUMN filter JSONB') |
22 | 21 | |
23 | 22 | |
24 | ||
25 | 23 | def downgrade(): |
26 | 24 | conn = op.get_bind() |
27 | 25 | conn.execute('ALTER TABLE executive_report DROP COLUMN filter') |
5 | 5 | |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | import sqlalchemy as sa | |
9 | 8 | |
10 | 9 | |
11 | 10 | # revision identifiers, used by Alembic. |
19 | 18 | conn = op.get_bind() |
20 | 19 | conn.execute('ALTER TABLE vulnerability ADD COLUMN custom_fields JSONB') |
21 | 20 | conn.execute('ALTER TABLE vulnerability_template ADD COLUMN custom_fields JSONB') |
22 | conn.execute('CREATE TABLE custom_fields_schema ( '\ | |
23 | 'id SERIAL PRIMARY KEY,' \ | |
24 | 'table_name TEXT,' \ | |
25 | 'field_name TEXT,' \ | |
26 | 'field_type TEXT,' \ | |
27 | 'field_order INTEGER,' \ | |
28 | 'field_display_name TEXT)' | |
29 | ) | |
21 | conn.execute('CREATE TABLE custom_fields_schema ( ' | |
22 | 'id SERIAL PRIMARY KEY,' | |
23 | 'table_name TEXT,' | |
24 | 'field_name TEXT,' | |
25 | 'field_type TEXT,' | |
26 | 'field_order INTEGER,' | |
27 | 'field_display_name TEXT)' | |
28 | ) | |
29 | ||
30 | 30 | |
31 | 31 | def downgrade(): |
32 | 32 | conn = op.get_bind() |
0 | """adding fs uniquifier to user model | |
1 | ||
2 | Revision ID: f0a507afabd4 | |
3 | Revises: a4def820a5bb | |
4 | Create Date: 2021-02-24 22:08:24.237037+00:00 | |
5 | ||
6 | """ | |
7 | from alembic import op | |
8 | import sqlalchemy as sa | |
9 | ||
10 | ||
11 | # revision identifiers, used by Alembic. | |
12 | revision = 'f0a507afabd4' | |
13 | down_revision = 'a4def820a5bb' | |
14 | branch_labels = None | |
15 | depends_on = None | |
16 | ||
17 | ||
18 | def upgrade(): | |
19 | # be sure to MODIFY this line to make nullable=True: | |
20 | op.add_column('faraday_user', sa.Column('fs_uniquifier', sa.String(length=64), nullable=True)) | |
21 | ||
22 | # update existing rows with unique fs_uniquifier | |
23 | import uuid | |
24 | user_table = sa.Table('faraday_user', sa.MetaData(), sa.Column('id', sa.Integer, primary_key=True), | |
25 | sa.Column('fs_uniquifier', sa.String)) | |
26 | conn = op.get_bind() | |
27 | for row in conn.execute(sa.select([user_table.c.id])): | |
28 | conn.execute(user_table.update().values(fs_uniquifier=uuid.uuid4().hex).where(user_table.c.id == row['id'])) | |
29 | ||
30 | # finally - set nullable to false | |
31 | op.alter_column('faraday_user', 'fs_uniquifier', nullable=False) | |
32 | ||
33 | ||
34 | def downgrade(): | |
35 | op.drop_column( | |
36 | 'faraday_user', | |
37 | 'fs_uniquifier', | |
38 | ) |
6 | 6 | """ |
7 | 7 | from alembic import op |
8 | 8 | import sqlalchemy as sa |
9 | from faraday.server.fields import JSONType | |
10 | from depot.fields.sqlalchemy import UploadedFileField | |
11 | 9 | from sqlalchemy.dialects import postgresql |
12 | 10 | |
13 | 11 | # revision identifiers, used by Alembic. |
0 | 0 | ### |
1 | ## Faraday Penetration Test IDE | |
2 | ## Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
3 | ## See the file 'doc/LICENSE' for the license information | |
1 | # Faraday Penetration Test IDE | |
2 | # Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
3 | # See the file 'doc/LICENSE' for the license information | |
4 | 4 | ### |
5 | ||
6 | # I'm Py3
9 | 9 | |
10 | 10 | class ApiError(Exception): |
11 | 11 | def __init__(self, message): |
12 | super(ApiError, self).__init__(message) | |
12 | super().__init__(message) | |
13 | 13 | |
14 | 14 | |
15 | 15 | class Structure: |
1 | 1 | # -*- coding: utf-8 -*- |
2 | 2 | |
3 | 3 | ### |
4 | ## Faraday Penetration Test IDE | |
5 | ## Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
6 | ## See the file 'doc/LICENSE' for the license information | |
4 | # Faraday Penetration Test IDE | |
5 | # Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
6 | # See the file 'doc/LICENSE' for the license information | |
7 | 7 | ### |
8 | 8 | from builtins import str |
9 | 9 | |
31 | 31 | |
32 | 32 | threshold = 0.75 |
33 | 33 | min_weight = 0.3 |
34 | ||
34 | 35 | |
35 | 36 | def compare(a, b): |
36 | 37 | return SequenceMatcher(None, a, b).ratio() |
691 | 692 | else: |
692 | 693 | self.api.set_array(field, value, add=to_add, key=key, object=vuln) |
693 | 694 | action = 'Adding %s to %s list in vulnerability %s with id %s' % ( |
694 | value, key, vuln.name, vuln.id) | |
695 | value, key, vuln.name, vuln.id) | |
695 | 696 | if not to_add: |
696 | 697 | action = 'Removing %s from %s list in vulnerability %s with id %s' % ( |
697 | 698 | value, key, vuln.name, vuln.id) |
829 | 830 | smtp_ssl=ssl |
830 | 831 | ) |
831 | 832 | |
832 | for d in [output, 'log/']: # TODO CHANGE THIS | |
833 | for d in [output, 'log/']: # TODO CHANGE THIS | |
833 | 834 | if not Path(d): |
834 | 835 | Path(d).mkdir(parents=True) |
835 | 836 |
1 | 1 | # -*- coding: utf-8 -*- |
2 | 2 | |
3 | 3 | ### |
4 | ## Faraday Penetration Test IDE | |
5 | ## Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
6 | ## See the file 'doc/LICENSE' for the license information | |
4 | # Faraday Penetration Test IDE | |
5 | # Copyright (C) 2018 Infobyte LLC (http://www.infobytesec.com/) | |
6 | # See the file 'doc/LICENSE' for the license information | |
7 | 7 | ### |
8 | 8 | import re |
9 | 9 | import json |
0 | 0 | # Faraday Penetration Test IDE |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | # I'm Py3
12 | 12 | import sqlalchemy |
13 | 13 | import datetime |
14 | 14 | from collections import defaultdict |
15 | from flask import g | |
16 | 15 | from flask_classful import FlaskView |
17 | 16 | from sqlalchemy.orm import joinedload, undefer |
18 | 17 | from sqlalchemy.orm.exc import NoResultFound, ObjectDeletedError |
26 | 25 | from webargs.flaskparser import FlaskParser |
27 | 26 | from webargs.core import ValidationError |
28 | 27 | from flask_classful import route |
28 | import flask_login | |
29 | 29 | |
30 | 30 | from faraday.server.models import Workspace, db, Command, CommandObject, count_vulnerability_severities |
31 | 31 | from faraday.server.schemas import NullToBlankString |
32 | 32 | from faraday.server.utils.database import ( |
33 | 33 | get_conflict_object, |
34 | 34 | is_unique_constraint_violation |
35 | ) | |
35 | ) | |
36 | 36 | from faraday.server.utils.filters import FlaskRestlessSchema |
37 | 37 | from faraday.server.utils.search import search |
38 | 38 | |
299 | 299 | deserialization |
300 | 300 | """ |
301 | 301 | return FlaskParser(unknown=EXCLUDE).parse(schema, request, location="json", |
302 | *args, **kwargs) | |
302 | *args, **kwargs) | |
303 | 303 | |
304 | 304 | @classmethod |
305 | 305 | def register(cls, app, *args, **kwargs): |
306 | 306 | """Register and add JSON error handler. Use error code |
307 | 307 | 400 instead of 409""" |
308 | super(GenericView, cls).register(app, *args, **kwargs) | |
308 | super().register(app, *args, **kwargs) | |
309 | 309 | |
310 | 310 | @app.errorhandler(422) |
311 | def handle_error(err): # pylint: disable=unused-variable | |
311 | def handle_error(err): # pylint: disable=unused-variable | |
312 | 312 | # webargs attaches additional metadata to the `data` attribute |
313 | 313 | exc = getattr(err, 'exc') |
314 | 314 | if exc: |
321 | 321 | }), 400 |
322 | 322 | |
323 | 323 | @app.errorhandler(409) |
324 | def handle_conflict(err): # pylint: disable=unused-variable | |
324 | def handle_conflict(err): # pylint: disable=unused-variable | |
325 | 325 | # webargs attaches additional metadata to the `data` attribute |
326 | 326 | exc = getattr(err, 'exc', None) or getattr(err, 'description', None) |
327 | 327 | if exc: |
332 | 332 | return flask.jsonify(messages), 409 |
333 | 333 | |
334 | 334 | @app.errorhandler(InvalidUsage) |
335 | def handle_invalid_usage(error): # pylint: disable=unused-variable | |
335 | def handle_invalid_usage(error): # pylint: disable=unused-variable | |
336 | 336 | response = flask.jsonify(error.to_dict()) |
337 | 337 | response.status_code = error.status_code |
338 | 338 | return response |
339 | 339 | |
340 | 340 | # @app.errorhandler(404) |
341 | def handle_not_found(err): # pylint: disable=unused-variable | |
341 | def handle_not_found(err): # pylint: disable=unused-variable | |
342 | 342 | response = {'success': False, 'message': err.description if faraday_server.debug else err.name} |
343 | 343 | return flask.jsonify(response), 404 |
344 | 344 | |
345 | 345 | @app.errorhandler(500) |
346 | def handle_server_error(err): # pylint: disable=unused-variable | |
347 | response = {'success': False, 'message': f"Exception: {err.original_exception}" if faraday_server.debug else 'Internal Server Error'} | |
346 | def handle_server_error(err): # pylint: disable=unused-variable | |
347 | response = {'success': False, | |
348 | 'message': f"Exception: {err.original_exception}" if faraday_server.debug else 'Internal Server Error'} | |
348 | 349 | return flask.jsonify(response), 500 |
349 | 350 | |
350 | 351 | |
398 | 399 | sup = super() |
399 | 400 | if hasattr(sup, 'before_request'): |
400 | 401 | sup.before_request(name, *args, **kwargs) |
401 | if (self._get_workspace(kwargs['workspace_name']).readonly and | |
402 | flask.request.method not in ['GET', 'HEAD', 'OPTIONS']): | |
402 | if (self._get_workspace(kwargs['workspace_name']).readonly | |
403 | and flask.request.method not in ['GET', 'HEAD', 'OPTIONS']): | |
403 | 404 | flask.abort(403, "Altering a readonly workspace is not allowed") |
404 | 405 | |
405 | 406 | |
583 | 584 | |
584 | 585 | try: |
585 | 586 | per_page = int(flask.request.args[ |
586 | self.per_page_parameter_name]) | |
587 | self.per_page_parameter_name]) | |
587 | 588 | except (TypeError, ValueError): |
588 | 589 | flask.abort(404, 'Invalid per_page value') |
589 | 590 | |
609 | 610 | class FilterWorkspacedMixin(ListMixin): |
610 | 611 | """Add filter endpoint for searching on any workspaced objects columns |
611 | 612 | """ |
613 | ||
612 | 614 | @route('/filter') |
613 | 615 | def filter(self, workspace_name): |
614 | 616 | """ |
633 | 635 | |
634 | 636 | class PageMeta: |
635 | 637 | total = 0 |
638 | ||
636 | 639 | pagination_metadata = PageMeta() |
637 | 640 | pagination_metadata.total = count |
638 | 641 | return self._envelope_list(filtered_objs, pagination_metadata) |
639 | 642 | |
640 | 643 | def _generate_filter_query(self, filters, workspace, severity_count=False): |
641 | 644 | filter_query = search(db.session, |
642 | self.model_class, | |
643 | filters) | |
645 | self.model_class, | |
646 | filters) | |
644 | 647 | |
645 | 648 | filter_query = filter_query.filter(self.model_class.workspace == workspace) |
646 | 649 | |
665 | 668 | if 'offset' in filters: |
666 | 669 | offset = filters.pop('offset') |
667 | 670 | if 'limit' in filters: |
668 | limit = filters.pop('limit') # we need to remove pagination, since | |
669 | ||
670 | filter_query = self._generate_filter_query( | |
671 | filters, | |
672 | workspace, | |
673 | severity_count=severity_count | |
674 | ) | |
671 | limit = filters.pop('limit')  # we need to remove pagination, since the count below must run on the full query | |
672 | ||
673 | try: | |
674 | filter_query = self._generate_filter_query( | |
675 | filters, | |
676 | workspace, | |
677 | severity_count=severity_count | |
678 | ) | |
679 | except AttributeError as e: | |
680 | flask.abort(400, e) | |
681 | ||
675 | 682 | count = filter_query.count() |
676 | 683 | if limit: |
677 | 684 | filter_query = filter_query.limit(limit) |
680 | 687 | objs = self.schema_class(**marshmallow_params).dumps(filter_query.all()) |
681 | 688 | return json.loads(objs), count |
682 | 689 | else: |
683 | filter_query = self._generate_filter_query( | |
684 | filters, | |
685 | workspace, | |
686 | ) | |
690 | try: | |
691 | filter_query = self._generate_filter_query( | |
692 | filters, | |
693 | workspace, | |
694 | ) | |
695 | except AttributeError as e: | |
696 | flask.abort(400, e) | |
687 | 697 | column_names = ['count'] + [field['field'] for field in filters.get('group_by', [])] |
688 | 698 | rows = [list(zip(column_names, row)) for row in filter_query.all()] |
689 | 699 | data = [] |
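For context on the `AttributeError` handling added above, a minimal sketch of a client hitting the filter endpoint; the URL layout and the JSON-encoded `q` parameter are assumptions for illustration:

```python
# Hedged sketch: a bad field name in the filter now yields HTTP 400 instead
# of an unhandled AttributeError.
import json
import requests

q = {"filters": [{"name": "severity", "op": "eq", "val": "critical"}]}
resp = requests.get(
    "http://localhost:5985/_api/v2/ws/demo/vulns/filter",  # assumed route
    params={"q": json.dumps(q)},
    auth=("faraday", "changeme"),
)
print(resp.status_code)  # 400 if the filter references an unknown column
```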
721 | 731 | |
722 | 732 | class PageMeta: |
723 | 733 | total = 0 |
734 | ||
724 | 735 | pagination_metadata = PageMeta() |
725 | 736 | pagination_metadata.total = count |
726 | 737 | return self._envelope_list(filtered_objs, pagination_metadata) |
727 | 738 | |
728 | 739 | def _generate_filter_query(self, filters, severity_count=False, host_vulns=False): |
729 | 740 | filter_query = search(db.session, |
730 | self.model_class, | |
731 | filters) | |
741 | self.model_class, | |
742 | filters) | |
732 | 743 | |
733 | 744 | if severity_count and 'group_by' not in filters: |
734 | 745 | filter_query = count_vulnerability_severities(filter_query, self.model_class, |
751 | 762 | if 'offset' in filters: |
752 | 763 | offset = filters.pop('offset') |
753 | 764 | if 'limit' in filters: |
754 | limit = filters.pop('limit') # we need to remove pagination, since | |
755 | ||
756 | filter_query = self._generate_filter_query( | |
757 | filters, | |
758 | severity_count=severity_count, | |
759 | host_vulns=host_vulns | |
760 | ) | |
765 | limit = filters.pop('limit')  # we need to remove pagination, since the count below must run on the full query | |
766 | ||
767 | try: | |
768 | filter_query = self._generate_filter_query( | |
769 | filters, | |
770 | severity_count=severity_count, | |
771 | host_vulns=host_vulns | |
772 | ) | |
773 | except AttributeError as e: | |
774 | flask.abort(400, e) | |
761 | 775 | |
762 | 776 | if extra_alchemy_filters is not None: |
763 | 777 | filter_query = filter_query.filter(extra_alchemy_filters) |
817 | 831 | |
818 | 832 | class RetrieveWorkspacedMixin(RetrieveMixin): |
819 | 833 | """Add GET /<workspace_name>/<route_base>/<id>/ route""" |
834 | ||
820 | 835 | # There are no differences with the non-workspaced implementations. The code |
821 | 836 | # inside the view generic methods is enough |
822 | 837 | def get(self, object_id, workspace_name=None): |
911 | 926 | flask.request) |
912 | 927 | data.pop('id', None) |
913 | 928 | created = self._perform_create(data, **kwargs) |
914 | created.creator = g.user | |
929 | if not flask_login.current_user.is_anonymous: | |
930 | created.creator = flask_login.current_user | |
915 | 931 | db.session.commit() |
916 | 932 | return self._dump(created, kwargs), 201 |
917 | 933 | |
960 | 976 | command_id = None |
961 | 977 | |
962 | 978 | if command_id: |
963 | command = db.session.query(Command).filter(Command.id==command_id, Command.workspace==obj.workspace).first() | |
979 | command = db.session.query(Command).filter(Command.id == command_id, | |
980 | Command.workspace == obj.workspace).first() | |
964 | 981 | if command is None: |
965 | 982 | raise InvalidUsage('Command not found.') |
966 | 983 | # if the object is created and updated in the same command |
1178 | 1195 | self._perform_update(object_id, obj, data, partial=True, **kwargs) |
1179 | 1196 | |
1180 | 1197 | return self._dump(obj, kwargs), 200 |
1198 | ||
1181 | 1199 | |
1182 | 1200 | class UpdateWorkspacedMixin(UpdateMixin, CommandMixin): |
1183 | 1201 | """Add PUT /<workspace_name>/<route_base>/<id>/ route |
1274 | 1292 | |
1275 | 1293 | class DeleteMixin: |
1276 | 1294 | """Add DELETE /<id>/ route""" |
1295 | ||
1277 | 1296 | def delete(self, object_id, **kwargs): |
1278 | 1297 | """ |
1279 | 1298 | --- |
1300 | 1319 | |
1301 | 1320 | class DeleteWorkspacedMixin(DeleteMixin): |
1302 | 1321 | """Add DELETE /<workspace_name>/<route_base>/<id>/ route""" |
1322 | ||
1303 | 1323 | def delete(self, object_id, workspace_name=None): |
1304 | ||
1305 | 1324 | """ |
1306 | 1325 | --- |
1307 | 1326 | tags: ["{tag_name}"] |
1385 | 1404 | |
1386 | 1405 | count = self._filter_query( |
1387 | 1406 | db.session.query(self.model_class) |
1388 | .join(Workspace) | |
1389 | .group_by(group_by) | |
1390 | .filter(Workspace.name == workspace_name, | |
1391 | *self.count_extra_filters)) | |
1392 | ||
1393 | #order | |
1407 | .join(Workspace) | |
1408 | .group_by(group_by) | |
1409 | .filter(Workspace.name == workspace_name, | |
1410 | *self.count_extra_filters)) | |
1411 | ||
1412 | # order | |
1394 | 1413 | order_by = group_by |
1395 | 1414 | if sort_dir == 'desc': |
1396 | 1415 | count = count.order_by(desc(order_by)) |
1442 | 1461 | 400: |
1443 | 1462 | description: No workspace passed or group_by is not specified |
1444 | 1463 | """ |
1445 | #"""head: | |
1464 | # """head: | |
1446 | 1465 | # tags: [{tag_name}] |
1447 | 1466 | # responses: |
1448 | 1467 | # 200: |
1486 | 1505 | grouped_attr = getattr(self.model_class, group_by) |
1487 | 1506 | |
1488 | 1507 | q = db.session.query( |
1489 | Workspace.name, | |
1490 | grouped_attr, | |
1491 | func.count(grouped_attr) | |
1492 | )\ | |
1493 | .join(Workspace)\ | |
1494 | .group_by(grouped_attr, Workspace.name)\ | |
1508 | Workspace.name, | |
1509 | grouped_attr, | |
1510 | func.count(grouped_attr) | |
1511 | ) \ | |
1512 | .join(Workspace) \ | |
1513 | .group_by(grouped_attr, Workspace.name) \ | |
1495 | 1514 | .filter(Workspace.name.in_(workspace_names_list)) |
1496 | 1515 | |
1497 | #order | |
1516 | # order | |
1498 | 1517 | order_by = grouped_attr |
1499 | 1518 | if sort_dir == 'desc': |
1500 | 1519 | q = q.order_by(desc(Workspace.name), desc(order_by)) |
1540 | 1559 | Model converter that automatically sets minimum length |
1541 | 1560 | validators to not blankable fields |
1542 | 1561 | """ |
1562 | ||
1543 | 1563 | def _add_column_kwargs(self, kwargs, column): |
1544 | 1564 | super()._add_column_kwargs(kwargs, column) |
1545 | 1565 | if not column.info.get('allow_blank', True): |
1566 | 1586 | else: |
1567 | 1587 | dt = dt.astimezone(datetime.timezone.utc) |
1568 | 1588 | return dt.isoformat(*args, **kwargs) |
1589 | ||
1590 | ||
1569 | 1591 | fields.DateTime.SERIALIZATION_FUNCS['iso'] = old_isoformat |
1570 | 1592 | |
1571 | 1593 | |
1585 | 1607 | def __init__(self, *args, **kwargs): |
1586 | 1608 | super().__init__(*args, **kwargs) |
1587 | 1609 | self.unknown = EXCLUDE |
1610 | ||
1588 | 1611 | |
1589 | 1612 | class FilterAlchemyModelConverter(ModelConverter): |
1590 | 1613 | """Use this to make all fields of a model not required. |
33 | 33 | agent_creation_api = Blueprint('agent_creation_api', __name__) |
34 | 34 | |
35 | 35 | logger = logging.getLogger(__name__) |
36 | ||
36 | 37 | |
37 | 38 | class ExecutorSchema(AutoSchema): |
38 | 39 | |
132 | 133 | except NoResultFound: |
133 | 134 | flask.abort(404, f"No such workspace: {workspace_name}") |
134 | 135 | |
135 | def _perform_create(self, data, **kwargs): | |
136 | def _perform_create(self, data, **kwargs): | |
136 | 137 | token = data.pop('token') |
137 | 138 | if not faraday_server.agent_registration_secret: |
138 | 139 | # someone is trying to use the token, but no token was generated yet. |
139 | 140 | abort(401, "Invalid Token") |
140 | if not pyotp.TOTP(faraday_server.agent_registration_secret).verify(token, valid_window=1): | |
141 | if not pyotp.TOTP(faraday_server.agent_registration_secret, | |
142 | interval=int(faraday_server.agent_token_expiration) | |
143 | ).verify(token, valid_window=1): | |
141 | 144 | abort(401, "Invalid Token") |
142 | 145 | |
143 | 146 | workspace_names = data.pop('workspaces') |
160 | 163 | dict_["name"] for dict_ in workspace_names |
161 | 164 | ] |
162 | 165 | |
163 | ||
164 | 166 | workspaces = list( |
165 | 167 | self._get_workspace(workspace_name) |
166 | 168 | for workspace_name in workspace_names |
16 | 16 | class AgentAuthTokenSchema(Schema): |
17 | 17 | token = fields.String(required=True) |
18 | 18 | expires_in = fields.Float(required=True) |
19 | total_duration = fields.Float(required=True) | |
19 | 20 | |
20 | 21 | |
21 | 22 | class AgentAuthTokenView(GenericView): |
39 | 40 | 200: |
40 | 41 | description: Ok |
41 | 42 | """ |
42 | totp = pyotp.TOTP(faraday_server.agent_registration_secret) | |
43 | totp = pyotp.TOTP(faraday_server.agent_registration_secret, interval=int( | |
44 | faraday_server.agent_token_expiration)) | |
43 | 45 | return AgentAuthTokenSchema().dump( |
44 | 46 | {'token': totp.now(), |
45 | 'expires_in': totp.interval - datetime.datetime.now().timestamp() % totp.interval}) | |
47 | 'expires_in': totp.interval - datetime.datetime.now().timestamp() % totp.interval, | |
48 | 'total_duration': totp.interval}) | |
46 | 49 | |
47 | 50 | |
48 | 51 | class AgentAuthTokenV3View(AgentAuthTokenView): |
49 | 52 | route_prefix = '/v3' |
50 | 53 | trailing_slash = False |
51 | 54 | |
55 | ||
52 | 56 | AgentAuthTokenView.register(agent_auth_token_api) |
53 | 57 | AgentAuthTokenV3View.register(agent_auth_token_api) |
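The interval now threads through both token generation (above) and agent-side verification, so `expires_in` and the new `total_duration` track the configured expiration. A standalone sketch of the same pyotp mechanics, with an illustrative secret and a 60-second cycle:

```python
import datetime
import pyotp

secret = pyotp.random_base32()           # illustrative; the server uses
totp = pyotp.TOTP(secret, interval=60)   # agent_registration_secret instead

token = totp.now()
expires_in = totp.interval - datetime.datetime.now().timestamp() % totp.interval
assert totp.verify(token, valid_window=1)  # also accepts the adjacent window
print(f"token {token} valid for {expires_in:.0f}s of a {totp.interval}s cycle")
```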
4 | 4 | """ |
5 | 5 | from __future__ import print_function |
6 | 6 | from __future__ import absolute_import |
7 | from builtins import range | |
8 | 7 | |
9 | 8 | import flask |
10 | from flask_login import current_user | |
11 | from marshmallow import Schema, fields | |
12 | 9 | |
13 | 10 | from werkzeug.local import LocalProxy |
14 | 11 | from werkzeug.datastructures import MultiDict |
17 | 14 | import logging |
18 | 15 | |
19 | 16 | from flask import current_app as app |
20 | from flask import abort, Blueprint, jsonify, g, request, make_response | |
21 | from flask_security.confirmable import requires_confirmation | |
22 | from flask_security.forms import LoginForm, ChangePasswordForm | |
23 | from flask_security.datastore import SQLAlchemyUserDatastore | |
24 | from flask_security.utils import ( | |
25 | get_message, | |
26 | get_identity_attributes, | |
27 | ) | |
28 | from flask_security.signals import password_reset, reset_password_instructions_sent | |
17 | from flask import Blueprint, request, make_response | |
18 | from flask_security.signals import reset_password_instructions_sent | |
29 | 19 | from faraday.server import config |
30 | 20 | |
31 | 21 | from flask_security.recoverable import generate_reset_password_token, update_password |
32 | 22 | from flask_security.views import anonymous_user_required |
33 | from werkzeug.middleware.proxy_fix import ProxyFix | |
34 | #from flask_security.recoverable import _security | |
35 | from flask_security.utils import do_flash, send_mail, \ | |
36 | config_value, get_token_status, verify_hash | |
23 | from flask_security.utils import send_mail, config_value, get_token_status, verify_hash | |
37 | 24 | from flask_security.forms import ResetPasswordForm |
38 | 25 | |
39 | 26 | from faraday.server.models import User |
27 | ||
40 | 28 | _security = LocalProxy(lambda: app.extensions['security']) |
41 | 29 | _datastore = LocalProxy(lambda: _security.datastore) |
42 | 30 | |
44 | 32 | logger = logging.getLogger(__name__) |
45 | 33 | |
46 | 34 | |
47 | @auth.route('/auth/forgot_password', methods= ['POST']) | |
35 | @auth.route('/auth/forgot_password', methods=['POST']) | |
48 | 36 | @anonymous_user_required |
49 | 37 | def forgot_password(): |
50 | 38 | """ |
59 | 47 | |
60 | 48 | if not config.smtp.is_enabled(): |
61 | 49 | logger.warning('Missing SMTP Config.') |
62 | return make_response(flask.jsonify(response=dict(message="Operation not implemented"), success=False, code=501), 501) | |
50 | return make_response(flask.jsonify(response=dict(message="Operation not implemented"), success=False, code=501), | |
51 | 501) | |
63 | 52 | |
64 | 53 | if 'email' not in request.json: |
65 | return make_response(flask.jsonify(response=dict(message="Operation not allowed"), success=False, code=406),406) | |
66 | ||
54 | return make_response(flask.jsonify(response=dict(message="Operation not allowed"), success=False, code=406), | |
55 | 406) | |
67 | 56 | |
68 | 57 | try: |
69 | 58 | email = request.json.get('email') |
70 | 59 | user = User.query.filter_by(email=email).first() |
71 | 60 | if not user: |
72 | return make_response(flask.jsonify(response=dict(email=email, message="Invalid Email"), success=False, code=400), 400) | |
61 | return make_response( | |
62 | flask.jsonify(response=dict(email=email, message="Invalid Email"), success=False, code=400), 400) | |
73 | 63 | |
74 | 64 | send_reset_password_instructions(user) |
75 | 65 | return flask.jsonify(response=dict(email=email), success=True, code=200) |
76 | 66 | except Exception as e: |
77 | 67 | logger.exception(e) |
78 | return make_response(flask.jsonify(response=dict(email=email, message="Server Error"), success=False, code=500), 500) | |
68 | return make_response(flask.jsonify(response=dict(email=email, message="Server Error"), success=False, code=500), | |
69 | 500) | |
79 | 70 | |
80 | 71 | |
81 | @auth.route('/auth/reset_password/<token>', methods= ['POST']) | |
72 | @auth.route('/auth/reset_password/<token>', methods=['POST']) | |
82 | 73 | @anonymous_user_required |
83 | 74 | def reset_password(token): |
84 | 75 | """ |
92 | 83 | """ |
93 | 84 | if not config.smtp.is_enabled(): |
94 | 85 | logger.warning('Missing SMTP Config.') |
95 | return make_response(flask.jsonify(response=dict(message="Operation not implemented"), success=False, code=501), 501) | |
86 | return make_response(flask.jsonify(response=dict(message="Operation not implemented"), success=False, code=501), | |
87 | 501) | |
96 | 88 | |
97 | 89 | try: |
98 | 90 | if 'password' not in request.json or 'password_confirm' not in request.json: |
99 | return make_response(flask.jsonify(response=dict(message="Invalid data provided"), success=False, code=406),406) | |
91 | return make_response(flask.jsonify(response=dict(message="Invalid data provided"), success=False, code=406), | |
92 | 406) | |
100 | 93 | |
101 | 94 | expired, invalid, user = reset_password_token_status(token) |
102 | 95 | |
104 | 97 | invalid = True |
105 | 98 | |
106 | 99 | if invalid or expired: |
107 | return make_response(flask.jsonify(response=dict(message="Invalid Token"), success=False, code=406),406) | |
100 | return make_response(flask.jsonify(response=dict(message="Invalid Token"), success=False, code=406), 406) | |
108 | 101 | if request.is_json: |
109 | 102 | form = ResetPasswordForm(MultiDict(request.get_json())) |
110 | 103 | if form.validate_on_submit() and validate_strong_password(form.password.data, form.password_confirm.data): |
112 | 105 | _datastore.commit() |
113 | 106 | return flask.jsonify(response=dict(message="Password changed successfully"), success=True, code=200) |
114 | 107 | |
115 | return make_response(flask.jsonify(response=dict(message="Bad request"), success=False, code=400),400) | |
108 | return make_response(flask.jsonify(response=dict(message="Bad request"), success=False, code=400), 400) | |
116 | 109 | |
117 | 110 | except Exception as e: |
118 | 111 | logger.exception(e) |
119 | return make_response(flask.jsonify(response=dict(token=token, message="Server Error"), success=False, code=500), 500) | |
112 | return make_response(flask.jsonify(response=dict(token=token, message="Server Error"), success=False, code=500), | |
113 | 500) | |
120 | 114 | |
121 | 115 | |
122 | 116 | def send_reset_password_instructions(user): |
130 | 124 | |
131 | 125 | if config_value('SEND_PASSWORD_RESET_EMAIL'): |
132 | 126 | send_mail(config_value('EMAIL_SUBJECT_PASSWORD_RESET'), |
133 | user.email, 'reset_instructions', | |
134 | user=user, reset_link=reset_link) | |
127 | user.email, 'reset_instructions', | |
128 | user=user, reset_link=reset_link) | |
135 | 129 | |
136 | 130 | reset_password_instructions_sent.send( |
137 | 131 | app._get_current_object(), user=user, token=token |
144 | 138 | """ |
145 | 139 | if config_value('SEND_PASSWORD_RESET_NOTICE_EMAIL'): |
146 | 140 | send_mail(config_value('EMAIL_SUBJECT_PASSWORD_NOTICE'), |
147 | user.email, 'reset_notice', user=user) | |
141 | user.email, 'reset_notice', user=user) | |
148 | 142 | |
149 | 143 | |
150 | 144 | def reset_password_token_status(token): |
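Taken together, the two endpoints above implement a JSON password-reset flow. A minimal client-side sketch, with the API prefix, credentials, and token value all as assumptions:

```python
import requests

BASE = "http://localhost:5985/_api"  # assumed blueprint prefix

# Step 1: request the reset mail (200 ok, 400 unknown email, 501 without SMTP)
r = requests.post(f"{BASE}/auth/forgot_password",
                  json={"email": "user@example.com"})
print(r.status_code, r.json())

# Step 2: follow the token received by mail (hypothetical value here)
token = "TOKEN-FROM-EMAIL"
r = requests.post(f"{BASE}/auth/reset_password/{token}",
                  json={"password": "N3w-passw0rd!",
                        "password_confirm": "N3w-passw0rd!"})
print(r.status_code, r.json())
```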
1 | 1 | from datetime import datetime, timedelta |
2 | 2 | from typing import Type, Optional |
3 | 3 | |
4 | ||
4 | import time | |
5 | import flask_login | |
5 | 6 | import flask |
6 | 7 | import sqlalchemy |
7 | 8 | from sqlalchemy.orm.exc import NoResultFound |
67 | 68 | class PolymorphicVulnerabilityField(fields.Field): |
68 | 69 | """Used like a nested field with many objects, but it decides which |
69 | 70 | schema to use based on the type of each vuln""" |
71 | ||
70 | 72 | def __init__(self, *args, **kwargs): |
71 | 73 | super().__init__(*args, **kwargs) |
72 | 74 | self.many = kwargs.get('many', False) |
222 | 224 | data: dict, |
223 | 225 | data_already_deserialized: bool = False, |
224 | 226 | set_end_date: bool = True): |
227 | ||
228 | logger.info("Init bulk create process") | |
229 | start_time = time.time() | |
230 | ||
225 | 231 | if not data_already_deserialized: |
226 | 232 | schema = BulkCreateSchema() |
227 | 233 | data = schema.load(data) |
234 | ||
228 | 235 | if 'command' in data: |
229 | 236 | command = _update_command(command, data['command']) |
230 | for host in data['hosts']: | |
231 | _create_host(ws, host, command) | |
237 | ||
238 | total_hosts = len(data['hosts']) | |
239 | if total_hosts > 0: | |
240 | logger.debug(f"Needs to create {total_hosts} hosts...") | |
241 | for host in data['hosts']: | |
242 | _create_host(ws, host, command) | |
243 | ||
232 | 244 | if 'command' in data and set_end_date: |
233 | 245 | command.end_date = datetime.now() if command.end_date is None else \ |
234 | 246 | command.end_date |
235 | 247 | db.session.commit() |
248 | ||
249 | total_secs = time.time() - start_time | |
250 | logger.info(f"Finish bulk create process. Total time: {total_secs:.2f} secs") | |
236 | 251 | |
237 | 252 | |
238 | 253 | def _update_command(command: Command, command_data: dict): |
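For orientation, a sketch of the payload shape `_bulk_create()` iterates over; the nesting mirrors the loops above, while the exact key names and field values are assumptions for illustration:

```python
# Hedged sketch of a bulk_create payload: hosts, each carrying nested
# services, vulnerabilities and credentials, plus the originating command.
data = {
    "command": {"tool": "nmap", "command": "nmap -sV 10.0.0.1"},  # illustrative
    "hosts": [
        {
            "ip": "10.0.0.1",
            "services": [{"name": "http", "port": 80, "protocol": "tcp"}],
            "vulnerabilities": [{"name": "Outdated web server", "severity": "med"}],
            "credentials": [],
        }
    ],
}
```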
254 | 269 | if command is not None: |
255 | 270 | _create_command_object_for(ws, created, host, command) |
256 | 271 | |
257 | for service_data in services: | |
258 | _create_service(ws, host, service_data, command) | |
259 | ||
260 | for vuln_data in vulns: | |
261 | _create_hostvuln(ws, host, vuln_data, command) | |
262 | ||
263 | for cred_data in credentials: | |
264 | _create_credential(ws, cred_data, command, host=host) | |
272 | total_services = len(services) | |
273 | if total_services > 0: | |
274 | logger.debug(f"Needs to create {total_services} services...") | |
275 | for service_data in services: | |
276 | _create_service(ws, host, service_data, command) | |
277 | ||
278 | total_vulns = len(vulns) | |
279 | if total_vulns > 0: | |
280 | logger.debug(f"Needs to create {total_vulns} vulns...") | |
281 | for vuln_data in vulns: | |
282 | _create_hostvuln(ws, host, vuln_data, command) | |
283 | ||
284 | total_credentials = len(credentials) | |
285 | if total_credentials > 0: | |
286 | logger.debug(f"Needs to create {total_credentials} credentials...") | |
287 | for cred_data in credentials: | |
288 | _create_credential(ws, cred_data, command, host=host) | |
265 | 289 | |
266 | 290 | |
267 | 291 | def _create_command_object_for(ws, created, obj, command): |
310 | 334 | if command is not None: |
311 | 335 | _create_command_object_for(ws, created, service, command) |
312 | 336 | |
313 | for vuln_data in vulns: | |
314 | _create_servicevuln(ws, service, vuln_data, command) | |
315 | ||
316 | for cred_data in creds: | |
317 | _create_credential(ws, cred_data, command, service=service) | |
337 | total_service_vulns = len(vulns) | |
338 | if total_service_vulns > 0: | |
339 | logger.debug(f"Needs to create {total_service_vulns} service vulns...") | |
340 | for vuln_data in vulns: | |
341 | _create_servicevuln(ws, service, vuln_data, command) | |
342 | ||
343 | total_service_creds = len(creds) | |
344 | if total_service_creds > 0: | |
345 | logger.debug(f"Needs to create {total_service_creds} service credentials...") | |
346 | for cred_data in creds: | |
347 | _create_credential(ws, cred_data, command, service=service) | |
318 | 348 | |
319 | 349 | |
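Editor's note: for orientation, the nested structure these loops walk is roughly the following. This is a hypothetical payload: the top-level keys ('command', 'hosts') appear literally in the code above, while the per-host and per-service keys are inferred from the local variable names (services, vulns, credentials, creds) and may be spelled differently in the actual BulkCreateSchema.

    # Illustrative only; field names under each host are assumptions.
    bulk_payload = {
        "command": {"tool": "nmap", "command": "nmap -sV 10.0.0.1"},  # optional
        "hosts": [
            {
                "ip": "10.0.0.1",
                "services": [
                    {
                        "name": "http",
                        "port": 80,
                        "vulnerabilities": [{"name": "Outdated server", "severity": "med"}],
                        "credentials": [],
                    }
                ],
                "vulnerabilities": [{"name": "Weak SNMP community", "severity": "low"}],
                "credentials": [{"username": "admin", "password": "admin"}],
            }
        ],
    }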
320 | 350 | def _create_vuln(ws, vuln_data, command=None, **kwargs): |
432 | 462 | """ |
433 | 463 | data = self._parse_data(self._get_schema_instance({}), flask.request) |
434 | 464 | |
435 | if flask.g.user is None: | |
465 | if flask_login.current_user.is_anonymous: | |
436 | 466 | agent = require_agent_token() |
437 | 467 | workspace = self._get_workspace(workspace_name) |
438 | 468 | |
472 | 502 | |
473 | 503 | data["command"] = { |
474 | 504 | 'id': agent_execution.command.id, |
475 | 'tool': agent.name, # Agent name | |
505 | 'tool': agent.name, # Agent name | |
476 | 506 | 'command': agent_execution.executor.name, |
477 | 507 | 'user': '', |
478 | 508 | 'hostname': '', |
494 | 524 | _update_command(command, data['command']) |
495 | 525 | db.session.flush() |
496 | 526 | |
497 | ||
498 | 527 | else: |
499 | 528 | workspace = self._get_workspace(workspace_name) |
500 | creator_user = flask.g.user | |
529 | creator_user = flask_login.current_user | |
501 | 530 | data = add_creator(data, creator_user) |
502 | 531 | |
503 | 532 | if 'command' in data: |
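Editor's note: this hunk is one instance of a repo-wide migration from the hand-rolled flask.g.user (populated by a custom before_request hook, removed later in this diff) to Flask-Login's request-bound current_user proxy; the same mechanical substitution recurs below in the session, preferences, search-filter, token, and upload-report modules. The pattern in isolation, as a hypothetical helper:

    import flask
    import flask_login

    def current_user_or_401():
        # Sketch of the replacement pattern; the old code read flask.g.user
        # and compared it against None instead.
        if flask_login.current_user.is_anonymous:
            flask.abort(401)
        return flask_login.current_user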
40 | 40 | if obj.end_date: |
41 | 41 | return (obj.end_date - obj.start_date).seconds + ((obj.end_date - obj.start_date).microseconds / 1000000.0) |
42 | 42 | else: |
43 | if (datetime.datetime.now() - obj.start_date).total_seconds() > 86400:# 86400 is 1d TODO BY CONFIG | |
43 | if (datetime.datetime.now() - obj.start_date).total_seconds() > 86400: # 86400 is 1d TODO BY CONFIG | |
44 | 44 | return 'Timeout' |
45 | 45 | return 'In progress' |
46 | 46 | |
119 | 119 | 200: |
120 | 120 | description: Last executed command or an empty json |
121 | 121 | """ |
122 | command = Command.query.join(Workspace).filter_by(name=workspace_name).order_by(Command.start_date.desc()).first() | |
122 | command = Command.query.join(Workspace).filter_by(name=workspace_name).order_by( | |
123 | Command.start_date.desc()).first() | |
123 | 124 | command_obj = {} |
124 | 125 | if command: |
125 | 126 | command_obj = { |
0 | ||
1 | 0 | import logging |
2 | 1 | from io import BytesIO |
3 | from lxml.etree import Element, SubElement, tostring # nosec | |
2 | from lxml.etree import Element, SubElement, tostring # nosec | |
4 | 3 | # We don't use Element for parsing |
5 | 4 | from flask import Blueprint, request, abort, send_file |
6 | 5 | |
81 | 80 | web_services.add(vuln_web.service) |
82 | 81 | web_vuln_tag = SubElement(web_vulns_tag, 'web_vuln') |
83 | 82 | _build_vuln_web_element(vuln_web, web_vuln_tag) |
84 | ||
85 | 83 | |
86 | 84 | for vuln in host.vulnerabilities: |
87 | 85 | vuln_tag = SubElement(vulns_tag, 'vuln') |
10 | 10 | exploits_api = Blueprint('exploits_api', __name__) |
11 | 11 | |
12 | 12 | logger = logging.getLogger(__name__) |
13 | ||
13 | 14 | |
14 | 15 | @gzipped |
15 | 16 | @exploits_api.route('/v2/vulners/exploits/<cveid>', methods=['GET']) |
14 | 14 | 'messages': 'error', |
15 | 15 | }), 500 |
16 | 16 | |
17 | ||
18 | ||
19 | ||
20 | #.register(commandsrun_api) | |
21 | # I'm Py3 |
17 | # .register(commandsrun_api) |
132 | 132 | operators = (operators.Equal, operators.Like, operators.ILike) |
133 | 133 | service = ServiceNameFilter(fields.Str()) |
134 | 134 | port = ServicePortFilter(fields.Str()) |
135 | ||
136 | 135 | |
137 | 136 | |
138 | 137 | class HostsView(PaginatedMixin, |
378 | 377 | Service.name.ilike(like_term)) |
379 | 378 | match_os = Host.os.ilike(like_term) |
380 | 379 | match_hostname = Host.hostnames.any(Hostname.name.ilike(like_term)) |
381 | query = query.filter(match_ip | | |
382 | match_service_name | | |
383 | match_os | | |
384 | match_hostname) | |
380 | query = query.filter(match_ip | |
381 | | match_service_name | |
382 | | match_os | |
383 | | match_hostname) | |
385 | 384 | return query |
386 | 385 | |
387 | 386 | def _envelope_list(self, objects, pagination_metadata=None): |
464 | 463 | bulk_create.__doc__ = HostsView.bulk_create.__doc__ |
465 | 464 | count_vulns.__doc__ = HostsView.count_vulns.__doc__ |
466 | 465 | |
466 | ||
467 | 467 | HostsView.register(host_api) |
468 | 468 | HostsV3View.register(host_api) |
9 | 9 | |
10 | 10 | |
11 | 11 | info_api = Blueprint('info_api', __name__) |
12 | ||
12 | 13 | |
13 | 14 | @info_api.route('/v2/info', methods=['GET']) |
14 | 15 | def show_info(): |
0 | 0 | from faraday.server.api.base import GenericView |
1 | 1 | from faraday.server.models import User, db |
2 | from flask import Blueprint, request, jsonify, g, abort | |
2 | from flask import Blueprint, request, jsonify, abort | |
3 | 3 | from marshmallow import Schema, fields |
4 | import flask_login | |
4 | 5 | |
5 | 6 | preferences_api = Blueprint('preferences_api', __name__) |
6 | 7 | |
24 | 25 | 200: |
25 | 26 | description: Ok |
26 | 27 | """ |
27 | user = g.user | |
28 | user = flask_login.current_user | |
28 | 29 | |
29 | 30 | if request.json and 'preferences' not in request.json: |
30 | 31 | abort(400) |
46 | 47 | 200: |
47 | 48 | description: Ok |
48 | 49 | """ |
49 | return jsonify({'preferences': g.user.preferences}), 200 | |
50 | return jsonify({'preferences': flask_login.current_user.preferences}), 200 | |
50 | 51 | |
51 | 52 | |
52 | 53 | class PreferencesV3View(PreferencesView): |
0 | 0 | # Faraday Penetration Test IDE |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | from flask import Blueprint, g | |
3 | from flask import Blueprint | |
4 | 4 | from marshmallow import fields |
5 | import flask_login | |
5 | 6 | |
6 | 7 | from faraday.server.models import SearchFilter |
7 | 8 | from faraday.server.api.base import ( |
30 | 31 | |
31 | 32 | def _get_base_query(self): |
32 | 33 | query = super()._get_base_query() |
33 | return query.filter(SearchFilter.creator_id == g.user.id) | |
34 | return query.filter(SearchFilter.creator_id == flask_login.current_user.id) | |
34 | 35 | |
35 | 36 | |
36 | 37 | class SearchFilterV3View(SearchFilterView, PatchableMixin): |
138 | 138 | route_prefix = '/v3/ws/<workspace_name>/' |
139 | 139 | trailing_slash = False |
140 | 140 | |
141 | ||
141 | 142 | ServiceView.register(services_api) |
142 | 143 | ServiceV3View.register(services_api) |
6 | 6 | from flask import jsonify, Blueprint |
7 | 7 | from flask_wtf.csrf import generate_csrf |
8 | 8 | from faraday.server.api.base import get_user_permissions |
9 | import flask | |
9 | import flask_login | |
10 | 10 | |
11 | 11 | session_api = Blueprint('session_api', __name__) |
12 | ||
12 | 13 | |
13 | 14 | @session_api.route('/session') |
14 | 15 | def session_info(): |
21 | 22 | 200: |
22 | 23 | description: Ok |
23 | 24 | """ |
24 | user = flask.g.user | |
25 | user = flask_login.current_user | |
25 | 26 | data = user.get_security_payload() |
26 | 27 | data['csrf_token'] = generate_csrf() |
27 | 28 | data['preferences'] = user.preferences |
1 | 1 | import logging |
2 | 2 | |
3 | 3 | from itsdangerous import TimedJSONWebSignatureSerializer |
4 | from flask import Blueprint, g, request | |
4 | from flask import Blueprint, request | |
5 | 5 | from flask_security.utils import hash_data |
6 | 6 | from flask import current_app as app |
7 | 7 | from marshmallow import Schema |
8 | import flask_login | |
8 | 9 | |
9 | 10 | from faraday.server.config import faraday_server |
10 | 11 | from faraday.server.api.base import GenericView |
32 | 33 | 200: |
33 | 34 | description: Ok |
34 | 35 | """ |
35 | user_id = g.user.id | |
36 | user_id = flask_login.current_user.fs_uniquifier | |
36 | 37 | serializer = TimedJSONWebSignatureSerializer( |
37 | 38 | app.config['SECRET_KEY'], |
38 | 39 | salt="api_token", |
39 | 40 | expires_in=int(faraday_server.api_token_expiration) |
40 | 41 | ) |
41 | hashed_data = hash_data(g.user.password) if g.user.password else None | |
42 | hashed_data = hash_data(flask_login.current_user.password) if flask_login.current_user.password else None | |
42 | 43 | user_ip = request.headers.get('X-Forwarded-For', request.remote_addr) |
43 | 44 | requested_at = datetime.datetime.now() |
44 | audit_logger.info(f"User [{g.user.username}] requested token from IP [{user_ip}] at [{requested_at}]") | |
45 | audit_logger.info(f"User [{flask_login.current_user.username}] requested token from IP [{user_ip}] at [{requested_at}]") | |
45 | 46 | return serializer.dumps({'user_id': user_id, "validation_check": hashed_data}).decode('utf-8') |
46 | 47 | |
47 | 48 |
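Editor's note: the verification side of this token lives in app.py's request loader, shown later in this diff; condensed here for reference. It deserializes the payload, resolves the user by fs_uniquifier (which replaced the numeric id in the payload), and re-checks the password hash so tokens minted before a password change stop working:

    from itsdangerous import BadSignature, TimedJSONWebSignatureSerializer
    from flask_security.utils import verify_hash
    from faraday.server.models import User

    def load_user_from_api_token(token, secret_key):
        serializer = TimedJSONWebSignatureSerializer(secret_key, salt="api_token")
        try:
            data = serializer.loads(token)  # raises BadSignature if tampered or expired
        except BadSignature:
            return None
        user = User.query.filter_by(fs_uniquifier=data['user_id']).first()
        if not user or not verify_hash(data['validation_check'], user.password):
            return None  # token predates a password change
        return user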
4 | 4 | import random |
5 | 5 | import logging |
6 | 6 | from datetime import datetime |
7 | import flask_login | |
7 | 8 | |
8 | 9 | from faraday.server.config import CONST_FARADAY_HOME_PATH |
9 | 10 | from faraday.server.threads.reports_processor import REPORTS_QUEUE |
14 | 15 | jsonify, |
15 | 16 | Blueprint, |
16 | 17 | ) |
17 | import flask | |
18 | 18 | |
19 | 19 | from flask_wtf.csrf import validate_csrf |
20 | 20 | from werkzeug.utils import secure_filename |
74 | 74 | if report_file: |
75 | 75 | |
76 | 76 | chars = string.ascii_uppercase + string.digits |
77 | random_prefix = ''.join(random.choice(chars) for x in range(12)) # nosec | |
77 | random_prefix = ''.join(random.choice(chars) for x in range(12)) # nosec | |
78 | 78 | raw_report_filename = f'{random_prefix}_{secure_filename(report_file.filename)}' |
79 | 79 | |
80 | 80 | try: |
85 | 85 | except AttributeError: |
86 | 86 | logger.warning( |
87 | 87 | "Upload reports in WEB-UI not configured, run Faraday client and try again...")
88 | abort(make_response(jsonify(message="Upload reports not configurated: Run faraday client and start Faraday server again"), 500)) | |
88 | abort(make_response( | |
89 | jsonify(message="Upload reports not configured: Run faraday client and start Faraday server again"), |
90 | 500)) | |
89 | 91 | else: |
90 | 92 | logger.info(f"Get plugin for file: {file_path}") |
91 | 93 | plugin = report_analyzer.get_plugin(file_path) |
115 | 117 | command.id, |
116 | 118 | file_path, |
117 | 119 | plugin.id, |
118 | flask.g.user.id | |
120 | flask_login.current_user.id | |
119 | 121 | ) |
120 | 122 | ) |
121 | 123 | return make_response( |
20 | 20 | from marshmallow.validate import OneOf |
21 | 21 | import wtforms |
22 | 22 | |
23 | ||
24 | 23 | from faraday.server.api.base import ( |
25 | 24 | AutoSchema, |
26 | 25 | FilterAlchemyMixin, |
60 | 59 | _id = fields.Integer(dump_only=True, attribute='id') |
61 | 60 | id = fields.Integer(dump_only=True, attribute='id') |
62 | 61 | _rev = fields.String(default='', dump_only=True) |
63 | cwe = fields.String(dump_only=True, default='') # deprecated field, the legacy data is added to refs on import | |
62 | cwe = fields.String(dump_only=True, default='') # deprecated field, the legacy data is added to refs on import | |
64 | 63 | exploitation = SeverityField(attribute='severity', required=True) |
65 | 64 | references = fields.Method('get_references', deserialize='load_references') |
66 | 65 | refs = fields.List(fields.String(), dump_only=True, attribute='references') |
77 | 76 | creator_id = fields.Integer(dump_only=True, attribute='creator_id') |
78 | 77 | |
79 | 78 | create_at = fields.DateTime(attribute='create_date', |
80 | dump_only=True) | |
79 | dump_only=True) | |
81 | 80 | |
82 | 81 | # Here we use vulnerability instead of vulnerability_template to avoid duplicate row |
83 | 82 | # in the custom_fields_schema table. |
284 | 283 | status_code |
285 | 284 | ) |
286 | 285 | |
287 | ||
288 | 286 | def _parse_vuln_from_file(self, vulns_reader): |
289 | 287 | custom_fields = {cf_schema.field_name: cf_schema for cf_schema in db.session.query(CustomFieldsSchema).all()} |
290 | 288 | vulns_list = [] |
123 | 123 | policyviolations = fields.List(fields.String, |
124 | 124 | attribute='policy_violations') |
125 | 125 | refs = fields.List(fields.String(), attribute='references') |
126 | owasp = fields.Method(serialize='get_owasp_refs', default=[]) | |
127 | cve = fields.Method(serialize='get_cve_refs', default=[]) | |
128 | cwe = fields.Method(serialize='get_cwe_refs', default=[]) | |
129 | cvss = fields.Method(serialize='get_cvss_refs', default=[]) | |
126 | 130 | issuetracker = fields.Method(serialize='get_issuetracker', dump_only=True) |
127 | 131 | tool = fields.String(attribute='tool') |
128 | 132 | parent = fields.Method(serialize='get_parent', deserialize='load_parent', required=True) |
168 | 172 | 'service', 'obj_id', 'type', 'policyviolations', |
169 | 173 | '_attachments', |
170 | 174 | 'target', 'host_os', 'resolution', 'metadata', |
171 | 'custom_fields', 'external_id', 'tool') | |
175 | 'custom_fields', 'external_id', 'tool', | |
176 | 'cvss', 'cwe', 'cve', 'owasp', | |
177 | ) | |
172 | 178 | |
173 | 179 | def get_type(self, obj): |
174 | 180 | return obj.__class__.__name__ |
181 | ||
182 | def get_owasp_refs(self, obj): | |
183 | return [reference for reference in obj.references if 'owasp' in reference.lower()] | |
184 | ||
185 | def get_cwe_refs(self, obj): | |
186 | return [reference for reference in obj.references if 'cwe' in reference.lower()] | |
187 | ||
188 | def get_cve_refs(self, obj): | |
189 | return [reference for reference in obj.references if 'cve' in reference.lower()] | |
190 | ||
191 | def get_cvss_refs(self, obj): | |
192 | return [reference for reference in obj.references if 'cvss' in reference.lower()] | |
175 | 193 | |
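Editor's note: these new getters simply partition the flat references list by a case-insensitive substring match, for example:

    refs = [
        "CVE-2017-0144",
        "CWE-287",
        "OWASP-A07:2021",
        "CVSS: 9.8",
        "https://example.com/advisory",
    ]
    [r for r in refs if 'cve' in r.lower()]    # -> ['CVE-2017-0144']
    [r for r in refs if 'owasp' in r.lower()]  # -> ['OWASP-A07:2021']

Note the match is deliberately loose: a URL such as https://cwe.mitre.org/... would land in the cwe bucket as well.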
176 | 194 | def get_attachments(self, obj): |
177 | 195 | res = {} |
266 | 284 | |
267 | 285 | |
268 | 286 | class VulnerabilityWebSchema(VulnerabilitySchema): |
269 | ||
270 | 287 | method = fields.String(default='') |
271 | 288 | params = fields.String(attribute='parameters', default='') |
272 | 289 | pname = fields.String(attribute='parameter_name', default='') |
289 | 306 | 'service', 'obj_id', 'type', 'policyviolations', |
290 | 307 | 'request', '_attachments', 'params', |
291 | 308 | 'target', 'host_os', 'resolution', 'method', 'metadata', |
292 | 'status_code', 'custom_fields', 'external_id', 'tool' | |
309 | 'status_code', 'custom_fields', 'external_id', 'tool', | |
310 | 'cve', 'cwe', 'owasp', 'cvss', | |
293 | 311 | ) |
294 | 312 | |
295 | 313 | |
335 | 353 | return query.join( |
336 | 354 | alias, |
337 | 355 | alias.id == model.__table__.c.service_id).filter( |
338 | alias.name == value | |
356 | alias.name == value | |
339 | 357 | ) |
340 | 358 | |
341 | 359 | |
345 | 363 | |
346 | 364 | value_list = value.split(",") |
347 | 365 | |
348 | service_hostnames_query = query.join(Service, Service.id == Vulnerability.service_id).\ | |
349 | join(Host).\ | |
350 | join(alias).\ | |
351 | filter(alias.name.in_(value_list)) | |
352 | ||
353 | host_hostnames_query = query.join(Host, Host.id == Vulnerability.host_id).\ | |
354 | join(alias).\ | |
366 | service_hostnames_query = query.join(Service, Service.id == Vulnerability.service_id). \ | |
367 | join(Host). \ | |
368 | join(alias). \ | |
369 | filter(alias.name.in_(value_list)) | |
370 | ||
371 | host_hostnames_query = query.join(Host, Host.id == Vulnerability.host_id). \ | |
372 | join(alias). \ | |
355 | 373 | filter(alias.name.in_(value_list)) |
356 | 374 | |
357 | 375 | query = service_hostnames_query.union(host_hostnames_query) |
395 | 413 | field: _strict_filtering for field in strict_fields |
396 | 414 | } |
397 | 415 | operators = (CustomILike, operators.Equal) |
416 | ||
398 | 417 | id = IDFilter(fields.Int()) |
399 | 418 | target = TargetFilter(fields.Str()) |
400 | 419 | type = TypeFilter(fields.Str(validate=[OneOf(['Vulnerability', |
442 | 461 | |
443 | 462 | if command_id: |
444 | 463 | # query = query.filter(CommandObject.command_id == int(command_id)) |
445 | query = query.filter(VulnerabilityGeneric.creator_command_id == | |
446 | int(command_id)) # TODO migration: handle invalid int() | |
464 | query = query.filter(VulnerabilityGeneric.creator_command_id | |
465 | == int(command_id)) # TODO migration: handle invalid int() | |
447 | 466 | return query |
448 | 467 | |
449 | 468 | |
451 | 470 | FilterAlchemyMixin, |
452 | 471 | ReadWriteWorkspacedView, |
453 | 472 | CountMultiWorkspacedMixin): |
454 | ||
455 | 473 | route_base = 'vulns' |
456 | 474 | filterset_class = VulnerabilityFilterSet |
457 | 475 | sort_model_class = VulnerabilityWeb # It has all the fields |
534 | 552 | ) |
535 | 553 | |
536 | 554 | def _update_object(self, obj, data, **kwargs): |
537 | data.pop('type', '') # It's forbidden to change vuln type! | |
555 | data.pop('type', '') # It's forbidden to change vuln type! | |
538 | 556 | data.pop('tool', '') |
539 | 557 | return super()._update_object(obj, data) |
540 | 558 | |
557 | 575 | *args, **kwargs) |
558 | 576 | joinedloads = [ |
559 | 577 | joinedload(Vulnerability.host) |
560 | .load_only(Host.id) # Only hostnames are needed | |
561 | .joinedload(Host.hostnames), | |
578 | .load_only(Host.id) # Only hostnames are needed | |
579 | .joinedload(Host.hostnames), | |
562 | 580 | |
563 | 581 | joinedload(Vulnerability.service) |
564 | .joinedload(Service.host) | |
565 | .joinedload(Host.hostnames), | |
582 | .joinedload(Service.host) | |
583 | .joinedload(Host.hostnames), | |
566 | 584 | |
567 | 585 | joinedload(VulnerabilityWeb.service) |
568 | .joinedload(Service.host) | |
569 | .joinedload(Host.hostnames), | |
586 | .joinedload(Service.host) | |
587 | .joinedload(Host.hostnames), | |
570 | 588 | joinedload(VulnerabilityGeneric.update_user), |
571 | 589 | undefer(VulnerabilityGeneric.creator_command_id), |
572 | 590 | undefer(VulnerabilityGeneric.creator_command_tool), |
684 | 702 | flask.abort(403) |
685 | 703 | vuln_workspace_check = db.session.query(VulnerabilityGeneric, Workspace.id).join( |
686 | 704 | Workspace).filter(VulnerabilityGeneric.id == vuln_id, |
687 | Workspace.name == workspace_name).first() | |
705 | Workspace.name == workspace_name).first() | |
688 | 706 | |
689 | 707 | if vuln_workspace_check: |
690 | 708 | if 'file' not in request.files: |
776 | 794 | return res_filters, hostname_filters |
777 | 795 | |
778 | 796 | def _generate_filter_query(self, vulnerability_class, filters, hostname_filters, workspace, marshmallow_params): |
779 | hosts_os_filter = [host_os_filter for host_os_filter in filters.get('filters', []) if host_os_filter.get('name') == 'host__os'] | |
797 | hosts_os_filter = [host_os_filter for host_os_filter in filters.get('filters', []) if | |
798 | host_os_filter.get('name') == 'host__os'] | |
780 | 799 | |
781 | 800 | if hosts_os_filter: |
782 | 801 | # remove host__os filters from filters due to a bug |
783 | 802 | hosts_os_filter = hosts_os_filter[0] |
784 | filters['filters'] = [host_os_filter for host_os_filter in filters.get('filters', []) if host_os_filter.get('name') != 'host__os'] | |
803 | filters['filters'] = [host_os_filter for host_os_filter in filters.get('filters', []) if | |
804 | host_os_filter.get('name') != 'host__os'] | |
785 | 805 | |
786 | 806 | vulns = search(db.session, |
787 | 807 | vulnerability_class, |
788 | 808 | filters) |
789 | vulns = vulns.filter(VulnerabilityGeneric.workspace==workspace) | |
790 | ||
809 | vulns = vulns.filter(VulnerabilityGeneric.workspace == workspace) | |
791 | 810 | if hostname_filters: |
792 | 811 | or_filters = [] |
793 | 812 | for hostname_filter in hostname_filters: |
799 | 818 | |
800 | 819 | if hosts_os_filter: |
801 | 820 | os_value = hosts_os_filter['val'] |
802 | vulns = vulns.join(Host).join(Service).filter(Host.os==os_value) | |
821 | vulns = vulns.join(Host).join(Service).filter(Host.os == os_value) | |
803 | 822 | |
804 | 823 | if 'group_by' not in filters: |
805 | 824 | vulns = vulns.options( |
815 | 834 | filters = FlaskRestlessSchema().load(json.loads(filters)) or {} |
816 | 835 | hostname_filters = [] |
817 | 836 | if filters: |
818 | _, hostname_filters = self._hostname_filters(filters.get('filters', [])) | |
837 | filters['filters'], hostname_filters = self._hostname_filters(filters.get('filters', [])) | |
819 | 838 | except (ValidationError, JSONDecodeError) as ex: |
820 | 839 | logger.exception(ex) |
821 | 840 | flask.abort(400, "Invalid filters") |
828 | 847 | if 'offset' in filters: |
829 | 848 | offset = filters.pop('offset') |
830 | 849 | if 'limit' in filters: |
831 | limit = filters.pop('limit') # we need to remove pagination, since | |
832 | ||
833 | vulns = self._generate_filter_query( | |
834 | VulnerabilityGeneric, | |
835 | filters, | |
836 | hostname_filters, | |
837 | workspace, | |
838 | marshmallow_params) | |
850 | limit = filters.pop('limit') # we need to remove pagination, since | |
851 | ||
852 | try: | |
853 | vulns = self._generate_filter_query( | |
854 | VulnerabilityGeneric, | |
855 | filters, | |
856 | hostname_filters, | |
857 | workspace, | |
858 | marshmallow_params) | |
859 | except AttributeError as e: | |
860 | flask.abort(400, e) | |
839 | 861 | total_vulns = vulns |
840 | 862 | if limit: |
841 | 863 | vulns = vulns.limit(limit) |
853 | 875 | workspace, |
854 | 876 | marshmallow_params, |
855 | 877 | ) |
856 | column_names = ['count'] + [field['field'] for field in filters.get('group_by',[])] | |
878 | column_names = ['count'] + [field['field'] for field in filters.get('group_by', [])] | |
857 | 879 | rows = [list(zip(column_names, row)) for row in vulns.all()] |
858 | 880 | vulns_data = [] |
859 | 881 | for row in rows: |
860 | vulns_data.append({field[0]:field[1] for field in row}) | |
882 | vulns_data.append({field[0]: field[1] for field in row}) | |
861 | 883 | |
862 | 884 | return vulns_data, len(rows) |
863 | 885 | |
882 | 904 | |
883 | 905 | if vuln_workspace_check: |
884 | 906 | file_obj = db.session.query(File).filter_by(object_type='vulnerability', |
885 | object_id=vuln_id, | |
886 | filename=attachment_filename.replace(" ", "%20")).first() | |
907 | object_id=vuln_id, | |
908 | filename=attachment_filename.replace(" ", "%20")).first() | |
887 | 909 | if file_obj: |
888 | 910 | depot = DepotManager.get() |
889 | 911 | depot_file = depot.get(file_obj.content.get('file_id')) |
932 | 954 | Workspace.name == workspace.name).first() |
933 | 955 | if vuln_workspace_check: |
934 | 956 | files = db.session.query(File).filter_by(object_type='vulnerability', |
935 | object_id=vuln_id).all() | |
957 | object_id=vuln_id).all() | |
936 | 958 | res = {} |
937 | 959 | for file_obj in files: |
938 | 960 | ret = EvidenceSchema().dump(file_obj) |
941 | 963 | return flask.jsonify(res) |
942 | 964 | else: |
943 | 965 | flask.abort(404, "Vulnerability not found") |
944 | ||
945 | 966 | |
946 | 967 | @route('/<int:vuln_id>/attachment/<attachment_filename>/', methods=['DELETE']) |
947 | 968 | def delete_attachment(self, workspace_name, vuln_id, attachment_filename): |
1010 | 1031 | as_attachment=True, |
1011 | 1032 | cache_timeout=-1) |
1012 | 1033 | |
1013 | ||
1014 | 1034 | @route('bulk_delete/', methods=['DELETE']) |
1015 | 1035 | def bulk_delete(self, workspace_name): |
1016 | 1036 | """ |
1039 | 1059 | if vulnerability_ids: |
1040 | 1060 | logger.info("Delete Vuln IDs: %s", vulnerability_ids) |
1041 | 1061 | vulns = VulnerabilityGeneric.query.filter(VulnerabilityGeneric.id.in_(vulnerability_ids), |
1042 | VulnerabilityGeneric.workspace_id == workspace.id) | |
1062 | VulnerabilityGeneric.workspace_id == workspace.id) | |
1043 | 1063 | elif vulnerability_severities: |
1044 | 1064 | logger.info("Delete Vuln Severities: %s", vulnerability_severities) |
1045 | 1065 | vulns = VulnerabilityGeneric.query.filter(VulnerabilityGeneric.severity.in_(vulnerability_severities), |
1071 | 1091 | """ |
1072 | 1092 | limit = flask.request.args.get('limit', 1) |
1073 | 1093 | workspace = self._get_workspace(workspace_name) |
1074 | data = db.session.query(User, func.count(VulnerabilityGeneric.id)).join(VulnerabilityGeneric.creator)\ | |
1075 | .filter(VulnerabilityGeneric.workspace_id == workspace.id).group_by(User.id)\ | |
1094 | data = db.session.query(User, func.count(VulnerabilityGeneric.id)).join(VulnerabilityGeneric.creator) \ | |
1095 | .filter(VulnerabilityGeneric.workspace_id == workspace.id).group_by(User.id) \ | |
1076 | 1096 | .order_by(desc(func.count(VulnerabilityGeneric.id))).limit(int(limit)).all() |
1077 | 1097 | users = [] |
1078 | 1098 | for item in data: |
4 | 4 | import flask |
5 | 5 | from flask import Blueprint |
6 | 6 | from flask import current_app as app |
7 | from flask_classful import route | |
7 | 8 | from itsdangerous import BadData, TimestampSigner |
8 | 9 | from marshmallow import Schema |
9 | 10 | from sqlalchemy.orm.exc import NoResultFound |
24 | 25 | route_base = 'websocket_token' |
25 | 26 | schema_class = WebsocketWorkspaceAuthSchema |
26 | 27 | |
27 | def post(self, workspace_name): | |
28 | @route('/', methods=['GET', 'POST']) | |
29 | def get(self, workspace_name): | |
28 | 30 | """ |
29 | 31 | --- |
30 | post: | |
32 | get: | |
31 | 33 | tags: ["Token"] |
32 | 34 | responses: |
33 | 35 | 200: |
42 | 44 | class WebsocketWorkspaceAuthV3View(WebsocketWorkspaceAuthView): |
43 | 45 | route_prefix = "/v3/ws/<workspace_name>/" |
44 | 46 | trailing_slash = False |
47 | ||
48 | @route('', methods=['GET', 'POST']) | |
49 | def get(self, workspace_name): | |
50 | """ | |
51 | --- | |
52 | get: | |
53 | tags: ["Token"] | |
54 | responses: | |
55 | 200: | |
56 | description: Ok | |
57 | """ | |
58 | return super().get(workspace_name) | |
45 | 59 | |
46 | 60 | |
47 | 61 | WebsocketWorkspaceAuthView.register(websocket_auth_api) |
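Editor's note: with the handler above answering GET as well as POST, a client can now fetch a websocket token with a plain authenticated GET. A hedged sketch using requests; the default port, the /_api prefix, the login payload, and the workspace name are assumptions not shown in this diff:

    import requests

    BASE = "http://localhost:5985/_api"  # assumed default port and API prefix
    session = requests.Session()
    # Assumed login endpoint and payload; adjust per deployment.
    session.post(f"{BASE}/login", json={"email": "faraday", "password": "changeme"})
    token = session.get(f"{BASE}/v3/ws/demo_workspace/websocket_token").json()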
28 | 28 | logger = logging.getLogger(__name__) |
29 | 29 | |
30 | 30 | workspace_api = Blueprint('workspace_api', __name__) |
31 | ||
32 | 31 | |
33 | 32 | |
34 | 33 | class WorkspaceSummarySchema(Schema): |
66 | 65 | class WorkspaceSchema(AutoSchema): |
67 | 66 | |
68 | 67 | name = fields.String(required=True, |
69 | validate=validate.Regexp(r"^[a-z0-9][a-z0-9\_\$\(\)\+\-\/]*$", 0, | |
68 | validate=validate.Regexp(r"^[a-z0-9][a-z0-9\_\$\(\)\+\-]*$", 0, | |
70 | 69 | error="The workspace name must validate with the regex " |
71 | 70 | "^[a-z0-9][a-z0-9\\_\\$\\(\\)\\+\\-]*$"))
72 | 71 | stats = SelfNestedField(WorkspaceSummarySchema()) |
85 | 84 | dump_only=True) |
86 | 85 | |
87 | 86 | active_agents_count = fields.Integer(dump_only=True) |
88 | ||
89 | 87 | |
90 | 88 | class Meta: |
91 | 89 | model = Workspace |
4 | 4 | import string |
5 | 5 | import datetime |
6 | 6 | |
7 | import bleach | |
7 | 8 | import pyotp |
8 | 9 | import requests |
9 | 10 | from flask_limiter import Limiter |
16 | 17 | from configparser import ConfigParser, NoSectionError, NoOptionError, DuplicateSectionError |
17 | 18 | |
18 | 19 | import flask |
20 | import flask_login | |
19 | 21 | from flask import Flask, session, g, request |
20 | 22 | from flask.json import JSONEncoder |
21 | 23 | from flask_sqlalchemy import get_debug_queries |
43 | 45 | from faraday.server.utils.logger import LOGGING_HANDLERS |
44 | 46 | from faraday.server.utils.invalid_chars import remove_null_caracters |
45 | 47 | from faraday.server.config import CONST_FARADAY_HOME_PATH |
46 | ||
47 | 48 | |
48 | 49 | logger = logging.getLogger(__name__) |
49 | 50 | audit_logger = logging.getLogger('audit') |
68 | 69 | |
69 | 70 | |
70 | 71 | def register_blueprints(app): |
71 | ||
72 | from faraday.server.api.modules.info import info_api # pylint:disable=import-outside-toplevel | |
73 | from faraday.server.api.modules.commandsrun import commandsrun_api # pylint:disable=import-outside-toplevel | |
74 | from faraday.server.api.modules.activity_feed import activityfeed_api # pylint:disable=import-outside-toplevel | |
75 | from faraday.server.api.modules.credentials import credentials_api # pylint:disable=import-outside-toplevel | |
76 | from faraday.server.api.modules.hosts import host_api # pylint:disable=import-outside-toplevel | |
77 | from faraday.server.api.modules.licenses import license_api # pylint:disable=import-outside-toplevel | |
78 | from faraday.server.api.modules.services import services_api # pylint:disable=import-outside-toplevel | |
79 | from faraday.server.api.modules.session import session_api # pylint:disable=import-outside-toplevel | |
80 | from faraday.server.api.modules.vulns import vulns_api # pylint:disable=import-outside-toplevel | |
81 | from faraday.server.api.modules.vulnerability_template import vulnerability_template_api # pylint:disable=import-outside-toplevel | |
82 | from faraday.server.api.modules.workspaces import workspace_api # pylint:disable=import-outside-toplevel | |
83 | from faraday.server.api.modules.handlers import handlers_api # pylint:disable=import-outside-toplevel | |
84 | from faraday.server.api.modules.comments import comment_api # pylint:disable=import-outside-toplevel | |
85 | from faraday.server.api.modules.upload_reports import upload_api # pylint:disable=import-outside-toplevel | |
86 | from faraday.server.api.modules.websocket_auth import websocket_auth_api # pylint:disable=import-outside-toplevel | |
87 | from faraday.server.api.modules.get_exploits import exploits_api # pylint:disable=import-outside-toplevel | |
88 | from faraday.server.api.modules.custom_fields import custom_fields_schema_api # pylint:disable=import-outside-toplevel | |
89 | from faraday.server.api.modules.agent_auth_token import agent_auth_token_api # pylint:disable=import-outside-toplevel | |
90 | from faraday.server.api.modules.agent import agent_api # pylint:disable=import-outside-toplevel | |
91 | from faraday.server.api.modules.bulk_create import bulk_create_api # pylint:disable=import-outside-toplevel | |
92 | from faraday.server.api.modules.token import token_api # pylint:disable=import-outside-toplevel | |
93 | from faraday.server.api.modules.search_filter import searchfilter_api # pylint:disable=import-outside-toplevel | |
72 | from faraday.server.api.modules.info import info_api # pylint:disable=import-outside-toplevel | |
73 | from faraday.server.api.modules.commandsrun import commandsrun_api # pylint:disable=import-outside-toplevel | |
74 | from faraday.server.api.modules.activity_feed import activityfeed_api # pylint:disable=import-outside-toplevel | |
75 | from faraday.server.api.modules.credentials import credentials_api # pylint:disable=import-outside-toplevel | |
76 | from faraday.server.api.modules.hosts import host_api # pylint:disable=import-outside-toplevel | |
77 | from faraday.server.api.modules.licenses import license_api # pylint:disable=import-outside-toplevel | |
78 | from faraday.server.api.modules.services import services_api # pylint:disable=import-outside-toplevel | |
79 | from faraday.server.api.modules.session import session_api # pylint:disable=import-outside-toplevel | |
80 | from faraday.server.api.modules.vulns import vulns_api # pylint:disable=import-outside-toplevel | |
81 | from faraday.server.api.modules.vulnerability_template import \ | |
82 | vulnerability_template_api # pylint:disable=import-outside-toplevel | |
83 | from faraday.server.api.modules.workspaces import workspace_api # pylint:disable=import-outside-toplevel | |
84 | from faraday.server.api.modules.handlers import handlers_api # pylint:disable=import-outside-toplevel | |
85 | from faraday.server.api.modules.comments import comment_api # pylint:disable=import-outside-toplevel | |
86 | from faraday.server.api.modules.upload_reports import upload_api # pylint:disable=import-outside-toplevel | |
87 | from faraday.server.api.modules.websocket_auth import websocket_auth_api # pylint:disable=import-outside-toplevel | |
88 | from faraday.server.api.modules.get_exploits import exploits_api # pylint:disable=import-outside-toplevel | |
89 | from faraday.server.api.modules.custom_fields import \ | |
90 | custom_fields_schema_api # pylint:disable=import-outside-toplevel | |
91 | from faraday.server.api.modules.agent_auth_token import \ | |
92 | agent_auth_token_api # pylint:disable=import-outside-toplevel | |
93 | from faraday.server.api.modules.agent import agent_api # pylint:disable=import-outside-toplevel | |
94 | from faraday.server.api.modules.bulk_create import bulk_create_api # pylint:disable=import-outside-toplevel | |
95 | from faraday.server.api.modules.token import token_api # pylint:disable=import-outside-toplevel | |
96 | from faraday.server.api.modules.search_filter import searchfilter_api # pylint:disable=import-outside-toplevel | |
94 | 97 | from faraday.server.api.modules.preferences import preferences_api # pylint:disable=import-outside-toplevel |
95 | 98 | from faraday.server.api.modules.export_data import export_data_api # pylint:disable=import-outside-toplevel |
96 | #Custom reset password | |
97 | from faraday.server.api.modules.auth import auth # pylint:disable=import-outside-toplevel | |
99 | # Custom reset password | |
100 | from faraday.server.api.modules.auth import auth # pylint:disable=import-outside-toplevel | |
98 | 101 | |
99 | 102 | app.register_blueprint(commandsrun_api) |
100 | 103 | app.register_blueprint(activityfeed_api) |
145 | 148 | try: |
146 | 149 | data = serialized.loads(token) |
147 | 150 | user_id = data["user_id"] |
148 | user = User.query.filter_by(id=user_id).first() | |
151 | user = User.query.filter_by(fs_uniquifier=user_id).first() | |
149 | 152 | if not user or not verify_hash(data['validation_check'], user.password): |
150 | 153 | logger.warn('Invalid authentication token. token invalid after password change') |
151 | 154 | return None |
155 | 158 | except BadSignature: |
156 | 159 | return None # invalid token |
157 | 160 | |
158 | ||
159 | @app.before_request | |
160 | def default_login_required(): # pylint:disable=unused-variable | |
161 | view = app.view_functions.get(flask.request.endpoint) | |
162 | ||
161 | @app.login_manager.request_loader | |
162 | def load_user_from_request(request): | |
163 | 163 | if app.config['SECURITY_TOKEN_AUTHENTICATION_HEADER'] in flask.request.headers: |
164 | 164 | header = flask.request.headers[app.config['SECURITY_TOKEN_AUTHENTICATION_HEADER']] |
165 | 165 | try: |
173 | 173 | if not user: |
174 | 174 | logger.warn('Invalid authentication token.') |
175 | 175 | flask.abort(401) |
176 | logged_in = True | |
176 | else: | |
177 | return user | |
177 | 178 | elif auth_type == 'agent': |
178 | 179 | # Don't handle the agent logic here, do it in another |
179 | 180 | # before_request handler |
180 | logged_in = False | |
181 | return None | |
182 | elif auth_type == "basic": | |
183 | username = flask.request.authorization.get('username', '') | |
184 | password = flask.request.authorization.get('password', '') | |
185 | user = User.query.filter_by(username=username).first() | |
186 | if user and user.verify_and_update_password(password): | |
187 | return user | |
181 | 188 | else: |
182 | 189 | logger.warn("Invalid authorization type") |
183 | 190 | flask.abort(401) |
184 | else: | |
185 | # TODO use public flask_login functions | |
186 | logged_in = '_user_id' in flask.session | |
187 | user_id = session.get("_user_id") | |
188 | if logged_in: | |
189 | user = User.query.filter_by(id=user_id).first() | |
190 | ||
191 | if logged_in: | |
192 | assert user | |
193 | ||
194 | if not logged_in and not getattr(view, 'is_public', False) \ | |
191 | ||
192 | # finally, return None if both methods did not login the user | |
193 | return None | |
194 | ||
195 | @app.before_request | |
196 | def default_login_required(): # pylint:disable=unused-variable | |
197 | view = app.view_functions.get(flask.request.endpoint) | |
198 | ||
199 | if flask_login.current_user.is_anonymous and not getattr(view, 'is_public', False) \ | |
195 | 200 | and flask.request.method != 'OPTIONS': |
196 | 201 | flask.abort(401) |
197 | 202 | |
198 | g.user = None | |
199 | if logged_in: | |
200 | g.user = user | |
201 | if user is None: | |
202 | logger.warn(f"Unknown user id {session['_user_id']}") | |
203 | del flask.session['_user_id'] | |
204 | flask.abort(401) # 403 would be better but breaks the web ui | |
205 | return | |
206 | ||
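Editor's note: the refactor above moves authentication out of an imperative before_request hook and into Flask-Login's request_loader callback, whose contract is simply "return a user object or None"; returning None falls through to session-cookie auth, and the 401 enforcement shrinks to an is_anonymous check. A self-contained toy illustrating the contract (standalone demo, not Faraday code):

    import flask
    import flask_login

    app = flask.Flask(__name__)
    app.secret_key = "demo"  # nosec: toy example only
    login_manager = flask_login.LoginManager(app)

    class DemoUser(flask_login.UserMixin):
        id = "demo"

    @login_manager.request_loader
    def load_user_from_request(request):
        # Return a user to authenticate this request, or None to stay anonymous.
        if request.headers.get("Authorization") == "Token letmein":
            return DemoUser()
        return None

    @app.before_request
    def default_login_required():
        view = app.view_functions.get(flask.request.endpoint)
        if flask_login.current_user.is_anonymous and not getattr(view, 'is_public', False):
            flask.abort(401)

    @app.route("/whoami")
    def whoami():
        return flask_login.current_user.id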
207 | 203 | @app.before_request |
208 | def load_g_custom_fields(): # pylint:disable=unused-variable | |
204 | def load_g_custom_fields(): # pylint:disable=unused-variable | |
209 | 205 | g.custom_fields = {} |
210 | 206 | |
211 | 207 | @app.after_request |
212 | def log_queries_count(response): # pylint:disable=unused-variable | |
208 | def log_queries_count(response): # pylint:disable=unused-variable | |
213 | 209 | if flask.request.method not in ['GET', 'HEAD']: |
214 | 210 | # We did most optimizations for read only endpoints |
215 | 211 | # TODO migrations: improve optimization and remove this if |
284 | 280 | audit_logger.info(f"User [{user.username}] logged in from IP [{user_ip}] at [{user_login_at}]") |
285 | 281 | |
286 | 282 | |
283 | def uia_username_mapper(identity): | |
284 | return bleach.clean(identity, strip=True) | |
285 | ||
286 | ||
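Editor's note: bleach.clean(..., strip=True) removes disallowed markup outright instead of escaping it, so a crafted identity value cannot smuggle HTML through the username field:

    import bleach

    # The disallowed <img> tag is dropped, leaving only the text;
    # without strip=True the markup would be HTML-escaped instead.
    bleach.clean("admin<img src=x onerror=alert(1)>", strip=True)  # -> 'admin'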
287 | 287 | def create_app(db_connection_string=None, testing=None): |
288 | ||
289 | 288 | class CustomFlask(Flask): |
290 | 289 | SKIP_RULES = [ # These endpoints will be removed for v3 |
291 | 290 | '/v3/ws/<workspace_name>/hosts/bulk_delete/', |
328 | 327 | 'SECURITY_BACKWARDS_COMPAT_AUTH_TOKEN': True, |
329 | 328 | 'SECURITY_PASSWORD_SINGLE_HASH': True, |
330 | 329 | 'WTF_CSRF_ENABLED': False, |
331 | 'SECURITY_USER_IDENTITY_ATTRIBUTES': ['username'], | |
330 | 'SECURITY_USER_IDENTITY_ATTRIBUTES': [{'username': {'mapper': uia_username_mapper}}], | |
332 | 331 | 'SECURITY_POST_LOGIN_VIEW': '/_api/session', |
333 | 332 | 'SECURITY_POST_CHANGE_VIEW': '/_api/change', |
334 | 333 | 'SECURITY_RESET_PASSWORD_TEMPLATE': '/security/reset.html', |
335 | 334 | 'SECURITY_POST_RESET_VIEW': '/', |
336 | 'SECURITY_SEND_PASSWORD_RESET_EMAIL':True, | |
337 | #For testing porpouse | |
335 | 'SECURITY_SEND_PASSWORD_RESET_EMAIL': True, | |
336 | # For testing purposes
338 | 337 | 'SECURITY_EMAIL_SENDER': "[email protected]", |
339 | 338 | 'SECURITY_CHANGEABLE': True, |
340 | 339 | 'SECURITY_SEND_PASSWORD_CHANGE_EMAIL': False, |
341 | 340 | 'SECURITY_MSG_USER_DOES_NOT_EXIST': login_failed_message, |
342 | 341 | 'SECURITY_TOKEN_AUTHENTICATION_HEADER': 'Authorization', |
343 | ||
344 | 342 | |
345 | 343 | # The line below should not be necessary because of the
346 | 344 | # CustomLoginForm, but I'll include it anyway.
360 | 358 | # 'sha256_crypt', |
361 | 359 | # 'sha512_crypt', |
362 | 360 | ], |
363 | 'PERMANENT_SESSION_LIFETIME': datetime.timedelta(hours=int(faraday.server.config.faraday_server.session_timeout or 12)), | |
361 | 'PERMANENT_SESSION_LIFETIME': datetime.timedelta( | |
362 | hours=int(faraday.server.config.faraday_server.session_timeout or 12)), | |
364 | 363 | 'SESSION_COOKIE_NAME': 'faraday_session_2', |
365 | 364 | 'SESSION_COOKIE_SAMESITE': 'Lax', |
366 | 365 | }) |
373 | 372 | |
374 | 373 | storage_path = faraday.server.config.storage.path |
375 | 374 | if not storage_path: |
376 | logger.warn('No storage section or path in the .faraday/config/server.ini. Setting the default value to .faraday/storage') | |
375 | logger.warn( | |
376 | 'No storage section or path in the .faraday/config/server.ini. Setting the default value to .faraday/storage') | |
377 | 377 | storage_path = setup_storage_path() |
378 | 378 | |
379 | 379 | if not DepotManager.get('default'): |
389 | 389 | check_testing_configuration(testing, app) |
390 | 390 | |
391 | 391 | try: |
392 | app.config['SQLALCHEMY_DATABASE_URI'] = db_connection_string or faraday.server.config.database.connection_string.strip("'") | |
392 | app.config[ | |
393 | 'SQLALCHEMY_DATABASE_URI'] = db_connection_string or faraday.server.config.database.connection_string.strip( | |
394 | "'") | |
393 | 395 | except AttributeError: |
394 | logger.info('Missing [database] section on server.ini. Please configure the database before running the server.') | |
396 | logger.info( | |
397 | 'Missing [database] section on server.ini. Please configure the database before running the server.') | |
395 | 398 | except NoOptionError: |
396 | logger.info('Missing connection_string on [database] section on server.ini. Please configure the database before running the server.') | |
397 | ||
398 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
399 | logger.info( | |
400 | 'Missing connection_string on [database] section on server.ini. Please configure the database before running the server.') | |
401 | ||
402 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
399 | 403 | db.init_app(app) |
400 | #Session(app) | |
404 | # Session(app) | |
401 | 405 | |
402 | 406 | # Setup Flask-Security |
403 | 407 | app.user_datastore = SQLAlchemyUserDatastore( |
467 | 471 | return False |
468 | 472 | self.email.data = remove_null_caracters(self.email.data) |
469 | 473 | |
470 | self.user = _datastore.get_user(self.email.data) | |
474 | self.user = _datastore.find_user(username=self.email.data) | |
471 | 475 | |
472 | 476 | if self.user is None: |
473 | 477 | audit_logger.warning(f"Invalid Login - User [{self.email.data}] from IP [{user_ip}] at [{time_now}] - " |
7 | 7 | from apispec import APISpec |
8 | 8 | from apispec.ext.marshmallow import MarshmallowPlugin |
9 | 9 | from apispec_webframeworks.flask import FlaskPlugin |
10 | from faraday.server.web import app | |
10 | from faraday.server.web import get_app | |
11 | 11 | import json |
12 | 12 | |
13 | 13 | from faraday.utils.faraday_openapi_plugin import FaradayAPIPlugin |
48 | 48 | |
49 | 49 | tags = set() |
50 | 50 | |
51 | with app.test_request_context(): | |
52 | for endpoint in app.view_functions.values(): | |
53 | spec.path(view=endpoint, app=app) | |
51 | with get_app().test_request_context(): | |
52 | for endpoint in get_app().view_functions.values(): | |
53 | spec.path(view=endpoint, app=get_app()) | |
54 | 54 | |
55 | 55 | # Set up global tags |
56 | 56 | spec_yaml = yaml.load(spec.to_yaml(), Loader=yaml.SafeLoader) |
72 | 72 | |
73 | 73 | |
74 | 74 | def show_all_urls(): |
75 | print(app.url_map) | |
75 | print(get_app().url_map) |
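Editor's note: get_app()'s definition is not shown in this diff; the call sites imply a lazily created singleton, so importing faraday.server.web no longer builds the Flask app at import time. A plausible sketch, stated as an assumption rather than the actual implementation:

    # faraday/server/web.py (assumed shape, not shown in this diff)
    _app = None

    def get_app(db_connection_string=None, testing=None):
        global _app
        if _app is None:
            _app = create_app(db_connection_string, testing)  # factory from app.py
        return _app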
0 | from faraday.server.web import app | |
0 | from faraday.server.web import get_app | |
1 | 1 | from faraday.server.models import User, db |
2 | 2 | from flask_security.utils import hash_password |
3 | 3 | |
4 | 4 | |
5 | 5 | def changes_password(username, password): |
6 | with app.app_context(): | |
6 | with get_app().app_context(): | |
7 | 7 | user = User.query.filter_by(username=username).first() |
8 | 8 | if user: |
9 | 9 | user.password = hash_password(password) |
12 | 12 | print("Password changed successfully")
13 | 13 | else: |
14 | 14 | print("User not found in Faraday's Database") |
15 | # I'm Py3
0 | 0 | import sys |
1 | 1 | import click |
2 | 2 | |
3 | from faraday.server.web import app | |
3 | from faraday.server.web import get_app | |
4 | 4 | from faraday.server.models import User, db |
5 | 5 | |
6 | 6 | |
7 | 7 | def change_username(current_username, new_username): |
8 | with app.app_context(): | |
8 | with get_app().app_context(): | |
9 | 9 | user = User.query.filter_by(username=current_username).first() |
10 | 10 | if not user: |
11 | 11 | print(f"\nERROR: User {current_username} was not found in Faraday's Database.") |
0 | 0 | import sys |
1 | 1 | import click |
2 | 2 | |
3 | from faraday.server.web import app | |
3 | from faraday.server.web import get_app | |
4 | 4 | from faraday.server.models import ( |
5 | 5 | db, |
6 | 6 | CustomFieldsSchema |
9 | 9 | |
10 | 10 | |
11 | 11 | def add_custom_field_main(): |
12 | with app.app_context(): | |
12 | with get_app().app_context(): | |
13 | 13 | add_custom_field_wizard() |
14 | 14 | |
15 | 15 | |
16 | 16 | def delete_custom_field_main(): |
17 | with app.app_context(): | |
17 | with get_app().app_context(): | |
18 | 18 | delete_custom_field_wizard() |
19 | 19 | |
20 | 20 | |
40 | 40 | field_type = click.prompt('Field type (int, str, list)', type=click.Choice(['int', 'str', 'list'])) |
41 | 41 | custom_fields = db.session.query(CustomFieldsSchema) |
42 | 42 | |
43 | #Checks the name of the fields wont be a duplicate | |
43 | # Checks that the field name won't be a duplicate
44 | 44 | for custom_field in custom_fields: |
45 | 45 | if field_name == custom_field.field_name \ |
46 | or field_display_name == custom_field.field_display_name: | |
46 | or field_display_name == custom_field.field_display_name: | |
47 | 47 | print('Custom field already exists, skipping') |
48 | 48 | sys.exit(1) |
49 | 49 | |
70 | 70 | invalid_field_order = True |
71 | 71 | continue |
72 | 72 | invalid_field_order = False |
73 | confirmation = click.prompt('New CustomField will be added to vulnerability -> Order {order} ({0},{1},{2}) <-, confirm to continue (yes/no)'\ | |
74 | .format(field_name, field_display_name, field_type, order=field_order)) | |
73 | confirmation = click.prompt('New CustomField will be added to vulnerability -> Order {order} ({0},{1},{2}) <-' | |
74 | ', confirm to continue (yes/no)' | |
75 | .format(field_name, field_display_name, field_type, order=field_order)) | |
75 | 76 | if not confirmation: |
76 | 77 | sys.exit(1) |
77 | 78 | |
78 | 79 | custom_field_data, created = get_or_create( |
79 | db.session, | |
80 | CustomFieldsSchema, | |
81 | table_name='vulnerability', | |
82 | field_name=field_name, | |
83 | field_order=field_order, | |
80 | db.session, | |
81 | CustomFieldsSchema, | |
82 | table_name='vulnerability', | |
83 | field_name=field_name, | |
84 | field_order=field_order, | |
84 | 85 | ) |
85 | 86 | if not created: |
86 | 87 | print('Custom field already exists, skipping') |
24 | 24 | def _draw_entity_diagrama(self): |
25 | 25 | # create the pydot graph object by autoloading all tables via a bound metadata object |
26 | 26 | try: |
27 | from sqlalchemy_schemadisplay import create_schema_graph # pylint:disable=import-outside-toplevel | |
27 | from sqlalchemy_schemadisplay import create_schema_graph # pylint:disable=import-outside-toplevel | |
28 | 28 | except ImportError: |
29 | 29 | print('Please install sqlalchemy_schemadisplay with "pip install sqlalchemy_schemadisplay"') |
30 | 30 | sys.exit(1) |
43 | 43 | sys.exit(1) |
44 | 44 | raise |
45 | 45 | |
46 | ||
47 | 46 | def _draw_uml_class_diagram(self): |
48 | 47 | # lets find all the mappers in our model |
49 | 48 | try: |
50 | from sqlalchemy_schemadisplay import create_uml_graph # pylint:disable=import-outside-toplevel | |
49 | from sqlalchemy_schemadisplay import create_uml_graph # pylint:disable=import-outside-toplevel | |
51 | 50 | except ImportError: |
52 | 51 | print('Please install sqlalchemy_schemadisplay with "pip install sqlalchemy_schemadisplay"') |
53 | 52 | sys.exit(1) |
9 | 9 | |
10 | 10 | from sqlalchemy.exc import IntegrityError |
11 | 11 | |
12 | from faraday.server.web import app | |
12 | from faraday.server.web import get_app | |
13 | 13 | from faraday.server.models import ( |
14 | 14 | db, |
15 | 15 | VulnerabilityTemplate, |
27 | 27 | def import_vulnerability_templates(language): |
28 | 28 | imported_rows = 0 |
29 | 29 | duplicated_rows = 0 |
30 | with app.app_context(): | |
30 | with get_app().app_context(): | |
31 | 31 | try: |
32 | 32 | res = requests.get(f'{CWE_URL}/cwe_{language}.csv') |
33 | 33 | except Exception as e: |
7 | 7 | |
8 | 8 | import getpass |
9 | 9 | import string |
10 | ||
10 | import uuid | |
11 | 11 | import os |
12 | 12 | import sys |
13 | 13 | import click |
16 | 16 | from alembic import command |
17 | 17 | from random import SystemRandom |
18 | 18 | from tempfile import TemporaryFile |
19 | from subprocess import Popen # nosec | |
19 | from subprocess import Popen # nosec | |
20 | 20 | |
21 | 21 | import sqlalchemy |
22 | 22 | from sqlalchemy import create_engine |
47 | 47 | config.get('database', 'connection_string') |
48 | 48 | reconfigure = None |
49 | 49 | while not reconfigure: |
50 | reconfigure = input(f'Database section {Fore.YELLOW} already found{Fore.WHITE}. Do you want to reconfigure database? (yes/no) ') | |
50 | reconfigure = input( | |
51 | f'Database section {Fore.YELLOW} already found{Fore.WHITE}. Do you want to reconfigure database? (yes/no) ') | |
51 | 52 | if reconfigure.lower() == 'no': |
52 | 53 | return False |
53 | 54 | elif reconfigure.lower() == 'yes': |
118 | 119 | else: |
119 | 120 | user_password = self.generate_random_pw(12) |
120 | 121 | already_created = False |
122 | fs_uniquifier = str(uuid.uuid4()) | |
121 | 123 | try: |
122 | 124 | |
123 | 125 | statement = text(""" |
124 | 126 | INSERT INTO faraday_user ( |
125 | 127 | username, name, password, |
126 | 128 | is_ldap, active, last_login_ip, |
127 | current_login_ip, role, state_otp | |
129 | current_login_ip, role, state_otp, fs_uniquifier | |
128 | 130 | ) VALUES ( |
129 | 131 | 'faraday', 'Administrator', :password, |
130 | 132 | false, true, '127.0.0.1', |
131 | '127.0.0.1', 'admin', 'disabled' | |
133 | '127.0.0.1', 'admin', 'disabled', :fs_uniquifier | |
132 | 134 | ) |
133 | 135 | """) |
134 | 136 | params = { |
135 | 'password': hash_password(user_password) | |
137 | 'password': hash_password(user_password), | |
138 | 'fs_uniquifier': fs_uniquifier | |
136 | 139 | } |
137 | 140 | connection = engine.connect() |
138 | 141 | connection.execute(statement, **params) |
141 | 144 | # when re using database user could be created previously |
142 | 145 | already_created = True |
143 | 146 | print( |
144 | "{yellow}WARNING{white}: Faraday administrator user already exists.".format( | |
145 | yellow=Fore.YELLOW, white=Fore.WHITE)) | |
147 | "{yellow}WARNING{white}: Faraday administrator user already exists.".format( | |
148 | yellow=Fore.YELLOW, white=Fore.WHITE)) | |
146 | 149 | else: |
147 | 150 | print( |
148 | 151 | "{yellow}WARNING{white}: Can't create administrator user.".format( |
152 | 155 | print("Admin user created with \n\n{red}username: {white}faraday \n" |
153 | 156 | "{red}password:{white} {" |
154 | 157 | "user_password} \n".format(user_password=user_password, |
155 | white=Fore.WHITE, red=Fore.RED)) | |
158 | white=Fore.WHITE, red=Fore.RED)) | |
156 | 159 | |
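Editor's note: Flask-Security-Too requires fs_uniquifier to be populated for every user, which is why the INSERT above now supplies one. Databases created before this version need the column backfilled; real upgrades go through the Alembic migrations this package ships, but a one-off sketch makes the constraint concrete (connection string and table/column names as used in the INSERT above; illustration only):

    import uuid
    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://faraday_postgresql:PASSWORD@localhost/faraday")
    with engine.connect() as connection:
        # Collect ids first, then assign a fresh uuid4 to each missing uniquifier.
        ids = [row[0] for row in
               connection.execute(text("SELECT id FROM faraday_user WHERE fs_uniquifier IS NULL"))]
        for user_id in ids:
            connection.execute(
                text("UPDATE faraday_user SET fs_uniquifier = :u WHERE id = :i"),
                u=str(uuid.uuid4()), i=user_id,
            )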
157 | 160 | def _configure_existing_postgres_user(self): |
158 | 161 | username = input('Please enter the postgresql username: ') |
166 | 169 | if 'unknown user: postgres' in psql_output: |
167 | 170 | print(f'ERROR: Postgres user not found. Did you install package {Fore.BLUE}postgresql{Fore.WHITE}?') |
168 | 171 | elif 'could not connect to server' in psql_output: |
169 | print(f'ERROR: {Fore.RED}PostgreSQL service{Fore.WHITE} is not running. Please verify that it is running in port 5432 before executing setup script.') | |
172 | print( | |
173 | f'ERROR: {Fore.RED}PostgreSQL service{Fore.WHITE} is not running. Please verify that it is running in port 5432 before executing setup script.') | |
170 | 174 | elif process_status > 0: |
171 | 175 | current_psql_output_file.seek(0) |
172 | 176 | print('ERROR: ' + psql_output) |
173 | 177 | |
174 | 178 | if process_status != 0: |
175 | current_psql_output_file.close() # delete temp file | |
179 | current_psql_output_file.close() # delete temp file | |
176 | 180 | sys.exit(process_status) |
177 | 181 | |
178 | 182 | def generate_random_pw(self, pwlen): |
184 | 188 | This step will create the role on the database. |
185 | 189 | we return username and password and those values will be saved in the config file. |
186 | 190 | """ |
187 | print('This script will {blue} create a new postgres user {white} and {blue} save faraday-server settings {white}(server.ini). '.format(blue=Fore.BLUE, white=Fore.WHITE)) | |
188 | username = os.environ.get("FARADAY_DATABASE_USER", 'faraday_postgresql') | |
191 | print( | |
192 | 'This script will {blue} create a new postgres user {white} and {blue} save faraday-server settings {white}(server.ini). '.format( | |
193 | blue=Fore.BLUE, white=Fore.WHITE)) | |
194 | username = os.environ.get("FARADAY_DATABASE_USER", 'faraday_postgresql') | |
189 | 195 | postgres_command = ['sudo', '-u', 'postgres', 'psql'] |
190 | 196 | if sys.platform == 'darwin': |
191 | 197 | print(f'{Fore.BLUE}MAC OS detected{Fore.WHITE}') |
192 | 198 | postgres_command = ['psql', 'postgres'] |
193 | 199 | password = self.generate_random_pw(25) |
194 | command = postgres_command + [ '-c', 'CREATE ROLE {0} WITH LOGIN PASSWORD \'{1}\';'.format(username, password)] | |
195 | p = Popen(command, stderr=psql_log_file, stdout=psql_log_file) # nosec | |
200 | command = postgres_command + ['-c', 'CREATE ROLE {0} WITH LOGIN PASSWORD \'{1}\';'.format(username, password)] | |
201 | p = Popen(command, stderr=psql_log_file, stdout=psql_log_file) # nosec | |
196 | 202 | p.wait() |
197 | 203 | psql_log_file.seek(0) |
198 | 204 | output = psql_log_file.read() |
205 | 211 | |
206 | 212 | try: |
207 | 213 | if not getattr(faraday.server.config, 'database', None): |
208 | print('Manual configuration? \n faraday_postgresql was found in PostgreSQL, but no connection string was found in server.ini. ') | |
209 | print('Please configure [database] section with correct postgresql string. Ex. postgresql+psycopg2://faraday_postgresql:PASSWORD@localhost/faraday') | |
214 | print( | |
215 | 'Manual configuration? \n faraday_postgresql was found in PostgreSQL, but no connection string was found in server.ini. ') | |
216 | print( | |
217 | 'Please configure [database] section with correct postgresql string. Ex. postgresql+psycopg2://faraday_postgresql:PASSWORD@localhost/faraday') | |
210 | 218 | sys.exit(1) |
211 | 219 | try: |
212 | 220 | password = faraday.server.config.database.connection_string.split(':')[2].split('@')[0] |
213 | 221 | except AttributeError: |
214 | 222 | print('Could not find connection string.') |
215 | print('Please configure [database] section with correct postgresql string. Ex. postgresql+psycopg2://faraday_postgresql:PASSWORD@localhost/faraday') | |
223 | print( | |
224 | 'Please configure [database] section with correct postgresql string. Ex. postgresql+psycopg2://faraday_postgresql:PASSWORD@localhost/faraday') | |
216 | 225 | sys.exit(1) |
217 | 226 | connection = psycopg2.connect(dbname='postgres', |
218 | 227 | user=username, |
244 | 253 | |
245 | 254 | print(f'Creating database {database_name}') |
246 | 255 | command = postgres_command + ['createdb', '-E', 'utf8', '-O', username, database_name] |
247 | p = Popen(command, stderr=psql_log_file, stdout=psql_log_file, cwd='/tmp') # nosec | |
256 | p = Popen(command, stderr=psql_log_file, stdout=psql_log_file, cwd='/tmp') # nosec | |
248 | 257 | p.wait() |
249 | 258 | return_code = p.returncode |
250 | 259 | psql_log_file.seek(0) |
274 | 283 | |
275 | 284 | def _create_tables(self, conn_string): |
276 | 285 | print('Creating tables') |
277 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
286 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
278 | 287 | current_app.config['SQLALCHEMY_DATABASE_URI'] = conn_string |
279 | 288 | |
280 | 289 | # Check if the alembic_version exists |
292 | 301 | db.create_all() |
293 | 302 | except OperationalError as ex: |
294 | 303 | if 'could not connect to server' in str(ex): |
295 | print(f'ERROR: {Fore.RED}PostgreSQL service{Fore.WHITE} is not running. Please verify that it is running in port 5432 before executing setup script.') | |
304 | print( | |
305 | f'ERROR: {Fore.RED}PostgreSQL service{Fore.WHITE} is not running. Please verify that it is running in port 5432 before executing setup script.') | |
296 | 306 | sys.exit(1) |
297 | 307 | elif 'password authentication failed' in str(ex): |
298 | 308 | print('ERROR: ') |
7 | 7 | import click |
8 | 8 | |
9 | 9 | from faraday.server.models import db |
10 | from faraday.server.web import app | |
10 | from faraday.server.web import get_app | |
11 | 11 | from faraday.server.commands.initdb import InitDB |
12 | 12 | import faraday.server.config |
13 | 13 | |
30 | 30 | |
31 | 31 | |
32 | 32 | def reset_db(): |
33 | with app.app_context(): | |
33 | with get_app().app_context(): | |
34 | 34 | reset_db_all() |
35 | 35 | |
36 | 36 |
10 | 10 | from colorama import Fore |
11 | 11 | |
12 | 12 | import faraday.server.config |
13 | from faraday.server.web import app | |
13 | from faraday.server.web import get_app | |
14 | 14 | from faraday.server.models import db |
15 | 15 | from faraday.server.config import CONST_FARADAY_HOME_PATH |
16 | 16 | from faraday.server.utils.daemonize import is_server_running |
26 | 26 | |
27 | 27 | |
28 | 28 | def check_open_ports(): |
29 | address = faraday.server.config.faraday_server.bind_address | |
29 | address = faraday.server.config.faraday_server.bind_address | |
30 | 30 | port = int(faraday.server.config.faraday_server.port) |
31 | 31 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
32 | 32 | result = sock.connect_ex((address, port)) |
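
    For context on the probe above: `connect_ex()` returns 0 when the TCP connection succeeds and an errno value otherwise, so "port is open" reduces to comparing the result with 0. A self-contained sketch (host and port are placeholders; 5985 is Faraday's default port):

    import socket

    def port_is_open(address: str, port: int, timeout: float = 1.0) -> bool:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        try:
            # connect_ex() returns 0 on success, an errno value on failure.
            return sock.connect_ex((address, port)) == 0
        finally:
            sock.close()

    print(port_is_open('127.0.0.1', 5985))
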
37 | 37 | |
38 | 38 | |
39 | 39 | def check_postgres(): |
40 | with app.app_context(): | |
40 | with get_app().app_context(): | |
41 | 41 | try: |
42 | result = (db.session.query("version()").one(),db.session.query("current_setting('server_version_num')").one()) | |
42 | result = ( | |
43 | db.session.query("version()").one(), db.session.query("current_setting('server_version_num')").one()) | |
43 | 44 | return result |
44 | 45 | except sqlalchemy.exc.OperationalError: |
45 | 46 | return False |
48 | 49 | |
49 | 50 | |
50 | 51 | def check_locks_postgresql(): |
51 | with app.app_context(): | |
52 | with get_app().app_context(): | |
52 | 53 | psql_status = check_postgres() |
53 | 54 | if psql_status: |
54 | 55 | result = db.engine.execute("""SELECT blocked_locks.pid AS blocked_pid, |
84 | 85 | |
85 | 86 | |
86 | 87 | def check_postgresql_encoding(): |
87 | with app.app_context(): | |
88 | with get_app().app_context(): | |
88 | 89 | psql_status = check_postgres() |
89 | 90 | if psql_status: |
90 | 91 | encoding = db.engine.execute("SHOW SERVER_ENCODING").first()[0] |
94 | 95 | |
95 | 96 | |
96 | 97 | def check_storage_permission(): |
97 | ||
98 | 98 | path = CONST_FARADAY_HOME_PATH / 'storage' / 'test' |
99 | 99 | |
100 | 100 | try: |
112 | 112 | data_keys = ['bind_address', 'port', 'websocket_port', 'debug'] |
113 | 113 | for key in data_keys: |
114 | 114 | print('{blue} {KEY}: {white}{VALUE}'. |
115 | format(KEY=key, VALUE=getattr(faraday.server.config.faraday_server, key), white=Fore.WHITE, blue=Fore.BLUE)) | |
115 | format(KEY=key, VALUE=getattr(faraday.server.config.faraday_server, key), white=Fore.WHITE, | |
116 | blue=Fore.BLUE)) | |
116 | 117 | |
117 | 118 | print(f'\n{Fore.WHITE}Showing faraday plugins data') |
118 | 119 | print(f"{Fore.BLUE} version: {Fore.WHITE}{faraday_plugins.__version__}") |
135 | 136 | exit_code = 0 |
136 | 137 | result = check_postgres() |
137 | 138 | |
138 | ||
139 | if result == False: | |
140 | print('[{red}-{white}] Could not connect to PostgreSQL, please check if database is running'\ | |
141 | .format(red=Fore.RED, white=Fore.WHITE)) | |
139 | if not result: | |
140 | print('[{red}-{white}] Could not connect to PostgreSQL, please check if database is running' | |
141 | .format(red=Fore.RED, white=Fore.WHITE)) | |
142 | 142 | exit_code = 1 |
143 | 143 | return exit_code |
144 | elif result == None: | |
145 | print('[{red}-{white}] Database not initialized. Execute: faraday-manage initdb'\ | |
146 | .format(red=Fore.RED, white=Fore.WHITE)) | |
144 | elif result is None: | |
145 | print('[{red}-{white}] Database not initialized. Execute: faraday-manage initdb' | |
146 | .format(red=Fore.RED, white=Fore.WHITE)) | |
147 | 147 | exit_code = 1 |
148 | 148 | return exit_code |
149 | elif int(result[1][0])<90400: | |
150 | print('[{red}-{white}] PostgreSQL is running, but needs to be 9.4 or newer, please update PostgreSQL'.\ | |
151 | format(red=Fore.RED, white=Fore.WHITE)) | |
149 | elif int(result[1][0]) < 90400: | |
150 | print('[{red}-{white}] PostgreSQL is running, but needs to be 9.4 or newer, please update PostgreSQL' | |
151 | .format(red=Fore.RED, white=Fore.WHITE)) | |
152 | 152 | elif result: |
153 | 153 | print(f'[{Fore.GREEN}+{Fore.WHITE}] PostgreSQL is running and up to date') |
154 | 154 | return exit_code |
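
    The `90400` threshold works because `current_setting('server_version_num')` packs the PostgreSQL version into one integer: major*10000 + minor*100 + patch before version 10, and major*10000 + minor from 10 on. The comparison in isolation:

    def postgres_is_supported(server_version_num: str) -> bool:
        # '90405' -> 9.4.5 (pre-10 encoding), '120003' -> 12.3 (10+ encoding);
        # 90400 is therefore the integer form of 9.4.0.
        return int(server_version_num) >= 90400

    assert postgres_is_supported('120003')
    assert not postgres_is_supported('90305')  # 9.3.5 is too old
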
161 | 161 | lock_status = check_locks_postgresql() |
162 | 162 | if lock_status: |
163 | 163 | print(f'[{Fore.YELLOW}-{Fore.WHITE}] Warning: PostgreSQL lock detected.') |
164 | elif lock_status == False: | |
164 | elif not lock_status: | |
165 | 165 | print(f'[{Fore.GREEN}+{Fore.WHITE}] PostgreSQL lock not detected. ') |
166 | elif lock_status == None: | |
166 | elif lock_status is None: | |
167 | 167 | pass |
168 | 168 | |
169 | 169 | encoding = check_postgresql_encoding() |
170 | 170 | if encoding: |
171 | 171 | print(f'[{Fore.GREEN}+{Fore.WHITE}] PostgreSQL encoding: {encoding}') |
172 | elif encoding == None: | |
172 | elif encoding is None: | |
173 | 173 | pass |
174 | 174 | |
175 | 175 | |
176 | 176 | def print_faraday_status(): |
177 | 177 | """Prints Status of farday using check_server_running() """ |
178 | 178 | |
179 | #Prints Status of the server using check_server_running() | |
179 | # Prints Status of the server using check_server_running() | |
180 | 180 | pid = check_server_running() |
181 | 181 | if pid is not None: |
182 | 182 | print('[{green}+{white}] Faraday Server is running. PID:{PID} \ |
198 | 198 | print(f'[{Fore.RED}-{Fore.WHITE}] /.faraday/storage -> Permission denied') |
199 | 199 | |
200 | 200 | if check_open_ports(): |
201 | print("[{green}+{white}] Port {PORT} in {ad} is open"\ | |
202 | .format(PORT=faraday.server.config.faraday_server.port, green=Fore.GREEN,white=Fore.WHITE,ad=faraday.server.config.faraday_server.bind_address)) | |
203 | else: | |
204 | print("[{red}-{white}] Port {PORT} in {ad} is not open"\ | |
205 | .format(PORT=faraday.server.config.faraday_server.port,red=Fore.RED,white=Fore.WHITE,ad =faraday.server.config.faraday_server.bind_address)) | |
201 | print("[{green}+{white}] Port {PORT} in {ad} is open" | |
202 | .format(PORT=faraday.server.config.faraday_server.port, | |
203 | green=Fore.GREEN, white=Fore.WHITE, ad=faraday.server.config.faraday_server.bind_address)) | |
204 | else: | |
205 | print("[{red}-{white}] Port {PORT} in {ad} is not open" | |
206 | .format(PORT=faraday.server.config.faraday_server.port, | |
207 | red=Fore.RED, white=Fore.WHITE, ad=faraday.server.config.faraday_server.bind_address)) | |
206 | 208 | |
207 | 209 | |
208 | 210 | def full_status_check(): |
144 | 144 | self.session_timeout = 12 |
145 | 145 | self.api_token_expiration = 43200 # Default as 12 hs |
146 | 146 | self.agent_registration_secret = None |
147 | self.agent_token_expiration = 60 # Default as 1 min | |
147 | 148 | self.debug = False |
148 | 149 | self.custom_plugins_folder = None |
149 | 150 | self.ignore_info_severity = False |
163 | 164 | self.use_start_tls = None |
164 | 165 | |
165 | 166 | |
166 | ||
167 | 167 | class SmtpConfigObject(ConfigSection): |
168 | 168 | def __init__(self): |
169 | 169 | self.username = None |
188 | 188 | class LoggerConfig(ConfigSection): |
189 | 189 | def __init__(self): |
190 | 190 | self.use_rfc5424_formatter = False |
191 | ||
191 | 192 | |
192 | 193 | database = DatabaseConfigObject() |
193 | 194 | dashboard = DashboardConfigObject() |
67 | 67 | changes_queue.put(msg) |
68 | 68 | |
69 | 69 | |
70 | ||
71 | 70 | def update_object_event(mapper, connection, instance): |
72 | 71 | delta = instance.update_date - instance.create_date |
73 | 72 | if delta.seconds < 30: |
87 | 86 | |
88 | 87 | def after_insert_check_child_has_same_workspace(mapper, connection, inserted_instance): |
89 | 88 | if inserted_instance.parent: |
90 | assert (inserted_instance.workspace == | |
91 | inserted_instance.parent.workspace), \ | |
89 | assert (inserted_instance.workspace | |
90 | == inserted_instance.parent.workspace), \ | |
92 | 91 | "Conflicting workspace assignation for objects. " \ |
93 | 92 | "This should never happen!!!" |
94 | 93 | |
95 | ||
96 | ||
97 | assert (inserted_instance.workspace_id == | |
98 | inserted_instance.parent.workspace_id), \ | |
94 | assert (inserted_instance.workspace_id | |
95 | == inserted_instance.parent.workspace_id), \ | |
99 | 96 | "Conflicting workspace_id assignation for objects. " \ |
100 | 97 | "This should never happen!!!" |
101 | 98 | |
105 | 102 | if inspect.isclass(obj) and getattr(obj, 'workspace_id', None): |
106 | 103 | event.listen(obj, 'after_insert', after_insert_check_child_has_same_workspace) |
107 | 104 | event.listen(obj, 'after_update', after_insert_check_child_has_same_workspace) |
108 | ||
109 | ||
110 | 105 | |
111 | 106 | |
112 | 107 | # Events for websockets |
120 | 115 | # Update object bindings |
121 | 116 | event.listen(Host, 'after_update', update_object_event) |
122 | 117 | event.listen(Service, 'after_update', update_object_event) |
123 | # I'm Py3 |
0 | 0 | # Faraday Penetration Test IDE |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | import json | |
3 | 4 | import logging |
4 | 5 | import operator |
5 | 6 | import string |
120 | 121 | cursor.close() |
121 | 122 | |
122 | 123 | @event.listens_for(rv, "begin") |
123 | def do_begin(conn): # pylint:disable=unused-variable | |
124 | def do_begin(conn): # pylint:disable=unused-variable | |
124 | 125 | # emit our own BEGIN |
125 | 126 | conn.execute("BEGIN") |
126 | 127 | return rv |
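
    The `do_begin` listener is SQLAlchemy's documented recipe for taking over transaction control on pysqlite, which never emits BEGIN on its own; emitting it by hand lets DDL and DML share one real transaction. The same recipe in isolation (SQLAlchemy 1.x style, matching the code above):

    from sqlalchemy import create_engine, event

    engine = create_engine('sqlite://')

    @event.listens_for(engine, 'connect')
    def do_connect(dbapi_connection, connection_record):
        # Disable pysqlite's implicit BEGIN/COMMIT handling entirely.
        dbapi_connection.isolation_level = None

    @event.listens_for(engine, 'begin')
    def do_begin(conn):
        # Emit our own BEGIN so the transaction starts exactly when
        # SQLAlchemy's unit of work does.
        conn.execute('BEGIN')
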
168 | 169 | query = select([BooleanToIntColumn("(count(*) = 0)")]) |
169 | 170 | query = query.select_from(text('command_object as command_object_inner')) |
170 | 171 | where_expr = " command_object_inner.create_date < command_object.create_date and " \ |
171 | " (command_object_inner.object_id = command_object.object_id and " \ | |
172 | " command_object_inner.object_type = command_object.object_type) and " \ | |
173 | " command_object_inner.workspace_id = command_object.workspace_id " | |
172 | " (command_object_inner.object_id = command_object.object_id and " \ | |
173 | " command_object_inner.object_type = command_object.object_type) and " \ | |
174 | " command_object_inner.workspace_id = command_object.workspace_id " | |
174 | 175 | query = query.where(text(where_expr)) |
175 | 176 | return column_property( |
176 | 177 | query, |
209 | 210 | # I suppose that we're using PostgreSQL, that can't compare |
210 | 211 | # booleans with integers |
211 | 212 | query = query.where(text("vulnerability.confirmed = true")) |
212 | elif confirmed == False: | |
213 | elif confirmed is False: | |
213 | 214 | if db.session.bind.dialect.name == 'sqlite': |
214 | 215 | # SQLite has no "true" expression, we have to use the integer 1 |
215 | 216 | # instead |
304 | 305 | |
305 | 306 | vuln_count = ( |
306 | 307 | select([func.count(text('vulnerability.id'))]). |
307 | select_from(text('vulnerability')). | |
308 | where(text(f'vulnerability.host_id = host.id and vulnerability.severity = \'{severity}\'')). | |
309 | as_scalar() | |
308 | select_from(text('vulnerability')). | |
309 | where(text(f'vulnerability.host_id = host.id and vulnerability.severity = \'{severity}\'')). | |
310 | as_scalar() | |
310 | 311 | ) |
311 | 312 | |
312 | 313 | vuln_web_count = ( |
313 | 314 | select([func.count(text('vulnerability.id'))]). |
314 | select_from(text('vulnerability, service')). | |
315 | where(text('(vulnerability.service_id = service.id and ' | |
316 | f'service.host_id = host.id) and vulnerability.severity = \'{severity}\'')). | |
317 | as_scalar() | |
315 | select_from(text('vulnerability, service')). | |
316 | where(text('(vulnerability.service_id = service.id and ' | |
317 | f'service.host_id = host.id) and vulnerability.severity = \'{severity}\'')). | |
318 | as_scalar() | |
318 | 319 | ) |
319 | 320 | |
320 | 321 | vulnerability_generic_count = column_property( |
368 | 369 | function = BlankColumn(Text) |
369 | 370 | module = BlankColumn(Text) |
370 | 371 | |
372 | # 1 workspace <--> N source_codes | |
373 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
371 | 374 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
372 | 375 | workspace = relationship('Workspace', backref='source_codes') |
373 | 376 | |
428 | 431 | |
429 | 432 | host_id = Column(Integer, ForeignKey('host.id'), index=True, nullable=False) |
430 | 433 | host = relationship('Host', backref=backref("hostnames", cascade="all, delete-orphan")) |
431 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
434 | ||
435 | # 1 workspace <--> N hostnames | |
436 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
437 | workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete='CASCADE'), index=True, nullable=False) | |
432 | 438 | workspace = relationship( |
433 | 439 | 'Workspace', |
434 | backref='hostnames', | |
435 | foreign_keys=[workspace_id] | |
436 | ) | |
440 | foreign_keys=[workspace_id], | |
441 | backref=backref('hostnames', cascade="all, delete-orphan", passive_deletes=True), | |
442 | ) | |
443 | ||
437 | 444 | __table_args__ = ( |
438 | 445 | UniqueConstraint(name, host_id, workspace_id, name='uix_hostname_host_workspace'), |
439 | 446 | ) |
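
    This hunk (and several below for Host, Service, Credential and VulnerabilityGeneric) pairs `ondelete='CASCADE'` on the foreign key with `passive_deletes=True` on the backref: the database now cascades the delete, and the ORM stops loading every child row just to delete it. A minimal sketch of the pairing, with simplified, assumed models:

    from sqlalchemy import Column, ForeignKey, Integer, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import backref, relationship

    Base = declarative_base()

    class Workspace(Base):
        __tablename__ = 'workspace'
        id = Column(Integer, primary_key=True)

    class Hostname(Base):
        __tablename__ = 'hostname'
        id = Column(Integer, primary_key=True)
        # ondelete='CASCADE' puts the cascade into the schema itself...
        workspace_id = Column(Integer,
                              ForeignKey('workspace.id', ondelete='CASCADE'),
                              index=True, nullable=False)
        # ...and passive_deletes=True tells the ORM to trust the database
        # instead of SELECTing children and deleting them one by one.
        workspace = relationship(
            'Workspace',
            backref=backref('hostnames', cascade='all, delete-orphan',
                            passive_deletes=True),
        )

    Base.metadata.create_all(create_engine('sqlite://'))
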
444 | 451 | @property |
445 | 452 | def parent(self): |
446 | 453 | return self.host |
447 | ||
448 | 454 | |
449 | 455 | |
450 | 456 | class CustomFieldsSchema(db.Model): |
566 | 572 | for new_value in self._create(value): |
567 | 573 | self.col.add(new_value) |
568 | 574 | |
575 | ||
569 | 576 | def _build_associationproxy_creator(model_class_name): |
570 | 577 | def creator(name, vulnerability): |
571 | 578 | """Get or create a reference/policyviolation with the |
658 | 665 | command = relationship('Command', backref='command_objects') |
659 | 666 | command_id = Column(Integer, ForeignKey('command.id'), index=True) |
660 | 667 | |
668 | # 1 workspace <--> N command_objects | |
669 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
661 | 670 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
662 | 671 | workspace = relationship( |
663 | 672 | 'Workspace', |
664 | 673 | foreign_keys=[workspace_id], |
665 | backref = backref('command_objects', cascade="all, delete-orphan") | |
674 | backref=backref('command_objects', cascade="all, delete-orphan") | |
666 | 675 | ) |
667 | 676 | |
668 | 677 | create_date = Column(DateTime, default=datetime.utcnow) |
703 | 712 | |
704 | 713 | # db.session.flush() |
705 | 714 | assert object_.id is not None, "object must have an ID. Try " \ |
706 | "flushing the session" | |
715 | "flushing the session" | |
707 | 716 | kwargs['object_id'] = object_.id |
708 | 717 | kwargs['object_type'] = object_type |
709 | 718 | return super().__init__(**kwargs) |
715 | 724 | where_conditions.append("command_object.workspace_id = command.workspace_id") |
716 | 725 | return column_property( |
717 | 726 | select([func.sum(CommandObject.created)]). |
718 | select_from(table('command_object')). | |
719 | where(text(' and '.join(where_conditions))) | |
727 | select_from(table('command_object')). | |
728 | where(text(' and '.join(where_conditions))) | |
720 | 729 | ) |
721 | 730 | |
722 | 731 | |
734 | 743 | for attr, filter_value in join_filters.items(): |
735 | 744 | where_conditions.append(f"vulnerability.{attr} = {filter_value}") |
736 | 745 | return column_property( |
737 | select([func.sum(CommandObject.created)]). \ | |
738 | select_from(table('command_object')). \ | |
739 | select_from(table('vulnerability')). \ | |
740 | where(text(' and '.join(where_conditions))) | |
746 | select([func.sum(CommandObject.created)]) | |
747 | .select_from(table('command_object')) | |
748 | .select_from(table('vulnerability')) | |
749 | .where(text(' and '.join(where_conditions))) | |
741 | 750 | ) |
742 | 751 | |
743 | 752 | |
744 | 753 | class Command(Metadata): |
745 | ||
746 | 754 | IMPORT_SOURCE = [ |
747 | 'report', # all the files the tools export and faraday imports them from the reports directory, gtk manual import or web import. |
755 | 'report', |
756 | # all the files the tools export and faraday imports them from the reports directory, gtk manual import or web import. |
748 | 757 | 'shell', # command executed on the shell or webshell with hooks connected to faraday. |
749 | 758 | 'agent' |
750 | 759 | ] |
761 | 770 | user = BlankColumn(String(250)) # os username where the command was executed |
762 | 771 | import_source = Column(Enum(*IMPORT_SOURCE, name='import_source_enum')) |
763 | 772 | |
773 | # 1 workspace <--> N commands | |
774 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
764 | 775 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
765 | 776 | workspace = relationship( |
766 | 777 | 'Workspace', |
770 | 781 | |
771 | 782 | sum_created_vulnerabilities = _make_created_objects_sum('vulnerability') |
772 | 783 | |
773 | sum_created_vulnerabilities_web = _make_created_objects_sum_joined('vulnerability', {'type': '\'vulnerability_web\''}) | |
784 | sum_created_vulnerabilities_web = _make_created_objects_sum_joined('vulnerability', | |
785 | {'type': '\'vulnerability_web\''}) | |
774 | 786 | |
775 | 787 | sum_created_hosts = _make_created_objects_sum('host') |
776 | 788 | |
817 | 829 | cascade="all, delete-orphan" |
818 | 830 | ) |
819 | 831 | |
820 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, | |
821 | nullable=False) | |
832 | # 1 workspace <--> N hosts | |
833 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
834 | workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete='CASCADE'), index=True, nullable=False) | |
822 | 835 | workspace = relationship( |
823 | 836 | 'Workspace', |
824 | 837 | foreign_keys=[workspace_id], |
825 | backref=backref("hosts", cascade="all, delete-orphan") | |
826 | ) | |
838 | backref=backref("hosts", cascade="all, delete-orphan", passive_deletes=True) | |
839 | ) | |
827 | 840 | |
828 | 841 | open_service_count = _make_generic_count_property( |
829 | 842 | 'host', 'service', where=text("service.status = 'open'")) |
831 | 844 | |
832 | 845 | __host_vulnerabilities = ( |
833 | 846 | select([func.count(text('vulnerability.id'))]). |
834 | select_from(text('vulnerability')). | |
835 | where(text('vulnerability.host_id = host.id')). | |
836 | as_scalar() | |
847 | select_from(text('vulnerability')). | |
848 | where(text('vulnerability.host_id = host.id')). | |
849 | as_scalar() | |
837 | 850 | ) |
838 | 851 | __service_vulnerabilities = ( |
839 | 852 | select([func.count(text('vulnerability.id'))]). |
840 | select_from(text('vulnerability, service')). | |
841 | where(text('vulnerability.service_id = service.id and ' | |
842 | 'service.host_id = host.id')). | |
843 | as_scalar() | |
853 | select_from(text('vulnerability, service')). | |
854 | where(text('vulnerability.service_id = service.id and ' | |
855 | 'service.host_id = host.id')). | |
856 | as_scalar() | |
844 | 857 | ) |
845 | 858 | vulnerability_count = column_property( |
846 | 859 | # select(text('count(*)')).select_from(__host_vulnerabilities.subquery()), |
880 | 893 | cls.vulnerability_informational_count, |
881 | 894 | _make_vuln_count_property( |
882 | 895 | type_=None, |
883 | confirmed = confirmed, | |
884 | use_column_property = False, | |
885 | extra_query = "vulnerability.severity='informational'", | |
886 | get_hosts_vulns = True | |
896 | confirmed=confirmed, | |
897 | use_column_property=False, | |
898 | extra_query="vulnerability.severity='informational'", | |
899 | get_hosts_vulns=True | |
887 | 900 | ) |
888 | 901 | ), |
889 | 902 | with_expression( |
890 | 903 | cls.vulnerability_medium_count, |
891 | 904 | _make_vuln_count_property( |
892 | type_ = None, | |
893 | confirmed = confirmed, | |
894 | use_column_property = False, | |
895 | extra_query = "vulnerability.severity='medium'", | |
896 | get_hosts_vulns = True | |
905 | type_=None, | |
906 | confirmed=confirmed, | |
907 | use_column_property=False, | |
908 | extra_query="vulnerability.severity='medium'", | |
909 | get_hosts_vulns=True | |
897 | 910 | ) |
898 | 911 | ), |
899 | 912 | with_expression( |
900 | 913 | cls.vulnerability_high_count, |
901 | 914 | _make_vuln_count_property( |
902 | type_ = None, | |
903 | confirmed = confirmed, | |
904 | use_column_property = False, | |
905 | extra_query = "vulnerability.severity='high'", | |
906 | get_hosts_vulns = True | |
915 | type_=None, | |
916 | confirmed=confirmed, | |
917 | use_column_property=False, | |
918 | extra_query="vulnerability.severity='high'", | |
919 | get_hosts_vulns=True | |
907 | 920 | ) |
908 | 921 | ), |
909 | 922 | with_expression( |
910 | 923 | cls.vulnerability_critical_count, |
911 | 924 | _make_vuln_count_property( |
912 | type_ = None, | |
913 | confirmed = confirmed, | |
914 | use_column_property = False, | |
915 | extra_query = "vulnerability.severity='critical'", | |
916 | get_hosts_vulns = True | |
925 | type_=None, | |
926 | confirmed=confirmed, | |
927 | use_column_property=False, | |
928 | extra_query="vulnerability.severity='critical'", | |
929 | get_hosts_vulns=True | |
917 | 930 | ) |
918 | 931 | ), |
919 | 932 | with_expression( |
920 | 933 | cls.vulnerability_low_count, |
921 | 934 | _make_vuln_count_property( |
922 | type_ = None, | |
923 | confirmed = confirmed, | |
924 | use_column_property = False, | |
925 | extra_query = "vulnerability.severity='low'", | |
926 | get_hosts_vulns = True | |
935 | type_=None, | |
936 | confirmed=confirmed, | |
937 | use_column_property=False, | |
938 | extra_query="vulnerability.severity='low'", | |
939 | get_hosts_vulns=True | |
927 | 940 | ) |
928 | 941 | ), |
929 | 942 | with_expression( |
930 | 943 | cls.vulnerability_unclassified_count, |
931 | 944 | _make_vuln_count_property( |
932 | type_ = None, | |
933 | confirmed = confirmed, | |
934 | use_column_property = False, | |
935 | extra_query = "vulnerability.severity='unclassified'", | |
936 | get_hosts_vulns = True | |
945 | type_=None, | |
946 | confirmed=confirmed, | |
947 | use_column_property=False, | |
948 | extra_query="vulnerability.severity='unclassified'", | |
949 | get_hosts_vulns=True | |
937 | 950 | ) |
938 | 951 | ), |
939 | 952 | with_expression( |
940 | 953 | cls.vulnerability_total_count, |
941 | 954 | _make_vuln_count_property( |
942 | type_ = None, | |
943 | confirmed = confirmed, | |
944 | use_column_property = False, | |
945 | get_hosts_vulns = True | |
955 | type_=None, | |
956 | confirmed=confirmed, | |
957 | use_column_property=False, | |
958 | get_hosts_vulns=True | |
946 | 959 | ) |
947 | 960 | ), |
948 | 961 | ) |
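
    `query_count()` builds on SQLAlchemy's `query_expression` / `with_expression` pair: the per-severity count attributes stay unloaded unless a query opts in with an ad-hoc expression. Reduced to its essentials (simplified model; `literal(0)` stands in for the real count subquery):

    from sqlalchemy import Column, Integer, create_engine, literal
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import query_expression, sessionmaker, with_expression

    Base = declarative_base()

    class Host(Base):
        __tablename__ = 'host'
        id = Column(Integer, primary_key=True)
        # Placeholder attribute; it stays None unless a query populates it.
        vulnerability_total_count = query_expression()

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(Host())
    session.commit()

    host = (session.query(Host)
            .options(with_expression(Host.vulnerability_total_count, literal(0)))
            .first())
    assert host.vulnerability_total_count == 0
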
989 | 1002 | foreign_keys=[host_id], |
990 | 1003 | ) |
991 | 1004 | |
992 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1005 | # 1 workspace <--> N services | |
1006 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1007 | workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete='CASCADE'), index=True, nullable=False) | |
993 | 1008 | workspace = relationship( |
994 | 1009 | 'Workspace', |
995 | backref=backref('services', cascade="all, delete-orphan"), | |
996 | foreign_keys=[workspace_id] | |
1010 | foreign_keys=[workspace_id], | |
1011 | backref=backref('services', cascade="all, delete-orphan", passive_deletes=True), | |
997 | 1012 | ) |
998 | 1013 | |
999 | 1014 | vulnerability_count = _make_generic_count_property('service', |
1049 | 1064 | website = BlankColumn(Text) |
1050 | 1065 | status_code = Column(Integer, nullable=True) |
1051 | 1066 | |
1052 | ||
1053 | 1067 | vulnerability_duplicate_id = Column( |
1054 | Integer, | |
1055 | ForeignKey('vulnerability.id'), | |
1056 | index=True, | |
1057 | nullable=True, | |
1058 | ) | |
1068 | Integer, | |
1069 | ForeignKey('vulnerability.id'), | |
1070 | index=True, | |
1071 | nullable=True, | |
1072 | ) | |
1059 | 1073 | duplicate_childs = relationship("VulnerabilityGeneric", cascade="all, delete-orphan", |
1060 | backref=backref('vulnerability_duplicate', remote_side=[id]) | |
1061 | ) | |
1074 | backref=backref('vulnerability_duplicate', remote_side=[id]) | |
1075 | ) | |
1062 | 1076 | |
1063 | 1077 | vulnerability_template_id = Column( |
1064 | Integer, | |
1065 | ForeignKey('vulnerability_template.id'), | |
1066 | index=True, | |
1067 | nullable=True, | |
1068 | ) | |
1069 | ||
1070 | vulnerability_template = relationship('VulnerabilityTemplate', backref=backref('duplicate_vulnerabilities', passive_deletes='all')) | |
1071 | ||
1072 | workspace_id = Column( | |
1073 | Integer, | |
1074 | ForeignKey('workspace.id'), | |
1075 | index=True, | |
1076 | nullable=False, | |
1077 | ) | |
1078 | workspace = relationship('Workspace', backref='vulnerabilities') | |
1078 | Integer, | |
1079 | ForeignKey('vulnerability_template.id'), | |
1080 | index=True, | |
1081 | nullable=True, | |
1082 | ) | |
1083 | ||
1084 | vulnerability_template = relationship('VulnerabilityTemplate', | |
1085 | backref=backref('duplicate_vulnerabilities', passive_deletes='all')) | |
1086 | ||
1087 | # 1 workspace <--> N vulnerabilities |
1088 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1089 | workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete='CASCADE'), index=True, nullable=False) | |
1090 | workspace = relationship( | |
1091 | 'Workspace', | |
1092 | backref=backref('vulnerabilities', cascade="all, delete-orphan", passive_deletes=True) | |
1093 | ) | |
1079 | 1094 | |
1080 | 1095 | reference_instances = relationship( |
1081 | 1096 | "Reference", |
1119 | 1134 | |
1120 | 1135 | creator_command_id = column_property( |
1121 | 1136 | select([CommandObject.command_id]) |
1122 | .where(CommandObject.object_type == 'vulnerability') | |
1123 | .where(text('command_object.object_id = vulnerability.id')) | |
1124 | .where(CommandObject.workspace_id == workspace_id) | |
1125 | .order_by(asc(CommandObject.create_date)) | |
1126 | .limit(1), | |
1137 | .where(CommandObject.object_type == 'vulnerability') | |
1138 | .where(text('command_object.object_id = vulnerability.id')) | |
1139 | .where(CommandObject.workspace_id == workspace_id) | |
1140 | .order_by(asc(CommandObject.create_date)) | |
1141 | .limit(1), | |
1127 | 1142 | deferred=True) |
1128 | 1143 | |
1129 | 1144 | creator_command_tool = column_property( |
1130 | 1145 | select([Command.tool]) |
1131 | .select_from(join(Command, CommandObject, | |
1132 | Command.id == CommandObject.command_id)) | |
1133 | .where(CommandObject.object_type == 'vulnerability') | |
1134 | .where(text('command_object.object_id = vulnerability.id')) | |
1135 | .where(CommandObject.workspace_id == workspace_id) | |
1136 | .order_by(asc(CommandObject.create_date)) | |
1137 | .limit(1), | |
1146 | .select_from(join(Command, CommandObject, | |
1147 | Command.id == CommandObject.command_id)) | |
1148 | .where(CommandObject.object_type == 'vulnerability') | |
1149 | .where(text('command_object.object_id = vulnerability.id')) | |
1150 | .where(CommandObject.workspace_id == workspace_id) | |
1151 | .order_by(asc(CommandObject.create_date)) | |
1152 | .limit(1), | |
1138 | 1153 | deferred=True |
1139 | 1154 | ) |
1140 | 1155 | |
1141 | 1156 | _host_ip_query = ( |
1142 | 1157 | select([Host.ip]) |
1143 | .where(text('vulnerability.host_id = host.id')) | |
1158 | .where(text('vulnerability.host_id = host.id')) | |
1144 | 1159 | ) |
1145 | 1160 | _service_ip_query = ( |
1146 | 1161 | select([text('host_inner.ip')]) |
1147 | .select_from(text('host as host_inner, service')) | |
1148 | .where(text('vulnerability.service_id = service.id and ' | |
1149 | 'host_inner.id = service.host_id')) | |
1162 | .select_from(text('host as host_inner, service')) | |
1163 | .where(text('vulnerability.service_id = service.id and ' | |
1164 | 'host_inner.id = service.host_id')) | |
1150 | 1165 | ) |
1151 | 1166 | target_host_ip = column_property( |
1152 | 1167 | case([ |
1153 | 1168 | (text('vulnerability.host_id IS NOT null'), |
1154 | _host_ip_query.as_scalar()), | |
1169 | _host_ip_query.as_scalar()), | |
1155 | 1170 | (text('vulnerability.service_id IS NOT null'), |
1156 | _service_ip_query.as_scalar()) | |
1171 | _service_ip_query.as_scalar()) | |
1157 | 1172 | ]), |
1158 | 1173 | deferred=True |
1159 | 1174 | ) |
1160 | 1175 | |
1161 | 1176 | _host_os_query = ( |
1162 | 1177 | select([Host.os]) |
1163 | .where(text('vulnerability.host_id = host.id')) | |
1178 | .where(text('vulnerability.host_id = host.id')) | |
1164 | 1179 | ) |
1165 | 1180 | _service_os_query = ( |
1166 | 1181 | select([text('host_inner.os')]) |
1167 | .select_from(text('host as host_inner, service')) | |
1168 | .where(text('vulnerability.service_id = service.id and ' | |
1169 | 'host_inner.id = service.host_id')) | |
1182 | .select_from(text('host as host_inner, service')) | |
1183 | .where(text('vulnerability.service_id = service.id and ' | |
1184 | 'host_inner.id = service.host_id')) | |
1170 | 1185 | ) |
1171 | 1186 | |
1172 | 1187 | host_id = Column(Integer, ForeignKey(Host.id), index=True) |
1179 | 1194 | target_host_os = column_property( |
1180 | 1195 | case([ |
1181 | 1196 | (text('vulnerability.host_id IS NOT null'), |
1182 | _host_os_query.as_scalar()), | |
1197 | _host_os_query.as_scalar()), | |
1183 | 1198 | (text('vulnerability.service_id IS NOT null'), |
1184 | _service_os_query.as_scalar()) | |
1199 | _service_os_query.as_scalar()) | |
1185 | 1200 | ]), |
1186 | 1201 | deferred=True |
1187 | 1202 | ) |
1203 | 1218 | |
1204 | 1219 | @property |
1205 | 1220 | def has_duplicate(self): |
1206 | return self.vulnerability_duplicate_id == None | |
1221 | return self.vulnerability_duplicate_id is None | |
1207 | 1222 | |
1208 | 1223 | @property |
1209 | 1224 | def hostnames(self): |
1229 | 1244 | @declared_attr |
1230 | 1245 | def service(cls): |
1231 | 1246 | return relationship('Service', backref=backref("vulnerabilities", cascade="all, delete-orphan")) |
1232 | ||
1233 | 1247 | |
1234 | 1248 | @property |
1235 | 1249 | def parent(self): |
1251 | 1265 | kwargs['response'] = ''.join([x for x in kwargs['response'] if x in string.printable]) |
1252 | 1266 | super().__init__(*args, **kwargs) |
1253 | 1267 | |
1254 | ||
1255 | 1268 | @declared_attr |
1256 | 1269 | def service_id(cls): |
1257 | 1270 | return VulnerabilityGeneric.__table__.c.get( |
1319 | 1332 | id = Column(Integer, primary_key=True) |
1320 | 1333 | name = NonBlankColumn(Text) |
1321 | 1334 | |
1322 | workspace_id = Column( | |
1323 | Integer, | |
1324 | ForeignKey('workspace.id'), | |
1325 | index=True, | |
1326 | nullable=False | |
1327 | ) | |
1335 | # 1 workspace <--> N references | |
1336 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1337 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1328 | 1338 | workspace = relationship( |
1329 | 1339 | 'Workspace', |
1330 | backref=backref("references", | |
1331 | cascade="all, delete-orphan"), | |
1332 | 1340 | foreign_keys=[workspace_id], |
1341 | backref=backref("references", cascade="all, delete-orphan"), | |
1333 | 1342 | ) |
1334 | 1343 | |
1335 | 1344 | __table_args__ = ( |
1346 | 1355 | |
1347 | 1356 | |
1348 | 1357 | class ReferenceVulnerabilityAssociation(db.Model): |
1349 | ||
1350 | 1358 | __tablename__ = 'reference_vulnerability_association' |
1351 | 1359 | |
1352 | 1360 | vulnerability_id = Column(Integer, ForeignKey('vulnerability.id'), primary_key=True) |
1364 | 1372 | |
1365 | 1373 | |
1366 | 1374 | class PolicyViolationVulnerabilityAssociation(db.Model): |
1367 | ||
1368 | 1375 | __tablename__ = 'policy_violation_vulnerability_association' |
1369 | 1376 | |
1370 | 1377 | vulnerability_id = Column(Integer, ForeignKey('vulnerability.id'), primary_key=True) |
1371 | 1378 | policy_violation_id = Column(Integer, ForeignKey('policy_violation.id'), primary_key=True) |
1372 | 1379 | |
1373 | policy_violation = relationship("PolicyViolation", backref="policy_violation_associations", foreign_keys=[policy_violation_id]) | |
1374 | vulnerability = relationship("Vulnerability", backref=backref("policy_violationvulnerability_associations", cascade="all, delete-orphan"), | |
1375 | foreign_keys=[vulnerability_id]) | |
1380 | policy_violation = relationship("PolicyViolation", backref=backref("policy_violation_associations", cascade="all, delete-orphan"), foreign_keys=[policy_violation_id]) | |
1381 | vulnerability = relationship("Vulnerability", backref=backref("policy_violation_vulnerability_associations", cascade="all, delete-orphan"), foreign_keys=[vulnerability_id]) | |
1376 | 1382 | |
1377 | 1383 | |
1378 | 1384 | class ReferenceTemplateVulnerabilityAssociation(db.Model): |
1379 | ||
1380 | 1385 | __tablename__ = 'reference_template_vulnerability_association' |
1381 | 1386 | |
1382 | 1387 | vulnerability_id = Column(Integer, ForeignKey('vulnerability_template.id'), primary_key=True) |
1389 | 1394 | ) |
1390 | 1395 | vulnerability = relationship( |
1391 | 1396 | "VulnerabilityTemplate", |
1392 | backref=backref('reference_template_vulnerability_associations', | |
1393 | cascade="all, delete-orphan"), | |
1394 | foreign_keys=[vulnerability_id] | |
1397 | foreign_keys=[vulnerability_id], | |
1398 | backref=backref('reference_template_vulnerability_associations', cascade="all, delete-orphan") | |
1395 | 1399 | ) |
1396 | 1400 | |
1397 | 1401 | |
1398 | 1402 | class PolicyViolationTemplateVulnerabilityAssociation(db.Model): |
1399 | ||
1400 | 1403 | __tablename__ = 'policy_violation_template_vulnerability_association' |
1401 | 1404 | |
1402 | 1405 | vulnerability_id = Column(Integer, ForeignKey('vulnerability_template.id'), primary_key=True) |
1403 | 1406 | policy_violation_id = Column(Integer, ForeignKey('policy_violation_template.id'), primary_key=True) |
1404 | 1407 | |
1405 | policy_violation = relationship("PolicyViolationTemplate", backref="policy_violation_template_associations", foreign_keys=[policy_violation_id]) | |
1406 | vulnerability = relationship("VulnerabilityTemplate", backref=backref("policy_violation_template_vulnerability_associations", cascade="all, delete-orphan"), | |
1407 | foreign_keys=[vulnerability_id]) | |
1408 | policy_violation = relationship("PolicyViolationTemplate", backref=backref("policy_violation_template_associations", cascade="all, delete-orphan"), foreign_keys=[policy_violation_id]) | |
1409 | vulnerability = relationship("VulnerabilityTemplate", backref=backref("policy_violation_template_vulnerability_associations", cascade="all, delete-orphan"), foreign_keys=[vulnerability_id]) | |
1408 | 1410 | |
1409 | 1411 | |
1410 | 1412 | class PolicyViolationTemplate(Metadata): |
1414 | 1416 | |
1415 | 1417 | __table_args__ = ( |
1416 | 1418 | UniqueConstraint( |
1417 | 'name', | |
1418 | name='uix_policy_violation_template_name'), | |
1419 | 'name', | |
1420 | name='uix_policy_violation_template_name'), | |
1419 | 1421 | ) |
1420 | 1422 | |
1421 | 1423 | def __init__(self, name=None, **kwargs): |
1428 | 1430 | name = NonBlankColumn(Text) |
1429 | 1431 | |
1430 | 1432 | workspace_id = Column( |
1431 | Integer, | |
1432 | ForeignKey('workspace.id'), | |
1433 | index=True, | |
1434 | nullable=False | |
1435 | ) | |
1433 | Integer, | |
1434 | ForeignKey('workspace.id'), | |
1435 | index=True, | |
1436 | nullable=False | |
1437 | ) | |
1436 | 1438 | workspace = relationship( |
1437 | 'Workspace', | |
1438 | backref=backref("policy_violations", | |
1439 | cascade="all, delete-orphan"), | |
1440 | foreign_keys=[workspace_id], | |
1441 | ) | |
1439 | 'Workspace', | |
1440 | backref=backref("policy_violations", | |
1441 | cascade="all, delete-orphan"), | |
1442 | foreign_keys=[workspace_id], | |
1443 | ) | |
1442 | 1444 | |
1443 | 1445 | __table_args__ = ( |
1444 | 1446 | UniqueConstraint( |
1445 | 'name', | |
1446 | 'workspace_id', | |
1447 | name='uix_policy_violation_template_name_vulnerability_workspace'), | |
1447 | 'name', | |
1448 | 'workspace_id', | |
1449 | name='uix_policy_violation_template_name_vulnerability_workspace'), | |
1448 | 1450 | ) |
1449 | 1451 | |
1450 | 1452 | def __init__(self, name=None, workspace_id=None, **kwargs): |
1475 | 1477 | 'Service', |
1476 | 1478 | backref=backref('credentials', cascade="all, delete-orphan"), |
1477 | 1479 | foreign_keys=[service_id], |
1478 | ) | |
1479 | ||
1480 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1480 | ) | |
1481 | ||
1482 | # 1 workspace <--> N credentials | |
1483 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1484 | workspace_id = Column(Integer, ForeignKey('workspace.id', ondelete='CASCADE'), index=True, nullable=False) | |
1481 | 1485 | workspace = relationship( |
1482 | 1486 | 'Workspace', |
1483 | backref=backref('credentials', cascade="all, delete-orphan"), | |
1484 | 1487 | foreign_keys=[workspace_id], |
1488 | backref=backref('credentials', cascade="all, delete-orphan", passive_deletes=True), | |
1485 | 1489 | ) |
1486 | 1490 | |
1487 | 1491 | _host_ip_query = ( |
1488 | 1492 | select([Host.ip]) |
1489 | .where(text('credential.host_id = host.id')) | |
1493 | .where(text('credential.host_id = host.id')) | |
1490 | 1494 | ) |
1491 | 1495 | |
1492 | 1496 | _service_ip_query = ( |
1493 | 1497 | select([text('host_inner.ip || \'/\' || service.name')]) |
1494 | .select_from(text('host as host_inner, service')) | |
1495 | .where(text('credential.service_id = service.id and ' | |
1496 | 'host_inner.id = service.host_id')) | |
1498 | .select_from(text('host as host_inner, service')) | |
1499 | .where(text('credential.service_id = service.id and ' | |
1500 | 'host_inner.id = service.host_id')) | |
1497 | 1501 | ) |
1498 | 1502 | |
1499 | 1503 | target_ip = column_property( |
1500 | 1504 | case([ |
1501 | 1505 | (text('credential.host_id IS NOT null'), |
1502 | _host_ip_query.as_scalar()), | |
1506 | _host_ip_query.as_scalar()), | |
1503 | 1507 | (text('credential.service_id IS NOT null'), |
1504 | _service_ip_query.as_scalar()) | |
1508 | _service_ip_query.as_scalar()) | |
1505 | 1509 | ]), |
1506 | 1510 | deferred=True |
1507 | 1511 | ) |
1508 | ||
1509 | 1512 | |
1510 | 1513 | __table_args__ = ( |
1511 | 1514 | CheckConstraint('(host_id IS NULL AND service_id IS NOT NULL) OR ' |
1512 | 1515 | '(host_id IS NOT NULL AND service_id IS NULL)', |
1513 | 1516 | name='check_credential_host_service'), |
1514 | 1517 | UniqueConstraint( |
1515 | 'username', | |
1516 | 'host_id', | |
1517 | 'service_id', | |
1518 | 'workspace_id', | |
1519 | name='uix_credential_username_host_service_workspace' | |
1520 | ), | |
1518 | 'username', | |
1519 | 'host_id', | |
1520 | 'service_id', | |
1521 | 'workspace_id', | |
1522 | name='uix_credential_username_host_service_workspace' | |
1523 | ), | |
1521 | 1524 | ) |
1522 | 1525 | |
1523 | 1526 | @property |
1525 | 1528 | return self.host or self.service |
1526 | 1529 | |
1527 | 1530 | |
1528 | ||
1529 | 1531 | association_workspace_and_agents_table = Table( |
1530 | 'association_workspace_and_agents_table', | |
1531 | db.Model.metadata, | |
1532 | Column('workspace_id', Integer, ForeignKey('workspace.id')), | |
1533 | Column('agent_id', Integer, ForeignKey('agent.id')) | |
1534 | ) | |
1532 | 'association_workspace_and_agents_table', | |
1533 | db.Model.metadata, | |
1534 | Column('workspace_id', Integer, ForeignKey('workspace.id')), | |
1535 | Column('agent_id', Integer, ForeignKey('agent.id')) | |
1536 | ) | |
1535 | 1537 | |
1536 | 1538 | |
1537 | 1539 | class Workspace(Metadata): |
1710 | 1712 | name = NonBlankColumn(Text) |
1711 | 1713 | |
1712 | 1714 | workspace_id = Column( |
1713 | Integer, | |
1714 | ForeignKey('workspace.id'), | |
1715 | index=True, | |
1716 | nullable=False | |
1717 | ) | |
1715 | Integer, | |
1716 | ForeignKey('workspace.id'), | |
1717 | index=True, | |
1718 | nullable=False | |
1719 | ) | |
1718 | 1720 | |
1719 | 1721 | workspace = relationship( |
1720 | 1722 | 'Workspace', |
1721 | backref=backref('scope', cascade="all, delete-orphan"), | |
1722 | foreign_keys=[workspace_id], | |
1723 | ) | |
1723 | backref=backref('scope', cascade="all, delete-orphan"), | |
1724 | foreign_keys=[workspace_id], | |
1725 | ) | |
1724 | 1726 | |
1725 | 1727 | __table_args__ = ( |
1726 | 1728 | UniqueConstraint('name', 'workspace_id', |
1753 | 1755 | |
1754 | 1756 | |
1755 | 1757 | class User(db.Model, UserMixin): |
1756 | ||
1757 | 1758 | __tablename__ = 'faraday_user' |
1758 | 1759 | ROLES = ['admin', 'pentester', 'client', 'asset_owner'] |
1759 | 1760 | OTP_STATES = ["disabled", "requested", "confirmed"] |
1774 | 1775 | role = Column(Enum(*ROLES, name='user_roles'), |
1775 | 1776 | nullable=False, default='client') |
1776 | 1777 | _otp_secret = Column( |
1777 | String(32), | |
1778 | name="otp_secret", nullable=True) | |
1778 | String(32), | |
1779 | name="otp_secret", nullable=True | |
1780 | ) | |
1779 | 1781 | state_otp = Column(Enum(*OTP_STATES, name='user_otp_states'), nullable=False, default="disabled") |
1780 | 1782 | preferences = Column(JSONType, nullable=True, default={}) |
1783 | fs_uniquifier = Column(String(64), unique=True, nullable=False) # flask-security | |
1781 | 1784 | |
1782 | 1785 | # TODO: add many to many relationship to add permission to workspace |
1783 | 1786 | |
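
    The new `fs_uniquifier` column comes from Flask-Security-Too, which ties sessions and auth tokens to this value instead of the primary key; since it is unique and non-null here, pre-existing users need it backfilled during migration. A sketch of such a backfill (assumed helper, not Faraday's actual migration):

    import uuid

    def backfill_fs_uniquifier(session, User):
        # Give every pre-existing user a unique 32-character hex token;
        # uuid4().hex fits the String(64) column and is effectively unique.
        for user in session.query(User).filter(User.fs_uniquifier.is_(None)):
            user.fs_uniquifier = uuid.uuid4().hex
        session.commit()
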
1849 | 1852 | backref=backref('methodologies') |
1850 | 1853 | ) |
1851 | 1854 | template_id = Column( |
1852 | Integer, | |
1853 | ForeignKey('methodology_template.id', | |
1854 | ondelete="SET NULL"), | |
1855 | index=True, | |
1856 | nullable=True, | |
1857 | ) | |
1858 | ||
1855 | Integer, | |
1856 | ForeignKey('methodology_template.id', | |
1857 | ondelete="SET NULL"), | |
1858 | index=True, | |
1859 | nullable=True, | |
1860 | ) | |
1861 | ||
1862 | # 1 workspace <--> N methodologies | |
1863 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1864 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1859 | 1865 | workspace = relationship( |
1860 | 1866 | 'Workspace', |
1861 | 1867 | backref=backref('methodologies', cascade="all, delete-orphan"), |
1862 | 1868 | ) |
1863 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1864 | 1869 | |
1865 | 1870 | @property |
1866 | 1871 | def parent(self): |
1886 | 1891 | 'MethodologyTemplate', |
1887 | 1892 | backref=backref('tasks', cascade="all, delete-orphan")) |
1888 | 1893 | template_id = Column( |
1889 | Integer, | |
1890 | ForeignKey('methodology_template.id'), | |
1891 | index=True, | |
1892 | nullable=False, | |
1893 | ) | |
1894 | Integer, | |
1895 | ForeignKey('methodology_template.id'), | |
1896 | index=True, | |
1897 | nullable=False, | |
1898 | ) | |
1894 | 1899 | |
1895 | 1900 | # __table_args__ = ( |
1896 | 1901 | # UniqueConstraint(template_id, name='uix_task_template_name_desc_template_delete'), |
1934 | 1939 | secondary="task_assigned_to_association") |
1935 | 1940 | |
1936 | 1941 | methodology_id = Column( |
1937 | Integer, | |
1938 | ForeignKey('methodology.id'), | |
1939 | index=True, | |
1940 | nullable=False, | |
1941 | ) | |
1942 | Integer, | |
1943 | ForeignKey('methodology.id'), | |
1944 | index=True, | |
1945 | nullable=False, | |
1946 | ) | |
1942 | 1947 | methodology = relationship( |
1943 | 1948 | 'Methodology', |
1944 | 1949 | backref=backref('tasks', cascade="all, delete-orphan") |
1945 | 1950 | ) |
1946 | 1951 | |
1947 | 1952 | template_id = Column( |
1948 | Integer, | |
1949 | ForeignKey('task_template.id'), | |
1950 | index=True, | |
1951 | nullable=True, | |
1952 | ) | |
1953 | Integer, | |
1954 | ForeignKey('task_template.id'), | |
1955 | index=True, | |
1956 | nullable=True, | |
1957 | ) | |
1953 | 1958 | template = relationship('TaskTemplate', backref='tasks') |
1954 | 1959 | |
1960 | # 1 workspace <--> N tasks | |
1961 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
1962 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1955 | 1963 | workspace = relationship( |
1956 | 1964 | 'Workspace', |
1957 | 1965 | backref=backref('tasks', cascade="all, delete-orphan") |
1958 | 1966 | ) |
1959 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
1960 | 1967 | |
1961 | 1968 | # __table_args__ = ( |
1962 | 1969 | # UniqueConstraint(TaskABC.name, methodology_id, workspace_id, name='uix_task_name_desc_methodology_workspace'), |
2014 | 2021 | foreign_keys=[reply_to_id] |
2015 | 2022 | ) |
2016 | 2023 | |
2017 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, | |
2018 | nullable=False) | |
2024 | # 1 workspace <--> N comments | |
2025 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
2026 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) | |
2019 | 2027 | workspace = relationship( |
2020 | 2028 | 'Workspace', |
2021 | 2029 | foreign_keys=[workspace_id], |
2071 | 2079 | collection_class=set, |
2072 | 2080 | ) |
2073 | 2081 | filter = Column(JSONType, nullable=True, default=[]) |
2082 | ||
2074 | 2083 | @property |
2075 | 2084 | def parent(self): |
2076 | 2085 | return |
2084 | 2093 | |
2085 | 2094 | |
2086 | 2095 | class Notification(db.Model): |
2087 | ||
2088 | 2096 | __tablename__ = 'notification' |
2089 | 2097 | id = Column(Integer, primary_key=True) |
2090 | 2098 | |
2092 | 2100 | user_notified = relationship( |
2093 | 2101 | 'User', |
2094 | 2102 | backref=backref('notification', cascade="all, delete-orphan"), |
2095 | #primaryjoin="User.id == Notification.user_notified_id" | |
2103 | # primaryjoin="User.id == Notification.user_notified_id" | |
2096 | 2104 | ) |
2097 | 2105 | |
2098 | 2106 | object_id = Column(Integer, nullable=False) |
2103 | 2111 | workspace = relationship( |
2104 | 2112 | 'Workspace', |
2105 | 2113 | backref=backref('notification', cascade="all, delete-orphan"), |
2106 | #primaryjoin="Notification.id == Notification.workspace_id" | |
2114 | # primaryjoin="Notification.id == Notification.workspace_id" | |
2107 | 2115 | ) |
2108 | 2116 | |
2109 | 2117 | mark_read = Column(Boolean, default=False, index=True) |
2118 | 2126 | __tablename__ = 'knowledge_base' |
2119 | 2127 | id = Column(Integer, primary_key=True) |
2120 | 2128 | |
2121 | vulnerability_template_id = Column( | |
2122 | Integer, | |
2123 | ForeignKey('vulnerability_template.id'), | |
2124 | index=True, | |
2125 | nullable=True, | |
2126 | ) | |
2129 | vulnerability_template_id = Column( | |
2130 | Integer, | |
2131 | ForeignKey('vulnerability_template.id'), | |
2132 | index=True, | |
2133 | nullable=True, | |
2134 | ) | |
2127 | 2135 | vulnerability_template = relationship('VulnerabilityTemplate', |
2128 | backref=backref('knowledge', cascade="all, delete-orphan"), | |
2129 | ) | |
2136 | backref=backref('knowledge', cascade="all, delete-orphan"), | |
2137 | ) | |
2130 | 2138 | |
2131 | 2139 | faraday_kb_id = Column(Text, nullable=False) |
2132 | 2140 | reference_id = Column(Integer, nullable=False) |
2136 | 2144 | false_positive = Column(Integer, nullable=False, default=0) |
2137 | 2145 | verified = Column(Integer, nullable=False, default=0) |
2138 | 2146 | |
2139 | __table_args__ = (UniqueConstraint('external_identifier', 'tool_name', 'reference_id', name='uix_externalidentifier_toolname_referenceid'),) | |
2147 | __table_args__ = (UniqueConstraint('external_identifier', 'tool_name', 'reference_id', | |
2148 | name='uix_externalidentifier_toolname_referenceid'),) | |
2149 | ||
2150 | ||
2151 | def rule_default_name(context): | |
2152 | model = context.get_current_parameters()['model'] | |
2153 | create_date = context.get_current_parameters()['create_date'] | |
2154 | return f'Rule for model {model} @ {create_date.isoformat()}' | |
2140 | 2155 | |
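
    `rule_default_name` is a context-sensitive column default: SQLAlchemy invokes it at INSERT time, and `context.get_current_parameters()` exposes the other values of the row being inserted. The mechanism in isolation (simplified model):

    from datetime import datetime
    from sqlalchemy import Column, DateTime, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    def default_name(context):
        # get_current_parameters() returns the values of the INSERT in
        # flight, including defaults already computed for earlier columns.
        params = context.get_current_parameters()
        return f"Rule for model {params['model']} @ {params['create_date'].isoformat()}"

    class Rule(Base):
        __tablename__ = 'rule'
        id = Column(Integer, primary_key=True)
        model = Column(String, nullable=False)
        create_date = Column(DateTime, default=datetime.utcnow)
        name = Column(String, nullable=False, unique=True, default=default_name)

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(Rule(model='vulnerability'))
    session.commit()
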
2141 | 2156 | |
2142 | 2157 | class Rule(Metadata): |
2143 | 2158 | __tablename__ = 'rule' |
2144 | 2159 | id = Column(Integer, primary_key=True) |
2160 | description = Column(String, nullable=False, default="") | |
2145 | 2161 | model = Column(String, nullable=False) |
2146 | 2162 | object_parent = Column(String, nullable=True) |
2147 | 2163 | fields = Column(JSONType, nullable=True) |
2148 | object = Column(JSONType, nullable=False) | |
2149 | 2164 | enabled = Column(Boolean, nullable=False, default=True) |
2150 | actions = relationship("Action", secondary="rule_action", backref=backref("rules")) | |
2165 | actions = relationship("Action", secondary="rule_action", backref=backref("rules"), lazy='subquery') | |
2166 | # 1 workspace <--> N rules | |
2167 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
2151 | 2168 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
2152 | workspace = relationship('Workspace', backref=backref('rules', cascade="all, delete-orphan")) | |
2169 | workspace = relationship( | |
2170 | 'Workspace', | |
2171 | backref=backref('rules', cascade="all, delete-orphan") | |
2172 | ) | |
2173 | conditions = relationship("Condition", back_populates="rule", | |
2174 | cascade="all, delete-orphan", passive_deletes=True, lazy='subquery') | |
2175 | name = Column(String, nullable=False, unique=True, default=rule_default_name) | |
2153 | 2176 | |
2154 | 2177 | @property |
2155 | 2178 | def parent(self): |
2156 | 2179 | return |
2180 | ||
2181 | @property | |
2182 | def object(self): | |
2183 | # TODO THIS MUST BE DELETED AND REIMPLEMENTED FOR NEW METHODS |
2184 | return json.dumps( | |
2185 | [{condition.field: condition.value} for condition in self.conditions] | |
2186 | ) | |
2157 | 2187 | |
2158 | 2188 | @property |
2159 | 2189 | def disabled(self): |
2168 | 2198 | __tablename__ = 'action' |
2169 | 2199 | id = Column(Integer, primary_key=True) |
2170 | 2200 | name = Column(String, nullable=True) |
2201 | description = Column(String, nullable=False, default='') | |
2171 | 2202 | command = Column(String, nullable=False) |
2172 | 2203 | field = Column(String, nullable=True) |
2173 | 2204 | value = Column(String, nullable=True) |
2201 | 2232 | active = Column(Boolean, nullable=False, default=True) |
2202 | 2233 | last_run = Column(DateTime) |
2203 | 2234 | |
2235 | # 1 workspace <--> N schedules | |
2236 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
2204 | 2237 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
2205 | 2238 | workspace = relationship( |
2206 | 2239 | 'Workspace', |
2229 | 2262 | rule_id = Column(Integer, ForeignKey('rule.id'), index=True, nullable=False) |
2230 | 2263 | rule = relationship('Rule', foreign_keys=[rule_id], backref=backref('rule_actions', cascade="all, delete-orphan")) |
2231 | 2264 | action_id = Column(Integer, ForeignKey('action.id'), index=True, nullable=False) |
2232 | action = relationship('Action', foreign_keys=[action_id], backref=backref('rule_actions', cascade="all, delete-orphan")) | |
2265 | action = relationship('Action', foreign_keys=[action_id], | |
2266 | backref=backref('rule_actions', cascade="all, delete-orphan")) | |
2267 | ||
2268 | __table_args__ = (UniqueConstraint('rule_id', 'action_id', name='rule_action_uc'),) | |
2233 | 2269 | |
2234 | 2270 | |
2235 | 2271 | class Agent(Metadata): |
2236 | 2272 | __tablename__ = 'agent' |
2237 | 2273 | id = Column(Integer, primary_key=True) |
2238 | 2274 | token = Column(Text, unique=True, nullable=False, default=lambda: |
2239 | "".join([SystemRandom().choice(string.ascii_letters + string.digits) | |
2240 | for _ in range(64)])) | |
2275 | "".join([SystemRandom().choice(string.ascii_letters + string.digits) | |
2276 | for _ in range(64)])) | |
2241 | 2277 | workspaces = relationship( |
2242 | 2278 | 'Workspace', |
2243 | 2279 | secondary=association_workspace_and_agents_table, |
2252 | 2288 | |
2253 | 2289 | @property |
2254 | 2290 | def is_online(self): |
2255 | from faraday.server.websocket_factories import connected_agents # pylint:disable=import-outside-toplevel | |
2291 | from faraday.server.websocket_factories import connected_agents # pylint:disable=import-outside-toplevel | |
2256 | 2292 | return self.id in connected_agents |
2257 | 2293 | |
2258 | 2294 | @property |
2285 | 2321 | message = Column(String, nullable=True) |
2286 | 2322 | executor_id = Column(Integer, ForeignKey('executor.id'), index=True, nullable=False) |
2287 | 2323 | executor = relationship('Executor', foreign_keys=[executor_id], backref=backref('executions', cascade="all, delete-orphan")) |
2324 | # 1 workspace <--> N agent_executions | |
2325 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
2288 | 2326 | workspace_id = Column(Integer, ForeignKey('workspace.id'), index=True, nullable=False) |
2289 | 2327 | workspace = relationship( |
2290 | 2328 | 'Workspace', |
2291 | backref=backref('agent_executions', cascade="all, delete-orphan"), | |
2329 | backref=backref('agent_executions', cascade="all, delete-orphan") | |
2292 | 2330 | ) |
2293 | 2331 | parameters_data = Column(JSONType, nullable=False) |
2294 | 2332 | command_id = Column(Integer, ForeignKey('command.id'), index=True) |
2298 | 2336 | backref=backref('agent_execution_id', cascade="all, delete-orphan") |
2299 | 2337 | ) |
2300 | 2338 | |
2301 | ||
2302 | 2339 | @property |
2303 | 2340 | def parent(self): |
2304 | 2341 | return |
2311 | 2348 | field = Column(String) |
2312 | 2349 | value = Column(String) |
2313 | 2350 | operator = Column(String, default='equals') |
2314 | rule_id = Column(Integer, ForeignKey('rule.id'), index=True, nullable=False) | |
2315 | rule = relationship('Rule', foreign_keys=[rule_id], backref=backref('conditions', cascade="all, delete-orphan")) | |
2351 | # 1 rule <--> N conditions | |
2352 | # 1 to N (the FK is placed in the child) and bidirectional (backref) | |
2353 | # rule_id = Column(Integer, ForeignKey('rule.id'), index=True, nullable=False) | |
2354 | # rule = relationship('Rule', foreign_keys=[rule_id], backref=backref('conditions', cascade="all, delete-orphan")) | |
2355 | rule_id = Column(Integer, ForeignKey('rule.id', ondelete="CASCADE"), index=True, nullable=False) | |
2356 | rule = relationship('Rule', back_populates="conditions") | |
2316 | 2357 | |
2317 | 2358 | @property |
2318 | 2359 | def parent(self): |
2332 | 2373 | rule_id = Column(Integer, ForeignKey('rule.id'), index=True, nullable=False) |
2333 | 2374 | rule = relationship('Rule', foreign_keys=[rule_id], backref=backref('executions', cascade="all, delete-orphan")) |
2334 | 2375 | command_id = Column(Integer, ForeignKey('command.id'), index=True, nullable=False) |
2335 | command = relationship('Command', foreign_keys=[command_id], backref=backref('rule_executions', cascade="all, delete-orphan")) | |
2376 | command = relationship('Command', foreign_keys=[command_id], | |
2377 | backref=backref('rule_executions', cascade="all, delete-orphan")) | |
2336 | 2378 | |
2337 | 2379 | @property |
2338 | 2380 | def parent(self): |
2340 | 2382 | |
2341 | 2383 | |
2342 | 2384 | class SearchFilter(Metadata): |
2343 | ||
2344 | 2385 | __tablename__ = 'search_filter' |
2345 | 2386 | id = Column(Integer, primary_key=True) |
2346 | 2387 | name = Column(String, nullable=False) |
2347 | json_query = Column(String, nullable=False) # meant to store json but just readonly | |
2388 | json_query = Column(String, nullable=False) # meant to store json but just readonly | |
2348 | 2389 | user_query = Column(String, nullable=False) |
2349 | 2390 | |
2350 | 2391 | |
2379 | 2420 | "COALESCE(website, ''), workspace_id, COALESCE(source_code_id, -1));" |
2380 | 2421 | ) |
2381 | 2422 | |
2382 | ||
2383 | 2423 | event.listen( |
2384 | 2424 | VulnerabilityGeneric.__table__, |
2385 | 2425 | 'after_create', |
2393 | 2433 | ) |
2394 | 2434 | |
2395 | 2435 | # We have to import this after all models are defined |
2396 | import faraday.server.events # pylint: disable=unused-import | |
2436 | import faraday.server.events # noqa: F401 |
32 | 32 | |
33 | 33 | def _deserialize(self, value, attr, data, **kwargs): |
34 | 34 | if value is not None and value: |
35 | return datetime.datetime.fromtimestamp(self._validated(value)/1e3) | |
35 | return datetime.datetime.fromtimestamp(self._validated(value) / 1e3) | |
36 | 36 | |
37 | 37 | |
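
    The `/ 1e3` above converts a JavaScript-style millisecond epoch into the seconds that `fromtimestamp` expects; for instance:

    import datetime

    ms = 1577836800000  # 2020-01-01T00:00:00Z, in milliseconds
    dt = datetime.datetime.fromtimestamp(ms / 1e3, tz=datetime.timezone.utc)
    assert dt.isoformat() == '2020-01-01T00:00:00+00:00'
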
38 | 38 | class FaradayCustomField(fields.Field): |
183 | 183 | |
184 | 184 | return self.write_field._deserialize(value, attr, data, **kwargs) |
185 | 185 | |
186 | def _add_to_schema(self, field_name, schema): | |
186 | def _bind_to_schema(self, field_name, schema): | |
187 | 187 | # Propagate to child fields |
188 | super()._add_to_schema(field_name, schema) | |
189 | self.read_field._add_to_schema(field_name, schema) | |
190 | self.write_field._add_to_schema(field_name, schema) | |
188 | super()._bind_to_schema(field_name, schema) | |
189 | self.read_field._bind_to_schema(field_name, schema) | |
190 | self.write_field._bind_to_schema(field_name, schema) | |
191 | 191 | |
192 | 192 | |
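
    The rename `_add_to_schema` → `_bind_to_schema` tracks marshmallow 3.0, where the binding hook changed name; a composite field must propagate binding to its children under the new name or they never learn their field name and parent schema. A sketch of such a read-one/write-another wrapper (simplified, assumed field):

    from marshmallow import fields

    class ReadWriteField(fields.Field):
        """Serialize with one field, deserialize with another."""

        def __init__(self, read_field, write_field, **kwargs):
            self.read_field = read_field
            self.write_field = write_field
            super().__init__(**kwargs)

        def _serialize(self, value, attr, obj, **kwargs):
            return self.read_field._serialize(value, attr, obj, **kwargs)

        def _deserialize(self, value, attr, data, **kwargs):
            return self.write_field._deserialize(value, attr, data, **kwargs)

        def _bind_to_schema(self, field_name, schema):
            # Bind this wrapper, then both children (marshmallow 3 hook name).
            super()._bind_to_schema(field_name, schema)
            self.read_field._bind_to_schema(field_name, schema)
            self.write_field._bind_to_schema(field_name, schema)
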
193 | 193 | class SeverityField(fields.String): |
36 | 36 | report_json: dict, |
37 | 37 | user_id: int): |
38 | 38 | logger.info("Send Report data to workspace [%s]", workspace_name) |
39 | from faraday.server.web import app # pylint:disable=import-outside-toplevel | |
40 | with app.app_context(): | |
39 | from faraday.server.web import get_app # pylint:disable=import-outside-toplevel | |
40 | with get_app().app_context(): | |
41 | 41 | ws = Workspace.query.filter_by(name=workspace_name).one() |
42 | 42 | command = Command.query.filter_by(id=command_id).one() |
43 | 43 | user = User.query.filter_by(id=user_id).one() |
0 | 0 | # Faraday Penetration Test IDE |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | ||
4 | # I'm Py3⏎ |
95 | 95 | # based systems). This second fork guarantees that the child is no |
96 | 96 | # longer a session leader, preventing the daemon from ever acquiring |
97 | 97 | # a controlling terminal. |
98 | pid = os.fork() # Fork a second child. | |
98 | pid = os.fork() # Fork a second child. | |
99 | 99 | except OSError as e: |
100 | 100 | raise Exception("%s [%d]" % (e.strerror, e.errno)) |
101 | 101 | |
109 | 109 | os.umask(UMASK) |
110 | 110 | else: |
111 | 111 | # exit() or _exit()? See below. |
112 | os._exit(0) # Exit parent (the first child) of the second child. | |
112 | os._exit(0) # Exit parent (the first child) of the second child. | |
113 | 113 | else: |
114 | 114 | # exit() or _exit()? |
115 | 115 | # _exit is like exit(), but it doesn't call any functions registered |
118 | 118 | # streams to be flushed twice and any temporary files may be unexpectedly |
119 | 119 | # removed. It's therefore recommended that child branches of a fork() |
120 | 120 | # and the parent branch(es) of a daemon use _exit(). |
121 | os._exit(0) # Exit parent of the first child. | |
121 | os._exit(0) # Exit parent of the first child. | |
122 | 122 | |
123 | 123 | # NOTE(mrocha): Since we need all file descriptors opened during server |
124 | 124 | # setup (i.e.: databases sessions, logging, socket connections, etc.), we |
158 | 158 | logger.info("Faraday Server stopped successfully") |
159 | 159 | except OSError as err: |
160 | 160 | if err.errno == errno.EPERM: |
161 | logger.error("Couldn't stop Faraday Server. User doesn't"\ | |
162 | "have enough permissions") | |
161 | logger.error("Couldn't stop Faraday Server. User doesn't "
162 | "have enough permissions") | |
163 | 163 | return False |
164 | 164 | else: |
165 | 165 | raise err |
181 | 181 | remove_pid_file(port) |
182 | 182 | return None |
183 | 183 | elif err.errno == errno.EPERM: |
184 | logger.warning("Server is running BUT the current user"\ | |
185 | "doesn't have enough access to operate with it") | |
184 | logger.warning("Server is running BUT the current user "
185 | "doesn't have enough access to operate with it") | |
186 | 186 | return pid |
187 | 187 | else: |
188 | 188 | raise |
189 | 189 | else: |
190 | 190 | return pid |
191 | ||
191 | 192 | |
192 | 193 | def get_server_pid(port): |
193 | 194 | if not Path(str(FARADAY_SERVER_PID_FILE).format(port)).exists(): |
199 | 200 | try: |
200 | 201 | pid = int(pid_file.readline()) |
201 | 202 | except ValueError: |
202 | logger.warning('PID file was found but is corrupted. '\ | |
203 | 'Assuming server is not running. Please check manually'\ | |
204 | 'if Faraday Server is effectively running') | |
203 | logger.warning('PID file was found but is corrupted. ' | |
204 | 'Assuming server is not running. Please check manually '
205 | 'if Faraday Server is effectively running') | |
205 | 206 | remove_pid_file(port) |
206 | 207 | return None |
207 | 208 |
155 | 155 | else: |
156 | 156 | count_filter = [func.count(distinct(count_col))] |
157 | 157 | |
158 | count_q = query.statement.with_only_columns(count_filter).\ | |
159 | order_by(None).group_by(None) | |
158 | count_q = query.statement.with_only_columns(count_filter). \ | |
159 | order_by(None).group_by(None) | |
160 | 160 | count = query.session.execute(count_q).scalar() |
161 | 161 | |
162 | 162 | return count |
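
This helper counts results by rewriting the statement's SELECT list rather than calling `Query.count()`, which would wrap the whole query in a subquery. Condensed, the idea is (assuming the SQLAlchemy 1.3-style list argument used here):

    # Sketch: count rows by swapping SELECT columns instead of subquerying.
    from sqlalchemy import distinct, func

    def fast_count(query, count_col):
        count_q = (query.statement
                   .with_only_columns([func.count(distinct(count_col))])
                   .order_by(None).group_by(None))  # clear useless clauses
        return query.session.execute(count_q).scalar()
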
213 | 213 | object_type = instance.__tablename__ |
214 | 214 | if object_type is None: |
215 | 215 | if instance.__class__.__name__ in ['Vulnerability', |
216 | 'VulnerabilityWeb', | |
217 | 'VulnerabilityCode']: | |
216 | 'VulnerabilityWeb', | |
217 | 'VulnerabilityCode']: | |
218 | 218 | object_type = 'vulnerability' |
219 | 219 | else: |
220 | 220 | raise RuntimeError(f"Unknown table for object: {instance}") |
263 | 263 | |
264 | 264 | if get_object_type_for(obj) == 'vulnerability': |
265 | 265 | # This is a special key due to model inheritance |
266 | from faraday.server.models import VulnerabilityGeneric # pylint:disable=import-outside-toplevel | |
266 | from faraday.server.models import VulnerabilityGeneric # pylint:disable=import-outside-toplevel | |
267 | 267 | klass = VulnerabilityGeneric |
268 | 268 | else: |
269 | 269 | klass = obj.__class__ |
310 | 310 | |
311 | 311 | |
312 | 312 | def is_unique_constraint_violation(exception): |
313 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
313 | from faraday.server.models import db # pylint:disable=import-outside-toplevel | |
314 | 314 | if db.engine.dialect.name != 'postgresql': |
315 | 315 | # Not implemented for RDBMSs other than postgres; we can live without
316 | 316 | # this since it is just an extra check |
9 | 9 | |
10 | 10 | |
11 | 11 | debug_logger = logging.getLogger(__name__) |
12 | ||
12 | 13 | |
13 | 14 | class Timer: |
14 | 15 | def __init__(self, tag, logger=None): |
27 | 28 | # |
28 | 29 | # Debug utility extracted from http://docs.sqlalchemy.org/en/latest/faq/performance.html |
29 | 30 | # |
31 | ||
32 | ||
30 | 33 | @contextlib.contextmanager |
31 | 34 | def profiled(): |
32 | 35 | pr = cProfile.Profile() |
0 | import re | |
1 | 0 | import csv |
2 | 1 | from io import StringIO, BytesIO |
3 | 2 | import logging |
203 | 202 | |
204 | 203 | # Patch possible formula injection attacks |
205 | 204 | def csv_escape(vuln_dict): |
206 | for key,value in vuln_dict.items(): | |
205 | for key, value in vuln_dict.items(): | |
207 | 206 | if str(value).startswith('=') or str(value).startswith('+') or str(value).startswith('-') or str(value).startswith('@'): |
208 | 207 | # Convert value to str just in case it has another type (like a list or
209 | 208 | # dict). This would be done anyway by the csv writer. |
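
`csv_escape` guards against spreadsheet formula injection: a cell starting with `=`, `+`, `-` or `@` is evaluated as a formula when the exported CSV is opened in Excel or LibreOffice. The usual mitigation is to prefix such values so they are read as text; a sketch of the whole routine (the quote prefix is the common convention, assumed here rather than copied from the source):

    # Sketch: neutralize CSV cells a spreadsheet would treat as formulas.
    def csv_escape(vuln_dict):
        for key, value in vuln_dict.items():
            value = str(value)  # lists/dicts are stringified anyway
            if value.startswith(('=', '+', '-', '@')):
                vuln_dict[key] = "'" + value  # force a plain-text cell
        return vuln_dict
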
26 | 26 | VALID_OPERATORS = set(OPERATORS.keys()) - set(['desc', 'asc']) |
27 | 27 | |
28 | 28 | logger = logging.getLogger(__name__) |
29 | ||
29 | 30 | |
30 | 31 | class FlaskRestlessFilterSchema(Schema): |
31 | 32 | name = fields.String(required=True) |
174 | 175 | def _model_class(self): |
175 | 176 | return VulnerabilityWeb |
176 | 177 | |
178 | ||
177 | 179 | class FlaskRestlessVulnerabilityTemplateFilterSchema(FlaskRestlessFilterSchema): |
178 | 180 | def _model_class(self): |
179 | 181 | return VulnerabilityTemplate |
180 | 182 | |
183 | ||
181 | 184 | class FlaskRestlessHostFilterSchema(FlaskRestlessFilterSchema): |
182 | 185 | def _model_class(self): |
183 | 186 | return Host |
184 | 187 | |
188 | ||
185 | 189 | class FlaskRestlessWorkspaceFilterSchema(FlaskRestlessFilterSchema): |
186 | 190 | def _model_class(self): |
187 | 191 | return Workspace |
188 | 192 | |
193 | ||
189 | 194 | class FlaskRestlessUserFilterSchema(FlaskRestlessFilterSchema): |
190 | 195 | def _model_class(self): |
191 | 196 | return User |
192 | ||
193 | 197 | |
194 | 198 | |
195 | 199 | class FlaskRestlessOperator(Schema): |
3 | 3 | See the file 'doc/LICENSE' for the license information |
4 | 4 | |
5 | 5 | """ |
6 | ||
7 | ||
6 | 8 | def remove_null_caracters(string): |
7 | 9 | string = string.replace('\x00', '') |
8 | 10 | string = string.replace('\00', '') |
20 | 20 | from sqlalchemy import and_, or_ |
21 | 21 | from sqlalchemy import inspect as sqlalchemy_inspect |
22 | 22 | from sqlalchemy.ext.associationproxy import AssociationProxy |
23 | from sqlalchemy.ext.hybrid import hybrid_property | |
23 | 24 | from sqlalchemy.orm.attributes import InstrumentedAttribute |
24 | 25 | from sqlalchemy.orm.attributes import QueryableAttribute |
25 | 26 | from sqlalchemy.orm import ColumnProperty |
111 | 112 | #: be described by the strings ``'=='``, ``'eq'``, ``'equals'``, etc. |
112 | 113 | OPERATORS = { |
113 | 114 | # Operators which accept a single argument. |
114 | 'is_null': lambda f: f == None, | |
115 | 'is_not_null': lambda f: f != None, | |
115 | 'is_null': lambda f: f.is_(None),
116 | 'is_not_null': lambda f: f.isnot(None),
116 | 117 | 'desc': lambda f: f.desc, |
117 | 118 | 'asc': lambda f: f.asc, |
118 | 119 | # Operators which accept two arguments. |
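
On a SQLAlchemy column, `f == None` is operator-overloaded into an `IS NULL` expression, so replacing it with Python's identity test `f is None` would silently return a plain boolean instead of a SQL clause; `is_()`/`isnot()` keep the SQL semantics while satisfying flake8's E711. A quick illustration, assuming a mapped column:

    # Sketch: NULL tests on column expressions vs. plain Python identity.
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Host(Base):
        __tablename__ = 'host'
        id = Column(Integer, primary_key=True)
        os = Column(String)

    print(Host.os == None)    # noqa: E711 -- host.os IS NULL (overloaded ==)
    print(Host.os.is_(None))  # host.os IS NULL (explicit, E711-clean)
    print(Host.os is None)    # False: a Python bool, never a SQL clause
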
279 | 280 | class JunctionFilter(Filter): |
280 | 281 | def __init__(self, *subfilters): |
281 | 282 | self.subfilters = subfilters |
283 | ||
282 | 284 | def __iter__(self): |
283 | 285 | return iter(self.subfilters) |
284 | 286 | |
493 | 495 | create_filt = QueryBuilder._create_filter |
494 | 496 | |
495 | 497 | def create_filters(filt): |
496 | if not getattr(filt, 'fieldname', False) or filt.fieldname.split('__')[0] in valid_model_fields: | |
498 | if not getattr(filt, 'fieldname', False) \ | |
499 | or filt.fieldname.split('__')[0] in valid_model_fields: | |
497 | 500 | try: |
498 | 501 | return create_filt(model, filt) |
499 | except AttributeError: | |
502 | except AttributeError as e: | |
500 | 503 | # Can't create the filter since the model or submodel does not have the attribute (usually mapper) |
501 | return None | |
502 | return None | |
504 | raise AttributeError(f"Foreing field {filt.fieldname.split('__')[0]} not found in submodel") | |
505 | raise AttributeError(f"Field {filt.fieldname} not found in model") | |
503 | 506 | |
504 | 507 | return create_filters |
505 | 508 | |
541 | 544 | query = session.query(*select_fields) |
542 | 545 | else: |
543 | 546 | query = session.query(model) |
547 | ||
544 | 548 | # This function call may raise an exception. |
545 | valid_model_fields = [str(algo).split('.')[1] for algo in sqlalchemy_inspect(model).attrs] | |
549 | valid_model_fields = [] | |
550 | for orm_descriptor in sqlalchemy_inspect(model).all_orm_descriptors: | |
551 | if isinstance(orm_descriptor, InstrumentedAttribute): | |
552 | valid_model_fields.append(str(orm_descriptor).split('.')[1]) | |
553 | if isinstance(orm_descriptor, hybrid_property): | |
554 | valid_model_fields.append(orm_descriptor.__name__) | |
546 | 555 | |
547 | 556 | filters_generator = map( # pylint: disable=W1636 |
548 | 557 | QueryBuilder.create_filters_func(model, valid_model_fields), |
549 | 558 | search_params.filters |
550 | 559 | ) |
560 | ||
551 | 561 | filters = [filt for filt in filters_generator if filt is not None] |
552 | 562 | |
553 | 563 | # Multiple filter criteria at the top level of the provided search |
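
The rewritten lookup walks `all_orm_descriptors` instead of `attrs`, so hybrid properties become valid filter fields alongside mapped columns. Reduced to its core, the collection looks like this (assuming a plain declarative model):

    # Sketch: collect filterable names, including hybrid properties.
    from sqlalchemy import inspect as sqlalchemy_inspect
    from sqlalchemy.ext.hybrid import hybrid_property
    from sqlalchemy.orm.attributes import InstrumentedAttribute

    def valid_fields(model):
        names = []
        for descriptor in sqlalchemy_inspect(model).all_orm_descriptors:
            if isinstance(descriptor, InstrumentedAttribute):
                names.append(str(descriptor).split('.')[1])  # 'Model.field'
            elif isinstance(descriptor, hybrid_property):
                names.append(descriptor.__name__)
        return names
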
47 | 47 | |
48 | 48 | response.direct_passthrough = False |
49 | 49 | |
50 | if (response.status_code < 200 or | |
51 | response.status_code >= 300 or | |
52 | 'Content-Encoding' in response.headers): | |
50 | if (response.status_code < 200 | |
51 | or response.status_code >= 300 | |
52 | or 'Content-Encoding' in response.headers): | |
53 | 53 | return response |
54 | 54 | gzip_buffer = IO() |
55 | 55 | gzip_file = gzip.GzipFile(mode='wb', |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | import sys |
4 | import functools | |
5 | 4 | import logging |
6 | 5 | from signal import SIGABRT, SIGILL, SIGINT, SIGSEGV, SIGTERM, SIG_DFL, signal |
7 | 6 | |
19 | 18 | |
20 | 19 | from flask_mail import Mail |
21 | 20 | |
22 | from OpenSSL.SSL import Error as SSLError | |
23 | ||
24 | 21 | import faraday.server.config |
25 | 22 | |
26 | 23 | from faraday.server.config import CONST_FARADAY_HOME_PATH, smtp |
27 | from faraday.server.utils import logger | |
28 | 24 | from faraday.server.threads.reports_processor import ReportsManager, REPORTS_QUEUE |
29 | 25 | from faraday.server.threads.ping_home import PingHomeThread |
30 | 26 | from faraday.server.app import create_app |
33 | 29 | BroadcastServerProtocol |
34 | 30 | ) |
35 | 31 | |
32 | FARADAY_APP = None | |
36 | 33 | |
37 | app = create_app() # creates a Flask(__name__) app | |
38 | # After 'Create app' | |
39 | app.config['MAIL_SERVER'] = smtp.host | |
40 | app.config['MAIL_PORT'] = smtp.port | |
41 | app.config['MAIL_USE_SSL'] = smtp.ssl | |
42 | app.config['MAIL_USERNAME'] = smtp.username | |
43 | app.config['MAIL_PASSWORD'] = smtp.password | |
44 | mail = Mail(app) | |
45 | 34 | logger = logging.getLogger(__name__) |
46 | 35 | |
47 | 36 | |
82 | 71 | WEB_UI_LOCAL_PATH = faraday.server.config.FARADAY_BASE / 'server/www' |
83 | 72 | |
84 | 73 | def __init__(self): |
85 | logger.info(f'Starting web server at http://' | |
74 | ||
75 | logger.info('Starting web server at http://' | |
86 | 76 | f'{faraday.server.config.faraday_server.bind_address}:' |
87 | 77 | f'{faraday.server.config.faraday_server.port}/') |
88 | 78 | self.__websocket_port = faraday.server.config.faraday_server.websocket_port or 9000 |
97 | 87 | certs = (faraday.server.config.ssl.keyfile, faraday.server.config.ssl.certificate) |
98 | 88 | if not all(certs): |
99 | 89 | logger.critical("HTTPS request but SSL certificates are not configured") |
100 | sys.exit(1) # Abort web-server startup | |
90 | sys.exit(1) # Abort web-server startup | |
101 | 91 | return ssl.DefaultOpenSSLContextFactory(*certs) |
102 | 92 | |
103 | 93 | def __build_server_tree(self): |
114 | 104 | return FileWithoutDirectoryListing(WebServer.WEB_UI_LOCAL_PATH) |
115 | 105 | |
116 | 106 | def __build_api_resource(self): |
117 | return FaradayWSGIResource(reactor, reactor.getThreadPool(), app) | |
107 | return FaradayWSGIResource(reactor, reactor.getThreadPool(), get_app()) | |
118 | 108 | |
119 | 109 | def __build_websockets_resource(self): |
120 | 110 | websocket_port = int(faraday.server.config.faraday_server.websocket_port) |
180 | 170 | logger.exception(e) |
181 | 171 | self.__stop_all_threads() |
182 | 172 | sys.exit(1) |
183 | # I'm Py3 | |
173 | ||
174 | ||
175 | def get_app(): | |
176 | global FARADAY_APP # pylint: disable=W0603 | |
177 | if not FARADAY_APP: | |
178 | app = create_app() # creates a Flask(__name__) app | |
179 | # After 'Create app' | |
180 | app.config['MAIL_SERVER'] = smtp.host | |
181 | app.config['MAIL_PORT'] = smtp.port | |
182 | app.config['MAIL_USE_SSL'] = smtp.ssl | |
183 | app.config['MAIL_USERNAME'] = smtp.username | |
184 | app.config['MAIL_PASSWORD'] = smtp.password | |
185 | mail = Mail(app) | |
186 | FARADAY_APP = app | |
187 | return FARADAY_APP |
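
With `get_app()` the Flask app is no longer built as a side effect of importing `faraday.server.web`; the first caller constructs and configures it, and every later caller gets the same instance. The shape of the pattern in isolation (config value is illustrative):

    # Sketch: lazy module-level singleton for the Flask app.
    from flask import Flask

    _APP = None

    def get_app():
        global _APP
        if _APP is None:
            _APP = Flask(__name__)
            _APP.config['MAIL_SERVER'] = 'localhost'  # illustrative config
        return _APP

    assert get_app() is get_app()  # one shared instance
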
29 | 29 | from faraday.server.api.modules.websocket_auth import decode_agent_websocket_token |
30 | 30 | from faraday.server.events import changes_queue |
31 | 31 | |
32 | ||
33 | 32 | logger = logging.getLogger(__name__) |
34 | ||
35 | 33 | |
36 | 34 | connected_agents = {} |
37 | 35 | |
51 | 49 | return (protocol, headers) |
52 | 50 | |
53 | 51 | def onMessage(self, payload, is_binary): |
54 | from faraday.server.web import app # pylint:disable=import-outside-toplevel | |
55 | 52 | """ |
56 | 53 | We only support JOIN and LEAVE workspace messages. |
57 | 54 | When authentication is implemented we need to verify |
59 | 56 | When authentication is implemented we need to reply |
60 | 57 | the client if the join failed. |
61 | 58 | """ |
59 | from faraday.server.web import get_app # pylint:disable=import-outside-toplevel | |
62 | 60 | if not is_binary: |
63 | 61 | message = json.loads(payload) |
64 | 62 | if message['action'] == 'JOIN_WORKSPACE': |
66 | 64 | logger.warning(f'Invalid join workspace message: {message}') |
67 | 65 | self.sendClose() |
68 | 66 | return |
69 | signer = itsdangerous.TimestampSigner(app.config['SECRET_KEY'], | |
67 | signer = itsdangerous.TimestampSigner(get_app().config['SECRET_KEY'], | |
70 | 68 | salt="websocket") |
71 | 69 | try: |
72 | 70 | workspace_id = signer.unsign(message['token'], max_age=60) |
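
The join token is an itsdangerous timestamp signature over the workspace id, verified with `max_age=60` so stale tokens are rejected. Standalone, the round trip looks like this (the secret is made up for the example):

    # Sketch: signing and verifying a websocket join token.
    import itsdangerous

    signer = itsdangerous.TimestampSigner('not-the-real-secret',
                                          salt='websocket')
    token = signer.sign('42')                        # workspace id 42
    workspace_id = signer.unsign(token, max_age=60)  # raises if too old
    assert workspace_id == b'42'
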
76 | 74 | '{}'.format(message['workspace'])) |
77 | 75 | logger.exception(e) |
78 | 76 | else: |
79 | with app.app_context(): | |
77 | with get_app().app_context(): | |
80 | 78 | workspace = Workspace.query.get(int(workspace_id)) |
81 | 79 | if workspace.name != message['workspace']: |
82 | 80 | logger.warning( |
95 | 93 | logger.warning("Invalid agent join message") |
96 | 94 | self.sendClose(1000, reason="Invalid JOIN_AGENT message") |
97 | 95 | return False |
98 | with app.app_context(): | |
96 | with get_app().app_context(): | |
99 | 97 | try: |
100 | 98 | agent = decode_agent_websocket_token(message['token']) |
101 | 99 | update_executors(agent, message['executors']) |
106 | 104 | # factory will now send broadcast messages to the agent |
107 | 105 | return self.factory.join_agent(self, agent) |
108 | 106 | if message['action'] == 'LEAVE_AGENT': |
109 | with app.app_context(): | |
107 | with get_app().app_context(): | |
110 | 108 | (agent_id,) = [ |
111 | 109 | k |
112 | 110 | for (k, v) in connected_agents.items() |
116 | 114 | assert agent is not None # TODO the agent could be deleted here |
117 | 115 | return self.factory.leave_agent(self, agent) |
118 | 116 | if message['action'] == 'RUN_STATUS': |
119 | with app.app_context(): | |
117 | with get_app().app_context(): | |
120 | 118 | if 'executor_name' not in message: |
121 | 119 | logger.warning(f'Missing executor_name param in message: {message}') |
122 | 120 | return True |
150 | 148 | else: |
151 | 149 | agent_execution.successful = message.get('successful', None) |
152 | 150 | agent_execution.running = message.get('running', None) |
153 | agent_execution.message = message.get('message','') | |
151 | agent_execution.message = message.get('message', '') | |
154 | 152 | db.session.commit() |
155 | 153 | else: |
156 | 154 | logger.exception( |
15 | 15 | import faraday.server.web |
16 | 16 | from faraday.server.models import db, Workspace |
17 | 17 | from faraday.server.utils import daemonize |
18 | from faraday.server.web import app | |
18 | from faraday.server.web import get_app | |
19 | 19 | from alembic.script import ScriptDirectory |
20 | 20 | from alembic.config import Config |
21 | 21 | |
47 | 47 | |
48 | 48 | |
49 | 49 | def check_postgresql(): |
50 | with app.app_context(): | |
50 | with get_app().app_context(): | |
51 | 51 | try: |
52 | 52 | if not db.session.query(Workspace).count(): |
53 | 53 | logger.warning('No workspaces found') |
72 | 72 | script = ScriptDirectory.from_config(config) |
73 | 73 | |
74 | 74 | head_revision = script.get_current_head() |
75 | with app.app_context(): | |
75 | with get_app().app_context(): | |
76 | 76 | try: |
77 | 77 | conn = db.session.connection() |
78 | 78 | except ImportError: |
101 | 101 | "with a schema migration not merged yet. If you are a " |
102 | 102 | "normal user, consider reporting this bug back to us" |
103 | 103 | ) |
104 | ||
104 | 105 | |
105 | 106 | def main(): |
106 | 107 | os.chdir(faraday.server.config.FARADAY_BASE) |
33 | 33 | self.updateMetadata() |
34 | 34 | return func(self, *args, **kwargs) |
35 | 35 | return wrapper |
36 | ||
37 | # I'm Py3⏎ |
81 | 81 | |
82 | 82 | logger = logging.getLogger(__name__) |
83 | 83 | |
84 | ||
84 | 85 | class FaradayAPIPlugin(BasePlugin): |
85 | 86 | """APISpec plugin for Flask""" |
86 | 87 | |
124 | 125 | class_model = view_instance.model_class.__name__ |
125 | 126 | else: |
126 | 127 | class_model = 'No name' |
127 | #print(f'{view_name} / {class_model}') | |
128 | logger.debug(f'{view_name} / {class_model} / {rule.methods} / {view_name} / {view_instance._get_schema_class().__name__}') | |
128 | # print(f'{view_name} / {class_model}') | |
129 | logger.debug( | |
130 | f'{view_name} / {class_model} / {rule.methods} / {view_name} / {view_instance._get_schema_class().__name__}') | |
129 | 131 | operations[view_name] = yaml_utils.load_yaml_from_docstring( |
130 | view.__doc__.format(schema_class=view_instance._get_schema_class().__name__, class_model=class_model, tag_name=class_model) | |
132 | view.__doc__.format(schema_class=view_instance._get_schema_class().__name__, | |
133 | class_model=class_model, tag_name=class_model) | |
131 | 134 | ) |
132 | 135 | elif hasattr(view, "__doc__"): |
133 | 136 | if not view.__doc__: |
137 | 140 | else: |
138 | 141 | class_model = 'No name' |
139 | 142 | for method in rule.methods: |
140 | logger.debug(f'{view_name} / {class_model} / {rule.methods} / {method} / {view_instance._get_schema_class().__name__}') | |
143 | logger.debug( | |
144 | f'{view_name} / {class_model} / {rule.methods} / {method} / {view_instance._get_schema_class().__name__}') | |
141 | 145 | if method not in ['HEAD', 'OPTIONS'] or os.environ.get("FULL_API_DOC", None): |
142 | 146 | operations[method.lower()] = yaml_utils.load_yaml_from_docstring( |
143 | view.__doc__.format(schema_class=view_instance._get_schema_class().__name__, class_model=class_model, tag_name=class_model) | |
147 | view.__doc__.format(schema_class=view_instance._get_schema_class().__name__, | |
148 | class_model=class_model, tag_name=class_model) | |
144 | 149 | ) |
145 | 150 | if hasattr(view, "view_class") and issubclass(view.view_class, MethodView): |
146 | 151 | for method in view.methods: |
66 | 66 | ./packages/apispec-webframeworks |
67 | 67 | { }; |
68 | 68 | |
69 | bleach = | |
70 | self.callPackage | |
71 | ./packages/bleach | |
72 | { }; | |
73 | ||
69 | 74 | faraday-plugins = |
70 | 75 | self.callPackage |
71 | 76 | ./packages/faraday-plugins |
10 | 10 | pname = |
11 | 11 | "anyascii"; |
12 | 12 | version = |
13 | "0.1.7"; | |
13 | "0.2.0"; | |
14 | 14 | |
15 | 15 | src = |
16 | 16 | fetchPypi { |
18 | 18 | pname |
19 | 19 | version; |
20 | 20 | sha256 = |
21 | "1xcrhmgpv8da34sg62r0yfxzyq2kwgiaardkih9z3sm96dlhgsyh"; | |
21 | "1b6jdd9nx15py0jqjdn154m6m491517sqlk57bbyj3x4xzywadkh"; | |
22 | 22 | }; |
23 | 23 | |
24 | 24 | # TODO FIXME |
0 | # WARNING: This file was automatically generated. You should avoid editing it. | |
1 | # If you run pynixify again, the file will be either overwritten or | |
2 | # deleted, and you will lose the changes you made to it. | |
3 | ||
4 | { buildPythonPackage | |
5 | , fetchPypi | |
6 | , lib | |
7 | , packaging | |
8 | , six | |
9 | , webencodings | |
10 | }: | |
11 | ||
12 | buildPythonPackage rec { | |
13 | pname = | |
14 | "bleach"; | |
15 | version = | |
16 | "3.3.0"; | |
17 | ||
18 | src = | |
19 | fetchPypi { | |
20 | inherit | |
21 | pname | |
22 | version; | |
23 | sha256 = | |
24 | "0cx4jyvd7hlaiiq2cq6vps689b978w3kyqqrvkckvs75743igcwq"; | |
25 | }; | |
26 | ||
27 | propagatedBuildInputs = | |
28 | [ | |
29 | packaging | |
30 | six | |
31 | webencodings | |
32 | ]; | |
33 | ||
34 | # TODO FIXME | |
35 | doCheck = | |
36 | false; | |
37 | ||
38 | meta = | |
39 | with lib; { | |
40 | description = | |
41 | "An easy safelist-based HTML-sanitizing tool."; | |
42 | homepage = | |
43 | "https://github.com/mozilla/bleach"; | |
44 | }; | |
45 | } |
6 | 6 | , apispec-webframeworks |
7 | 7 | , autobahn |
8 | 8 | , bcrypt |
9 | , bleach | |
9 | 10 | , buildPythonPackage |
10 | 11 | , click |
11 | 12 | , colorama |
23 | 24 | , flask-limiter |
24 | 25 | , flask-security-too |
25 | 26 | , flask_login |
27 | , flask_mail | |
26 | 28 | , flask_sqlalchemy |
27 | 29 | , hypothesis |
28 | 30 | , lib |
60 | 62 | pname = |
61 | 63 | "faradaysec"; |
62 | 64 | version = |
63 | "3.14.4"; | |
65 | "3.15.0"; | |
64 | 66 | |
65 | 67 | src = |
66 | 68 | lib.cleanSource |
85 | 87 | wtforms |
86 | 88 | flask_login |
87 | 89 | flask-security-too |
90 | bleach | |
88 | 91 | marshmallow |
89 | 92 | pillow |
90 | 93 | psycopg2 |
112 | 115 | pyyaml |
113 | 116 | pyotp |
114 | 117 | flask-limiter |
118 | flask_mail | |
115 | 119 | ]; |
116 | 120 | checkInputs = |
117 | 121 | [ |
1 | 1 | # If you run pynixify again, the file will be either overwritten or |
2 | 2 | # deleted, and you will lose the changes you made to it. |
3 | 3 | |
4 | { Babel | |
4 | { blinker | |
5 | 5 | , buildPythonPackage |
6 | 6 | , email_validator |
7 | 7 | , fetchPypi |
8 | 8 | , flask |
9 | , flask-babelex | |
10 | 9 | , flask_login |
11 | , flask_mail | |
12 | 10 | , flask_principal |
13 | 11 | , flask_wtf |
14 | 12 | , itsdangerous |
15 | 13 | , lib |
16 | 14 | , passlib |
17 | , pytestrunner | |
18 | , twine | |
19 | , wheel | |
20 | 15 | }: |
21 | 16 | |
22 | 17 | buildPythonPackage rec { |
23 | 18 | pname = |
24 | 19 | "flask-security-too"; |
25 | 20 | version = |
26 | "3.4.5"; | |
21 | "4.0.1"; | |
27 | 22 | |
28 | 23 | src = |
29 | 24 | fetchPypi { |
32 | 27 | pname = |
33 | 28 | "Flask-Security-Too"; |
34 | 29 | sha256 = |
35 | "19cdad65bxs23zz5hmr41s12359ija3p2kk0mbf9jsk1swg0b7d0"; | |
30 | "1q7izrmz84wwhmzs39zgjvr90vb22z3szsm8mp3a3qnb1377z5n2"; | |
36 | 31 | }; |
37 | 32 | |
38 | buildInputs = | |
39 | [ | |
40 | Babel | |
41 | pytestrunner | |
42 | twine | |
43 | wheel | |
44 | ]; | |
45 | 33 | propagatedBuildInputs = |
46 | 34 | [ |
47 | 35 | flask |
48 | 36 | flask_login |
49 | flask_mail | |
50 | 37 | flask_principal |
51 | 38 | flask_wtf |
52 | flask-babelex | |
53 | 39 | email_validator |
54 | 40 | itsdangerous |
55 | 41 | passlib |
42 | blinker | |
56 | 43 | ]; |
57 | 44 | |
58 | 45 | # TODO FIXME |
9 | 9 | email_validator |
10 | 10 | WTForms>=2.1 |
11 | 11 | flask-login>=0.5.0 |
12 | Flask-Security-Too>=3.4.4,<4.0.0 | |
12 | Flask-Security-Too>=4.0.0 | |
13 | bleach>=3.3.0 | |
13 | 14 | marshmallow>=3.0.0,<3.11.0 |
14 | 15 | Pillow>=4.2.1 |
15 | 16 | psycopg2 |
37 | 38 | pyyaml |
38 | 39 | pyotp>=2.6.0 |
39 | 40 | Flask-Limiter |
41 | Flask-Mail |
10 | 10 | ''' |
11 | 11 | |
12 | 12 | import os |
13 | import re | |
14 | 13 | import sys |
15 | 14 | import subprocess |
16 | 15 | import logging |
19 | 18 | from tempfile import mkdtemp |
20 | 19 | from shutil import rmtree |
21 | 20 | |
22 | VERSIONS = ['white', 'pink', 'black'] | |
21 | VERSIONS = ['white', 'black'] | |
23 | 22 | BRANCH_FORMAT = 'origin/{}/dev' |
23 | ||
24 | 24 | |
25 | 25 | @contextmanager |
26 | 26 | def chdir(directory): |
29 | 29 | os.chdir(directory) |
30 | 30 | yield |
31 | 31 | os.chdir(current) |
32 | ||
32 | 33 | |
33 | 34 | @contextmanager |
34 | 35 | def temp_worktree(branch=None): |
44 | 45 | yield |
45 | 46 | rmtree(directory) |
46 | 47 | subprocess.check_output(['git', 'worktree', 'prune']) |
48 | ||
47 | 49 | |
48 | 50 | def check_merge(dst_branch, cur_branch='HEAD'): |
49 | 51 | """Return a boolean indicating if the merge from cur_branch |
84 | 86 | |
85 | 87 | def version_of_branch(branch_name): |
86 | 88 | """ |
87 | >>> version_of_branch('tkt_white_this_is_not_a_pink_branch') | |
89 | >>> version_of_branch('tkt_white_this_is_not_a_ee_branch') | |
88 | 90 | 'white' |
89 | 91 | """ |
90 | 92 | positions = {version: branch_name.find(version) |
118 | 120 | else: |
119 | 121 | branches_to_test.append(BRANCH_FORMAT.format(target_version)) |
120 | 122 | |
121 | logging.info('Testing merges in branches %s' % branches_to_test) | |
123 | logging.info(f'Testing merges in branches {branches_to_test}') | |
122 | 124 | |
123 | 125 | success = True |
124 | 126 | cur_branch = branch |
129 | 131 | else: |
130 | 132 | success = False |
131 | 133 | logger.error("Merge into %s failed :(", dst_branch) |
132 | print() | |
133 | print() | |
134 | 134 | |
135 | 135 | if not success: |
136 | 136 | sys.exit(1) |
142 | 142 | parser.add_argument('-l', '--log-level', default='debug') |
143 | 143 | args = parser.parse_args() |
144 | 144 | main(args.branch) |
145 | ||
146 | ||
147 | # I'm Py3 | |
148 |
1 | 1 | # Check that a white branch doesn't contain commits of pink or black |
2 | 2 | # and a pink branch has no black commits |
3 | 3 | # Requires setting BRANCH_NAME environment variable |
4 | PINK_COMMIT=da7a869e186f61f1b138392734be4eae62cb2e31 # Always redirect to login page when user is logged out | |
5 | BLACK_COMMIT=ec3dcfbe8955d41125944e82aa084b441c0b9e77 # Fix msg in webshell | |
4 | PROF_COMMIT=da7a869e186f61f1b138392734be4eae62cb2e31 # Always redirect to login page when user is logged out | |
5 | CORP_COMMIT=ec3dcfbe8955d41125944e82aa084b441c0b9e77 # Fix msg in webshell | |
6 | 6 | |
7 | 7 | if [ $CI_COMMIT_REF_NAME ]; then |
8 | 8 | BRANCH_NAME=$CI_COMMIT_REF_NAME |
10 | 10 | BRANCH_NAME=$(git rev-parse --abbrev-ref HEAD) |
11 | 11 | fi |
12 | 12 | |
13 | function fail(){ | |
13 | fail(){ | |
14 | 14 | echo "Branch $BRANCH_NAME contains commit of another version ($1). You shouldn't do that!!!!!!" |
15 | 15 | exit 1 |
16 | 16 | } |
17 | 17 | |
18 | function check_no_commits(){ | |
18 | check_no_commits(){ | |
19 | 19 | # Check that current branch doesn't contain the commits passed as arguments |
20 | 20 | # If it does contain at least one of them, quit the script with a non-zero exit code
21 | 21 | for commit in $* |
25 | 25 | } |
26 | 26 | |
27 | 27 | echo current branch $(git rev-parse --abbrev-ref HEAD) should be equal to $BRANCH_NAME |
28 | echo $BRANCH_NAME | grep -i white && check_no_commits $PINK_COMMIT $BLACK_COMMIT | |
29 | echo $BRANCH_NAME | grep -i pink && check_no_commits $BLACK_COMMIT | |
28 | echo $BRANCH_NAME | grep -i white && check_no_commits $PROF_COMMIT $CORP_COMMIT | |
30 | 29 | exit 0 |
16 | 16 | if not args.local: |
17 | 17 | BRANCH_NAME = f"origin/{BRANCH_NAME}" |
18 | 18 | |
19 | PINK_FILE = "faraday/server/api/modules/reports.py" | |
20 | BLACK_FILE = "faraday/server/api/modules/jira.py" | |
19 | PROF_FILE = "faraday/server/api/modules/reports.py" | |
20 | CORP_FILE = "faraday/server/api/modules/integration_jira.py" | |
21 | 21 | |
22 | 22 | mode = args.mode |
23 | 23 | if mode == "diff": |
41 | 41 | print(f"Current branch {ACTUAL_BRANCH} should be equal to {BRANCH_NAME}") |
42 | 42 | intersection = set() |
43 | 43 | if "white" in BRANCH_NAME: |
44 | intersection = git_diff_intersection({PINK_FILE, BLACK_FILE}) | |
45 | elif "pink" in BRANCH_NAME: | |
46 | intersection = git_diff_intersection({BLACK_FILE}) | |
47 | assert len(intersection) == 0, f"The {intersection} should not be in " \ | |
48 | f"{BRANCH_NAME}" | |
49 | assert child.returncode == 0, (child.stdout, child.returncode) | |
44 | intersection = git_diff_intersection({PROF_FILE, CORP_FILE}) | |
45 | assert len(intersection) == 0, f"The {intersection} should not be in" \ | |
46 | f" {BRANCH_NAME}" | |
47 | assert child.returncode == 0, (child.stdout, child.returncode) |
9 | 9 | # The list of error ignored is ordered by priority/easiness of the fix |
10 | 10 | # First to fix |
11 | 11 | |
12 | ## Logic improve | |
13 | ### comparison to None should be 'if cond is None:' | |
14 | E711 | |
15 | ### ambiguous variable name 'x' | |
16 | E741 | |
17 | ### the backslash is redundant between brackets | |
18 | E502 | |
19 | ### 'x' imported but unused | |
20 | F401 | |
21 | ### comparison to False should be 'if cond is False:' or 'if not cond:' | |
22 | E712 | |
23 | ### redefinition of unused 'logger' from line 26 | |
24 | F811 | |
25 | 12 | |
26 | 13 | ## Invalid escape sequence; probably fixed by adding r to specify regex str |
27 | 14 | ### invalid escape sequence |
32 | 19 | F841 |
33 | 20 | |
34 | 21 | ## New lines |
35 | ### no newline at end of file | |
36 | W292 | |
37 | ### Blank line at end of file | |
38 | W391 | |
39 | ### expected 2 blank lines, found 1 | |
40 | E302 | |
41 | ### expected 1 blank line, found 0 | |
42 | E301 | |
43 | ### line break before binary operator | |
22 | ### line break before binary operator, W503 is deprecated | |
44 | 23 | W503 |
45 | ### line break after binary operator | |
46 | W504 | |
47 | ### expected 2 blank lines after class or function definition, found 1 | |
48 | E305 | |
49 | ### too many blank lines (N) | |
50 | E303 | |
51 | ||
52 | ## Spaces | |
53 | ### whitespace after '[' | |
54 | E201 | |
55 | ### whitespace before ']' | |
56 | E202 | |
57 | ### missing whitespace after ',' | |
58 | E231 | |
59 | ### multiple spaces after operator | |
60 | E222 | |
61 | ### missing whitespace around arithmetic operator | |
62 | E226 | |
63 | ### unexpected spaces around keyword / parameter equals | |
64 | E251 | |
65 | ### missing whitespace around operator | |
66 | E225 | |
67 | ### blank line contains whitespace | |
68 | W293 | |
69 | ### trailing whitespace | |
70 | W291 | |
71 | ### multiple spaces after ',' | |
72 | E241 | |
73 | ||
74 | ## Block comment | |
75 | ### at least two spaces before inline comment | |
76 | E261 | |
77 | ### inline comment should start with '# ' | |
78 | E262 | |
79 | ### block comment should start with '# ' | |
80 | E265 | |
81 | ### E266 too many leading '#' for block comment | |
82 | E266 | |
83 | 24 | |
84 | 25 | ## Visual |
85 | 26 | ### continuation line missing indentation or outdented |
41 | 41 | # and we don't want this! |
42 | 42 | # Taken from https://github.com/pypa/setuptools_scm/issues/190#issuecomment-351181286 |
43 | 43 | import setuptools_scm.integration |
44 | ||
44 | 45 | setuptools_scm.integration.find_files = lambda _: [] |
45 | 46 | except ImportError: |
46 | 47 | pass |
162 | 163 | # packages=find_packages(exclude=['contrib', 'docs', 'tests']), # Required |
163 | 164 | # packages=[''], |
164 | 165 | # packages=['faraday', 'faraday.server', 'faraday.utils'], |
165 | #packages=['faraday.' + package | |
166 | # packages=['faraday.' + package | |
166 | 167 | # for package in find_packages( |
167 | 168 | # '.', include=['server.*', 'config.*', 'utils.*', 'client.*', |
168 | 169 | # 'server', 'config', 'utils', 'client']) |
169 | 170 | # ] + ['faraday'], |
170 | #package_dir={'faraday': '.'}, | |
171 | # package_dir={'faraday': '.'}, | |
171 | 172 | packages=find_packages(include=['faraday', 'faraday.*']), |
172 | 173 | |
173 | 174 | # Specify which Python versions you support. In contrast to the |
205 | 206 | # MANIFEST.in as well. |
206 | 207 | include_package_data=True, |
207 | 208 | package_data={ # Optional |
208 | '': ['requirements.txt',], | |
209 | '': ['requirements.txt', ], | |
209 | 210 | }, |
210 | 211 | |
211 | 212 | # Although 'package_data' is the preferred approach, in some case you may |
242 | 243 | # what's used to render the link text on PyPI. |
243 | 244 | project_urls={ # Optional |
244 | 245 | 'Bug Reports': 'https://github.com/infobyte/faraday/issues', |
245 | #'Funding': 'https://donate.pypi.org', | |
246 | # 'Funding': 'https://donate.pypi.org', | |
246 | 247 | 'Say Thanks!': 'http://saythanks.io/to/faradaysec', |
247 | 248 | 'Source': 'https://github.com/infobyte/faraday/', |
248 | 249 | }, |
19 | 19 | from faraday.server.app import create_app |
20 | 20 | from faraday.server.models import db |
21 | 21 | from tests import factories |
22 | ||
23 | 22 | |
24 | 23 | TEST_DATA_PATH = Path(__file__).parent / 'data' |
25 | 24 | |
59 | 58 | from flask import _app_ctx_stack |
60 | 59 | _app_ctx_stack.top.sqlalchemy_queries = [] |
61 | 60 | |
62 | ret = super(CustomClient, self).open(*args, **kwargs) | |
63 | #Now set in flask 1.0 | |
64 | #if ret.headers.get('content-type') == 'application/json': | |
61 | ret = super().open(*args, **kwargs) | |
62 | # Now set in flask 1.0 | |
63 | # if ret.headers.get('content-type') == 'application/json': | |
65 | 64 | # try: |
66 | 65 | # ret.json = json.loads(ret.data) |
67 | 66 | # except ValueError: |
78 | 77 | # we need to review sqlite configurations for persistence using PRAGMA.
79 | 78 | parser.addoption('--connection-string', default=f'sqlite:////{TEMPORATY_SQLITE.name}', |
80 | 79 | help="Database connection string. Defaults to in-memory " |
81 | "sqlite if not specified:") | |
80 | "sqlite if not specified:") | |
82 | 81 | parser.addoption('--ignore-nplusone', action='store_true', |
83 | 82 | help="Globally ignore nplusone errors") |
84 | 83 | parser.addoption("--with-hypothesis", action="store_true", |
190 | 189 | @event.listens_for(session, "after_transaction_end") |
191 | 190 | def restart_savepoint(session, transaction): |
192 | 191 | if transaction.nested and not transaction._parent.nested: |
193 | ||
194 | 192 | # ensure that state is expired the way |
195 | 193 | # session.commit() at the top level normally does |
196 | 194 | # (optional step) |
219 | 217 | |
220 | 218 | @pytest.fixture |
221 | 219 | def test_client(app): |
222 | ||
223 | 220 | # flask.g is persisted in requests, and the werkzeug |
224 | 221 | # CSRF checker could fail if we don't do this |
225 | 222 | from flask import g |
266 | 263 | # http://pythonhosted.org/Flask-Testing/#testing-with-sqlalchemy |
267 | 264 | assert user.id is not None |
268 | 265 | db.session.add(user) |
269 | sess['_user_id'] = user.id # TODO use public flask_login functions | |
266 | sess['_user_id'] = user.fs_uniquifier # TODO use public flask_login functions | |
270 | 267 | identity_changed.send(test_client.application, |
271 | 268 | identity=Identity(user.id)) |
272 | 269 | |
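
Flask-Security-Too 4.x identifies the session user by `fs_uniquifier` instead of the primary key, which is why the fixture now stores that value under `_user_id`. Any user row therefore needs the column populated; a sketch of the model-side requirement (an assumed minimal model, not Faraday's):

    # Sketch: Flask-Security-Too 4.x looks the user up by fs_uniquifier,
    # so every user row needs a unique, non-null value for it.
    import uuid
    from sqlalchemy import Column, Integer, String
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'faraday_user'
        id = Column(Integer, primary_key=True)
        fs_uniquifier = Column(String(64), unique=True, nullable=False,
                               default=lambda: uuid.uuid4().hex)
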
298 | 295 | session_response = test_client.get('/session') |
299 | 296 | return session_response.json.get('csrf_token') |
300 | 297 | |
301 | ||
302 | 298 | # I'm Py3 |
11 | 11 | import datetime |
12 | 12 | import itertools |
13 | 13 | import unicodedata |
14 | import uuid | |
14 | 15 | import time |
15 | 16 | |
16 | 17 | import pytz |
17 | from factory import SubFactory | |
18 | 18 | from factory.fuzzy import ( |
19 | 19 | BaseFuzzyAttribute, |
20 | 20 | FuzzyChoice, |
51 | 51 | Executor, |
52 | 52 | Rule, |
53 | 53 | Action, |
54 | RuleAction) | |
54 | RuleAction, | |
55 | Condition) | |
56 | ||
55 | 57 | |
56 | 58 | # Make partials for start and end date. End date must be after start date |
57 | 59 | def FuzzyStartTime(): |
62 | 64 | ) |
63 | 65 | ) |
64 | 66 | |
67 | ||
65 | 68 | def FuzzyEndTime(): |
66 | 69 | return ( |
67 | 70 | FuzzyNaiveDateTime( |
70 | 73 | ) |
71 | 74 | ) |
72 | 75 | |
76 | ||
73 | 77 | all_unicode = ''.join(chr(i) for i in range(65536)) |
74 | 78 | UNICODE_LETTERS = ''.join(c for c in all_unicode if unicodedata.category(c) == 'Lu' or unicodedata.category(c) == 'Ll') |
75 | 79 | |
90 | 94 | class UserFactory(FaradayFactory): |
91 | 95 | |
92 | 96 | username = FuzzyText() |
97 | fs_uniquifier = factory.LazyAttribute( | |
98 | lambda e: uuid.uuid4().hex | |
99 | ) | |
93 | 100 | |
94 | 101 | class Meta: |
95 | 102 | model = User |
98 | 105 | |
99 | 106 | class WorkspaceFactory(FaradayFactory): |
100 | 107 | |
101 | name = FuzzyText(chars=string.ascii_lowercase+string.digits) | |
108 | name = FuzzyText(chars=string.ascii_lowercase + string.digits) | |
102 | 109 | creator = factory.SubFactory(UserFactory) |
103 | 110 | |
104 | 111 | class Meta: |
123 | 130 | |
124 | 131 | def __init__(self, low, high, **kwargs): |
125 | 132 | self.iterator = itertools.cycle(range(low, high - 1)) |
126 | super(FuzzyIncrementalInteger, self).__init__(**kwargs) | |
133 | super().__init__(**kwargs) | |
127 | 134 | |
128 | 135 | def fuzz(self): |
129 | 136 | return next(self.iterator) |
189 | 196 | |
190 | 197 | @classmethod |
191 | 198 | def build_dict(cls, **kwargs): |
192 | ret = super(ServiceFactory, cls).build_dict(**kwargs) | |
199 | ret = super().build_dict(**kwargs) | |
193 | 200 | ret['host'].workspace = kwargs['workspace'] |
194 | 201 | ret['parent'] = ret['host'].id |
195 | 202 | ret['ports'] = [ret['port']] |
326 | 333 | service = factory.SubFactory(ServiceFactory, workspace=factory.SelfAttribute('..workspace')) |
327 | 334 | type = "vulnerability_web" |
328 | 335 | |
329 | ||
330 | @classmethod | |
331 | def build_dict(cls, **kwargs): | |
332 | ret = super(VulnerabilityWebFactory, cls).build_dict(**kwargs) | |
336 | @classmethod | |
337 | def build_dict(cls, **kwargs): | |
338 | ret = super().build_dict(**kwargs) | |
333 | 339 | assert ret['type'] == 'vulnerability_web' |
334 | 340 | ret['type'] = 'VulnerabilityWeb' |
335 | 341 | return ret |
360 | 366 | model = VulnerabilityTemplate |
361 | 367 | sqlalchemy_session = db.session |
362 | 368 | |
363 | ||
364 | @classmethod | |
365 | def build_dict(cls, **kwargs): | |
366 | ret = super(VulnerabilityTemplateFactory, cls).build_dict(**kwargs) | |
369 | @classmethod | |
370 | def build_dict(cls, **kwargs): | |
371 | ret = super().build_dict(**kwargs) | |
367 | 372 | ret['exploitation'] = ret['severity'] |
368 | 373 | return ret |
369 | 374 | |
428 | 433 | @classmethod |
429 | 434 | def build_dict(cls, **kwargs): |
430 | 435 | # Ugly hack to JSON-serialize datetimes |
431 | ret = super(CommandFactory, cls).build_dict(**kwargs) | |
436 | ret = super().build_dict(**kwargs) | |
432 | 437 | ret['itime'] = time.mktime(ret['start_date'].utctimetuple()) |
433 | 438 | ret['duration'] = (ret['end_date'] - ret['start_date']).seconds + ((ret['end_date'] - ret['start_date']).microseconds / 1000000.0) |
434 | 439 | ret.pop('start_date') |
465 | 470 | @classmethod |
466 | 471 | def build_dict(cls, **kwargs): |
467 | 472 | # The host, service or comment must be created |
468 | ret = super(CommentFactory, cls).build_dict(**kwargs) | |
473 | ret = super().build_dict(**kwargs) | |
469 | 474 | workspace = kwargs['workspace'] |
470 | 475 | if ret['object_type'] == 'host': |
471 | 476 | HostFactory.create(workspace=workspace, id=ret['object_id']) |
482 | 487 | sqlalchemy_session = db.session |
483 | 488 | |
484 | 489 | |
485 | ||
486 | 490 | class LicenseFactory(FaradayFactory): |
487 | 491 | product = FuzzyText() |
488 | 492 | start_date = FuzzyStartTime() |
496 | 500 | @classmethod |
497 | 501 | def build_dict(cls, **kwargs): |
498 | 502 | # Ugly hack to JSON-serialize datetimes |
499 | ret = super(LicenseFactory, cls).build_dict(**kwargs) | |
503 | ret = super().build_dict(**kwargs) | |
500 | 504 | ret['start'] = ret['start_date'].isoformat() |
501 | 505 | ret['end'] = ret['end_date'].isoformat() |
502 | 506 | ret.pop('start_date') |
546 | 550 | |
547 | 551 | @classmethod |
548 | 552 | def build_dict(cls, **kwargs): |
549 | return super(AgentFactory, cls).build_dict(**kwargs) | |
553 | return super().build_dict(**kwargs) | |
550 | 554 | |
551 | 555 | class Meta: |
552 | 556 | model = Agent |
559 | 563 | parameters_metadata = factory.LazyAttribute( |
560 | 564 | lambda e: {"param_name": False} |
561 | 565 | ) |
566 | ||
562 | 567 | class Meta: |
563 | 568 | model = Executor |
564 | 569 | sqlalchemy_session = db.session |
585 | 590 | sqlalchemy_session = db.session |
586 | 591 | |
587 | 592 | |
588 | ||
589 | 593 | class SearchFilterFactory(FaradayFactory): |
590 | 594 | |
591 | 595 | name = FuzzyText() |
610 | 614 | sqlalchemy_session = db.session |
611 | 615 | |
612 | 616 | |
617 | class ConditionFactory(FaradayFactory): | |
618 | field = 'description' | |
619 | value = FuzzyText() | |
620 | operator = 'equals' | |
621 | ||
622 | class Meta: | |
623 | model = Condition | |
624 | sqlalchemy_session = db.session | |
625 | ||
626 | ||
613 | 627 | class RuleFactory(WorkspaceObjectFactory): |
614 | 628 | model = 'Vulnerability' |
615 | object = "severity=low", | |
616 | 629 | disabled = FuzzyChoice([True, False]) |
617 | 630 | workspace = factory.SubFactory(WorkspaceFactory) |
618 | 631 |
16 | 16 | TaskTemplate, |
17 | 17 | WorkspacePermission, |
18 | 18 | ) |
19 | ||
19 | 20 | |
20 | 21 | def test_delete_user(workspace, session): |
21 | 22 | assert workspace.creator |
243 | 244 | def test_delete_user_deletes_assignations(self): |
244 | 245 | with self.assert_deletes(self.methodology_task_assigned): |
245 | 246 | self.session.delete(self.user) |
246 | # I'm Py3⏎ |
13 | 13 | from tests.test_api_workspaced_base import ( |
14 | 14 | ReadOnlyAPITests) |
15 | 15 | from tests import factories |
16 | from tests.factories import WorkspaceFactory | |
16 | ||
17 | 17 | |
18 | 18 | @pytest.mark.parametrize( |
19 | 19 | "with_host_vulns,with_service_vulns", [[True, False], |
126 | 126 | assert len(host.hostnames) == 1 |
127 | 127 | assert host.hostnames[0].name == 'y' |
128 | 128 | |
129 | ||
129 | 130 | HOST_TO_QUERY_AMOUNT = 3 |
130 | 131 | HOST_NOT_TO_QUERY_AMOUNT = 2 |
131 | 132 | SERVICE_BY_HOST = 3 |
132 | 133 | VULN_BY_HOST = 2 |
133 | 134 | VULN_BY_SERVICE = 1 |
135 | ||
134 | 136 | |
135 | 137 | class TestHostAPI(ReadOnlyAPITests): |
136 | 138 | model = Host |
191 | 193 | |
192 | 194 | # This tests the API endpoint for some of the hosts in the ws, while hosts exist in another ws and are also requested,
193 | 195 | # and the endpoint for all of the hosts in the ws, retrieving every host when none is specified
194 | @pytest.mark.parametrize('querystring', [ 'countVulns/?hosts={}', 'countVulns/', | |
196 | @pytest.mark.parametrize('querystring', ['countVulns/?hosts={}', 'countVulns/', | |
195 | 197 | ]) |
196 | 198 | def test_vuln_count_ignore_other_ws(self, |
197 | 199 | vulnerability_factory, |
241 | 243 | |
242 | 244 | for host in hosts_not_to_query_w2: |
243 | 245 | assert str(host.id) not in res.json['hosts'] |
244 | # I'm Py3⏎ | |
246 | # I'm Py3 |
120 | 120 | assert self.model.query.count() == 2 |
121 | 121 | assert len(self.childs(self.vuln_different_ws)) == 1 |
122 | 122 | new_child = self.childs(self.vuln_different_ws, True).pop() |
123 | assert (new_child.workspace_id == | |
124 | self.vuln_different_ws.workspace_id) | |
123 | assert (new_child.workspace_id | |
124 | == self.vuln_different_ws.workspace_id) | |
125 | 125 | assert new_child.id != child.id |
126 | 126 | |
127 | 127 | def test_remove_reference(self, session, child): |
85 | 85 | assert workspace['vulnerability_standard_count'] == sum( |
86 | 86 | STANDARD_VULN_COUNT) |
87 | 87 | assert workspace['vulnerability_total_count'] == ( |
88 | sum(STANDARD_VULN_COUNT) + WEB_VULN_COUNT + | |
89 | SOURCE_CODE_VULN_COUNT | |
88 | sum(STANDARD_VULN_COUNT) + WEB_VULN_COUNT + SOURCE_CODE_VULN_COUNT | |
90 | 89 | ) |
91 | 90 | |
92 | 91 | |
102 | 101 | assert workspace['vulnerability_standard_count'] == sum( |
103 | 102 | C_STANDARD_VULN_COUNT) |
104 | 103 | assert workspace['vulnerability_total_count'] == ( |
105 | sum(C_STANDARD_VULN_COUNT) + C_WEB_VULN_COUNT + | |
106 | C_SOURCE_CODE_VULN_COUNT | |
104 | sum(C_STANDARD_VULN_COUNT) + C_WEB_VULN_COUNT + C_SOURCE_CODE_VULN_COUNT | |
107 | 105 | ) |
108 | 106 | |
109 | 107 | |
118 | 116 | assert workspace.vulnerability_code_count is None |
119 | 117 | assert workspace.vulnerability_standard_count is None |
120 | 118 | assert workspace.vulnerability_total_count is None |
121 | # I'm Py3⏎ |
37 | 37 | assert activities['hosts_count'] == 1 |
38 | 38 | assert activities['vulnerabilities_count'] == 1 |
39 | 39 | assert activities['tool'] == 'nessus' |
40 | ||
41 | 40 | |
42 | 41 | def test_load_itime(self, test_client, session): |
43 | 42 | ws = WorkspaceFactory.create(name="abc") |
12 | 12 | from faraday.server.api.modules.agent import AgentWithWorkspacesView, AgentView |
13 | 13 | from faraday.server.models import Agent, Command |
14 | 14 | from tests.factories import AgentFactory, WorkspaceFactory, ExecutorFactory |
15 | from tests.test_api_non_workspaced_base import ReadWriteAPITests, OBJECT_COUNT, PatchableTestsMixin | |
16 | from tests.test_api_workspaced_base import ReadWriteMultiWorkspacedAPITests, ReadOnlyMultiWorkspacedAPITests | |
15 | from tests.test_api_non_workspaced_base import ReadWriteAPITests, PatchableTestsMixin | |
16 | from tests.test_api_workspaced_base import ReadOnlyMultiWorkspacedAPITests | |
17 | 17 | from tests import factories |
18 | 18 | from tests.test_api_workspaced_base import API_PREFIX |
19 | 19 | from tests.utils.url import v2_to_v3 |
97 | 97 | session.commit() |
98 | 98 | secret = pyotp.random_base32() |
99 | 99 | faraday_server_config.agent_registration_secret = secret |
100 | faraday_server_config.agent_token_expiration = 60 | |
100 | 101 | logout(test_client, [302]) |
101 | 102 | initial_agent_count = len(session.query(Agent).all()) |
102 | 103 | raw_data = get_raw_agent( |
103 | 104 | name='new_agent', |
104 | token=pyotp.TOTP(secret).now(), | |
105 | token=pyotp.TOTP(secret, interval=60).now(), | |
105 | 106 | workspaces=[workspace, other_workspace] |
106 | 107 | ) |
107 | 108 | # /v2/agent_registration/ |
124 | 125 | session.commit() |
125 | 126 | secret = pyotp.random_base32() |
126 | 127 | faraday_server_config.agent_registration_secret = secret |
128 | faraday_server_config.agent_token_expiration = 60 | |
127 | 129 | logout(test_client, [302]) |
128 | 130 | initial_agent_count = len(session.query(Agent).all()) |
129 | 131 | raw_data = get_raw_agent( |
130 | 132 | name=None, |
131 | token=pyotp.TOTP(secret).now(), | |
133 | token=pyotp.TOTP(secret, interval=60).now(), | |
132 | 134 | workspaces=[workspace] |
133 | 135 | ) |
134 | 136 | # /v2/agent_registration/ |
191 | 193 | session.commit() |
192 | 194 | secret = pyotp.random_base32() |
193 | 195 | faraday_server_config.agent_registration_secret = secret |
196 | faraday_server_config.agent_token_expiration = 60 | |
194 | 197 | logout(test_client, [302]) |
195 | 198 | raw_data = get_raw_agent( |
196 | token=pyotp.TOTP(secret).now(), | |
199 | token=pyotp.TOTP(secret, interval=60).now(), | |
197 | 200 | name="test agent", |
198 | 201 | workspaces=[] |
199 | 202 | ) |
209 | 212 | session.commit() |
210 | 213 | secret = pyotp.random_base32() |
211 | 214 | faraday_server_config.agent_registration_secret = secret |
215 | faraday_server_config.agent_token_expiration = 60 | |
212 | 216 | logout(test_client, [302]) |
213 | 217 | raw_data = get_raw_agent( |
214 | token=pyotp.TOTP(secret).now(), | |
218 | token=pyotp.TOTP(secret, interval=60).now(), | |
215 | 219 | name="test agent", |
216 | 220 | workspaces=[] |
217 | 221 | ) |
228 | 232 | session.commit() |
229 | 233 | secret = pyotp.random_base32() |
230 | 234 | faraday_server_config.agent_registration_secret = secret |
235 | faraday_server_config.agent_token_expiration = 60 | |
231 | 236 | logout(test_client, [302]) |
232 | 237 | raw_data = get_raw_agent( |
233 | 238 | name="test agent", |
234 | token=pyotp.TOTP(secret).now() | |
239 | token=pyotp.TOTP(secret, interval=60).now() | |
235 | 240 | ) |
236 | 241 | # /v2/agent_registration/ |
237 | 242 | res = test_client.post(self.check_url('/v2/agent_registration/'), data=raw_data) |
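
These registration tests pin the TOTP time step to 60 seconds on both ends: `agent_token_expiration = 60` on the server and `pyotp.TOTP(secret, interval=60)` for the client token, so both compute the same time window and the token validates. In isolation:

    # Sketch: TOTP whose step matches the server-side expiration (60s).
    import pyotp

    secret = pyotp.random_base32()
    totp = pyotp.TOTP(secret, interval=60)  # interval must match both ends

    token = totp.now()         # client: token for the current 60s window
    assert totp.verify(token)  # server: same secret, same interval
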
252 | 257 | |
253 | 258 | def test_create_succeeds(self, test_client): |
254 | 259 | with pytest.raises(AssertionError) as exc_info: |
255 | super(TestAgentWithWorkspacesAPIGeneric, self).test_create_succeeds(test_client) | |
260 | super().test_create_succeeds(test_client) | |
256 | 261 | assert '405' in exc_info.value.args[0] |
257 | 262 | |
258 | 263 | def test_create_fails_with_empty_dict(self, test_client): |
259 | 264 | with pytest.raises(AssertionError) as exc_info: |
260 | super(TestAgentWithWorkspacesAPIGeneric, self).test_create_fails_with_empty_dict(test_client) | |
265 | super().test_create_fails_with_empty_dict(test_client) | |
261 | 266 | assert '405' in exc_info.value.args[0] |
262 | 267 | |
263 | def workspaced_url(self, workspace, obj= None): | |
268 | def workspaced_url(self, workspace, obj=None): | |
264 | 269 | url = API_PREFIX + workspace.name + '/' + self.api_endpoint + '/' |
265 | 270 | if obj is not None: |
266 | 271 | id_ = str(obj.id) if isinstance(obj, self.model) else str(obj) |
391 | 396 | assert res.status_code == 204 |
392 | 397 | assert len(session.query(Agent).all()) == initial_agent_count |
393 | 398 | |
394 | def test_run_fails(self, test_client, session,csrf_token): | |
399 | def test_run_fails(self, test_client, session, csrf_token): | |
395 | 400 | workspace = WorkspaceFactory.create() |
396 | 401 | session.add(workspace) |
397 | 402 | other_workspace = WorkspaceFactory.create() |
422 | 427 | |
423 | 428 | class TestAgentWithWorkspacesAPIGenericV3(TestAgentWithWorkspacesAPIGeneric, PatchableTestsMixin): |
424 | 429 | def url(self, obj=None): |
425 | return v2_to_v3(super(TestAgentWithWorkspacesAPIGenericV3, self).url(obj)) | |
430 | return v2_to_v3(super().url(obj)) | |
426 | 431 | |
427 | 432 | |
428 | 433 | class TestAgentAPI(ReadOnlyMultiWorkspacedAPITests): |
602 | 607 | |
603 | 608 | class TestAgentAPIV3(TestAgentAPI): |
604 | 609 | def url(self, obj=None, workspace=None): |
605 | return v2_to_v3(super(TestAgentAPIV3, self).url(obj, workspace)) | |
610 | return v2_to_v3(super().url(obj, workspace)) | |
606 | 611 | |
607 | 612 | def check_url(self, url): |
608 | 613 | return v2_to_v3(url) |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | from builtins import str | |
7 | import base64 | |
8 | ||
9 | import pytest | |
10 | from tests import factories | |
11 | from flask_security.utils import hash_password | |
12 | from faraday.server.api.modules.websocket_auth import decode_agent_websocket_token | |
13 | from tests.utils.url import v2_to_v3 | |
14 | ||
15 | ||
16 | class TestWebsocketAuthEndpoint: | |
17 | def check_url(self, url): | |
18 | return url | |
19 | ||
20 | def test_not_logged_in_request_fail(self, test_client, workspace): | |
21 | res = test_client.post(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
22 | assert res.status_code == 401 | |
23 | ||
24 | @pytest.mark.usefixtures('logged_user') | |
25 | def test_get_method_succeeds(self, test_client, workspace): | |
26 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
27 | assert res.status_code == 200 | |
28 | ||
29 | # A token for that workspace should be generated, | |
30 | # This will break if we change the token generation | |
31 | # mechanism. | |
32 | assert res.json['token'].startswith(str(workspace.id)) | |
33 | ||
34 | @pytest.mark.usefixtures('logged_user') | |
35 | def test_post_method_succeeds(self, test_client, workspace): | |
36 | res = test_client.post(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
37 | assert res.status_code == 200 | |
38 | ||
39 | # A token for that workspace should be generated, | |
40 | # This will break if we change the token generation | |
41 | # mechanism. | |
42 | assert res.json['token'].startswith(str(workspace.id)) | |
43 | ||
44 | ||
45 | class TestWebsocketAuthEndpointV3(TestWebsocketAuthEndpoint): | |
46 | def check_url(self, url): | |
47 | return v2_to_v3(url) | |
48 | ||
49 | ||
50 | class TestAgentWebsocketToken: | |
51 | ||
52 | def check_url(self, url): | |
53 | return url | |
54 | ||
55 | @pytest.mark.usefixtures('session') # I don't know why this is required | |
56 | def test_fails_without_authorization_header(self, test_client): | |
57 | res = test_client.post( | |
58 | self.check_url('/v2/agent_websocket_token/') | |
59 | ) | |
60 | assert res.status_code == 401 | |
61 | ||
62 | @pytest.mark.usefixtures('logged_user') | |
63 | def test_fails_with_logged_user(self, test_client): | |
64 | res = test_client.post( | |
65 | self.check_url('/v2/agent_websocket_token/') | |
66 | ) | |
67 | assert res.status_code == 401 | |
68 | ||
69 | @pytest.mark.usefixtures('logged_user') | |
70 | def test_fails_with_user_token(self, test_client, session): | |
71 | res = test_client.get(self.check_url('/v2/token/')) | |
72 | ||
73 | assert res.status_code == 200 | |
74 | ||
75 | headers = [('Authorization', 'Token ' + res.json)] | |
76 | ||
77 | # clean cookies make sure test_client has no session | |
78 | test_client.cookie_jar.clear() | |
79 | res = test_client.post( | |
80 | self.check_url('/v2/agent_websocket_token/'), | |
81 | headers=headers, | |
82 | ) | |
83 | assert res.status_code == 401 | |
84 | ||
85 | @pytest.mark.usefixtures('session') | |
86 | def test_fails_with_invalid_agent_token(self, test_client): | |
87 | headers = [('Authorization', 'Agent 13123')] | |
88 | res = test_client.post( | |
89 | self.check_url('/v2/agent_websocket_token/'), | |
90 | headers=headers, | |
91 | ) | |
92 | assert res.status_code == 403 | |
93 | ||
94 | @pytest.mark.usefixtures('session') | |
95 | def test_succeeds_with_agent_token(self, test_client, agent, session): | |
96 | session.add(agent) | |
97 | session.commit() | |
98 | assert agent.token | |
99 | headers = [('Authorization', 'Agent ' + agent.token)] | |
100 | res = test_client.post( | |
101 | self.check_url('/v2/agent_websocket_token/'), | |
102 | headers=headers, | |
103 | ) | |
104 | assert res.status_code == 200 | |
105 | decoded_agent = decode_agent_websocket_token(res.json['token']) | |
106 | assert decoded_agent == agent | |
107 | ||
108 | ||
109 | class TestBasicAuth: | |
110 | ||
111 | def check_url(self, url): | |
112 | return url | |
113 | ||
114 | def test_basic_auth_invalid_credentials(self, test_client, session): | |
115 | """ | |
116 | Use of invalid Basic Auth credentials | |
117 | """ | |
118 | ||
119 | alice = factories.UserFactory.create( | |
120 | active=True, | |
121 | username='asdasd', | |
122 | password=hash_password('asdasd'), | |
123 | role='admin') | |
124 | session.add(alice) | |
125 | session.commit() | |
126 | ||
127 | agent = factories.AgentFactory.create() | |
128 | session.add(agent) | |
129 | session.commit() | |
130 | ||
131 | valid_credentials = base64.b64encode(b"asdasd:wrong_password").decode("utf-8") | |
132 | headers = [('Authorization', f'Basic {valid_credentials}')] | |
133 | res = test_client.get(self.check_url('/v2/agents/'), headers=headers) | |
134 | assert res.status_code == 401 | |
135 | ||
136 | def test_basic_auth_valid_credentials(self, test_client, session): | |
137 | """ | |
138 | Use of valid Basic Auth credentials | |
139 | """ | |
140 | ||
141 | alice = factories.UserFactory.create( | |
142 | active=True, | |
143 | username='asdasd', | |
144 | password=hash_password('asdasd'), | |
145 | role='admin') | |
146 | session.add(alice) | |
147 | session.commit() | |
148 | ||
149 | agent = factories.AgentFactory.create() | |
150 | session.add(agent) | |
151 | session.commit() | |
152 | ||
153 | valid_credentials = base64.b64encode(b"asdasd:asdasd").decode("utf-8") | |
154 | headers = [('Authorization', f'Basic {valid_credentials}')] | |
155 | res = test_client.get(self.check_url('/v2/agents/'), headers=headers) | |
156 | assert res.status_code == 200 | |
157 | ||
158 | ||
159 | class TestAgentWebsocketTokenV3(TestAgentWebsocketToken): | |
160 | def check_url(self, url): | |
161 | return v2_to_v3(url) | |
162 | ||
163 | ||
164 | class TestBasicAuthV3(TestBasicAuth): | |
165 | def check_url(self, url): | |
166 | return v2_to_v3(url) |
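The TestBasicAuth cases above boil down to one construction: per RFC 7617, the credentials are "user:pass", base64-encoded, in the Authorization header. A minimal standalone sketch (made-up credentials, plain stdlib, not Faraday's client code):

    import base64

    def basic_auth_header(username, password):
        # RFC 7617: base64 is reversible encoding, not encryption, so this
        # only identifies the caller; TLS has to provide the secrecy.
        token = base64.b64encode(f"{username}:{password}".encode()).decode("ascii")
        return ("Authorization", f"Basic {token}")

    print(basic_auth_header("asdasd", "asdasd"))
    # ('Authorization', 'Basic YXNkYXNkOmFzZGFzZA==')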
0 | 0 | from datetime import datetime, timedelta, timezone |
1 | import string | |
2 | 1 | |
3 | 2 | import pytest |
4 | 3 | from marshmallow import ValidationError |
4 | from sqlalchemy import true, null, false | |
5 | ||
5 | 6 | from faraday.server.models import ( |
6 | 7 | db, |
7 | 8 | Command, |
54 | 55 | 'status_code': 200, |
55 | 56 | } |
56 | 57 | |
57 | ||
58 | 58 | credential_data = { |
59 | 59 | 'name': 'test credential', |
60 | 60 | 'description': 'test', |
61 | 61 | 'username': 'admin', |
62 | 62 | 'password': '12345', |
63 | 63 | } |
64 | ||
65 | 64 | |
66 | 65 | command_data = { |
67 | 66 | 'tool': 'pytest', |
115 | 114 | assert host.ip == "127.0.0.1" |
116 | 115 | assert set({hn.name for hn in host.hostnames}) == {"test.com", "test2.org", "test3.org"} |
117 | 116 | |
117 | ||
118 | 118 | def test_create_existing_host(session, host): |
119 | 119 | session.add(host) |
120 | 120 | session.commit() |
159 | 159 | data = bc.BulkServiceSchema().load(data) |
160 | 160 | bc._create_service(service.workspace, service.host, data) |
161 | 161 | assert count(Service, service.host.workspace) == 1 |
162 | ||
162 | 163 | |
163 | 164 | def test_create_host_vuln(session, host): |
164 | 165 | data = bc.VulnerabilitySchema().load(vuln_data) |
215 | 216 | dict( |
216 | 217 | command=command_data, |
217 | 218 | hosts=[host_data_] |
218 | ) | |
219 | ) | |
219 | 220 | ) |
220 | 221 | assert count(Vulnerability, service.workspace) == 1 |
221 | 222 | vuln = service.workspace.vulnerabilities[0] |
238 | 239 | assert count(Vulnerability, service.workspace) == 1 |
239 | 240 | vuln = service.workspace.vulnerabilities[0] |
240 | 241 | assert vuln.tool == command_data['tool'] |
242 | ||
241 | 243 | |
242 | 244 | def test_cannot_create_host_vulnweb(session, host): |
243 | 245 | data = vuln_data.copy() |
427 | 429 | service = host.services[0] |
428 | 430 | vuln_host = Vulnerability.query.filter( |
429 | 431 | Vulnerability.workspace == workspace, |
430 | Vulnerability.service == None).one() | |
432 | Vulnerability.service == null()).one() | |
431 | 433 | vuln_service = Vulnerability.query.filter( |
432 | 434 | Vulnerability.workspace == workspace, |
433 | Vulnerability.host == None).one() | |
435 | Vulnerability.host == null()).one() | |
434 | 436 | vuln_web = VulnerabilityWeb.query.filter( |
435 | 437 | VulnerabilityWeb.workspace == workspace).one() |
436 | 438 | host_cred = Credential.query.filter( |
457 | 459 | CommandObject.command == command, |
458 | 460 | CommandObject.object_type == table_name, |
459 | 461 | CommandObject.object_id == obj.id, |
460 | CommandObject.created_persistent == True, | |
462 | CommandObject.created_persistent == true(), | |
461 | 463 | ).one() |
462 | 464 | |
463 | 465 | |
566 | 568 | CommandObject.command == new_command, |
567 | 569 | CommandObject.object_type == table_name, |
568 | 570 | CommandObject.object_id == obj.id, |
569 | CommandObject.created_persistent == False, | |
571 | CommandObject.created_persistent == false(), | |
570 | 572 | ).one() |
571 | 573 | |
572 | 574 | |
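The two hunks above swap Python literals for SQLAlchemy's true(), false() and null() constructs. The old comparisons already produced the right SQL; the new ones do the same without tripping flake8's E711/E712 checks. A self-contained sketch of the equivalence (toy model on in-memory SQLite, SQLAlchemy 1.4+ import paths, nothing from Faraday's schema):

    from sqlalchemy import Boolean, Column, Integer, create_engine, false, null, true
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Flag(Base):
        __tablename__ = "flag"
        id = Column(Integer, primary_key=True)
        done = Column(Boolean)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add_all([Flag(done=True), Flag(done=False), Flag(done=None)])
        session.commit()
        # Same rows as `== True` / `== False` / `== None`, E712/E711-clean:
        assert session.query(Flag).filter(Flag.done == true()).count() == 1
        assert session.query(Flag).filter(Flag.done == false()).count() == 1
        assert session.query(Flag).filter(Flag.done == null()).count() == 1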
829 | 831 | ) |
830 | 832 | assert res.status_code == 400 |
831 | 833 | |
832 | assert Host.query.filter(Host.workspace == workspace and Host.creator_id is None).count() == initial_host_count | |
834 | assert Host.query.filter( | |
835 | Host.workspace == workspace and Host.creator_id is None).count() == initial_host_count | |
833 | 836 | assert count(Command, workspace) == 1 |
834 | 837 | data_kwargs["execution_id"] = extra_agent_execution.id |
835 | 838 | res = test_client.post( |
838 | 841 | headers=[("authorization", f"agent {agent.token}")] |
839 | 842 | ) |
840 | 843 | assert res.status_code == 400 |
841 | assert Host.query.filter(Host.workspace == workspace and Host.creator_id is None).count() == initial_host_count | |
844 | assert Host.query.filter( | |
845 | Host.workspace == workspace and Host.creator_id is None).count() == initial_host_count | |
842 | 846 | assert count(Command, workspace) == 1 |
843 | 847 | data_kwargs["execution_id"] = agent_execution.id |
844 | 848 | res = test_client.post( |
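One thing the reflow above does not touch: the filter still uses Python's `and` and `is None` inside .filter(), and neither reaches SQL. `Host.workspace == workspace and Host.creator_id is None` appears to evaluate the first `==` expression for truthiness (SQLAlchemy defines it as falsy here) and hand only that clause to the query. A standalone sketch of the pitfall and of the operators that express the presumably intended condition (toy model, not Faraday's schema):

    from sqlalchemy import Column, Integer, and_, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class Host(Base):
        __tablename__ = "host"
        id = Column(Integer, primary_key=True)
        workspace_id = Column(Integer)
        creator_id = Column(Integer)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as s:
        s.add_all([Host(workspace_id=1, creator_id=None),
                   Host(workspace_id=1, creator_id=7)])
        s.commit()
        # `and` short-circuits on the falsy first clause and returns it, and
        # `is None` is a Python identity check, so creator_id never
        # constrains the query:
        print(s.query(Host).filter(
            Host.workspace_id == 1 and Host.creator_id is None).count())  # 2
        # SQL-level conjunction and IS NULL express the intended filter:
        print(s.query(Host).filter(
            and_(Host.workspace_id == 1, Host.creator_id.is_(None))).count())  # 1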
907 | 911 | session.add(workspace) |
908 | 912 | session.commit() |
909 | 913 | for workspace in agent.workspaces: |
910 | ||
911 | 914 | url = self.check_url(f'/v2/ws/{workspace.name}/bulk_create/') |
912 | 915 | res = test_client.post( |
913 | 916 | url, |
1000 | 1003 | host_data_['services'] = [service_data] |
1001 | 1004 | host_data_['credentials'] = [credential_data] |
1002 | 1005 | host_data_['vulnerabilities'] = [vuln_data] |
1003 | host_data_['default_gateway'] = ["localhost"] # Can not be a list | |
1006 | host_data_['default_gateway'] = ["localhost"] # Can not be a list | |
1004 | 1007 | res = test_client.post(url, data=dict(hosts=[host_data_])) |
1005 | 1008 | assert res.status_code == 400, res.json |
1006 | 1009 | assert count(Host, workspace) == 0 |
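The "Can not be a list" comment fixed above leans on marshmallow rejecting a list wherever a scalar field is declared. A minimal sketch with a hypothetical two-field schema (Faraday's real bulk-create schema has many more fields and nested objects):

    from marshmallow import Schema, ValidationError, fields

    class HostSchema(Schema):
        ip = fields.String(required=True)
        default_gateway = fields.String(allow_none=True)  # scalar, not a list

    try:
        HostSchema().load({"ip": "127.0.0.1", "default_gateway": ["localhost"]})
    except ValidationError as err:
        print(err.messages)  # {'default_gateway': ['Not a valid string.']}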
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
13 | 13 | import time |
14 | 14 | |
15 | 15 | from tests import factories |
16 | from tests.test_api_workspaced_base import API_PREFIX, ReadWriteAPITests, PatchableTestsMixin | |
16 | from tests.test_api_workspaced_base import ReadWriteAPITests, PatchableTestsMixin | |
17 | 17 | from faraday.server.models import ( |
18 | 18 | Command, |
19 | Workspace, | |
20 | 19 | Vulnerability) |
21 | 20 | from faraday.server.api.modules.commandsrun import CommandView, CommandV3View |
22 | from faraday.server.api.modules.workspaces import WorkspaceView | |
23 | 21 | from tests.factories import VulnerabilityFactory, EmptyCommandFactory, CommandObjectFactory, HostFactory, \ |
24 | 22 | WorkspaceFactory, ServiceFactory |
25 | 23 | |
46 | 44 | def test_list_retrieves_all_items_from_workspace(self, test_client, |
47 | 45 | second_workspace, |
48 | 46 | session): |
49 | super(TestListCommandView, self).test_list_retrieves_all_items_from_workspace(test_client, second_workspace, session) | |
47 | super().test_list_retrieves_all_items_from_workspace(test_client, second_workspace, session) | |
50 | 48 | |
51 | 49 | @pytest.mark.usefixtures('ignore_nplusone') |
52 | 50 | def test_backwards_compatibility_list(self, test_client, second_workspace, session): |
77 | 75 | |
78 | 76 | @pytest.mark.usefixtures('ignore_nplusone') |
79 | 77 | def test_can_list_readonly(self, test_client, session): |
80 | super(TestListCommandView, self).test_can_list_readonly(test_client, session) | |
78 | super().test_can_list_readonly(test_client, session) | |
81 | 79 | |
82 | 80 | def test_activity_feed(self, session, test_client): |
83 | 81 | command = self.factory.create() |
115 | 113 | u'criticalIssue': 0}] |
116 | 114 | |
117 | 115 | assert list(filter(lambda stats: stats['_id'] == another_command.id, |
118 | res.json)) == [{ | |
119 | u'_id': another_command.id, | |
120 | u'command': another_command.command, | |
121 | u'import_source': u'shell', | |
122 | u'tool': another_command.tool, | |
123 | u'user': another_command.user, | |
124 | u'date': time.mktime( | |
125 | another_command.start_date.timetuple()) * 1000, | |
126 | u'params': another_command.params, | |
127 | u'hosts_count': 0, | |
128 | u'services_count': 0, | |
129 | u'vulnerabilities_count': 0, | |
130 | u'criticalIssue': 0}] | |
116 | res.json)) == [{ | |
117 | u'_id': another_command.id, | |
118 | u'command': another_command.command, | |
119 | u'import_source': u'shell', | |
120 | u'tool': another_command.tool, | |
121 | u'user': another_command.user, | |
122 | u'date': time.mktime( | |
123 | another_command.start_date.timetuple()) * 1000, | |
124 | u'params': another_command.params, | |
125 | u'hosts_count': 0, | |
126 | u'services_count': 0, | |
127 | u'vulnerabilities_count': 0, | |
128 | u'criticalIssue': 0}] | |
131 | 129 | |
132 | 130 | def test_verify_created_critical_vulns_is_correctly_showing_sum_values(self, session, test_client): |
133 | 131 | workspace = WorkspaceFactory.create() |
158 | 156 | res = test_client.get(self.check_url(urljoin(self.url(workspace=command.workspace), 'activity_feed/'))) |
159 | 157 | assert res.status_code == 200 |
160 | 158 | assert res.json == [ |
161 | {u'_id': command.id, | |
162 | u'command': command.command, | |
163 | u'import_source': u'shell', | |
164 | u'tool': command.tool, | |
165 | u'user': command.user, | |
166 | u'date': time.mktime(command.start_date.timetuple()) * 1000, | |
167 | u'params': command.params, | |
168 | u'hosts_count': 1, | |
169 | u'services_count': 0, | |
170 | u'vulnerabilities_count': 2, | |
171 | u'criticalIssue': 1} | |
172 | ] | |
159 | {u'_id': command.id, | |
160 | u'command': command.command, | |
161 | u'import_source': u'shell', | |
162 | u'tool': command.tool, | |
163 | u'user': command.user, | |
164 | u'date': time.mktime(command.start_date.timetuple()) * 1000, | |
165 | u'params': command.params, | |
166 | u'hosts_count': 1, | |
167 | u'services_count': 0, | |
168 | u'vulnerabilities_count': 2, | |
169 | u'criticalIssue': 1} | |
170 | ] | |
173 | 171 | |
174 | 172 | def test_verify_created_vulns_with_host_and_service_verification(self, session, test_client): |
175 | 173 | workspace = WorkspaceFactory.create() |
291 | 289 | for in_the_middle_command in in_the_middle_commands: |
292 | 290 | raw_in_the_middle_command = list(filter(lambda comm: comm['_id'] == in_the_middle_command.id, res.json)) |
293 | 291 | assert raw_in_the_middle_command.pop() == {u'_id': in_the_middle_command.id, |
294 | u'command': in_the_middle_command.command, | |
295 | u'import_source': u'shell', | |
296 | u'user': in_the_middle_command.user, | |
297 | u'date': time.mktime(in_the_middle_command.start_date.timetuple()) * 1000, | |
298 | u'params': in_the_middle_command.params, | |
299 | u'hosts_count': 0, | |
300 | u'tool': in_the_middle_command.tool, | |
301 | u'services_count': 0, | |
302 | u'vulnerabilities_count': 0, | |
303 | u'criticalIssue': 0} | |
292 | u'command': in_the_middle_command.command, | |
293 | u'import_source': u'shell', | |
294 | u'user': in_the_middle_command.user, | |
295 | u'date': time.mktime( | |
296 | in_the_middle_command.start_date.timetuple()) * 1000, | |
297 | u'params': in_the_middle_command.params, | |
298 | u'hosts_count': 0, | |
299 | u'tool': in_the_middle_command.tool, | |
300 | u'services_count': 0, | |
301 | u'vulnerabilities_count': 0, | |
302 | u'criticalIssue': 0} | |
304 | 303 | |
305 | 304 | # new command must create new service and vuln |
306 | 305 | raw_last_command = list(filter(lambda comm: comm['_id'] == last_command.id, res.json)) |
307 | 306 | assert raw_last_command.pop() == {u'_id': last_command.id, |
308 | u'command': last_command.command, | |
309 | u'import_source': u'shell', | |
310 | u'user': last_command.user, | |
311 | u'date': time.mktime(last_command.start_date.timetuple()) * 1000, | |
312 | u'params': last_command.params, | |
313 | u'hosts_count': 0, | |
314 | u'tool': last_command.tool, | |
315 | u'services_count': 1, | |
316 | u'vulnerabilities_count': 1, | |
317 | u'criticalIssue': 0} | |
307 | u'command': last_command.command, | |
308 | u'import_source': u'shell', | |
309 | u'user': last_command.user, | |
310 | u'date': time.mktime(last_command.start_date.timetuple()) * 1000, | |
311 | u'params': last_command.params, | |
312 | u'hosts_count': 0, | |
313 | u'tool': last_command.tool, | |
314 | u'services_count': 1, | |
315 | u'vulnerabilities_count': 1, | |
316 | u'criticalIssue': 0} | |
318 | 317 | |
319 | 318 | @pytest.mark.usefixtures('ignore_nplusone') |
320 | 319 | def test_sub_second_command_returns_correct_duration_value(self, test_client): |
367 | 366 | assert res.json['commands'][0]['value']['duration'].lower() == "in progress" |
368 | 367 | |
369 | 368 | def test_create_command(self, test_client): |
370 | raw_data ={ | |
369 | raw_data = { | |
371 | 370 | 'command': 'Import Nessus:', |
372 | 371 | 'tool': 'nessus', |
373 | 372 | 'duration': None, |
432 | 431 | assert command_history['tool'] == 'test' |
433 | 432 | |
434 | 433 | def test_year_is_out_range(self, test_client): |
435 | raw_data ={ | |
434 | raw_data = { | |
436 | 435 | 'command': 'Import Nessus:', |
437 | 436 | 'tool': 'nessus', |
438 | 437 | 'duration': None, |
452 | 451 | view_class = CommandV3View |
453 | 452 | |
454 | 453 | def url(self, obj=None, workspace=None): |
455 | return v2_to_v3(super(TestListCommandViewV3, self).url(obj, workspace)) | |
454 | return v2_to_v3(super().url(obj, workspace)) | |
456 | 455 | |
457 | 456 | def check_url(self, url): |
458 | 457 | return v2_to_v3(url) |
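Every `...V3` test class in this diff routes its parent's URL through v2_to_v3. The helper itself never appears in the hunks, but judging from how test_v3_endpoints later derives its expected rules (swap the prefix, strip the trailing slash), it plausibly reduces to a one-liner; this is an assumption, not the verbatim tests/utils/url.py:

    def v2_to_v3(url):
        # assumed: v3 routes drop the trailing slash that v2 routes carry
        return url.replace("/v2/", "/v3/", 1).rstrip("/")

    assert v2_to_v3("/v2/ws/demo/vulns/") == "/v3/ws/demo/vulns"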
76 | 76 | assert res.status_code == 400 |
77 | 77 | assert res.json == {u'message': u"Can't comment inexistent object"} |
78 | 78 | |
79 | ||
80 | 79 | def test_create_unique_comment_for_plugins(self, session, test_client): |
81 | 80 | """ |
82 | 81 | |
124 | 123 | factories.CommentFactory.create(workspace=workspace, text='third') |
125 | 124 | factories.CommentFactory.create(workspace=workspace, text='fourth') |
126 | 125 | get_comments = test_client.get(self.url(workspace=workspace)) |
127 | expected = ['first', 'second', 'third','fourth'] | |
126 | expected = ['first', 'second', 'third', 'fourth'] | |
128 | 127 | assert expected == [comment['text'] for comment in get_comments.json] |
129 | 128 | |
130 | 129 | |
132 | 131 | view_class = CommentV3View |
133 | 132 | |
134 | 133 | def url(self, obj=None, workspace=None): |
135 | return v2_to_v3(super(TestCommentAPIGenericV3, self).url(obj, workspace)) | |
134 | return v2_to_v3(super().url(obj, workspace)) | |
136 | 135 | |
137 | 136 | def check_url(self, url): |
138 | 137 | return v2_to_v3(url) |
56 | 56 | host = host_factory.create(workspace=workspace) |
57 | 57 | session.commit() |
58 | 58 | raw_data = { |
59 | "_id":"1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
60 | "name":"name", | |
61 | "username":"username", | |
62 | "metadata":{"update_time":1508794240799,"update_user":"", | |
63 | "update_action":0,"creator":"UI Web", | |
64 | "create_time":1508794240799,"update_controller_action":"", | |
65 | "owner":""}, | |
66 | "password":"pass", | |
67 | "type":"Cred", | |
68 | "owner":"", | |
69 | "description":"", | |
59 | "_id": "1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
60 | "name": "name", | |
61 | "username": "username", | |
62 | "metadata": {"update_time": 1508794240799, "update_user": "", | |
63 | "update_action": 0, "creator": "UI Web", | |
64 | "create_time": 1508794240799, "update_controller_action": "", | |
65 | "owner": ""}, | |
66 | "password": "pass", | |
67 | "type": "Cred", | |
68 | "owner": "", | |
69 | "description": "", | |
70 | 70 | "parent": host.id, |
71 | 71 | "parent_type": "Host" |
72 | 72 | } |
81 | 81 | service = service_factory.create(workspace=workspace) |
82 | 82 | session.commit() |
83 | 83 | raw_data = { |
84 | "_id":"1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
85 | "name":"name", | |
86 | "username":"username", | |
87 | "metadata":{"update_time":1508794240799,"update_user":"", | |
88 | "update_action":0,"creator":"UI Web", | |
89 | "create_time":1508794240799,"update_controller_action":"", | |
90 | "owner":""}, | |
91 | "password":"pass", | |
92 | "type":"Cred", | |
93 | "owner":"", | |
94 | "description":"", | |
84 | "_id": "1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
85 | "name": "name", | |
86 | "username": "username", | |
87 | "metadata": {"update_time": 1508794240799, "update_user": "", | |
88 | "update_action": 0, "creator": "UI Web", | |
89 | "create_time": 1508794240799, "update_controller_action": "", | |
90 | "owner": ""}, | |
91 | "password": "pass", | |
92 | "type": "Cred", | |
93 | "owner": "", | |
94 | "description": "", | |
95 | 95 | "parent": service.id, |
96 | 96 | "parent_type": "Service" |
97 | 97 | } |
152 | 152 | service = service_factory.create(workspace=workspace) |
153 | 153 | session.commit() |
154 | 154 | raw_data = { |
155 | "_id":"1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
156 | "name":"name", | |
157 | "username":"username", | |
158 | "metadata":{"update_time":1508794240799,"update_user":"", | |
159 | "update_action":0,"creator":"UI Web", | |
160 | "create_time":1508794240799,"update_controller_action":"", | |
161 | "owner":""}, | |
162 | "password":"pass", | |
163 | "type":"Cred", | |
164 | "owner":"", | |
165 | "description":"", | |
155 | "_id": "1.e5069bb0718aa519852e6449448eedd717f1b90d", | |
156 | "name": "name", | |
157 | "username": "username", | |
158 | "metadata": {"update_time": 1508794240799, "update_user": "", | |
159 | "update_action": 0, "creator": "UI Web", | |
160 | "create_time": 1508794240799, "update_controller_action": "", | |
161 | "owner": ""}, | |
162 | "password": "pass", | |
163 | "type": "Cred", | |
164 | "owner": "", | |
165 | "description": "", | |
166 | 166 | "parent": service.id, |
167 | 167 | "parent_type": "Vulnerability" |
168 | 168 | } |
169 | 169 | res = test_client.post(self.url(), data=raw_data) |
170 | 170 | assert res.status_code == 400 |
171 | 171 | assert res.json['messages']['json']['_schema'] == ['Unknown parent type: Vulnerability'] |
172 | ||
173 | 172 | |
174 | 173 | def test_update_credentials(self, test_client, session, host): |
175 | 174 | credential = self.factory.create(host=host, service=None, |
237 | 236 | assert res.status_code == 400 |
238 | 237 | assert b'Parent id not found' in res.data |
239 | 238 | |
240 | ||
241 | 239 | def test_sort_credentials_target(self, test_client, second_workspace): |
242 | 240 | host = HostFactory(workspace=second_workspace, ip="192.168.1.1") |
243 | 241 | service = ServiceFactory(name="http", workspace=second_workspace, host=host) |
260 | 258 | # Desc order |
261 | 259 | response = test_client.get(self.url(workspace=second_workspace) + "?sort=target&sort_dir=desc") |
262 | 260 | assert response.status_code == 200 |
263 | assert sorted(credentials_target, reverse=True) == [ v['value']['target'] for v in response.json['rows']] | |
261 | assert sorted(credentials_target, reverse=True) == [v['value']['target'] for v in response.json['rows']] | |
264 | 262 | |
265 | 263 | # Asc order |
266 | 264 | response = test_client.get(self.url(workspace=second_workspace) + "?sort=target&sort_dir=asc") |
272 | 270 | view_class = CredentialV3View |
273 | 271 | |
274 | 272 | def url(self, obj=None, workspace=None): |
275 | return v2_to_v3(super(TestCredentialsAPIGenericV3, self).url(obj, workspace)) | |
273 | return v2_to_v3(super().url(obj, workspace)) |
0 | ||
1 | 0 | import pytest |
2 | 1 | |
3 | 2 | from tests.factories import CustomFieldsSchemaFactory |
15 | 14 | model = CustomFieldsSchema |
16 | 15 | factory = CustomFieldsSchemaFactory |
17 | 16 | api_endpoint = 'custom_fields_schema' |
18 | #unique_fields = ['ip'] | |
19 | #update_fields = ['ip', 'description', 'os'] | |
17 | # unique_fields = ['ip'] | |
18 | # update_fields = ['ip', 'description', 'os'] | |
20 | 19 | view_class = CustomFieldsSchemaView |
21 | 20 | patchable_fields = ['field_name'] |
22 | 21 | |
33 | 32 | |
34 | 33 | res = test_client.get(self.url()) |
35 | 34 | assert res.status_code == 200 |
36 | assert {u'table_name': u'vulnerability', u'id': add_text_field.id, u'field_type': u'text', u'field_name': u'cvss', u'field_display_name': u'CVSS', u'field_metadata': None, u'field_order': 1} in res.json | |
35 | assert {u'table_name': u'vulnerability', u'id': add_text_field.id, u'field_type': u'text', | |
36 | u'field_name': u'cvss', u'field_display_name': u'CVSS', u'field_metadata': None, | |
37 | u'field_order': 1} in res.json | |
37 | 38 | |
38 | 39 | def test_custom_fields_field_name_cant_be_changed(self, session, test_client): |
39 | 40 | add_text_field = CustomFieldsSchemaFactory.create( |
84 | 85 | |
85 | 86 | class TestVulnerabilityCustomFieldsV3(TestVulnerabilityCustomFields, PatchableTestsMixin): |
86 | 87 | def url(self, obj=None): |
87 | return v2_to_v3(super(TestVulnerabilityCustomFieldsV3, self).url(obj)) | |
88 | return v2_to_v3(super().url(obj)) |
1 | 1 | |
2 | 2 | import yaml |
3 | 3 | from apispec import APISpec |
4 | from faraday.server.web import app | |
4 | from faraday.server.web import get_app | |
5 | 5 | from apispec.ext.marshmallow import MarshmallowPlugin |
6 | 6 | from apispec_webframeworks.flask import FlaskPlugin |
7 | 7 | from faraday.utils.faraday_openapi_plugin import FaradayAPIPlugin |
29 | 29 | exc = {'/login', '/logout', '/change', '/reset', '/reset/{token}', '/verify'} |
30 | 30 | failing = [] |
31 | 31 | |
32 | with app.test_request_context(): | |
33 | for endpoint in app.view_functions: | |
34 | spec.path(view=app.view_functions[endpoint], app=app) | |
32 | with get_app().test_request_context(): | |
33 | for endpoint in get_app().view_functions: | |
34 | spec.path(view=get_app().view_functions[endpoint], app=get_app()) | |
35 | 35 | |
36 | 36 | spec_yaml = yaml.load(spec.to_yaml(), Loader=yaml.BaseLoader) |
37 | 37 | |
54 | 54 | |
55 | 55 | failing = [] |
56 | 56 | |
57 | with app.test_request_context(): | |
58 | for endpoint in app.view_functions: | |
59 | spec.path(view=app.view_functions[endpoint], app=app) | |
57 | with get_app().test_request_context(): | |
58 | for endpoint in get_app().view_functions: | |
59 | spec.path(view=get_app().view_functions[endpoint], app=get_app()) | |
60 | 60 | |
61 | 61 | spec_yaml = yaml.load(spec.to_yaml(), Loader=yaml.BaseLoader) |
62 | 62 | |
80 | 80 | |
81 | 81 | tags = set() |
82 | 82 | |
83 | with app.test_request_context(): | |
84 | for endpoint in app.view_functions: | |
85 | spec.path(view=app.view_functions[endpoint], app=app) | |
83 | with get_app().test_request_context(): | |
84 | for endpoint in get_app().view_functions: | |
85 | spec.path(view=get_app().view_functions[endpoint], app=get_app()) | |
86 | 86 | |
87 | 87 | spec_yaml = yaml.load(spec.to_yaml(), Loader=yaml.BaseLoader) |
88 | 88 |
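The `app` to `get_app()` rewrite that dominates this file reflects the Flask instance moving behind an accessor. A hedged sketch of the general factory-plus-memoized-accessor pattern (names and config are illustrative; faraday.server.web's actual implementation may differ):

    from flask import Flask

    _app = None

    def create_app():
        app = Flask(__name__)
        app.config["SECRET_KEY"] = "change-me"
        # extensions and blueprints would be registered here
        return app

    def get_app():
        # memoized, so repeated calls in the tests share one instance
        global _app
        if _app is None:
            _app = create_app()
        return _app

    with get_app().test_request_context():
        assert get_app().config["SECRET_KEY"]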
5 | 5 | ''' |
6 | 6 | |
7 | 7 | import pytest |
8 | from lxml.etree import fromstring, tostring | |
8 | from lxml.etree import fromstring | |
9 | 9 | |
10 | 10 | from tests.conftest import TEST_DATA_PATH |
11 | 11 | from tests.factories import ( |
0 | 0 | import re |
1 | from faraday.server.web import app | |
1 | from faraday.server.web import get_app | |
2 | 2 | |
3 | 3 | placeholders = { |
4 | 4 | r".*(<int:.*>).*": "1" |
15 | 15 | |
16 | 16 | |
17 | 17 | def test_options(test_client): |
18 | for rule in app.url_map.iter_rules(): | |
18 | for rule in get_app().url_map.iter_rules(): | |
19 | 19 | if 'OPTIONS' in rule.methods: |
20 | 20 | res = test_client.options(replace_placeholders(rule.rule)) |
21 | 21 | assert res.status_code == 200, rule.rule |
23 | 23 | |
24 | 24 | def test_v3_endpoints(): |
25 | 25 | rules = list( |
26 | filter(lambda rule: rule.rule.startswith("/v3") and rule.rule.endswith("/"), app.url_map.iter_rules()) | |
26 | filter(lambda rule: rule.rule.startswith("/v3") and rule.rule.endswith("/"), get_app().url_map.iter_rules()) | |
27 | 27 | ) |
28 | 28 | assert len(rules) == 0, [rule.rule for rule in rules] |
29 | 29 | |
40 | 40 | rules_v2 = set( |
41 | 41 | map( |
42 | 42 | lambda rule: rule.rule.replace("v2", "v3").rstrip("/"), |
43 | filter(lambda rule: rule.rule.startswith("/v2"), app.url_map.iter_rules()) | |
43 | filter(lambda rule: rule.rule.startswith("/v2"), get_app().url_map.iter_rules()) | |
44 | 44 | ) |
45 | 45 | ) |
46 | 46 | rules = set( |
47 | map(lambda rule: rule.rule, filter(lambda rule: rule.rule.startswith("/v3"), app.url_map.iter_rules())) | |
47 | map(lambda rule: rule.rule, filter(lambda rule: rule.rule.startswith("/v3"), get_app().url_map.iter_rules())) | |
48 | 48 | ) |
49 | 49 | exceptions_present_v2 = rules_v2.intersection(exceptions) |
50 | 50 | assert len(exceptions_present_v2) == len(exceptions), sorted(exceptions_present_v2) |
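For context, the OPTIONS test at the top of this file pushes every URL rule through replace_placeholders, driven by the placeholders mapping shown in the hunk. A simplified standalone variant (the regex is rewritten for clarity; the original matches the whole rule and substitutes a captured group):

    import re

    placeholders = {r"<int:[^>]*>": "1"}  # only int converters get a value

    def replace_placeholders(rule):
        for pattern, value in placeholders.items():
            rule = re.sub(pattern, value, rule)
        return rule

    assert (replace_placeholders("/v3/ws/<workspace_name>/hosts/<int:host_id>")
            == "/v3/ws/<workspace_name>/hosts/1")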
5 | 5 | ''' |
6 | 6 | |
7 | 7 | import pytest |
8 | ||
8 | 9 | |
9 | 10 | @pytest.mark.skip(reason='occasionally times out')
10 | 11 | @pytest.mark.usefixtures('logged_user') |
14 | 14 | from urllib.parse import urlencode |
15 | 15 | from random import choice |
16 | 16 | from sqlalchemy.orm.util import was_deleted |
17 | from hypothesis import given, assume, settings, strategies as st | |
17 | from hypothesis import given, strategies as st | |
18 | 18 | |
19 | 19 | import pytest |
20 | 20 | |
26 | 26 | ) |
27 | 27 | from faraday.server.models import db, Host, Hostname |
28 | 28 | from faraday.server.api.modules.hosts import HostsView, HostsV3View |
29 | from tests.factories import HostFactory, CommandFactory, \ | |
30 | EmptyCommandFactory, WorkspaceFactory | |
29 | from tests.factories import HostFactory, EmptyCommandFactory, WorkspaceFactory | |
31 | 30 | |
32 | 31 | HOSTS_COUNT = 5 |
33 | 32 | SERVICE_COUNT = [10, 5] # 10 services to the first host, 5 to the second |
33 | ||
34 | 34 | |
35 | 35 | @pytest.mark.usefixtures('database', 'logged_user') |
36 | 36 | class TestHostAPI: |
126 | 126 | res = test_client.post(self.url(), data={ |
127 | 127 | "ip": "127.0.0.1", |
128 | 128 | "description": "aaaaa", |
129 | "_rev":"saraza" | |
129 | "_rev": "saraza" | |
130 | 130 | # os is not required |
131 | 131 | }) |
132 | 132 | assert res.status_code == 201 |
290 | 290 | |
291 | 291 | @pytest.mark.usefixtures('ignore_nplusone') |
292 | 292 | def test_filter_restless_by_os_exact(self, test_client, session, workspace, |
293 | second_workspace, host_factory): | |
293 | second_workspace, host_factory): | |
294 | 294 | # The hosts that should be shown |
295 | 295 | hosts = host_factory.create_batch(10, workspace=workspace, os='Unix') |
296 | 296 | |
307 | 307 | |
308 | 308 | @pytest.mark.usefixtures('ignore_nplusone') |
309 | 309 | def test_filter_restless_count(self, test_client, session, workspace, |
310 | second_workspace, host_factory): | |
310 | second_workspace, host_factory): | |
311 | 311 | # The hosts that should be shown |
312 | 312 | hosts = host_factory.create_batch(30, workspace=workspace, os='Unix') |
313 | 313 | |
326 | 326 | host_factory.create_batch(1, workspace=workspace, os='unix') |
327 | 327 | session.commit() |
328 | 328 | res = test_client.get(urljoin(self.url(), 'filter?q={"filters":[{"name": "os", "op": "like", "val": "%nix"}], ' |
329 | '"group_by":[{"field": "os"}], "order_by":[{"field": "os", "direction": "desc"}]}')) | |
329 | '"group_by":[{"field": "os"}], "order_by":[{"field": "os", "direction": "desc"}]}')) | |
330 | 330 | assert res.status_code == 200 |
331 | 331 | assert len(res.json['rows']) == 2 |
332 | 332 | assert res.json['count'] == 2 |
360 | 360 | |
361 | 361 | @pytest.mark.usefixtures('ignore_nplusone') |
362 | 362 | def test_filter_restless_by_os_like_ilike(self, test_client, session, workspace, |
363 | second_workspace, host_factory): | |
363 | second_workspace, host_factory): | |
364 | 364 | # The hosts that should be shown |
365 | 365 | hosts = host_factory.create_batch(5, workspace=workspace, os='Unix 1') |
366 | 366 | hosts += host_factory.create_batch(5, workspace=workspace, os='Unix 2') |
379 | 379 | res = test_client.get(urljoin( |
380 | 380 | self.url(), |
381 | 381 | 'filter?q={"filters":[{"name": "os", "op":"like", "val":"Unix %"}]}' |
382 | ) | |
382 | ) | |
383 | 383 | ) |
384 | 384 | assert res.status_code == 200 |
385 | 385 | self.compare_results(hosts, res) |
387 | 387 | res = test_client.get(urljoin( |
388 | 388 | self.url(), |
389 | 389 | 'filter?q={"filters":[{"name": "os", "op":"ilike", "val":"Unix %"}]}' |
390 | ) | |
390 | ) | |
391 | 391 | ) |
392 | 392 | assert res.status_code == 200 |
393 | 393 | self.compare_results(hosts + [case_insensitive_host], res) |
410 | 410 | |
411 | 411 | @pytest.mark.usefixtures('ignore_nplusone') |
412 | 412 | def test_filter_restless_by_service_name(self, test_client, session, workspace, |
413 | service_factory, host_factory): | |
413 | service_factory, host_factory): | |
414 | 414 | services = service_factory.create_batch(10, workspace=workspace, |
415 | 415 | name="IRC") |
416 | 416 | hosts = [service.host for service in services] |
431 | 431 | expected_host_ids = set(host.id for host in hosts) |
432 | 432 | assert shown_hosts_ids == expected_host_ids |
433 | 433 | |
434 | ||
435 | 434 | def test_filter_by_service_port(self, test_client, session, workspace, |
436 | service_factory, host_factory): | |
435 | service_factory, host_factory): | |
437 | 436 | services = service_factory.create_batch(10, workspace=workspace, port=25) |
438 | 437 | hosts = [service.host for service in services] |
439 | 438 | |
447 | 446 | expected_host_ids = set(host.id for host in hosts) |
448 | 447 | assert shown_hosts_ids == expected_host_ids |
449 | 448 | |
450 | ||
451 | 449 | @pytest.mark.usefixtures('ignore_nplusone') |
452 | 450 | def test_filter_restless_by_service_port(self, test_client, session, workspace, |
453 | service_factory, host_factory): | |
451 | service_factory, host_factory): | |
454 | 452 | services = service_factory.create_batch(10, workspace=workspace, port=25) |
455 | 453 | hosts = [service.host for service in services] |
456 | 454 | |
493 | 491 | |
494 | 492 | assert res.status_code == 200 |
495 | 493 | |
496 | severities = res.json['rows'][0]['value']['severity_counts'] | |
494 | severities = res.json['rows'][0]['value']['severity_counts'] | |
497 | 495 | assert severities['info'] == 1 |
498 | 496 | assert severities['critical'] == 2 |
499 | 497 | assert severities['high'] == 1 |
503 | 501 | assert severities['total'] == 5 |
504 | 502 | |
505 | 503 | def test_filter_by_invalid_service_port(self, test_client, session, workspace, |
506 | service_factory, host_factory): | |
504 | service_factory, host_factory): | |
507 | 505 | services = service_factory.create_batch(10, workspace=workspace, port=25) |
508 | 506 | hosts = [service.host for service in services] |
509 | 507 | |
516 | 514 | assert res.json['count'] == 0 |
517 | 515 | |
518 | 516 | def test_filter_restless_by_invalid_service_port(self, test_client, session, workspace, |
519 | service_factory, host_factory): | |
517 | service_factory, host_factory): | |
520 | 518 | services = service_factory.create_batch(10, workspace=workspace, port=25) |
521 | 519 | hosts = [service.host for service in services] |
522 | 520 | |
543 | 541 | |
544 | 542 | @pytest.mark.usefixtures('ignore_nplusone') |
545 | 543 | def test_filter_restless_with_no_q_param(self, test_client, session, workspace, host_factory): |
546 | res = test_client.get(urljoin(self.url(),'filter')) | |
544 | res = test_client.get(urljoin(self.url(), 'filter')) | |
547 | 545 | assert res.status_code == 200 |
548 | 546 | assert len(res.json['rows']) == HOSTS_COUNT |
549 | 547 | |
619 | 617 | vulnerability_factory.create(service=service, host=None, workspace=workspace) |
620 | 618 | session.commit() |
621 | 619 | |
622 | res = test_client.get(self.check_url(urljoin(self.url(host),'services/'))) | |
620 | res = test_client.get(self.check_url(urljoin(self.url(host), 'services/'))) | |
623 | 621 | assert res.status_code == 200 |
624 | 622 | assert res.json[0]['vulns'] == 1 |
625 | 623 | |
674 | 672 | session.commit() |
675 | 673 | raw_data = { |
676 | 674 | "metadata": |
677 | { | |
678 | "update_time":1510688312.431, | |
679 | "update_user":"UI Web", | |
680 | "update_action":0, | |
681 | "creator":"", | |
682 | "create_time":1510673388000, | |
683 | "update_controller_action":"", | |
684 | "owner":"leonardo", | |
685 | "command_id": None}, | |
686 | "name":"10.31.112.21", | |
687 | "ip":"10.31.112.21", | |
688 | "_rev":"", | |
689 | "description":"", | |
675 | { | |
676 | "update_time": 1510688312.431, | |
677 | "update_user": "UI Web", | |
678 | "update_action": 0, | |
679 | "creator": "", | |
680 | "create_time": 1510673388000, | |
681 | "update_controller_action": "", | |
682 | "owner": "leonardo", | |
683 | "command_id": None}, | |
684 | "name": "10.31.112.21", | |
685 | "ip": "10.31.112.21", | |
686 | "_rev": "", | |
687 | "description": "", | |
690 | 688 | "default_gateway": None, |
691 | 689 | "owned": False, |
692 | "services":12, | |
693 | "hostnames":[], | |
694 | "vulns":43, | |
695 | "owner":"leonardo", | |
696 | "credentials":0, | |
690 | "services": 12, | |
691 | "hostnames": [], | |
692 | "vulns": 43, | |
693 | "owner": "leonardo", | |
694 | "credentials": 0, | |
697 | 695 | "_id": 4000, |
698 | "os":"Microsoft Windows Server 2008 R2 Standard Service Pack 1", | |
696 | "os": "Microsoft Windows Server 2008 R2 Standard Service Pack 1", | |
699 | 697 | "id": 4000, |
700 | "icon":"windows", | |
698 | "icon": "windows", | |
701 | 699 | "versions": [], |
702 | 700 | "important": False, |
703 | 701 | } |
835 | 833 | |
836 | 834 | class TestHostAPIV3(TestHostAPI): |
837 | 835 | def url(self, host=None, workspace=None): |
838 | return v2_to_v3(super(TestHostAPIV3, self).url(host, workspace)) | |
836 | return v2_to_v3(super().url(host, workspace)) | |
839 | 837 | |
840 | 838 | def check_url(self, url): |
841 | 839 | return v2_to_v3(url) |
902 | 900 | session.flush() |
903 | 901 | expected_ids.append(host.id) |
904 | 902 | session.commit() |
905 | res = test_client.get(self.url(workspace=second_workspace) + | |
906 | '?sort=services&sort_dir=asc') | |
903 | res = test_client.get(self.url(workspace=second_workspace) | |
904 | + '?sort=services&sort_dir=asc') | |
907 | 905 | assert res.status_code == 200 |
908 | 906 | assert [h['_id'] for h in res.json['data']] == expected_ids |
909 | 907 | |
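The `+` moving from line end to line start here (and again in the update_time sort test below) clears flake8's W504 and follows PEP 8's current preference for breaking before binary operators; a trivial illustration:

    base = "/v3/ws/demo/hosts"
    # the continuation line starts with the operator, so the expression
    # reads "base + query" straight down the left margin
    url = (base
           + "?sort=services&sort_dir=asc")
    assert url == "/v3/ws/demo/hosts?sort=services&sort_dir=asc"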
918 | 916 | expected = host_factory.create_batch(10, workspace=second_workspace) |
919 | 917 | session.commit() |
920 | 918 | for i in range(len(expected)): |
921 | if i % 2 == 0: # Only update some hosts | |
919 | if i % 2 == 0: # Only update some hosts | |
922 | 920 | host = expected.pop(0) |
923 | 921 | host.description = 'i was updated' |
924 | 922 | session.add(host) |
925 | 923 | session.commit() |
926 | 924 | expected.append(host) # Put it on the end |
927 | res = test_client.get(self.url(workspace=second_workspace) + | |
928 | '?sort=metadata.update_time&sort_dir=asc') | |
925 | res = test_client.get(self.url(workspace=second_workspace) | |
926 | + '?sort=metadata.update_time&sort_dir=asc') | |
929 | 927 | assert res.status_code == 200, res.data |
930 | 928 | assert [h['_id'] for h in res.json['data']] == [h.id for h in expected] |
931 | 929 | |
1035 | 1033 | session.add(host) |
1036 | 1034 | session.commit() |
1037 | 1035 | data = { |
1038 | "description":"", | |
1039 | "default_gateway":"", | |
1040 | "ip":"127.0.0.1", | |
1041 | "owned":False, | |
1042 | "name":"127.0.0.1", | |
1043 | "mac":"", | |
1044 | "hostnames":["dasdas"], | |
1045 | "owner":"faraday", | |
1046 | "os":"Unknown", | |
1036 | "description": "", | |
1037 | "default_gateway": "", | |
1038 | "ip": "127.0.0.1", | |
1039 | "owned": False, | |
1040 | "name": "127.0.0.1", | |
1041 | "mac": "", | |
1042 | "hostnames": ["dasdas"], | |
1043 | "owner": "faraday", | |
1044 | "os": "Unknown", | |
1047 | 1045 | } |
1048 | 1046 | |
1049 | 1047 | res = test_client.put(self.url(host, workspace=host.workspace), data=data) |
1140 | 1138 | view_class = HostsV3View |
1141 | 1139 | |
1142 | 1140 | def url(self, obj=None, workspace=None): |
1143 | return v2_to_v3(super(TestHostAPIGenericV3, self).url(obj, workspace)) | |
1141 | return v2_to_v3(super().url(obj, workspace)) | |
1144 | 1142 | |
1145 | 1143 | |
1146 | 1144 | def host_json(): |
1155 | 1153 | "create_time": st.integers(), |
1156 | 1154 | "update_controller_action": st.text(), |
1157 | 1155 | "owner": st.one_of(st.none(), st.text()), |
1158 | "command_id": st.one_of(st.none(), st.text(), st.integers()),}), | |
1156 | "command_id": st.one_of(st.none(), st.text(), st.integers()), }), | |
1159 | 1157 | "name": st.one_of(st.none(), st.text()), |
1160 | 1158 | "ip": st.one_of(st.none(), st.text()), |
1161 | 1159 | "_rev": st.one_of(st.none(), st.text()), |
1182 | 1180 | |
1183 | 1181 | @given(HostData) |
1184 | 1182 | def send_api_request(raw_data): |
1185 | ||
1186 | 1183 | ws_name = host_with_hostnames.workspace.name |
1187 | 1184 | res = test_client.post(f'/v2/ws/{ws_name}/vulns/', |
1188 | 1185 | data=raw_data) |
1190 | 1187 | |
1191 | 1188 | @given(HostData) |
1192 | 1189 | def send_api_request_v3(raw_data): |
1193 | ||
1194 | 1190 | ws_name = host_with_hostnames.workspace.name |
1195 | 1191 | res = test_client.post(f'/v3/ws/{ws_name}/vulns', |
1196 | 1192 | data=raw_data) |
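The nested @given functions above use a standard hypothesis idiom: decorate an inner function with a strategy, then call it once with no arguments to run the whole property search. Stripped to essentials (hypothetical two-key strategy, not the full HostData):

    from hypothesis import given, strategies as st

    HostData = st.fixed_dictionaries({
        "ip": st.one_of(st.none(), st.text()),
        "description": st.text(),
    })

    @given(HostData)
    def send_request(raw_data):
        # stand-in for posting raw_data to the API; hypothesis supplies
        # the argument on every iteration
        assert set(raw_data) == {"ip", "description"}

    send_request()  # the bare call executes the property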
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
55 | 55 | |
56 | 56 | class TestLicensesAPIV3(TestLicensesAPI, PatchableTestsMixin): |
57 | 57 | def url(self, obj=None): |
58 | return v2_to_v3(super(TestLicensesAPIV3, self).url(obj)) | |
58 | return v2_to_v3(super().url(obj)) | |
59 | 59 | |
60 | 60 | @pytest.mark.skip(reason="Not a license actually test") |
61 | 61 | def test_envelope_list(self, test_client, app): |
73 | 73 | "creator": st.one_of(st.none(), st.text()), |
74 | 74 | "create_time": st.floats(), |
75 | 75 | "update_controller_action": st.one_of(st.none(), st.text()), |
76 | "owner": st.one_of(st.none(), st.text())}), | |
76 | "owner": st.one_of(st.none(), st.text())}), | |
77 | 77 | "notes": st.one_of(st.none(), st.text()), |
78 | 78 | "product": st.one_of(st.none(), st.text()), |
79 | 79 | "start": st.datetimes(), |
80 | 80 | "end": st.datetimes(), |
81 | 81 | "type": st.one_of(st.none(), st.text()) |
82 | }) | |
82 | }) | |
83 | 83 | |
84 | 84 | |
85 | 85 | @pytest.mark.usefixtures('logged_user') |
3 | 3 | from itsdangerous import TimedJSONWebSignatureSerializer |
4 | 4 | |
5 | 5 | from faraday.server.models import User |
6 | from faraday.server.web import app | |
6 | from faraday.server.web import get_app | |
7 | 7 | from tests import factories |
8 | from tests.conftest import logged_user, login_as | |
9 | 8 | from tests.utils.url import v2_to_v3 |
10 | 9 | |
11 | 10 | |
29 | 28 | session.commit() |
30 | 29 | # we use lower case username, but in db is Capitalized |
31 | 30 | login_payload = { |
32 | 'email': 'susan', | |
31 | 'email': 'Susan', | |
33 | 32 | 'password': 'pepito', |
34 | 33 | } |
35 | 34 | res = test_client.post('/login', data=login_payload) |
72 | 71 | """ |
73 | 72 | # clean cookies make sure test_client has no session |
74 | 73 | test_client.cookie_jar.clear() |
75 | secret_key = app.config['SECRET_KEY'] | |
74 | secret_key = get_app().config['SECRET_KEY'] | |
76 | 75 | alice = factories.UserFactory.create( |
77 | 76 | active=True, |
78 | 77 | username='alice', |
85 | 84 | session.add(ws) |
86 | 85 | session.commit() |
87 | 86 | |
88 | serializer = TimedJSONWebSignatureSerializer(app.config['SECRET_KEY'], expires_in=500, salt="token") | |
89 | token = serializer.dumps({ 'user_id': alice.id}) | |
87 | serializer = TimedJSONWebSignatureSerializer(get_app().config['SECRET_KEY'], expires_in=500, salt="token") | |
88 | token = serializer.dumps({'user_id': alice.id}) | |
90 | 89 | |
91 | 90 | headers = {'Authorization': b'Token ' + token} |
92 | 91 | |
111 | 110 | assert 'Set-Cookie' not in res.headers |
112 | 111 | cookies = [cookie.name for cookie in test_client.cookie_jar] |
113 | 112 | assert "faraday_session_2" not in cookies |
114 | ||
115 | 113 | |
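The token round-trip this test drives can be reproduced in isolation. Note that TimedJSONWebSignatureSerializer belongs to itsdangerous 1.x, the series in use here; itsdangerous 2.0 removed it. A sketch with a throwaway secret:

    from itsdangerous import TimedJSONWebSignatureSerializer

    serializer = TimedJSONWebSignatureSerializer("s3cr3t", expires_in=500,
                                                 salt="token")
    token = serializer.dumps({"user_id": 1})          # signed bytes
    assert serializer.loads(token) == {"user_id": 1}  # SignatureExpired after 500s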
116 | 114 | def test_cant_retrieve_token_unauthenticated(self, test_client): |
117 | 115 | # clean cookies make sure test_client has no session |
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
5 | 5 | |
6 | 6 | ''' |
7 | 7 | from builtins import str |
8 | ||
9 | from tests.utils.url import v2_to_v3 | |
10 | 8 | |
11 | 9 | """Generic tests for APIs NOT prefixed with a workspace_name""" |
12 | 10 | |
16 | 14 | API_PREFIX = '/v2/' |
17 | 15 | OBJECT_COUNT = 5 |
18 | 16 | |
17 | ||
19 | 18 | @pytest.mark.usefixtures('logged_user') |
20 | 19 | class GenericAPITest: |
21 | ||
22 | 20 | model = None |
23 | 21 | factory = None |
24 | 22 | api_endpoint = None |
145 | 143 | |
146 | 144 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
147 | 145 | def test_update_an_object(self, test_client, logged_user, method): |
148 | super(PatchableTestsMixin, self).test_update_an_object(test_client, logged_user, method) | |
146 | super().test_update_an_object(test_client, logged_user, method) | |
149 | 147 | |
150 | 148 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
151 | 149 | def test_update_fails_with_existing(self, test_client, session, method): |
152 | super(PatchableTestsMixin, self).test_update_fails_with_existing(test_client, session, method) | |
150 | super().test_update_fails_with_existing(test_client, session, method) | |
153 | 151 | |
154 | 152 | def test_patch_update_an_object_does_not_fail_with_partial_data(self, test_client, logged_user): |
155 | 153 | """To do this the user should use a PATCH request""" |
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
14 | 14 | except ImportError as e: |
15 | 15 | from urllib.parse import urlencode |
16 | 16 | |
17 | ||
17 | 18 | def with_0_and_n_objects(n=10): |
18 | 19 | return pytest.mark.parametrize('object_count', [0, n]) |
20 | ||
19 | 21 | |
20 | 22 | class PaginationTestsMixin: |
21 | 23 |
0 | import pytest | |
1 | ||
2 | 0 | from tests.test_api_non_workspaced_base import GenericAPITest |
3 | 1 | from tests.factories import UserFactory |
4 | 2 | from faraday.server.models import User |
6 | 4 | from tests.utils.url import v2_to_v3 |
7 | 5 | |
8 | 6 | |
9 | pytest.fixture('logged_user') | |
7 | # pytest.fixture('logged_user') | |
10 | 8 | class TestPreferences(GenericAPITest): |
11 | 9 | model = User |
12 | 10 | factory = UserFactory |
37 | 35 | assert response.status_code == 200 |
38 | 36 | assert response.json['preferences'] == preferences |
39 | 37 | |
40 | ||
41 | 38 | def test_add_invalid_preference(self, test_client): |
42 | 39 | preferences = {'field1': 1, 'field2': 'str1'} |
43 | 40 | data = {'p': preferences} |
48 | 45 | |
49 | 46 | class TestPreferencesV3(TestPreferences): |
50 | 47 | def url(self, obj=None): |
51 | return v2_to_v3(super(TestPreferencesV3, self).url(obj)) | |
48 | return v2_to_v3(super().url(obj)) |
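The line commented out at the top of this class was already a no-op: calling `pytest.fixture('logged_user')` without `@` builds a fixture object and throws it away, and pytest.fixture takes no fixture name anyway. The marker that actually applies a named fixture to a class, used throughout the rest of these tests, is sketched below (the logged_user stand-in is hypothetical):

    import pytest

    @pytest.fixture
    def logged_user():
        # stand-in for the real session-login fixture in conftest.py
        return "faraday"

    @pytest.mark.usefixtures('logged_user')  # the form that actually applies it
    class TestPreferences:
        def test_marker_ran(self):
            pass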
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
8 | 8 | |
9 | 9 | import pytest |
10 | 10 | |
11 | from tests.factories import SearchFilterFactory, UserFactory, SubFactory | |
12 | from tests.test_api_non_workspaced_base import ReadWriteAPITests, OBJECT_COUNT, PatchableTestsMixin | |
13 | from tests.test_api_agent import logout, http_req | |
11 | from tests.factories import SearchFilterFactory, UserFactory | |
12 | from tests.test_api_non_workspaced_base import ReadWriteAPITests, PatchableTestsMixin | |
13 | from tests.test_api_agent import logout | |
14 | 14 | from tests.conftest import login_as |
15 | 15 | from faraday.server.models import SearchFilter |
16 | ||
17 | 16 | |
18 | 17 | from faraday.server.api.modules.search_filter import SearchFilterView |
19 | 18 | from tests.utils.url import v2_to_v3 |
32 | 31 | def test_list_retrieves_all_items_from(self, test_client, logged_user): |
33 | 32 | for searchfilter in SearchFilter.query.all(): |
34 | 33 | searchfilter.creator = logged_user |
35 | super(TestSearchFilterAPI, self).test_list_retrieves_all_items_from(test_client, logged_user) | |
34 | super().test_list_retrieves_all_items_from(test_client, logged_user) | |
36 | 35 | |
37 | 36 | def test_list_retrieves_all_items_from_logger_user(self, test_client, session, logged_user): |
38 | 37 | user_filter = SearchFilterFactory.create(creator=logged_user) |
47 | 46 | |
48 | 47 | def test_retrieve_one_object(self, test_client, logged_user): |
49 | 48 | self.first_object.creator = logged_user |
50 | super(TestSearchFilterAPI, self).test_retrieve_one_object(test_client, logged_user) | |
49 | super().test_retrieve_one_object(test_client, logged_user) | |
51 | 50 | |
52 | 51 | def test_retrieve_one_object_from_logged_user(self, test_client, session, logged_user): |
53 | 52 | |
107 | 106 | @pytest.mark.parametrize("method", ["PUT"]) |
108 | 107 | def test_update_an_object(self, test_client, logged_user, method): |
109 | 108 | self.first_object.creator = logged_user |
110 | super(TestSearchFilterAPI, self).test_update_an_object(test_client, logged_user, method) | |
109 | super().test_update_an_object(test_client, logged_user, method) | |
111 | 110 | |
112 | 111 | def test_update_an_object_fails_with_empty_dict(self, test_client, logged_user): |
113 | 112 | self.first_object.creator = logged_user |
114 | super(TestSearchFilterAPI, self).test_update_an_object_fails_with_empty_dict(test_client, logged_user) | |
113 | super().test_update_an_object_fails_with_empty_dict(test_client, logged_user) | |
115 | 114 | |
116 | 115 | def test_delete(self, test_client, logged_user): |
117 | 116 | self.first_object.creator = logged_user |
118 | super(TestSearchFilterAPI, self).test_delete(test_client, logged_user) | |
117 | super().test_delete(test_client, logged_user) | |
119 | 118 | |
120 | 119 | |
121 | 120 | @pytest.mark.usefixtures('logged_user') |
122 | 121 | class TestSearchFilterAPIV3(TestSearchFilterAPI, PatchableTestsMixin): |
123 | 122 | def url(self, obj=None): |
124 | return v2_to_v3(super(TestSearchFilterAPIV3, self).url(obj)) | |
123 | return v2_to_v3(super().url(obj)) | |
125 | 124 | |
126 | 125 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
127 | 126 | def test_update_an_object(self, test_client, logged_user, method): |
128 | super(TestSearchFilterAPIV3, self).test_update_an_object(test_client, logged_user, method) | |
127 | super().test_update_an_object(test_client, logged_user, method) | |
129 | 128 | |
130 | 129 | def test_patch_update_an_object_does_not_fail_with_partial_data(self, test_client, logged_user): |
131 | 130 | self.first_object.creator = logged_user |
132 | super(TestSearchFilterAPIV3, self).test_update_an_object_fails_with_empty_dict(test_client, logged_user) | |
131 | super().test_update_an_object_fails_with_empty_dict(test_client, logged_user) |
269 | 269 | assert cmd_obj.object_type == 'service' |
270 | 270 | assert cmd_obj.object_id == res.json['id'] |
271 | 271 | |
272 | ||
273 | 272 | def test_create_service_without_ost(self, test_client, host, session): |
274 | 273 | session.commit() |
275 | 274 | data = { |
343 | 342 | view_class = ServiceV3View |
344 | 343 | |
345 | 344 | def url(self, obj=None, workspace=None): |
346 | return v2_to_v3(super(TestListServiceViewV3, self).url(obj, workspace)) | |
345 | return v2_to_v3(super().url(obj, workspace)) | |
347 | 346 | |
348 | 347 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
349 | 348 | def test_update_cant_change_id(self, test_client, session, method): |
350 | super(TestListServiceViewV3, self).test_update_cant_change_id(test_client, session, method) | |
349 | super().test_update_cant_change_id(test_client, session, method) |
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
22 | 22 | except ImportError: |
23 | 23 | from urllib.parse import urlencode |
24 | 24 | |
25 | ||
26 | 25 | import pytz |
27 | 26 | import pytest |
28 | 27 | from dateutil import parser |
30 | 29 | |
31 | 30 | from hypothesis import given, settings, strategies as st |
32 | 31 | |
33 | from sqlalchemy import inspect | |
34 | 32 | from faraday.server.api.modules.vulns import ( |
35 | 33 | VulnerabilityFilterSet, |
36 | 34 | VulnerabilitySchema, |
72 | 70 | CustomFieldsSchemaFactory |
73 | 71 | ) |
74 | 72 | |
73 | ||
75 | 74 | def _create_post_data_vulnerability(name, vuln_type, parent_id, |
76 | 75 | parent_type, refs, policyviolations, |
77 | 76 | status='opened', |
78 | 77 | attachments=None, impact=None, |
79 | 78 | description='desc1234', |
80 | 79 | confirmed=True, data='data1234', |
81 | easeofresolution= | |
82 | Vulnerability.EASE_OF_RESOLUTIONS[0], | |
80 | easeofresolution=Vulnerability.EASE_OF_RESOLUTIONS[0], | |
83 | 81 | owned=False, resolution='res1234', |
84 | 82 | severity='critical', |
85 | 83 | update_controller_action='UI Web', |
159 | 157 | model = Vulnerability |
160 | 158 | factory = factories.VulnerabilityFactory |
161 | 159 | api_endpoint = 'vulns' |
162 | #unique_fields = ['ip'] | |
163 | #update_fields = ['ip', 'description', 'os'] | |
160 | # unique_fields = ['ip'] | |
161 | # update_fields = ['ip', 'description', 'os'] | |
164 | 162 | view_class = VulnerabilityView |
165 | 163 | patchable_fields = ['description'] |
166 | 164 | |
563 | 561 | impact = {"accountability": False, "availability": False, "confidentiality": False, "integrity": False} |
564 | 562 | |
565 | 563 | raw_data = { |
566 | "_id":"e1b45f5375facfb1435d37e182ebc22de5f77bb3.e05df1c85617fffb575d2ced2679e9a0ebda7c3e", | |
567 | "metadata":{ | |
568 | "update_time":1509045001.279, | |
569 | "update_user":"", | |
570 | "update_action":0, | |
571 | "creator":"UI Web", | |
572 | "create_time":1509045001.279, | |
564 | "_id": "e1b45f5375facfb1435d37e182ebc22de5f77bb3.e05df1c85617fffb575d2ced2679e9a0ebda7c3e", | |
565 | "metadata": { | |
566 | "update_time": 1509045001.279, | |
567 | "update_user": "", | |
568 | "update_action": 0, | |
569 | "creator": "UI Web", | |
570 | "create_time": 1509045001.279, | |
573 | 571 | "update_controller_action": |
574 | 572 | "UI Web New", |
575 | "owner":""}, | |
576 | "obj_id":"e05df1c85617fffb575d2ced2679e9a0ebda7c3e", | |
577 | "owner":"", | |
573 | "owner": ""}, | |
574 | "obj_id": "e05df1c85617fffb575d2ced2679e9a0ebda7c3e", | |
575 | "owner": "", | |
578 | 576 | "parent": parent, |
579 | "type":"Vulnerability", | |
580 | "ws":"cloud", | |
577 | "type": "Vulnerability", | |
578 | "ws": "cloud", | |
581 | 579 | "confirmed": True, |
582 | "data":"", | |
580 | "data": "", | |
583 | 581 | "desc": desc, |
584 | "easeofresolution":None, | |
582 | "easeofresolution": None, | |
585 | 583 | "impact": impact, |
586 | 584 | "name": name, |
587 | 585 | "owned": False, |
588 | "policyviolations":policy_violations, | |
586 | "policyviolations": policy_violations, | |
589 | 587 | "refs": refs, |
590 | "resolution":"", | |
588 | "resolution": "", | |
591 | 589 | "severity": "critical", |
592 | 590 | "status": status, |
593 | "_attachments":{}, | |
594 | "description":"", | |
591 | "_attachments": {}, | |
592 | "description": "", | |
595 | 593 | "parent_type": parent_type, |
596 | "protocol":"", | |
597 | "version":""} | |
594 | "protocol": "", | |
595 | "version": ""} | |
598 | 596 | |
599 | 597 | if attachments: |
600 | 598 | raw_data['_attachments'] = {} |
601 | 599 | for attachment in attachments: |
602 | 600 | raw_data['_attachments'][attachment.name] = { |
603 | "content_type": "application/x-shellscript", | |
604 | "data": b64encode(attachment.read()).decode() | |
605 | } | |
601 | "content_type": "application/x-shellscript", | |
602 | "data": b64encode(attachment.read()).decode() | |
603 | } | |
606 | 604 | |
607 | 605 | return raw_data |
608 | 606 | |
609 | 607 | def test_update_vuln_from_open_to_close(self, test_client, session, host_with_hostnames): |
610 | vuln = self.factory.create(status='open', host=host_with_hostnames, service=None, workspace=host_with_hostnames.workspace) | |
608 | vuln = self.factory.create(status='open', host=host_with_hostnames, service=None, | |
609 | workspace=host_with_hostnames.workspace) | |
611 | 610 | session.commit() |
612 | 611 | raw_data = self._create_put_data( |
613 | 612 | name='New name', |
627 | 626 | assert res.json['desc'] == 'New desc' |
628 | 627 | |
629 | 628 | def test_update_vuln_from_correct_type_to_incorrect(self, test_client, session, host_with_hostnames): |
630 | vuln = self.factory.create(status='open', host=host_with_hostnames, service=None, workspace=host_with_hostnames.workspace) | |
629 | vuln = self.factory.create(status='open', host=host_with_hostnames, service=None, | |
630 | workspace=host_with_hostnames.workspace) | |
631 | 631 | session.commit() |
632 | 632 | raw_data = self._create_put_data( |
633 | 633 | name='New name', |
641 | 641 | raw_data['type'] = "ASDADADASD" |
642 | 642 | vuln_count_previous = session.query(Vulnerability).count() |
643 | 643 | res = test_client.put(self.url(vuln), data=raw_data) |
644 | assert res.status_code in [400,409] | |
644 | assert res.status_code in [400, 409] | |
645 | 645 | assert vuln_count_previous == session.query(Vulnerability).count() |
646 | 646 | |
647 | 647 | def test_create_vuln_web(self, host_with_hostnames, test_client, session): |
670 | 670 | assert res.json['method'] == 'GET' |
671 | 671 | assert res.json['path'] == '/pepep' |
672 | 672 | |
673 | ||
674 | ||
675 | 673 | @pytest.mark.parametrize('param_name', ['query', 'query_string']) |
676 | 674 | @pytest.mark.usefixtures('mock_envelope_list') |
677 | 675 | def test_filter_by_querystring( |
701 | 699 | for vuln in res.json['data']: |
702 | 700 | assert vuln['query'] == 'bbb' |
703 | 701 | assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids |
704 | ||
705 | 702 | |
706 | 703 | @pytest.mark.usefixtures('mock_envelope_list') |
707 | 704 | @pytest.mark.parametrize('medium_name', ['medium', 'med']) |
753 | 750 | # Vulns that shouldn't be shown |
754 | 751 | vuln_second_workspace = vulnerability_factory.create_batch(5, workspace=second_workspace) |
755 | 752 | more_vuln_second_workspace = vulnerability_web_factory.create_batch(5, workspace=second_workspace, |
756 | method='POSTT') | |
753 | method='POSTT') | |
757 | 754 | |
758 | 755 | # Vulns that must be shown |
759 | 756 | expected_vulns = vulnerability_web_factory.create_batch( |
839 | 836 | assert set(vuln['_id'] for vuln in res.json['data']) == expected_ids |
840 | 837 | |
841 | 838 | @pytest.mark.usefixtures('ignore_nplusone') |
842 | def test_filter_restless_by_target(self, test_client, session, workspace, host_factory): | |
843 | ||
844 | host_factory.create(workspace=workspace, ip="192.168.0.1") | |
845 | host_factory.create(workspace=workspace, ip="192.168.0.2") | |
846 | ||
847 | session.commit() | |
848 | res = test_client.get(self.check_url(urljoin( | |
849 | self.url(), 'filter?q={"filters":[{"name": "target", "op":"eq", "val":"192.168.0.2"}]}' | |
850 | ))) | |
851 | assert res.status_code == 200 | |
852 | ||
853 | @pytest.mark.usefixtures('ignore_nplusone') | |
854 | def test_filter_restless_by_target_host_ip(self, test_client, session, workspace, | |
855 | host_factory, vulnerability_factory): | |
839 | def test_filter_restless_by_target__(self, test_client, session, workspace, host_factory, vulnerability_factory): | |
856 | 840 | |
857 | 841 | Vulnerability.query.delete() |
858 | 842 | host = host_factory.create(workspace=workspace, ip="192.168.0.2") |
865 | 849 | |
866 | 850 | session.commit() |
867 | 851 | res = test_client.get(self.check_url(urljoin( |
852 | self.url(), 'filter?q={"filters":[{"name": "target", "op":"eq", "val":"192.168.0.1"}]}' | |
853 | ))) | |
854 | ||
855 | assert res.status_code == 200 | |
856 | assert len(res.json['vulnerabilities']) == 10 | |
857 | ||
858 | @pytest.mark.usefixtures('ignore_nplusone') | |
859 | def test_filter_restless_by_target_host_ip(self, test_client, session, workspace, | |
860 | host_factory, vulnerability_factory): | |
861 | ||
862 | Vulnerability.query.delete() | |
863 | host = host_factory.create(workspace=workspace, ip="192.168.0.2") | |
864 | host_vulns = vulnerability_factory.create_batch( | |
865 | 1, workspace=self.workspace, host=host, service=None) | |
866 | ||
867 | host2 = host_factory.create(workspace=workspace, ip="192.168.0.1") | |
868 | host_vulns2 = vulnerability_factory.create_batch( | |
869 | 10, workspace=self.workspace, host=host2, service=None) | |
870 | ||
871 | session.commit() | |
872 | res = test_client.get(self.check_url(urljoin( | |
868 | 873 | self.url(), |
869 | 874 | 'filter?q={"filters":[{"name": "target_host_ip", "op":"eq", "val":"192.168.0.2"}]}' |
870 | 875 | ))) |
872 | 877 | assert len(res.json['vulnerabilities']) == 1 |
873 | 878 | assert res.json['vulnerabilities'][0]['value']['target'] == '192.168.0.2' |
874 | 879 | |
875 | ||
876 | 880 | @pytest.mark.usefixtures('ignore_nplusone') |
877 | 881 | def test_filter_restless_by_service_port(self, test_client, session, workspace, |
878 | host_factory, vulnerability_factory, | |
879 | vulnerability_web_factory, service_factory): | |
882 | host_factory, vulnerability_factory, | |
883 | vulnerability_web_factory, service_factory): | |
880 | 884 | |
881 | 885 | service = service_factory.create(port=9098, name="ssh", workspace=self.workspace) |
882 | 886 | vulns = vulnerability_factory.create_batch( |
883 | 887 | 1, workspace=self.workspace, service=service, host=None) |
884 | ||
885 | 888 | |
886 | 889 | service = service_factory.create(port=8956, name="443", workspace=self.workspace) |
887 | 890 | |
899 | 902 | |
900 | 903 | @pytest.mark.usefixtures('ignore_nplusone') |
901 | 904 | def test_filter_restless_by_service_name(self, test_client, session, workspace, |
902 | host_factory, vulnerability_factory, | |
903 | vulnerability_web_factory, service_factory): | |
905 | host_factory, vulnerability_factory, | |
906 | vulnerability_web_factory, service_factory): | |
904 | 907 | |
905 | 908 | service = service_factory.create(port=9098, name="ssh", workspace=self.workspace) |
906 | 909 | vulns = vulnerability_factory.create_batch( |
907 | 910 | 1, workspace=self.workspace, service=service, host=None) |
908 | ||
909 | 911 | |
910 | 912 | service = service_factory.create(port=8956, name="443", workspace=self.workspace) |
911 | 913 | |
937 | 939 | method=method) |
938 | 940 | |
939 | 941 | session.commit() |
940 | res = test_client.get(self.url(workspace=second_workspace) + | |
941 | '?sort=method&sort_dir=asc') | |
942 | res = test_client.get(self.url(workspace=second_workspace) | |
943 | + '?sort=method&sort_dir=asc') | |
942 | 944 | assert res.status_code == 200, res.data |
943 | 945 | assert len(res.json['data']) == 30 |
944 | 946 | assert ''.join(v['method'] for v in res.json['data'] |
945 | 947 | if v['method']) == 'abcdefghij' |
946 | 948 | |
947 | res = test_client.get(self.url(workspace=second_workspace) + | |
948 | '?sort=method&sort_dir=desc') | |
949 | res = test_client.get(self.url(workspace=second_workspace) | |
950 | + '?sort=method&sort_dir=desc') | |
949 | 951 | assert res.status_code == 200, res.data |
950 | 952 | assert len(res.json['data']) == 30 |
951 | 953 | assert ''.join(v['method'] for v in res.json['data'] |
1034 | 1036 | assert vuln_count_previous + 1 == session.query(Vulnerability).count() |
1035 | 1037 | assert res.json['name'] == 'New vulns' |
1036 | 1038 | assert res.json['impact'] == {u'accountability': True, |
1037 | u'availability': True, | |
1038 | u'confidentiality': True, | |
1039 | u'integrity': True} | |
1039 | u'availability': True, | |
1040 | u'confidentiality': True, | |
1041 | u'integrity': True} | |
1040 | 1042 | |
1041 | 1043 | def test_handles_invalid_impact(self, host_with_hostnames, test_client, |
1042 | 1044 | session): |
1184 | 1186 | session.add(vuln) |
1185 | 1187 | session.commit() |
1186 | 1188 | |
1187 | #Desc | |
1188 | res = test_client.get( | |
1189 | self.check_url(urljoin(self.url(), "count/")) + | |
1190 | "?confirmed=1&group_by=severity&order=sc" | |
1189 | # Desc | |
1190 | res = test_client.get( | |
1191 | self.check_url(urljoin(self.url(), "count/")) | |
1192 | + "?confirmed=1&group_by=severity&order=sc" | |
1191 | 1193 | ) |
1192 | 1194 | assert res.status_code == 400 |
1193 | 1195 | |
1194 | #Asc | |
1195 | res = test_client.get( | |
1196 | self.check_url(urljoin(self.url(), "count/")) + | |
1197 | "?confirmed=1&group_by=severity&order=name,asc" | |
1196 | # Asc | |
1197 | res = test_client.get( | |
1198 | self.check_url(urljoin(self.url(), "count/")) | |
1199 | + "?confirmed=1&group_by=severity&order=name,asc" | |
1198 | 1200 | ) |
1199 | 1201 | assert res.status_code == 400 |
1200 | ||
1201 | 1202 | |
1202 | 1203 | def test_count_order_by(self, test_client, session): |
1203 | 1204 | for i, vuln in enumerate(self.objects[:3]): |
1211 | 1212 | session.add(vuln) |
1212 | 1213 | session.commit() |
1213 | 1214 | |
1214 | #Desc | |
1215 | res = test_client.get( | |
1216 | self.check_url(urljoin(self.url(),"count/")) + "?confirmed=1&group_by=severity&order=desc" | |
1215 | # Desc | |
1216 | res = test_client.get( | |
1217 | self.check_url(urljoin(self.url(), "count/")) + "?confirmed=1&group_by=severity&order=desc" | |
1217 | 1218 | ) |
1218 | 1219 | assert res.status_code == 200 |
1219 | 1220 | assert res.json['total_count'] == 3 |
1220 | assert sorted(res.json['groups'], key=lambda i: (i['name'],i['count'],i['severity'])) == sorted([ | |
1221 | assert sorted(res.json['groups'], key=lambda i: (i['name'], i['count'], i['severity'])) == sorted([ | |
1221 | 1222 | {"name": "high", "severity": "high", "count": 2}, |
1222 | 1223 | {"name": "critical", "severity": "critical", "count": 1}, |
1223 | ], key=lambda i: (i['name'],i['count'],i['severity'])) | |
1224 | ||
1225 | #Asc | |
1226 | res = test_client.get(self.check_url(urljoin(self.url(),"count/"))+"?confirmed=1&group_by=severity&order=asc") | |
1224 | ], key=lambda i: (i['name'], i['count'], i['severity'])) | |
1225 | ||
1226 | # Asc | |
1227 | res = test_client.get( | |
1228 | self.check_url(urljoin(self.url(), "count/")) + "?confirmed=1&group_by=severity&order=asc") | |
1227 | 1229 | assert res.status_code == 200 |
1228 | 1230 | assert res.json['total_count'] == 3 |
1229 | assert sorted(res.json['groups'], key=lambda i: (i['name'],i['count'],i['severity']), reverse=True) == sorted([ | |
1230 | {"name": "critical", "severity": "critical", "count": 1}, | |
1231 | {"name": "high", "severity": "high", "count": 2}, | |
1232 | ], key=lambda i: (i['name'],i['count'],i['severity']), reverse=True) | |
1231 | assert sorted(res.json['groups'], key=lambda i: (i['name'], i['count'], i['severity']), reverse=True) == sorted( | |
1232 | [ | |
1233 | {"name": "critical", "severity": "critical", "count": 1}, | |
1234 | {"name": "high", "severity": "high", "count": 2}, | |
1235 | ], key=lambda i: (i['name'], i['count'], i['severity']), reverse=True) | |
1233 | 1236 | |
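Taken together, the asserts above pin down the `count/` contract: `group_by` must name a vulnerability column, and `order` accepts only a plain `asc` or `desc` (values such as `sc` or `name,asc` are rejected with a 400). A hedged sketch of a valid call, again with a placeholder workspace name:

    res = test_client.get('/v2/ws/demo/vulns/count/'
                          '?confirmed=1&group_by=severity&order=desc')
    assert res.status_code == 200
    # the body carries 'total_count' plus one group per severity, e.g.
    # {"name": "high", "severity": "high", "count": 2}
    assert {'total_count', 'groups'} <= set(res.json)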
1234 | 1237 | def test_count_group_by_incorrect_vuln_column(self, test_client, session): |
1235 | 1238 | for i, vuln in enumerate(self.objects[:3]): |
1243 | 1246 | session.add(vuln) |
1244 | 1247 | session.commit() |
1245 | 1248 | |
1246 | res = test_client.get(self.check_url(urljoin(self.url(),"count/")) + "?confirmed=1&group_by=username") | |
1249 | res = test_client.get(self.check_url(urljoin(self.url(), "count/")) + "?confirmed=1&group_by=username") | |
1247 | 1250 | assert res.status_code == 400 |
1248 | 1251 | |
1249 | res = test_client.get(self.check_url(urljoin(self.url(),"count/")) + "?confirmed=1&group_by=") | |
1252 | res = test_client.get(self.check_url(urljoin(self.url(), "count/")) + "?confirmed=1&group_by=") | |
1250 | 1253 | assert res.status_code == 400 |
1251 | 1254 | |
1252 | 1255 | def test_count_confirmed(self, test_client, session): |
1262 | 1265 | session.add(vuln) |
1263 | 1266 | session.commit() |
1264 | 1267 | |
1265 | res = test_client.get(self.check_url(urljoin(self.url(),'count/')) + '?confirmed=1&group_by=severity') | |
1268 | res = test_client.get(self.check_url(urljoin(self.url(), 'count/')) + '?confirmed=1&group_by=severity') | |
1266 | 1269 | assert res.status_code == 200 |
1267 | 1270 | assert res.json['total_count'] == 3 |
1268 | assert sorted(res.json['groups'], key=lambda i: (i['count'],i['name'],i['severity'])) == sorted([ | |
1271 | assert sorted(res.json['groups'], key=lambda i: (i['count'], i['name'], i['severity'])) == sorted([ | |
1269 | 1272 | {"name": "high", "severity": "high", "count": 2}, |
1270 | 1273 | {"name": "critical", "severity": "critical", "count": 1}, |
1271 | ], key=lambda i: (i['count'],i['name'],i['severity'])) | |
1274 | ], key=lambda i: (i['count'], i['name'], i['severity'])) | |
1272 | 1275 | |
1273 | 1276 | def test_count_severity_map(self, test_client, second_workspace, session): |
1274 | 1277 | vulns = self.factory.create_batch(4, severity='informational', |
1275 | workspace=second_workspace) | |
1278 | workspace=second_workspace) | |
1276 | 1279 | vulns += self.factory.create_batch(3, severity='medium', |
1277 | workspace=second_workspace) | |
1280 | workspace=second_workspace) | |
1278 | 1281 | vulns += self.factory.create_batch(2, severity='low', |
1279 | workspace=second_workspace) | |
1282 | workspace=second_workspace) | |
1280 | 1283 | session.add_all(vulns) |
1281 | 1284 | session.commit() |
1282 | 1285 | |
1283 | 1286 | res = test_client.get( |
1284 | self.check_url(urljoin(self.url(workspace=second_workspace),'count/')) + '?group_by=severity' | |
1287 | self.check_url(urljoin(self.url(workspace=second_workspace), 'count/')) + '?group_by=severity' | |
1285 | 1288 | ) |
1286 | 1289 | assert res.status_code == 200 |
1287 | 1290 | assert res.json['total_count'] == 9 |
1288 | assert sorted(res.json['groups'], key=lambda i: (i['count'],i['name'],i['severity'])) == sorted([ | |
1291 | assert sorted(res.json['groups'], key=lambda i: (i['count'], i['name'], i['severity'])) == sorted([ | |
1289 | 1292 | {"name": "med", "severity": "med", "count": 3}, |
1290 | 1293 | {"name": "low", "severity": "low", "count": 2}, |
1291 | 1294 | {"name": "info", "severity": "info", "count": 4}, |
1292 | ], key=lambda i: (i['count'],i['name'],i['severity'])) | |
1295 | ], key=lambda i: (i['count'], i['name'], i['severity'])) | |
1293 | 1296 | |
1294 | 1297 | def test_count_multiworkspace_one_workspace(self, test_client, session): |
1295 | 1298 | for i, vuln in enumerate(self.objects): |
1304 | 1307 | session.commit() |
1305 | 1308 | |
1306 | 1309 | res = test_client.get( |
1307 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) + | |
1308 | f'?workspaces={self.workspace.name}&confirmed=1&group_by=severity&order=desc' | |
1310 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) | |
1311 | + f'?workspaces={self.workspace.name}&confirmed=1&group_by=severity&order=desc' | |
1309 | 1312 | ) |
1310 | 1313 | |
1311 | 1314 | assert res.status_code == 200 |
1314 | 1317 | |
1315 | 1318 | def test_count_multiworkspace_two_public_workspaces(self, test_client, session, second_workspace): |
1316 | 1319 | vulns = self.factory.create_batch(1, severity='informational', |
1317 | workspace=second_workspace) | |
1320 | workspace=second_workspace) | |
1318 | 1321 | vulns += self.factory.create_batch(3, severity='medium', |
1319 | workspace=second_workspace) | |
1322 | workspace=second_workspace) | |
1320 | 1323 | vulns += self.factory.create_batch(1, severity='low', |
1321 | workspace=second_workspace) | |
1324 | workspace=second_workspace) | |
1322 | 1325 | session.add_all(vulns) |
1323 | 1326 | session.commit() |
1324 | 1327 | |
1334 | 1337 | session.commit() |
1335 | 1338 | |
1336 | 1339 | res = test_client.get( |
1337 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) + | |
1338 | f'?workspaces={self.workspace.name},{second_workspace.name}&confirmed=1&group_by=severity&order=desc' | |
1340 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) | |
1341 | + f'?workspaces={self.workspace.name},{second_workspace.name}&confirmed=1&group_by=severity&order=desc' | |
1339 | 1342 | ) |
1340 | 1343 | |
1341 | 1344 | assert res.status_code == 200 |
1344 | 1347 | |
1345 | 1348 | def test_count_multiworkspace_no_workspace_param(self, test_client): |
1346 | 1349 | res = test_client.get( |
1347 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) + | |
1348 | '?confirmed=1&group_by=severity&order=desc' | |
1350 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) | |
1351 | + '?confirmed=1&group_by=severity&order=desc' | |
1349 | 1352 | ) |
1350 | 1353 | assert res.status_code == 400 |
1351 | 1354 | |
1352 | 1355 | def test_count_multiworkspace_no_groupby_param(self, test_client): |
1353 | 1356 | res = test_client.get( |
1354 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) + | |
1355 | f'?workspaces={self.workspace.name}&confirmed=1&order=desc' | |
1357 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) | |
1358 | + f'?workspaces={self.workspace.name}&confirmed=1&order=desc' | |
1356 | 1359 | ) |
1357 | 1360 | assert res.status_code == 400 |
1358 | 1361 | |
1359 | 1362 | def test_count_multiworkspace_nonexistent_ws(self, test_client): |
1360 | 1363 | res = test_client.get( |
1361 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) + | |
1362 | '?workspaces=asdf,{self.workspace.name}&confirmed=1&group_by=severity&order=desc' | |
1364 | self.check_url(urljoin(self.url(), 'count_multi_workspace/')) | |
1365 | + f'?workspaces=asdf,{self.workspace.name}&confirmed=1&group_by=severity&order=desc' | 
1363 | 1366 | ) |
1364 | 1367 | assert res.status_code == 404 |
1365 | 1368 | |
1443 | 1446 | workspace=second_workspace |
1444 | 1447 | ) |
1445 | 1448 | for high_vuln in high_vulns: |
1446 | ||
1447 | 1449 | CommandObjectFactory.create( |
1448 | 1450 | command=command, |
1449 | 1451 | object_type='vulnerability', |
1451 | 1453 | workspace=second_workspace |
1452 | 1454 | ) |
1453 | 1455 | for high_vuln_web in high_vulns_web: |
1454 | ||
1455 | 1456 | CommandObjectFactory.create( |
1456 | 1457 | command=web_command, |
1457 | 1458 | object_type='vulnerability', |
1525 | 1526 | res = test_client.get(self.url()) |
1526 | 1527 | assert res.status_code == 200 |
1527 | 1528 | from_json_vuln = list(filter(lambda raw_vuln: raw_vuln['id'] == vuln.id, |
1528 | res.json['vulnerabilities'])) | |
1529 | res.json['vulnerabilities'])) | |
1529 | 1530 | assert 'metadata' in from_json_vuln[0]['value'] |
1530 | 1531 | expected_metadata = { |
1531 | 1532 | u'command_id': command.id, |
1717 | 1718 | severity='high', |
1718 | 1719 | ) |
1719 | 1720 | res = test_client.put( |
1720 | self.check_url(urljoin(self.url(workspace=host_with_hostnames.workspace), f'{res.json["_id"]}/')) + | |
1721 | f'?command_id={command.id}', | |
1721 | self.check_url(urljoin(self.url(workspace=host_with_hostnames.workspace), f'{res.json["_id"]}/')) | |
1722 | + f'?command_id={command.id}', | |
1722 | 1723 | data=raw_data) |
1723 | 1724 | assert res.status_code == 200 |
1724 | 1725 | |
1789 | 1790 | """ |
1790 | 1791 | raw_data = { |
1791 | 1792 | 'command_id': None, |
1792 | 'confirmed': False, | |
1793 | 'data': None, | |
1794 | 'desc': 'pepe', | |
1795 | 'description': 'pepe', | |
1796 | 'metadata': { | |
1793 | 'confirmed': False, | |
1794 | 'data': None, | |
1795 | 'desc': 'pepe', | |
1796 | 'description': 'pepe', | |
1797 | 'metadata': { | |
1797 | 1798 | 'command_id': '', |
1798 | 1799 | 'create_time': 1518627247.194113, |
1799 | 1800 | 'creator': '', |
1802 | 1803 | 'update_controller_action': 'No model controller call', |
1803 | 1804 | 'update_time': 1518627247.194114, |
1804 | 1805 | 'update_user': ''}, |
1805 | 'name': 'vuln1', | |
1806 | 'owned': False, | |
1807 | 'owner': '', | |
1808 | 'parent': '358302', | |
1809 | 'parent_type': 'Host', | |
1810 | 'policyviolations': [], | |
1811 | 'refs': [], | |
1812 | 'resolution': '', | |
1813 | 'severity': 'critical', | |
1814 | 'status': 'opened', | |
1815 | 'type': 'Vulnerability' | |
1806 | 'name': 'vuln1', | |
1807 | 'owned': False, | |
1808 | 'owner': '', | |
1809 | 'parent': '358302', | |
1810 | 'parent_type': 'Host', | |
1811 | 'policyviolations': [], | |
1812 | 'refs': [], | |
1813 | 'resolution': '', | |
1814 | 'severity': 'critical', | |
1815 | 'status': 'opened', | |
1816 | 'type': 'Vulnerability' | |
1816 | 1817 | } |
1817 | 1818 | |
1818 | 1819 | res = test_client.post(self.url(), data=raw_data) |
1889 | 1890 | assert res.json['vulnerabilities'][0]['value']['name'] == vuln.name |
1890 | 1891 | |
1891 | 1892 | def test_hostnames_comma_separated(self, test_client, session): |
1892 | #Create Host A with hostname HA | |
1893 | # Create Host A with hostname HA | |
1893 | 1894 | hostnameA = HostnameFactory.create() |
1894 | 1895 | hostnameA.host.workspace = hostnameA.workspace |
1895 | #Create Host B with hostname HB | |
1896 | # Create Host B with hostname HB | |
1896 | 1897 | hostnameB = HostnameFactory.create(workspace=hostnameA.workspace) |
1897 | 1898 | hostnameB.host.workspace = hostnameA.workspace |
1898 | #Create Vuln with Host A | |
1899 | # Create Vuln with Host A | |
1899 | 1900 | vuln = VulnerabilityFactory.create(host=hostnameA.host, workspace=hostnameA.workspace) |
1900 | #Create Vuln with Host B | |
1901 | # Create Vuln with Host B | |
1901 | 1902 | vuln2 = VulnerabilityFactory.create(host=hostnameB.host, workspace=hostnameA.workspace) |
1902 | 1903 | session.add(hostnameA) |
1903 | 1904 | session.add(hostnameB) |
1905 | 1906 | session.add(vuln2) |
1906 | 1907 | session.commit() |
1907 | 1908 | |
1908 | #Search with hosnames=HA,HB | |
1909 | # Search with hosnames=HA,HB | |
1909 | 1910 | res = test_client.get(self.url(workspace=vuln.workspace) + f'?hostname={hostnameA},{hostnameB}') |
1910 | 1911 | assert res.status_code == 200 |
1911 | 1912 | assert res.json['count'] == 2 |
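The `hostname` parameter above is comma-separated, so a single request can match vulnerabilities across several hosts. A minimal sketch with placeholder hostnames:

    # returns every vuln whose host owns one of the listed hostnames
    res = test_client.get('/v2/ws/demo/vulns/?hostname=ha.example.com,hb.example.com')
    assert res.status_code == 200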
1917 | 1918 | host = HostFactory.create(workspace=self.workspace) |
1918 | 1919 | session.commit() |
1919 | 1920 | data = { |
1920 | 'name': 'Test Alert policy_violations', | |
1921 | 'severity': 'informational', | |
1922 | 'creator': 'Zap', | |
1923 | 'parent_type': 'Host', | |
1924 | 'parent': host.id, | |
1925 | 'type': 'Vulnerability', | |
1921 | 'name': 'Test Alert policy_violations', | |
1922 | 'severity': 'informational', | |
1923 | 'creator': 'Zap', | |
1924 | 'parent_type': 'Host', | |
1925 | 'parent': host.id, | |
1926 | 'type': 'Vulnerability', | |
1926 | 1927 | } |
1927 | 1928 | res = test_client.post(self.url(), data=data) |
1928 | 1929 | assert res.status_code == 201 |
1934 | 1935 | host = HostFactory.create(workspace=self.workspace) |
1935 | 1936 | session.commit() |
1936 | 1937 | data = { |
1937 | 'name': 'Test Alert policy_violations', | |
1938 | 'severity': 'informational', | |
1939 | 'creator': 'Zap', | |
1940 | 'parent_type': 'Host', | |
1941 | 'parent': host.id, | |
1942 | 'type': 'Vulnerability', | |
1938 | 'name': 'Test Alert policy_violations', | |
1939 | 'severity': 'informational', | |
1940 | 'creator': 'Zap', | |
1941 | 'parent_type': 'Host', | |
1942 | 'parent': host.id, | |
1943 | 'type': 'Vulnerability', | |
1943 | 1944 | } |
1944 | 1945 | res = test_client.post(self.url(), data=data) |
1945 | 1946 | assert res.status_code == 201 |
1998 | 1999 | assert query_test == [] |
1999 | 2000 | |
2000 | 2001 | def test_delete_attachment_from_vuln(self, test_client, session, host_with_hostnames): |
2001 | session.commit() # flush host_with_hostnames | |
2002 | session.commit() # flush host_with_hostnames | |
2002 | 2003 | ws_name = host_with_hostnames.workspace.name |
2003 | 2004 | attachment = NamedTemporaryFile() |
2004 | 2005 | file_content = b'test file' |
2027 | 2028 | assert query_test == [] |
2028 | 2029 | |
2029 | 2030 | def test_delete_attachment_from_vuln_fails_readonly(self, test_client, session, host_with_hostnames): |
2030 | session.commit() # flush host_with_hostnames | |
2031 | session.commit() # flush host_with_hostnames | |
2031 | 2032 | ws_name = host_with_hostnames.workspace.name |
2032 | 2033 | attachment = NamedTemporaryFile() |
2033 | 2034 | file_content = b'test file' |
2086 | 2087 | |
2087 | 2088 | def test_invalid_vuln_filters(self, test_client, session, workspace): |
2088 | 2089 | data = { |
2089 | "q": {"filters":[{"name":"severity","op":"eq","val":"medium"}]} | |
2090 | "q": {"filters": [{"name": "severity", "op": "eq", "val": "medium"}]} | |
2090 | 2091 | } |
2091 | 2092 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2092 | 2093 | assert res.status_code == 400 |
2106 | 2107 | workspace = WorkspaceFactory.create() |
2107 | 2108 | creator = UserFactory.create() |
2108 | 2109 | vuln = VulnerabilityFactory.create( |
2109 | workspace=workspace, | |
2110 | severity="medium", | |
2111 | creator=creator, | |
2110 | workspace=workspace, | |
2111 | severity="medium", | |
2112 | creator=creator, | |
2112 | 2113 | ) |
2113 | 2114 | vuln2 = VulnerabilityFactory.create( |
2114 | workspace=workspace, | |
2115 | severity="medium", | |
2116 | creator=creator, | |
2115 | workspace=workspace, | |
2116 | severity="medium", | |
2117 | creator=creator, | |
2117 | 2118 | ) |
2118 | 2119 | session.add(vuln) |
2119 | 2120 | session.add(vuln2) |
2120 | 2121 | session.commit() |
2121 | 2122 | data = { |
2122 | 'q': '{"group_by":[{"field":"creator_id"}]}' | |
2123 | 'q': '{"group_by":[{"field":"creator_id"}]}' | |
2123 | 2124 | } |
2124 | 2125 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2125 | 2126 | assert res.status_code == 200 |
2126 | assert res.json['count'] == 1 # all vulns created by the same creator | |
2127 | assert res.json['count'] == 1 # all vulns created by the same creator | |
2127 | 2128 | expected = [{'count': 2, 'creator_id': creator.id}] |
2128 | 2129 | assert [vuln['value'] for vuln in res.json['vulnerabilities']] == expected |
2129 | 2130 | |
2131 | 2132 | workspace = WorkspaceFactory.create() |
2132 | 2133 | creator = UserFactory.create() |
2133 | 2134 | vuln = VulnerabilityFactory.create_batch(size=10, |
2134 | workspace=workspace, | |
2135 | severity="critical", | |
2136 | creator=creator, | |
2137 | ) | |
2135 | workspace=workspace, | |
2136 | severity="critical", | |
2137 | creator=creator, | |
2138 | ) | |
2138 | 2139 | vuln2 = VulnerabilityWebFactory.create_batch(size=10, |
2139 | workspace=workspace, | |
2140 | severity="critical", | |
2141 | creator=creator, | |
2142 | ) | |
2140 | workspace=workspace, | |
2141 | severity="critical", | |
2142 | creator=creator, | |
2143 | ) | |
2143 | 2144 | session.add_all(vuln) |
2144 | 2145 | session.add_all(vuln2) |
2145 | 2146 | session.commit() |
2146 | 2147 | data = { |
2147 | 'q': '{"group_by":[{"field":"severity"}]}' | |
2148 | 'q': '{"group_by":[{"field":"severity"}]}' | |
2148 | 2149 | } |
2149 | 2150 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2150 | 2151 | assert res.status_code == 200, res.json |
2151 | assert res.json['count'] == 1, res.json # all vulns created by the same creator | |
2152 | expected = { | |
2152 | assert res.json['count'] == 1, res.json  # all vulns share the same severity | 
2153 | expected = { | |
2153 | 2154 | 'count': 1, |
2154 | 2155 | 'vulnerabilities': [ |
2155 | 2156 | {'id': 0, 'key': 0, 'value': {'count': 20, 'severity': 'critical'}} |
2161 | 2162 | workspace = WorkspaceFactory.create() |
2162 | 2163 | creator = UserFactory.create() |
2163 | 2164 | vuln = VulnerabilityFactory.create_batch(size=10, |
2164 | name='name 1', | |
2165 | workspace=workspace, | |
2166 | severity="critical", | |
2167 | creator=creator, | |
2168 | ) | |
2165 | name='name 1', | |
2166 | workspace=workspace, | |
2167 | severity="critical", | |
2168 | creator=creator, | |
2169 | ) | |
2169 | 2170 | vuln2 = VulnerabilityWebFactory.create_batch(size=10, |
2170 | name='name 2', | |
2171 | workspace=workspace, | |
2172 | severity="critical", | |
2173 | creator=creator, | |
2174 | ) | |
2171 | name='name 2', | |
2172 | workspace=workspace, | |
2173 | severity="critical", | |
2174 | creator=creator, | |
2175 | ) | |
2175 | 2176 | session.add_all(vuln) |
2176 | 2177 | session.add_all(vuln2) |
2177 | 2178 | session.commit() |
2178 | 2179 | data = { |
2179 | 'q': '{"group_by":[{"field":"severity"}, {"field": "name"}]}' | |
2180 | 'q': '{"group_by":[{"field":"severity"}, {"field": "name"}]}' | |
2180 | 2181 | } |
2181 | 2182 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2182 | 2183 | assert res.status_code == 200, res.json |
2183 | assert res.json['count'] == 2, res.json # all vulns created by the same creator | |
2184 | expected ={'vulnerabilities': [ | |
2185 | {'id': 0, 'key': 0, 'value': {'count': 10, 'severity': 'critical', 'name': 'name 1'}}, {'id': 1, 'key': 1, 'value': {'count': 10, 'severity': 'critical', 'name': 'name 2'}}], 'count': 2} | |
2184 | assert res.json['count'] == 2, res.json  # two distinct (severity, name) groups | 
2185 | expected = {'vulnerabilities': [ | |
2186 | {'id': 0, 'key': 0, 'value': {'count': 10, 'severity': 'critical', 'name': 'name 1'}}, | |
2187 | {'id': 1, 'key': 1, 'value': {'count': 10, 'severity': 'critical', 'name': 'name 2'}}], 'count': 2} | |
2186 | 2188 | |
2187 | 2189 | assert res.json == expected, res.json |
2188 | 2190 | |
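For reference, grouping on the filter endpoint nests one `{"field": ...}` object per column inside `group_by`, and each returned row carries the grouped columns plus a `count`. A sketch under the same placeholder-workspace assumption:

    import json

    q = json.dumps({"group_by": [{"field": "severity"}, {"field": "name"}]})
    res = test_client.get(f'/v2/ws/demo/vulns/filter?q={q}')
    assert res.status_code == 200
    # e.g. {"count": 2, "vulnerabilities": [{"id": 0, "key": 0,
    #   "value": {"count": 10, "severity": "critical", "name": "name 1"}}, ...]}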
2196 | 2198 | workspace = WorkspaceFactory.create() |
2197 | 2199 | creator = UserFactory.create() |
2198 | 2200 | vuln = VulnerabilityFactory.create_batch(size=10, |
2199 | workspace=workspace, | |
2200 | severity="critical", | |
2201 | creator=creator, | |
2202 | ) | |
2201 | workspace=workspace, | |
2202 | severity="critical", | |
2203 | creator=creator, | |
2204 | ) | |
2203 | 2205 | vuln2 = VulnerabilityWebFactory.create_batch(size=10, |
2204 | workspace=workspace, | |
2205 | severity="critical", | |
2206 | creator=creator, | |
2207 | ) | |
2206 | workspace=workspace, | |
2207 | severity="critical", | |
2208 | creator=creator, | |
2209 | ) | |
2208 | 2210 | session.add_all(vuln) |
2209 | 2211 | session.add_all(vuln2) |
2210 | 2212 | session.commit() |
2211 | 2213 | data = { |
2212 | 'q': json.dumps({"group_by":[{"field":col_name}]}) | |
2214 | 'q': json.dumps({"group_by": [{"field": col_name}]}) | |
2213 | 2215 | } |
2214 | 2216 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2215 | 2217 | assert res.status_code == 200, res.json |
2218 | 2220 | workspace = WorkspaceFactory.create() |
2219 | 2221 | creator = UserFactory.create() |
2220 | 2222 | vuln = VulnerabilityFactory.create( |
2221 | name="test", | |
2222 | description="test", | |
2223 | workspace=workspace, | |
2224 | severity="medium", | |
2225 | creator=creator, | |
2223 | name="test", | |
2224 | description="test", | |
2225 | workspace=workspace, | |
2226 | severity="medium", | |
2227 | creator=creator, | |
2226 | 2228 | ) |
2227 | 2229 | vuln2 = VulnerabilityFactory.create( |
2228 | name="test", | |
2229 | description="test", | |
2230 | workspace=workspace, | |
2231 | severity="medium", | |
2232 | creator=creator, | |
2230 | name="test", | |
2231 | description="test", | |
2232 | workspace=workspace, | |
2233 | severity="medium", | |
2234 | creator=creator, | |
2233 | 2235 | ) |
2234 | 2236 | vuln3 = VulnerabilityFactory.create( |
2235 | name="test2", | |
2236 | description="test", | |
2237 | workspace=workspace, | |
2238 | severity="medium", | |
2239 | creator=creator, | |
2237 | name="test2", | |
2238 | description="test", | |
2239 | workspace=workspace, | |
2240 | severity="medium", | |
2241 | creator=creator, | |
2240 | 2242 | ) |
2241 | 2243 | session.add(vuln) |
2242 | 2244 | session.add(vuln2) |
2243 | 2245 | session.add(vuln3) |
2244 | 2246 | session.commit() |
2245 | 2247 | data = { |
2246 | 'q': '{"group_by":[{"field":"name"}, {"field":"description"}]}' | |
2248 | 'q': '{"group_by":[{"field":"name"}, {"field":"description"}]}' | |
2247 | 2249 | } |
2248 | 2250 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2249 | 2251 | assert res.status_code == 200 |
2250 | 2252 | assert res.json['count'] == 2 |
2251 | expected = [{'count': 2, 'name': 'test', 'description': 'test'}, {'count': 1, 'name': 'test2', 'description': 'test'}] | |
2253 | expected = [{'count': 2, 'name': 'test', 'description': 'test'}, | |
2254 | {'count': 1, 'name': 'test2', 'description': 'test'}] | |
2252 | 2255 | assert [vuln['value'] for vuln in res.json['vulnerabilities']] == expected |
2253 | 2256 | |
2254 | 2257 | def test_vuln_restless_sort_by_(self, test_client, session): |
2257 | 2260 | host2 = HostFactory.create(workspace=workspace) |
2258 | 2261 | creator = UserFactory.create() |
2259 | 2262 | vuln = VulnerabilityFactory.create( |
2260 | name="test", | |
2261 | description="test", | |
2262 | workspace=workspace, | |
2263 | severity="critical", | |
2264 | creator=creator, | |
2265 | service=None, | |
2266 | host=host, | |
2263 | name="test", | |
2264 | description="test", | |
2265 | workspace=workspace, | |
2266 | severity="critical", | |
2267 | creator=creator, | |
2268 | service=None, | |
2269 | host=host, | |
2267 | 2270 | ) |
2268 | 2271 | vuln2 = VulnerabilityFactory.create( |
2269 | name="test 2", | |
2270 | description="test", | |
2271 | workspace=workspace, | |
2272 | severity="critical", | |
2273 | creator=creator, | |
2274 | service=None, | |
2275 | host=host, | |
2272 | name="test 2", | |
2273 | description="test", | |
2274 | workspace=workspace, | |
2275 | severity="critical", | |
2276 | creator=creator, | |
2277 | service=None, | |
2278 | host=host, | |
2276 | 2279 | ) |
2277 | 2280 | vuln3 = VulnerabilityFactory.create( |
2278 | name="test 3", | |
2279 | description="test", | |
2280 | workspace=workspace, | |
2281 | severity="low", | |
2282 | creator=creator, | |
2283 | service=None, | |
2284 | host=host, | |
2281 | name="test 3", | |
2282 | description="test", | |
2283 | workspace=workspace, | |
2284 | severity="low", | |
2285 | creator=creator, | |
2286 | service=None, | |
2287 | host=host, | |
2285 | 2288 | ) |
2286 | 2289 | vulns = VulnerabilityFactory.create_batch( |
2287 | 10, | |
2288 | workspace=workspace, | |
2289 | service=None, | |
2290 | severity="medium", | |
2291 | host=host2, | |
2290 | 10, | |
2291 | workspace=workspace, | |
2292 | service=None, | |
2293 | severity="medium", | |
2294 | host=host2, | |
2292 | 2295 | ) |
2293 | 2296 | session.add(vuln) |
2294 | 2297 | session.add(vuln2) |
2295 | 2298 | session.add(vuln3) |
2296 | 2299 | session.add_all(vulns) |
2297 | 2300 | session.commit() |
2298 | query = {"order_by":[ | |
2299 | {"field":"host__vulnerability_critical_generic_count", "direction": "desc"}, | |
2300 | {"field":"host__vulnerability_high_generic_count", "direction": "desc"}, | |
2301 | {"field":"host__vulnerability_medium_generic_count", "direction": "desc"}, | |
2301 | query = {"order_by": [ | |
2302 | {"field": "host__vulnerability_critical_generic_count", "direction": "desc"}, | |
2303 | {"field": "host__vulnerability_high_generic_count", "direction": "desc"}, | |
2304 | {"field": "host__vulnerability_medium_generic_count", "direction": "desc"}, | |
2302 | 2305 | ], |
2303 | "filters": [{"or": [ | |
2304 | {"name": "severity", "op": "==", "val": "critical"}, | |
2305 | {"name": "severity", "op": "==", "val": "high"}, | |
2306 | {"name": "severity", "op": "==", "val": "medium"}, | |
2307 | ]}] | |
2306 | "filters": [{"or": [ | |
2307 | {"name": "severity", "op": "==", "val": "critical"}, | |
2308 | {"name": "severity", "op": "==", "val": "high"}, | |
2309 | {"name": "severity", "op": "==", "val": "medium"}, | |
2310 | ]}] | |
2308 | 2311 | } |
2309 | 2312 | |
2310 | 2313 | data = { |
2311 | 'q': json.dumps(query) | |
2314 | 'q': json.dumps(query) | |
2312 | 2315 | } |
2313 | 2316 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2314 | 2317 | assert res.status_code == 200 |
2322 | 2325 | session.add(vuln) |
2323 | 2326 | session.commit() |
2324 | 2327 | data = { |
2325 | 'q': json.dumps({"filters":[{"name":"creator","op":"eq","val": vuln.creator.username}]}) | |
2328 | 'q': json.dumps({"filters": [{"name": "creator", "op": "eq", "val": vuln.creator.username}]}) | |
2326 | 2329 | } |
2327 | 2330 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/vulns/filter'), query_string=data) |
2328 | 2331 | assert res.status_code == 200 |
2422 | 2425 | session.add(confirmed_vulns) |
2423 | 2426 | session.commit() |
2424 | 2427 | res = test_client.get( |
2425 | self.check_url(urljoin(self.url(workspace=workspace), 'export_csv/')) + | |
2426 | '?q={"filters":[{"name":"confirmed","op":"==","val":"true"}]}' | |
2428 | self.check_url(urljoin(self.url(workspace=workspace), 'export_csv/')) | |
2429 | + '?q={"filters":[{"name":"confirmed","op":"==","val":"true"}]}' | |
2427 | 2430 | ) |
2428 | 2431 | assert res.status_code == 200 |
2429 | 2432 | assert self._verify_csv(res.data, confirmed=True) |
2449 | 2452 | session.add(confirmed_vulns) |
2450 | 2453 | session.commit() |
2451 | 2454 | res = test_client.get( |
2452 | self.check_url(urljoin(self.url(workspace=workspace), 'export_csv/')) + | |
2453 | '?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}' | |
2455 | self.check_url(urljoin(self.url(workspace=workspace), 'export_csv/')) | |
2456 | + '?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}' | |
2454 | 2457 | ) |
2455 | 2458 | assert res.status_code == 200 |
2456 | 2459 | assert self._verify_csv(res.data, confirmed=True, severity='critical') |
2461 | 2464 | session.add(self.first_object) |
2462 | 2465 | session.commit() |
2463 | 2466 | res = test_client.get( |
2464 | self.check_url(urljoin(self.url(), 'export_csv/')) + | |
2465 | '?confirmed=true' | |
2467 | self.check_url(urljoin(self.url(), 'export_csv/')) | |
2468 | + '?confirmed=true' | |
2466 | 2469 | ) |
2467 | 2470 | assert res.status_code == 200 |
2468 | 2471 | self._verify_csv(res.data, confirmed=True) |
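Both filtered variants above hit the same `export_csv/` endpoint; the `q` payload follows the filter-endpoint shape, while simple flags such as `confirmed=true` can be passed directly. A sketch with a placeholder workspace:

    res = test_client.get('/v2/ws/demo/vulns/export_csv/'
                          '?q={"filters":[{"name":"severity","op":"==","val":"critical"}]}')
    assert res.status_code == 200  # the response body is the CSV document itself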
2523 | 2526 | self.first_object.custom_fields = {"cvss": "9", "invalid": "not shown"} |
2524 | 2527 | # another case with custom fields as None
2525 | 2528 | vuln = VulnerabilityFactory.create() |
2526 | vuln.custom_fields=None | |
2529 | vuln.custom_fields = None | |
2527 | 2530 | session.add(vuln) |
2528 | 2531 | session.commit() |
2529 | 2532 | |
2533 | 2536 | def _verify_csv(self, raw_csv_data, confirmed=False, severity=None): |
2534 | 2537 | custom_fields = [custom_field.field_name for custom_field in CustomFieldsSchema.query.all()] |
2535 | 2538 | vuln_headers = [ |
2536 | "confirmed", "id", "date", "name", "severity", "service", | |
2539 | "confirmed", "id", "date", "name", "severity", "service", | |
2537 | 2540 | "target", "desc", "status", "hostnames", "comments", "owner", |
2538 | 2541 | "os", "resolution", "refs", "easeofresolution", "web_vulnerability", |
2539 | 2542 | "data", "website", "path", "status_code", "request", "response", "method", |
2560 | 2563 | return False |
2561 | 2564 | # test custom fields |
2562 | 2565 | for c_index, custom_field in enumerate(custom_fields): |
2563 | if vuln.custom_fields[custom_field] != line['cf_'+custom_field]: | |
2566 | if vuln.custom_fields[custom_field] != line['cf_' + custom_field]: | |
2564 | 2567 | return False |
2565 | 2568 | |
2566 | #test hosts | |
2569 | # test hosts | |
2567 | 2570 | host = Host.query.filter(Host.id == line['host_id']).first() |
2568 | 2571 | if host: |
2569 | 2572 | if host.ip != line['target']: |
2604 | 2607 | view_class = VulnerabilityV3View |
2605 | 2608 | |
2606 | 2609 | def url(self, obj=None, workspace=None): |
2607 | return v2_to_v3(super(TestListVulnerabilityViewV3, self).url(obj, workspace)) | |
2610 | return v2_to_v3(super().url(obj, workspace)) | |
2608 | 2611 | |
2609 | 2612 | def check_url(self, url): |
2610 | 2613 | return v2_to_v3(url) |
2654 | 2657 | session.add(custom_field_schema) |
2655 | 2658 | session.commit() |
2656 | 2659 | data = { |
2657 | 'name': 'Test Alert policy_violations', | |
2658 | 'severity': 'informational', | |
2659 | 'creator': 'Zap', | |
2660 | 'parent_type': 'Host', | |
2661 | 'parent': host.id, | |
2662 | 'type': 'Vulnerability', | |
2663 | 'custom_fields': { | |
2660 | 'name': 'Test Alert policy_violations', | |
2661 | 'severity': 'informational', | |
2662 | 'creator': 'Zap', | |
2663 | 'parent_type': 'Host', | |
2664 | 'parent': host.id, | |
2665 | 'type': 'Vulnerability', | |
2666 | 'custom_fields': { | |
2664 | 2667 | 'cvss': '321321', |
2665 | } | |
2668 | } | |
2666 | 2669 | } |
2667 | 2670 | res = test_client.post(self.url(), data=data) |
2668 | 2671 | |
2669 | 2672 | assert res.status_code == 201 |
2670 | 2673 | assert res.json['custom_fields']['cvss'] == '321321' |
2671 | 2674 | |
2672 | def test_create_vuln_with_custom_fields_using_field_display_name_continues_with_warning(self, test_client, second_workspace, session, caplog): | |
2675 | def test_create_vuln_with_custom_fields_using_field_display_name_continues_with_warning(self, test_client, | |
2676 | second_workspace, session, | |
2677 | caplog): | |
2673 | 2678 | host = HostFactory.create(workspace=self.workspace) |
2674 | 2679 | custom_field_schema = CustomFieldsSchemaFactory( |
2675 | 2680 | field_name='cvss', |
2681 | 2686 | session.add(custom_field_schema) |
2682 | 2687 | session.commit() |
2683 | 2688 | data = { |
2684 | 'name': 'Test Alert policy_violations', | |
2685 | 'severity': 'informational', | |
2686 | 'creator': 'Zap', | |
2687 | 'parent_type': 'Host', | |
2688 | 'parent': host.id, | |
2689 | 'type': 'Vulnerability', | |
2690 | 'custom_fields': { | |
2689 | 'name': 'Test Alert policy_violations', | |
2690 | 'severity': 'informational', | |
2691 | 'creator': 'Zap', | |
2692 | 'parent_type': 'Host', | |
2693 | 'parent': host.id, | |
2694 | 'type': 'Vulnerability', | |
2695 | 'custom_fields': { | |
2691 | 2696 | 'CVSS': '321321', # here we use the display_name and not the field_name
2692 | } | |
2697 | } | |
2693 | 2698 | } |
2694 | 2699 | res = test_client.post(self.url(), data=data) |
2695 | 2700 | |
2708 | 2713 | session.add(custom_field_schema) |
2709 | 2714 | session.commit() |
2710 | 2715 | data = { |
2711 | 'name': 'Test Alert policy_violations', | |
2712 | 'severity': 'informational', | |
2713 | 'creator': 'Zap', | |
2714 | 'parent_type': 'Host', | |
2715 | 'parent': host.id, | |
2716 | 'type': 'Vulnerability', | |
2717 | 'custom_fields': { | |
2716 | 'name': 'Test Alert policy_violations', | |
2717 | 'severity': 'informational', | |
2718 | 'creator': 'Zap', | |
2719 | 'parent_type': 'Host', | |
2720 | 'parent': host.id, | |
2721 | 'type': 'Vulnerability', | |
2722 | 'custom_fields': { | |
2718 | 2723 | 'changes': ['1', '2', '3'], |
2719 | } | |
2724 | } | |
2720 | 2725 | } |
2721 | 2726 | res = test_client.post(self.url(), data=data) |
2722 | 2727 | |
2735 | 2740 | session.add(custom_field_schema) |
2736 | 2741 | session.commit() |
2737 | 2742 | data = { |
2738 | 'name': 'Test Alert policy_violations', | |
2739 | 'severity': 'informational', | |
2740 | 'creator': 'Zap', | |
2741 | 'parent_type': 'Host', | |
2742 | 'parent': host.id, | |
2743 | 'type': 'Vulnerability', | |
2744 | 'custom_fields': { | |
2743 | 'name': 'Test Alert policy_violations', | |
2744 | 'severity': 'informational', | |
2745 | 'creator': 'Zap', | |
2746 | 'parent_type': 'Host', | |
2747 | 'parent': host.id, | |
2748 | 'type': 'Vulnerability', | |
2749 | 'custom_fields': { | |
2745 | 2750 | 'cvss': 'pepe', |
2746 | } | |
2751 | } | |
2747 | 2752 | } |
2748 | 2753 | res = test_client.post(self.url(), data=data) |
2749 | 2754 | |
2750 | 2755 | assert res.status_code == 400 |
2751 | 2756 | |
2752 | def test_create_vuln_with_invalid_custom_fields_continues_with_warning(self, test_client, second_workspace, session, caplog): | |
2757 | def test_create_vuln_with_invalid_custom_fields_continues_with_warning(self, test_client, second_workspace, session, | |
2758 | caplog): | |
2753 | 2759 | host = HostFactory.create(workspace=self.workspace) |
2754 | 2760 | session.add(host) |
2755 | 2761 | session.commit() |
2756 | 2762 | data = { |
2757 | 'name': 'Test Alert policy_violations', | |
2758 | 'severity': 'informational', | |
2759 | 'creator': 'Zap', | |
2760 | 'parent_type': 'Host', | |
2761 | 'parent': host.id, | |
2762 | 'type': 'Vulnerability', | |
2763 | 'custom_fields': { | |
2763 | 'name': 'Test Alert policy_violations', | |
2764 | 'severity': 'informational', | |
2765 | 'creator': 'Zap', | |
2766 | 'parent_type': 'Host', | |
2767 | 'parent': host.id, | |
2768 | 'type': 'Vulnerability', | |
2769 | 'custom_fields': { | |
2764 | 2770 | 'CVSS': '321321', |
2765 | } | |
2771 | } | |
2766 | 2772 | } |
2767 | 2773 | res = test_client.post(self.url(), data=data) |
2768 | 2774 | |
2995 | 3001 | assert cmd_obj.object_id == res.json['_id'] |
2996 | 3002 | assert res.json['tool'] == command.tool |
2997 | 3003 | |
3004 | @pytest.mark.parametrize('refs', [ | |
3005 | ('cve', 'CVE-2017-0002'), | |
3006 | ('owasp', 'https://www.owasp.org/index.php/XSS_%28Cross_Site_Scripting%29_Prevention_Cheat_Sheet'), | |
3007 | ('cwe', 'CWE-135'), | |
3008 | ('cvss', 'CVSS v2 Vector(AV:A/AC:M/Au:S/C:P/I:P/A:N)'), | |
3009 | ]) | |
3010 | def test_vuln_with_specific_refs(self, host_with_hostnames, test_client, session, refs): | |
3011 | ref_name, ref_example = refs | |
3012 | raw_data_vuln = _create_post_data_vulnerability( | |
3013 | name='New vuln 1', | |
3014 | vuln_type='Vulnerability', | |
3015 | parent_id=host_with_hostnames.id, | |
3016 | parent_type='Host', | |
3017 | refs=[ref_example], | |
3018 | policyviolations=[], | |
3019 | description='helloworld 1', | |
3020 | severity='low', | |
3021 | ) | |
3022 | ||
3023 | post_response = test_client.post(self.url(workspace=host_with_hostnames.workspace), data=raw_data_vuln) | |
3024 | vuln_1_id = post_response.json['obj_id'] | |
3025 | get_response = test_client.get(self.url(workspace=host_with_hostnames.workspace, obj=vuln_1_id)) | |
3026 | ||
3027 | assert get_response.status_code == 200 | |
3028 | assert ref_name in get_response.json | |
3029 | assert 1 == len(get_response.json[ref_name]) | |
3030 | assert ref_example == get_response.json[ref_name][0] | |
3031 | ||
2998 | 3032 | |
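The new `test_vuln_with_specific_refs` pins down that a reference submitted through the flat `refs` list comes back classified under a typed key (`cve`, `owasp`, `cwe` or `cvss`) on read. A sketch of that round trip, reusing the module's `_create_post_data_vulnerability` helper inside the same test class (the `host` fixture is a placeholder):

    raw = _create_post_data_vulnerability(
        name='refs demo', vuln_type='Vulnerability',
        parent_id=host.id, parent_type='Host',
        refs=['CVE-2017-0002'], policyviolations=[],
        description='demo', severity='low')
    res = test_client.post(self.url(workspace=host.workspace), data=raw)
    vuln = test_client.get(self.url(workspace=host.workspace, obj=res.json['obj_id']))
    assert vuln.json['cve'] == ['CVE-2017-0002']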
2999 | 3033 | class TestCustomFieldVulnerabilityV3(TestCustomFieldVulnerability, PatchableTestsMixin): |
3000 | 3034 | view_class = VulnerabilityV3View |
3001 | 3035 | |
3002 | 3036 | def url(self, obj=None, workspace=None): |
3003 | return v2_to_v3(super(TestCustomFieldVulnerabilityV3, self).url(obj, workspace)) | |
3037 | return v2_to_v3(super().url(obj, workspace)) | |
3004 | 3038 | |
3005 | 3039 | def check_url(self, url): |
3006 | 3040 | return v2_to_v3(url) |
3012 | 3046 | @pytest.mark.skip(reason="To be reimplemented") |
3013 | 3047 | def test_bulk_delete_vuln_severity(self, host_with_hostnames, test_client, session): |
3014 | 3048 | pass |
3015 | ||
3016 | 3049 | |
3017 | 3050 | |
3018 | 3051 | @pytest.mark.usefixtures('logged_user') |
3038 | 3071 | view_class = VulnerabilityV3View |
3039 | 3072 | |
3040 | 3073 | def url(self, obj=None, workspace=None): |
3041 | return v2_to_v3(super(TestVulnerabilityCustomFieldsV3, self).url(obj, workspace)) | |
3074 | return v2_to_v3(super().url(obj, workspace)) | |
3042 | 3075 | |
3043 | 3076 | |
3044 | 3077 | @pytest.mark.usefixtures('logged_user') |
3058 | 3091 | session.commit() |
3059 | 3092 | |
3060 | 3093 | query_filter = {"filters": |
3061 | [{"name":"hostnames","op":"eq","val":"pepe"}] | |
3062 | } | |
3094 | [{"name": "hostnames", "op": "eq", "val": "pepe"}] | |
3095 | } | |
3063 | 3096 | res = test_client.get( |
3064 | 3097 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3065 | 3098 | ) |
3079 | 3112 | session.commit() |
3080 | 3113 | |
3081 | 3114 | query_filter = {"filters": |
3082 | [{"name":"hostnames","op":"eq","val":"pepe"}] | |
3083 | } | |
3115 | [{"name": "hostnames", "op": "eq", "val": "pepe"}] | |
3116 | } | |
3084 | 3117 | res = test_client.get( |
3085 | 3118 | self.check_url(f'/v2/ws/{workspace.name}/vulns/') + f'?q={json.dumps(query_filter)}' |
3086 | 3119 | ) |
3122 | 3155 | |
3123 | 3156 | def test_search_code_attribute_bug(self, workspace, test_client, session): |
3124 | 3157 | query_filter = {"filters": |
3125 | [{"name":"code", "op": "eq", "val": "test"}] | |
3158 | [{"name": "code", "op": "eq", "val": "test"}] | |
3126 | 3159 | } |
3127 | 3160 | res = test_client.get( |
3128 | 3161 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3140 | 3173 | session.add(host) |
3141 | 3174 | session.commit() |
3142 | 3175 | |
3143 | query_filter = {"filters":[ | |
3144 | {"and": [{"name": "hostnames","op": "eq", "val": "pepe"}]} | |
3176 | query_filter = {"filters": [ | |
3177 | {"and": [{"name": "hostnames", "op": "eq", "val": "pepe"}]} | |
3145 | 3178 | ]} |
3146 | 3179 | res = test_client.get( |
3147 | 3180 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3156 | 3189 | workspace = WorkspaceFactory.create() |
3157 | 3190 | host = HostFactory.create(workspace=workspace) |
3158 | 3191 | vulns = VulnerabilityFactory.create_batch(10, |
3159 | workspace=workspace, | |
3160 | severity='high' | |
3161 | ) | |
3192 | workspace=workspace, | |
3193 | severity='high' | |
3194 | ) | |
3162 | 3195 | session.add_all(vulns) |
3163 | 3196 | web_vulns = VulnerabilityWebFactory.create_batch(10, |
3164 | workspace=workspace, | |
3165 | severity='high' | |
3166 | ) | |
3197 | workspace=workspace, | |
3198 | severity='high' | |
3199 | ) | |
3167 | 3200 | session.add_all(web_vulns) |
3168 | 3201 | session.add(host) |
3169 | 3202 | session.commit() |
3171 | 3204 | expected_vulns = set([vuln.id for vuln in vulns] + [vuln.id for vuln in web_vulns]) |
3172 | 3205 | for offset in range(0, 2): |
3173 | 3206 | query_filter = { |
3174 | "filters":[{"name":"severity","op":"eq","val":"high"}], | |
3207 | "filters": [{"name": "severity", "op": "eq", "val": "high"}], | |
3175 | 3208 | "limit": 10, |
3176 | 3209 | "offset": offset * 10, |
3177 | 3210 | } |
3191 | 3224 | workspace = WorkspaceFactory.create() |
3192 | 3225 | host = HostFactory.create(workspace=workspace) |
3193 | 3226 | vulns = VulnerabilityWebFactory.create_batch(100, |
3194 | workspace=workspace, | |
3195 | severity='high' | |
3196 | ) | |
3227 | workspace=workspace, | |
3228 | severity='high' | |
3229 | ) | |
3197 | 3230 | session.add_all(vulns) |
3198 | 3231 | session.add(host) |
3199 | 3232 | session.commit() |
3201 | 3234 | expected_vulns = set([vuln.id for vuln in vulns]) |
3202 | 3235 | for offset in range(0, 10): |
3203 | 3236 | query_filter = { |
3204 | "filters":[{"name":"severity","op":"eq","val":"high"}], | |
3237 | "filters": [{"name": "severity", "op": "eq", "val": "high"}], | |
3205 | 3238 | "limit": 10, |
3206 | 3239 | "offset": 10 * offset, |
3207 | 3240 | } |
3220 | 3253 | workspace = WorkspaceFactory.create() |
3221 | 3254 | host = HostFactory.create(workspace=workspace) |
3222 | 3255 | vulns = VulnerabilityWebFactory.create_batch(10, |
3223 | workspace=workspace, | |
3224 | severity='high' | |
3225 | ) | |
3256 | workspace=workspace, | |
3257 | severity='high' | |
3258 | ) | |
3226 | 3259 | session.add_all(vulns) |
3227 | 3260 | vulns = VulnerabilityFactory.create_batch(10, |
3228 | workspace=workspace, | |
3229 | severity='low' | |
3230 | ) | |
3261 | workspace=workspace, | |
3262 | severity='low' | |
3263 | ) | |
3231 | 3264 | session.add_all(vulns) |
3232 | 3265 | med_vulns = VulnerabilityFactory.create_batch(10, |
3233 | workspace=workspace, | |
3234 | severity='medium' | |
3235 | ) | |
3266 | workspace=workspace, | |
3267 | severity='medium' | |
3268 | ) | |
3236 | 3269 | session.add_all(med_vulns) |
3237 | 3270 | session.add(host) |
3238 | 3271 | session.commit() |
3240 | 3273 | expected_vulns = set([vuln.id for vuln in med_vulns]) |
3241 | 3274 | for offset in range(0, 10): |
3242 | 3275 | query_filter = { |
3243 | "filters":[{"name":"severity","op":"eq","val":"medium"}], | |
3244 | "limit":"1", | |
3276 | "filters": [{"name": "severity", "op": "eq", "val": "medium"}], | |
3277 | "limit": "1", | |
3245 | 3278 | "offset": offset, |
3246 | 3279 | } |
3247 | 3280 | res = test_client.get( |
3353 | 3386 | |
3354 | 3387 | @pytest.mark.skip_sql_dialect('sqlite') |
3355 | 3388 | def test_search_hypothesis_test_found_case(self, test_client, session, workspace): |
3356 | query_filter = {'filters': [{'name': 'host_id', 'op': 'not_in', 'val': '\U0010a1a7\U00093553\U000eb46a\x1e\x10\r\x18%\U0005ddfa0\x05\U000fdeba\x08\x04絮'}]} | |
3389 | query_filter = {'filters': [{'name': 'host_id', 'op': 'not_in', | |
3390 | 'val': '\U0010a1a7\U00093553\U000eb46a\x1e\x10\r\x18%\U0005ddfa0\x05\U000fdeba\x08\x04絮'}]} | |
3357 | 3391 | res = test_client.get( |
3358 | 3392 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3359 | 3393 | ) |
3405 | 3439 | |
3406 | 3440 | @pytest.mark.skip_sql_dialect('sqlite') |
3407 | 3441 | def test_search_hypothesis_test_found_case_6(self, test_client, session, workspace): |
3408 | query_filter = {'filters': [{'name': 'resolution', 'op': 'any', 'val': ''}]} | |
3442 | query_filter = {'filters': [{'name': 'resolution', 'op': '==', 'val': ''}]} | |
3409 | 3443 | res = test_client.get( |
3410 | 3444 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3411 | 3445 | ) |
3413 | 3447 | |
3414 | 3448 | @pytest.mark.skip_sql_dialect('sqlite') |
3415 | 3449 | def test_search_hypothesis_test_found_case_7(self, test_client, session, workspace): |
3416 | query_filter = {'filters': [{'name': 'name', 'op': '>', 'val': '\U0004e755\U0007a789\U000e02d1\U000b3d32\x10\U000ad0e2,\x05\x1a'}, {'name': 'creator', 'op': 'eq', 'val': 21883}]} | |
3450 | query_filter = {'filters': [ | |
3451 | {'name': 'name', 'op': '>', 'val': '\U0004e755\U0007a789\U000e02d1\U000b3d32\x10\U000ad0e2,\x05\x1a'}, | |
3452 | {'name': 'creator', 'op': 'eq', 'val': 21883}]} | |
3417 | 3453 | res = test_client.get( |
3418 | 3454 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3419 | 3455 | ) |
3440 | 3476 | |
3441 | 3477 | @pytest.mark.skip_sql_dialect('sqlite') |
3442 | 3478 | def test_search_hypothesis_test_found_case_9(self, test_client, session, workspace): |
3443 | query_filter = {'filters': [{'name': 'issuetracker', 'op': 'not_equal_to', 'val': '0\x00\U00034383$\x13-\U000375fb\U0007add2\x01\x01\U0010c23a'}]} | |
3479 | query_filter = {'filters': [{'name': 'issuetracker', 'op': 'not_equal_to', | |
3480 | 'val': '0\x00\U00034383$\x13-\U000375fb\U0007add2\x01\x01\U0010c23a'}]} | |
3444 | 3481 | |
3445 | 3482 | res = test_client.get( |
3446 | 3483 | self.check_url(f'/v2/ws/{workspace.name}/vulns/filter?q={json.dumps(query_filter)}') |
3513 | 3550 | session.commit() |
3514 | 3551 | query_filter = { |
3515 | 3552 | "group_by": |
3516 | [{"field":"severity"}], | |
3553 | [{"field": "severity"}], | |
3517 | 3554 | "order_by": |
3518 | [{"field":"name","direction":"asc"}] | |
3555 | [{"field": "name", "direction": "asc"}] | |
3519 | 3556 | } |
3520 | 3557 | |
3521 | 3558 | res = test_client.get( |
3525 | 3562 | |
3526 | 3563 | @pytest.mark.skip_sql_dialect('sqlite') |
3527 | 3564 | @pytest.mark.parametrize("sort_order", [ |
3528 | {"direction":"asc", "expected": ['a', 'A', 'b', 'B']}, | |
3529 | {"direction":"desc", "expected": ['B', 'b', 'A', 'a']} | |
3565 | {"direction": "asc", "expected": ['a', 'A', 'b', 'B']}, | |
3566 | {"direction": "desc", "expected": ['B', 'b', 'A', 'a']} | |
3530 | 3567 | ]) |
3531 | 3568 | def test_filter_order_by_name_directions(self, sort_order, test_client, session, workspace): |
3532 | 3569 | vuln_1 = VulnerabilityWebFactory.create(name='a', workspace=workspace, severity='high') |
3538 | 3575 | session.commit() |
3539 | 3576 | query_filter = { |
3540 | 3577 | "order_by": |
3541 | [{"field":"name","direction": sort_order["direction"]}], | |
3578 | [{"field": "name", "direction": sort_order["direction"]}], | |
3542 | 3579 | "limit": 10, |
3543 | 3580 | "offset": 0 |
3544 | 3581 | } |
3562 | 3599 | session.commit() |
3563 | 3600 | query_filter = { |
3564 | 3601 | "order_by": |
3565 | [{"field":"severity","direction":"asc"}], | |
3602 | [{"field": "severity", "direction": "asc"}], | |
3566 | 3603 | "limit": 10, |
3567 | 3604 | "offset": 0 |
3568 | 3605 | } |
3576 | 3613 | assert expected_order == [vuln['value']['severity'] for vuln in res.json['vulnerabilities']] |
3577 | 3614 | |
3578 | 3615 | def test_filter_by_creator_command_id(self, |
3579 | test_client, | |
3580 | session, | |
3581 | workspace, | |
3582 | command_object_factory, | |
3583 | empty_command_factory): | |
3616 | test_client, | |
3617 | session, | |
3618 | workspace, | |
3619 | command_object_factory, | |
3620 | empty_command_factory): | |
3584 | 3621 | |
3585 | 3622 | command = empty_command_factory.create(workspace=workspace, |
3586 | 3623 | tool="metasploit") |
3600 | 3637 | workspace=workspace) |
3601 | 3638 | session.commit() |
3602 | 3639 | |
3603 | query_filter ={ | |
3604 | "filters":[{"and":[ | |
3605 | {"name":"creator_command_id","op":"==","val":command.id}] | |
3640 | query_filter = { | |
3641 | "filters": [{"and": [ | |
3642 | {"name": "creator_command_id", "op": "==", "val": command.id}] | |
3606 | 3643 | }], |
3607 | "offset":0, | |
3608 | "limit":40 | |
3644 | "offset": 0, | |
3645 | "limit": 40 | |
3609 | 3646 | } |
3610 | 3647 | |
3611 | 3648 | res = test_client.get( |
3618 | 3655 | class TestVulnerabilitySearchV3(TestVulnerabilitySearch): |
3619 | 3656 | def check_url(self, url): |
3620 | 3657 | return v2_to_v3(url) |
3658 | ||
3621 | 3659 | |
3622 | 3660 | def test_type_filter(workspace, session, |
3623 | 3661 | vulnerability_factory, |
3735 | 3773 | 'parent_type': st.sampled_from([parent_type]), |
3736 | 3774 | 'type': st.one_of( |
3737 | 3775 | st.sampled_from([ |
3738 | "Vulnerability", "Invalid", None]), | |
3776 | "Vulnerability", "Invalid", None]), | |
3739 | 3777 | st.text() |
3740 | 3778 | ), |
3741 | 3779 | 'ws': st.one_of(st.none(), st.text()), |
3743 | 3781 | 'data': st.one_of(st.none(), st.text()), |
3744 | 3782 | 'desc': st.one_of(st.none(), st.text()), |
3745 | 3783 | 'easeofresolution': st.sampled_from(['trivial', |
3746 | 'simple', | |
3747 | 'moderate', | |
3748 | 'difficult', | |
3749 | 'infeasible']), | |
3784 | 'simple', | |
3785 | 'moderate', | |
3786 | 'difficult', | |
3787 | 'infeasible']), | |
3750 | 3788 | 'impact': st.fixed_dictionaries({'accountability': st.booleans(), 'availability': st.booleans(), |
3751 | 'confidentiality': st.booleans(), | |
3752 | 'integrity': st.booleans()}), | |
3789 | 'confidentiality': st.booleans(), | |
3790 | 'integrity': st.booleans()}), | |
3753 | 3791 | 'name': st.one_of(st.none(), st.text()), |
3754 | 3792 | 'owned': st.booleans(), |
3755 | 3793 | 'policyviolations': st.lists(st.one_of(st.none(), st.text())), |
3756 | 3794 | 'refs': st.lists(st.one_of(st.none(), st.text())), |
3757 | 3795 | 'resolution': st.one_of(st.none(), st.text()), |
3758 | 3796 | 'severity': st.sampled_from(['critical', |
3759 | 'high', | |
3760 | 'med', | |
3761 | 'medium', | |
3762 | 'low', | |
3763 | 'informational', | |
3764 | 'unclassified']), | |
3797 | 'high', | |
3798 | 'med', | |
3799 | 'medium', | |
3800 | 'low', | |
3801 | 'informational', | |
3802 | 'unclassified']), | |
3765 | 3803 | 'status': st.sampled_from(['open', |
3766 | 'closed', | |
3767 | 're-opened', | |
3768 | 'risk-accepted']), | |
3804 | 'closed', | |
3805 | 're-opened', | |
3806 | 'risk-accepted']), | |
3769 | 3807 | '_attachments': st.fixed_dictionaries({}), |
3770 | 3808 | 'description': st.one_of(st.none(), st.text()), |
3771 | 3809 | 'protocol': st.one_of(st.none(), st.text()), |
3774 | 3812 | vuln_dict.update({ |
3775 | 3813 | '_id': st.integers(min_value=vuln.id, max_value=vuln.id), |
3776 | 3814 | 'id': st.integers(min_value=vuln.id, max_value=vuln.id) |
3777 | }) | |
3815 | }) | |
3778 | 3816 | return st.fixed_dictionaries(vuln_dict) |
3779 | 3817 | |
3780 | 3818 | |
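`st.fixed_dictionaries` draws one value per key from the per-key strategies, and an integer strategy with equal `min_value`/`max_value` is effectively a constant, which is how the update above pins `_id`/`id` to the existing vuln's id. A standalone sketch:

    from hypothesis import strategies as st

    point = st.fixed_dictionaries({
        'x': st.integers(min_value=7, max_value=7),  # always draws 7
        'y': st.one_of(st.none(), st.text()),
    })
    print(point.example())  # e.g. {'x': 7, 'y': None}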
3789 | 3827 | |
3790 | 3828 | @given(VulnerabilityData) |
3791 | 3829 | def send_api_create_request(raw_data): |
3792 | ||
3793 | 3830 | ws_name = host_with_hostnames.workspace.name |
3794 | 3831 | res = test_client.post(f'/v2/ws/{ws_name}/vulns/', |
3795 | 3832 | data=raw_data) |
3797 | 3834 | |
3798 | 3835 | @given(VulnerabilityData) |
3799 | 3836 | def send_api_create_request_v3(raw_data): |
3800 | ||
3801 | 3837 | ws_name = host_with_hostnames.workspace.name |
3802 | 3838 | res = test_client.post(f'/v3/ws/{ws_name}/vulns/', |
3803 | 3839 | data=raw_data) |
3805 | 3841 | |
3806 | 3842 | @given(VulnerabilityDataWithId) |
3807 | 3843 | def send_api_update_request(raw_data): |
3808 | ||
3809 | 3844 | ws_name = host_with_hostnames.workspace.name |
3810 | 3845 | res = test_client.put(f"/v2/ws/{ws_name}/vulns/{raw_data['_id']}", |
3811 | data=raw_data) | |
3846 | data=raw_data) | |
3812 | 3847 | assert res.status_code in [200, 400, 409, 405] |
3813 | 3848 | |
3814 | 3849 | @given(VulnerabilityDataWithId) |
3815 | 3850 | def send_api_update_request_v3(raw_data): |
3816 | ||
3817 | 3851 | ws_name = host_with_hostnames.workspace.name |
3818 | 3852 | res = test_client.put(f"/v3/ws/{ws_name}/vulns/{raw_data['_id']}", |
3819 | data=raw_data) | |
3853 | data=raw_data) | |
3820 | 3854 | assert res.status_code in [200, 400, 409, 405] |
3821 | 3855 | |
3822 | 3856 | send_api_create_request() |
3847 | 3881 | ) |
3848 | 3882 | }) |
3849 | 3883 | |
3884 | ||
3850 | 3885 | @pytest.mark.usefixtures('logged_user') |
3851 | 3886 | @pytest.mark.hypothesis |
3852 | 3887 | @pytest.mark.usefixtures('ignore_nplusone') |
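The hunks above drive the vulns endpoints with Hypothesis-generated payloads and only assert on the response status, a classic property-based fuzz test. A minimal self-contained sketch of the same pattern, assuming a hypothetical /echo endpoint rather than Faraday's fixtures:

from flask import Flask, jsonify, request
from hypothesis import given, strategies as st

app = Flask(__name__)

@app.route('/echo', methods=['POST'])
def echo():
    # Stand-in for a real create endpoint: echo the form payload back.
    return jsonify(request.form.to_dict()), 200

@given(st.dictionaries(st.text(min_size=1), st.text()))
def test_echo_never_crashes(payload):
    res = app.test_client().post('/echo', data=payload)
    # Arbitrary payloads may be rejected, but must never 500.
    assert res.status_code in (200, 400)

test_echo_never_crashes()

Calling the @given-wrapped function directly runs Hypothesis's example loop, which is exactly how the tests above invoke send_api_create_request().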
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
26 | 26 | from tests.utils.url import v2_to_v3 |
27 | 27 | |
28 | 28 | TEMPLATES_DATA = [ |
29 | {'name': 'XML Injection (aka Blind XPath Injection) (Type: Base)', | |
30 | 'description': 'The software does not properly neutralize special elements that are u', | |
31 | 'resolution': 'resolved', | |
32 | 'severity': 'medium', | |
33 | 'create_date': datetime(2020, 5, 1, 11, 00), | |
34 | 'creator': 'testuser' | |
35 | }, | |
36 | {'name': 'xml InjectioN (aka Blind XPath Injection) (Type: Base)', | |
37 | 'description': 'THE SOFtware does not properly neutralize special elements that are', | |
38 | 'resolution': 'not resolved', | |
39 | 'severity': 'high', | |
40 | 'create_date': datetime(2020, 6, 1), | |
41 | 'creator': 'testuser2' | |
42 | } | |
29 | {'name': 'XML Injection (aka Blind XPath Injection) (Type: Base)', | |
30 | 'description': 'The software does not properly neutralize special elements that are u', | |
31 | 'resolution': 'resolved', | |
32 | 'severity': 'medium', | |
33 | 'create_date': datetime(2020, 5, 1, 11, 00), | |
34 | 'creator': 'testuser' | |
35 | }, | |
36 | {'name': 'xml InjectioN (aka Blind XPath Injection) (Type: Base)', | |
37 | 'description': 'THE SOFtware does not properly neutralize special elements that are', | |
38 | 'resolution': 'not resolved', | |
39 | 'severity': 'high', | |
40 | 'create_date': datetime(2020, 6, 1), | |
41 | 'creator': 'testuser2' | |
42 | } | |
43 | 43 | ] |
44 | 44 | |
45 | 45 | |
81 | 81 | |
82 | 82 | def _create_post_data_vulnerability_template(self, references): |
83 | 83 | data = { |
84 | "exploitation":"high", | |
85 | "references":references, | |
86 | "name":"name", | |
84 | "exploitation": "high", | |
85 | "references": references, | |
86 | "name": "name", | |
87 | 87 | "resolution": "resolution", |
88 | "cwe":"swe", | |
89 | "description":"desc"} | |
88 | "cwe": "swe", | |
89 | "description": "desc"} | |
90 | 90 | return data |
91 | 91 | |
92 | 92 | def test_create_new_vulnerability_template(self, session, test_client): |
107 | 107 | 'templates': TEMPLATES_DATA}, |
108 | 108 | {'field': 'name', 'op': 'eq', 'count': 1, |
109 | 109 | 'filtered_value': 'XML Injection (aka Blind XPath Injection) (Type: Base)', |
110 | 'expected_template_name':TEMPLATES_DATA[0]['name'], | |
110 | 'expected_template_name': TEMPLATES_DATA[0]['name'], | |
111 | 111 | 'templates': TEMPLATES_DATA}, |
112 | 112 | {'field': 'name', 'op': 'like', 'count': 1, |
113 | 113 | 'filtered_value': '% Injection (aka Blind XPath Injection)%', |
165 | 165 | )) |
166 | 166 | session.commit() |
167 | 167 | |
168 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' \ | |
169 | f'{{ "name": "{filters["field"]}",' \ | |
170 | f' "op": "{filters["op"]}", ' \ | |
171 | f' "val": "{filters["filtered_value"]}" }}]}}') | |
168 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' | |
169 | f'{{ "name": "{filters["field"]}",' | |
170 | f' "op": "{filters["op"]}", ' | |
171 | f' "val": "{filters["filtered_value"]}" }}]}}') | |
172 | 172 | |
173 | 173 | res = test_client.get(query) |
174 | 174 | assert res.status_code == 200 |
178 | 178 | |
179 | 179 | @pytest.mark.usefixtures('ignore_nplusone') |
180 | 180 | @pytest.mark.parametrize('filters', [ |
181 | {'field': 'creator_id', 'op': 'eq', 'count': 1, | |
182 | 'filtered_value': TEMPLATES_DATA[0]['creator'], | |
183 | 'expected_template_name': TEMPLATES_DATA[0]['name'], | |
184 | 'templates': TEMPLATES_DATA} | |
181 | {'field': 'creator_id', 'op': 'eq', 'count': 1, | |
182 | 'filtered_value': TEMPLATES_DATA[0]['creator'], | |
183 | 'expected_template_name': TEMPLATES_DATA[0]['name'], | |
184 | 'templates': TEMPLATES_DATA} | |
185 | 185 | ]) |
186 | 186 | # TODO: fix filter restless to filter by username |
187 | 187 | def test_filter_vuln_template_by_creator(self, session, test_client, filters): |
203 | 203 | )) |
204 | 204 | session.commit() |
205 | 205 | |
206 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' \ | |
207 | f'{{ "name": "{filters["field"]}",' \ | |
208 | f' "op": "{filters["op"]}", ' \ | |
209 | f' "val": "{templates[0].creator.id}" }}]}}') | |
206 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' | |
207 | f'{{ "name": "{filters["field"]}",' | |
208 | f' "op": "{filters["op"]}", ' | |
209 | f' "val": "{templates[0].creator.id}" }}]}}') | |
210 | 210 | |
211 | 211 | res = test_client.get(query) |
212 | 212 | assert res.status_code == 200 |
214 | 214 | if filters['count'] == 1: |
215 | 215 | assert res.json['rows'][0]['doc']['name'] == templates[0].name |
216 | 216 | |
217 | ||
218 | 217 | @pytest.mark.skip_sql_dialect('sqlite') |
219 | 218 | @pytest.mark.usefixtures('ignore_nplusone') |
220 | 219 | @pytest.mark.parametrize('filters', [ |
221 | {'field': 'create_date', 'op': 'eq', 'count': 1, | |
222 | 'filtered_value': "2020-05-01", | |
223 | 'expected_template_name': TEMPLATES_DATA[0]['name'], | |
224 | 'templates': TEMPLATES_DATA} | |
220 | {'field': 'create_date', 'op': 'eq', 'count': 1, | |
221 | 'filtered_value': "2020-05-01", | |
222 | 'expected_template_name': TEMPLATES_DATA[0]['name'], | |
223 | 'templates': TEMPLATES_DATA} | |
225 | 224 | ]) |
226 | 225 | def test_filter_vuln_template_by_create_date(self, session, test_client, filters): |
227 | 226 | templates = [] |
242 | 241 | )) |
243 | 242 | session.commit() |
244 | 243 | |
245 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' \ | |
246 | f'{{ "name": "{filters["field"]}",' \ | |
247 | f' "op": "{filters["op"]}", ' \ | |
248 | f' "val": "{filters["filtered_value"]}" }}]}}') | |
244 | query = self.check_url(f'/v2/vulnerability_template/filter?q={{"filters": [' | |
245 | f'{{ "name": "{filters["field"]}",' | |
246 | f' "op": "{filters["op"]}", ' | |
247 | f' "val": "{filters["filtered_value"]}" }}]}}') | |
249 | 248 | |
250 | 249 | res = test_client.get(query) |
251 | 250 | assert res.status_code == 200 |
325 | 324 | """ |
326 | 325 | |
327 | 326 | raw_data = { |
328 | "id":123010, | |
327 | "id": 123010, | |
329 | 328 | "cwe": "", |
330 | 329 | "description": "test2", |
331 | "desc":"test2", | |
332 | "exploitation":"critical", | |
333 | "name":"test2", | |
334 | "references":[], | |
335 | "refs":[], | |
336 | "resolution":"", | |
337 | "type":"vulnerability_template" | |
330 | "desc": "test2", | |
331 | "exploitation": "critical", | |
332 | "name": "test2", | |
333 | "references": [], | |
334 | "refs": [], | |
335 | "resolution": "", | |
336 | "type": "vulnerability_template" | |
338 | 337 | } |
339 | 338 | res = test_client.post(self.url(), data=raw_data) |
340 | 339 | assert res.status_code == 201 |
365 | 364 | session.commit() |
366 | 365 | |
367 | 366 | raw_data = { |
368 | "id":123010, | |
367 | "id": 123010, | |
369 | 368 | "cwe": "", |
370 | 369 | "description": "test2", |
371 | "desc":"test2", | |
372 | "exploitation":"critical", | |
373 | "name":"test2", | |
374 | "references":[], | |
375 | "refs":[], | |
376 | "resolution":"", | |
370 | "desc": "test2", | |
371 | "exploitation": "critical", | |
372 | "name": "test2", | |
373 | "references": [], | |
374 | "refs": [], | |
375 | "resolution": "", | |
377 | 376 | "type": "vulnerability_template", |
378 | 377 | "customfields": { |
379 | 378 | "cvss": "value", |
400 | 399 | raw_data = { |
401 | 400 | "cwe": "", |
402 | 401 | "description": "test2", |
403 | "desc":"test2", | |
404 | "exploitation":"critical", | |
405 | "name":"test2", | |
406 | "references":[], | |
407 | "refs":[], | |
408 | "resolution":"", | |
402 | "desc": "test2", | |
403 | "exploitation": "critical", | |
404 | "name": "test2", | |
405 | "references": [], | |
406 | "refs": [], | |
407 | "resolution": "", | |
409 | 408 | "type": "vulnerability_template", |
410 | 409 | "customfields": { |
411 | 410 | "cvss": "updated value", |
482 | 481 | assert len(res.json['vulns_created']) == expected_created_vuln_template |
483 | 482 | assert res.json['vulns_created'][0][1] == vuln_template_name |
484 | 483 | |
485 | ||
486 | 484 | def test_add_vuln_template_missing_required_fields(self, session, test_client, csrf_token): |
487 | 485 | expected_created_vuln_template = 1 |
488 | 486 | file_contents = b"""name,description\n |
625 | 623 | |
626 | 624 | class TestListVulnerabilityTemplateViewV3(TestListVulnerabilityTemplateView, PatchableTestsMixin): |
627 | 625 | def url(self, obj=None): |
628 | return v2_to_v3(super(TestListVulnerabilityTemplateViewV3, self).url(obj)) | |
626 | return v2_to_v3(super().url(obj)) | |
629 | 627 | |
630 | 628 | def check_url(self, url): |
631 | 629 | return v2_to_v3(url) |
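The filter endpoints exercised above take a flask-restless-style JSON document in the q query parameter, which the tests assemble with hand-escaped f-strings. A sketch of building the same query with json.dumps and urlencode instead (the helper name is illustrative):

import json
from urllib.parse import urlencode

def build_filter_url(base, field, op, val):
    # One top-level filter: {"filters": [{"name": ..., "op": ..., "val": ...}]}
    q = {"filters": [{"name": field, "op": op, "val": val}]}
    return f"{base}?{urlencode({'q': json.dumps(q)})}"

print(build_filter_url('/v2/vulnerability_template/filter',
                       'name', 'eq',
                       'XML Injection (aka Blind XPath Injection) (Type: Base)'))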
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | from builtins import str | |
7 | ||
8 | import pytest | |
9 | from faraday.server.api.modules.websocket_auth import decode_agent_websocket_token | |
10 | from tests.utils.url import v2_to_v3 | |
11 | ||
12 | ||
13 | class TestWebsocketAuthEndpoint: | |
14 | def check_url(self, url): | |
15 | return url | |
16 | ||
17 | def test_not_logged_in_request_fail(self, test_client, workspace): | |
18 | res = test_client.post(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
19 | assert res.status_code == 401 | |
20 | ||
21 | @pytest.mark.usefixtures('logged_user') | |
22 | def test_get_method_not_allowed(self, test_client, workspace): | |
23 | res = test_client.get(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
24 | assert res.status_code == 405 | |
25 | ||
26 | @pytest.mark.usefixtures('logged_user') | |
27 | def test_succeeds(self, test_client, workspace): | |
28 | res = test_client.post(self.check_url(f'/v2/ws/{workspace.name}/websocket_token/')) | |
29 | assert res.status_code == 200 | |
30 | ||
31 | # A token for that workspace should be generated.
32 | # This will break if we change the token generation | |
33 | # mechanism. | |
34 | assert res.json['token'].startswith(str(workspace.id)) | |
35 | ||
36 | ||
37 | class TestWebsocketAuthEndpointV3(TestWebsocketAuthEndpoint): | |
38 | def check_url(self, url): | |
39 | return v2_to_v3(url) | |
40 | ||
41 | ||
42 | class TestAgentWebsocketToken: | |
43 | ||
44 | def check_url(self, url): | |
45 | return url | |
46 | ||
47 | @pytest.mark.usefixtures('session') # I don't know why this is required | |
48 | def test_fails_without_authorization_header(self, test_client): | |
49 | res = test_client.post( | |
50 | self.check_url('/v2/agent_websocket_token/') | |
51 | ) | |
52 | assert res.status_code == 401 | |
53 | ||
54 | @pytest.mark.usefixtures('logged_user') | |
55 | def test_fails_with_logged_user(self, test_client): | |
56 | res = test_client.post( | |
57 | self.check_url('/v2/agent_websocket_token/') | |
58 | ) | |
59 | assert res.status_code == 401 | |
60 | ||
61 | @pytest.mark.usefixtures('logged_user') | |
62 | def test_fails_with_user_token(self, test_client, session): | |
63 | res = test_client.get(self.check_url('/v2/token/')) | |
64 | ||
65 | assert res.status_code == 200 | |
66 | ||
67 | headers = [('Authorization', 'Token ' + res.json)] | |
68 | ||
69 | # clean cookies make sure test_client has no session | |
70 | test_client.cookie_jar.clear() | |
71 | res = test_client.post( | |
72 | self.check_url('/v2/agent_websocket_token/'), | |
73 | headers=headers, | |
74 | ) | |
75 | assert res.status_code == 401 | |
76 | ||
77 | @pytest.mark.usefixtures('session') | |
78 | def test_fails_with_invalid_agent_token(self, test_client): | |
79 | headers = [('Authorization', 'Agent 13123')] | |
80 | res = test_client.post( | |
81 | self.check_url('/v2/agent_websocket_token/'), | |
82 | headers=headers, | |
83 | ) | |
84 | assert res.status_code == 403 | |
85 | ||
86 | @pytest.mark.usefixtures('session') | |
87 | def test_succeeds_with_agent_token(self, test_client, agent, session): | |
88 | session.add(agent) | |
89 | session.commit() | |
90 | assert agent.token | |
91 | headers = [('Authorization', 'Agent ' + agent.token)] | |
92 | res = test_client.post( | |
93 | self.check_url('/v2/agent_websocket_token/'), | |
94 | headers=headers, | |
95 | ) | |
96 | assert res.status_code == 200 | |
97 | decoded_agent = decode_agent_websocket_token(res.json['token']) | |
98 | assert decoded_agent == agent | |
99 | ||
100 | ||
101 | class TestAgentWebsocketTokenV3(TestAgentWebsocketToken): | |
102 | def check_url(self, url): | |
103 | return v2_to_v3(url) |
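Both test classes derive their v3 variants by rewriting URLs through v2_to_v3 from tests/utils/url.py. One plausible minimal implementation, assuming the v3 routes differ only in the version prefix:

def v2_to_v3(url):
    # Swap only the first occurrence, so path segments that merely
    # contain "/v2/" are left alone.
    return url.replace('/v2/', '/v3/', 1)

assert v2_to_v3('/v2/agent_websocket_token/') == '/v3/agent_websocket_token/'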
131 | 131 | vulns += vulnerability_web_factory.create_batch(2, workspace=self.first_object, |
132 | 132 | confirmed=True, status='open', severity='informational') |
133 | 133 | |
134 | ||
135 | ||
136 | 134 | session.add_all(vulns) |
137 | 135 | session.commit() |
138 | 136 | res = test_client.get(self.url(self.first_object) + querystring) |
144 | 142 | assert res.json['stats']['info_vulns'] == 2 |
145 | 143 | assert res.json['stats']['total_vulns'] == 2 |
146 | 144 | |
147 | ||
148 | 145 | @pytest.mark.parametrize('querystring', [ |
149 | 146 | '?status=closed' |
150 | 147 | ]) |
195 | 192 | vulns += vulnerability_web_factory.create_batch(2, workspace=self.first_object, |
196 | 193 | confirmed=True, status='open') |
197 | 194 | |
198 | ||
199 | ||
200 | 195 | session.add_all(vulns) |
201 | 196 | session.commit() |
202 | 197 | res = test_client.get(self.url(self.first_object) + querystring) |
205 | 200 | assert res.json['stats']['web_vulns'] == 2 |
206 | 201 | assert res.json['stats']['std_vulns'] == 11 |
207 | 202 | assert res.json['stats']['total_vulns'] == 13 |
208 | ||
209 | 203 | |
210 | 204 | @pytest.mark.parametrize('querystring', [ |
211 | 205 | '?confirmed=1', |
230 | 224 | '?confirmed=0', |
231 | 225 | '?confirmed=false' |
232 | 226 | ]) |
233 | def test_vuln_count_confirmed(self, | |
227 | def test_vuln_count_confirmed_2(self, | |
234 | 228 | vulnerability_factory, |
235 | 229 | test_client, |
236 | 230 | session, |
247 | 241 | |
248 | 242 | def test_create_fails_with_valid_duration(self, session, test_client): |
249 | 243 | workspace_count_previous = session.query(Workspace).count() |
250 | start_date = int(time.time())*1000 | |
251 | end_date = start_date+86400000 | |
244 | start_date = int(time.time()) * 1000 | |
245 | end_date = start_date + 86400000 | |
252 | 246 | duration = {'start_date': start_date, 'end_date': end_date} |
253 | 247 | raw_data = {'name': 'somethingdarkside', 'duration': duration} |
254 | 248 | res = test_client.post(self.url(), data=raw_data) |
294 | 288 | session, |
295 | 289 | test_client): |
296 | 290 | workspace_count_previous = session.query(Workspace).count() |
297 | start_date = int(time.time())*1000 | |
298 | duration = {'start_date': start_date, 'end_date': start_date-86400000} | |
291 | start_date = int(time.time()) * 1000 | |
292 | duration = {'start_date': start_date, 'end_date': start_date - 86400000} | |
299 | 293 | raw_data = {'name': 'somethingdarkside', 'duration': duration} |
294 | res = test_client.post(self.url(), data=raw_data) | |
295 | assert res.status_code == 400 | |
296 | assert workspace_count_previous == session.query(Workspace).count() | |
297 | ||
298 | def test_create_fails_with_forward_slash(self, session, test_client): | |
299 | workspace_count_previous = session.query(Workspace).count() | |
300 | raw_data = {'name': 'swtr/'} | |
300 | 301 | res = test_client.post(self.url(), data=raw_data) |
301 | 302 | assert res.status_code == 400 |
302 | 303 | assert workspace_count_previous == session.query(Workspace).count() |
363 | 364 | |
364 | 365 | @pytest.mark.skip # TODO fix fox sqlite |
365 | 366 | def test_list_retrieves_all_items_from(self, test_client): |
366 | super(TestWorkspaceAPI, self).test_list_retrieves_all_items_from(test_client) | |
367 | super().test_list_retrieves_all_items_from(test_client) | |
367 | 368 | |
368 | 369 | def test_workspace_activation(self, test_client, workspace, session): |
369 | 370 | workspace.active = False |
374 | 375 | |
375 | 376 | res = test_client.get(f'{self.url()}{workspace.name}/') |
376 | 377 | active = res.json.get('active') |
377 | assert active == True | |
378 | assert active | |
378 | 379 | |
379 | 380 | active_query = session.query(Workspace).filter_by(id=workspace.id).first().active |
380 | assert active_query == True | |
381 | assert active_query | |
381 | 382 | |
382 | 383 | def test_workspace_deactivation(self, test_client, workspace, session): |
383 | 384 | workspace.active = True |
388 | 389 | |
389 | 390 | res = test_client.get(f'{self.url()}{workspace.name}/') |
390 | 391 | active = res.json.get('active') |
391 | assert active == False | |
392 | assert not active | |
392 | 393 | |
393 | 394 | active_query = session.query(Workspace).filter_by(id=workspace.id).first().active |
394 | assert active_query == False | |
395 | assert not active_query | |
395 | 396 | |
396 | 397 | def test_create_fails_with_start_date_greater_than_end_date(self, |
397 | 398 | session, |
410 | 411 | return v2_to_v3(url) |
411 | 412 | |
412 | 413 | def url(self, obj=None): |
413 | return v2_to_v3(super(TestWorkspaceAPIV3, self).url(obj)) | |
414 | return v2_to_v3(super().url(obj)) | |
414 | 415 | |
415 | 416 | def test_workspace_activation(self, test_client, workspace, session): |
416 | 417 | workspace.active = False |
421 | 422 | |
422 | 423 | res = test_client.get(self.url(workspace)) |
423 | 424 | active = res.json.get('active') |
424 | assert active == True | |
425 | assert active | |
425 | 426 | |
426 | 427 | active_query = session.query(Workspace).filter_by(id=workspace.id).first().active |
427 | assert active_query == True | |
428 | assert active_query | |
428 | 429 | |
429 | 430 | def test_workspace_deactivation(self, test_client, workspace, session): |
430 | 431 | workspace.active = True |
435 | 436 | |
436 | 437 | res = test_client.get(self.url(workspace)) |
437 | 438 | active = res.json.get('active') |
438 | assert active == False | |
439 | assert not active | |
439 | 440 | |
440 | 441 | active_query = session.query(Workspace).filter_by(id=workspace.id).first().active |
441 | assert active_query == False | |
442 | assert not active_query |
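The duration payloads above are epoch milliseconds, with 86400000 ms standing in for one day. A small sketch of producing the same one-day window:

import time

def one_day_window(start_ms=None):
    # int(time.time()) * 1000 mirrors the tests: second precision, in ms.
    start = int(time.time()) * 1000 if start_ms is None else start_ms
    return {'start_date': start, 'end_date': start + 86400000}

window = one_day_window()
assert window['end_date'] - window['start_date'] == 86400000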
0 | #-*- coding: utf8 -*- | |
0 | # -*- coding: utf8 -*- | |
1 | 1 | ''' |
2 | 2 | Faraday Penetration Test IDE |
3 | 3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
7 | 7 | from builtins import str |
8 | 8 | from posixpath import join as urljoin |
9 | 9 | |
10 | from tests.utils.url import v2_to_v3 | |
11 | ||
12 | 10 | """Generic tests for APIs prefixed with a workspace_name""" |
13 | 11 | |
14 | 12 | import pytest |
15 | 13 | from sqlalchemy.orm.util import was_deleted |
16 | from faraday.server.models import db, Workspace, Credential | |
14 | from faraday.server.models import db | |
17 | 15 | from tests.test_api_pagination import PaginationTestsMixin as \ |
18 | 16 | OriginalPaginationTestsMixin |
19 | 17 | |
23 | 21 | |
24 | 22 | @pytest.mark.usefixtures('logged_user') |
25 | 23 | class GenericAPITest: |
26 | ||
27 | 24 | model = None |
28 | 25 | factory = None |
29 | 26 | api_endpoint = None |
66 | 63 | @pytest.fixture |
67 | 64 | def mock_envelope_list(self, monkeypatch): |
68 | 65 | assert self.view_class is not None, 'You must define view_class ' \ |
69 | 'in order to use ListTestsMixin or PaginationTestsMixin' | |
66 | 'in order to use ListTestsMixin or PaginationTestsMixin' | |
70 | 67 | |
71 | 68 | def _envelope_list(_, objects, pagination_metadata=None): |
72 | 69 | return {"data": objects} |
70 | ||
73 | 71 | monkeypatch.setattr(self.view_class, '_envelope_list', _envelope_list) |
74 | 72 | |
75 | 73 | @pytest.mark.usefixtures('mock_envelope_list') |
88 | 86 | session.commit() |
89 | 87 | res = test_client.get(self.url()) |
90 | 88 | assert res.status_code == 200 |
89 | ||
91 | 90 | |
92 | 91 | class RetrieveTestsMixin: |
93 | 92 | |
134 | 133 | db.session.commit() |
135 | 134 | assert res.status_code == 403 |
136 | 135 | assert self.model.query.count() == count |
137 | ||
138 | 136 | |
139 | 137 | def test_create_inactive_fails(self, test_client): |
140 | 138 | self.workspace.deactivate() |
272 | 270 | |
273 | 271 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
274 | 272 | def test_update_an_object(self, test_client, method): |
275 | super(PatchableTestsMixin, self).test_update_an_object(test_client, method) | |
273 | super().test_update_an_object(test_client, method) | |
276 | 274 | |
277 | 275 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
278 | 276 | def test_update_an_object_readonly_fails(self, test_client, method): |
279 | super(PatchableTestsMixin, self).test_update_an_object_readonly_fails(test_client, method) | |
277 | super().test_update_an_object_readonly_fails(test_client, method) | |
280 | 278 | |
281 | 279 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
282 | 280 | def test_update_inactive_fails(self, test_client, method): |
283 | super(PatchableTestsMixin, self).test_update_inactive_fails(test_client, method) | |
281 | super().test_update_inactive_fails(test_client, method) | |
284 | 282 | |
285 | 283 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
286 | 284 | def test_update_fails_with_existing(self, test_client, session, method): |
287 | super(PatchableTestsMixin, self).test_update_fails_with_existing(test_client, session, method) | |
285 | super().test_update_fails_with_existing(test_client, session, method) | |
288 | 286 | |
289 | 287 | def test_update_an_object_fails_with_empty_dict(self, test_client): |
290 | 288 | """To do this the user should use a PATCH request""" |
293 | 291 | |
294 | 292 | @pytest.mark.parametrize("method", ["PUT", "PATCH"]) |
295 | 293 | def test_update_cant_change_id(self, test_client, method): |
296 | super(PatchableTestsMixin, self).test_update_cant_change_id(test_client, method) | |
294 | super().test_update_cant_change_id(test_client, method) | |
295 | ||
297 | 296 | |
298 | 297 | class CountTestsMixin: |
299 | 298 | def test_count(self, test_client, session, user_factory): |
306 | 305 | factory_kwargs[field] = value |
307 | 306 | |
308 | 307 | session.add(self.factory.create(creator=self.first_object.creator, |
309 | workspace=self.first_object.workspace, | |
310 | **factory_kwargs)) | |
308 | workspace=self.first_object.workspace, | |
309 | **factory_kwargs)) | |
311 | 310 | |
312 | 311 | session.commit() |
313 | 312 | |
363 | 362 | assert creators == sorted(creators, reverse=True) |
364 | 363 | |
365 | 364 | |
366 | ||
367 | 365 | class DeleteTestsMixin: |
368 | 366 | |
369 | 367 | def test_delete(self, test_client): |
389 | 387 | assert self.model.query.count() == OBJECT_COUNT |
390 | 388 | |
391 | 389 | def test_delete_from_other_workspace_fails(self, test_client, |
392 | second_workspace): | |
390 | second_workspace): | |
393 | 391 | res = test_client.delete(self.url(self.first_object, |
394 | 392 | workspace=second_workspace)) |
395 | 393 | assert res.status_code == 404  # Not found
450 | 448 | assert res.status_code == 200 |
451 | 449 | assert len(res.json['data']) == OBJECT_COUNT |
452 | 450 | |
451 | ||
453 | 452 | class ReadWriteMultiWorkspacedAPITests(ReadOnlyMultiWorkspacedAPITests, |
454 | 453 | ReadWriteTestsMixin): |
455 | 454 | pass |
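The super(Class, self) to super() changes above sit inside a mixin hierarchy where PatchableTestsMixin re-parametrizes the inherited PUT tests so they also run for PATCH. A reduced sketch of that pattern (class names are illustrative):

import pytest

class UpdateTestsMixin:
    @pytest.mark.parametrize("method", ["PUT"])
    def test_update_an_object(self, method):
        # Shared body tolerates both verbs; subclasses widen the params.
        assert method in ("PUT", "PATCH")

class PatchableTestsMixin(UpdateTestsMixin):
    @pytest.mark.parametrize("method", ["PUT", "PATCH"])
    def test_update_an_object(self, method):
        # Override only to widen parametrize, then delegate upward.
        super().test_update_an_object(method)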
14 | 14 | user = User.query.filter_by(username='test_change_pass').first() |
15 | 15 | |
16 | 16 | assert not verify_password('old_pass', user.password) |
17 | assert verify_password('new_pass', user.password)⏎ | |
17 | assert verify_password('new_pass', user.password) |
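The hunk above asserts that a password change took effect. A self-contained sketch of the same assertion using werkzeug's hash helpers, standing in for the verify_password the test imports:

from werkzeug.security import generate_password_hash, check_password_hash

stored = generate_password_hash('new_pass')
assert not check_password_hash(stored, 'old_pass')
assert check_password_hash(stored, 'new_pass')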
7 | 7 | |
8 | 8 | from faraday.server.models import Host, Service, Vulnerability |
9 | 9 | import random |
10 | ||
11 | ||
10 | 12 | def new_random_workspace_name(): |
11 | 13 | return ("aworkspace" + "".join(random.sample([chr(i) for i in range(65, 90) |
12 | ], 10 ))).lower() | |
14 | ], 10))).lower() | |
15 | ||
13 | 16 | |
14 | 17 | def create_host(self, host_name="pepito", os="linux"): |
15 | 18 | host = Host(host_name, os) |
16 | 19 | self.model_controller.addHostSYNC(host) |
17 | 20 | return host |
18 | 21 | |
22 | ||
19 | 23 | def create_interface(self, host, iname="coqiuto", mac="00:03:00:03:04:04"): |
20 | 24 | raise NotImplementedError() |
21 | 25 | |
22 | def create_service(self, host, interface, service_name = "coquito"): | |
26 | ||
27 | def create_service(self, host, interface, service_name="coquito"): | |
23 | 28 | service = Service(service_name) |
24 | 29 | self.model_controller.addServiceToInterfaceSYNC(host.getID(), |
25 | interface.getID(), service) | |
30 | interface.getID(), service) | |
26 | 31 | return service |
32 | ||
27 | 33 | |
28 | 34 | def create_host_vuln(self, host, name, desc, severity): |
29 | 35 | vuln = Vulnerability(name, desc, severity) |
31 | 37 | |
32 | 38 | return vuln |
33 | 39 | |
40 | ||
34 | 41 | def create_int_vuln(self, host, interface, name, desc, severity): |
35 | 42 | vuln = Vulnerability(name=name, description=desc, severity=severity) |
36 | 43 | self.model_controller.addVulnToInterfaceSYNC(host.getID(), interface.getID(), vuln) |
37 | 44 | |
38 | 45 | return vuln |
39 | 46 | |
47 | ||
40 | 48 | def create_serv_vuln(self, host, service, name, desc, severity): |
41 | 49 | vuln = Vulnerability(name=name, description=desc, severity=severity) |
42 | 50 | self.model_controller.addVulnToServiceSYNC(host.getID(), service.getID(), vuln) |
43 | 51 | |
44 | 52 | return vuln |
45 | ||
46 | ||
47 | # I'm Py3 |
20 | 20 | |
21 | 21 | assert session.query(Host).filter( |
22 | 22 | Workspace.id == workspace.id |
23 | ).first() == None | |
23 | ).first() is None | |
24 | 24 | |
25 | 25 | |
26 | 26 | def test_child_parent_verification_event_succeeds(session, workspace): |
54 | 54 | |
55 | 55 | def test_child_parent_verification_event_changing_id_fails(session, workspace, |
56 | 56 | second_workspace): |
57 | ||
57 | ||
58 | 58 | session.add(workspace) |
59 | 59 | session.add(second_workspace) |
60 | 60 | session.commit() |
64 | 64 | service = ServiceFactory.build(host=host, workspace_id=second_workspace.id) |
65 | 65 | |
66 | 66 | session.add(service) |
67 | ||
67 | ||
68 | 68 | with pytest.raises(AssertionError): |
69 | 69 | session.commit() |
70 | 70 | |
71 | 71 | |
72 | # I'm Py3⏎ | |
72 | # I'm Py3 |
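The event tests above expect session.commit() to raise AssertionError when a child row's workspace_id disagrees with its parent's. A plain-Python sketch of that invariant (in Faraday the real check runs as a SQLAlchemy session event):

def check_child_parent_workspace(child, parent):
    # Mirrors the invariant the commit-time event enforces.
    assert child['workspace_id'] == parent['workspace_id'], \
        'Conflicting workspace assignment between child and parent'

host = {'workspace_id': 1}
service = {'workspace_id': 2}  # built against a different workspace
try:
    check_child_parent_workspace(service, host)
except AssertionError as exc:
    print(exc)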
20 | 20 | |
21 | 21 | def test_image_is_detected_correctly(): |
22 | 22 | |
23 | with open(TEST_DATA_PATH / 'faraday.png', "rb")as image_data: | |
23 | with open(TEST_DATA_PATH / 'faraday.png', "rb") as image_data:
24 | 24 | field = FaradayUploadedFile(image_data.read()) |
25 | 25 | assert field['content_type'] == 'image/png' |
26 | 26 | assert 'thumb_id' in field.keys() |
29 | 29 | |
30 | 30 | |
31 | 31 | def test_normal_attach_is_not_detected_as_image(): |
32 | with open(TEST_DATA_PATH / 'report_w3af.xml', "rb")as image_data: | |
32 | with open(TEST_DATA_PATH / 'report_w3af.xml', "rb") as image_data:
33 | 33 | field = FaradayUploadedFile(image_data.read()) |
34 | 34 | assert field['content_type'] == 'application/octet-stream' |
35 | 35 | assert len(field['files']) == 1 |
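These tests rely on FaradayUploadedFile sniffing the content type of an upload. A minimal sketch of the same idea using the PNG magic bytes (the real detection is richer than this):

PNG_SIGNATURE = b'\x89PNG\r\n\x1a\n'

def sniff_content_type(data):
    # Anything that is not a recognized image falls back to a raw blob.
    if data.startswith(PNG_SIGNATURE):
        return 'image/png'
    return 'application/octet-stream'

assert sniff_content_type(PNG_SIGNATURE + b'...') == 'image/png'
assert sniff_content_type(b'<?xml version="1.0"?>') == 'application/octet-stream'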
27 | 27 | self.use_ldaps = ldap.use_ldaps |
28 | 28 | self.use_start_tls = ldap.use_start_tls |
29 | 29 | |
30 | ||
31 | 30 | def test_storage(self): |
32 | 31 | from faraday.server.config import storage |
33 | 32 | self.path = storage.path |
34 | ||
35 | ||
36 | ||
37 | # I'm Py3 |
5 | 5 | from faraday.searcher.api import Api |
6 | 6 | from faraday.searcher.searcher import Searcher |
7 | 7 | from faraday.searcher.sqlapi import SqlApi |
8 | from faraday.server.models import Service, Host, VulnerabilityWeb | |
8 | from faraday.server.models import Service, Host, VulnerabilityWeb, Rule | |
9 | 9 | from faraday.server.models import Vulnerability, CommandObject |
10 | 10 | from faraday.server.schemas import WorkerRuleSchema |
11 | 11 | from faraday.utils.smtp import MailNotification |
19 | 19 | ActionFactory, |
20 | 20 | RuleActionFactory, |
21 | 21 | UserFactory, |
22 | ConditionFactory, | |
22 | 23 | ) |
23 | 24 | from tests.factories import WorkspaceFactory, VulnerabilityFactory |
24 | 25 | |
527 | 528 | searcher.process(rules) |
528 | 529 | vuln = session.query(Vulnerability).get(vuln_id) |
529 | 530 | assert vuln.severity == 'informational' |
530 | ||
531 | 531 | |
532 | 532 | @pytest.mark.parametrize("api", [ |
533 | 533 | lambda workspace, test_client, session: Api(workspace.name, test_client, session, username='test', |
854 | 854 | assert vulns_count == 10 |
855 | 855 | |
856 | 856 | searcher = Searcher(api(workspace, test_client, session)) |
857 | rule_disabled = RuleFactory.create(object="severity=low", disabled=True, workspace=workspace) | |
858 | rule_enabled = RuleFactory.create(object="severity=medium", disabled=False, workspace=workspace) | |
857 | rule_disabled: Rule = RuleFactory.create(disabled=True, workspace=workspace) | |
858 | rule_enabled = RuleFactory.create(disabled=False, workspace=workspace) | |
859 | rule_disabled.conditions = [ConditionFactory.create(field='severity', value="low")] | |
860 | rule_enabled.conditions = [ConditionFactory.create(field='severity', value="medium")] | |
859 | 861 | |
860 | 862 | action = ActionFactory.create(command='DELETE') |
861 | 863 | session.add(action) |
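The hunk above replaces string-encoded rule objects such as "severity=low" with structured Condition rows attached to a Rule. A sketch of evaluating such conditions against a vulnerability dict (the dataclasses are illustrative, not Faraday's models):

from dataclasses import dataclass, field
from typing import List

@dataclass
class Condition:
    field_name: str
    value: str

@dataclass
class Rule:
    disabled: bool
    conditions: List[Condition] = field(default_factory=list)

    def matches(self, vuln):
        # Disabled rules never fire; otherwise all conditions must hold.
        if self.disabled:
            return False
        return all(vuln.get(c.field_name) == c.value for c in self.conditions)

rule = Rule(disabled=False, conditions=[Condition('severity', 'medium')])
assert rule.matches({'severity': 'medium'})
assert not rule.matches({'severity': 'low'})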
42 | 42 | self.assertEqual(res.status_code, 401) |
43 | 43 | |
44 | 44 | def test_401_when_getting_an_existent_view_agent_token(self): |
45 | res = self.app.get('/', headers={'authorization':'agent 1234'}) | |
45 | res = self.app.get('/', headers={'authorization': 'agent 1234'}) | |
46 | 46 | self.assertEqual(res.status_code, 401) |
47 | 47 | |
48 | 48 | def test_401_when_getting_an_existent_view_user_token(self): |
49 | res = self.app.get('/', headers={'authorization':'token 1234'}) | |
49 | res = self.app.get('/', headers={'authorization': 'token 1234'}) | |
50 | 50 | self.assertEqual(res.status_code, 401) |
51 | 51 | |
52 | 52 | def test_401_when_posting_an_existent_view_and_not_logged(self): |
53 | res = self.app.post('/', data={'data':'data'}) | |
53 | res = self.app.post('/', data={'data': 'data'}) | |
54 | 54 | self.assertEqual(res.status_code, 401) |
55 | 55 | |
56 | 56 | def test_401_when_accessing_a_non_existent_view_and_not_logged(self): |
57 | res = self.app.post('/dfsdfsdd', data={'data':'data'}) | |
57 | res = self.app.post('/dfsdfsdd', data={'data': 'data'}) | |
58 | 58 | self.assertEqual(res.status_code, 401) |
59 | 59 | |
60 | 60 | def test_200_when_not_logged_but_endpoint_is_public(self): |
91 | 91 | if __name__ == '__main__': |
92 | 92 | unittest.main() |
93 | 93 | |
94 | ||
95 | 94 | # I'm Py3 |
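The 401 assertions above exercise "agent <token>" and "token <token>" Authorization headers. A sketch of the scheme/token split such an endpoint has to perform before it can reject a mismatched scheme:

def parse_authorization(header_value):
    # Split "scheme token" once; malformed headers yield (None, None).
    try:
        scheme, token = header_value.split(' ', 1)
    except ValueError:
        return None, None
    return scheme.lower(), token

assert parse_authorization('agent 1234') == ('agent', '1234')
assert parse_authorization('Token abcd') == ('token', 'abcd')
assert parse_authorization('garbage') == (None, None)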
34 | 34 | assert copy_default_config_to_local() is None |
35 | 35 | assert not copyfile.called |
36 | 36 | |
37 | ||
37 | 38 | VERSION_PATTERN = r""" |
38 | 39 | v? |
39 | 40 | (?: |
70 | 71 | re.VERBOSE | re.IGNORECASE, |
71 | 72 | ) |
72 | 73 | |
74 | ||
73 | 75 | def isPEP440(arg): |
74 | 76 | return not _regex.match(arg) is None |
75 | 77 | |
78 | ||
76 | 79 | def test_exists_and_content(): |
77 | 80 | assert isPEP440(__version__) |
78 | ||
79 | ||
80 | # I'm Py3 |
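isPEP440 above matches __version__ against the canonical PEP 440 regex inlined as VERSION_PATTERN. An equivalent check via the packaging library, assuming that dependency is acceptable where the inlined regex is not needed:

from packaging.version import Version, InvalidVersion

def is_pep440(arg):
    # Version() raises InvalidVersion for anything PEP 440 rejects.
    try:
        Version(arg)
    except InvalidVersion:
        return False
    return True

assert is_pep440('3.15.0')
assert not is_pep440('not-a-version')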
3 | 3 | |
4 | 4 | from faraday.server.utils.filters import FilterSchema |
5 | 5 | from faraday.server.utils.filters import FlaskRestlessSchema |
6 | from faraday.server.models import VulnerabilityWeb | |
7 | 6 | |
8 | 7 | |
9 | 8 | class TestFilters: |
19 | 18 | |
20 | 19 | def test_restless_using_order_by(self): |
21 | 20 | test_filter = { |
22 | "order_by":[ | |
23 | {"field":"host__vulnerability_critical_generic_count"}, | |
24 | {"field":"host__vulnerability_high_generic_count"}, | |
25 | {"field":"host__vulnerability_medium_generic_count"}, | |
21 | "order_by": [ | |
22 | {"field": "host__vulnerability_critical_generic_count"}, | |
23 | {"field": "host__vulnerability_high_generic_count"}, | |
24 | {"field": "host__vulnerability_medium_generic_count"}, | |
26 | 25 | ], |
27 | 26 | "filters": [{ |
28 | 27 | "or": [ |
35 | 34 | res = FlaskRestlessSchema().load(test_filter) |
36 | 35 | assert res == test_filter |
37 | 36 | |
38 | ||
39 | 37 | def test_FlaskRestlessSchema_(self): |
40 | 38 | test_filter = [{"name": "severity", "op": "eq", "val": "low"}] |
41 | 39 | res = FlaskRestlessSchema().load(test_filter) |
44 | 42 | def test_simple_and_operator(self): |
45 | 43 | test_filter = {"filters": [ |
46 | 44 | {'and': [ |
47 | {"name": "severity", "op": "eq", "val": "low"}, | |
48 | {"name": "severity", "op": "eq", "val": "medium"} | |
49 | ] | |
45 | {"name": "severity", "op": "eq", "val": "low"}, | |
46 | {"name": "severity", "op": "eq", "val": "medium"} | |
47 | ] | |
50 | 48 | } |
51 | 49 | |
52 | 50 | ]} |
181 | 179 | else: |
182 | 180 | assert and_op == {"name": "severity", "op": "eq", "val": "high"} |
183 | 181 | |
184 | ||
185 | 182 | def test_case_1(self): |
186 | 183 | filter_schema = FilterSchema() |
187 | 184 | filters = {'filters': [{"name": "confirmed", "op": "==", "val": "true"}]} |
196 | 193 | |
197 | 194 | def test_case_3(self): |
198 | 195 | filters = {'filters': [ |
196 | {"and": [ | |
199 | 197 | {"and": [ |
200 | {"and": [ | |
201 | {"name": "severity", "op": "eq", "val": "critical"}, | |
202 | {"name": "confirmed", "op": "==", "val": "true"} | |
203 | ]}, | |
204 | {"name": "host__os", "op": "has", "val": "Linux"} | |
205 | ]} | |
206 | ]} | |
198 | {"name": "severity", "op": "eq", "val": "critical"}, | |
199 | {"name": "confirmed", "op": "==", "val": "true"} | |
200 | ]}, | |
201 | {"name": "host__os", "op": "has", "val": "Linux"} | |
202 | ]} | |
203 | ]} | |
207 | 204 | res = FilterSchema().load(filters) |
208 | 205 | assert res == filters |
209 | 206 | |
210 | 207 | def test_test_case_recursive(self): |
211 | 208 | filters = {"filters": |
212 | [{"or":[ | |
213 | {"name":"severity","op":"eq","val":"medium"}, | |
214 | {"or":[ | |
215 | {"name":"severity","op":"eq","val":"high"}, | |
216 | {"and":[ | |
217 | {"and":[ | |
218 | {"name":"severity","op":"eq","val":"critical"}, | |
219 | {"name":"confirmed","op":"==","val":"true"} | |
220 | ]}, | |
221 | {"name":"host__os","op":"has","val":"Linux"} | |
209 | [{"or": [ | |
210 | {"name": "severity", "op": "eq", "val": "medium"}, | |
211 | {"or": [ | |
212 | {"name": "severity", "op": "eq", "val": "high"}, | |
213 | {"and": [ | |
214 | {"and": [ | |
215 | {"name": "severity", "op": "eq", "val": "critical"}, | |
216 | {"name": "confirmed", "op": "==", "val": "true"} | |
217 | ]}, | |
218 | {"name": "host__os", "op": "has", "val": "Linux"} | |
222 | 219 | ]} |
223 | 220 | ]} |
224 | 221 | ]} |
225 | ]} | |
222 | ]} | |
226 | 223 | res = FilterSchema().load(filters) |
227 | 224 | assert res == filters |
228 | 225 | |
229 | 226 | def test_case_recursive_2(self): |
230 | 227 | filters = {'filters': [ |
231 | {"and": [ | |
232 | {"and": [ | |
233 | {"name": "severity", "op": "eq", "val": "critical"}, | |
234 | {"name": "confirmed", "op": "==", "val": "true"} | |
235 | ]}, | |
236 | {"name": "host__os", "op": "has", "val": "Linux"} | |
237 | ]} | |
238 | ]} | |
228 | {"and": [ | |
229 | {"and": [ | |
230 | {"name": "severity", "op": "eq", "val": "critical"}, | |
231 | {"name": "confirmed", "op": "==", "val": "true"} | |
232 | ]}, | |
233 | {"name": "host__os", "op": "has", "val": "Linux"} | |
234 | ]} | |
235 | ]} | |
239 | 236 | |
240 | 237 | res = FilterSchema().load(filters) |
241 | 238 | assert res == filters |
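The schema tests above round-trip arbitrarily nested and/or filter trees through FilterSchema. A small recursive validator for the same shape (illustrative; the real validation lives in faraday.server.utils.filters):

def is_valid_filter(node):
    # A leaf is exactly {"name": ..., "op": ..., "val": ...}; an inner
    # node is a single-key {"and": [...]} or {"or": [...]} of sub-filters.
    if not isinstance(node, dict):
        return False
    if set(node) == {'name', 'op', 'val'}:
        return True
    if len(node) == 1:
        (op, children), = node.items()
        return (op in ('and', 'or') and isinstance(children, list)
                and all(is_valid_filter(child) for child in children))
    return False

tree = {"or": [
    {"name": "severity", "op": "eq", "val": "medium"},
    {"and": [
        {"name": "severity", "op": "eq", "val": "critical"},
        {"name": "confirmed", "op": "==", "val": "true"},
    ]},
]}
assert is_valid_filter(tree)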
52 | 52 | map(lambda column: column.strip("'')").strip('-1').strip('-1));').strip(), statements_clean) |
53 | 53 | ) |
54 | 54 | ) |
55 | statements_clean.remove('source_code_id') # we don't support source_code yet | |
55 | statements_clean.remove('source_code_id') # we don't support source_code yet | |
56 | 56 | unique_constraints = get_unique_fields(session, Vulnerability()) |
57 | 57 | for unique_constraint in unique_constraints: |
58 | 58 | assert len(statements_clean) == len(unique_constraint) |
69 | 69 | for unique_constraint in unique_constraints: |
70 | 70 | assert unique_constraint == expected_unique_fields |
71 | 71 | |
72 | ||
73 | 72 | # I'm Py3 |