Merge tag 'upstream/2.1.0' into kali/master
Upstream version 2.1.0
Sophie Brun
7 years ago
26 | 26 | |
27 | 27 | ![platform](https://raw.github.com/wiki/infobyte/faraday/images/platform/supported.png) |
28 | 28 | |
29 | Read more about [supported platforms and installation specifics] (https://github.com/infobyte/faraday/wiki/Installation). | |
29 | Read more about [supported platforms and installation specifics] (https://github.com/infobyte/faraday/wiki/First-steps). | |
30 | 30 | |
31 | 31 | #### Quick install |
32 | 32 | |
33 | This applies only to Debian, Ubuntu, Kali and Backtrack. For the full installation guide [visit our wiki](https://github.com/infobyte/faraday/wiki/Installation). | |
33 | This applies only to Debian, Ubuntu, Kali and Backtrack. For the full installation guide [visit our wiki](https://github.com/infobyte/faraday/wiki/First-steps). | |
34 | 34 | |
35 | 35 | Download the [latest tarball](https://github.com/infobyte/faraday/tarball/master) or clone our repo: |
36 | 36 | |
38 | 38 | $ git clone https://github.com/infobyte/faraday.git faraday-dev |
39 | 39 | $ cd faraday-dev |
40 | 40 | $ ./install.sh |
41 | $ ./faraday-server.py | |
41 | 42 | $ ./faraday.py |
42 | 43 | ``` |
43 | 44 |
8 | 8 | |
9 | 9 | New features in the latest update |
10 | 10 | ===================================== |
11 | ||
12 | September 19, 2016: | |
13 | --- | |
14 | * Major refactor of Faraday Client: now we support massive workspaces (100.000+ hosts). | |
15 | * Fixed more than 10 minor bugs on the Web UI. | |
16 | * Fixed searching with spaces character on Web UI | |
17 | * Updated URL shown when starting Faraday. | |
18 | * Dashboard is now refreshed automatically every 60 seconds. | |
19 | * Fixed Propecia plugin. | |
20 | * New plugin: WPscan | |
21 | * Host Sidebar on GTK now adds information more intelligently and will never block the application. | |
22 | * Evidence screenshots in report generation is now bigger. | |
23 | * Added Help section to WEB UI. | |
24 | ||
11 | 25 | |
12 | 26 | August 12, 2016: |
13 | 27 | --- |
1 | 1 | <faraday> |
2 | 2 | |
3 | 3 | <appname>Faraday - Penetration Test IDE</appname> |
4 | <version>2.0.0</version> | |
4 | <version>2.1.0</version> | |
5 | 5 | <debug_status>0</debug_status> |
6 | 6 | <font>-Misc-Fixed-medium-r-normal-*-12-100-100-100-c-70-iso8859-1</font> |
7 | 7 | <home_path>~/</home_path> |
29 | 29 | <repo_url type="svn"></repo_url> |
30 | 30 | <repo_user>u</repo_user> |
31 | 31 | <repo_password></repo_password> |
32 | <couch_uri/> | |
32 | <couch_uri>http://127.0.0.1:5985</couch_uri> | |
33 | 33 | <couch_is_replicated/> |
34 | 34 | <couch_replics/> |
35 | 35 |
6 | 6 | |
7 | 7 | CONST_VERSION_FILE = 'VERSION' |
8 | 8 | CONST_REQUIREMENTS_FILE = 'requirements.txt' |
9 | CONST_CONFIG = 'views/reports/_attachments/scripts/config/config.json' | |
10 | 9 | CONST_FARADAY_HOME_PATH = '~/.faraday' |
11 | 10 | CONST_FARADAY_PLUGINS_PATH = 'plugins' |
12 | 11 | CONST_FARADAY_PLUGINS_REPO_PATH = 'plugins/repo' |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | from persistence.change import change_factory, CHANGETYPE, ChangeModelObject | |
7 | import model | |
8 | import model.guiapi | |
9 | import threading | |
10 | from utils.logs import getLogger | |
11 | ||
12 | ||
13 | class ChangeController(object): | |
14 | def __init__(self): | |
15 | self.mapper_manager = None | |
16 | self.changesWatcher = None | |
17 | ||
18 | def notify(self, changes): | |
19 | for change in changes: | |
20 | model.guiapi.notification_center.changeFromInstance(change) | |
21 | ||
22 | def loadChange(self, objid, revision, deleted): | |
23 | try: | |
24 | obj = self.mapper_manager.find(objid) | |
25 | change = change_factory.create(obj, revision, deleted) | |
26 | ||
27 | if change.getChangeType() == CHANGETYPE.DELETE: | |
28 | # object deleted | |
29 | if isinstance(change, ChangeModelObject): | |
30 | obj_parent = obj.getParent() | |
31 | if obj_parent: | |
32 | obj_parent.deleteChild(obj.getID()) | |
33 | self.mapper_manager.remove(objid) | |
34 | elif change.getChangeType() == CHANGETYPE.UPDATE: | |
35 | # object edited | |
36 | self.mapper_manager.reload(objid) | |
37 | elif change.getChangeType() == CHANGETYPE.ADD: | |
38 | if isinstance(change, ChangeModelObject): | |
39 | # The child has a parent, but the parent doesn't | |
40 | # have the child yet... | |
41 | if obj.getParent(): | |
42 | obj.getParent().addChild(obj) | |
43 | ||
44 | if isinstance(change, ChangeModelObject): | |
45 | self._notify_model_object_change(change, obj) | |
46 | model.guiapi.notification_center.changeFromInstance(change) | |
47 | except: | |
48 | getLogger(self).debug( | |
49 | "Change couldn't be processed") | |
50 | ||
51 | def _notify_model_object_change(self, change, obj): | |
52 | host = obj.getHost() | |
53 | if (change.getChangeType() == CHANGETYPE.ADD and | |
54 | obj.class_signature == model.hosts.Host.class_signature): | |
55 | model.guiapi.notification_center.addHost(host) | |
56 | elif (change.getChangeType() == CHANGETYPE.DELETE and | |
57 | obj.class_signature == model.hosts.Host.class_signature): | |
58 | model.guiapi.notification_center.delHost(host.getID()) | |
59 | elif (change.getChangeType() != CHANGETYPE.UNKNOWN): | |
60 | model.guiapi.notification_center.editHost(host) | |
61 | ||
62 | ||
63 | def revertToNoWorkspace(self): | |
64 | model.guiapi.notification_center.WorkspaceProblem() | |
65 | ||
66 | def watch(self, mapper, dbConnector): | |
67 | self.mapper_manager = mapper | |
68 | self.dbConnector = dbConnector | |
69 | self.changesWatcher = ChangeWatcher(dbConnector.waitForDBChange) | |
70 | dbConnector.setChangesCallback(self.loadChange) | |
71 | dbConnector.setNoWorkspacesCallback(self.revertToNoWorkspace) | |
72 | self.changesWatcher.start() | |
73 | ||
74 | def unwatch(self): | |
75 | if self.changesWatcher: | |
76 | self.dbConnector.setChangesCallback(None) | |
77 | self.dbConnector.forceUpdate() | |
78 | self.changesWatcher.join() | |
79 | ||
80 | def stop(self): | |
81 | self.unwatch() | |
82 | ||
83 | def isAlive(self): | |
84 | return self.changesWatcher.isAlive() | |
85 | ||
86 | class ChangeWatcher(threading.Thread): | |
87 | def __init__(self, watch_function): | |
88 | threading.Thread.__init__(self) | |
89 | ||
90 | self._function = watch_function | |
91 | self._watcher = threading.Thread(target=self._function) | |
92 | self._watcher.setDaemon(True) | |
93 | ||
94 | def run(self): | |
95 | self._watcher.start() | |
96 | ||
97 | def stop(self): | |
98 | self._stop_event.set() | |
99 |
1 | 1 | # Faraday Penetration Test IDE |
2 | 2 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
3 | 3 | # See the file 'doc/LICENSE' for the license information |
4 | import server.utils.logger | |
4 | 5 | |
5 | import sys | |
6 | import sys, os | |
6 | 7 | import argparse |
8 | import subprocess | |
7 | 9 | import server.config |
10 | import server.couchdb | |
8 | 11 | |
9 | 12 | from server.utils import daemonize |
10 | from server.utils.logger import setup_logging, get_logger, set_logging_level | |
11 | 13 | from utils.dependencies import DependencyChecker |
12 | 14 | from utils.user_input import query_yes_no |
13 | 15 | |
16 | logger = server.utils.logger.get_logger(__name__) | |
14 | 17 | |
15 | 18 | def main(): |
16 | setup_logging() | |
19 | args = parse_arguments() | |
17 | 20 | |
18 | cli_arguments = parse_arguments() | |
21 | if args.stop: | |
22 | sys.exit(0 if stop_server() else 1) | |
19 | 23 | |
20 | process_run_commands(cli_arguments) | |
21 | setup_environment(cli_arguments) | |
22 | setup_and_run_server(cli_arguments) | |
24 | if not args.no_setup: | |
25 | setup_environment() | |
26 | import_workspaces() | |
27 | ||
28 | if is_server_running(): | |
29 | sys.exit(1) | |
30 | ||
31 | if args.debug: | |
32 | server.utils.logger.set_logging_level(server.config.DEBUG) | |
33 | ||
34 | if args.start: | |
35 | # Starts a new process on background with --ignore-setup | |
36 | # and without --start nor --stop | |
37 | devnull = open('/dev/null', 'w') | |
38 | params = ['/usr/bin/env', 'python2.7', os.path.join(server.config.FARADAY_BASE, __file__), '--no-setup'] | |
39 | if args.ssl: params.append('--ssl') | |
40 | if args.debug: params.append('--debug') | |
41 | logger.info('Faraday Server is running as a daemon') | |
42 | subprocess.Popen(params, stdout=devnull, stderr=devnull) | |
43 | else: | |
44 | run_server(args) | |
23 | 45 | |
24 | 46 | def parse_arguments(): |
25 | 47 | parser = argparse.ArgumentParser() |
27 | 49 | parser.add_argument('--debug', action='store_true', help='run Faraday Server in debug mode') |
28 | 50 | parser.add_argument('--start', action='store_true', help='run Faraday Server in background') |
29 | 51 | parser.add_argument('--stop', action='store_true', help='stop Faraday Server') |
52 | parser.add_argument('--no-setup', action='store_true', help=argparse.SUPPRESS) | |
30 | 53 | return parser.parse_args() |
31 | 54 | |
32 | def process_run_commands(cli_arguments): | |
33 | logger = get_logger(__name__) | |
34 | ||
35 | if cli_arguments.stop: | |
36 | if not daemonize.stop_server(): | |
37 | # Exists with an error if it couldn't close the server | |
38 | sys.exit(1) | |
39 | else: | |
40 | logger.info("Faraday Server stopped successfully") | |
41 | sys.exit(0) | |
42 | ||
43 | # Check if server is already running | |
44 | pid = daemonize.is_server_running() | |
45 | if pid is not None: | |
46 | logger.error("Faraday Server is already running. PID: {}".format(pid)) | |
47 | sys.exit(1) | |
48 | ||
49 | def setup_environment(cli_arguments): | |
50 | logger = get_logger(__name__) | |
55 | def setup_environment(): | |
56 | # Configuration files generation | |
51 | 57 | server.config.copy_default_config_to_local() |
52 | 58 | |
53 | if cli_arguments.debug: | |
54 | set_logging_level(server.config.DEBUG) | |
59 | # Dependencies installation | |
60 | missing_packages = check_dependencies() | |
61 | if len(missing_packages) > 0: | |
62 | install_packages(missing_packages) | |
55 | 63 | |
56 | missing_packages = check_dependencies() | |
64 | # Web configuration file generation | |
65 | server.config.gen_web_config() | |
57 | 66 | |
58 | if len(missing_packages) > 0: | |
59 | answer = ask_to_install(missing_packages) | |
60 | if answer: | |
61 | logger.info( | |
62 | "Dependencies installed. Please launch Faraday Server again") | |
63 | sys.exit(0) | |
64 | else: | |
65 | logger.error("Dependencies not met") | |
66 | sys.exit(1) | |
67 | ||
68 | server.config.gen_web_config() | |
67 | # Reports DB creation | |
68 | server.couchdb.push_reports() | |
69 | 69 | |
70 | 70 | def check_dependencies(): |
71 | 71 | checker = DependencyChecker(server.config.REQUIREMENTS_FILE) |
72 | 72 | missing = checker.check_dependencies() |
73 | 73 | return missing |
74 | 74 | |
75 | def install_packages(packages): | |
76 | if ask_to_install(packages): | |
77 | logger.info("Dependencies installed. Please launch Faraday Server again") | |
78 | sys.exit(0) | |
79 | else: | |
80 | logger.error("Dependencies not met") | |
81 | sys.exit(1) | |
82 | ||
75 | 83 | def ask_to_install(missing_packages): |
76 | logger = get_logger(__name__) | |
77 | 84 | logger.warning("The following packages are not installed:") |
78 | 85 | for package in missing_packages: |
79 | 86 | logger.warning("%s" % package) |
80 | res = query_yes_no("Do you want to install them?", default="no") | |
81 | if res: | |
87 | ||
88 | if query_yes_no("Do you want to install them?", default="no"): | |
82 | 89 | checker = DependencyChecker(server.config.REQUIREMENTS_FILE) |
83 | 90 | checker.install_packages(missing_packages) |
84 | return res | |
91 | return True | |
85 | 92 | |
86 | def setup_and_run_server(cli_arguments): | |
93 | return False | |
94 | ||
95 | def import_workspaces(): | |
96 | import server.importer | |
97 | server.importer.import_workspaces() | |
98 | ||
99 | def stop_server(): | |
100 | if not daemonize.stop_server(): | |
101 | # Exists with an error if it couldn't close the server | |
102 | return False | |
103 | else: | |
104 | logger.info("Faraday Server stopped successfully") | |
105 | return True | |
106 | ||
107 | def is_server_running(): | |
108 | pid = daemonize.is_server_running() | |
109 | if pid is not None: | |
110 | logger.error("Faraday Server is already running. PID: {}".format(pid)) | |
111 | return True | |
112 | else: | |
113 | return False | |
114 | ||
115 | def run_server(args): | |
116 | import server.database | |
117 | import server.app | |
87 | 118 | import server.web |
88 | import server.database | |
89 | 119 | |
90 | server.database.setup() | |
91 | ||
92 | web_server = server.web.WebServer(enable_ssl=cli_arguments.ssl) | |
93 | get_logger().info('Faraday Server is ready') | |
94 | ||
95 | # Now that server is ready to go, run in background if requested | |
96 | if cli_arguments.start: | |
97 | daemonize.start_server() | |
120 | server.database.initialize() | |
121 | server.app.setup() | |
122 | web_server = server.web.WebServer(enable_ssl=args.ssl) | |
98 | 123 | |
99 | 124 | daemonize.create_pid_file() |
100 | ||
125 | logger.info('Faraday Server is ready') | |
101 | 126 | web_server.run() |
102 | 127 | |
103 | 128 | if __name__ == '__main__': |
55 | 55 | FARADAY_BASE_ZSH = os.path.join(FARADAY_BASE, CONST_FARADAY_ZSH_FARADAY) |
56 | 56 | |
57 | 57 | FARADAY_VERSION_FILE = os.path.join(FARADAY_BASE, CONST_VERSION_FILE) |
58 | FARADAY_CONFIG = os.path.join(FARADAY_BASE, CONST_CONFIG) | |
59 | 58 | FARADAY_REQUIREMENTS_FILE = os.path.join(FARADAY_BASE, CONST_REQUIREMENTS_FILE) |
60 | 59 | |
61 | 60 | REQUESTS_CA_BUNDLE_VAR = "REQUESTS_CA_BUNDLE" |
313 | 312 | import string |
314 | 313 | couchURL = getInstanceConfiguration().getCouchURI() |
315 | 314 | if couchURL: |
316 | url = "%s/reports/_design/reports/index.html" % couchURL | |
315 | url = "%s/_ui" % couchURL | |
317 | 316 | print(Fore.WHITE + Style.BRIGHT + \ |
318 | 317 | "\n*" + string.center("faraday ui is ready", 53 - 6) ) |
319 | 318 | print(Fore.WHITE + Style.BRIGHT + \ |
28 | 28 | DELHOST = 4101 |
29 | 29 | EDITHOST = 4102 |
30 | 30 | CHANGEFROMINSTANCE = 5100 |
31 | UPDATEMODEL_ID = 54321 | |
32 | 31 | CONNECTION_REFUSED = 42424 |
33 | 32 | WORKSPACE_PROBLEM = 24242 |
34 | ||
33 | ADDOBJECT = 7777 | |
34 | DELETEOBJECT = 8888 | |
35 | UPDATEOBJECT = 9999 | |
35 | 36 | |
36 | 37 | class CustomEvent(object): |
37 | 38 | def __init__(self, type): |
94 | 95 | |
95 | 96 | |
96 | 97 | class WorkspaceChangedCustomEvent(CustomEvent): |
97 | def __init__(self, workspace,workspace_type): | |
98 | def __init__(self, workspace): | |
98 | 99 | CustomEvent.__init__(self, WORKSPACE_CHANGED) |
99 | 100 | self.workspace = workspace |
100 | self.workspace_type = workspace_type | |
101 | 101 | |
102 | 102 | |
103 | 103 | class ConflictUpdatedCustomEvent(CustomEvent): |
124 | 124 | CustomEvent.__init__(self, CLEARHOSTS_ID) |
125 | 125 | |
126 | 126 | |
127 | class ModelObjectUpdateEvent(CustomEvent): | |
128 | def __init__(self, hosts): | |
129 | CustomEvent.__init__(self, UPDATEMODEL_ID) | |
130 | self.hosts = hosts | |
131 | ||
132 | ||
133 | 127 | class AddHostCustomEvent(CustomEvent): |
134 | 128 | def __init__(self, host): |
135 | 129 | CustomEvent.__init__(self, ADDHOST) |
149 | 143 | |
150 | 144 | |
151 | 145 | class ChangeFromInstanceCustomEvent(CustomEvent): |
152 | def __init__(self, change): | |
146 | def __init__(self, object_id, object_type, object_name, | |
147 | deleted=False, update=False): | |
153 | 148 | CustomEvent.__init__(self, CHANGEFROMINSTANCE) |
154 | self.change = change | |
149 | self.object_id = object_id | |
150 | self.object_type = object_type | |
151 | self.object_name = object_name | |
152 | self.deleted = deleted | |
153 | self.updated_or_created = "updated" if update else "created" | |
154 | ||
155 | def __str__(self): | |
156 | if not self.object_type or not self.object_name and self.deleted: | |
157 | return "An object was deleted" | |
158 | if self.deleted: | |
159 | return "The {0} {1} was deleted".format(self.object_type, self.object_name) | |
160 | return "The {0} {1} was {2}".format(self.object_type, | |
161 | self.object_name, | |
162 | self.updated_or_created) | |
163 | ||
164 | class AddObjectCustomEvent(CustomEvent): | |
165 | def __init__(self, new_obj): | |
166 | CustomEvent.__init__(self, ADDOBJECT) | |
167 | self.new_obj = new_obj | |
168 | ||
169 | class DeleteObjectCustomEvent(CustomEvent): | |
170 | def __init__(self, obj_id): | |
171 | CustomEvent.__init__(self, DELETEOBJECT) | |
172 | self.obj_id = obj_id | |
173 | ||
174 | class UpdateObjectCustomEvent(CustomEvent): | |
175 | def __init__(self, obj): | |
176 | CustomEvent.__init__(self, UPDATEOBJECT) | |
177 | self.obj = obj |
6 | 6 | |
7 | 7 | ''' |
8 | 8 | |
9 | import os | |
10 | import sys | |
11 | import threading | |
12 | import webbrowser | |
9 | import os, sys, threading, webbrowser, time | |
10 | from utils.logs import getLogger | |
13 | 11 | |
14 | 12 | try: |
15 | 13 | import gi |
39 | 37 | "Check that you have GTK+3 and Vte installed.") |
40 | 38 | sys.exit(1) |
41 | 39 | |
42 | ||
43 | 40 | import model.guiapi |
44 | 41 | import model.api |
45 | 42 | import model.log |
47 | 44 | from gui.gui_app import FaradayUi |
48 | 45 | from config.configuration import getInstanceConfiguration |
49 | 46 | from utils.logs import getLogger |
50 | from persistence.persistence_managers import CouchDbManager | |
47 | from persistence.server import models | |
51 | 48 | from appwindow import AppWindow |
52 | 49 | |
50 | from server import ServerIO | |
53 | 51 | from dialogs import PreferenceWindowDialog |
54 | 52 | from dialogs import NewWorkspaceDialog |
55 | 53 | from dialogs import PluginOptionsDialog |
56 | 54 | from dialogs import NotificationsDialog |
57 | 55 | from dialogs import aboutDialog |
58 | from dialogs import helpDialog | |
59 | 56 | from dialogs import ConflictsDialog |
60 | 57 | from dialogs import HostInfoDialog |
61 | 58 | from dialogs import ForceChooseWorkspaceDialog |
118 | 115 | 16, False) |
119 | 116 | self.window = None |
120 | 117 | self.model_controller = model_controller |
121 | self.conflicts = self.model_controller.getConflicts() | |
118 | ||
119 | @property | |
120 | def active_ws_name(self): | |
121 | return self.get_active_workspace().name | |
122 | ||
123 | def get_active_workspace(self): | |
124 | """Return the currently active workspace""" | |
125 | return self.workspace_manager.getActiveWorkspace() | |
122 | 126 | |
123 | 127 | def getMainWindow(self): |
124 | 128 | """Useless mostly, but guiapi uses this method to access the main |
125 | 129 | window.""" |
126 | 130 | return self.window |
127 | ||
128 | def updateConflicts(self): | |
129 | """Reassings self.conflicts with an updated list of conflicts""" | |
130 | self.conflicts = self.model_controller.getConflicts() | |
131 | ||
132 | def updateHosts(self): | |
133 | """Reassings the value of self.all_hosts to a current one to | |
134 | catch workspace changes, new hosts added via plugins or any other | |
135 | external interference with our host list""" | |
136 | self.all_hosts = self.model_controller.getAllHosts() | |
137 | return self.all_hosts | |
138 | 131 | |
139 | 132 | def createWorkspace(self, name, description=""): |
140 | 133 | """Uses the instance of workspace manager passed into __init__ to |
152 | 145 | model.api.devlog("Looking for the delegation class") |
153 | 146 | manager = self.getWorkspaceManager() |
154 | 147 | try: |
155 | w = manager.createWorkspace(name, description, | |
156 | manager.namedTypeToDbType('CouchDB')) | |
157 | self.change_workspace(w.name) | |
148 | name = manager.createWorkspace(name, description) | |
149 | self.change_workspace(name) | |
158 | 150 | creation_ok = True |
159 | 151 | except Exception as e: |
160 | 152 | model.guiapi.notification_center.showDialog(str(e)) |
164 | 156 | |
165 | 157 | def remove_workspace(self, button, ws_name): |
166 | 158 | """Removes a workspace. If the workspace to be deleted is the one |
167 | selected, it moves you first to the default. The clears and refreshes | |
159 | selected, it moves you to the one above it on the list. If there | |
160 | aren't more workspaces left, you will be forced to create one. | |
161 | The clears and refreshes | |
168 | 162 | sidebar""" |
169 | ||
170 | 163 | model.api.log("Removing Workspace: %s" % ws_name) |
171 | self.getWorkspaceManager().removeWorkspace(ws_name) | |
172 | self.ws_sidebar.clear_sidebar() | |
173 | self.ws_sidebar.refresh_sidebar() | |
174 | self.select_active_workspace() | |
164 | server_response = self.getWorkspaceManager().removeWorkspace(ws_name) | |
165 | if server_response: | |
166 | self.ws_sidebar.clear_sidebar() | |
167 | self.ws_sidebar.refresh_sidebar() | |
168 | available_workspaces = self.serverIO.get_workspaces_names() | |
169 | if available_workspaces: | |
170 | self.select_last_workspace_in_list(available_workspaces) | |
171 | else: | |
172 | self.handle_no_active_workspace() | |
175 | 173 | |
176 | 174 | def lost_db_connection(self, explanatory_message=None, |
177 | handle_connection_lost=None, | |
178 | connect_to_a_different_couch=None): | |
175 | handle_connection_lost=None, | |
176 | connect_to_a_different_couch=None): | |
179 | 177 | """Creates a simple dialog with an error message to inform the user |
180 | 178 | some kind of problem has happened and the connection was lost. |
181 | 179 | """ |
192 | 190 | |
193 | 191 | self.lost_connection_dialog_raised = True |
194 | 192 | |
195 | if explanatory_message: | |
193 | if explanatory_message and isinstance(explanatory_message, basestring): | |
196 | 194 | explanation = "\n The specific error was: " + explanatory_message |
197 | 195 | else: |
198 | 196 | explanation = "" |
237 | 235 | if self.workspace_dialogs_raised: |
238 | 236 | return False |
239 | 237 | |
240 | if not CouchDbManager.testCouch(CONF.getCouchURI()): | |
238 | if not self.serverIO.is_server_up(): | |
241 | 239 | # make sure it is not because we're not connected to Couch |
242 | 240 | # there's another whole strategy for that. |
243 | 241 | return False |
244 | 242 | |
245 | 243 | self.workspace_dialogs_raised = True |
246 | ||
247 | available_workspaces = self.workspace_manager.getWorkspacesNames() | |
244 | self.ws_sidebar.refresh_sidebar() | |
245 | ||
246 | available_workspaces = self.serverIO.get_workspaces_names() | |
248 | 247 | workspace_model = self.ws_sidebar.workspace_model |
249 | 248 | |
250 | 249 | if available_workspaces: |
264 | 263 | |
265 | 264 | def select_active_workspace(self): |
266 | 265 | """Selects on the sidebar the currently active workspace.""" |
267 | active_ws_name = self.get_active_workspace().name | |
268 | self.ws_sidebar.select_ws_by_name(active_ws_name) | |
269 | ||
270 | def get_active_workspace(self): | |
271 | """Return the currently active workspace""" | |
272 | return self.workspace_manager.getActiveWorkspace() | |
266 | self.ws_sidebar.select_ws_by_name(self.active_ws_name) | |
267 | ||
268 | def select_last_workspace_in_list(self, ws_names_list): | |
269 | self.ws_sidebar.select_ws_by_name(ws_names_list[-1]) | |
273 | 270 | |
274 | 271 | def exit_faraday(self, button=None, parent=None): |
275 | 272 | """A simple exit which will ask for confirmation.""" |
304 | 301 | |
305 | 302 | preference_window.run() |
306 | 303 | |
307 | def connect_to_couch(self, couch_uri, parent=None): | |
304 | def connect_to_couch(self, server_uri, parent=None): | |
308 | 305 | """Tries to connect to a CouchDB on a specified Couch URI. |
309 | 306 | Returns the success status of the operation, False for not successful, |
310 | 307 | True for successful |
312 | 309 | if parent is None: |
313 | 310 | parent = self.window |
314 | 311 | |
315 | if not CouchDbManager.testCouch(couch_uri): | |
312 | if not self.serverIO.test_server_url(server_uri): | |
316 | 313 | errorDialog(parent, "Could not connect to Faraday Server.", |
317 | 314 | ("Are you sure it is running and that you can " |
318 | 315 | "connect to it? \n Make sure your username and " |
319 | 316 | "password are still valid.")) |
320 | 317 | success = False |
321 | elif couch_uri.startswith("https://"): | |
322 | if not checkSSL(couch_uri): | |
318 | elif server_uri.startswith("https://"): | |
319 | if not checkSSL(server_uri): | |
323 | 320 | errorDialog(self.window, |
324 | 321 | "The SSL certificate validation has failed") |
325 | 322 | success = False |
326 | 323 | else: |
327 | CONF.setCouchUri(couch_uri) | |
324 | CONF.setCouchUri(server_uri) | |
328 | 325 | CONF.saveConfig() |
329 | 326 | self.reload_workspaces() |
330 | 327 | self.open_last_workspace() |
346 | 343 | return reconnected |
347 | 344 | |
348 | 345 | def update_counts(self): |
349 | """Update the counts for host, services and vulns""" | |
350 | host_count = self.model_controller.getHostsCount() | |
351 | service_count = self.model_controller.getServicesCount() | |
352 | vuln_count = self.model_controller.getVulnsCount() | |
353 | return host_count, service_count, vuln_count | |
346 | """Returns the counts of hosts, services and vulns on the current | |
347 | workspace.""" | |
348 | hosts, interfaces, services, vulns = self.serverIO.get_workspace_numbers() | |
349 | return hosts, services, vulns | |
354 | 350 | |
355 | 351 | def show_host_info(self, host_id): |
356 | 352 | """Looks up the host selected in the HostSidebar by id and shows |
357 | its information on the HostInfoDialog""" | |
353 | its information on the HostInfoDialog. | |
354 | ||
355 | Return True if everything went OK, False if there was a problem | |
356 | looking for the host.""" | |
358 | 357 | current_ws_name = self.get_active_workspace().name |
359 | 358 | |
360 | for host in self.all_hosts: | |
361 | if host_id == host.id: | |
362 | selected_host = host | |
363 | break | |
364 | ||
365 | info_window = HostInfoDialog(self.window, current_ws_name, selected_host) | |
359 | #for host in self.model_controller.getAllHosts(): | |
360 | host = self.serverIO.get_hosts(couchid=host_id) | |
361 | if not host: | |
362 | self.show_normal_error("The host you clicked isn't accessible. " | |
363 | "This is most probably due to an internal " | |
364 | "error.") | |
365 | return False | |
366 | ||
367 | info_window = HostInfoDialog(self.window, current_ws_name, host[0]) | |
366 | 368 | info_window.show_all() |
367 | ||
368 | def reload_worskpaces_no_connection(self): | |
369 | return True | |
370 | ||
371 | def reload_workspaces_no_connection(self): | |
369 | 372 | """Very similar to reload_workspaces, but doesn't resource the |
370 | 373 | workspace_manager to avoid asking for information to a database |
371 | 374 | we can't access.""" |
377 | 380 | clears the sidebar of the old workspaces and injects all the new ones |
378 | 381 | in there too""" |
379 | 382 | self.workspace_manager.closeWorkspace() |
380 | self.workspace_manager.resource() | |
381 | 383 | self.ws_sidebar.clear_sidebar() |
382 | 384 | self.ws_sidebar.refresh_sidebar() |
383 | 385 | |
391 | 393 | def change_workspace(self, workspace_name): |
392 | 394 | """Changes workspace in a separate thread. Emits a signal |
393 | 395 | to present a 'Loading workspace' dialog while Faraday processes |
394 | the change""" | |
396 | the change. If there are conflict present in the workspace, it will | |
397 | show a warning before changing the workspaces.""" | |
395 | 398 | |
396 | 399 | def loading_workspace(action): |
397 | 400 | """Function to be called via GObject.idle_add by the background |
401 | 404 | |
402 | 405 | if action == "show" and not self.loading_dialog_raised: |
403 | 406 | message_string = ("Loading workspace {0}. Please wait. \n" |
404 | "To cancel, press Alt+F4 or a similar shorcut." | |
405 | .format(workspace_name)) | |
407 | "To cancel, press Alt+F4 or a similar shorcut." | |
408 | .format(workspace_name)) | |
406 | 409 | |
407 | 410 | self.loading_dialog_raised = True |
408 | 411 | self.loading_dialog = Gtk.MessageDialog(self.window, 0, |
438 | 441 | GObject.idle_add(CONF.saveConfig) |
439 | 442 | except Exception as e: |
440 | 443 | GObject.idle_add(self.handle_no_active_workspace) |
441 | model.guiapi.notification_center.showDialog(str(e)) | |
444 | getLogger("GTK").error(e) | |
442 | 445 | |
443 | 446 | GObject.idle_add(loading_workspace, 'destroy') |
444 | 447 | return True |
445 | 448 | |
446 | 449 | self.ws_sidebar.select_ws_by_name(workspace_name) |
450 | if self.statusbar.conflict_button_label_int > 0: | |
451 | response = self.window.show_conflicts_warning() | |
452 | if response == Gtk.ResponseType.NO: | |
453 | self.select_active_workspace() | |
454 | return False | |
455 | ||
447 | 456 | thread = threading.Thread(target=background_process) |
448 | 457 | thread.daemon = True |
449 | 458 | thread.start() |
485 | 494 | events module is updated. |
486 | 495 | |
487 | 496 | DO NOT, AND I REPEAT, DO NOT REDRAW *ANYTHING* FROM THE GUI |
488 | FROM HERE. If you must do it, you should to it sing Glib.idle_add, | |
497 | FROM HERE. If you must do it, you should to it sing GObject.idle_add, | |
489 | 498 | a misterious function with outdated documentation. Good luck.""" |
490 | 499 | |
491 | type_ = event.type() | |
492 | ||
493 | if type_ == 3131: # new log event | |
500 | def new_log_event(): | |
494 | 501 | GObject.idle_add(self.console_log.customEvent, event.text) |
495 | 502 | |
496 | elif type_ == 3141: # new conflict event | |
503 | def new_conflict_event(): | |
497 | 504 | GObject.idle_add(self.statusbar.update_conflict_button_label, |
498 | 505 | event.nconflicts) |
499 | 506 | |
500 | elif type_ == 5100: # new notification event | |
501 | self.notificationsModel.prepend([event.change.getMessage()]) | |
507 | def new_notification_event(): | |
508 | self.notificationsModel.prepend([str(event)]) | |
502 | 509 | GObject.idle_add(self.statusbar.inc_notif_button_label) |
503 | 510 | host_count, service_count, vuln_count = self.update_counts() |
504 | 511 | GObject.idle_add(self.statusbar.update_ws_info, host_count, |
505 | 512 | service_count, vuln_count) |
506 | 513 | |
507 | # in order: add host, delete host, edit host, workspace_change | |
508 | elif type_ in {4100, 4101, 4102, 3140}: | |
514 | def workspace_changed_event(): | |
515 | self.serverIO.active_workspace = event.workspace.name | |
509 | 516 | host_count, service_count, vuln_count = self.update_counts() |
510 | GObject.idle_add(self.hosts_sidebar.update, self.updateHosts()) | |
517 | total_host_amount = self.serverIO.get_hosts_number() | |
518 | first_host_page = self.serverIO.get_hosts(page='0', page_size='20', sort='vulns', sort_dir='desc') | |
519 | GObject.idle_add(self.statusbar.set_workspace_label, event.workspace.name) | |
520 | GObject.idle_add(self.hosts_sidebar.redo, first_host_page, total_host_amount) | |
511 | 521 | GObject.idle_add(self.statusbar.update_ws_info, host_count, |
512 | 522 | service_count, vuln_count) |
523 | GObject.idle_add(self.statusbar.set_default_conflict_label) | |
524 | GObject.idle_add(self.statusbar.set_default_conflict_label) | |
513 | 525 | GObject.idle_add(self.select_active_workspace) |
514 | 526 | |
515 | elif type_ == 3132: # error | |
527 | def normal_error_event(): | |
516 | 528 | GObject.idle_add(self.show_normal_error, event.text) |
517 | 529 | |
518 | elif type_ == 3134: # important error, uncaught exception | |
530 | def important_error_event(): | |
519 | 531 | GObject.idle_add(self.show_important_error, event) |
520 | 532 | |
521 | elif type_ == 42424: # lost connection to couch db | |
533 | def lost_connection_to_server_event(): | |
522 | 534 | GObject.idle_add(self.lost_db_connection, event.problem, |
523 | 535 | self.handle_connection_lost, |
524 | 536 | self.force_change_couch_url) |
525 | GObject.idle_add(self.reload_worskpaces_no_connection) | |
526 | ||
527 | elif type_ == 24242: # workspace not accesible | |
537 | GObject.idle_add(self.reload_workspaces_no_connection) | |
538 | ||
539 | def workspace_not_accessible_event(): | |
528 | 540 | GObject.idle_add(self.handle_no_active_workspace) |
541 | ||
542 | def add_object(): | |
543 | GObject.idle_add(self.hosts_sidebar.add_object, event.new_obj) | |
544 | host_count, service_count, vuln_count = self.update_counts() | |
545 | GObject.idle_add(self.statusbar.update_ws_info, host_count, | |
546 | service_count, vuln_count) | |
547 | ||
548 | def delete_object(): | |
549 | GObject.idle_add(self.hosts_sidebar.remove_object, event.obj_id) | |
550 | host_count, service_count, vuln_count = self.update_counts() | |
551 | GObject.idle_add(self.statusbar.update_ws_info, host_count, | |
552 | service_count, vuln_count) | |
553 | ||
554 | def update_object(): | |
555 | GObject.idle_add(self.hosts_sidebar.update_object, event.obj) | |
556 | host_count, service_count, vuln_count = self.update_counts() | |
557 | GObject.idle_add(self.statusbar.update_ws_info, host_count, | |
558 | service_count, vuln_count) | |
559 | ||
560 | dispatch = {3131: new_log_event, | |
561 | 3141: new_conflict_event, | |
562 | 5100: new_notification_event, | |
563 | 3140: workspace_changed_event, | |
564 | 3132: normal_error_event, | |
565 | 3134: important_error_event, | |
566 | 42424: lost_connection_to_server_event, | |
567 | 24242: workspace_not_accessible_event, | |
568 | 7777: add_object, | |
569 | 8888: delete_object, | |
570 | 9999: update_object} | |
571 | ||
572 | function = dispatch.get(event.type()) | |
573 | if function is not None: | |
574 | function() | |
529 | 575 | |
530 | 576 | def show_normal_error(self, dialog_text): |
531 | 577 | """Just a simple, normal, ignorable error""" |
558 | 604 | """ |
559 | 605 | Gtk.Application.do_startup(self) # deep GTK magic |
560 | 606 | |
561 | self.ws_sidebar = WorkspaceSidebar(self.workspace_manager, | |
607 | self.serverIO = ServerIO(CONF.getLastWorkspace()) | |
608 | self.serverIO.continously_check_server_connection() | |
609 | ||
610 | self.ws_sidebar = WorkspaceSidebar(self.serverIO, | |
562 | 611 | self.change_workspace, |
563 | 612 | self.remove_workspace, |
564 | 613 | self.on_new_button, |
565 | 614 | CONF.getLastWorkspace()) |
566 | 615 | |
567 | # XXX: do not move next line, it is very important it stays there, | |
568 | # just after the creation of the sidebar and before updateHosts. | |
569 | # correct fix: move the creation of the ws_model to the application | |
570 | ||
571 | workspace_argument_set = self.open_workspace_from_args() | |
572 | if not workspace_argument_set: | |
573 | self.open_last_workspace() | |
574 | ||
575 | self.updateHosts() | |
576 | self.hosts_sidebar = HostsSidebar(self.show_host_info, self.icons) | |
577 | default_model = self.hosts_sidebar.create_model(self.all_hosts) | |
616 | # the dummy values here will be updated as soon as the ws is loaded. | |
617 | self.hosts_sidebar = HostsSidebar(self.show_host_info, self.serverIO.get_hosts, | |
618 | self.icons) | |
619 | default_model = self.hosts_sidebar.create_model([]) # dummy empty list | |
578 | 620 | self.hosts_sidebar.create_view(default_model) |
579 | ||
580 | 621 | self.sidebar = Sidebar(self.ws_sidebar.get_box(), |
581 | 622 | self.hosts_sidebar.get_box()) |
582 | 623 | |
583 | host_count, service_count, vuln_count = self.update_counts() | |
584 | ||
624 | host_count, service_count, vuln_count = 0, 0, 0 # dummy values | |
585 | 625 | self.terminal = Terminal(CONF) |
586 | 626 | self.console_log = ConsoleLog() |
587 | 627 | self.statusbar = Statusbar(self.on_click_notifications, |
590 | 630 | |
591 | 631 | self.notificationsModel = Gtk.ListStore(str) |
592 | 632 | |
593 | action_to_method = {"about" : self.on_about, | |
594 | "help" : self.on_help, | |
595 | "quit" : self.on_quit, | |
596 | "preferences" : self.on_preferences, | |
597 | "pluginOptions" : self.on_plugin_options, | |
598 | "new" : self.on_new_button, | |
599 | "new_terminal" : self.on_new_terminal_button, | |
600 | "open_report" : self.on_open_report_button, | |
601 | "go_to_web_ui" : self.on_click_go_to_web_ui_button | |
633 | action_to_method = {"about": self.on_about, | |
634 | "quit": self.on_quit, | |
635 | "preferences": self.on_preferences, | |
636 | "pluginOptions": self.on_plugin_options, | |
637 | "new": self.on_new_button, | |
638 | "new_terminal": self.on_new_terminal_button, | |
639 | "open_report": self.on_open_report_button, | |
640 | "go_to_web_ui": self.on_click_go_to_web_ui_button, | |
641 | "go_to_documentation": self.on_help_dispatch, | |
642 | "go_to_faq": self.on_help_dispatch, | |
643 | "go_to_troubleshooting": self.on_help_dispatch, | |
644 | "go_to_demos": self.on_help_dispatch, | |
645 | "go_to_issues": self.on_help_dispatch, | |
646 | "go_to_forum": self.on_help_dispatch, | |
647 | "go_to_irc": self.on_help_dispatch, | |
648 | "go_to_twitter": self.on_help_dispatch, | |
649 | "go_to_googlegroup": self.on_help_dispatch | |
602 | 650 | } |
603 | 651 | |
604 | 652 | for action, method in action_to_method.items(): |
611 | 659 | builder.connect_signals(self) |
612 | 660 | appmenu = builder.get_object('appmenu') |
613 | 661 | self.set_app_menu(appmenu) |
662 | ||
614 | 663 | helpMenu = builder.get_object('Help') |
615 | 664 | self.set_menubar(helpMenu) |
616 | 665 | |
644 | 693 | notifier.widget = self.window |
645 | 694 | model.guiapi.notification_center.registerWidget(self.window) |
646 | 695 | |
647 | if not CouchDbManager.testCouch(CONF.getCouchURI()): | |
696 | if not self.serverIO.is_server_up(): | |
648 | 697 | self.lost_db_connection( |
649 | 698 | handle_connection_lost=self.handle_connection_lost, |
650 | 699 | connect_to_a_different_couch=self.force_change_couch_url) |
700 | ||
701 | workspace_argument_set = self.open_workspace_from_args() | |
702 | if not workspace_argument_set: | |
703 | self.open_last_workspace() | |
651 | 704 | |
652 | 705 | def on_quit(self, action=None, param=None): |
653 | 706 | self.quit() |
693 | 746 | """Doesn't use the button at all, there cause GTK likes it. |
694 | 747 | Shows the conflict dialog. |
695 | 748 | """ |
696 | self.updateConflicts() | |
697 | if self.conflicts: | |
698 | dialog = ConflictsDialog(self.conflicts, | |
749 | conflicts = self.model_controller.getConflicts() | |
750 | if conflicts: | |
751 | dialog = ConflictsDialog(conflicts, | |
699 | 752 | self.window) |
700 | 753 | dialog.show_all() |
701 | self.updateConflicts() | |
702 | 754 | |
703 | 755 | else: |
704 | 756 | dialog = Gtk.MessageDialog(self.window, 0, |
770 | 822 | about_dialog.run() |
771 | 823 | about_dialog.destroy() |
772 | 824 | |
773 | def on_help(self, action, param): | |
774 | """Defines what happens when user press 'help' on the menu""" | |
775 | help_dialog = helpDialog(self.window) | |
776 | help_dialog.run() | |
777 | help_dialog.destroy() | |
778 | ||
779 | 825 | def on_preferences(self, action=None, param=None): |
780 | 826 | """Defines what happens when you press 'preferences' on the menu. |
781 | 827 | Sends as a callback reloadWsManager, so if the user actually |
795 | 841 | ws_name = self.workspace_manager.getActiveWorkspace().name |
796 | 842 | ws_url = couch_url + "/_ui/#/dashboard/ws/" + ws_name |
797 | 843 | webbrowser.open(ws_url, new=2) |
844 | ||
845 | def on_help_dispatch(self, action, param=None): | |
846 | """Open the url contained in "action" in the user's browser.""" | |
847 | urls = {"go_to_documentation": "https://faradaysec.com/help/docs", | |
848 | "go_to_faq": "https://faradaysec.com/help/faq", | |
849 | "go_to_troubleshooting": "https://faradaysec.com/help/troubleshooting", | |
850 | "go_to_demos": "https://faradaysec.com/help/demos", | |
851 | "go_to_issues": "https://faradaysec.com/help/issues", | |
852 | "go_to_forum": "https://forum.faradaysec.com", | |
853 | "go_to_irc": "https://faradaysec.com/help/irc", | |
854 | "go_to_twitter": "https://faradaysec.com/help/twitter", | |
855 | "go_to_googlegroup": "https://faradaysec.com/help/googlegroup" | |
856 | } | |
857 | url = urls.get(action.get_name(), "https://faradaysec.com") | |
858 | webbrowser.open(url, new=2) |
275 | 275 | current_state = self.log_box.is_visible() |
276 | 276 | self.log_box.set_visible(not current_state) |
277 | 277 | |
278 | def show_conflicts_warning(self): | |
279 | warning_string = ("There are conflicts that need manual " | |
280 | "handling. Closing Faraday or changing workspaces " | |
281 | "may result in the loss of relevant information. " | |
282 | "Are you sure you want to continue?") | |
283 | dialog = Gtk.MessageDialog(self, 0, | |
284 | Gtk.MessageType.QUESTION, | |
285 | Gtk.ButtonsType.YES_NO, | |
286 | warning_string) | |
287 | response = dialog.run() | |
288 | dialog.destroy() | |
289 | return response | |
290 | ||
278 | 291 | def do_delete_event(self, event=None, status=None, parent=None): |
279 | 292 | """Override delete_event signal to show a confirmation dialog first. |
280 | 293 | """ |
283 | 296 | |
284 | 297 | # NOTE: Return False for 'yes' is weird but that's how gtk likes it |
285 | 298 | # Don't judge, man. Don't judge. |
299 | if self.statusbar.conflict_button_label_int > 0: | |
300 | response = self.show_conflicts_warning() | |
301 | if response == Gtk.ResponseType.NO: | |
302 | return True | |
303 | else: | |
304 | return False | |
286 | 305 | |
287 | 306 | dialog = Gtk.MessageDialog(transient_for=parent, |
288 | 307 | modal=True, |
0 | 0 | from gi.repository import Gtk |
1 | from utils.logs import getLogger | |
1 | 2 | from functools import wraps |
2 | 3 | from compatibility import CompatibleScrolledWindow as GtkScrolledWindow |
4 | from persistence.server.server import ServerRequestException | |
3 | 5 | |
def safe_io_with_server(response_in_emergency):
    """A function that takes a response_in_emergency. It will return
    a safe_decorator, which will try to execute a function and, in case
    a ServerRequestException is raised, log it and return the
    response_in_emergency instead of propagating the error.
    """
    def safe_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            try:
                res = func(*args, **kwargs)
            except ServerRequestException as e:
                # Degrade gracefully: log the server error and hand the
                # caller the pre-chosen fallback value.
                res = response_in_emergency
                getLogger("Server-GTK IO").warning(e)
            return res
        return wrapper
    return safe_decorator
4 | 22 | |
5 | 23 | def scrollable(width=-1, height=-1, overlay_scrolling=False): |
6 | 24 | """A function that takes optinal width and height and returns |
7 | 25 | the scrollable decorator. -1 is the default GTK option for both |
8 | 26 | width and height.""" |
9 | ||
10 | 27 | def scrollable_decorator(func): |
11 | 28 | """Takes a function and returns the scroll_object_wrapper.""" |
12 | ||
13 | 29 | @wraps(func) |
14 | 30 | def scroll_object_wrapper(*args, **kwargs): |
15 | 31 | """Takes arguments and obtains the original object from |
611 | 611 | # only the ID and the name are needed, but i still need to 'fill' |
612 | 612 | # the other columns with dummy info |
613 | 613 | |
614 | display_str = host.getName() + " (" + str(len(host.getVulns())) + ")" | |
614 | # display_str = host.getName() + " (" + str(len(host.getVulns())) + ")" | |
615 | display_str = str(host) | |
615 | 616 | owned_status = ("Yes" if host.isOwned() else "No") |
616 | 617 | host_position = model.append(None, [host.getID(), host.getName(), |
617 | 618 | host.getOS(), owned_status, |
634 | 635 | """ |
635 | 636 | ipv4_dic = interface.getIPv4() |
636 | 637 | ipv6_dic = interface.getIPv6() |
637 | vulns = interface.getVulns() | |
638 | display_str = interface.getName() + " (" + str(len(vulns)) + ")" | |
638 | display_str = str(interface) | |
639 | 639 | |
640 | 640 | position = model.append(host_pos, [interface.getID(), |
641 | 641 | interface.getName(), |
655 | 655 | def add_service_to_interface_in_model(service, interface_pos, model): |
656 | 656 | """Append a service to an interface at interface_pos in the given |
657 | 657 | model. Return None. Modifies the model""" |
658 | vulns = service.getVulns() | |
659 | display_str = service.getName() + " (" + str(len(vulns)) + ")" | |
658 | display_str = str(service) | |
660 | 659 | model.append(interface_pos, [service.getID(), |
661 | 660 | service.getName(), |
662 | 661 | service.getDescription(), |
725 | 724 | prop_names = self.get_properties_names(object_type) |
726 | 725 | self.show_info_in_box(object_info, prop_names, self.specific_info) |
727 | 726 | actual_object = self.get_object(object_info, object_type) |
727 | if not actual_object: | |
728 | return None | |
728 | 729 | vuln_model = self.create_vuln_model(actual_object) |
729 | 730 | self.set_vuln_model(vuln_model) |
730 | 731 | |
760 | 761 | """Return the model for the vulnerabilities of the obj object. |
761 | 762 | It will be sorted alphabetically. |
762 | 763 | """ |
763 | ||
764 | 764 | def params_to_string(params): # XXX |
765 | 765 | """Converts params to a string, in case it gets here as a list. |
766 | 766 | It's pretty anoyting, but needed for backwards compatibility. |
798 | 798 | vuln.getResponse(), vuln.getMethod(), |
799 | 799 | vuln.getPname(), |
800 | 800 | params_to_string(vuln.getParams()), |
801 | vuln.getQuery(), vuln.getCategory()]) | |
801 | vuln.getQuery(), ""]) | |
802 | 802 | # sort it! |
803 | 803 | sorted_model = Gtk.TreeModelSort(model=model) |
804 | 804 | sorted_model.set_sort_column_id(1, Gtk.SortType.ASCENDING) |
847 | 847 | """Take a selection as selected_object and an object_type |
848 | 848 | and return the actual object, not the model's selection. |
849 | 849 | """ |
850 | def safely(func): | |
851 | def safe_wrapper(*args, **kwargs): | |
852 | try: | |
853 | return func(*args, **kwargs) | |
854 | except IndexError, ValueError: | |
855 | dialog = errorDialog(self, ("There has been a problem. " | |
856 | "The object you clicked on " | |
857 | "does not exist anymore.")) | |
858 | self.destroy() # exit | |
859 | return safe_wrapper | |
860 | ||
850 | 861 | object_id = selected_object[0] |
851 | 862 | if object_type == 'Interface': |
852 | 863 | # an interface is a direct child of a host |
853 | object_ = self.host.getInterface(object_id) | |
864 | object_ = safely(self.host.getInterface)(object_id) | |
854 | 865 | elif object_type == 'Service': |
855 | 866 | # a service is a grand-child of a host, so we should look |
856 | 867 | # for its parent interface and ask her about the child |
857 | 868 | parent_interface_iter = selected_object.get_parent() |
858 | 869 | parent_interface_id = parent_interface_iter[0] |
859 | parent_interface = self.host.getInterface(parent_interface_id) | |
860 | object_ = parent_interface.getService(object_id) | |
870 | parent_interface = safely(self.host.getInterface)(parent_interface_id) | |
871 | if parent_interface: | |
872 | object_ = safely(parent_interface.getService)(object_id) | |
873 | else: | |
874 | object_ = None | |
861 | 875 | |
862 | 876 | return object_ |
863 | 877 | |
972 | 986 | button_box.pack_start(keep_left, False, False, 5) |
973 | 987 | button_box.pack_start(keep_right, False, False, 5) |
974 | 988 | return button_box |
989 | ||
990 | def _next_conflict_or_close(self): | |
991 | if len(self.conflicts)-1 > self.conflict_n: | |
992 | self.conflict_n += 1 | |
993 | self.update_current_conflict() | |
994 | self.update_current_conflict_model() | |
995 | self.set_conflict_view(self.conflict_n) | |
996 | else: | |
997 | self.destroy() | |
975 | 998 | |
976 | 999 | def save(self, button, keeper): |
977 | 1000 | """Saves information to Faraday. Keeper is needed to know if user |
996 | 1019 | |
997 | 1020 | try: |
998 | 1021 | guiapi.resolveConflict(self.current_conflict, solution) |
1022 | self._next_conflict_or_close() | |
999 | 1023 | # if this isn't the last conflict... |
1000 | if len(self.conflicts)-1 > self.conflict_n: | |
1001 | self.conflict_n += 1 | |
1002 | self.update_current_conflict() | |
1003 | self.update_current_conflict_model() | |
1004 | self.set_conflict_view(self.conflict_n) | |
1005 | else: | |
1006 | self.destroy() | |
1007 | 1024 | |
1008 | 1025 | except ValueError: |
1009 | 1026 | dialog = Gtk.MessageDialog(self, 0, |
1016 | 1033 | " numbers, and so on.")) |
1017 | 1034 | dialog.run() |
1018 | 1035 | dialog.destroy() |
1036 | ||
1037 | except KeyError: # TODO: revert this hack to prevent exception when | |
1038 | # fixing conflict of non existent object | |
1039 | dialog = Gtk.MessageDialog(self, 0, | |
1040 | Gtk.MessageType.INFO, | |
1041 | Gtk.ButtonsType.OK, | |
1042 | ("It seems like this conflict does not " | |
1043 | "exist anymore. Most probably someone " | |
1044 | "deleted the conflicting object from " | |
1045 | "the DB \n" | |
1046 | "Moving on to the next conflict.")) | |
1047 | dialog.run() | |
1048 | dialog.destroy() | |
1049 | self._next_conflict_or_close() | |
1019 | 1050 | |
1020 | 1051 | def case_for_interfaces(self, model, n): |
1021 | 1052 | """The custom case for the interfaces. Plays a little |
1236 | 1267 | obj.getDescription(), |
1237 | 1268 | obj.getData(), |
1238 | 1269 | obj.getSeverity(), |
1239 | obj.getRefs())) | |
1240 | ||
1241 | props = ["Name", "Desc", "Data", "Severity", "Refs"] | |
1270 | obj.getRefs(), | |
1271 | obj.getResolution())) | |
1272 | ||
1273 | props = ["Name", "Desc", "Data", "Severity", "Refs", "Resolution"] | |
1242 | 1274 | model = self.fill_model_from_props_and_attr(model, attr, props) |
1243 | 1275 | return model |
1244 | 1276 | |
1261 | 1293 | obj.getMethod(), |
1262 | 1294 | obj.getPname(), |
1263 | 1295 | obj.getParams(), |
1264 | obj.getQuery(), | |
1265 | obj.getCategory())) | |
1296 | obj.getQuery())) | |
1266 | 1297 | |
1267 | 1298 | props = ["Name", "Desc", "Data", "Severity", "Refs", "Path", |
1268 | 1299 | "Website", "Request", "Response", "Method", "Pname", |
1269 | "Params", "Query", "Category"] | |
1300 | "Params", "Query"] | |
1270 | 1301 | |
1271 | 1302 | model = self.fill_model_from_props_and_attr(model, attr, props) |
1272 | 1303 | return model |
1504 | 1535 | faraday_website = "http://www.infobytesec.com/faraday.html" |
1505 | 1536 | self.set_website(faraday_website) |
1506 | 1537 | self.set_website_label("Learn more about Faraday") |
1507 | ||
1508 | ||
1509 | class helpDialog(Gtk.AboutDialog): | |
1510 | """Using about dialog 'cause they are very similar, but this will | |
1511 | display github page, Wiki, and such""" | |
1512 | def __init__(self, main_window): | |
1513 | Gtk.AboutDialog.__init__(self, transient_for=main_window, modal=True) | |
1514 | icons = CONF.getImagePath() + "icons/" | |
1515 | faraday_icon = GdkPixbuf.Pixbuf.new_from_file(icons+"faraday_icon.png") | |
1516 | self.set_logo(faraday_icon) | |
1517 | self.set_program_name("Faraday") | |
1518 | self.set_comments("Farday is a Penetration Test IDE. " | |
1519 | "Just use one of the supported tools on Faraday's " | |
1520 | " terminal and a plugin will capture the output and " | |
1521 | "extract useful information for you.") | |
1522 | faraday_website = "https://github.com/infobyte/faraday/wiki" | |
1523 | self.set_website(faraday_website) | |
1524 | self.set_website_label("Learn more about how to use Faraday") | |
1525 | ||
1526 | 1538 | |
1527 | 1539 | class errorDialog(Gtk.MessageDialog): |
1528 | 1540 | """A simple error dialog to show the user where things went wrong. |
7 | 7 | ''' |
8 | 8 | import gi |
9 | 9 | import os |
10 | import math | |
10 | 11 | |
11 | 12 | gi.require_version('Gtk', '3.0') |
12 | 13 | |
113 | 114 | the Sidebar notebook. Will list all the host, and when clicking on one, |
114 | 115 | will open a window with more information about it""" |
115 | 116 | |
    def __init__(self, open_dialog_callback, get_host_function, icons):
        """Initializes the HostsSidebar. Initialization by itself does
        almost nothing, the application will immediately call create_model
        with the last workspace and create_view with that model upon startup.
        """
        Gtk.Widget.__init__(self)
        self.open_dialog_callback = open_dialog_callback  # opens host info dialog by host id
        self.get_host_function = get_host_function  # paginated host getter (server IO)
        self.current_model = None  # set by create_model()
        self.progress_label = Gtk.Label("")  # "page / total" pagination label
        self.host_amount = 0  # total hosts in the workspace, not just this page
        self.page = 0  # current 0-based page of the host list
        self.host_id_to_iter = {}  # host id -> Gtk.TreeIter of its row
        # Icon files used by __decide_icon to mark each host's OS.
        self.linux_icon = icons + "tux.png"
        self.windows_icon = icons + "windows.png"
        self.mac_icon = icons + "Apple.png"
        self.no_os_icon = icons + "TreeHost.png"
134 | ||
    def __compute_vuln_count(self, host):
        """Return the total vulnerability count for a given host.

        Delegates to the host object's getVulnAmount(); presumably that
        aggregates vulns of the host and its children -- confirm in the
        model layer.
        """
        return host.getVulnAmount()
138 | ||
    def __get_vuln_amount_from_model(self, host_id):
        """Given a host_id, it will look in the current model for the host_id
        and return the amount of vulnerabilities IF the host_id corresponds
        to the model ID. Else it will return None.
        """
        host_iter = self.host_id_to_iter.get(host_id)
        if host_iter:
            # Column 4 of the model holds the vuln count.
            return self.current_model[host_iter][4]
        # Falls through to an implicit None when the host is not
        # present in the currently displayed page.
147 | ||
    def __add_host_to_model(self, host):
        """Adds host to the model given as parameter in the initial load
        of the sidebar.

        Also records the host's row iter in host_id_to_iter so later
        updates/removals can locate the row directly.
        """
        vuln_count = self.__compute_vuln_count(host)
        os_icon, os_str = self.__decide_icon(host.getOS())
        display_str = str(host)
        host_iter = self.current_model.append([host.id, os_icon, os_str,
                                               display_str, vuln_count])
        self.host_id_to_iter[host.id] = host_iter
157 | ||
    def __add_host_to_model_after_initial_load(self, host):
        """Adds a host to the model after the initial load is done
        (host came through the changes or through a plugin).

        Every 20th host triggers a redo() onto the next page instead of a
        plain append -- presumably to keep the 20-per-page pagination
        consistent; confirm against redo()'s semantics.
        """
        self.host_amount += 1
        if self.host_amount % 20 == 0:
            self.redo([host], self.host_amount, page=self.page+1)
        else:
            self.__add_host_to_model(host)
166 | ||
167 | def __host_exists_in_current_model(self, host_id): | |
168 | return self.host_id_to_iter.get(host_id) is not None | |
169 | ||
    def __get_host_from_host_id(self, host_id):
        """Fetch the full host object for host_id via the host getter.

        Returns None when the query yields no host with that id (the
        returned list is empty).
        """
        try:
            return self.get_host_function(couchid=host_id)[0]
        except IndexError:
            return None
175 | ||
    def __add_vuln_to_model(self, vuln):
        """When a new vulnerability arrives, look up its host
        and update its vuln amount and its representation as a string."""
        host_id = self.__find_host_id(vuln)
        if self.__host_exists_in_current_model(host_id):
            # Re-fetch the host so the count reflects the server's state;
            # bail out quietly if it disappeared in the meantime.
            real_host = self.__get_host_from_host_id(host_id)
            if real_host is None: return
            vuln_amount = self.__compute_vuln_count(real_host)
            self.__update_host_str(host_id, new_vuln_amount=vuln_amount)
185 | ||
    def __remove_vuln_from_model(self, host_id):
        """When a vulnerability is deleted, look up its host
        and update its vuln amount and its representation as a string."""
        if self.__host_exists_in_current_model(host_id):
            # Re-fetch the host so the count reflects the server's state;
            # bail out quietly if it disappeared in the meantime.
            real_host = self.__get_host_from_host_id(host_id)
            if real_host is None: return
            vuln_amount = self.__compute_vuln_count(real_host)
            self.__update_host_str(host_id, new_vuln_amount=vuln_amount)
194 | ||
    def __update_host_str(self, host_id, new_vuln_amount=None, new_host_name=None):
        """Rewrite the display string (and vuln-count column) of the row
        belonging to host_id.

        Either argument may be omitted, in which case the current value is
        kept: the name is recovered from the text before the first space of
        the display string, the amount from column 4 of the model.
        """
        host_iter = self.host_id_to_iter[host_id]
        if not new_host_name:
            # Display strings look like "<name> (<vulns>)"; keep the name part.
            new_host_name = str(self.current_model[host_iter][3].split(" ")[0])
        if new_vuln_amount is None:
            new_vuln_amount = str(self.current_model[host_iter][4])
        new_string = "{0} ({1})".format(new_host_name, new_vuln_amount)
        self.current_model.set_value(host_iter, 3, new_string)
        self.current_model.set_value(host_iter, 4, int(new_vuln_amount))
206 | ||
    def __update_host_in_model(self, host):
        # Refresh only the row's display name; the vuln count column
        # is left as it was.
        self.__update_host_str(host.getID(), new_host_name=host.getName())
209 | ||
210 | def __remove_host_from_model(self, host_id): | |
211 | """Deletes a host from the model given as parameter.""" | |
212 | if self.__host_exists_in_current_model(host_id): | |
213 | host_iter = self.host_id_to_iter[host_id] | |
214 | could_be_removed = self.current_model.remove(host_iter) | |
215 | del self.host_id_to_iter[host_id] | |
216 | else: | |
217 | could_be_removed = False | |
218 | return could_be_removed | |
219 | ||
220 | def __find_host_id(self, object_info): | |
221 | object_id = object_info.getID() | |
222 | host_id = object_id.split(".")[0] | |
223 | return host_id | |
224 | ||
    def __decide_icon(self, os):
        """Return the GdkPixbuf icon according to the 'os' parameter string
        and a str_id to that GdkPixbuf for easy comparison and ordering
        of the view (the 'os' parameter string is complicated and has caps).
        """
        os = os.lower()
        if "linux" in os or "unix" in os:
            icon = GdkPixbuf.Pixbuf.new_from_file(self.linux_icon)
            str_id = "linux"
        elif "windows" in os:
            icon = GdkPixbuf.Pixbuf.new_from_file(self.windows_icon)
            str_id = "windows"
        elif "mac" in os:
            icon = GdkPixbuf.Pixbuf.new_from_file(self.mac_icon)
            str_id = "mac"
        else:
            # Generic host icon for anything we cannot recognize.
            icon = GdkPixbuf.Pixbuf.new_from_file(self.no_os_icon)
            str_id = "unknown"
        return icon, str_id
128 | 244 | |
129 | 245 | def create_model(self, hosts): |
130 | 246 | """Creates a model for a lists of hosts. The model contians the |
135 | 251 | ====================================================================== |
136 | 252 | | a923fd | PixBufIcon(linux)| linux | 192.168.1.2 (5) | 5 | |
137 | 253 | """ |
138 | def compute_vuln_count(host): | |
139 | """Return the total vulnerability count for a given host""" | |
140 | vuln_count = 0 | |
141 | vuln_count += len(host.getVulns()) | |
142 | for interface in host.getAllInterfaces(): | |
143 | vuln_count += len(interface.getVulns()) | |
144 | for service in interface.getAllServices(): | |
145 | vuln_count += len(service.getVulns()) | |
146 | return vuln_count | |
147 | ||
148 | def decide_icon(os): | |
149 | """Return the GdkPixbuf icon according to 'os' paramather string | |
150 | and a str_id to that GdkPixbuf for easy comparison and ordering | |
151 | of the view ('os' paramether string is complicated and has caps). | |
152 | """ | |
153 | os = os.lower() | |
154 | if "linux" in os or "unix" in os: | |
155 | icon = GdkPixbuf.Pixbuf.new_from_file(self.linux_icon) | |
156 | str_id = "linux" | |
157 | elif "windows" in os: | |
158 | icon = GdkPixbuf.Pixbuf.new_from_file(self.windows_icon) | |
159 | str_id = "windows" | |
160 | elif "mac" in os: | |
161 | icon = GdkPixbuf.Pixbuf.new_from_file(self.mac_icon) | |
162 | str_id = "mac" | |
163 | else: | |
164 | icon = GdkPixbuf.Pixbuf.new_from_file(self.no_os_icon) | |
165 | str_id = "unknown" | |
166 | return icon, str_id | |
167 | ||
168 | def compare_os_strings(model, an_os, other_os, user_data): | |
169 | """Compare an_os with other_os so the model knows how to sort them. | |
170 | user_data is not used. | |
171 | Forces 'unknown' OS to be always at the bottom of the model. | |
172 | Return values: | |
173 | 1 means an_os should come after other_os | |
174 | 0 means they are the same | |
175 | -1 means an_os should come before other_os | |
176 | It helps to think about it like the relative position of an_os | |
177 | in respect to other_os (-1 'left' in a list, 1 'right' in a list) | |
178 | """ | |
179 | sort_column = 2 | |
180 | an_os = model.get_value(an_os, sort_column) | |
181 | other_os = model.get_value(other_os, sort_column) | |
182 | if an_os == "unknown": | |
183 | order = 1 | |
184 | elif an_os < other_os or other_os == "unknown": | |
185 | order = -1 | |
186 | elif an_os == other_os: | |
187 | order = 0 | |
188 | else: | |
189 | order = 1 | |
190 | return order | |
191 | 254 | |
192 | 255 | hosts_model = Gtk.ListStore(str, GdkPixbuf.Pixbuf(), str, str, int) |
193 | ||
256 | self.current_model = hosts_model | |
194 | 257 | for host in hosts: |
195 | vuln_count = compute_vuln_count(host) | |
196 | os_icon, os_str = decide_icon(host.getOS()) | |
197 | display_str = host.name + " (" + str(vuln_count) + ")" | |
198 | hosts_model.append([host.id, os_icon, os_str, | |
199 | display_str, vuln_count]) | |
200 | ||
201 | # sort the model by default according to column 4 (num of vulns) | |
202 | sorted_model = Gtk.TreeModelSort(model=hosts_model) | |
203 | sorted_model.set_sort_column_id(4, Gtk.SortType.DESCENDING) | |
204 | ||
205 | # set the sorting function of column 2 | |
206 | sorted_model.set_sort_func(2, compare_os_strings, None) | |
207 | ||
208 | self.current_model = sorted_model | |
258 | self.__add_host_to_model(host) | |
209 | 259 | |
210 | 260 | return self.current_model |
211 | 261 | |
221 | 271 | """ |
222 | 272 | |
223 | 273 | self.view = Gtk.TreeView(model) |
224 | self.view.set_activate_on_single_click(True) | |
274 | self.view.set_activate_on_single_click(False) | |
225 | 275 | |
226 | 276 | text_renderer = Gtk.CellRendererText() |
227 | 277 | icon_renderer = Gtk.CellRendererPixbuf() |
244 | 294 | |
245 | 295 | return self.view |
246 | 296 | |
    def add_object(self, obj):
        """Route a newly created object into the sidebar.

        Hosts get a new row; vulnerabilities (plain or web) bump their
        host's vuln counter. Other object types are ignored here.
        """
        object_type = obj.class_signature
        if object_type == 'Host':
            self.__add_host_to_model_after_initial_load(obj)
        if object_type == "Vulnerability" or object_type == "VulnerabilityWeb":
            self.__add_vuln_to_model(obj)
303 | ||
    def remove_object(self, obj_id):
        """Route an object deletion into the sidebar.

        Ids without dots belong to hosts; any other id belongs to a child
        object whose host id is the first dot-separated component.
        """
        if obj_id.count('.') == 0:
            self.__remove_host_from_model(obj_id)
        else:
            host_id = obj_id.split(".")[0]
            self.__remove_vuln_from_model(host_id)
310 | ||
    def update_object(self, obj):
        # Only host updates (e.g. a rename) affect the sidebar rows;
        # other object types are ignored here.
        object_type = obj.class_signature
        if object_type == 'Host':
            self.__update_host_in_model(obj)
315 | ||
    def redo(self, hosts, total_host_amount, page=0):
        """Creates a new model from an updated list of hosts and adapts
        the view to reflect the changes"""
        self.page = page
        # Drop the stale id->iter mapping; create_model rebuilds it
        # while appending the new page of hosts.
        self.host_id_to_iter = {}
        model = self.create_model(hosts)
        self.redo_view(model)
        self.host_amount = total_host_amount
        self.set_move_buttons_sensitivity()
        # Pagination label is 1-based for display.
        self.progress_label.set_label("{0} / {1}".format(self.page+1, self.compute_total_number_of_pages()+1))
326 | ||
    def redo_view(self, model):
        """Updates the view of the object with a new model"""
        self.view.set_model(model)
        # Also refresh the "current / total" pagination label.
        self.progress_label.set_label("{0} / {1}".format(self.page+1, self.compute_total_number_of_pages()+1))
256 | 331 | |
257 | 332 | def on_click(self, tree_view, path, column): |
258 | 333 | """Sends the host_id of the clicked host back to the application""" |
260 | 335 | host_id = self.current_model[tree_iter][0] |
261 | 336 | self.open_dialog_callback(host_id) |
262 | 337 | |
338 | def set_move_buttons_sensitivity(self): | |
339 | if self.page > 0: | |
340 | self.prev_button.set_sensitive(True) | |
341 | else: | |
342 | self.prev_button.set_sensitive(False) | |
343 | if self.compute_total_number_of_pages() > self.page: | |
344 | self.next_button.set_sensitive(True) | |
345 | else: | |
346 | self.next_button.set_sensitive(False) | |
347 | ||
    def compute_total_number_of_pages(self):
        # NOTE(review): under Python 2 integer division, host_amount / 20
        # already floors, so math.ceil is a no-op here. For host amounts
        # that are exact multiples of 20 this reports one page more than
        # actually exists -- confirm whether floor or ceil semantics are
        # intended (compare against the 20-per-page fetches elsewhere).
        return int(math.ceil(self.host_amount / 20))
350 | ||
    @scrollable(width=160)
    def scrollable_view(self):
        """Return the hosts TreeView; the @scrollable decorator wraps it
        in a scrolled container of the given width."""
        return self.view
354 | ||
def get_box(self):
    """Build and return the hosts sidebar box: the search entry on
    top, the scrollable host list in the middle, and the pagination
    button row at the bottom."""
    search_entry = self.create_search_entry()
    scrollable_view = self.scrollable_view()
    button_box = self.button_box()
    sidebar_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
    sidebar_box.pack_start(search_entry, False, False, 0)
    sidebar_box.pack_start(scrollable_view, True, True, 0)
    sidebar_box.pack_start(button_box, False, True, 0)
    return sidebar_box
364 | ||
def button_box(self):
    """Build the pagination control row: a '<<' previous-page button,
    the 'page / total' progress label, and a '>>' next-page button."""
    button_box = Gtk.Box()
    # tint the row so it stands apart from the host list above it
    button_box.override_background_color(Gtk.StateType.NORMAL, Gdk.RGBA(.1,.1,.1,.1))
    self.prev_button = Gtk.Button.new_with_label("<<")
    self.next_button = Gtk.Button.new_with_label(">>")
    # the lambda tells on_click_move_page how to derive the target page
    # from the current one (one back / one forward)
    self.prev_button.connect("clicked", self.on_click_move_page, lambda x: x-1)
    self.next_button.connect("clicked", self.on_click_move_page, lambda x: x+1)
    button_box.pack_start(self.prev_button, True, True, 0)
    button_box.pack_start(self.progress_label, True, True, 0)
    button_box.pack_start(self.next_button, True, True, 0)
    return button_box
376 | ||
def on_click_move_page(self, button, add_one_or_take_one_from, *args, **kwargs):
    """Shared 'clicked' handler for the prev/next buttons.

    add_one_or_take_one_from: callable mapping the current page number
        to the target page number (wired up in button_box).
    """
    self.page = add_one_or_take_one_from(self.page)
    # NOTE(review): page is passed as str here but as int in
    # on_search_enter_key -- presumably the server accepts both; confirm.
    hosts = self.get_host_function(page=str(self.page), page_size=20,
                                   sort='vulns', sort_dir='desc')
    model = self.create_model(hosts)
    self.redo_view(model)
    self.set_move_buttons_sensitivity()
384 | ||
def create_search_entry(self):
    """Build the host-search Gtk.Entry, wired to run the search when
    the user presses Enter."""
    entry = Gtk.Entry()
    entry.set_placeholder_text("Search a host by name...")
    entry.connect("activate", self.on_search_enter_key)
    entry.show()
    return entry
392 | ||
def on_search_enter_key(self, entry):
    """'activate' (Enter) handler for the host search entry.

    An empty query reloads the first page of hosts sorted by vuln
    count; a non-empty query fetches hosts matching the name and
    disables the pagination buttons, since search results are not
    paged. (The previous docstring described workspace search; this
    method searches hosts.)
    """
    search = entry.get_text()
    if search == "":
        hosts = self.get_host_function(page=0, page_size=20, sort='vulns',
                                       sort_dir='desc')
        model = self.create_model(hosts)
        self.redo_view(model)
        self.set_move_buttons_sensitivity()
    else:
        hosts = self.get_host_function(name=search, sort='name',
                                       sort_dir='desc')
        model = self.create_model(hosts)
        self.redo_view(model)
        self.prev_button.set_sensitive(False)
        self.next_button.set_sensitive(False)
267 | 411 | |
268 | 412 | |
269 | 413 | class WorkspaceSidebar(Gtk.Widget): |
271 | 415 | instance to the application. It only handles the view and the model, |
272 | 416 | all the backend word is handled by the application via the callback""" |
273 | 417 | |
274 | def __init__(self, workspace_manager, callback_to_change_workspace, | |
418 | def __init__(self, server_io, callback_to_change_workspace, | |
275 | 419 | callback_to_remove_workspace, callback_to_create_workspace, |
276 | 420 | last_workspace): |
277 | 421 | |
280 | 424 | self.remove_ws = callback_to_remove_workspace |
281 | 425 | self.create_ws = callback_to_create_workspace |
282 | 426 | self.last_workspace = last_workspace |
283 | self.ws_manager = workspace_manager | |
284 | ||
285 | self.workspaces = self.ws_manager.getWorkspacesNames() | |
427 | self.serverIO = server_io | |
428 | ||
429 | self.workspaces = self.serverIO.get_workspaces_names() | |
286 | 430 | self.search_entry = self.create_search_entry() |
287 | 431 | |
288 | 432 | self.workspace_model = self.create_ws_model() |
329 | 473 | Gets an updated copy of the workspaces and checks against |
330 | 474 | the model to see which are already there and which arent""" |
331 | 475 | |
332 | self.ws_manager.resource() | |
333 | self.workspaces = self.ws_manager.getWorkspacesNames() | |
476 | self.workspaces = self.serverIO.get_workspaces_names() | |
334 | 477 | |
335 | 478 | model = self.workspace_model |
336 | 479 | added_workspaces = [added_ws[0] for added_ws in model] |
338 | 481 | if ws not in added_workspaces: |
339 | 482 | ws_iter = self.workspace_model.append([ws]) |
340 | 483 | self.valid_ws_iters.append(ws_iter) |
484 | ||
485 | for ws in added_workspaces: | |
486 | if ws not in self.workspaces: | |
487 | iter = self.get_iter_by_name(ws) | |
488 | self.workspace_model.remove(iter) | |
341 | 489 | |
342 | 490 | def clear_sidebar(self): |
343 | 491 | """Brutaly clear all the information from the model. |
368 | 516 | a selection with the change workspace callback""" |
369 | 517 | |
370 | 518 | self.ws_view = Gtk.TreeView(model) |
519 | self.ws_view.set_activate_on_single_click(False) | |
371 | 520 | renderer = Gtk.CellRendererText() |
372 | 521 | column = Gtk.TreeViewColumn("Workspaces", renderer, text=0) |
373 | 522 | self.ws_view.append_column(column) |
381 | 530 | selection = self.ws_view.get_selection() |
382 | 531 | selection.set_mode(Gtk.SelectionMode.BROWSE) |
383 | 532 | |
384 | self.ws_view.connect("button-press-event", self.on_click) | |
533 | self.ws_view.connect("button-press-event", self.on_right_click) | |
534 | self.ws_view.connect("row-activated", self.on_left_click) | |
385 | 535 | |
386 | 536 | return self.ws_view |
387 | 537 | |
388 | def on_click(self, view, event): | |
def on_left_click(self, view, path, column):
    """'row-activated' handler for the workspace list: make sure the
    activated row is selected, then switch to that workspace."""

    # force selection of newly selected
    # before actually changing workspace
    select = view.get_selection()
    select.select_path(path)

    # change the workspace to the newly selected
    self.change_ws(self.get_selected_ws_name())
    return True  # prevents the click from selecting a workspace
                 # this is handled manually by us on self.change_ws
549 | ||
550 | def on_right_click(self, view, event): | |
389 | 551 | """On click, check if it was a right click. If it was, |
390 | 552 | create a menu with the delete option. On click on that option, |
391 | 553 | delete the workspace that occupied the position where the user |
392 | 554 | clicked. Returns True if it was a right click""" |
393 | 555 | |
394 | # it it isnt right click or left click just do nothing | |
395 | if event.button != 3 and event.button != 1: | |
556 | # if it isnt right click just do nothing | |
557 | if event.button != 3: | |
396 | 558 | return False |
397 | 559 | |
398 | 560 | # we really do care about where the user clicked, that is our |
405 | 567 | # if the user didn't click on a workspace there no path to work on |
406 | 568 | return False |
407 | 569 | |
408 | # left click: | |
409 | if event.button == 1: | |
410 | # force selection of newly selected | |
411 | # before actually changing workspace | |
412 | select = view.get_selection() | |
413 | select.select_path(path) | |
414 | ||
415 | # change the workspace to the newly selected | |
416 | ||
417 | self.change_ws(self.get_selected_ws_name()) | |
418 | ||
419 | if event.button == 3: # 3 represents right click | |
420 | menu = Gtk.Menu() | |
421 | delete_item = Gtk.MenuItem("Delete") | |
422 | menu.append(delete_item) | |
423 | ||
424 | # get tree_iter from path. then get its name. then delete | |
425 | # that workspace | |
426 | ||
427 | tree_iter = self.workspace_model.get_iter(path) | |
428 | ws_name = self.workspace_model[tree_iter][0] | |
429 | ||
430 | delete_item.connect("activate", self.remove_ws, ws_name) | |
431 | ||
432 | delete_item.show() | |
433 | menu.popup(None, None, None, None, event.button, event.time) | |
434 | return True # prevents the click from selecting a workspace | |
570 | menu = Gtk.Menu() | |
571 | delete_item = Gtk.MenuItem("Delete") | |
572 | menu.append(delete_item) | |
573 | ||
574 | # get tree_iter from path. then get its name. then delete | |
575 | # that workspace | |
576 | ||
577 | tree_iter = self.workspace_model.get_iter(path) | |
578 | ws_name = self.workspace_model[tree_iter][0] | |
579 | ||
580 | delete_item.connect("activate", self.remove_ws, ws_name) | |
581 | ||
582 | delete_item.show() | |
583 | menu.popup(None, None, None, None, event.button, event.time) | |
584 | return True # prevents the click from selecting a workspace | |
435 | 585 | |
436 | 586 | def get_selected_ws_iter(self): |
437 | 587 | """Returns the tree_iter of the current selected workspace""" |
455 | 605 | """Returns the iter associated to the workspace ws_name or None |
456 | 606 | if not found. |
457 | 607 | """ |
608 | # NOTE. this function should really be replaced by a dictionary | |
458 | 609 | for ws_iter in self.valid_ws_iters: |
459 | 610 | if self.workspace_model[ws_iter][0] == ws_name: |
460 | 611 | return ws_iter |
491 | 642 | self.bold = self.textBuffer.create_tag("bold", |
492 | 643 | weight=Pango.Weight.BOLD) |
493 | 644 | |
494 | self.textBuffer.set_text("Welcome to Faraday. Happy hacking!\n\0", | |
645 | self.textBuffer.set_text("Welcome to Faraday!\n\0", | |
495 | 646 | -1) |
496 | 647 | |
497 | 648 | self.textBuffer.apply_tag(self.bold, |
567 | 718 | Gtk.Widget.__init__(self) |
568 | 719 | initial_strings = self.create_strings(host_count, service_count, |
569 | 720 | vuln_count) |
721 | ||
722 | self.active_workspace_label = Gtk.Label() | |
723 | self.active_workspace_label.set_use_markup(True) | |
570 | 724 | self.notif_text = "Notifications: " |
571 | 725 | self.conflict_text = "Conflicts: " |
572 | 726 | |
589 | 743 | self.mainBox.pack_start(self.notif_button, False, False, 5) |
590 | 744 | self.mainBox.pack_start(self.ws_info, False, True, 5) |
591 | 745 | self.mainBox.pack_start(Gtk.Box(), True, True, 5) # blank space |
746 | self.mainBox.pack_start(self.active_workspace_label, False, True, 5) | |
592 | 747 | self.mainBox.pack_end(self.conflict_button, False, True, 5) |
748 | ||
749 | def set_workspace_label(self, new_label): | |
750 | self.active_workspace_label.set_label("Active workspace: <b>{0}</b>".format(new_label)) | |
593 | 751 | |
594 | 752 | def inc_notif_button_label(self): |
595 | 753 | """Increments the button label, sets bold so user knows there are |
21 | 21 | <attribute name="label" translatable="yes">About</attribute> |
22 | 22 | <attribute name="action">app.about</attribute> |
23 | 23 | </item> |
24 | <item> | |
25 | <attribute name="label" translatable="yes">Help</attribute> | |
26 | <attribute name="action">app.help</attribute> | |
27 | </item> | |
24 | <submenu> | |
25 | <attribute name="label">Help</attribute> | |
26 | <item> | |
27 | <attribute name="label" translatable="yes">Documentation</attribute> | |
28 | <attribute name="action">app.go_to_documentation</attribute> | |
29 | </item> | |
30 | <item> | |
31 | <attribute name="label" translatable="yes">FAQ</attribute> | |
32 | <attribute name="action">app.go_to_faq</attribute> | |
33 | </item> | |
34 | <item> | |
35 | <attribute name="label" translatable="yes">Troubleshooting</attribute> | |
36 | <attribute name="action">app.go_to_troubleshooting</attribute> | |
37 | </item> | |
38 | <item> | |
39 | <attribute name="label" translatable="yes">Demos</attribute> | |
40 | <attribute name="action">app.go_to_demos</attribute> | |
41 | </item> | |
42 | <item> | |
43 | <attribute name="label" translatable="yes">Issues</attribute> | |
44 | <attribute name="action">app.go_to_issues</attribute> | |
45 | </item> | |
46 | <item> | |
47 | <attribute name="label" translatable="yes">Forum</attribute> | |
48 | <attribute name="action">app.go_to_forum</attribute> | |
49 | </item> | |
50 | <item> | |
51 | <attribute name="label" translatable="yes">IRC</attribute> | |
52 | <attribute name="action">app.go_to_irc</attribute> | |
53 | </item> | |
54 | <item> | |
55 | <attribute name="label" translatable="yes">Twitter</attribute> | |
56 | <attribute name="action">app.go_to_twitter</attribute> | |
57 | </item> | |
58 | <item> | |
59 | <attribute name="label" translatable="yes">Google Group</attribute> | |
60 | <attribute name="action">app.go_to_googlegroup</attribute> | |
61 | </item> | |
62 | </submenu> | |
28 | 63 | <item> |
29 | 64 | <attribute name="label" translatable="yes">Quit</attribute> |
30 | 65 | <attribute name="action">app.quit</attribute> |
0 | #!/usr/bin/python2.7 | |
1 | # -*- coding: utf-8 -*- | |
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | ||
9 | import threading, time, requests | |
10 | from model.guiapi import notification_center | |
11 | from decorators import safe_io_with_server | |
12 | from persistence.server import models | |
13 | ||
class ServerIO(object):
    """IO layer between the GTK client and the Faraday server.

    Every getter delegates to persistence.server.models, always
    querying self.active_workspace. The @safe_io_with_server(fallback)
    decorator presumably returns `fallback` instead of raising when the
    server is unreachable -- see decorators.py to confirm.
    """

    def __init__(self, active_workspace):
        self.__active_workspace = active_workspace
        self.stream = None  # will be set when active workspace is set
        # lock shared with the models module so change processing and
        # local writes are serialized
        self.changes_lock = models.get_changes_lock()

    @property
    def active_workspace(self):
        """Name of the workspace every server query runs against."""
        return self.__active_workspace

    @active_workspace.setter
    def active_workspace(self, new_workspace):
        # restarting the stream makes the background changes thread
        # report changes of the newly selected workspace
        self.__active_workspace = new_workspace
        if self.stream:
            self.stream.stop()
        self.stream = self.get_changes_stream()
        self.continously_get_changes()

    @safe_io_with_server([])
    def get_hosts(self, **params):
        return models.get_hosts(self.active_workspace, **params)

    @safe_io_with_server(0)
    def get_hosts_number(self):
        return models.get_hosts_number(self.active_workspace)

    @safe_io_with_server([])
    def get_interfaces(self, **params):
        return models.get_interfaces(self.active_workspace, **params)

    @safe_io_with_server(0)
    def get_interfaces_number(self):
        return models.get_interfaces_number(self.active_workspace)

    @safe_io_with_server([])
    def get_services(self, **params):
        return models.get_services(self.active_workspace, **params)

    @safe_io_with_server(0)
    def get_services_number(self):
        return models.get_services_number(self.active_workspace)

    @safe_io_with_server([])
    def get_all_vulns(self, **params):
        return models.get_all_vulns(self.active_workspace, **params)

    @safe_io_with_server(0)
    def get_vulns_number(self):
        return models.get_vulns_number(self.active_workspace)

    @safe_io_with_server([])
    def get_workspaces_names(self):
        return models.get_workspaces_names()

    @safe_io_with_server(None)
    def get_object(self, object_signature, object_id):
        return models.get_object(self.active_workspace, object_signature, object_id)

    @safe_io_with_server((0,0,0,0))
    def get_workspace_numbers(self):
        return models.get_workspace_numbers(self.active_workspace)

    @safe_io_with_server(False)
    def is_server_up(self):
        return models.is_server_up()

    @safe_io_with_server(False)
    def test_server_url(self, url):
        return models.test_server_url(url)

    @safe_io_with_server(None)
    def get_changes_stream(self):
        return models.get_changes_stream(self.active_workspace)

    @safe_io_with_server((None, None))
    def get_deleted_object_name_and_type(self, obj_id):
        return models.get_deleted_object_name_and_type(self.active_workspace, obj_id)

    def continously_get_changes(self):
        """Creates a daemon thread which will continuously check the
        changes coming from other instances of Faraday. The thread
        returns on any exception, or if self.stream is None.
        """

        # There is very arcane, dark magic involved in this method.
        # What you need to know: do not touch it.
        # If you touch it, do check out persitence/server/changes_stream.py
        # there lies _most_ of the darkest magic

        def filter_changes(change, obj_type):
            """Return change if it is a relevant document change for this
            client, else None."""
            local_changes = models.local_changes()
            cool_types = ("Host", "Interface", "Service", "Vulnerability",
                          "VulnerabilityWeb", "CommandRunInfomation", "Cred",
                          "Note")

            # guard first: a falsy change or a 'last_seq' marker is not a
            # document change. (The original ran this check only after
            # already calling change.get(...), so it could never catch a
            # falsy change.)
            if not change or change.get('last_seq'):
                return None

            if not change.get('changes') or not change['changes'][0].get('rev'):
                # not a change really right?
                return None

            if obj_type is not None and obj_type not in cool_types:
                # if obj_type is None it's a deleted change. retrieve its type later
                return None

            if change['changes'][0]['rev'] == local_changes.get(change['id']):
                # this client made the change itself; don't echo it back
                del local_changes[change['id']]
                return None

            if change['id'].startswith('_design'):  # XXX: is this still neccesary?
                return None

            return change

        def notification_dispatcher(obj_id, obj_type, obj_name, deleted, revision):
            """Translate a raw change into notification_center calls
            (add/edit/delete plus a generic changeFromInstance)."""
            if deleted:
                obj_name, obj_type = self.get_deleted_object_name_and_type(obj_id)
                notification_center.deleteObject(obj_id)
                update = False
            else:
                # CouchDB-style revisions start with "1-" for new documents
                is_new_object = revision.split("-")[0] == "1"
                obj = self.get_object(obj_type, obj_id)
                if obj:
                    if is_new_object:
                        notification_center.addObject(obj)
                        update = False
                    else:
                        notification_center.editObject(obj)
                        update = True
                else:
                    update = False
            # blank out missing values. The original looped over the None
            # entries rebinding only its loop variable, which had no
            # effect, so Nones leaked through to the notification.
            obj_id = obj_id if obj_id is not None else ""
            obj_type = obj_type if obj_type is not None else ""
            obj_name = obj_name if obj_name is not None else ""
            notification_center.changeFromInstance(obj_id, obj_type,
                                                   obj_name, deleted=deleted,
                                                   update=update)

        def get_changes():
            # dark maaaaaagic *sing with me!* dark maaaaaagic
            if self.stream:
                try:
                    for change, obj_type, obj_name in self.stream:
                        with self.changes_lock:
                            change = filter_changes(change, obj_type)
                            if change:
                                deleted = bool(change.get('deleted'))
                                obj_id = change.get('id')
                                # NOTE(review): filter_changes validates
                                # changes[0] but the revision used here is
                                # changes[-1] -- confirm both are intended
                                revision = change.get("changes")[-1].get('rev')
                                notification_dispatcher(obj_id, obj_type, obj_name,
                                                        deleted, revision)
                except requests.exceptions.RequestException:
                    notification_center.WorkspaceProblem()
                    return False
            else:
                return False

        get_changes_thread = threading.Thread(target=get_changes)
        get_changes_thread.daemon = True
        get_changes_thread.start()

    def continously_check_server_connection(self):
        """Starts a daemon thread which requests from the server every
        second, so we know if the connection is still alive. After three
        consecutive failures a CouchDBConnectionProblem notification is
        emitted (exactly once per failure streak).
        """
        def test_server_connection():
            tolerance = 0
            while True:
                time.sleep(1)
                test_was_successful = self.is_server_up()
                if test_was_successful:
                    tolerance = 0
                else:
                    tolerance += 1
                    if tolerance == 3:
                        notification_center.CouchDBConnectionProblem()

        test_server_thread = threading.Thread(target=test_server_connection)
        test_server_thread.daemon = True
        test_server_thread.start()
50 | 50 | def showDialog(self, msg, level="INFORMATION"): |
51 | 51 | self._notifyWidgets(events.ShowDialogCustomEvent(msg, level)) |
52 | 52 | |
53 | def workspaceLoad(self, hosts): | |
54 | self._notifyWidgets(events.ModelObjectUpdateEvent(hosts)) | |
55 | ||
56 | def workspaceChanged(self, workspace, workspace_type): | |
57 | self._notifyWidgets(events.WorkspaceChangedCustomEvent(workspace,workspace_type)) | |
53 | def workspaceChanged(self, workspace): | |
54 | self._notifyWidgets(events.WorkspaceChangedCustomEvent(workspace)) | |
58 | 55 | |
59 | 56 | def CouchDBConnectionProblem(self, problem=None): |
60 | 57 | self._notifyWidgets(events.ShowExceptionConnectionRefusedCustomEvent(problem)) |
77 | 74 | def conflictResolution(self, conflicts): |
78 | 75 | self._notifyWidgets(events.ResolveConflictsCustomEvent(conflicts)) |
79 | 76 | |
80 | def changeFromInstance(self, change): | |
81 | self._notifyWidgets(events.ChangeFromInstanceCustomEvent(change)) | |
77 | def changeFromInstance(self, obj_id, obj_type, obj_name, | |
78 | deleted=False, update=False): | |
79 | self._notifyWidgets(events.ChangeFromInstanceCustomEvent(obj_id, | |
80 | obj_type, | |
81 | obj_name, | |
82 | deleted=deleted, | |
83 | update=update)) | |
82 | 84 | |
85 | def addHostFromChanges(self, obj): | |
86 | self._notifyWidgets(events.AddHostChangesEvent(obj)) | |
87 | ||
88 | def editObject(self, obj): | |
89 | self._notifyWidgets(events.UpdateObjectCustomEvent(obj)) | |
90 | ||
91 | def deleteObject(self, obj_id): | |
92 | self._notifyWidgets(events.DeleteObjectCustomEvent(obj_id)) | |
93 | ||
94 | def addObject(self, new_object): | |
95 | self._notifyWidgets(events.AddObjectCustomEvent(new_object)) |
31 | 31 | couchdb = args.couchdb |
32 | 32 | __serv = Server(uri = couchdb) |
33 | 33 | |
34 | # reports = os.path.join(os.getcwd(), "views", "reports") | |
35 | 34 | workspace = __serv.get_or_create_db("cwe") |
36 | # designer.push(reports, workspace, atomic = False) | |
37 | 35 | |
38 | 36 | with open('data/cwe.csv', 'r') as csvfile: |
39 | 37 | cwereader = csv.reader(csvfile, delimiter=',') |
56 | 56 | update=1 |
57 | 57 | fi |
58 | 58 | |
59 | apt-get --ignore-missing -y install build-essential ipython python-setuptools python-pip python-dev libpq-dev libffi-dev couchdb gir1.2-gtk-3.0 gir1.2-vte-2.91 python-gobject zsh curl | |
59 | for pkg in build-essential ipython python-setuptools python-pip python-dev libpq-dev libffi-dev couchdb gir1.2-gtk-3.0 gir1.2-vte-2.91 gir1.2-vte-2.90 python-gobject zsh curl; do | |
60 | sudo apt-get install -y $pkg | |
61 | done | |
60 | 62 | |
61 | 63 | pip2 install -r requirements.txt |
62 | 64 |
9 | 9 | |
10 | 10 | import os |
11 | 11 | from couchdbkit import designer |
12 | ||
13 | from config.configuration import getInstanceConfiguration | |
14 | ||
15 | CONF = getInstanceConfiguration() | |
16 | 12 | |
17 | 13 | |
18 | 14 | class ViewsManager(object): |
1 | 1 | Faraday Penetration Test IDE |
2 | 2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) |
3 | 3 | See the file 'doc/LICENSE' for the license information |
4 | ||
5 | 4 | ''' |
6 | 5 | |
7 | ||
8 | from persistence.mappers.data_mappers import Mappers | |
6 | from persistence.server.models import create_object, get_object, update_object, delete_object | |
9 | 7 | |
10 | 8 | # NOTE: This class is intended to be instantiated by the |
11 | 9 | # service or controller that needs it. |
17 | 15 | class MapperManager(object): |
18 | 16 | def __init__(self): |
19 | 17 | # create and store the datamappers |
20 | self.mappers = {} | |
18 | self.workspace_name = None | |
21 | 19 | |
def createMappers(self, workpace_name):
    """Bind this manager to the given workspace; all subsequent
    save/update/find/remove calls operate on it.

    (The parameter keeps the original 'workpace_name' misspelling so
    keyword callers stay compatible.)
    """
    self.workspace_name = workpace_name
26 | 22 | |
def save(self, obj):
    """Create obj on the server under the bound workspace.
    Returns True on success, False otherwise."""
    # bool() keeps the strict True/False contract of the original
    # if/return pair in one expression
    return bool(create_object(self.workspace_name, obj.class_signature, obj))
27 | ||
def update(self, obj):
    """Push the updated obj to the server under the bound workspace.
    Returns True on success, False otherwise."""
    succeeded = update_object(self.workspace_name, obj.class_signature, obj)
    return True if succeeded else False
32 | 32 | |
def find(self, class_signature, obj_id):
    """Fetch from the server the single object of the given class
    signature with id obj_id, in the bound workspace."""
    return get_object(self.workspace_name, class_signature, obj_id)
38 | 35 | |
def remove(self, obj_id, class_signature):
    """Delete the object with obj_id from the bound workspace.

    class_signature: class-signature string of the object, required by
    the server API to locate the object type.
    """
    return delete_object(self.workspace_name, class_signature, obj_id)
39 | 39 | |
40 | 40 | if parser.report_type is None: |
41 | 41 | getLogger(self).error( |
42 | 'Plugin not found: automatic and manual try!' | |
43 | ) | |
42 | 'Plugin not found: automatic and manual try!') | |
44 | 43 | return False |
45 | 44 | |
46 | 45 | return self.sendReport(parser.report_type, filename) |
47 | 46 | |
48 | 47 | def sendReport(self, plugin_id, filename): |
49 | 48 | """Sends a report to the appropiate plugin specified by plugin_id""" |
50 | getLogger(self).debug( | |
49 | getLogger(self).info( | |
51 | 50 | 'The file is %s, %s' % (filename, plugin_id)) |
52 | 51 | if not self.plugin_controller.processReport(plugin_id, filename): |
53 | 52 | getLogger(self).error( |
94 | 93 | except Exception: |
95 | 94 | getLogger(self).error( |
96 | 95 | "An exception was captured while saving reports\n%s" |
97 | % traceback.format_exc() | |
98 | ) | |
96 | % traceback.format_exc()) | |
99 | 97 | finally: |
100 | 98 | tmp_timer = 0 |
101 | 99 | |
107 | 105 | Synchronize report directory using the DataManager and Plugins online |
108 | 106 | We first make sure that all shared reports were added to the repo |
109 | 107 | """ |
108 | filenames = [] | |
110 | 109 | |
111 | 110 | for root, dirs, files in os.walk(self._report_path, False): |
112 | 111 | |
113 | 112 | if root == self._report_path: |
114 | 113 | for name in files: |
115 | filename = os.path.join(root, name) | |
116 | ||
117 | # If plugin not is detected... move to unprocessed | |
118 | if self.processor.processReport(filename) is False: | |
119 | ||
120 | os.rename( | |
121 | filename, | |
122 | os.path.join(self._report_upath, name) | |
123 | ) | |
124 | else: | |
125 | os.rename( | |
126 | filename, | |
127 | os.path.join(self._report_ppath, name) | |
128 | ) | |
114 | filenames.append(os.path.join(root, name)) | |
115 | ||
116 | for filename in filenames: | |
117 | name = os.path.basename(filename) | |
118 | ||
119 | # If plugin not is detected... move to unprocessed | |
120 | if self.processor.processReport(filename) is False: | |
121 | ||
122 | os.rename( | |
123 | filename, | |
124 | os.path.join(self._report_upath, name)) | |
125 | else: | |
126 | os.rename( | |
127 | filename, | |
128 | os.path.join(self._report_ppath, name)) | |
129 | 129 | |
130 | 130 | self.onlinePlugins() |
131 | 131 | |
280 | 280 | |
281 | 281 | if re.search( |
282 | 282 | "https://raw.githubusercontent.com/Arachni/arachni/", |
283 | output | |
284 | ) is not None: | |
283 | output) is not None: | |
285 | 284 | return "Arachni" |
286 | 285 | |
287 | 286 | elif re.search("OpenVAS", output) is not None or re.search( |
288 | 287 | '<omp><version>', |
289 | output | |
290 | ) is not None: | |
288 | output) is not None: | |
291 | 289 | return "Openvas" |
292 | 290 | |
293 | 291 | else: |
7 | 7 | ''' |
8 | 8 | import restkit |
9 | 9 | import re |
10 | import time | |
10 | 11 | |
11 | 12 | from model.workspace import Workspace |
12 | from persistence.persistence_managers import DBTYPE | |
13 | ||
13 | from persistence.server.models import create_workspace, get_workspaces_names, get_workspace, delete_workspace | |
14 | from persistence.server.server import Unauthorized | |
14 | 15 | from model.guiapi import notification_center |
15 | 16 | |
16 | 17 | from config.configuration import getInstanceConfiguration |
24 | 25 | |
25 | 26 | class WorkspaceManager(object): |
26 | 27 | """ |
27 | Workspace Manager class | |
28 | Its responsibilities goes from: | |
29 | * Workspace creation | |
30 | * Workspace removal | |
31 | * Workspace opening | |
32 | * Active Workspace switching | |
28 | This class is in charge of creating, deleting and opening workspaces | |
33 | 29 | """ |
34 | 30 | |
35 | def __init__(self, dbManager, mappersManager, | |
36 | changesManager, *args, **kwargs): | |
37 | self.dbManager = dbManager | |
31 | def __init__(self, mappersManager, *args, **kwargs): | |
38 | 32 | self.mappersManager = mappersManager |
39 | self.changesManager = changesManager | |
40 | 33 | self.active_workspace = None |
41 | 34 | |
42 | 35 | def getWorkspacesNames(self): |
43 | 36 | """Returns the names of the workspaces as a list of strings""" |
44 | return self.dbManager.getAllDbNames() | |
37 | return get_workspaces_names() | |
45 | 38 | |
46 | def createWorkspace(self, name, desc, dbtype=DBTYPE.COUCHDB): | |
39 | def createWorkspace(self, name, desc, start_date=int(time.time() * 1000), | |
40 | finish_date=int(time.time() * 1000), customer=""): | |
41 | # XXX: DEPRECATE NEXT LINE | |
47 | 42 | workspace = Workspace(name, desc) |
48 | 43 | try: |
49 | dbConnector = self.dbManager.createDb(name, dbtype) | |
50 | except restkit.Unauthorized: | |
44 | create_workspace(name, description=desc, start_date=start_date, | |
45 | finish_date=finish_date, customer=customer) | |
46 | # XXX: Remove this hack! Only for testing | |
47 | time.sleep(2) | |
48 | except Unauthorized: | |
51 | 49 | raise WorkspaceException( |
52 | 50 | ("You're not authorized to create workspaces\n" |
53 | 51 | "Make sure you're an admin and add your credentials" |
57 | 55 | "<couch_uri>http://john:[email protected]:5984</couch_uri>")) |
58 | 56 | except Exception as e: |
59 | 57 | raise WorkspaceException(str(e)) |
60 | if dbConnector: | |
61 | self.closeWorkspace() | |
62 | self.mappersManager.createMappers(dbConnector) | |
63 | self.mappersManager.save(workspace) | |
64 | self.setActiveWorkspace(workspace) | |
65 | notification_center.workspaceChanged( | |
66 | workspace, self.getWorkspaceType(name)) | |
67 | notification_center.workspaceLoad(workspace.getHosts()) | |
68 | self.changesManager.watch(self.mappersManager, dbConnector) | |
69 | return workspace | |
70 | return False | |
58 | self.closeWorkspace() | |
59 | self.mappersManager.createMappers(name) | |
60 | self.setActiveWorkspace(workspace) | |
61 | notification_center.workspaceChanged(workspace) | |
62 | # XXX: REIMPLEMENT THIS | |
63 | #self.changesManager.watch(self.mappersManager, dbConnector) | |
64 | return name | |
71 | 65 | |
72 | 66 | def openWorkspace(self, name): |
73 | 67 | """Open a workspace by name. Returns the workspace. Raises an |
74 | 68 | WorkspaceException if something went wrong along the way. |
75 | 69 | """ |
76 | if name not in self.getWorkspacesNames(): | |
70 | if name not in get_workspaces_names(): | |
77 | 71 | raise WorkspaceException( |
78 | 72 | "Workspace %s wasn't found" % name) |
79 | 73 | self.closeWorkspace() |
80 | 74 | try: |
81 | dbConnector = self.dbManager.getConnector(name) | |
82 | except restkit.Unauthorized: | |
75 | workspace = get_workspace(name) | |
76 | except Unauthorized: | |
83 | 77 | raise WorkspaceException( |
84 | 78 | ("You're not authorized to access this workspace\n" |
85 | 79 | "Add your credentials to your user configuration " |
89 | 83 | except Exception as e: |
90 | 84 | notification_center.CouchDBConnectionProblem(e) |
91 | 85 | raise WorkspaceException(str(e)) |
92 | self.mappersManager.createMappers(dbConnector) | |
93 | workspace = self.mappersManager.getMapper( | |
94 | Workspace.__name__).find(name) | |
95 | if not workspace: | |
96 | raise WorkspaceException( | |
97 | ("Error loading workspace.\n" | |
98 | "You should try opening faraday " | |
99 | "with the '--update' option")) | |
86 | self.mappersManager.createMappers(name) | |
100 | 87 | self.setActiveWorkspace(workspace) |
101 | notification_center.workspaceChanged( | |
102 | workspace, self.getWorkspaceType(name)) | |
103 | notification_center.workspaceLoad(workspace.getHosts()) | |
104 | self.changesManager.watch(self.mappersManager, dbConnector) | |
88 | notification_center.workspaceChanged(workspace) | |
105 | 89 | return workspace |
106 | 90 | |
107 | 91 | def closeWorkspace(self): |
108 | self.changesManager.unwatch() | |
92 | # TODO: DELETE | |
93 | pass | |
109 | 94 | |
110 | 95 | def removeWorkspace(self, name): |
111 | 96 | if name in self.getWorkspacesNames(): |
112 | return self.dbManager.removeDb(name) | |
97 | try: | |
98 | return delete_workspace(name) | |
99 | except Unauthorized: | |
100 | notification_center.showDialog("You are not authorized to " | |
101 | "delete this workspace. \n") | |
113 | 102 | |
114 | 103 | def setActiveWorkspace(self, workspace): |
115 | 104 | self.active_workspace = workspace |
118 | 107 | return self.active_workspace |
119 | 108 | |
120 | 109 | def workspaceExists(self, name): |
121 | return self.dbManager.connectorExists(name) | |
122 | ||
123 | def resource(self): | |
124 | self.dbManager.reloadConfig() | |
110 | return name in self.getWorkspacesNames() | |
125 | 111 | |
126 | 112 | def isActive(self, name): |
127 | 113 | return self.active_workspace.getName() == name |
128 | ||
129 | def getWorkspaceType(self, name): | |
130 | return self._dbTypeToNamedType(self.dbManager.getDbType(name)) | |
131 | ||
132 | def _dbTypeToNamedType(self, dbtype): | |
133 | if dbtype == DBTYPE.COUCHDB: | |
134 | return 'CouchDB' | |
135 | ||
136 | def namedTypeToDbType(self, name): | |
137 | if name == 'CouchDB': | |
138 | return DBTYPE.COUCHDB | |
139 | ||
140 | def getAvailableWorkspaceTypes(self): | |
141 | return [self._dbTypeToNamedType(dbtype) for | |
142 | dbtype in self.dbManager.getAvailableDBs()] | |
143 | 114 | |
144 | 115 | def isWorkspaceNameValid(self, ws_name): |
145 | 116 | """Returns True if the ws_name is valid, else if it's not""" |
11 | 11 | import requests |
12 | 12 | |
13 | 13 | from model.controller import ModelController |
14 | from persistence.persistence_managers import DbManager | |
15 | from controllers.change import ChangeController | |
16 | 14 | from managers.workspace_manager import WorkspaceManager |
17 | 15 | from plugins.controller import PluginController |
18 | 16 | |
47 | 45 | params={'version': CONF.getVersion()}, |
48 | 46 | timeout=1, |
49 | 47 | verify=True) |
50 | res.status_code | |
51 | 48 | except Exception: |
52 | 49 | model.api.devlog("CWE database couldn't be updated") |
53 | 50 | self.__event.wait(43200) |
57 | 54 | |
58 | 55 | |
59 | 56 | class MainApplication(object): |
60 | """ | |
61 | """ | |
62 | 57 | |
63 | 58 | def __init__(self, args): |
64 | 59 | self._original_excepthook = sys.excepthook |
66 | 61 | self.args = args |
67 | 62 | |
68 | 63 | self._mappers_manager = MapperManager() |
69 | self._changes_controller = ChangeController() | |
70 | self._db_manager = DbManager(self.on_connection_lost) | |
71 | 64 | |
72 | 65 | self._model_controller = ModelController(self._mappers_manager) |
73 | 66 | |
74 | 67 | self._plugin_manager = PluginManager( |
75 | os.path.join(CONF.getConfigPath(), "plugins"), | |
76 | self._mappers_manager) | |
68 | os.path.join(CONF.getConfigPath(), "plugins")) | |
77 | 69 | |
78 | 70 | self._workspace_manager = WorkspaceManager( |
79 | self._db_manager, | |
80 | self._mappers_manager, | |
81 | self._changes_controller) | |
71 | self._mappers_manager) | |
82 | 72 | |
83 | 73 | # Create a PluginController and send this to UI selected. |
84 | 74 | self._plugin_controller = PluginController( |
160 | 150 | model.api.devlog("stopping model controller thread...") |
161 | 151 | model.api.stopAPIServer() |
162 | 152 | restapi.stopServer() |
163 | self._changes_controller.stop() | |
164 | 153 | self._model_controller.stop() |
165 | 154 | self._model_controller.join() |
166 | 155 | self.timer.stop() |
50 | 50 | self.user = get_user() |
51 | 51 | self.ip = get_private_ip() |
52 | 52 | self.hostname = get_hostname() |
53 | self.itime = None | |
54 | self.duration = None | |
55 | self.params = None | |
56 | self.workspace = None | |
57 | ||
53 | 58 | for k, v in kwargs.items(): |
54 | 59 | setattr(self, k, v) |
55 | 60 |
263 | 263 | |
264 | 264 | def getMetadata(self): |
265 | 265 | """Returns the current metadata of the object""" |
266 | return self._metadata | |
266 | return self._metadata.__dict__ | |
267 | 267 | |
268 | 268 | def setMetadata(self, metadata): |
269 | 269 | self._metadata = metadata |
893 | 893 | self.publicattrs['Data'] = "getData" |
894 | 894 | self.publicattrs['Severity'] = 'getSeverity' |
895 | 895 | self.publicattrs['Refs'] = 'getRefs' |
896 | self.publicattrs['Resolution'] = 'getResolution' | |
896 | 897 | |
897 | 898 | self.publicattrsrefs['Name'] = 'name' |
898 | 899 | self.publicattrsrefs['Description'] = '_desc' |
899 | 900 | self.publicattrsrefs['Data'] = "data" |
900 | 901 | self.publicattrsrefs['Severity'] = 'severity' |
901 | 902 | self.publicattrsrefs['Refs'] = 'refs' |
903 | self.publicattrsrefs['Resolution'] = 'resolution' | |
902 | 904 | |
903 | 905 | def standarize(self, severity): |
904 | 906 | # Transform all severities into lower strings |
1213 | 1215 | self.setUsername(username) |
1214 | 1216 | if password is not None: |
1215 | 1217 | self.setPassword(password) |
1216 | ||
1217 | class TreeWordsTries(object): | |
1218 | instance = None | |
1219 | END = '_end_' | |
1220 | root = dict() | |
1221 | FOUND = True | |
1222 | ||
1223 | def __init__(self): | |
1224 | self.partial_match = False | |
1225 | self.partial_match_dict = {} | |
1226 | self.cur_idx = 0 | |
1227 | ||
1228 | def addWord(self, word): | |
1229 | current_dict = self.root | |
1230 | for letter in word: | |
1231 | current_dict = current_dict.setdefault(letter, {}) | |
1232 | ||
1233 | current_dict = current_dict.setdefault(self.END, self.END) | |
1234 | ||
1235 | def getWordsInText(self, text): | |
1236 | current_dict = self.root | |
1237 | list_of_word = list() | |
1238 | w = '' | |
1239 | for letter in text: | |
1240 | if letter in current_dict: | |
1241 | current_dict = current_dict[letter] | |
1242 | w += letter | |
1243 | elif self.END in current_dict: | |
1244 | list_of_word.append(w) | |
1245 | current_dict = self.root | |
1246 | w = '' | |
1247 | else: | |
1248 | current_dict = self.root | |
1249 | w = '' | |
1250 | ||
1251 | if self.END in current_dict: | |
1252 | list_of_word.append(w) | |
1253 | ||
1254 | return list_of_word | |
1255 | ||
1256 | ||
1257 | def isInTries(self, word): | |
1258 | current_dict = self.root | |
1259 | ||
1260 | if word is None: | |
1261 | return False | |
1262 | ||
1263 | for letter in word: | |
1264 | if letter in current_dict: | |
1265 | current_dict = current_dict[letter] | |
1266 | else: | |
1267 | return not self.FOUND | |
1268 | else: | |
1269 | if self.END in current_dict: | |
1270 | return self.FOUND | |
1271 | else: | |
1272 | return False | |
1273 | ||
1274 | def __new__(cls, *args, **kargs): | |
1275 | if cls.instance is None: | |
1276 | cls.instance = object.__new__(cls, *args, **kargs) | |
1277 | return cls.instance | |
1278 | ||
1279 | def removeWord(self, word): | |
1280 | previous_dict = None | |
1281 | current_dict = self.root | |
1282 | last_letter = '' | |
1283 | ||
1284 | if not self.isInTries(word): | |
1285 | return | |
1286 | ||
1287 | for letter in word: | |
1288 | if letter in current_dict: | |
1289 | if not previous_dict: | |
1290 | previous_dict = current_dict | |
1291 | last_letter = letter | |
1292 | if len(current_dict.keys()) != 1: | |
1293 | previous_dict = current_dict | |
1294 | last_letter = letter | |
1295 | current_dict = current_dict[letter] | |
1296 | else: | |
1297 | if self.END in current_dict: | |
1298 | previous_dict.pop(last_letter) | |
1299 | ||
1300 | def clear(self): | |
1301 | self.root = dict() | |
1302 | self.FOUND = True | |
1303 | ||
1304 | ||
1305 | ||
1306 | #------------------------------------------------------------------------------- | |
1307 | # taken from http://code.activestate.com/recipes/576477-yet-another-signalslot-implementation-in-python/ | |
1308 | # under MIT License | |
1309 | #TODO: decide if we are going to use this... | |
1310 | class Signal(object): | |
1311 | """ | |
1312 | used to handle signals between several objects | |
1313 | """ | |
1314 | def __init__(self): | |
1315 | self.__slots = WeakValueDictionary() | |
1316 | ||
1317 | def __call__(self, *args, **kargs): | |
1318 | for key in self.__slots: | |
1319 | func, _ = key | |
1320 | func(self.__slots[key], *args, **kargs) | |
1321 | ||
1322 | def connect(self, slot): | |
1323 | key = (slot.im_func, id(slot.im_self)) | |
1324 | self.__slots[key] = slot.im_self | |
1325 | ||
1326 | def disconnect(self, slot): | |
1327 | key = (slot.im_func, id(slot.im_self)) | |
1328 | if key in self.__slots: | |
1329 | self.__slots.pop(key) | |
1330 | ||
1331 | def clear(self): | |
1332 | self.__slots.clear() | |
1333 | ||
1334 | #------------------------------------------------------------------------------- |
7 | 7 | import threading |
8 | 8 | import Queue |
9 | 9 | import traceback |
10 | import model.common # this is to make sure the factory is created | |
10 | import model.common # this is to make sure the factory is created | |
11 | 11 | import model.hosts |
12 | 12 | |
13 | 13 | from config.configuration import getInstanceConfiguration |
14 | from model.common import TreeWordsTries | |
15 | 14 | from utils.logs import getLogger |
16 | 15 | import model.api as api |
17 | #import model.guiapi as guiapi | |
18 | 16 | from model.guiapi import notification_center as notifier |
19 | 17 | from gui.customevents import * |
20 | ||
21 | ||
22 | #XXX: consider re-writing this module! There's alot of repeated code | |
18 | from functools import wraps | |
19 | ||
20 | ||
21 | # XXX: consider re-writing this module! There's alot of repeated code | |
23 | 22 | # and things are really messy |
24 | 23 | |
25 | 24 | CONF = getInstanceConfiguration() |
145 | 144 | self.active_plugins_count = 0 |
146 | 145 | self.active_plugins_count_lock = threading.RLock() |
147 | 146 | |
148 | #TODO: check if it is better using collections.deque | |
147 | # TODO: check if it is better using collections.deque | |
149 | 148 | # a performance analysis should be done |
150 | 149 | # http://docs.python.org/library/collections.html#collections.deque |
151 | 150 | |
169 | 168 | self._setupActionDispatcher() |
170 | 169 | |
171 | 170 | self.objects_with_updates = [] |
172 | ||
173 | #used to highligthing | |
174 | self.treeWordsTries = TreeWordsTries() | |
175 | 171 | |
176 | 172 | def __getattr__(self, name): |
177 | 173 | getLogger(self).debug("ModelObject attribute to refactor: %s" % name) |
202 | 198 | self._object_factory.register(model.common.ModelObjectNote) |
203 | 199 | self._object_factory.register(model.common.ModelObjectCred) |
204 | 200 | |
201 | def _checkParent(self, parent_type): | |
202 | """Takes a parent_type and returns the appropiate checkParentDecorator, | |
203 | a function that takes another function (most probably you are using | |
204 | it for the __add method) and checks if the object as a parent of | |
205 | parent_type before adding it. | |
206 | """ | |
207 | def checkParentDecorator(add_func): | |
208 | @wraps(add_func) | |
209 | def addWrapper(new_obj, parent_id=None, *args): | |
210 | parent = self.mappers_manager.find(parent_type, parent_id) | |
211 | if parent: | |
212 | add_func(new_obj, parent_id, *args) | |
213 | else: | |
214 | msg = "A parent is needed for %s objects" % new_obj.class_signature | |
215 | getLogger(self).error(msg) | |
216 | return False | |
217 | return addWrapper | |
218 | return checkParentDecorator | |
219 | ||
205 | 220 | def _setupActionDispatcher(self): |
221 | ||
222 | # these are decorators for the __add method. | |
223 | checkParentHost = self._checkParent('Host') | |
224 | checkParentInterface = self._checkParent('Interface') | |
225 | checkParentService = self._checkParent('Service') | |
226 | checkParentVuln = self._checkParent('Vuln') | |
227 | checkParentNote = self._checkParent('Note') | |
228 | ||
206 | 229 | self._actionDispatcher = { |
207 | 230 | modelactions.ADDHOST: self.__add, |
208 | 231 | modelactions.DELHOST: self.__del, |
209 | 232 | modelactions.EDITHOST: self.__edit, |
210 | modelactions.ADDINTERFACE: self.__add, | |
233 | modelactions.ADDINTERFACE: checkParentHost(self.__add), | |
211 | 234 | modelactions.DELINTERFACE: self.__del, |
212 | 235 | modelactions.EDITINTERFACE: self.__edit, |
213 | modelactions.ADDSERVICEINT: self.__add, | |
236 | modelactions.ADDSERVICEINT: checkParentInterface(self.__add), | |
214 | 237 | modelactions.DELSERVICEINT: self.__del, |
215 | 238 | modelactions.EDITSERVICE: self.__edit, |
216 | #Vulnerability | |
217 | modelactions.ADDVULNINT: self.__add, | |
239 | # Vulnerability | |
240 | modelactions.ADDVULNINT: checkParentInterface(self.__add), | |
218 | 241 | modelactions.DELVULNINT: self.__del, |
219 | modelactions.ADDVULNHOST: self.__add, | |
242 | modelactions.ADDVULNHOST: checkParentHost(self.__add), | |
220 | 243 | modelactions.DELVULNHOST: self.__del, |
221 | modelactions.ADDVULNSRV: self.__add, | |
244 | modelactions.ADDVULNSRV: checkParentService(self.__add), | |
222 | 245 | modelactions.DELVULNSRV: self.__del, |
223 | 246 | modelactions.ADDVULN: self.__add, |
224 | 247 | modelactions.DELVULN: self.__del, |
225 | modelactions.ADDVULNWEBSRV: self.__add, | |
248 | modelactions.ADDVULNWEBSRV: checkParentService(self.__add), | |
226 | 249 | modelactions.EDITVULN: self.__edit, |
227 | #Note | |
228 | modelactions.ADDNOTEINT: self.__add, | |
250 | # Note | |
251 | modelactions.ADDNOTEINT: checkParentInterface(self.__add), | |
229 | 252 | modelactions.DELNOTEINT: self.__del, |
230 | modelactions.ADDNOTEHOST: self.__add, | |
253 | modelactions.ADDNOTEHOST: checkParentHost(self.__add), | |
231 | 254 | modelactions.DELNOTEHOST: self.__del, |
232 | modelactions.ADDNOTESRV: self.__add, | |
255 | modelactions.ADDNOTESRV: checkParentService(self.__add), | |
233 | 256 | modelactions.DELNOTESRV: self.__del, |
234 | modelactions.ADDNOTEVULN: self.__add, | |
257 | modelactions.ADDNOTEVULN: checkParentVuln(self.__add), | |
235 | 258 | modelactions.ADDNOTE: self.__add, |
236 | 259 | modelactions.DELNOTE: self.__del, |
237 | modelactions.ADDCREDSRV: self.__add, | |
260 | modelactions.ADDCREDSRV: checkParentService(self.__add), | |
238 | 261 | modelactions.DELCREDSRV: self.__del, |
239 | modelactions.ADDNOTENOTE: self.__add, | |
262 | modelactions.ADDNOTENOTE: checkParentNote(self.__add), | |
240 | 263 | modelactions.EDITNOTE: self.__edit, |
241 | 264 | modelactions.EDITCRED: self.__edit, |
242 | modelactions.ADDCRED: self.__add, | |
265 | modelactions.ADDCRED: checkParentHost(self.__add), | |
243 | 266 | modelactions.DELCRED: self.__del, |
244 | 267 | # Plugin states |
245 | 268 | modelactions.PLUGINSTART: self._pluginStart, |
262 | 285 | res = action_callback(*args) |
263 | 286 | except Exception: |
264 | 287 | api.log("An exception occurred while dispatching an action (%r(%r)\n%s" % |
265 | (action_callback, args, traceback.format_exc()), "ERROR") | |
288 | (action_callback, args, traceback.format_exc()), "ERROR") | |
266 | 289 | finally: |
267 | 290 | self.__release_host_lock() |
268 | 291 | return res |
276 | 299 | self._sync_api_request = True |
277 | 300 | |
278 | 301 | api.devlog("_processAction - %s - parameters = %s" % |
279 | (action, str(parameters))) | |
302 | (action, str(parameters))) | |
280 | 303 | |
281 | 304 | action_callback = self._actionDispatcher[action] |
282 | 305 | res = self._dispatchActionWithLock(action_callback, *parameters) |
283 | 306 | |
284 | 307 | # finally we notify the widgets about this change |
285 | #if res: # notify only if action was done successfuly | |
286 | #self._notifyModelUpdated(*parameters) | |
287 | #else: | |
308 | # if res: # notify only if action was done successfuly | |
309 | # self._notifyModelUpdated(*parameters) | |
310 | # else: | |
288 | 311 | if not res: |
289 | 312 | api.devlog("Action code %d failed. Parameters = %s" % |
290 | (action, str(parameters))) | |
313 | (action, str(parameters))) | |
291 | 314 | if sync: |
292 | 315 | self._sync_api_request = False |
293 | 316 | |
303 | 326 | def resolveConflict(self, conflict, kwargs): |
304 | 327 | if self.__edit(conflict.getFirstObject(), **kwargs): |
305 | 328 | conflict.getFirstObject().updateResolved(conflict) |
306 | if conflict.getModelObjectType() == "Interface": | |
307 | ipv4 = kwargs['ipv4'] | |
308 | ipv6 = kwargs['ipv6'] | |
309 | hostnames = kwargs['hostnames'] | |
310 | ||
311 | if not ipv4['address'] in ["0.0.0.0", None]: | |
312 | self.treeWordsTries.removeWord(ipv4['address']) | |
313 | self.treeWordsTries.addWord(ipv4['address']) | |
314 | ||
315 | if not ipv6['address'] in ["0000:0000:0000:0000:0000:0000:0000:0000", None]: | |
316 | self.treeWordsTries.removeWord(ipv6['address']) | |
317 | self.treeWordsTries.addWord(ipv6['address']) | |
318 | ||
319 | for h in hostnames: | |
320 | if h is not None: | |
321 | self.treeWordsTries.removeWord(h) | |
322 | self.treeWordsTries.addWord(h) | |
323 | ||
324 | 329 | notifier.conflictUpdate(-1) |
325 | #notifier.editHost(conflict.getFirstObject().getHost()) | |
326 | #self._notifyModelUpdated() | |
330 | # notifier.editHost(conflict.getFirstObject().getHost()) | |
331 | # self._notifyModelUpdated() | |
327 | 332 | |
328 | 333 | def removeConflictsByObject(self, obj): |
329 | 334 | if obj in self.objects_with_updates: |
375 | 380 | # if there is no new action it will block until timeout is reached |
376 | 381 | try: |
377 | 382 | # get new action or timeout (in secs) |
378 | #TODO: timeout should be set through config | |
383 | # TODO: timeout should be set through config | |
379 | 384 | current_action = self._pending_actions.get(timeout=2) |
380 | 385 | action = current_action[0] |
381 | 386 | parameters = current_action[1:] |
387 | 392 | # because if we don't do it, the daemon will be blocked forever |
388 | 393 | pass |
389 | 394 | except Exception: |
390 | getLogger(self).debug("something strange happened... unhandled exception?") | |
395 | getLogger(self).debug( | |
396 | "something strange happened... unhandled exception?") | |
391 | 397 | getLogger(self).debug(traceback.format_exc()) |
392 | 398 | |
393 | 399 | def sync_lock(self): |
413 | 419 | |
414 | 420 | def addUpdate(self, old_object, new_object): |
415 | 421 | # Returns True if the update was resolved without user interaction |
416 | res = True | |
417 | 422 | try: |
418 | 423 | mergeAction = old_object.addUpdate(new_object) |
419 | 424 | if mergeAction: |
420 | 425 | if old_object not in self.objects_with_updates: |
421 | 426 | self.objects_with_updates.append(old_object) |
422 | 427 | notifier.conflictUpdate(1) |
423 | res = False | |
428 | return False | |
424 | 429 | except: |
425 | res = False | |
426 | 430 | api.devlog("(%s).addUpdate(%s, %s) - failed" % |
427 | (self, old_object, new_object)) | |
428 | return res | |
429 | ||
431 | (self, old_object, new_object)) | |
432 | return False | |
433 | self.mappers_manager.update(old_object) | |
434 | notifier.editHost(old_object) | |
435 | return True | |
436 | ||
437 | # XXX: THIS DOESNT WORK | |
430 | 438 | def find(self, obj_id): |
431 | 439 | return self.mappers_manager.find(obj_id) |
432 | 440 | |
436 | 444 | Adds an action to the ModelController actions queue indicating a |
437 | 445 | new host must be added to the model |
438 | 446 | """ |
439 | self.__addPendingAction(modelactions.ADDHOST, host, category, update, old_hostname) | |
447 | self.__addPendingAction(modelactions.ADDHOST, | |
448 | host, category, update, old_hostname)\ | |
440 | 449 | |
441 | 450 | def addHostSYNC(self, host, category=None, update=False, old_hostname=None): |
442 | 451 | """ |
445 | 454 | """ |
446 | 455 | self._processAction(modelactions.ADDHOST, [host, None], sync=True) |
447 | 456 | |
448 | def __add(self, obj, parent_id=None, *args): | |
449 | dataMapper = self.mappers_manager.getMapper(obj.class_signature) | |
450 | old_obj = dataMapper.find(obj.getID()) | |
451 | if old_obj: | |
452 | if not old_obj.needs_merge(obj): | |
453 | # the object is exactly the same, | |
454 | # so return and do nothing | |
455 | return True | |
456 | if not self.addUpdate(old_obj, obj): | |
457 | return False | |
458 | dataMapper.save(old_obj) | |
459 | notifier.editHost(old_obj.getHost()) | |
460 | else: | |
461 | object_parent = self.mappers_manager.find(parent_id) | |
462 | if object_parent: | |
463 | object_parent.addChild(obj) | |
464 | # we have to make sure that certain objects have to have a parent | |
465 | if (obj.class_signature in | |
466 | [model.hosts.Interface.class_signature, | |
467 | model.hosts.Service.class_signature, | |
468 | model.common.ModelObjectNote.class_signature, | |
469 | model.common.ModelObjectVuln.class_signature, | |
470 | model.common.ModelObjectVulnWeb.class_signature, | |
471 | model.common.ModelObjectCred.class_signature] and object_parent is None): | |
472 | # TODO: refactor log module. We need to log twice to see it in | |
473 | # gui and in the terminal. Ugly. | |
474 | msg = "A parent is needed for %s objects" % obj.class_signature | |
475 | getLogger(self).error(msg) | |
476 | return False | |
477 | dataMapper.save(obj) | |
478 | self.treeWordsTries.addWord(obj.getName()) | |
479 | if obj.class_signature == model.hosts.Host.class_signature: | |
480 | notifier.addHost(obj) | |
481 | else: | |
482 | notifier.editHost(obj.getHost()) | |
483 | ||
457 | def _save_new_object(self, new_object): | |
458 | res = self.mappers_manager.save(new_object) | |
459 | if res: notifier.addObject(new_object) | |
460 | return res | |
461 | ||
462 | def _handle_conflict(self, old_obj, new_obj): | |
463 | if not old_obj.needs_merge(new_obj): return True | |
464 | return self.addUpdate(old_obj, new_obj) | |
465 | ||
466 | def __add(self, new_obj, parent_id=None, *args): | |
467 | old_obj = self.mappers_manager.find(new_obj.class_signature, new_obj.getID()) | |
468 | if not old_obj: | |
469 | return self._save_new_object(new_obj) | |
470 | return self._handle_conflict(old_obj, new_obj) | |
471 | ||
472 | def __edit(self, obj, *args, **kwargs): | |
473 | obj.updateAttributes(*args, **kwargs) | |
474 | self.mappers_manager.update(obj) | |
475 | ||
476 | # if obj.class_signature == model.hosts.Host.class_signature: | |
477 | notifier.editHost(obj) | |
478 | # else: | |
479 | # notifier.editHost(obj.getHost()) | |
484 | 480 | return True |
485 | 481 | |
486 | def __edit(self, obj, *args, **kwargs): | |
487 | dataMapper = self.mappers_manager.getMapper(obj.class_signature) | |
488 | obj.updateAttributes(*args, **kwargs) | |
489 | dataMapper.save(obj) | |
490 | # self.treeWordsTries.addWord(obj.getName()) | |
491 | ||
492 | if obj.class_signature == model.hosts.Host.class_signature: | |
493 | notifier.editHost(obj) | |
494 | else: | |
495 | notifier.editHost(obj.getHost()) | |
496 | return True | |
497 | ||
498 | def __del(self, objId, *args): | |
482 | def __del(self, objId, *args): | |
499 | 483 | obj = self.mappers_manager.find(objId) |
500 | 484 | if obj: |
501 | 485 | obj_parent = obj.getParent() |
502 | 486 | if obj_parent: |
503 | 487 | obj_parent.deleteChild(objId) |
504 | 488 | |
505 | if obj.getName(): | |
506 | self.treeWordsTries.removeWord(obj.getName()) | |
507 | ||
508 | 489 | self.removeConflictsByObject(obj) |
509 | 490 | |
510 | self.mappers_manager.remove(objId) | |
491 | self.mappers_manager.remove(objId, obj.class_signature) | |
511 | 492 | |
512 | 493 | if obj.class_signature == model.hosts.Host.class_signature: |
513 | 494 | notifier.delHost(objId) |
531 | 512 | """ |
532 | 513 | self._processAction(modelactions.DELHOST, [hostId], sync=True) |
533 | 514 | |
534 | ||
535 | 515 | def editHostSYNC(self, host, name, description, os, owned): |
536 | 516 | """ |
537 | 517 | SYNC API |
538 | 518 | Modifies a host from model |
539 | 519 | """ |
540 | self._processAction(modelactions.EDITHOST, [host, name, description, os, owned], sync=True) | |
520 | self._processAction(modelactions.EDITHOST, [ | |
521 | host, name, description, os, owned], sync=True) | |
541 | 522 | |
542 | 523 | def addInterfaceASYNC(self, hostid, interface, update=False): |
543 | 524 | """ |
552 | 533 | SYNC API |
553 | 534 | Adds interface directly to the model |
554 | 535 | """ |
555 | self._processAction(modelactions.ADDINTERFACE, [interface, hostId], sync=True) | |
536 | self._processAction(modelactions.ADDINTERFACE, [ | |
537 | interface, hostId], sync=True) | |
556 | 538 | |
557 | 539 | def delInterfaceASYNC(self, hostId, interfaceId): |
558 | 540 | """ |
567 | 549 | SYNC API |
568 | 550 | Deletes an interface from model |
569 | 551 | """ |
570 | self._processAction(modelactions.DELINTERFACE, [interface_id], sync=True) | |
552 | self._processAction(modelactions.DELINTERFACE, | |
553 | [interface_id], sync=True) | |
571 | 554 | |
572 | 555 | def editInterfaceSYNC(self, interface, name, description, hostnames, |
573 | 556 | mac, ipv4, ipv6, network_segment, |
589 | 572 | Adds an action to the ModelController actions queue indicating a |
590 | 573 | new services must be added to a specific host in a specific interface |
591 | 574 | """ |
592 | self.__addPendingAction(modelactions.ADDSERVICEINT, newService, interfaceId) | |
575 | self.__addPendingAction( | |
576 | modelactions.ADDSERVICEINT, newService, interfaceId) | |
593 | 577 | |
594 | 578 | def addServiceToInterfaceSYNC(self, host_id, interface_id, newService): |
595 | 579 | """ |
597 | 581 | Adds a service to a specific host in a specific interface |
598 | 582 | directly to the model |
599 | 583 | """ |
600 | self._processAction(modelactions.ADDSERVICEINT, [newService, interface_id], sync=True) | |
584 | self._processAction(modelactions.ADDSERVICEINT, [ | |
585 | newService, interface_id], sync=True) | |
601 | 586 | |
602 | 587 | def delServiceFromInterfaceASYNC(self, host, interfaceId, serviceId): |
603 | 588 | """ |
606 | 591 | particular service in a host and interface must be removed from the |
607 | 592 | model Interface parameter can be "ALL" |
608 | 593 | """ |
609 | self.__addPendingAction(modelactions.DELSERVICEINT, serviceId, interfaceId) | |
594 | self.__addPendingAction( | |
595 | modelactions.DELSERVICEINT, serviceId, interfaceId) | |
610 | 596 | |
611 | 597 | def delServiceFromInterfaceSYNC(self, host, interfaceId, serviceId): |
612 | 598 | """ |
622 | 608 | particular service in a host and interface must be removed from the model |
623 | 609 | appname parameter can be "ALL" |
624 | 610 | """ |
625 | self.__addPendingAction(modelactions.DELSERVICEAPP, host, appname, service) | |
611 | self.__addPendingAction( | |
612 | modelactions.DELSERVICEAPP, host, appname, service) | |
626 | 613 | |
627 | 614 | def delServiceFromApplicationSYNC(self, host, appname, service): |
628 | 615 | """ |
629 | 616 | SYNC API |
630 | 617 | Delete a service in a host and application from the model |
631 | 618 | """ |
632 | self._processAction(modelactions.DELSERVICEAPP, [host, appname, service], sync=True) | |
619 | self._processAction(modelactions.DELSERVICEAPP, [ | |
620 | host, appname, service], sync=True) | |
633 | 621 | |
634 | 622 | def editServiceSYNC(self, service, name, description, protocol, ports, status, version, owned): |
635 | 623 | """ |
636 | 624 | SYNC API |
637 | 625 | Modifies a host from model |
638 | 626 | """ |
639 | self._processAction(modelactions.EDITSERVICE, [service, name, description, protocol, ports, status, version, owned], sync=True) | |
627 | self._processAction(modelactions.EDITSERVICE, [ | |
628 | service, name, description, protocol, ports, status, version, owned], sync=True) | |
640 | 629 | |
641 | 630 | def editServiceASYNC(self, service, name, description, protocol, ports, status, version, owned): |
642 | 631 | """ |
643 | 632 | ASYNC API |
644 | 633 | Modifies a service from model |
645 | 634 | """ |
646 | self.__addPendingAction(modelactions.EDITSERVICE, service, name, description, protocol, ports, status, version, owned) | |
635 | self.__addPendingAction(modelactions.EDITSERVICE, service, | |
636 | name, description, protocol, ports, status, version, owned) | |
647 | 637 | |
648 | 638 | def __editService(self, service, name=None, description=None, |
649 | 639 | protocol=None, ports=None, status=None, |
650 | 640 | version=None, owned=None): |
651 | 641 | res = False |
652 | 642 | if service is not None: |
653 | service.updateAttributes(name, description, protocol, ports, status, version, owned) | |
643 | service.updateAttributes( | |
644 | name, description, protocol, ports, status, version, owned) | |
654 | 645 | notifier.editHost(service.getHost()) |
655 | 646 | res = True |
656 | 647 | return res |
679 | 670 | self.__addPendingAction(modelactions.ADDVULNINT, newVuln, intId) |
680 | 671 | |
681 | 672 | def addVulnToInterfaceSYNC(self, host, intId, newVuln): |
682 | self._processAction(modelactions.ADDVULNINT, [newVuln, intId], sync=True) | |
673 | self._processAction(modelactions.ADDVULNINT, [ | |
674 | newVuln, intId], sync=True) | |
683 | 675 | |
684 | 676 | def addVulnToApplicationASYNC(self, host, appname, newVuln): |
685 | self.__addPendingAction(modelactions.ADDVULNAPP, host, appname, newVuln) | |
677 | self.__addPendingAction(modelactions.ADDVULNAPP, | |
678 | host, appname, newVuln) | |
686 | 679 | |
687 | 680 | def addVulnToApplicationSYNC(self, host, appname, newVuln): |
688 | self._processAction(modelactions.ADDVULNAPP, [host, appname, newVuln], sync=True) | |
681 | self._processAction(modelactions.ADDVULNAPP, [ | |
682 | host, appname, newVuln], sync=True) | |
689 | 683 | |
690 | 684 | def addVulnToHostASYNC(self, hostId, newVuln): |
691 | 685 | self.__addPendingAction(modelactions.ADDVULNHOST, newVuln, hostId) |
692 | 686 | |
693 | 687 | def addVulnToHostSYNC(self, hostId, newVuln): |
694 | self._processAction(modelactions.ADDVULNHOST, [newVuln, hostId], sync=True) | |
688 | self._processAction(modelactions.ADDVULNHOST, [ | |
689 | newVuln, hostId], sync=True) | |
695 | 690 | |
696 | 691 | def addVulnToServiceASYNC(self, host, srvId, newVuln): |
697 | 692 | self.__addPendingAction(modelactions.ADDVULNSRV, newVuln, srvId) |
698 | 693 | |
699 | 694 | def addVulnToServiceSYNC(self, host, srvId, newVuln): |
700 | self._processAction(modelactions.ADDVULNSRV, [newVuln, srvId], sync=True) | |
695 | self._processAction(modelactions.ADDVULNSRV, [ | |
696 | newVuln, srvId], sync=True) | |
701 | 697 | |
702 | 698 | def addVulnSYNC(self, modelObjectId, newVuln): |
703 | self._processAction(modelactions.ADDVULN, [newVuln, modelObjectId], sync=True) | |
699 | self._processAction(modelactions.ADDVULN, [ | |
700 | newVuln, modelObjectId], sync=True) | |
704 | 701 | |
705 | 702 | def addVulnWebToServiceASYNC(self, host, srvId, newVuln): |
706 | 703 | self.__addPendingAction(modelactions.ADDVULNWEBSRV, newVuln, srvId) |
707 | 704 | |
708 | 705 | def addVulnWebToServiceSYNC(self, host, srvId, newVuln): |
709 | self._processAction(modelactions.ADDVULNWEBSRV, [newVuln, srvId], sync=True) | |
706 | self._processAction(modelactions.ADDVULNWEBSRV, | |
707 | [newVuln, srvId], sync=True) | |
710 | 708 | |
711 | 709 | def delVulnFromApplicationASYNC(self, hostname, appname, vuln): |
712 | self.__addPendingAction(modelactions.DELVULNAPP, hostname, appname, vuln) | |
710 | self.__addPendingAction(modelactions.DELVULNAPP, | |
711 | hostname, appname, vuln) | |
713 | 712 | |
714 | 713 | def delVulnFromApplicationSYNC(self, hostname, appname, vuln): |
715 | self._processAction(modelactions.DELVULNAPP, [hostname, appname, vuln], sync=True) | |
714 | self._processAction(modelactions.DELVULNAPP, [ | |
715 | hostname, appname, vuln], sync=True) | |
716 | 716 | |
717 | 717 | def delVulnFromInterfaceASYNC(self, hostname, intname, vuln): |
718 | self.__addPendingAction(modelactions.DELVULNINT, hostname, intname, vuln) | |
718 | self.__addPendingAction(modelactions.DELVULNINT, | |
719 | hostname, intname, vuln) | |
719 | 720 | |
720 | 721 | def delVulnFromInterfaceSYNC(self, hostname, intname, vuln): |
721 | self._processAction(modelactions.DELVULNINT, [hostname,intname, vuln], sync=True) | |
722 | self._processAction(modelactions.DELVULNINT, [ | |
723 | hostname, intname, vuln], sync=True) | |
722 | 724 | |
723 | 725 | def delVulnFromHostASYNC(self, hostId, vulnId): |
724 | 726 | self.__addPendingAction(modelactions.DELVULNHOST, vulnId) |
735 | 737 | def delVulnSYNC(self, model_object, vuln_id): |
736 | 738 | self._processAction(modelactions.DELVULN, [vuln_id], sync=True) |
737 | 739 | |
738 | ||
739 | 740 | def editVulnSYNC(self, vuln, name, desc, severity, resolution, refs): |
740 | self._processAction(modelactions.EDITVULN, [vuln, name, desc, severity, resolution, refs], sync=True) | |
741 | self._processAction(modelactions.EDITVULN, [ | |
742 | vuln, name, desc, severity, resolution, refs], sync=True) | |
741 | 743 | |
742 | 744 | def editVulnASYNC(self, vuln, name, desc, severity, resolution, refs): |
743 | self.__addPendingAction(modelactions.EDITVULN, vuln, name, desc, severity, resolution, refs) | |
745 | self.__addPendingAction(modelactions.EDITVULN, | |
746 | vuln, name, desc, severity, resolution, refs) | |
744 | 747 | |
745 | 748 | def editVulnWebSYNC(self, vuln, name, desc, website, path, refs, severity, resolution, |
746 | 749 | request, response, method, pname, params, query, |
754 | 757 | params, query, category): |
755 | 758 | self.__addPendingAction(modelactions.EDITVULN, |
756 | 759 | vuln, name, desc, website, path, refs, |
757 | severity, resolution, request, response, method, | |
758 | pname, params, query, category) | |
760 | severity, resolution, request, response, method, | |
761 | pname, params, query, category) | |
759 | 762 | |
760 | 763 | # Note |
761 | 764 | def addNoteToInterfaceASYNC(self, host, intId, newNote): |
762 | 765 | self.__addPendingAction(modelactions.ADDNOTEINT, newNote, intId) |
763 | 766 | |
764 | 767 | def addNoteToInterfaceSYNC(self, host, intId, newNote): |
765 | self._processAction(modelactions.ADDNOTEINT, [newNote, intId], sync=True) | |
768 | self._processAction(modelactions.ADDNOTEINT, [ | |
769 | newNote, intId], sync=True) | |
766 | 770 | |
767 | 771 | def addNoteToApplicationASYNC(self, host, appname, newNote): |
768 | self.__addPendingAction(modelactions.ADDNOTEAPP, host, appname, newNote) | |
772 | self.__addPendingAction(modelactions.ADDNOTEAPP, | |
773 | host, appname, newNote) | |
769 | 774 | |
770 | 775 | def addNoteToApplicationSYNC(self, host, appname, newNote): |
771 | self._processAction(modelactions.ADDNOTEAPP, [host, appname, newNote], sync=True) | |
776 | self._processAction(modelactions.ADDNOTEAPP, [ | |
777 | host, appname, newNote], sync=True) | |
772 | 778 | |
773 | 779 | def addNoteToHostASYNC(self, hostId, newNote): |
774 | 780 | self.__addPendingAction(modelactions.ADDNOTEHOST, newNote, hostId) |
775 | 781 | |
776 | 782 | def addNoteToHostSYNC(self, hostId, newNote): |
777 | self._processAction(modelactions.ADDNOTEHOST, [newNote, hostId], sync=True) | |
783 | self._processAction(modelactions.ADDNOTEHOST, [ | |
784 | newNote, hostId], sync=True) | |
778 | 785 | |
779 | 786 | def addNoteToServiceASYNC(self, host, srvId, newNote): |
780 | 787 | self.__addPendingAction(modelactions.ADDNOTESRV, newNote, srvId) |
783 | 790 | self.__addPendingAction(modelactions.ADDNOTENOTE, newNote, note_id) |
784 | 791 | |
785 | 792 | def addNoteToNoteSYNC(self, noteId, newNote): |
786 | self._processAction(modelactions.ADDNOTENOTE, [newNote, noteId], sync=True) | |
793 | self._processAction(modelactions.ADDNOTENOTE, [ | |
794 | newNote, noteId], sync=True) | |
787 | 795 | |
788 | 796 | def addNoteToServiceSYNC(self, host, srvId, newNote): |
789 | self._processAction(modelactions.ADDNOTESRV, [newNote, srvId], sync=True) | |
797 | self._processAction(modelactions.ADDNOTESRV, [ | |
798 | newNote, srvId], sync=True) | |
790 | 799 | |
791 | 800 | def addNoteSYNC(self, model_object, newNote): |
792 | self._processAction(modelactions.ADDNOTE, [newNote, model_object], sync=True) | |
801 | self._processAction(modelactions.ADDNOTE, [ | |
802 | newNote, model_object], sync=True) | |
793 | 803 | |
794 | 804 | def delNoteFromApplicationASYNC(self, hostname, appname, note): |
795 | self.__addPendingAction(modelactions.DELNOTEAPP, hostname, appname, note) | |
805 | self.__addPendingAction(modelactions.DELNOTEAPP, | |
806 | hostname, appname, note) | |
796 | 807 | |
797 | 808 | def delNoteFromApplicationSYNC(self, hostname, appname, note): |
798 | self._processAction(modelactions.DELNOTEAPP, [hostname, appname, note], sync=True) | |
809 | self._processAction(modelactions.DELNOTEAPP, [ | |
810 | hostname, appname, note], sync=True) | |
799 | 811 | |
800 | 812 | def delNoteFromInterfaceASYNC(self, hostname, intname, noteId): |
801 | 813 | self.__addPendingAction(modelactions.DELNOTEINT, noteId) |
822 | 834 | self.__addPendingAction(modelactions.ADDCREDSRV, newCred, srvId) |
823 | 835 | |
824 | 836 | def addCredToServiceSYNC(self, host, srvId, newCred): |
825 | self._processAction(modelactions.ADDCREDSRV, [newCred, srvId], sync=True) | |
837 | self._processAction(modelactions.ADDCREDSRV, [ | |
838 | newCred, srvId], sync=True) | |
826 | 839 | |
827 | 840 | def delCredFromServiceASYNC(self, hostname, srvname, credId): |
828 | 841 | self.__addPendingAction(modelactions.DELCREDSRV, credId) |
830 | 843 | def delCredFromServiceSYNC(self, hostname, srvname, credId): |
831 | 844 | self._processAction(modelactions.DELCREDSRV, [credId], sync=True) |
832 | 845 | |
833 | ||
834 | 846 | def editNoteSYNC(self, note, name, text): |
835 | self._processAction(modelactions.EDITNOTE, [note, name, text], sync=True) | |
847 | self._processAction(modelactions.EDITNOTE, [ | |
848 | note, name, text], sync=True) | |
836 | 849 | |
837 | 850 | def editNoteASYNC(self, note, name, text): |
838 | 851 | self.__addPendingAction(modelactions.EDITNOTE, note, name, text) |
839 | 852 | |
840 | 853 | def editCredSYNC(self, cred, username, password): |
841 | self._processAction(modelactions.EDITCRED, [cred, username, password], sync=True) | |
854 | self._processAction(modelactions.EDITCRED, [ | |
855 | cred, username, password], sync=True) | |
842 | 856 | |
843 | 857 | def editCredASYNC(self, cred, username, password): |
844 | self.__addPendingAction(modelactions.EDITCRED, cred, username, password) | |
858 | self.__addPendingAction(modelactions.EDITCRED, | |
859 | cred, username, password) | |
845 | 860 | |
846 | 861 | def addCredSYNC(self, model_object_id, newCred): |
847 | self._processAction(modelactions.ADDCRED, [newCred, model_object_id], sync=True) | |
862 | self._processAction(modelactions.ADDCRED, [ | |
863 | newCred, model_object_id], sync=True) | |
848 | 864 | |
849 | 865 | def delCredSYNC(self, model_object, cred_id): |
850 | 866 | self._processAction(modelactions.DELCRED, [cred_id], sync=True) |
907 | 923 | username, password=password, parent_id=parent_id) |
908 | 924 | |
909 | 925 | def getHost(self, name): |
910 | hosts_mapper = self.mappers_manager.getMapper(model.hosts.Host.class_signature) | |
926 | hosts_mapper = self.mappers_manager.getMapper( | |
927 | model.hosts.Host.class_signature) | |
911 | 928 | return hosts_mapper.find(name) |
912 | 929 | |
913 | 930 | def getAllHosts(self): |
932 | 949 | hosts = model.hosts.Host.class_signature |
933 | 950 | count = self.mappers_manager.getMapper(hosts).getCount() |
934 | 951 | except: |
935 | getLogger(self).debug("Couldn't get host count: assuming it is zero.") | |
952 | getLogger(self).debug( | |
953 | "Couldn't get host count: assuming it is zero.") | |
936 | 954 | count = 0 |
937 | 955 | return count |
938 | 956 | |
943 | 961 | services = model.hosts.Service.class_signature |
944 | 962 | count = self.mappers_manager.getMapper(services).getCount() |
945 | 963 | except: |
946 | getLogger(self).debug("Couldn't get services count: assuming it is zero.") | |
964 | getLogger(self).debug( | |
965 | "Couldn't get services count: assuming it is zero.") | |
947 | 966 | count = 0 |
948 | 967 | return count |
949 | 968 | |
956 | 975 | count = (self.mappers_manager.getMapper(vulns).getCount() + |
957 | 976 | self.mappers_manager.getMapper(web_vulns).getCount()) |
958 | 977 | except: |
959 | getLogger(self).debug("Couldn't get vulnerabilities count: assuming it is zero.") | |
978 | getLogger(self).debug( | |
979 | "Couldn't get vulnerabilities count: assuming it is zero.") | |
960 | 980 | count = 0 |
961 | 981 | return count |
7 | 7 | |
8 | 8 | class ModelObjectDiff(object): |
9 | 9 | def __init__(self, objLeft, objRight): |
10 | if not isinstance(objLeft, objRight.__class__): | |
10 | try: | |
11 | if not getattr(objLeft, 'class_signature') == getattr(objRight, 'class_signature'): | |
12 | raise Exception("Cannot compare objects of different signature. objLeft (%s) vs objRight (%s)" | |
13 | % (objLeft.class_signature, objRight.class_signature)) | |
14 | except: | |
11 | 15 | raise Exception("Cannot compare objects of different classes. objLeft (%s) vs objRight (%s)" |
12 | % (objLeft.__class__.__name__, objRight.__class__.__name__)) | |
16 | % (objLeft.__class__.__name__, objRight.__class__.__name__)) | |
17 | ||
13 | 18 | self.obj1, self.obj2 = objLeft, objRight |
14 | 19 | |
15 | 20 | self.conflicting = [] |
23 | 28 | |
24 | 29 | def getPropertiesDiff(self): |
25 | 30 | prop_diff = {} |
26 | for attrdesc, attrname in self.obj1.publicattrsrefs.items(): | |
31 | for attrname in self.obj1.publicattrsrefs().keys(): | |
27 | 32 | info = lambda attr_ref: attr_ref() if callable(attr_ref) else attr_ref |
28 | prop1 = info(self.obj1.__getattribute__(attrname)) | |
29 | prop2 = info(self.obj2.__getattribute__(attrname)) | |
33 | prop1 = info(self.obj1.__getattribute__(self.obj1.publicattrsrefs().get(attrname))) | |
34 | prop2 = info(self.obj2.__getattribute__(self.obj2.publicattrsrefs.get(attrname))) | |
30 | 35 | if prop1 != prop2: |
31 | prop_diff[attrdesc] = (prop1, prop2) | |
36 | prop_diff[attrname] = (prop1, prop2) | |
32 | 37 | |
33 | 38 | return prop_diff |
34 | 39 | |
35 | def getDifferences(self, ObjDiff, getAllFunc, getById): | |
36 | """ Polymorphic method to get the differences between the list of objects on a ModelObject. | |
37 | Pass the ObjectDiff class, the unbound method to get all the objects and the one to get one by ID""" | |
40 | # def getDifferences(self, ObjDiff, getAllFunc, getById): | |
41 | # """ Polymorphic method to get the differences between the list of objects on a ModelObject. | |
42 | # Pass the ObjectDiff class, the unbound method to get all the objects and the one to get one by ID""" | |
38 | 43 | |
39 | only_in_obj1 = [i for i in getAllFunc(self.obj1) if not i in getAllFunc(self.obj2)] | |
40 | only_in_obj2 = [i for i in getAllFunc(self.obj2) if not i in getAllFunc(self.obj1)] | |
44 | # only_in_obj1 = [i for i in getAllFunc(self.obj1) if not i in getAllFunc(self.obj2)] | |
45 | # only_in_obj2 = [i for i in getAllFunc(self.obj2) if not i in getAllFunc(self.obj1)] | |
41 | 46 | |
42 | return (only_in_obj1, only_in_obj2) | |
47 | # return (only_in_obj1, only_in_obj2) | |
43 | 48 | |
44 | def getDifferencesIn(self, getAllFunc): | |
45 | """ Polymorphic method to get the differences between the list of objects on a ModelObject. | |
46 | Pass the ObjectDiff class, the unbound method to get all the objects and the one to get one by ID""" | |
47 | only_in_obj1 = [i for i in getAllFunc(self.obj1) if not i in getAllFunc(self.obj2)] | |
48 | only_in_obj2 = [i for i in getAllFunc(self.obj2) if not i in getAllFunc(self.obj1)] | |
49 | # def getDifferencesIn(self, getAllFunc): | |
50 | # """ Polymorphic method to get the differences between the list of objects on a ModelObject. | |
51 | # Pass the ObjectDiff class, the unbound method to get all the objects and the one to get one by ID""" | |
52 | # only_in_obj1 = [i for i in getAllFunc(self.obj1) if not i in getAllFunc(self.obj2)] | |
53 | # only_in_obj2 = [i for i in getAllFunc(self.obj2) if not i in getAllFunc(self.obj1)] | |
49 | 54 | |
50 | return only_in_obj1, only_in_obj2 | |
55 | # return only_in_obj1, only_in_obj2 | |
51 | 56 | |
52 | 57 | |
class MergeStrategy(object):
    """Abstract strategy for resolving a conflict between two values.

    Concrete subclasses implement solve() to pick (or combine) a value.
    """
    @staticmethod
    def solve(old, new):
        raise NotImplementedError("This is an abstract class")
57 | 62 | |
58 | 63 | |
59 | 64 | class MergeKeepNew(MergeStrategy): |
455 | 455 | |
def merge(host1, host2):
    """Delegate merging of the two hosts to the module's model controller."""
    return __model_controller.merge(host1, host2)


def addHostFromChanges(obj):
    """Broadcast an 'added host' change. Returns True iff a notification was sent."""
    if obj is None:
        return False
    notification_center.addHostFromChanges(obj)
    return True


def deleteHostFromChanges(obj):
    """Broadcast a 'deleted host' change. Returns True iff a notification was sent."""
    if obj is None:
        return False
    notification_center.deleteHostFromChanges(obj)
    return True


def editHostFromChanges(obj):
    """Broadcast an 'edited host' change. Returns True iff a notification was sent."""
    if obj is None:
        return False
    notification_center.editHostFromChanges(obj)
    return True
49 | 49 | self._default_gateway = api.getLocalDefaultGateway() \ |
50 | 50 | if default_gateway is None else default_gateway |
51 | 51 | |
    def __str__(self):
        # Human-readable label: "<name> (<total vuln count>)".
        return "{0} ({1})".format(self.name, self.getVulnAmount())
54 | ||
52 | 55 | def _updatePublicAttributes(self): |
53 | 56 | |
54 | 57 | self.publicattrs['Operating System'] = 'getOS' |
55 | 58 | self.publicattrsrefs['Operating System'] = '_operating_system' |
59 | ||
60 | def getVulnAmount(self): | |
61 | vuln_count = 0 | |
62 | vuln_count += len(self.getVulns()) | |
63 | for interface in self.getAllInterfaces(): | |
64 | vuln_count += len(interface.getVulns()) | |
65 | for service in interface.getAllServices(): | |
66 | vuln_count += len(service.getVulns()) | |
67 | return vuln_count | |
68 | ||
56 | 69 | |
57 | 70 | def accept(self, visitor): |
58 | 71 | """ Accept method for visitor in the host leaf""" |
264 | 277 | self.amount_ports_closed = 0 |
265 | 278 | self.amount_ports_filtered = 0 |
266 | 279 | |
    def __str__(self):
        # Interfaces are labelled by name only (no vuln count, unlike Host).
        return "{0}".format(self.name)
282 | ||
267 | 283 | def _updatePublicAttributes(self): |
268 | 284 | |
269 | 285 | self.publicattrs['MAC Address'] = 'mac' |
296 | 312 | servs.accept(visitor) |
297 | 313 | visitor.visit(self) |
298 | 314 | |
299 | ||
300 | 315 | def tieBreakable(self, property_key): |
301 | 316 | if property_key in ["_hostnames"]: |
302 | 317 | return True |
317 | 332 | |
318 | 333 | def getName(self): |
319 | 334 | return self._name |
320 | ||
321 | ||
322 | 335 | |
323 | 336 | def setMAC(self, mac): |
324 | 337 | self.mac = mac |
497 | 510 | self.publicattrs['Status'] = 'getStatus' |
498 | 511 | self.publicattrs['Version'] = 'getVersion' |
499 | 512 | |
    def __str__(self):
        # "<name> (<vuln count>)", mirroring Host.__str__.
        return "{0} ({1})".format(self.name, self.getVulnAmount())

    def getVulnAmount(self):
        # Services only count their own vulnerabilities (no children).
        return len(self.getVulns())
500 | 518 | |
501 | 519 | def setName(self, name): |
502 | 520 | self._name = name |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | ||
7 | from model.common import (ModelObjectNote, ModelObjectCred, ModelObjectVuln, | |
8 | ModelObjectVulnWeb) | |
9 | from model.hosts import Host, Interface, Service | |
10 | ||
11 | ||
class CHANGETYPE(object):
    """Enumeration of the kinds of change observed in the workspace."""
    # BUG FIX: 'ADD = 1,' carried a trailing comma, which made ADD the
    # tuple (1,) instead of the int 1, inconsistent with the other members.
    ADD = 1
    UPDATE = 2
    DELETE = 3
    UNKNOWN = 999
17 | ||
18 | ||
class ChangeFactory(object):
    """Builds Change objects from a persisted document's object and revision."""

    def __init__(self):
        pass

    def create(self, obj, revision, deleted):
        """Return a ChangeModelObject or ChangeCmd describing what happened.

        The change kind is derived from the deletion flag and the CouchDB
        style revision string ("<number>-<hash>"): revision > 1 is an update.
        """
        if deleted:
            change_type = CHANGETYPE.DELETE
        elif int(revision.split('-')[0]) > 1:
            change_type = CHANGETYPE.UPDATE
        else:
            change_type = CHANGETYPE.ADD

        model_signatures = (
            Host.class_signature,
            Interface.class_signature,
            Service.class_signature,
            ModelObjectNote.class_signature,
            ModelObjectVuln.class_signature,
            ModelObjectVulnWeb.class_signature,
            ModelObjectCred.class_signature,
            'unknown',
        )
        if obj.class_signature in model_signatures:
            return ChangeModelObject(obj, change_type)
        return ChangeCmd(obj, change_type)
44 | ||
45 | ||
class Change(object):
    """Base notification describing a single change seen in the workspace."""

    def __init__(self, obj, change_type):
        self.change_type = change_type
        self.object = obj
        # Generic message; subclasses overwrite with something friendlier.
        self.msg = "Change: Action: {0} - Type: {1}".format(
            self.change_type, self.object.class_signature)

    def getMessage(self):
        return self.msg

    def getChangeType(self):
        return self.change_type

    def getObject(self):
        return self.object
61 | ||
62 | ||
class ChangeModelObject(Change):
    """Change notification for model objects (hosts, services, vulns, ...)."""

    def __init__(self, obj, change_type):
        Change.__init__(self, obj, change_type)
        # Map each known change kind to its past-tense verb; unknown kinds
        # keep the generic message from Change.__init__.
        verbs = {
            CHANGETYPE.DELETE: "deleted",
            CHANGETYPE.UPDATE: "updated",
            CHANGETYPE.ADD: "added",
        }
        verb = verbs.get(change_type)
        if verb is not None:
            self.msg = "%s %s %s" % (
                self.object.class_signature, self.object.getName(), verb)
75 | ||
76 | ||
class ChangeCmd(Change):
    """Change notification for command-run records (tool executions)."""

    def __init__(self, obj, change_type):
        Change.__init__(self, obj, change_type)
        # UPDATE means the command finished; ADD means it just started.
        if change_type == CHANGETYPE.UPDATE:
            prefix = "Command finished"
        elif change_type == CHANGETYPE.ADD:
            prefix = "Command started"
        else:
            prefix = None
        if prefix is not None:
            self.msg = "%s: %s@%s: %s %s" % (
                prefix, self.object.user, self.object.hostname,
                self.object.command, self.object.params)
88 | ||
89 | ||
90 | change_factory = ChangeFactory() |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | ||
7 | from persistence.persistence_managers import DbConnector | |
8 | ||
9 | ||
class NullPersistenceManager(DbConnector):
    """No-op DbConnector used when a mapper has no real storage behind it.

    Writes are discarded, lookups always come back empty.
    """

    def __init__(self):
        super(NullPersistenceManager, self).__init__()

    def saveDocument(self, document):
        # Intentionally discard the document.
        pass

    def getDocument(self, documentId):
        # Nothing is stored, so nothing can ever be found.
        return None

    def remove(self, documentId):
        # Report success even though there was nothing to delete.
        return True

    def getDocsByFilter(self, parentId, type):
        return []

    def getChildren(self, parentId):
        return []
28 | ||
29 | ||
class AbstractMapper(object):
    """Base object<->document mapper backed by a persistence manager.

    Subclasses set mapped_class / dummy_args / dummy_kwargs and implement
    serialize()/unserialize(). An identity map guarantees that repeated
    loads of the same id return the same live object.
    """
    mapped_class = None
    dummy_args = []
    dummy_kwargs = {}

    def __init__(self, mmanager, pmanager=None):
        self.mapper_manager = mmanager
        self.pmanager = pmanager if pmanager else NullPersistenceManager()
        # Identity map: object id -> live object.
        self.object_map = {}

    def setPersistenceManager(self, pmanager):
        self.pmanager = pmanager

    def save(self, obj):
        """Serialize obj, persist the document and register obj in the
        identity map. Returns the object's id."""
        doc = self.serialize(obj)
        self.pmanager.saveDocument(doc)
        self.object_map[obj.getID()] = obj
        return obj.getID()

    def serialize(self, obj):
        raise NotImplementedError("AbstractMapper should not be used directly")

    def unserialize(self, doc):
        raise NotImplementedError("AbstractMapper should not be used directly")

    def load(self, id):
        """Materialize the object with the given id from cache or storage.

        Returns None when the document is missing or is of another type.
        """
        if id in self.object_map:
            return self.object_map[id]
        doc = self.pmanager.getDocument(id)
        if not doc or doc.get("type") != self.mapped_class.class_signature:
            return None
        obj = self.mapped_class(*self.dummy_args, **self.dummy_kwargs)
        obj.setID(doc.get("_id"))
        # Register before unserializing so child lookups that point back at
        # this object resolve through the identity map instead of recursing.
        self.object_map[obj.getID()] = obj
        self.unserialize(obj, doc)
        return obj

    def reload(self, obj):
        """Re-read obj's document from storage and apply it in place."""
        doc = self.pmanager.getDocument(obj.getID())
        self.unserialize(obj, doc)

    def update(self, obj):
        """Persist the current state of an already-saved object.

        BUG FIX: the original serialized obj but then passed the raw model
        object -- not the serialized document -- to saveDocument(), unlike
        save(). Persist the document, as save() does.
        """
        self.pmanager.saveDocument(self.serialize(obj))

    def delete(self, id):
        """Remove id from storage and the identity map.

        Returns the evicted object, or None if it was not cached.
        """
        self.pmanager.remove(id)
        return self.object_map.pop(id, None)

    def find(self, id, with_load=True):
        """Look up id in the identity map, optionally falling back to load().

        Falsy ids and the literal string "None" are treated as 'not found'.
        """
        if not id or id == "None":
            return None
        cached = self.object_map.get(id)
        if cached or not with_load:
            return cached
        return self.load(id)

    def findByFilter(self, parent, type):
        return self.pmanager.getDocsByFilter(parent, type)

    def getChildren(self, parent):
        return self.pmanager.getChildren(parent)

    def getAll(self):
        # Only objects already materialized in the identity map.
        return self.object_map.values()

    def getCount(self):
        return len(self.object_map)
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | ||
7 | from persistence.mappers.abstract_mapper import AbstractMapper | |
8 | from model.hosts import Host, Interface, Service | |
9 | from model.common import ModelObjectNote, ModelObjectVuln, ModelObjectVulnWeb, ModelObjectCred, Metadata | |
10 | from model.commands_history import CommandRunInformation | |
11 | from model.workspace import Workspace | |
12 | ||
13 | ||
14 | #Every mapper has to be registered in the dict at the end of the file | |
15 | ||
16 | ||
class ModelObjectMapper(AbstractMapper):
    """Shared mapper for Faraday model objects.

    Handles the fields common to every model object (name, owner, parent,
    description, metadata) and the loading of child notes, vulns and creds
    through their own mappers.
    """
    mapped_class = None
    dummy_args = []
    dummy_kwargs = {}

    def __init__(self, mmanager, pmanager=None):
        super(ModelObjectMapper, self).__init__(mmanager, pmanager)
        # Raw child documents of the object currently being unserialized.
        self.children = []

    def serialize(self, mobj):
        """Return the generic document (dict) for any model object."""
        return {
            "type": mobj.class_signature,
            "_id": mobj.getID(),
            "name": mobj.getName(),
            "owned": mobj.isOwned(),
            "parent": mobj.getParent().getID() if mobj.getParent() is not None else None,
            "owner": mobj.getOwner(),
            "description": mobj.getDescription(),
            "metadata": mobj.getMetadata().__dict__
        }

    def unserialize(self, mobj, doc):
        """Populate mobj from doc and attach its children (notes/vulns/creds)."""
        mobj_type = mobj.class_signature
        self.children = self.findChildren(mobj.getID())
        mobj.setName(doc.get("name"))
        mobj.setOwned(doc.get("owned"))
        if doc.get("parent", None):
            # Resolve the parent through the mapper manager (identity map).
            mobj.setParent(self.mapper_manager.find(doc.get("parent")))
        mobj.setOwner(doc.get("owner"))
        # NOTE: Vulnerability and VulnerabilityWeb, when modified from the web,
        # have a 'desc' key, not a description key, which is already handled
        # by their specific unserialize method
        if mobj_type != 'Vulnerability' and mobj_type != 'VulnerabilityWeb':
            mobj.setDescription(doc.get("description"))
        # NOTE(review): this rebuilds metadata from the object's *current*
        # metadata (mobj.getMetadata()) rather than doc.get("metadata"), so
        # the persisted metadata is effectively ignored on load -- confirm
        # this is intentional.
        mobj.setMetadata( Metadata('').fromDict(mobj.getMetadata().__dict__))
        if self.children:
            self.setNotes(mobj)
            self.setVulns(mobj)
            self.setCreds(mobj)
        return mobj

    def delete(self, mobj_id):
        """Cascade delete: remove every child before the object itself."""
        mobj = self.mapper_manager.find(mobj_id)
        for child in mobj.getChilds().values():
            self.mapper_manager.remove(child.getID())
        super(ModelObjectMapper, self).delete(mobj_id)

    def _loadChilds(self, type):
        # Load all cached child documents of the given type through that
        # type's own mapper; returns {id: object}.
        ids = [doc['_id']
               for doc in self.children
               if doc.get("type") == type]
        mapper = self.mapper_manager.getMapper(type)
        obj_dict = {}
        for id in ids:
            obj = mapper.load(id)
            obj_dict[obj.getID()] = obj
        return obj_dict

    def setNotes(self, mobj):
        mobj.setNotes(
            self._loadChilds(ModelObjectNote.class_signature))

    def setVulns(self, mobj):
        # Plain and web vulnerabilities are stored separately; merge them.
        vulns = self._loadChilds(ModelObjectVuln.class_signature)
        vulns_web = self._loadChilds(ModelObjectVulnWeb.class_signature)
        vulns.update(vulns_web)
        mobj.setVulns(vulns)

    def setCreds(self, mobj):
        mobj.setCreds(
            self._loadChilds(ModelObjectCred.class_signature))

    def findForParent(self, obj_id):
        """Return raw documents of this mapper's type under the given parent."""
        return self.findByFilter(parent=obj_id, type=self.mapped_class.class_signature)

    def findChildren(self, obj_id):
        """Return the raw child documents (any type) of the given object."""
        return self.getChildren(obj_id)
        #return self.findByFilter(parent=obj_id, type=None)
96 | ||
class HostMapper(ModelObjectMapper):
    """Maps Host objects: adds OS and default gateway, loads interfaces."""
    mapped_class = Host
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(HostMapper, self).__init__(mmanager, pmanager)

    def serialize(self, host):
        """Extend the generic document with host-specific fields."""
        doc = super(HostMapper, self).serialize(host)
        doc["os"] = host.getOS()
        doc["default_gateway"] = host.getDefaultGateway()
        return doc

    def unserialize(self, host, doc):
        """Populate host from doc, then attach its interfaces."""
        host.setOS(doc.get("os"))
        host.setDefaultGateway(doc.get("default_gateway"))
        super(HostMapper, self).unserialize(host, doc)
        self.setInterfaces(host)
        return host

    def setInterfaces(self, host):
        host.setInterfaces(self._loadChilds(Interface.class_signature))
124 | ||
class InterfaceMapper(ModelObjectMapper):
    """Maps Interface objects: network fields, hostnames, port counters."""
    mapped_class = Interface
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(InterfaceMapper, self).__init__(mmanager, pmanager)

    def serialize(self, iface):
        """Extend the generic document with interface-specific fields."""
        doc = super(InterfaceMapper, self).serialize(iface)
        doc["mac"] = iface.getMAC()
        doc["network_segment"] = iface.getNetworkSegment()
        doc["hostnames"] = list(iface.getHostnames())
        doc["ipv4"] = iface.getIPv4()
        doc["ipv6"] = iface.getIPv6()
        doc["ports"] = {
            "opened": iface.getPortsOpened(),
            "closed": iface.getPortsClosed(),
            "filtered": iface.getPortsFiltered(),
        }
        return doc

    def unserialize(self, iface, doc):
        """Populate iface from doc, then attach its services."""
        iface.setMAC(doc.get("mac"))
        iface.setNetworkSegment(doc.get("network_segment"))
        for hostname in doc.get("hostnames"):
            iface.addHostname(hostname)
        iface.setIPv4(doc.get("ipv4"))
        iface.setIPv6(doc.get("ipv6"))
        ports = doc.get("ports")
        iface.setPortsOpened(ports.get("opened"))
        iface.setPortsClosed(ports.get("closed"))
        iface.setPortsFiltered(ports.get("filtered"))
        super(InterfaceMapper, self).unserialize(iface, doc)
        self.setServices(iface)
        return iface

    def setServices(self, iface):
        iface.setServices(self._loadChilds(Service.class_signature))
166 | ||
167 | ||
class ServiceMapper(ModelObjectMapper):
    """Maps Service objects: protocol, status, version and port list."""
    mapped_class = Service
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(ServiceMapper, self).__init__(mmanager, pmanager)

    def serialize(self, srv):
        """Extend the generic document with service-specific fields."""
        doc = super(ServiceMapper, self).serialize(srv)
        doc["protocol"] = srv.getProtocol()
        doc["status"] = srv.getStatus()
        doc["version"] = srv.getVersion()
        doc["ports"] = list(srv.getPorts())
        #"interfaces": [id for id in srv._getAllIDs("_interfaces")]
        return doc

    def unserialize(self, srv, doc):
        """Populate srv from doc."""
        srv.setProtocol(doc.get("protocol"))
        srv.setStatus(doc.get("status"))
        srv.setVersion(doc.get("version"))
        # NOTE(review): setPorts is invoked once per port with a single int,
        # which looks like only the last port would survive -- confirm the
        # semantics of Service.setPorts before changing this.
        for port in doc.get("ports"):
            srv.setPorts(int(port))
        super(ServiceMapper, self).unserialize(srv, doc)
        return srv
195 | ||
196 | ||
class NoteMapper(ModelObjectMapper):
    """Maps ModelObjectNote instances; only adds the note text."""
    mapped_class = ModelObjectNote
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(NoteMapper, self).__init__(mmanager, pmanager)

    def serialize(self, note):
        doc = super(NoteMapper, self).serialize(note)
        doc["text"] = note.getText()
        return doc

    def unserialize(self, note, doc):
        note.setText(doc.get("text"))
        super(NoteMapper, self).unserialize(note, doc)
        return note
216 | ||
217 | ||
class VulnMapper(ModelObjectMapper):
    """Maps plain vulnerabilities: desc, severity, resolution, refs, data."""
    mapped_class = ModelObjectVuln
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(VulnMapper, self).__init__(mmanager, pmanager)

    def serialize(self, vuln):
        doc = super(VulnMapper, self).serialize(vuln)
        doc["desc"] = vuln.getDesc()
        doc["severity"] = vuln.getSeverity()
        doc["resolution"] = vuln.getResolution()
        doc["refs"] = vuln.getRefs()
        doc["data"] = vuln.getData()
        doc["confirmed"] = vuln.getConfirmed()
        return doc

    def unserialize(self, vuln, doc):
        vuln.setDesc(doc.get("desc"))
        vuln.setSeverity(doc.get("severity"))
        vuln.setResolution(doc.get("resolution"))
        vuln.setRefs(doc.get("refs"))
        # Older documents may lack these keys; keep the historical defaults.
        vuln.setData(doc.get("data", ""))
        vuln.setConfirmed(doc.get("confirmed", True))
        super(VulnMapper, self).unserialize(vuln, doc)
        return vuln
247 | ||
248 | ||
class VulnWebMapper(VulnMapper):
    """Maps web vulnerabilities: adds HTTP request/response context."""
    mapped_class = ModelObjectVulnWeb
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(VulnWebMapper, self).__init__(mmanager, pmanager)

    def serialize(self, vuln_web):
        doc = super(VulnWebMapper, self).serialize(vuln_web)
        doc["website"] = vuln_web.getWebsite()
        doc["path"] = vuln_web.getPath()
        doc["request"] = vuln_web.getRequest()
        doc["response"] = vuln_web.getResponse()
        doc["method"] = vuln_web.getMethod()
        doc["pname"] = vuln_web.getPname()
        doc["params"] = vuln_web.getParams()
        doc["query"] = vuln_web.getQuery()
        doc["category"] = vuln_web.getCategory()
        return doc

    def unserialize(self, vuln_web, doc):
        # NOTE(review): desc is also set by VulnMapper.unserialize via the
        # super() call below, from the same doc -- redundant but harmless.
        vuln_web.setDesc(doc.get("desc"))
        vuln_web.setWebsite(doc.get("website"))
        vuln_web.setPath(doc.get("path"))
        vuln_web.setRequest(doc.get("request"))
        vuln_web.setResponse(doc.get("response"))
        vuln_web.setMethod(doc.get("method"))
        vuln_web.setPname(doc.get("pname"))
        vuln_web.setParams(doc.get("params"))
        vuln_web.setQuery(doc.get("query"))
        vuln_web.setCategory(doc.get("category"))
        super(VulnWebMapper, self).unserialize(vuln_web, doc)
        return vuln_web
285 | ||
286 | ||
class CredMapper(ModelObjectMapper):
    """Maps ModelObjectCred (credential) objects to/from documents."""
    mapped_class = ModelObjectCred
    dummy_args = []
    dummy_kwargs = {}

    def __init__(self, mmanager, pmanager=None):
        super(CredMapper, self).__init__(mmanager, pmanager)

    def serialize(self, cred):
        """Add the username/password pair to the base document."""
        doc = super(CredMapper, self).serialize(cred)
        credential_fields = {
            "username": cred.getUsername(),
            "password": cred.getPassword()
        }
        doc.update(credential_fields)
        return doc

    def unserialize(self, cred, doc):
        """Restore the credential pair, then the shared fields."""
        cred.setUsername(doc.get("username"))
        cred.setPassword(doc.get("password"))
        super(CredMapper, self).unserialize(cred, doc)
        return cred
308 | ||
309 | ||
class CommandRunMapper(AbstractMapper):
    """Persists CommandRunInformation objects; their documents mirror the
    object's __dict__ one-to-one."""
    mapped_class = CommandRunInformation
    dummy_args = []
    dummy_kwargs = {}

    def __init__(self, mmanager, pmanager=None):
        super(CommandRunMapper, self).__init__(mmanager, pmanager)

    def serialize(self, obj):
        # The command object's attribute dict is already document-shaped.
        return obj.__dict__

    def unserialize(self, cmd, doc):
        # Copy every document field onto the command object verbatim.
        for attr_name, attr_value in doc.items():
            setattr(cmd, attr_name, attr_value)
        return cmd
325 | ||
326 | ||
class WorkspaceMapper(AbstractMapper):
    """Maps Workspace objects; on unserialize it also resolves and
    attaches the workspace's hosts from the child documents."""
    mapped_class = Workspace
    dummy_args = []
    dummy_kwargs = {'name': 'dummy'}

    def __init__(self, mmanager, pmanager=None):
        super(WorkspaceMapper, self).__init__(mmanager, pmanager)

    def serialize(self, obj):
        """Build the full workspace document by hand (no base document)."""
        return {
            "type": obj.class_signature,
            "_id": obj.getID(),
            "name": obj.getName(),
            "description": obj.getDescription(),
            "customer": obj.getCustomer(),
            "sdate": obj.getStartDate(),
            "fdate": obj.getFinishDate()
        }

    def findChildren(self, obj_id):
        return self.findByFilter(parent=obj_id, type=None)

    def unserialize(self, workspace, doc):
        """Restore workspace attributes and attach its hosts.

        Children are looked up under the workspace id as well as under the
        None / "None" parents for backwards compatibility."""
        children = (self.findChildren(workspace.getID()) +
                    self.findChildren(None) +
                    self.findChildren("None"))
        workspace.setName(doc.get("name", doc.get("_id")))
        workspace.setDescription(doc.get("description"))
        workspace.setCustomer(doc.get("customer"))
        workspace.setStartDate(doc.get("sdate"))
        workspace.setFinishDate(doc.get("fdate"))
        self.setHosts(workspace, children)
        return workspace

    def setHosts(self, workspace, docs):
        """Load every child doc of type Host and hand them to the workspace."""
        host_ids = [child['_id'] for child in docs
                    if child.get("type") == Host.class_signature]
        mapper = self.mapper_manager.getMapper(Host.class_signature)
        loaded_hosts = [mapper.load(host_id) for host_id in host_ids]
        workspace.setHosts(dict((host.getID(), host) for host in loaded_hosts))
371 | ||
372 | ||
# Registry mapping each model-class signature to its mapper class.  The
# mapper manager uses this table to pick the right (de)serializer for a
# given object type.
Mappers = {
    Host.class_signature: HostMapper,
    Interface.class_signature: InterfaceMapper,
    Service.class_signature: ServiceMapper,
    ModelObjectNote.class_signature: NoteMapper,
    ModelObjectVuln.class_signature: VulnMapper,
    ModelObjectVulnWeb.class_signature: VulnWebMapper,
    ModelObjectCred.class_signature: CredMapper,
    CommandRunInformation.class_signature: CommandRunMapper,
    Workspace.class_signature: WorkspaceMapper
}
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | ||
7 | import os | |
8 | import restkit | |
9 | import threading | |
10 | import requests | |
11 | import time | |
12 | from urlparse import urlparse | |
13 | import traceback | |
14 | from couchdbkit import Server, ChangesStream | |
15 | from couchdbkit.resource import ResourceNotFound | |
16 | ||
17 | from utils.logs import getLogger | |
18 | from managers.all import ViewsManager | |
19 | ||
20 | from config.globals import CONST_BLACKDBS | |
21 | from config.configuration import getInstanceConfiguration | |
22 | ||
23 | CONF = getInstanceConfiguration() | |
24 | ||
25 | ||
class DBTYPE(object):
    """A simple enumeration of the databases types. CouchDB is the only
    valid DB right now.
    """
    # Tag used by DbManager and ConnectorContainer to label connectors.
    COUCHDB = 1
31 | ||
32 | ||
class ConnectorContainer(object):
    """Wraps a DB connector, resolving it lazily on first access.

    ``connector`` may be a ready connector instance or a callable factory
    taking the database name (see ``CouchDbManager._loadDbs``, which
    registers lambdas for lazy initialization).
    """

    def __init__(self, name, connector, type):
        self._connector = connector
        self.type = type
        self._name = name

    def getType(self):
        return self.type

    def connector(self):
        # Resolve the lazy factory exactly once.  The original compared
        # __class__.__name__ == 'function', which misses non-function
        # callables; callable() covers every factory shape (resolved
        # connector instances define no __call__, so they pass through).
        if callable(self._connector):
            self._connector = self._connector(self._name)
        return self._connector
46 | ||
47 | ||
class DbManager(object):
    """Facade over the persistence backends.  CouchDB is currently the
    only backend; every known database gets a lazily-initialized
    ConnectorContainer entry in ``self.dbs``."""

    def __init__(self, couch_exc_callback):
        self.couch_exc_callback = couch_exc_callback
        self.load()

    def load(self):
        """(Re)build the CouchDB manager and the connector registry."""
        self.couchmanager = CouchDbManager(CONF.getCouchURI(), self.couch_exc_callback)
        self.managers = {DBTYPE.COUCHDB: self.couchmanager}
        self.dbs = {}
        self._loadDbs()

    def getAvailableDBs(self):
        """Return the DB types whose backing manager is reachable."""
        available = []
        for db_type, manager in self.managers.items():
            if manager.isAvailable():
                available.append(db_type)
        return available

    def _loadDbs(self):
        self.dbs = {}
        for dbname, connector in self.couchmanager.getDbs().items():
            self.dbs[dbname] = ConnectorContainer(dbname, connector, DBTYPE.COUCHDB)

    def _getManagerByType(self, dbtype):
        # CouchDB is the only valid type; any other value raises
        # UnboundLocalError here, matching the historical behavior.
        if dbtype == DBTYPE.COUCHDB:
            manager = self.couchmanager
        return manager

    def getConnector(self, name):
        # Containers resolve their connector lazily on first access.
        return self.dbs.get(name).connector()

    def connectorExists(self, name):
        return name in self.dbs

    def createDb(self, name, dbtype):
        """Create a database of *dbtype* named *name*.  Return False when
        a connector of that name already exists, else the new connector."""
        if self.connectorExists(name):
            return False
        manager = self._getManagerByType(dbtype)
        self.addConnector(name, manager.createDb(name), dbtype)
        return self.getConnector(name)

    def addConnector(self, name, connector, dbtype):
        self.dbs[name] = ConnectorContainer(name, connector, dbtype)

    def getAllDbNames(self):
        self.refreshDbs()
        return self.dbs.keys()

    def refreshDbs(self):
        """Pick up databases created since the last load."""
        self.couchmanager.refreshDbs()
        for dbname, connector in self.couchmanager.getDbs().items():
            if dbname not in self.dbs:
                self.dbs[dbname] = ConnectorContainer(dbname, connector, DBTYPE.COUCHDB)

    def removeDb(self, name):
        connector = self.getConnector(name)
        self._getManagerByType(connector.getType()).deleteDb(name)
        del self.dbs[name]
        return True

    def getDbType(self, dbname):
        return self.dbs.get(dbname).getType()

    def reloadConfig(self):
        self.load()
115 | ||
116 | ||
class DbConnector(object):
    """Abstract base for database connectors.  Subclasses must implement
    the document CRUD operations; the change-notification hooks are
    optional no-ops here."""

    def __init__(self, type=None):
        self.type = type
        self.changes_callback = None

    def getType(self):
        return self.type

    def setChangesCallback(self, callback):
        self.changes_callback = callback

    def setNoWorkspacesCallback(self, callback):
        self.no_workspace_callback = callback

    def waitForDBChange(self):
        # No-op here; real connectors block on their change feed.
        pass

    def forceUpdate(self):
        # No-op here.
        pass

    def saveDocument(self, document):
        raise NotImplementedError("DbConnector should not be used directly")

    def getDocument(self, documentId):
        raise NotImplementedError("DbConnector should not be used directly")

    def remove(self, documentId):
        raise NotImplementedError("DbConnector should not be used directly")

    def getDocsByFilter(self, parentId, type):
        raise NotImplementedError("DbConnector should not be used directly")

    def getChildren(self, document_id):
        raise NotImplementedError("DbConnector should not be used directly")
151 | ||
152 | ||
class CouchDbConnector(DbConnector):
    """Connector for a single CouchDB database.

    Maintains an in-memory cache of documents (``_docs``) linked in a
    parent/children tree, tracks the DB sequence number for the changes
    feed, and compacts the database periodically.
    """

    # This ratio represents (db size / num of docs)
    # to compact the database when the size is too high
    MAXIMUM_RATIO_SIZE = 10000
    # This value represents the number of maximum saves
    # before we try to compact the db
    MAXIMUM_SAVES = 1000

    def __init__(self, db, seq_num=0):
        super(CouchDbConnector, self).__init__(type=DBTYPE.COUCHDB)
        self.db = db
        self.saves_counter = 0
        self.mutex = threading.Lock()
        self._docs = {}
        try:
            vmanager = ViewsManager()
            vmanager.addViews(self.db)
            self._compactDatabase()
        except restkit.Unauthorized:
            getLogger(self).warn(
                "You're not authorized to upload views to this database")
        self.seq_num = self.db.info()['update_seq']

    def getDocs(self):
        """Return the document cache, populating it lazily on first use."""
        if not self._docs:
            # TODO: change this.
            # backwards compatibility. ugly, but needed
            self._docs["orphan"] = {}
            self._docs["orphan"]["children"] = []
            for doc in self.getAllDocs():
                self.addDoc(doc)
        return self._docs

    def addDoc(self, doc):
        """Insert or update *doc* in the cache and link it to its parent.
        Documents without a valid parent hang off the "orphan" node."""
        doc_id = doc["_id"]
        doc["children"] = []
        if self._docs.get(doc_id, None):
            # Keep children already linked to a previous version of the doc.
            doc["children"] = self._docs[doc_id]["children"]
        self._docs[doc_id] = doc

        parent_id = doc.get("parent", None)
        # TODO: change this.
        # backwards compatibility. ugly, but needed
        if not parent_id or parent_id == "None":
            parent_id = "orphan"
        if parent_id in self._docs:
            self._docs[parent_id]["children"].append(
                self._docs[doc["_id"]])

    def delDoc(self, doc_id):
        """Drop *doc_id* from the cache and unlink it from its parent."""
        doc = self._docs[doc_id]
        parent_id = doc.get("parent", None)
        # TODO: change this.
        # backwards compatibility. ugly, but needed
        if not parent_id or parent_id == "None":
            # BUG FIX: this line previously used '==' (a no-op comparison),
            # so parentless docs were never removed from the orphan node's
            # children list, leaving stale references behind.
            parent_id = "orphan"
        if parent_id in self._docs:
            self._docs[parent_id]["children"].remove(doc)
        del self._docs[doc_id]

    def _ratio(self):
        # One info() snapshot so both values come from the same moment.
        info = self.db.info()
        return info['disk_size'] / info['doc_count']

    def saveDocument(self, document):
        """Persist *document*; refresh the cache and compact the DB every
        MAXIMUM_SAVES successful writes."""
        self.incrementSeqNumber()
        getLogger(self).debug(
            "Saving document in couch db %s" % self.db)
        res = self.db.save_doc(document, use_uuids=True, force_update=True)
        if res:
            self.saves_counter += 1
            self.addDoc(document)
        if self.saves_counter > self.MAXIMUM_SAVES:
            self._compactDatabase()
            self.saves_counter = 0
        return res

    def forceUpdate(self):
        """It will try to update the information on the DB if it can.
        The except clause is necesary to catch the case where we've lost
        the connection to the DB.
        """
        doc = self.getDocument(self.db.dbname)
        try:
            return self.db.save_doc(doc, use_uuids=True, force_update=True)
        except Exception:
            return False

    def getDocument(self, document_id):
        """Return the document, serving it from the cache when possible and
        falling back to the database (caching the result)."""
        doc = self.getDocs().get(document_id, None)
        if not doc:
            if self.db.doc_exist(document_id):
                doc = self.db.get(document_id)
                self.addDoc(doc)
        return doc

    def remove(self, document_id):
        """Remove a document from existence, both from the database
        and from the mappers."""
        if self.db.doc_exist(document_id):
            self.incrementSeqNumber()
            self.db.delete_doc(document_id)
            self.delDoc(document_id)

    def getChildren(self, document_id):
        return self._docs[document_id]["children"]

    def getDocsByFilter(self, parentId, type):
        """Query the mapper views by parent (and optionally by type)."""
        if not type:
            key = None
            if parentId:
                key = '%s' % parentId
            view = 'mapper/byparent'
        else:
            key = ['%s' % parentId, '%s' % type]
            view = 'mapper/byparentandtype'

        return [doc.get("value") for doc in self.db.view(view, key=key)]

    def getAllDocs(self):
        """Return every document exposed by the utils/docs view."""
        return [doc.get("value") for doc in self.db.view('utils/docs')]

    def incrementSeqNumber(self):
        with self.mutex:
            self.seq_num += 1

    def getSeqNumber(self):
        return self.seq_num

    def setSeqNumber(self, seq_num):
        self.seq_num = seq_num

    def waitForDBChange(self, since=0):
        """Listen to the stream of changes provided by CouchDbKit. Process
        these changes accordingly. If there's an exception while listening
        to the changes, return inmediatly."""

        # XXX: the while True found here shouldn't be necessary because
        # changesStream already keeps listening 'for ever'. In a few tests
        # I ran, this hypothesis was confirmed, but with our current setup
        # i'm afraid I may be missing something. In any case, it works
        # as it is, but this definitevely needs revision.

        getLogger(self).debug(
            "Watching for changes")
        while True:
            last_seq = max(self.getSeqNumber(), since)
            self.stream = ChangesStream(
                self.db,
                feed="continuous",
                since=last_seq,
                heartbeat=True)
            try:
                for change in self.stream:
                    if not self.changes_callback:
                        return
                    if not change.get('last_seq', None):
                        if change['seq'] > self.getSeqNumber():
                            self.setSeqNumber(change['seq'])
                            if not change['id'].startswith('_design'):
                                getLogger(self).debug(
                                    "Changes from another instance")
                                deleted = bool(change.get('deleted', False))
                                revision = change.get("changes")[-1].get('rev')
                                obj_id = change.get('id')
                                if not deleted:
                                    # update cache
                                    doc = self.db.get(obj_id)
                                    self.addDoc(doc)
                                self.changes_callback(obj_id, revision, deleted)

            except ResourceNotFound:
                getLogger(self).info("The database couldn't be found")
                self.no_workspace_callback()
                return False

            except Exception as e:
                getLogger(self).info("Some exception happened while waiting for changes")
                getLogger(self).info(" The exception was: %s" % e)
                return False  # kill thread, it's failed... in reconnection
                # another one will be created, don't worry

    def _compactDatabase(self):
        """Ask CouchDB to compact the DB; needs admin rights."""
        try:
            self.db.compact()
        except Exception:
            getLogger(self).warn(
                "You're not authorized to compact this database")
347 | ||
348 | ||
class AbstractPersistenceManager(object):
    """Base class for persistence backends.  Keeps a name -> connector
    registry; subclasses implement the actual create/load/delete."""

    def __init__(self):
        self.dbs = {}

    def createDb(self, name):
        """Create the database *name* unless a connector is cached already;
        either way return the cached connector."""
        if not self.getDb(name):
            self.dbs[name] = self._create(name)
        return self.dbs[name]

    def _loadDbs(self):
        raise NotImplementedError("AbstractPersistenceManager should not be used directly")

    def refreshDbs(self):
        self._loadDbs()

    def _create(self, name):
        raise NotImplementedError("AbstractPersistenceManager should not be used directly")

    def deleteDb(self, name):
        """Delete *name* and drop its connector; False when unknown."""
        if not self.getDb(name):
            return False
        self._delete(name)
        del self.dbs[name]
        return True

    def _delete(self, name):
        raise NotImplementedError("AbstractPersistenceManager should not be used directly")

    def getDbNames(self):
        return self.dbs.keys()

    def getDbs(self):
        return self.dbs

    def getDb(self, name):
        return self.dbs.get(name)

    def isAvailable(self):
        # Subclasses are expected to set self._available.
        return self._available
388 | ||
389 | ||
class CouchDbManager(AbstractPersistenceManager):
    """
    This is a couchdb manager for the workspace,
    it will load from the couchdb databases
    """
    def __init__(self, uri, couch_exception_callback):
        super(CouchDbManager, self).__init__()
        getLogger(self).debug(
            "Initializing CouchDBManager for url [%s]" % uri)
        self._lostConnection = False
        self.__uri = uri
        self._available = False
        self.couch_exception_callback = couch_exception_callback
        # Watchdog thread: keeps polling the server and fires the
        # exception callback when the connection is lost.
        test_couch_thread = threading.Thread(target=self.continuosly_check_connection)
        test_couch_thread.daemon = True
        test_couch_thread.start()
        try:
            if uri is not None:
                self.testCouchUrl(uri)
                url = urlparse(uri)
                getLogger(self).debug(
                    "Setting user,pass %s %s" % (url.username, url.password))
                self.__serv = Server(uri=uri)
                self.__serv.resource_class.credentials = (url.username, url.password)
                self._available = True
                self.pushReports()
                self._loadDbs()
        except Exception:
            getLogger(self).warn("No route to couchdb server on: %s" % uri)
            getLogger(self).debug(traceback.format_exc())

    def continuosly_check_connection(self):
        """Intended to use on a separate thread. Call module-level function
        test_couch every second to see if response to the server_uri of the
        DB is still 200. Call the exception_callback if we can't access the
        server three times in a row.  (Method name typo kept as-is.)
        """
        tolerance = 0
        server_uri = self.__uri
        while True:
            time.sleep(1)
            test_was_successful = test_couch(server_uri)
            if test_was_successful:
                tolerance = 0
            else:
                tolerance += 1
                if tolerance == 3:
                    self.couch_exception_callback()
                    return False  # kill the thread if something went wrong

    def _create(self, name):
        # CouchDB database names must be lowercase.
        db = self.__serv.create_db(name.lower())
        return CouchDbConnector(db)

    def _delete(self, name):
        self.__serv.delete_db(name)

    def _loadDbs(self):
        """Register every non-internal, non-blacklisted database for lazy
        initialization (the real connector is built on first access)."""

        def conditions(database):
            begins_with_underscore = database.startswith("_")
            is_blacklisted = database in CONST_BLACKDBS
            return not begins_with_underscore and not is_blacklisted

        try:
            for dbname in filter(conditions, self.__serv.all_dbs()):
                if dbname not in self.dbs:
                    getLogger(self).debug(
                        "Asking for dbname[%s], registering for lazy initialization" % dbname)
                    self.dbs[dbname] = lambda x: self._loadDb(x)
        except restkit.errors.RequestError:
            getLogger(self).error("Couldn't load databases. "
                                  "The connection to the CouchDB was probably lost. ")

    def _loadDb(self, dbname):
        db = self.__serv.get_db(dbname)
        seq = db.info()['update_seq']
        self.dbs[dbname] = CouchDbConnector(db, seq_num=seq)
        return self.dbs[dbname]

    def refreshDbs(self):
        """Refresh databases using inherited method. On exception, asume
        no databases are available.
        """
        try:
            # BUG FIX: the unbound method was previously invoked without
            # ``self`` (AbstractPersistenceManager.refreshDbs()), which
            # always raised and made this return [] unconditionally.
            return AbstractPersistenceManager.refreshDbs(self)
        except Exception:
            return []

    def pushReports(self):
        """Upload the report views to the 'reports' DB and return its index
        URL.  Uploading requires admin rights; failure is only logged."""
        vmanager = ViewsManager()
        reports = os.path.join(os.getcwd(), "views", "reports")
        try:
            workspace = self.__serv.get_or_create_db("reports")
            vmanager.addView(reports, workspace)
        except Exception:
            getLogger(self).warn(
                "Reports database couldn't be uploaded. You need to be an admin to do it")
        return self.__uri + "/reports/_design/reports/index.html"

    @staticmethod
    def testCouch(uri):
        """Redirect to the module-level function of the name, which
        serves the same purpose and is used by other classes too."""
        return test_couch(uri)

    def testCouchUrl(self, uri):
        if uri is not None:
            url = urlparse(uri)
            host = url.hostname
            port = url.port
            self.test(host, int(port))

    def test(self, address, port):
        """Raise socket.error if nothing is listening on address:port."""
        import socket
        s = socket.socket()
        s.settimeout(1)
        try:
            s.connect((address, port))
        finally:
            # BUG FIX: the socket was previously leaked.
            s.close()

    def replicate(self, workspace, *targets_dbs, **kwargs):
        """Replicate *workspace* to each target DB URI in *targets_dbs*."""
        getLogger(self).debug("Targets to replicate %s" % str(targets_dbs))
        for target_db in targets_dbs:
            src_db_path = "/".join([self.__uri, workspace])
            dst_db_path = "/".join([target_db, workspace])
            try:
                getLogger(self).info("workspace: %s, src_db_path: %s, dst_db_path: %s, **kwargs: %s" % (workspace, src_db_path, dst_db_path, kwargs))
                self.__peerReplication(workspace, src_db_path, dst_db_path, **kwargs)
            except ResourceNotFound as e:
                raise e
            except Exception as e:
                getLogger(self).error(e)
                raise

    def __peerReplication(self, workspace, src, dst, **kwargs):
        mutual = kwargs.get("mutual", True)
        continuous = kwargs.get("continuous", True)
        ct = kwargs.get("create_target", True)

        # NOTE(review): the forward call uses the workspace *name* as the
        # source and the reverse call forwards **kwargs (still containing
        # "mutual"/"create_target") -- looks suspect but kept as-is.
        self.__serv.replicate(workspace, dst, mutual=mutual, continuous=continuous, create_target=ct)
        if mutual:
            self.__serv.replicate(dst, src, continuous=continuous, **kwargs)
531 | ||
532 | ||
def test_couch(uri):
    """Return True if we could access uri/_api/info, which should happen
    if we have an Internet connection, the server is up and we have the
    correct permissions (response_code == 200).
    """
    try:
        return requests.get(uri + '/_api/info', timeout=3).status_code == 200
    except Exception:
        # Connection errors, timeouts, or a None/invalid uri all mean the
        # server is effectively unavailable.
        return False
0 | #!/usr/bin/python2.7 | |
1 | # -*- coding: utf-8 -*- | |
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | import requests, json | |
9 | ||
class CouchChangesStream(object):
    """Iterator over a CouchDB ``_changes`` feed for one workspace.

    Streams the HTTP response line by line, parses each change and yields
    ``(change, object_type, object_name)`` tuples until stopped or the
    connection drops.
    """

    def __init__(self, workspace_name, server_url, **params):
        # params are forwarded verbatim as _changes query parameters.
        self._base_url = server_url
        self._change_url = "{0}/_changes".format(server_url)
        self._params = params
        self._response = None
        self._stop = False

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Returning False means exceptions inside the with-block propagate.
        return False

    def __next__(self):
        # NOTE(review): returning self does not follow the iterator
        # protocol (should return the next item or raise StopIteration);
        # looks unused in practice -- confirm before relying on it.
        return self

    def __iter__(self):
        """Open the streaming request and yield parsed changes."""
        try:
            self._response = requests.get(self._change_url, self._params, stream=True)
            if self._response.status_code != 200:
                raise requests.exceptions.RequestException
            if self._response:
                for raw_line in self._response.iter_lines():
                    line = self._sanitize(raw_line)
                    if not line:
                        if self._stop: break
                        else: continue
                    change = self._parse_change(line)
                    if not change:
                        continue
                    object_type, object_name = self._get_object_type_and_name_from_change(change)
                    yield change, object_type, object_name
            if not self._stop:  # why did we stop if no one asked me to stop?
                raise requests.exceptions.RequestException
        except requests.exceptions.RequestException:
            self.stop()
            raise
        except Exception as e:
            # Any other error silently terminates the stream.
            self.stop()

    def _get_object_type_and_name_from_change(self, change):
        """Fetch the changed document and return its (type, name);
        (None, None) when the document can't be retrieved or parsed."""
        try:
            id = change['id']
            response = requests.get("{0}/{1}".format(self._base_url, id))
            object_json = response.json()
        except Exception:
            return None, None
        return object_json.get('type'), object_json.get('name')

    def _sanitize(self, raw_line):
        """Strip stream framing noise; return a parseable JSON line or None."""
        if not isinstance(raw_line, basestring):  # Python 2 only (basestring)
            return None
        line = raw_line.strip()
        # Skip heartbeats/blank lines and the JSON envelope fragments.
        if not line or line in ('{"results":', '],'):
            return None
        # The trailing last_seq fragment is missing its opening brace.
        if line.startswith('"last_seq"'):
            line = '{' + line
        if line.endswith(","):
            line = line[:-1]
        return line

    def _parse_change(self, line):
        """Parse a JSON change line; None when it isn't valid JSON."""
        try:
            obj = json.loads(line)
            return obj
        except ValueError:
            return None

    def stop(self):
        """Close the streaming response and mark the iterator stopped."""
        if self._response is not None:
            self._response.close()
            self._response = None
        self._stop = True
0 | #!/usr/bin/python2.7 | |
1 | # -*- coding: utf-8 -*- | |
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | import glob | |
9 | import os | |
10 | import sys | |
11 | from threading import Lock | |
12 | from persistence.server import server | |
13 | from persistence.server.utils import (force_unique, | |
14 | get_host_properties, | |
15 | get_interface_properties, | |
16 | get_service_properties, | |
17 | get_vuln_properties, | |
18 | get_vuln_web_properties, | |
19 | get_note_properties, | |
20 | get_credential_properties, | |
21 | get_command_properties, | |
22 | WrongObjectSignature) | |
23 | ||
24 | from model.diff import ModelObjectDiff, MergeSolver | |
25 | from model.conflict import ConflictUpdate | |
26 | from config.configuration import getInstanceConfiguration | |
27 | from functools import wraps | |
28 | ||
29 | CONF = getInstanceConfiguration() | |
30 | ||
_CHANGES_LOCK = Lock()
def get_changes_lock():
    """Return the module-wide lock guarding the local-changes registry."""
    return _CHANGES_LOCK

_LOCAL_CHANGES_ID_TO_REV = {}
def local_changes():
    """Return the {doc_id: revision} map of writes made by this client."""
    return _LOCAL_CHANGES_ID_TO_REV

def _ignore_in_changes(func):
    """Decorator: record the id/rev of successful writes so the changes
    monitor can tell our own updates apart from remote ones."""
    @wraps(func)
    def func_wrapper(*args, **kwargs):
        with get_changes_lock():
            response = func(*args, **kwargs)
            if response.get('ok'):
                _LOCAL_CHANGES_ID_TO_REV[response['id']] = response['rev']
            return response
    return func_wrapper
48 | ||
def _get_faraday_ready_objects(workspace_name, faraday_ready_object_dictionaries,
                               faraday_object_name):
    """Instantiate the client-side class matching *faraday_object_name*
    ('hosts', 'vulns', 'vulns_web', 'interfaces', 'services', 'notes',
    'credentials' or 'commands') for every dictionary given.

    Return a list of faraday objects (_Host, _Interface, _Service, _Vuln,
    _VulnWeb, ...) exposing the same attribute-getting interface as the
    ones defined by the ModelController.
    """
    object_to_class = {'hosts': _Host,
                       'vulns': _Vuln,
                       'vulns_web': _VulnWeb,
                       'interfaces': _Interface,
                       'services': _Service,
                       'notes': _Note,
                       'credentials': _Credential,
                       'commands': _Command}

    appropiate_class = object_to_class[faraday_object_name]
    if not faraday_ready_object_dictionaries:
        return []
    return [appropiate_class(object_dictionary, workspace_name)
            for object_dictionary in faraday_ready_object_dictionaries]
75 | ||
def _get_faraday_ready_hosts(workspace_name, hosts_dictionaries):
    """Build _Host objects from raw host dictionaries."""
    return _get_faraday_ready_objects(workspace_name, hosts_dictionaries, 'hosts')
78 | ||
def _get_faraday_ready_vulns(workspace_name, vulns_dictionaries, vulns_type=None):
    """Build vuln objects from raw dictionaries.

    When *vulns_type* is given ('vulns' or 'vulns_web'), every dictionary
    is built as that type; otherwise each entry is dispatched on its
    ``value.type`` field and plain plus web vulns are returned together.
    """
    if vulns_type:
        return _get_faraday_ready_objects(workspace_name, vulns_dictionaries, vulns_type)

    plain_dicts, web_dicts = [], []
    for vuln_dict in vulns_dictionaries:
        if vuln_dict['value']['type'] == 'Vulnerability':
            plain_dicts.append(vuln_dict)
        elif vuln_dict['value']['type'] == 'VulnerabilityWeb':
            web_dicts.append(vuln_dict)
    return (_get_faraday_ready_objects(workspace_name, plain_dicts, 'vulns') +
            _get_faraday_ready_objects(workspace_name, web_dicts, 'vulns_web'))
88 | ||
def _get_faraday_ready_services(workspace_name, services_dictionaries):
    """Build _Service objects from raw service dictionaries."""
    return _get_faraday_ready_objects(workspace_name, services_dictionaries, 'services')

def _get_faraday_ready_interfaces(workspace_name, interfaces_dictionaries):
    """Build _Interface objects from raw interface dictionaries."""
    return _get_faraday_ready_objects(workspace_name, interfaces_dictionaries, 'interfaces')

def _get_faraday_ready_credentials(workspace_name, credentials_dictionaries):
    """Build _Credential objects from raw credential dictionaries."""
    return _get_faraday_ready_objects(workspace_name, credentials_dictionaries, 'credentials')

def _get_faraday_ready_notes(workspace_name, notes_dictionaries):
    """Build _Note objects from raw note dictionaries."""
    return _get_faraday_ready_objects(workspace_name, notes_dictionaries, 'notes')

def _get_faraday_ready_commands(workspace_name, commands_dictionaries):
    """Build _Command objects from raw command dictionaries."""
    return _get_faraday_ready_objects(workspace_name, commands_dictionaries, 'commands')
103 | ||
def get_changes_stream(workspace_name, **params):
    """Open a continuous changes stream for *workspace_name*, starting
    from the workspace's current ``last_seq`` so only new changes arrive.
    """
    since = server.get_workspace(workspace_name)['last_seq']
    return server.get_changes_stream(workspace_name, since=since,
                                     heartbeat='1000')
108 | ||
def get_hosts(workspace_name, **params):
    """Take a workspace name and an arbitrary number of params to
    customize the request.

    Return a list of Host objects.
    """
    return _get_faraday_ready_hosts(workspace_name,
                                    server.get_hosts(workspace_name, **params))
117 | ||
def get_host(workspace_name, host_id):
    """Return the host by host_id. None if it can't be found."""
    matching_hosts = get_hosts(workspace_name, couchid=host_id)
    return force_unique(matching_hosts)
121 | ||
def get_all_vulns(workspace_name, **params):
    """Return every vuln in the workspace -- plain and web -- as faraday
    objects, dispatched on each entry's type."""
    return _get_faraday_ready_vulns(workspace_name,
                                    server.get_all_vulns(workspace_name, **params))
125 | ||
def get_vulns(workspace_name, **params):
    """Fetch the (non-web) vulnerabilities of *workspace_name*,
    forwarding any extra query params to the server.

    Return a list of _Vuln objects.
    """
    return _get_faraday_ready_vulns(workspace_name,
                                    server.get_vulns(workspace_name, **params),
                                    vulns_type='vulns')
134 | ||
def get_vuln(workspace_name, vuln_id):
    """Return the single _Vuln of couch id *vuln_id*. None if not found."""
    return force_unique(get_vulns(workspace_name, couchid=vuln_id))
138 | ||
def get_web_vulns(workspace_name, **params):
    """Fetch the web vulnerabilities of *workspace_name*, forwarding any
    extra query params to the server.

    Return a list of _VulnWeb objects.
    """
    return _get_faraday_ready_vulns(workspace_name,
                                    server.get_web_vulns(workspace_name, **params),
                                    vulns_type='vulns_web')
147 | ||
def get_web_vuln(workspace_name, vuln_id):
    """Return the single _VulnWeb of couch id *vuln_id*. None if not found."""
    return force_unique(get_web_vulns(workspace_name, couchid=vuln_id))
151 | ||
def get_interfaces(workspace_name, **params):
    """Fetch the interfaces of *workspace_name*, forwarding any extra
    query params to the server.

    Return a list of _Interface objects.
    """
    interface_rows = server.get_interfaces(workspace_name, **params)
    return _get_faraday_ready_interfaces(workspace_name, interface_rows)
160 | ||
def get_interface(workspace_name, interface_id):
    """Return the single _Interface of couch id *interface_id*. None if
    not found."""
    return force_unique(get_interfaces(workspace_name, couchid=interface_id))
164 | ||
def get_services(workspace_name, **params):
    """Fetch the services of *workspace_name*, forwarding any extra
    query params to the server.

    Return a list of _Service objects.
    """
    service_rows = server.get_services(workspace_name, **params)
    return _get_faraday_ready_services(workspace_name, service_rows)
173 | ||
def get_service(workspace_name, service_id):
    """Return the single _Service of couch id *service_id*. None if not
    found."""
    return force_unique(get_services(workspace_name, couchid=service_id))
177 | ||
def get_credentials(workspace_name, **params):
    """Fetch the credentials of *workspace_name*, forwarding any extra
    query params to the server, as model objects."""
    credential_rows = server.get_credentials(workspace_name, **params)
    return _get_faraday_ready_credentials(workspace_name, credential_rows)
181 | ||
def get_credential(workspace_name, credential_id):
    """Return the single credential of couch id *credential_id*."""
    return force_unique(get_credentials(workspace_name, couchid=credential_id))
184 | ||
def get_notes(workspace_name, **params):
    """Fetch the notes of *workspace_name*, forwarding any extra query
    params to the server, as model objects."""
    note_rows = server.get_notes(workspace_name, **params)
    return _get_faraday_ready_notes(workspace_name, note_rows)
188 | ||
def get_note(workspace_name, note_id):
    """Return the single note of couch id *note_id*."""
    return force_unique(get_notes(workspace_name, couchid=note_id))
191 | ||
def get_workspace(workspace_name):
    """Return the _Workspace named *workspace_name*. None if not found."""
    workspace_document = server.get_workspace(workspace_name)
    if not workspace_document:
        return None
    return _Workspace(workspace_document, workspace_name)
196 | ||
def get_commands(workspace_name, **params):
    """Fetch the commands of *workspace_name*, forwarding any extra
    query params to the server, as model objects."""
    command_rows = server.get_commands(workspace_name, **params)
    return _get_faraday_ready_commands(workspace_name, command_rows)
200 | ||
def get_command(workspace_name, command_id):
    """Return the single command of couch id *command_id*."""
    return force_unique(get_commands(workspace_name, couchid=command_id))
203 | ||
def get_object(workspace_name, object_signature, object_id):
    """Return the single object of kind *object_signature* with id
    *object_id* from workspace *workspace_name*.

    object_signature must be either 'Host', 'Vulnerability',
    'VulnerabilityWeb', 'Interface', 'Service', 'Cred', 'Note' or
    'CommandRunInformation'. Raise WrongObjectSignature otherwise.
    """
    getter_by_signature = {
        _Host.class_signature: get_host,
        _Vuln.class_signature: get_vuln,
        _VulnWeb.class_signature: get_web_vuln,
        _Interface.class_signature: get_interface,
        _Service.class_signature: get_service,
        _Credential.class_signature: get_credential,
        _Note.class_signature: get_note,
        _Command.class_signature: get_command,
    }
    getter = getter_by_signature.get(object_signature)
    if getter is None:
        raise WrongObjectSignature(object_signature)
    return getter(workspace_name, object_id)
227 | ||
def get_deleted_object_name_and_type(workspace_name, object_id):
    """Return a tuple of (name, type) for the deleted object of object_id,
    if it can get around CouchDB to do it. Else (None, None)."""
    obj_dict = server.get_object_before_last_revision(workspace_name, object_id)
    return (obj_dict['name'], obj_dict['type']) if obj_dict else (None, None)
233 | ||
@_ignore_in_changes
def create_host(workspace_name, host):
    """Save *host* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_host(workspace_name, **get_host_properties(host))
242 | ||
@_ignore_in_changes
def update_host(workspace_name, host):
    """Push the current state of *host* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_host(workspace_name, **get_host_properties(host))
247 | ||
@_ignore_in_changes
def create_interface(workspace_name, interface):
    """Save *interface* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_interface(workspace_name,
                                   **get_interface_properties(interface))
255 | ||
@_ignore_in_changes
def update_interface(workspace_name, interface):
    """Push the current state of *interface* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_interface(workspace_name,
                                   **get_interface_properties(interface))
260 | ||
@_ignore_in_changes
def create_service(workspace_name, service):
    """Save *service* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_service(workspace_name,
                                 **get_service_properties(service))
268 | ||
@_ignore_in_changes
def update_service(workspace_name, service):
    """Push the current state of *service* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_service(workspace_name,
                                 **get_service_properties(service))
273 | ||
@_ignore_in_changes
def create_vuln(workspace_name, vuln):
    """Save the vulnerability *vuln* to the server under
    *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_vuln(workspace_name, **get_vuln_properties(vuln))
282 | ||
@_ignore_in_changes
def update_vuln(workspace_name, vuln):
    """Push the current state of *vuln* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_vuln(workspace_name, **get_vuln_properties(vuln))
287 | ||
@_ignore_in_changes
def create_vuln_web(workspace_name, vuln_web):
    """Save the web vulnerability *vuln_web* to the server under
    *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_vuln_web(workspace_name,
                                  **get_vuln_web_properties(vuln_web))
296 | ||
@_ignore_in_changes
def update_vuln_web(workspace_name, vuln_web):
    """Push the current state of *vuln_web* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_vuln_web(workspace_name,
                                  **get_vuln_web_properties(vuln_web))
301 | ||
@_ignore_in_changes
def create_note(workspace_name, note):
    """Save *note* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_note(workspace_name, **get_note_properties(note))
309 | ||
@_ignore_in_changes
def update_note(workspace_name, note):
    """Push the current state of *note* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_note(workspace_name, **get_note_properties(note))
314 | ||
@_ignore_in_changes
def create_credential(workspace_name, credential):
    """Save *credential* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_credential(workspace_name,
                                    **get_credential_properties(credential))
322 | ||
@_ignore_in_changes
def update_credential(workspace_name, credential):
    """Push the current state of *credential* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_credential(workspace_name,
                                    **get_credential_properties(credential))
327 | ||
@_ignore_in_changes
def create_command(workspace_name, command):
    """Save *command* to the server under *workspace_name*.

    Return the server's json response as a dictionary.
    """
    return server.create_command(workspace_name,
                                 **get_command_properties(command))
332 | ||
@_ignore_in_changes
def update_command(workspace_name, command):
    """Push the current state of *command* to the server.

    Return the server's json response as a dictionary.
    """
    return server.update_command(workspace_name,
                                 **get_command_properties(command))
337 | ||
def create_object(workspace_name, object_signature, obj):
    """Create *obj* on the server, dispatching on *object_signature*.

    Raise WrongObjectSignature if the signature is not one of the known
    object kinds.
    """
    creator_by_signature = {
        _Host.class_signature: create_host,
        _Vuln.class_signature: create_vuln,
        _VulnWeb.class_signature: create_vuln_web,
        _Interface.class_signature: create_interface,
        _Service.class_signature: create_service,
        _Credential.class_signature: create_credential,
        _Note.class_signature: create_note,
        _Command.class_signature: create_command,
    }
    creator = creator_by_signature.get(object_signature)
    if creator is None:
        raise WrongObjectSignature(object_signature)
    return creator(workspace_name, obj)
353 | ||
def update_object(workspace_name, object_signature, obj):
    """Update *obj* on the server, dispatching on *object_signature*.

    Raise WrongObjectSignature if the signature is not one of the known
    object kinds.
    """
    updater_by_signature = {
        _Host.class_signature: update_host,
        _Vuln.class_signature: update_vuln,
        _VulnWeb.class_signature: update_vuln_web,
        _Interface.class_signature: update_interface,
        _Service.class_signature: update_service,
        _Credential.class_signature: update_credential,
        _Note.class_signature: update_note,
        _Command.class_signature: update_command,
    }
    updater = updater_by_signature.get(object_signature)
    if updater is None:
        raise WrongObjectSignature(object_signature)
    return updater(workspace_name, obj)
369 | ||
370 | ||
def create_workspace(workspace_name, description, start_date, finish_date,
                     customer=None):
    """Take the workspace_name and create the database first,
    then the workspace's document.
    Return the server's json response as a dictionary, if it can. If the
    DB couldn't be created, it will return None. If the DB could be created
    but there was a problem creating its basic documents, it will delete
    the database and raise the corresponding error.
    """

    def upload_views():
        """Upload the CouchDB design documents (views) into the newly
        created workspace database."""
        # Local imports: only needed when a workspace is actually created.
        from managers.all import ViewsManager
        import couchdbkit
        s = couchdbkit.Server(uri=CONF.getCouchURI())
        db = s[workspace_name]
        views_manager = ViewsManager()
        views_manager.addViews(db)

    db_creation = server.create_database(workspace_name)
    if db_creation.get('ok'):
        try:
            upload_views()
            return server.create_workspace(workspace_name, description,
                                           start_date, finish_date, customer)
        except:
            # Bare except is deliberate: whatever failed, remove the
            # half-initialized database, then re-raise the original error.
            server.delete_workspace(workspace_name)
            raise
    else:
        return None
403 | ||
def get_workspace_summary(workspace_number):
    """Delegate to the server: return the workspace's summary."""
    return server.get_workspace_summary(workspace_number)
406 | ||
def get_workspace_numbers(workspace_name):
    """Delegate to the server: return the workspace's object counts."""
    return server.get_workspace_numbers(workspace_name)
409 | ||
def get_hosts_number(workspace_name, **params):
    """Return the server-side count of hosts in *workspace_name*."""
    return server.get_hosts_number(workspace_name, **params)
412 | ||
def get_services_number(workspace_name, **params):
    """Return the server-side count of services in *workspace_name*."""
    return server.get_services_number(workspace_name, **params)
415 | ||
def get_interfaces_number(workspace_name, **params):
    """Return the server-side count of interfaces in *workspace_name*."""
    return server.get_interfaces_number(workspace_name, **params)
418 | ||
def get_vulns_number(workspace_name, **params):
    """Return the server-side count of vulnerabilities in *workspace_name*."""
    return server.get_vulns_number(workspace_name, **params)
421 | ||
@_ignore_in_changes
def delete_host(workspace_name, host_id):
    """Delete the host of id *host_id* from the server."""
    return server.delete_host(workspace_name, host_id)
425 | ||
@_ignore_in_changes
def delete_interface(workspace_name, interface_id):
    """Delete the interface of id *interface_id* from the server."""
    return server.delete_interface(workspace_name, interface_id)
429 | ||
@_ignore_in_changes
def delete_service(workspace_name, service_id):
    """Delete the service of id *service_id* from the server."""
    return server.delete_service(workspace_name, service_id)
433 | ||
@_ignore_in_changes
def delete_vuln(workspace_name, vuln_id):
    """Delete the vulnerability of id *vuln_id* from the server."""
    return server.delete_vuln(workspace_name, vuln_id)
437 | ||
@_ignore_in_changes
def delete_note(workspace_name, note_id):
    """Delete the note of id *note_id* from the server."""
    return server.delete_note(workspace_name, note_id)
441 | ||
@_ignore_in_changes
def delete_credential(workspace_name, credential_id):
    """Delete the credential of id *credential_id* from the server."""
    return server.delete_credential(workspace_name, credential_id)
445 | ||
@_ignore_in_changes
def delete_vuln_web(workspace_name, vuln_id):
    """Delete the web vulnerability of id *vuln_id* from the server.

    NOTE(review): delegates to server.delete_vuln (not a dedicated
    delete_vuln_web) -- presumably both kinds share the same deletion
    endpoint; confirm against the server module.
    """
    return server.delete_vuln(workspace_name, vuln_id)
449 | ||
@_ignore_in_changes
def delete_command(workspace_name, command_id):
    """Delete the command of id *command_id* from the server."""
    return server.delete_command(workspace_name, command_id)
453 | ||
def delete_object(workspace_name, object_signature, obj_id):
    """Delete the object of kind *object_signature* with id *obj_id*.

    Raise WrongObjectSignature if the signature is not one of the known
    object kinds.
    """
    deleter_by_signature = {
        _Host.class_signature: delete_host,
        _Vuln.class_signature: delete_vuln,
        _VulnWeb.class_signature: delete_vuln_web,
        _Interface.class_signature: delete_interface,
        _Service.class_signature: delete_service,
        _Credential.class_signature: delete_credential,
        _Note.class_signature: delete_note,
        _Command.class_signature: delete_command,
    }
    deleter = deleter_by_signature.get(object_signature)
    if deleter is None:
        raise WrongObjectSignature(object_signature)
    return deleter(workspace_name, obj_id)
469 | ||
def delete_workspace(workspace_name):
    """Tries to delete the workspace workspace_name and returns the json
    response. You should always try/except this function, at least catching
    server.Unathorized exception.
    """
    return server.delete_workspace(workspace_name)
476 | ||
def get_workspaces_names():
    """Return the list of workspace names known to the server."""
    return server.get_workspaces_names()['workspaces']
479 | ||
def is_server_up():
    """Delegate to the server module: is the Faraday server reachable?"""
    return server.is_server_up()
482 | ||
def test_server_url(url_to_test):
    """Delegate to the server module: check *url_to_test* is a valid
    Faraday server URL."""
    return server.test_server_url(url_to_test)
485 | ||
class ModelBase(object):
    """Common base for the client-side model classes (_Host, _Interface,
    _Service, _Vuln, ...).

    Wraps a server row dictionary: ``obj['_id']`` and ``obj['id']`` are
    the ids, and the model attributes live under ``obj['value']``.
    """
    def __init__(self, obj, workspace_name):
        self._workspace_name = workspace_name
        self._server_id = obj['_id']
        self.id = obj['id']
        self.name = obj['value']['name']
        self.description = obj['value']['description']
        self.owned = obj['value']['owned']
        self.owner = obj['value']['owner']
        self.metadata = obj['value']['metadata']
        self.updates = []  # unresolved ConflictUpdates queued by addUpdate()

    @staticmethod
    def publicattrsrefs():
        """Map human-readable attribute labels to attribute names."""
        return {'Description': 'description',
                'Name': 'name',
                'Owned': 'owned'}

    def defaultValues(self):
        # Values treated as "empty" when breaking property conflicts.
        return [-1, 0, '', 'None', 'none', 'unknown', None, [], {}]

    def propertyTieBreaker(self, key, prop1, prop2):
        """Breakes the conflict between two properties. If either of them
        is a default value returns the true and only.
        If the key is tie-breakable, delegates to tieBreak().
        If conflicting returns a tuple with the values."""
        if prop1 in self.defaultValues(): return prop2
        elif prop2 in self.defaultValues(): return prop1
        elif self.tieBreakable(key): return self.tieBreak(key, prop1, prop2)
        else: return (prop1, prop2)

    def tieBreakable(self, key):
        """Subclasses return True for keys they know how to auto-merge."""
        return False

    def tieBreak(self, key, prop1, prop2):
        """Subclasses merge the two conflicting values for *key*."""
        return None

    def addUpdate(self, newModelObject):
        """Merge newModelObject into self, attribute by attribute.

        Return True (and queue a ConflictUpdate) if at least one property
        conflict could not be resolved automatically.
        """
        conflict = False
        diff = ModelObjectDiff(self, newModelObject)
        for k, v in diff.getPropertiesDiff().items():
            attribute = self.publicattrsrefs().get(k)
            prop_update = self.propertyTieBreaker(attribute, *v)

            if not isinstance(prop_update, tuple) or CONF.getMergeStrategy():
                # if there's a strategy set by the user, apply it
                if isinstance(prop_update, tuple):
                    # BUG FIX: prop_update used to be overwritten with the
                    # MergeSolver instance before prop_update[0]/[1] were
                    # read, so the strategy path indexed the solver instead
                    # of the two conflicting values.
                    solver = MergeSolver(CONF.getMergeStrategy())
                    prop_update = solver.solve(prop_update[0], prop_update[1])

                setattr(self, attribute, prop_update)
            else:
                conflict = True
        if conflict:
            self.updates.append(ConflictUpdate(self, newModelObject))
        return conflict

    def getUpdates(self):
        return self.updates

    def updateResolved(self, update):
        self.updates.remove(update)

    def needs_merge(self, new_obj):
        return ModelObjectDiff(self, new_obj).existDiff()

    def updateMetadata(self):
        # NOTE(review): passes self.owner to metadata.update -- assumes
        # metadata is a Metadata-like object, not a plain dict; confirm.
        self.getMetadata().update(self.owner)

    def getOwner(self): return self.owner
    def isOwned(self): return self.owned
    def getName(self): return self.name
    def getMetadata(self): return self.metadata
    def getDescription(self): return self.description
560 | ||
561 | ||
class _Host(ModelBase):
    """A simple Host class. Should implement all the methods of the
    Host object in Model.Host
    Any method here more than a couple of lines long probably represent
    a search the server is missing.
    """
    class_signature = 'Host'

    def __init__(self, host, workspace_name):
        ModelBase.__init__(self, host, workspace_name)
        self.default_gateway = host['value']['default_gateway']
        self.os = host['value']['os']
        # The server row carries the host's vulnerability count in 'vulns'.
        self.vuln_amount = int(host['value']['vulns'])

    @staticmethod
    def publicattrsrefs():
        """Extend the base label->attribute map with host-only fields."""
        publicattrs = dict(ModelBase.publicattrsrefs(), **{
            'Operating System' : 'os'
        })
        return publicattrs

    def updateAttributes(self, name=None, description=None, os=None, owned=None):
        """Update the given attributes in place; None means 'leave as is'."""
        self.updateMetadata()
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if os is not None:
            self.os = os
        if owned is not None:
            self.owned = owned

    def __str__(self): return "{0} ({1})".format(self.name, self.vuln_amount)
    def getOS(self): return self.os
    def getVulnAmount(self): return self.vuln_amount
    def getID(self): return self.id
    def getDefaultGateway(self): return self.default_gateway
    def getVulns(self):
        # All vulns (normal and web) attached to this host.
        return get_all_vulns(self._workspace_name, hostid=self._server_id)
    def getInterface(self, interface_couch_id):
        service = get_interfaces(self._workspace_name, couchid=interface_couch_id)
        return service[0]
    def getAllInterfaces(self):
        return get_interfaces(self._workspace_name, host=self._server_id)
    def getServices(self):
        return get_services(self._workspace_name, hostid=self._server_id)
609 | ||
610 | ||
class _Interface(ModelBase):
    """A simple Interface class. Should implement all the methods of the
    Interface object in Model.Host
    Any method here more than a couple of lines long probably represent
    a search the server is missing.
    """
    class_signature = 'Interface'

    def __init__(self, interface, workspace_name):
        ModelBase.__init__(self, interface, workspace_name)
        self.hostnames = interface['value']['hostnames']
        self.ipv4 = interface['value']['ipv4']
        self.ipv6 = interface['value']['ipv6']
        self.mac = interface['value']['mac']
        self.network_segment = interface['value']['network_segment']
        self.ports = interface['value']['ports']

        # Port counters start at zero; callers fill them in through
        # updateAttributes / the setPorts* setters.
        self.amount_ports_opened = 0
        self.amount_ports_closed = 0
        self.amount_ports_filtered = 0

    @staticmethod
    def publicattrsrefs():
        """Extend the base label->attribute map with interface-only fields."""
        publicattrs = dict(ModelBase.publicattrsrefs(), **{
            'MAC Address' : 'mac',
            'IPV4 Settings' : 'ipv4',
            'IPV6 Settings' : 'ipv6',
            'Network Segment' : 'network_segment',
            'Hostnames' : 'hostnames'
        })
        return publicattrs

    def tieBreakable(self, property_key):
        """Only the 'hostnames' conflict can be merged automatically."""
        if property_key in ["hostnames"]:
            return True
        return False

    def tieBreak(self, key, prop1, prop2):
        """Merge two hostname lists into one without duplicates.
        Note: extends (mutates) prop1 in the process."""
        if key == "hostnames":
            prop1.extend(prop2)
            return list(set(prop1))
        return None

    def updateAttributes(self, name=None, description=None, hostnames=None, mac=None, ipv4=None, ipv6=None,
                         network_segment=None, amount_ports_opened=None, amount_ports_closed=None,
                         amount_ports_filtered=None, owned=None):
        """Update the given attributes in place; None means 'leave as is'."""
        self.updateMetadata()
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if hostnames is not None:
            self.hostnames = hostnames
        if mac is not None:
            self.mac = mac
        if ipv4 is not None:
            self.ipv4 = ipv4
        if ipv6 is not None:
            self.ipv6 = ipv6
        if network_segment is not None:
            self.network_segment = network_segment
        if amount_ports_opened is not None:
            self.setPortsOpened(amount_ports_opened)
        if amount_ports_closed is not None:
            self.setPortsClosed(amount_ports_closed)
        if amount_ports_filtered is not None:
            self.setPortsFiltered(amount_ports_filtered)
        if owned is not None:
            self.owned = owned

    def setPortsOpened(self, ports_opened):
        self.amount_ports_opened = ports_opened

    def setPortsClosed(self, ports_closed):
        self.amount_ports_closed = ports_closed

    def setPortsFiltered(self, ports_filtered):
        self.amount_ports_filtered = ports_filtered

    def __str__(self): return "{0}".format(self.name)
    def getID(self): return self.id
    def getHostnames(self): return self.hostnames
    def getIPv4(self): return self.ipv4
    def getIPv6(self): return self.ipv6
    def getIPv4Address(self): return self.ipv4['address']
    def getIPv4Mask(self): return self.ipv4['mask']
    def getIPv4Gateway(self): return self.ipv4['gateway']
    def getIPv4DNS(self): return self.ipv4['DNS']
    def getIPv6Address(self): return self.ipv6['address']
    def getIPv6Gateway(self): return self.ipv6['gateway']
    def getIPv6DNS(self): return self.ipv6['DNS']
    def getMAC(self): return self.mac
    def getNetworkSegment(self): return self.network_segment

    def getService(self, service_couch_id):
        return get_service(self._workspace_name, service_couch_id)
    def getAllServices(self):
        return get_services(self._workspace_name, interface=self._server_id)
    def getVulns(self):
        # All vulns (normal and web) attached to this interface.
        return get_all_vulns(self._workspace_name, interfaceid=self._server_id)
712 | ||
713 | ||
class _Service(ModelBase):
    """A simple Service class. Should implement all the methods of the
    Service object in Model.Host
    Any method here more than a couple of lines long probably represent
    a search the server is missing.
    """
    class_signature = 'Service'

    def __init__(self, service, workspace_name):
        ModelBase.__init__(self, service, workspace_name)
        self.protocol = service['value']['protocol']
        self.ports = service['value']['ports']
        self.version = service['value']['version']
        self.status = service['value']['status']
        # NOTE(review): the vuln count is read from the row's top level
        # here ('vulns'), unlike _Host which reads value['vulns'] --
        # confirm this mirrors the server's row shape for services.
        self.vuln_amount = int(service['vulns'])

    @staticmethod
    def publicattrsrefs():
        """Extend the base label->attribute map with service-only fields."""
        publicattrs = dict(ModelBase.publicattrsrefs(), **{
            'Ports' : 'ports',
            'Protocol' : 'protocol',
            'Status' : 'status',
            'Version' : 'version'
        })
        return publicattrs

    def updateAttributes(self, name=None, description=None, protocol=None, ports=None,
                         status=None, version=None, owned=None):
        """Update the given attributes in place; None means 'leave as is'."""
        self.updateMetadata()
        if name is not None:
            self.name = name
        if description is not None:
            self.description = description
        if protocol is not None:
            self.protocol = protocol
        if ports is not None:
            self.ports = ports
        if status is not None:
            self.status = status
        if version is not None:
            self.version = version
        if owned is not None:
            self.owned = owned

    def __str__(self): return "{0} ({1})".format(self.name, self.vuln_amount)
    def getID(self): return self.id
    def getStatus(self): return self.status
    def getPorts(self): return self.ports  # this is a list of one element in faraday
    def getVersion(self): return self.version
    def getProtocol(self): return self.protocol
    def isOwned(self): return self.owned
    def getVulns(self): return get_all_vulns(self._workspace_name, serviceid=self._server_id)
766 | ||
767 | ||
class _Vuln(ModelBase):
    """A simple Vuln class. Should implement all the methods of the
    Vuln object in Model.Common
    Any method here more than a couple of lines long probably represent
    a search the server is missing.
    """
    class_signature = 'Vulnerability'

    def __init__(self, vuln, workspace_name):
        ModelBase.__init__(self, vuln, workspace_name)
        self.desc = vuln['value']['desc']
        self.data = vuln['value']['data']
        self.severity = vuln['value']['severity']
        self.refs = vuln['value']['refs']
        self.confirmed = vuln['value']['confirmed']
        self.resolution = vuln['value']['resolution']

    @staticmethod
    def publicattrsrefs():
        """Extend the base label->attribute map with vuln-only fields."""
        publicattrs = dict(ModelBase.publicattrsrefs(), **{
            'Data' : 'data',
            'Severity' : 'severity',
            'Refs' : 'refs',
            'Resolution': 'resolution'
        })
        return publicattrs

    def tieBreakable(self, key):
        """Only the 'confirmed' conflict can be merged automatically."""
        if key == "confirmed":
            return True
        return False

    def tieBreak(self, key, prop1, prop2):
        """A 'confirmed' conflict always resolves to True (confirmed wins);
        any other key stays a (prop1, prop2) conflict tuple."""
        if key == "confirmed":
            return True
        return (prop1, prop2)

    def standarize(self, severity):
        """Normalize *severity* (string or number) into one of
        'info', 'low', 'med', 'high', 'critical', or 'unclassified'."""
        # Transform all severities into lower strings
        severity = str(severity).lower()
        # If it has info, med, high, critical in it, standarized to it:


        def align_string_based_vulns(severity):
            # Matches on the first three letters, so e.g. 'medium' -> 'med'
            # and 'informational' -> 'info'.
            severities = ['info','low', 'med', 'high', 'critical']
            for sev in severities:
                if severity[0:3] in sev:
                    return sev
            return severity

        severity = align_string_based_vulns(severity)

        # Transform numeric severity into desc severity
        numeric_severities = { '0' : 'info',
                               '1' : 'low',
                               '2' : 'med',
                               '3' : 'high',
                               "4" : 'critical' }

        # Anything still unrecognized falls back to 'unclassified'.
        if not severity in numeric_severities.values():
            severity = numeric_severities.get(severity, 'unclassified')

        return severity

    def updateAttributes(self, name=None, desc=None, data=None,
                         severity=None, resolution=None, refs=None):
        """Update the given attributes in place; None means 'leave as is'.
        Severity is normalized through standarize() before assignment."""
        self.updateMetadata()
        if name is not None:
            self.name = name
        if desc is not None:
            self.desc = desc
        if data is not None:
            self.data = data
        if resolution is not None:
            self.resolution = resolution
        if severity is not None:
            self.severity = self.standarize(severity)
        if refs is not None:
            self.refs = refs

    def getID(self): return self.id
    def getDesc(self): return self.desc
    def getData(self): return self.data
    def getSeverity(self): return self.severity
    def getRefs(self): return self.refs
    def getConfirmed(self): return self.confirmed
    def getResolution(self): return self.resolution
856 | ||
857 | ||
858 | class _VulnWeb(_Vuln): | |
859 | """A simple VulnWeb class. Should implement all the methods of the | |
860 | VulnWeb object in Model.Common | |
861 | Any method here more than a couple of lines long probably represent | |
862 | a search the server is missing. | |
863 | """ | |
864 | class_signature = 'VulnerabilityWeb' | |
865 | ||
    def __init__(self, vuln_web, workspace_name):
        """Build a web vulnerability from a server row: everything _Vuln
        reads, plus the HTTP-specific fields under vuln_web['value']."""
        _Vuln.__init__(self, vuln_web, workspace_name)
        self.path = vuln_web['value']['path']
        self.website = vuln_web['value']['website']
        self.request = vuln_web['value']['request']
        self.response = vuln_web['value']['response']
        self.method = vuln_web['value']['method']
        self.pname = vuln_web['value']['pname']
        self.params = vuln_web['value']['params']
        self.query = vuln_web['value']['query']
        self.resolution = vuln_web['value']['resolution']
        # CouchDB stores attachments under the reserved '_attachments' key.
        self.attachments = vuln_web['value']['_attachments']
        self.hostnames = vuln_web['value']['hostnames']
        self.impact = vuln_web['value']['impact']
        self.service = vuln_web['value']['service']
        self.status = vuln_web['value']['status']
        self.tags = vuln_web['value']['tags']
        self.target = vuln_web['value']['target']
        self.parent = vuln_web['value']['parent']
885 | ||
886 | @staticmethod | |
887 | def publicattrsrefs(): | |
888 | publicattrs = dict(ModelBase.publicattrsrefs(), **{ | |
889 | 'Data' : 'data', | |
890 | 'Severity' : 'severity', | |
891 | 'Refs' : 'refs', | |
892 | 'Path' : 'path', | |
893 | 'Website' : 'website', | |
894 | 'Request' : 'request', | |
895 | 'Response' : 'response', | |
896 | 'Method' : 'method', | |
897 | 'Pname' : 'pname', | |
898 | 'Params' : 'params', | |
899 | 'Query' : 'query'}) | |
900 | return publicattrs | |
901 | ||
902 | def updateAttributes(self, name=None, desc=None, data=None, website=None, path=None, refs=None, | |
903 | severity=None, resolution=None, request=None,response=None, method=None, | |
904 | pname=None, params=None, query=None, category=None): | |
905 | ||
906 | super(_VulnWeb, self).updateAttributes(name, desc, data, severity, resolution, refs) | |
907 | self.updateMetadata() | |
908 | ||
909 | if website is not None: | |
910 | self.website = website | |
911 | if path is not None: | |
912 | self.path = path | |
913 | if request is not None: | |
914 | self.request = request | |
915 | if response is not None: | |
916 | self.response = response | |
917 | if method is not None: | |
918 | self.method = method | |
919 | if pname is not None: | |
920 | self.pname = pname | |
921 | if params is not None: | |
922 | self.params = params | |
923 | if query is not None: | |
924 | self.query = query | |
925 | if category is not None: | |
926 | self.category = category | |
927 | ||
928 | def getDescription(self): return self.description | |
929 | def getPath(self): return self.path | |
930 | def getWebsite(self): return self.website | |
931 | def getRequest(self): return self.request | |
932 | def getResponse(self): return self.response | |
933 | def getMethod(self): return self.method | |
934 | def getPname(self): return self.pname | |
935 | def getParams(self): return self.params | |
936 | def getQuery(self): return self.query | |
937 | def getResolution(self): return self.resolution | |
938 | def getAttachments(self): return self.attachments | |
939 | def getEaseOfResolution(self): return self.easeofresolution | |
940 | def getHostnames(self): return self.hostnames | |
941 | def getImpact(self): return self.impact | |
942 | def getService(self): return self.service | |
943 | def getStatus(self): return self.status | |
944 | def getTags(self): return self.tags | |
945 | def getTarget(self): return self.target | |
946 | def getParent(self): return self.parent | |
947 | ||
class _Note(ModelBase):
    """A note attached to some object, mirroring Model.Common's Note."""
    class_signature = 'Note'

    def __init__(self, note, workspace_name):
        # 'note' is the raw server/couch document for this object.
        ModelBase.__init__(self, note, workspace_name)
        self.text = note['value']['text']

    def updateAttributes(self, name=None, text=None):
        """Overwrite name and/or text when given; always bump metadata."""
        self.updateMetadata()
        if name is not None:
            self.name = name
        if text is not None:
            self.text = text

    # NOTE(review): getDescription reads self.description, which is not set
    # here -- presumably provided by ModelBase; confirm before relying on it.
    def getID(self): return self.id
    def getDescription(self): return self.description
    def getText(self): return self.text
965 | ||
class _Credential(ModelBase):
    """A username/password pair, mirroring Model.Common's Cred object."""
    class_signature = "Cred"

    def __init__(self, credential, workspace_name):
        ModelBase.__init__(self, credential, workspace_name)
        value = credential['value']
        self.username = value['username']
        self.password = value['password']

    def updateAttributes(self, username=None, password=None):
        """Overwrite the credential fields that were actually provided."""
        self.updateMetadata()
        if username is not None:
            self.username = username
        if password is not None:
            self.password = password

    def getID(self):
        return self.id

    def getUsername(self):
        return self.username

    def getPassword(self):
        return self.password
984 | ||
985 | class _Command: | |
986 | class_signature = 'CommandRunInformation' | |
987 | def __init__(self, command, workspace_name): | |
988 | self._workspace_name = workspace_name | |
989 | self.id = command['id'] | |
990 | self.command = command['value']['command'] | |
991 | self.duration = command['value']['duration'] | |
992 | self.hostname = command['value']['hostname'] | |
993 | self.ip = command['value']['ip'] | |
994 | self.itime = command['value']['itime'] | |
995 | self.params = command['value']['params'] | |
996 | self.user = command['value']['user'] | |
997 | self.workspace = command['value']['workspace'] | |
998 | ||
999 | def getID(self): return self.id | |
1000 | def getCommand(self): return self.command | |
1001 | def getDuration(self): return self.duration | |
1002 | def getHostname(self): return self.hostname | |
1003 | def getIP(self): return self.ip | |
1004 | def getItime(self): return self.itime | |
1005 | def getParams(self): return self.params | |
1006 | def getUser(self): return self.user | |
1007 | def getWorkspace(self): return self.workspace | |
1008 | ||
1009 | class _Workspace: | |
1010 | class_signature = 'Workspace' | |
1011 | ||
1012 | def __init__(self, workspace, workspace_name): | |
1013 | self._id = workspace_name | |
1014 | self.name = workspace['name'] | |
1015 | self.description = workspace['description'] | |
1016 | self.customer = workspace['customer'] | |
1017 | self.start_date = workspace['sdate'] | |
1018 | self.finish_date = workspace['fdate'] | |
1019 | ||
1020 | def getID(self): return self._id | |
1021 | def getName(self): return self.name | |
1022 | def getDescription(self): return self.description | |
1023 | def getCustomer(self): return self.customer | |
1024 | def getStartDate(self): return self.start_date | |
1025 | def getFinishDate(self): return self.finish_date | |
1026 | ||
1027 | # NOTE: uncomment for test | |
1028 | # class SillyHost(): | |
1029 | # def __init__(self) : | |
1030 | # import random; self.id = random.randint(0, 1000) | |
1031 | # self.os = "Windows" | |
1032 | # def getID(self): return self.id | |
1033 | # def getOS(self): return self.os | |
1034 | # def getDefaultGateway(self): return '192.168.1.1' | |
1035 | # def getDescription(self): return "a description" | |
1036 | # def getName(self): return "my name" | |
1037 | # def isOwned(self): return False | |
1038 | # def getOwner(self): return False | |
1039 | # def getMetadata(self): return {'stuff': 'gives other stuff'} |
0 | #!/usr/bin/python2.7 | |
1 | # -*- coding: utf-8 -*- | |
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | import requests | |
9 | import json | |
10 | from persistence.server.utils import force_unique | |
11 | from persistence.server.utils import WrongObjectSignature | |
12 | from persistence.server.changes_stream import CouchChangesStream | |
13 | ||
# NOTE: Change this if you want to use this module by itself.
15 | # If FARADAY_UP is False, SERVER_URL must be a valid faraday server url | |
16 | FARADAY_UP = True | |
17 | SERVER_URL = "http://127.0.1:5984" | |
18 | ||
19 | ||
def _get_base_server_url():
    """Return the server's base url: the configured couch URI when
    running inside Faraday, the module constant otherwise."""
    if not FARADAY_UP:
        return SERVER_URL
    # Imported lazily so the module works standalone with FARADAY_UP=False.
    from config.configuration import getInstanceConfiguration
    return getInstanceConfiguration().getCouchURI()
28 | ||
29 | ||
def _create_server_api_url():
    """Return the server's api url."""
    return _get_base_server_url() + "/_api"
33 | ||
34 | ||
def _create_server_get_url(workspace_name, object_name=None):
    """Build the url used to GET `object_name` ('hosts', 'interfaces',
    etc.) from workspace `workspace_name`.

    With object_name=None the url points at the workspace itself.
    Return the url as a string.
    """
    suffix = "/{0}".format(object_name) if object_name else ""
    return '{0}/ws/{1}{2}'.format(_create_server_api_url(),
                                  workspace_name,
                                  suffix)
49 | ||
50 | ||
def _create_server_post_url(workspace_name, object_id):
    """Build the url used to POST/PUT a single document to the server."""
    return '{0}/ws/{1}/doc/{2}'.format(_create_server_api_url(),
                                       workspace_name, object_id)
55 | ||
56 | ||
57 | def _create_server_delete_url(workspace_name, object_id): | |
58 | return _create_server_post_url(workspace_name, object_id) | |
59 | ||
60 | ||
# XXX: COUCH IT!
def _create_couch_get_url(workspace_name, object_id):
    """Build a url that talks straight to couch for a single document."""
    return "/".join((_get_base_server_url(), workspace_name, object_id))
65 | ||
66 | ||
# XXX: COUCH IT!
def _create_couch_post_url(workspace_name, object_id):
    # Couch uses the same per-document url for GET and PUT/POST.
    return _create_couch_get_url(workspace_name, object_id)
70 | ||
71 | ||
# XXX: COUCH IT!
def _create_server_db_url(workspace_name):
    """Return the url of the workspace's couch database."""
    return '{0}/{1}'.format(_get_base_server_url(), workspace_name)
77 | ||
78 | ||
def _unsafe_io_with_server(server_io_function, server_expected_response,
                           server_url, **payload):
    """A wrapper for functions which deals with I/O to or from the server.
    It calls the server_io_function with url server_url and the payload,
    raising an CantCommunicateWithServerError if the response wasn't
    server_expected_response or if there was a Connection Error.

    May also raise ConflictInDatabase (409 with couch's 'conflict' error),
    ResourceDoesNotExist (404) or Unauthorized (401/403); those propagate
    out of the try block untouched.

    Return the response from the server.
    """
    try:
        answer = server_io_function(server_url, **payload)
        # Order matters: specific status codes are mapped to rich
        # exceptions before the generic expected-status check below.
        if answer.status_code == 409 and answer.json()['error'] == 'conflict':
            raise ConflictInDatabase(answer)
        if answer.status_code == 404:
            raise ResourceDoesNotExist(server_url)
        if answer.status_code == 403 or answer.status_code == 401:
            raise Unauthorized(answer)
        if answer.status_code != server_expected_response:
            # Reuse the ConnectionError path so an unexpected status and a
            # real network failure surface as the same exception type.
            raise requests.exceptions.ConnectionError()
    except requests.exceptions.ConnectionError:
        raise CantCommunicateWithServerError(server_io_function, server_url, payload)
    return answer
101 | ||
102 | ||
103 | def _parse_json(response_object): | |
104 | """Takes a response object and return its response as a dictionary.""" | |
105 | try: | |
106 | return response_object.json() | |
107 | except ValueError: | |
108 | return {} | |
109 | ||
110 | ||
def _get(request_url, **params):
    """GET request_url, sending `params` as the query string.

    Raise CantCommunicateWithServerError if requests can't establish a
    connection to the server or the response status isn't 200.

    Return the response json parsed as a dictionary.
    """
    response = _unsafe_io_with_server(requests.get, 200,
                                      request_url, params=params)
    return _parse_json(response)
124 | ||
def _put(post_url, update=False, expected_response=201, **params):
    """PUT `params` as json to post_url.

    With update=True the document's current revision is fetched first and
    sent along, so couch accepts the write as an update. The expected
    response should be 201, but Couchdbkit answers 200, hence the
    configurable expected_response.

    Raise CantCommunicateWithServerError if requests can't establish a
    connection or the response status doesn't match expected_response.

    Return couchdb's answer as a dictionary, which looks like:
    {u'id': u'61', u'ok': True, u'rev': u'1-967a00dff5e02add41819138abb3284d'}
    """
    if update:
        params['_rev'] = _get(post_url)['_rev']
    response = _unsafe_io_with_server(requests.put,
                                      expected_response,
                                      post_url,
                                      json=params)
    return _parse_json(response)
145 | ||
146 | ||
def _delete(delete_url, database=False):
    """DELETE the object at delete_url. Set database=True when the target
    is a whole database: then there is no document revision to fetch."""
    if database:
        params = {}
    else:
        # Couch requires the current revision to delete a document.
        params = {'_rev': _get(delete_url)['_rev']}
    response = _unsafe_io_with_server(requests.delete, 200,
                                      delete_url, params=params)
    return _parse_json(response)
158 | ||
159 | ||
def _get_raw_hosts(workspace_name, **params):
    """Take a workspace_name and an arbitrary number of params and return
    a dictionary with the hosts table."""
    request_url = _create_server_get_url(workspace_name, 'hosts')
    return _get(request_url, **params)


def _get_raw_vulns(workspace_name, **params):
    """Take a workspace_name and an arbitrary number of params and return
    a dictionary with the vulns table."""
    request_url = _create_server_get_url(workspace_name, 'vulns')
    return _get(request_url, **params)


def _get_raw_interfaces(workspace_name, **params):
    """Take a workspace_name and an arbitrary number of params and return
    a dictionary with the interfaces table."""
    request_url = _create_server_get_url(workspace_name, 'interfaces')
    return _get(request_url, **params)


def _get_raw_services(workspace_name, **params):
    """Take a workspace_name and an arbitrary number of params and return
    a dictionary with the services table."""
    request_url = _create_server_get_url(workspace_name, 'services')
    return _get(request_url, **params)


def _get_raw_notes(workspace_name, **params):
    """Take a workspace name and an arbitrary number of params and
    return a dictionary with the notes table."""
    request_url = _create_server_get_url(workspace_name, 'notes')
    return _get(request_url, **params)


def _get_raw_credentials(workspace_name, **params):
    """Take a workspace name and an arbitrary number of params and
    return a dictionary with the credentials table."""
    request_url = _create_server_get_url(workspace_name, 'credentials')
    return _get(request_url, **params)


def _get_raw_commands(workspace_name, **params):
    """Take a workspace name and an arbitrary number of params and
    return a dictionary with the commands table."""
    request_url = _create_server_get_url(workspace_name, 'commands')
    return _get(request_url, **params)


def _get_raw_workspace_summary(workspace_name):
    """Return the workspace's summary document as a dictionary."""
    request_url = _create_server_get_url(workspace_name, 'summary')
    return _get(request_url)
210 | ||
# XXX: COUCH IT!
def _save_to_couch(workspace_name, faraday_object_id, **params):
    """Create the document straight into couch (no revision sent)."""
    post_url = _create_couch_post_url(workspace_name, faraday_object_id)
    return _put(post_url, update=False, **params)

# XXX: COUCH IT!
def _update_in_couch(workspace_name, faraday_object_id, **params):
    # NOTE(review): despite the name, this builds a *server* post url,
    # not a couch one -- confirm this is intentional.
    post_url = _create_server_post_url(workspace_name, faraday_object_id)
    return _put(post_url, update=True, **params)

def _save_to_server(workspace_name, faraday_object_id, **params):
    """Create the document through the server's api (expects 200)."""
    post_url = _create_server_post_url(workspace_name, faraday_object_id)
    return _put(post_url, update=False, expected_response=200, **params)

def _update_in_server(workspace_name, faraday_object_id, **params):
    """Update the document through the server's api (expects 200)."""
    post_url = _create_server_post_url(workspace_name, faraday_object_id)
    return _put(post_url, update=True, expected_response=200, **params)

# XXX: SEMI COUCH IT!
def _delete_from_couch(workspace_name, faraday_object_id):
    # Deletion goes through the server's per-document url.
    delete_url = _create_server_delete_url(workspace_name, faraday_object_id)
    return _delete(delete_url)

# XXX: COUCH IT!
def _couch_changes(workspace_name, **params):
    # Stream over couch's _changes feed for the workspace database.
    return CouchChangesStream(workspace_name,
                              _create_server_db_url(workspace_name),
                              **params)
239 | ||
240 | ||
def _get_faraday_ready_dictionaries(workspace_name, faraday_object_name,
                                    faraday_object_row_name, full_table=True,
                                    **params):
    """Return a list of dictionaries holding the information for the
    objects in table `faraday_object_name` of workspace `workspace_name`.

    faraday_object_name must be one of 'hosts', 'vulns', 'interfaces',
    'services', 'notes', 'credentials' or 'commands'.

    faraday_object_row_name is the key under which the table stores its
    rows; most times this is 'rows'. With full_table=False only the
    'value' sub-dictionary of every row is returned instead of the full
    row dictionary.
    """
    table_getters = {'hosts': _get_raw_hosts,
                     'vulns': _get_raw_vulns,
                     'interfaces': _get_raw_interfaces,
                     'services': _get_raw_services,
                     'notes': _get_raw_notes,
                     'credentials': _get_raw_credentials,
                     'commands': _get_raw_commands}

    table = table_getters[faraday_object_name](workspace_name, **params)
    if not table:
        return []
    rows = table[faraday_object_row_name]
    if full_table:
        return list(rows)
    return [row['value'] for row in rows]
278 | ||
279 | ||
def get_hosts(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about hosts
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'hosts',
                                           'rows', **params)


def get_all_vulns(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about vulns
    matching the query
    """
    # The vulns table stores its rows under 'vulnerabilities', not 'rows'.
    return _get_faraday_ready_dictionaries(workspace_name, 'vulns',
                                           'vulnerabilities', **params)


def get_vulns(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about not web vulns
    matching the query
    """
    return get_all_vulns(workspace_name, type='Vulnerability', **params)


def get_web_vulns(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about web vulns
    matching the query
    """
    return get_all_vulns(workspace_name, type="VulnerabilityWeb", **params)

def get_interfaces(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about interfaces
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'interfaces',
                                           'interfaces', **params)

def get_services(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about services
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'services',
                                           'services', **params)

def get_credentials(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about credentials
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'credentials',
                                           'rows', **params)

def get_notes(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about notes
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'notes',
                                           'rows', **params)

def get_commands(workspace_name, **params):
    """Given a workspace name and an arbitrary number of query params,
    return a list of dictionaries containing information about commands
    matching the query
    """
    return _get_faraday_ready_dictionaries(workspace_name, 'commands',
                                           'commands', **params)
348 | ||
def get_objects(workspace_name, object_signature, **params):
    """Return the list of dictionaries for the objects of kind
    `object_signature` in workspace `workspace_name` matching the query.

    object_signature must be one of 'hosts', 'vulns', 'interfaces',
    'services', 'credentials', 'notes' or 'commands'; anything else
    raises WrongObjectSignature.
    """
    dispatch = {'hosts': get_hosts,
                'vulns': get_vulns,
                'interfaces': get_interfaces,
                'services': get_services,
                'credentials': get_credentials,
                'notes': get_notes,
                'commands': get_commands}
    getter = dispatch.get(object_signature)
    if getter is None:
        raise WrongObjectSignature(object_signature)
    return getter(workspace_name, **params)
371 | ||
# cha cha cha chaaaanges!
def get_changes_stream(workspace_name, since=0, heartbeat='1000', **params):
    """Return a CouchChangesStream over workspace_name's continuous
    _changes feed, starting at sequence number `since`."""
    return _couch_changes(workspace_name, since=since, feed='continuous',
                          heartbeat=heartbeat, **params)

def get_workspaces_names():
    """Return a json containing the list with the workspaces names."""
    return _get("{0}/ws".format(_create_server_api_url()))
380 | ||
381 | # XXX: COUCH IT! | |
382 | def _clean_up_stupid_couch_response(response_string): | |
383 | """Couch likes to give invalid jsons as a response :). So nice.""" | |
384 | interesting_part = "{".join(response_string.split("{")[1:]) | |
385 | almost_there = interesting_part.split("}")[0:-1] | |
386 | ok_yeah = "}".join(almost_there) | |
387 | hopefully_valid_json = "{{{0}}}".format(ok_yeah) | |
388 | return json.loads(hopefully_valid_json) | |
389 | ||
# XXX: COUCH IT!
# COUCH IT LEVEL: REVOLUTIONS
def get_object_before_last_revision(workspace_name, object_id):
    """Return a dictionary containing the object information before
    its last revision (modification). Useful to get the attributes of
    objects already deleted.

    Return None when couch's answer can't be parsed as json or lacks
    the revision history.
    """
    get_url = _create_couch_get_url(workspace_name, object_id)
    response = _unsafe_io_with_server(requests.get, 200, get_url,
                                      params={'revs': 'true', 'open_revs': 'all'})
    try:
        valid_json_response = _clean_up_stupid_couch_response(response.text)
    except ValueError:
        return None
    try:
        # ids[1] is taken as the previous revision's hash; its revision
        # number is start - 1 (the revision right before the latest).
        id_before_del = valid_json_response['_revisions']['ids'][1]
        new_number_for_rev = valid_json_response['_revisions']['start'] - 1
    except KeyError: # one is never too safe when you call a function called "_clean_up_stupid_couch_response"
        return None

    # Couch revision ids look like "<number>-<hash>".
    rev_id_before_del = "{0}-{1}".format(new_number_for_rev, id_before_del)
    object_dict = _get(get_url, rev=rev_id_before_del)
    return object_dict
412 | ||
413 | ||
def get_object(workspace_name, object_signature, object_id):
    """Take a workspace_name, an object_signature and an object_id as strings,
    return the dictionary containing the object of type object_signature
    and matching object_id in the workspace workspace_name, or None if
    no object matching object_id was found.

    object_signature must be either 'hosts', 'vulns', 'interfaces'
    'services', 'credentials', 'notes' or 'commands'.
    Will raise an WrongObjectSignature error if this condition is not met.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the object_id is shared by two or more objects in the workspace. This
    should never happen.
    """
    objects = get_objects(workspace_name, object_signature, couchid=object_id)
    return force_unique(objects)

def get_host(workspace_name, host_id):
    """Take a workspace name and host_id as strings. Return a dictionary
    containing the host matching host_id on workspace workspace_name if found,
    or None if no hosts were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the host_id is shared by two or more hosts in the workspace. This
    should never happen.
    """
    return force_unique(get_hosts(workspace_name, couchid=host_id))

def get_vuln(workspace_name, vuln_id):
    """Take a workspace name and vuln_id as strings. Return a dictionary
    containing the vuln matching vuln_id on workspace workspace_name if found,
    or None if no vulns were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the vuln_id is shared by two or more vulns in the workspace. This
    should never happen.
    """
    return force_unique(get_vulns(workspace_name, couchid=vuln_id))

def get_web_vuln(workspace_name, vuln_id):
    """Take a workspace name and vuln_id as strings. Return a dictionary
    containing the web vuln matching vuln_id on workspace workspace_name if found,
    or None if no web vulns were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the vuln_id is shared by two or more web vulns in the workspace. This
    should never happen.
    """
    return force_unique(get_web_vulns(workspace_name, couchid=vuln_id))

def get_interface(workspace_name, interface_id):
    """Take a workspace name and interface_id as strings. Return a dictionary
    containing the interface matching interface_id on workspace workspace_name
    if found, or None if no interfaces were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the interface_id is shared by two or more interfaces in the workspace. This
    should never happen.
    """
    return force_unique(get_interfaces(workspace_name, couchid=interface_id))

def get_service(workspace_name, service_id):
    """Take a workspace name and service_id as strings. Return a dictionary
    containing the service matching service_id on workspace workspace_name if
    found, or None if no services were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the service_id is shared by two or more services in the workspace. This
    should never happen.
    """
    return force_unique(get_services(workspace_name, couchid=service_id))

def get_note(workspace_name, note_id):
    """Take a workspace name and note_id as strings. Return a dictionary
    containing the note matching note_id on workspace workspace_name if found,
    or None if no notes were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the note_id is shared by two or more notes in the workspace. This
    should never happen.
    """
    return force_unique(get_notes(workspace_name, couchid=note_id))
496 | ||
def get_credential(workspace_name, credential_id):
    """Take a workspace name and credential_id as strings. Return a dictionary
    containing the credential matching credential_id on workspace
    workspace_name if found, or None if no credentials were found.

    Will raise a MoreThanOneObjectFoundByID error if for some reason
    the credential_id is shared by two or more credentials in the workspace.
    This should never happen.
    """
    # Bug fix: this used to call get_services(), so it looked up services
    # instead of credentials.
    return force_unique(get_credentials(workspace_name, couchid=credential_id))
507 | ||
def get_command(workspace_name, command_id):
    """Return the command dictionary matching command_id in workspace
    workspace_name, or None if not found. Raises MoreThanOneObjectFoundByID
    if the id is duplicated (should never happen)."""
    return force_unique(get_commands(workspace_name, couchid=command_id))
510 | ||
def get_workspace(workspace_name, **params):
    """Take a workspace name as string. Return a dictionary
    containing the workspace document on couch database with the same
    workspace_name if found, or None if no db or document were found.
    """
    request_url = _create_server_get_url(workspace_name)
    return _get(request_url, **params)

def get_workspace_summary(workspace_name):
    """Return the 'stats' dictionary of the workspace's summary."""
    return _get_raw_workspace_summary(workspace_name)['stats']

def get_workspace_numbers(workspace_name):
    """Returns a 4-uple of (host_amount, interface_amount, service_amount, vuln_amount)
    inside of workspace workspace_name.
    """
    stats = _get_raw_workspace_summary(workspace_name)['stats']
    return stats['hosts'], stats['interfaces'], stats['services'], stats['total_vulns']
528 | ||
def get_hosts_number(workspace_name, **params):
    """Return the number of host found in workspace workspace_name"""
    # NOTE(review): **params is accepted but never forwarded -- the count
    # always covers the whole workspace.
    return int(get_workspace_summary(workspace_name)['hosts'])
532 | ||
def get_services_number(workspace_name, **params):
    """Return the number of services found in workspace workspace_name"""
    # Bug fix: this used to read the 'interfaces' entry of the summary,
    # returning the interface count instead of the service count (the
    # summary exposes 'services': see get_workspace_numbers).
    return int(get_workspace_summary(workspace_name)['services'])
536 | ||
def get_interfaces_number(workspace_name, **params):
    """Return the number of interfaces found in workspace workspace_name"""
    # NOTE(review): **params is accepted but never forwarded.
    return int(get_workspace_summary(workspace_name)['interfaces'])
540 | ||
def get_vulns_number(workspace_name, **params):
    """Return the number of vulns found in workspace workspace_name"""
    # 'total_vulns' aggregates every vulnerability type in the summary.
    return int(get_workspace_summary(workspace_name)['total_vulns'])
544 | ||
def get_notes_number(workspace_name, **params):
    """Return the number of notes on workspace workspace_name."""
    # assumes the summary stats dict exposes a 'notes' counter, like the
    # other counters used above -- TODO confirm against the server schema
    return int(get_workspace_summary(workspace_name)['notes'])
548 | ||
def get_credentials_number(workspace_name, **params):
    """Return the number of credential on workspace workspace_name."""
    # NOTE(review): assumes _get_raw_credentials returns a count-like
    # value convertible with int() -- verify against the raw endpoint
    return int(_get_raw_credentials(workspace_name, **params))
552 | ||
def get_commands_number(workspace_name, **params):
    """Return the number of commands on workspace workspace_name."""
    # NOTE(review): assumes _get_raw_commands returns a count-like value
    # convertible with int() -- verify against the raw endpoint
    return int(_get_raw_commands(workspace_name, **params))
556 | ||
def create_host(workspace_name, id, name, os, default_gateway,
                description="", metadata=None, owned=False, owner="",
                parent=None):
    """Save a new host document of type "Host" to the server.

    Every field is forwarded verbatim. Return a dictionary with the
    server's response.
    """
    payload = {
        'name': name, 'os': os, 'default_gateway': default_gateway,
        'owned': owned, 'metadata': metadata, 'owner': owner,
        'parent': parent, 'description': description, 'type': "Host",
    }
    return _save_to_server(workspace_name, id, **payload)
573 | ||
def update_host(workspace_name, id, name, os, default_gateway,
                description="", metadata=None, owned=False, owner="",
                parent=None):
    """Update an existing host document ("Host") on the server.

    Return a dictionary with the server's response.
    """
    payload = {
        'name': name, 'os': os, 'default_gateway': default_gateway,
        'owned': owned, 'metadata': metadata, 'owner': owner,
        'parent': parent, 'description': description, 'type': "Host",
    }
    return _update_in_server(workspace_name, id, **payload)
589 | ||
def create_interface(workspace_name, id, name, description, mac,
                     owned=False, owner="", hostnames=None, network_segment=None,
                     ipv4=None, ipv6=None, metadata=None):
    """Save a new interface document ("Interface") to the server.

    Return a dictionary with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'mac': mac,
        'owned': owned, 'owner': owner, 'hostnames': hostnames,
        'network_segment': network_segment, 'ipv4': ipv4, 'ipv6': ipv6,
        'type': "Interface", 'metadata': metadata,
    }
    return _save_to_server(workspace_name, id, **payload)
608 | ||
def update_interface(workspace_name, id, name, description, mac,
                     owned=False, owner="", hostnames=None, network_segment=None,
                     ipv4=None, ipv6=None, metadata=None):
    """Update an existing interface document ("Interface") on the server.

    Return a dictionary with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'mac': mac,
        'owned': owned, 'owner': owner, 'hostnames': hostnames,
        'network_segment': network_segment, 'ipv4': ipv4, 'ipv6': ipv6,
        'type': "Interface", 'metadata': metadata,
    }
    return _update_in_server(workspace_name, id, **payload)
627 | ||
def create_service(workspace_name, id, name, description, ports,
                   owned=False, owner="", protocol="", status="", version="",
                   metadata=None):
    """Save a new service document ("Service") to the server.

    Return a dictionary with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'ports': ports,
        'owned': owned, 'owner': owner, 'protocol': protocol,
        'status': status, 'version': version,
        'type': "Service", 'metadata': metadata,
    }
    return _save_to_server(workspace_name, id, **payload)
645 | ||
def update_service(workspace_name, id, name, description, ports,
                   owned=False, owner="", protocol="", status="", version="",
                   metadata=None):
    """Update an existing service document ("Service") on the server.

    Return a dictionary with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'ports': ports,
        'owned': owned, 'owner': owner, 'protocol': protocol,
        'status': status, 'version': version,
        'type': "Service", 'metadata': metadata,
    }
    return _update_in_server(workspace_name, id, **payload)
663 | ||
664 | ||
def create_vuln(workspace_name, id, name, description, owned=None, owner="",
                confirmed=False, data="", refs=None, severity="info", resolution="",
                desc="", metadata=None):
    """Save a new vulnerability document ("Vulnerability") to the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'confirmed': confirmed, 'data': data,
        'refs': refs, 'severity': severity, 'resolution': resolution,
        'desc': desc, 'type': "Vulnerability", 'metadata': metadata,
    }
    return _save_to_server(workspace_name, id, **payload)
685 | ||
def update_vuln(workspace_name, id, name, description, owned=None, owner="",
                confirmed=False, data="", refs=None, severity="info", resolution="",
                desc="", metadata=None):
    """Update an existing vulnerability document ("Vulnerability") on the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'confirmed': confirmed, 'data': data,
        'refs': refs, 'severity': severity, 'resolution': resolution,
        'desc': desc, 'type': "Vulnerability", 'metadata': metadata,
    }
    return _update_in_server(workspace_name, id, **payload)
706 | ||
def create_vuln_web(workspace_name, id, name, description, owned=None, owner="",
                    confirmed=False, data="", refs=None, severity="info", resolution="",
                    desc="", metadata=None, method=None, params="", path=None, pname=None,
                    query=None, request=None, response=None, category="", website=None):
    """Save a new web vulnerability document ("VulnerabilityWeb") to the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'confirmed': confirmed, 'data': data,
        'refs': refs, 'severity': severity, 'resolution': resolution,
        'desc': desc, 'metadata': metadata,
        # web-specific request/response context
        'method': method, 'params': params, 'path': path, 'pname': pname,
        'query': query, 'request': request, 'response': response,
        'website': website, 'category': category,
        'type': 'VulnerabilityWeb',
    }
    return _save_to_server(workspace_name, id, **payload)
737 | ||
def update_vuln_web(workspace_name, id, name, description, owned=None, owner="",
                    confirmed=False, data="", refs=None, severity="info", resolution="",
                    desc="", metadata=None, method=None, params="", path=None, pname=None,
                    query=None, request=None, response=None, category="", website=None):
    """Update an existing web vulnerability document ("VulnerabilityWeb").

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'confirmed': confirmed, 'data': data,
        'refs': refs, 'severity': severity, 'resolution': resolution,
        'desc': desc, 'metadata': metadata,
        # web-specific request/response context
        'method': method, 'params': params, 'path': path, 'pname': pname,
        'query': query, 'request': request, 'response': response,
        'website': website, 'category': category,
        'type': 'VulnerabilityWeb',
    }
    return _update_in_server(workspace_name, id, **payload)
768 | ||
def create_note(workspace_name, id, name, text, owned=None, owner="",
                description="", metadata=None):
    """Save a new note document ("Note") to the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'text': text, 'type': "Note", 'metadata': metadata,
    }
    return _save_to_server(workspace_name, id, **payload)
783 | ||
def update_note(workspace_name, id, name, text, owned=None, owner="",
                description="", metadata=None):
    """Update an existing note document ("Note") on the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'text': text, 'type': "Note", 'metadata': metadata,
    }
    return _update_in_server(workspace_name, id, **payload)
798 | ||
799 | ||
def create_credential(workspace_name, id, name, username, password,
                      owned=None, owner="", description="", metadata=None):
    """Save a new credential document ("Credential") to the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'metadata': metadata,
        'username': username, 'password': password, 'type': "Credential",
    }
    return _save_to_server(workspace_name, id, **payload)
815 | ||
def update_credential(workspace_name, id, name, username, password,
                      owned=None, owner="", description="", metadata=None):
    """Update an existing credential document ("Credential") on the server.

    Return the json with the server's response.
    """
    payload = {
        'name': name, 'description': description, 'owned': owned,
        'owner': owner, 'metadata': metadata,
        'username': username, 'password': password, 'type': "Credential",
    }
    return _update_in_server(workspace_name, id, **payload)
831 | ||
def create_command(workspace_name, id, command, duration=None, hostname=None,
                   ip=None, itime=None, params=None, user=None):
    """Save a new command-run document ("CommandRunInformation") to the server.

    Return the json with the server's response.
    """
    payload = {
        'command': command, 'duration': duration, 'hostname': hostname,
        'ip': ip, 'itime': itime, 'params': params, 'user': user,
        # commands also record the workspace they ran against
        'workspace': workspace_name, 'type': "CommandRunInformation",
    }
    return _save_to_server(workspace_name, id, **payload)
848 | ||
def update_command(workspace_name, id, command, duration=None, hostname=None,
                   ip=None, itime=None, params=None, user=None):
    """Update an existing command-run document ("CommandRunInformation").

    Return the json with the server's response.
    """
    payload = {
        'command': command, 'duration': duration, 'hostname': hostname,
        'ip': ip, 'itime': itime, 'params': params, 'user': user,
        # commands also record the workspace they ran against
        'workspace': workspace_name, 'type': "CommandRunInformation",
    }
    return _update_in_server(workspace_name, id, **payload)
865 | ||
866 | ||
867 | # COUCH IT! | |
def create_database(workspace_name):
    """Create a database in the server. Return the json with the
    server's response. Can throw an Unauthorized exception
    """

    # NOTE: this function is still talking to couch directly,
    # that's why it is unable to use the _put function:
    # couch returns a 201 response code if everything went ok,
    # while _put expects a different success code
    db_url = _create_server_db_url(workspace_name)
    return _parse_json(_unsafe_io_with_server(requests.put,
                                              201,
                                              db_url))
880 | ||
def create_workspace(workspace_name, description, start_date, finish_date,
                     customer=None):
    """Create a workspace in the server. Return the json with the
    server's response.
    """
    # the workspace document id is the workspace name itself
    workspace_doc = {
        'name': workspace_name, 'description': description,
        'customer': customer, 'sdate': start_date, 'fdate': finish_date,
        'type': "Workspace",
    }
    return _save_to_couch(workspace_name, workspace_name, **workspace_doc)
894 | ||
def delete_host(workspace_name, host_id):
    """Delete host of id host_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, host_id)
898 | ||
def delete_interface(workspace_name, interface_id):
    """Delete interface of id interface_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, interface_id)
902 | ||
def delete_service(workspace_name, service_id):
    """Delete service of id service_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, service_id)
906 | ||
def delete_vuln(workspace_name, vuln_id):
    """Delete vuln of id vuln_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, vuln_id)
910 | ||
def delete_note(workspace_name, note_id):
    """Delete note of id note_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, note_id)
914 | ||
def delete_credential(workspace_name, credential_id):
    """Delete credential of id credential_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, credential_id)
918 | ||
def delete_command(workspace_name, command_id):
    """Delete command of id command_id from the database."""
    # thin wrapper: deletion is delegated to the couch-backed helper
    return _delete_from_couch(workspace_name, command_id)
922 | ||
def delete_workspace(workspace_name):
    """Delete the couch database of id workspace_name"""
    # a workspace maps to a whole couch database, so this drops the DB
    # itself (database=True) rather than a single document
    db_url = _create_server_db_url(workspace_name)
    return _delete(db_url, database=True)
927 | ||
def is_server_up():
    """Return True if the Faraday server answers its /info endpoint,
    False on any failure.
    """
    try:
        _get("{0}/info".format(_create_server_api_url()))
    except Exception:
        # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed while probing the server; also avoids a local
        # variable shadowing the function's own name
        return False
    return True
935 | ||
def test_server_url(url_to_test):
    """Return True if url_to_test answers the Faraday _api/info endpoint,
    False on any failure.
    """
    try:
        _get("{0}/_api/info".format(url_to_test))
    except Exception:
        # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed while probing the candidate URL
        return False
    return True
943 | ||
class ServerRequestException(Exception):
    """Base class for every error raised while talking to the server."""
    def __init__(self):
        pass
947 | ||
class CantCommunicateWithServerError(ServerRequestException):
    """Raised when the server gave no valid response to a request."""

    def __init__(self, function, server_url, payload):
        self.function = function
        self.server_url = server_url
        self.payload = payload

    def __str__(self):
        template = ("Couldn't get a valid response from the server when "
                    "requesting to URL {0} with parameters {1} and "
                    "function {2}.")
        return template.format(self.server_url, self.payload, self.function)
959 | ||
960 | ||
class ConflictInDatabase(ServerRequestException):
    """Raised on a document conflict while saving (typically a missing or
    stale _rev for an already-existing document)."""

    def __init__(self, answer):
        self.answer = answer

    def __str__(self):
        # BUGFIX: grammar in the user-facing message
        # ("did not provided" -> "did not provide")
        return ("There was a conflict trying to save your document. "
                "Most probably the document already existed and you "
                "did not provide a _rev argument to your payload. "
                "The answer from the server was {0}".format(self.answer))
970 | ||
class ResourceDoesNotExist(ServerRequestException):
    """Raised when the requested URL holds no resource."""

    def __init__(self, url):
        self.url = url

    def __str__(self):
        return "Can't find anything on URL {0}".format(self.url)
977 | ||
class Unauthorized(ServerRequestException):
    """Raised when the server rejects the request for lack of authorization."""

    def __init__(self, answer):
        self.answer = answer

    def __str__(self):
        details = "The answer from the server was {0}".format(self.answer)
        return "You're not authorized to make this request. " + details
0 | #!/usr/bin/python2.7 | |
1 | # -*- coding: utf-8 -*- | |
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | ||
class MoreThanOneObjectFoundByID(Exception):
    """Raised when a lookup by ID unexpectedly matches several objects.

    IDs are supposed to be unique; faulty_list carries the conflicting
    objects so the caller can inspect or report the duplicates.
    """
    def __init__(self, faulty_list):
        self.faulty_list = faulty_list

    def __str__(self):
        # BUGFIX: the concatenated literals rendered "found.These"
        # (missing space between the two sentences)
        return ("More than one object has been found. "
                "These are all the objects found with the same ID: {0}"
                .format(self.faulty_list))
17 | ||
class WrongObjectSignature(Exception):
    """Raised when an object-signature string is not one of the known kinds."""

    def __init__(self, param):
        self.param = param

    def __str__(self):
        # BUGFIX: the concatenated literals rendered
        # "...'vuln_web','interface' 'service'..." -- missing the space
        # after the comma and the comma between 'interface' and 'service'
        return ("object_signature must be either 'host', 'vuln', 'vuln_web', "
                "'interface', 'service', 'credential' or 'note' and it was {0}"
                .format(self.param))
26 | ||
def force_unique(lst):
    """Collapse a result list to at most one object.

    Returns the sole element when lst has exactly one entry, None when it
    is empty, and raises MoreThanOneObjectFoundByID when several objects
    share the same ID.
    """
    if not lst:
        return None
    if len(lst) > 1:
        raise MoreThanOneObjectFoundByID(lst)
    return lst[0]
38 | ||
def get_object_properties(obj):
    """Return the attributes every model object shares, as a dict."""
    return dict(id=obj.getID(),
                name=obj.getName(),
                description=obj.getDescription(),
                metadata=obj.getMetadata(),
                owned=obj.isOwned(),
                owner=obj.getOwner())
47 | ||
def get_host_properties(host):
    """Return host-specific fields merged with the common object fields."""
    properties = {'os': host.getOS(),
                  'default_gateway': host.getDefaultGateway()}
    properties.update(get_object_properties(host))
    return properties
53 | ||
def get_interface_properties(interface):
    """Return interface-specific fields merged with the common object fields."""
    properties = {'mac': interface.getMAC(),
                  'hostnames': interface.getHostnames(),
                  'network_segment': interface.getNetworkSegment(),
                  'ipv4': interface.getIPv4(),
                  'ipv6': interface.getIPv6()}
    properties.update(get_object_properties(interface))
    return properties
63 | ||
def get_service_properties(service):
    """Return service-specific fields merged with the common object fields."""
    properties = {'ports': service.getPorts(),
                  'protocol': service.getProtocol(),
                  'status': service.getStatus(),
                  'version': service.getVersion()}
    properties.update(get_object_properties(service))
    return properties
72 | ||
def get_vuln_properties(vuln):
    """Return vulnerability-specific fields merged with the common fields."""
    properties = {'confirmed': vuln.getConfirmed(),
                  'data': vuln.getData(),
                  'refs': vuln.getRefs(),
                  'severity': vuln.getSeverity(),
                  'resolution': vuln.getResolution(),
                  'desc': vuln.getDesc()}
    properties.update(get_object_properties(vuln))
    return properties
82 | ||
def get_vuln_web_properties(vuln_web):
    """Return web-vuln fields merged with the vuln and common object fields."""
    properties = {'method': vuln_web.getMethod(),
                  'params': vuln_web.getParams(),
                  'request': vuln_web.getRequest(),
                  'response': vuln_web.getResponse(),
                  'website': vuln_web.getWebsite(),
                  'path': vuln_web.getPath(),
                  'pname': vuln_web.getPname(),
                  'query': vuln_web.getQuery()}
    # both updates kept, mirroring the original call sequence (the getters
    # are invoked the same number of times)
    properties.update(get_object_properties(vuln_web))
    properties.update(get_vuln_properties(vuln_web))
    return properties
96 | ||
def get_note_properties(note):
    """Return note-specific fields merged with the common object fields."""
    properties = {'text': note.getText()}
    properties.update(get_object_properties(note))
    return properties
101 | ||
def get_credential_properties(credential):
    """Return credential-specific fields merged with the common object fields."""
    properties = {'username': credential.getUsername(),
                  'password': credential.getPassword()}
    properties.update(get_object_properties(credential))
    return properties
107 | ||
def get_command_properties(command):
    """Return a dict describing a command run (CommandRunInformation).

    Unlike the other model objects, commands expose plain attributes
    rather than getters, except for getID().
    """
    properties = {'id': command.getID()}
    for attr in ('command', 'user', 'ip', 'hostname',
                 'itime', 'duration', 'params'):
        properties[attr] = getattr(command, attr)
    return properties
245 | 245 | plugin, cmd_info = self._active_plugins.get(pid) |
246 | 246 | |
247 | 247 | cmd_info.duration = time.time() - cmd_info.itime |
248 | self._mapper_manager.save(cmd_info) | |
248 | self._mapper_manager.update(cmd_info) | |
249 | 249 | |
250 | 250 | self.processOutput(plugin, term_output) |
251 | 251 | del self._active_plugins[pid] |
263 | 263 | if plugin in self._plugins: |
264 | 264 | self.processOutput(self._plugins[plugin], filepath, True) |
265 | 265 | cmd_info.duration = time.time() - cmd_info.itime |
266 | self._mapper_manager.save(cmd_info) | |
266 | self._mapper_manager.update(cmd_info) | |
267 | 267 | return True |
268 | 268 | return False |
269 | 269 |
21 | 21 | |
22 | 22 | |
23 | 23 | class PluginManager(object): |
24 | def __init__(self, plugin_repo_path, mapper_manager): | |
24 | def __init__(self, plugin_repo_path): | |
25 | 25 | self._controllers = {} |
26 | 26 | self._plugin_modules = {} |
27 | 27 | self._loadPlugins(plugin_repo_path) |
28 | self._mapper_manager = mapper_manager | |
29 | ||
30 | 28 | self._plugin_settings = {} |
31 | 29 | self._loadSettings() |
32 | 30 | |
36 | 34 | def _loadSettings(self): |
37 | 35 | _plugin_settings = CONF.getPluginSettings() |
38 | 36 | if _plugin_settings: |
39 | ||
40 | 37 | self._plugin_settings = _plugin_settings |
41 | 38 | |
42 | 39 | activep = self._instancePlugins() |
43 | 40 | for plugin_id, plugin in activep.iteritems(): |
44 | ||
45 | if plugin_id not in _plugin_settings: | |
46 | self._plugin_settings[plugin_id] = { | |
47 | "name": plugin.name, | |
48 | "description": plugin.description, | |
49 | "version": plugin.version, | |
50 | "plugin_version": plugin.plugin_version, | |
51 | "settings": dict(plugin.getSettings()) | |
52 | } | |
41 | if plugin_id in _plugin_settings: | |
42 | plugin.updateSettings(_plugin_settings[plugin_id]["settings"]) | |
43 | self._plugin_settings[plugin_id] = { | |
44 | "name": plugin.name, | |
45 | "description": plugin.description, | |
46 | "version": plugin.version, | |
47 | "plugin_version": plugin.plugin_version, | |
48 | "settings": dict(plugin.getSettings()) | |
49 | } | |
53 | 50 | |
54 | 51 | dplugins = [] |
55 | 52 | for k, v in self._plugin_settings.iteritems(): |
118 | 115 | plugin.updateSettings(self._plugin_settings[id]["settings"]) |
119 | 116 | return plugins |
120 | 117 | |
121 | def _updatePluginSettings(self, new_plugin_id): | |
122 | pass | |
123 | ||
124 | 118 | def _verifyPlugin(self, new_plugin): |
125 | 119 | """ |
126 | 120 | Generic method that decides is a plugin is valid |
75 | 75 | |
76 | 76 | def updateSettings(self, new_settings): |
77 | 77 | for name, value in new_settings.iteritems(): |
78 | setting_type, curr_value = self._settings[name] | |
79 | self._settings[name] = setting_type, setting_type(value) | |
78 | if name in self._settings: | |
79 | setting_type, curr_value = self._settings[name] | |
80 | self._settings[name] = setting_type, setting_type(value) | |
80 | 81 | |
81 | 82 | def canParseCommandString(self, current_input): |
82 | 83 | """ |
43 | 43 | |
44 | 44 | except: |
45 | 45 | print "Bad report format" |
46 | sys.exit() | |
46 | return None | |
47 | 47 | |
48 | 48 | file.close() |
49 | 49 | return xml |
273 | 273 | for item in parser.items: |
274 | 274 | h_id = self.createAndAddHost(item['name'], item['os']) |
275 | 275 | i_id = self.createAndAddInterface(h_id, item['name'], ipv4_address=item[ |
276 | 'name'], hostname_resolution=item['hostnames']) | |
276 | 'name'], hostname_resolution= ' '.join( list( item['hostnames'] ))) | |
277 | 277 | |
278 | 278 | for v in item['vulns']: |
279 | 279 | v_id = self.createAndAddVulnToHost(h_id, v['name'], v['desc'], v[ |
78 | 78 | """ |
79 | 79 | @return items A list of Host instances |
80 | 80 | """ |
81 | node = tree.findall('report')[0] | |
82 | node2 = node.findall('results')[0] | |
83 | ||
84 | for node in node2.findall('result'): | |
85 | yield Item(node) | |
81 | try: | |
82 | node = tree.findall('report')[0] | |
83 | node2 = node.findall('results')[0] | |
84 | for node in node2.findall('result'): | |
85 | yield Item(node) | |
86 | ||
87 | except Exception: | |
88 | ||
89 | node2 = tree.findall('result') | |
90 | for node in node2: | |
91 | yield Item(node) | |
86 | 92 | |
87 | 93 | |
88 | 94 | def get_attrib_from_subnode(xml_node, subnode_xpath_expr, attrib_name): |
97 | 103 | if ETREE_VERSION[0] <= 1 and ETREE_VERSION[1] < 3: |
98 | 104 | |
99 | 105 | match_obj = re.search( |
100 | "([^\@]+?)\[\@([^=]*?)=\'([^\']*?)\'", subnode_xpath_expr) | |
106 | "([^\@]+?)\[\@([^=]*?)=\'([^\']*?)\'", | |
107 | subnode_xpath_expr) | |
108 | ||
101 | 109 | if match_obj is not None: |
102 | 110 | node_to_find = match_obj.group(1) |
103 | 111 | xpath_attrib = match_obj.group(2) |
121 | 129 | class Item(object): |
122 | 130 | """ |
123 | 131 | An abstract representation of a Item |
124 | ||
125 | ||
126 | 132 | @param item_node A item_node taken from an openvas xml tree |
127 | 133 | """ |
128 | 134 | |
138 | 144 | self.description = self.get_text_from_subnode('description') |
139 | 145 | self.port = "None" |
140 | 146 | self.severity = self.get_text_from_subnode('threat') |
141 | self.service = "" | |
147 | self.service = "Unknown" | |
142 | 148 | self.protocol = "" |
143 | 149 | port = self.get_text_from_subnode('port') |
144 | 150 | |
145 | if (re.search("^general", port) is None): | |
151 | if re.search("^general", port) is None: | |
152 | ||
146 | 153 | mregex = re.search("([\w]+) \(([\d]+)\/([\w]+)\)", port) |
154 | ||
147 | 155 | if mregex is not None: |
148 | 156 | self.service = mregex.group(1) |
149 | 157 | self.port = mregex.group(2) |
223 | 231 | web = False |
224 | 232 | ids = {} |
225 | 233 | for item in parser.items: |
234 | ||
226 | 235 | if item.name is not None: |
236 | ||
227 | 237 | ref = [] |
228 | 238 | if item.cve: |
229 | 239 | ref.append(item.cve.encode("utf-8")) |
239 | 249 | ids[item.subnet] = h_id |
240 | 250 | |
241 | 251 | if item.port == "None": |
242 | v_id = self.createAndAddVulnToHost(h_id, item.name.encode("utf-8"), desc=item.description.encode("utf-8"), | |
243 | severity=item.severity.encode( | |
244 | "utf-8"), | |
245 | ref=ref) | |
252 | v_id = self.createAndAddVulnToHost( | |
253 | h_id, | |
254 | item.name.encode("utf-8"), | |
255 | desc=item.description.encode("utf-8"), | |
256 | severity=item.severity.encode("utf-8"), | |
257 | ref=ref) | |
246 | 258 | else: |
247 | 259 | |
248 | 260 | if item.service: |
249 | 261 | web = True if re.search( |
250 | r'^(www|http)', item.service) else False | |
262 | r'^(www|http)', | |
263 | item.service) else False | |
251 | 264 | else: |
252 | web = True if item.port in ( | |
253 | '80', '443', '8080') else False | |
265 | ||
266 | web = True if item.port in ('80', '443', '8080') else False | |
254 | 267 | |
255 | 268 | if ids.has_key(item.subnet + "_" + item.subnet): |
256 | 269 | i_id = ids[item.subnet + "_" + item.subnet] |
258 | 271 | |
259 | 272 | if self._isIPV4(item.subnet): |
260 | 273 | i_id = self.createAndAddInterface( |
261 | h_id, item.subnet, ipv4_address=item.subnet, hostname_resolution=item.host) | |
274 | h_id, | |
275 | item.subnet, | |
276 | ipv4_address=item.subnet, | |
277 | hostname_resolution=item.host) | |
262 | 278 | else: |
263 | 279 | i_id = self.createAndAddInterface( |
264 | h_id, item.subnet, ipv6_address=item.subnet, hostname_resolution=item.host) | |
280 | h_id, | |
281 | item.subnet, | |
282 | ipv6_address=item.subnet, | |
283 | hostname_resolution=item.host) | |
265 | 284 | |
266 | 285 | ids[item.subnet + "_" + item.subnet] = i_id |
267 | 286 | |
268 | 287 | if ids.has_key(item.subnet + "_" + item.port): |
269 | 288 | s_id = ids[item.subnet + "_" + item.port] |
270 | 289 | else: |
271 | s_id = self.createAndAddServiceToInterface(h_id, i_id, item.service, | |
272 | item.protocol, | |
273 | ports=[ | |
274 | str(item.port)], | |
275 | status="open") | |
290 | ||
291 | s_id = self.createAndAddServiceToInterface( | |
292 | h_id, | |
293 | i_id, | |
294 | item.service, | |
295 | item.protocol, | |
296 | ports=[str(item.port)], | |
297 | status="open") | |
298 | ||
276 | 299 | ids[item.subnet + "_" + item.port] = s_id |
300 | ||
277 | 301 | if web: |
302 | ||
278 | 303 | n_id = self.createAndAddNoteToService( |
279 | h_id, s_id, "website", "") | |
304 | h_id, | |
305 | s_id, | |
306 | "website", | |
307 | "") | |
308 | ||
280 | 309 | self.createAndAddNoteToNote( |
281 | h_id, s_id, n_id, item.host, "") | |
310 | h_id, | |
311 | s_id, | |
312 | n_id, | |
313 | item.host, | |
314 | "") | |
282 | 315 | |
283 | 316 | if item.name: |
284 | 317 | if web: |
285 | v_id = self.createAndAddVulnWebToService(h_id, s_id, item.name.encode("utf-8"), | |
286 | desc=item.description.encode("utf-8"), website=item.host, | |
287 | severity=item.severity.encode("utf-8"), ref=ref) | |
318 | v_id = self.createAndAddVulnWebToService( | |
319 | h_id, | |
320 | s_id, | |
321 | item.name.encode("utf-8"), | |
322 | desc=item.description.encode("utf-8"), | |
323 | website=item.host, | |
324 | severity=item.severity.encode("utf-8"), | |
325 | ref=ref) | |
288 | 326 | else: |
289 | self.createAndAddVulnToService(h_id, s_id, item.name.encode("utf-8"), | |
290 | desc=item.description.encode("utf-8"), severity=item.severity.encode("utf-8"), ref=ref) | |
327 | self.createAndAddVulnToService( | |
328 | h_id, | |
329 | s_id, | |
330 | item.name.encode("utf-8"), | |
331 | desc=item.description.encode("utf-8"), | |
332 | severity=item.severity.encode("utf-8"), | |
333 | ref=ref) | |
291 | 334 | |
292 | 335 | del parser |
293 | 336 |
49 | 49 | if host_info is None: |
50 | 50 | api.log("No hosts detected") |
51 | 51 | else: |
52 | for host in output.split('\r\n'): | |
52 | for host in output.splitlines(): | |
53 | 53 | if host != "": |
54 | 54 | h_id = self.createAndAddHost(host) |
55 | 55 | i_id = self.createAndAddInterface( |
127 | 127 | self.error_message = message |
128 | 128 | |
129 | 129 | def hashKey(self, key): |
130 | key = repr(key).strip("'") | |
130 | # from sqlmap/lib/utils/hashdb.py | |
131 | # we don't sanitize key, because we only work | |
132 | # with plain string | |
131 | 133 | retVal = int(hashlib.md5(key).hexdigest(), 16) & 0x7fffffffffffffff |
132 | ||
133 | 134 | return retVal |
134 | 135 | |
135 | 136 | def hashDBRetrieve(self, key, unserialize=False, db=False): |
293 | 294 | dbms_version = self.hashDBRetrieve(self.HASHDB_KEYS.DBMS, False, db) |
294 | 295 | |
295 | 296 | self.ip = self.getAddress(self.hostname) |
296 | ||
297 | dbms = str(dbms_version.split(" ")[0]) | |
298 | ||
297 | ||
299 | 298 | h_id = self.createAndAddHost(self.ip) |
300 | 299 | |
301 | 300 | i_id = self.createAndAddInterface( |
326 | 325 | self.hostname, |
327 | 326 | '') |
328 | 327 | |
329 | db_port = self.db_port[dbms] | |
328 | for item in self.db_port.keys(): | |
329 | if dbms_version.find(item) >= 0: | |
330 | db_port = self.db_port[item] | |
330 | 331 | |
331 | 332 | s_id2 = self.createAndAddServiceToInterface( |
332 | 333 | h_id, |
333 | 334 | i_id, |
334 | name=dbms, | |
335 | name=dbms_version, | |
335 | 336 | protocol="tcp", |
336 | 337 | status="down", |
337 | 338 | version=str(dbms_version), |
439 | 440 | |
440 | 441 | if args.u: |
441 | 442 | |
442 | urlComponents = urlparse(args.u) | |
443 | ||
444 | self.protocol = urlComponents.scheme | |
443 | if args.u.find('http://') < 0 or args.u.find('https://') < 0: | |
444 | urlComponents = urlparse('http://' + args.u) | |
445 | else: | |
446 | urlComponents = urlparse(args.u) | |
447 | ||
448 | self.protocol = urlComponents.scheme | |
445 | 449 | self.hostname = urlComponents.netloc |
446 | 450 | |
447 | 451 | if urlComponents.port: |
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | ||
10 | from plugins import core | |
11 | import re | |
12 | import socket | |
13 | ||
14 | __author__ = "Joaquin L. Pereyra" | |
15 | __copyright__ = "Copyright (c) 2016, Infobyte LLC" | |
16 | __credits__ = ["Joaquin L. Pereyra"] | |
17 | __license__ = "" | |
18 | __version__ = "0.0.1" | |
19 | __maintainer__ = "Joaquin L. Pereyra" | |
20 | __email__ = "[email protected]" | |
21 | __status__ = "Development" | |
22 | ||
23 | ||
class WPScanPlugin(core.PluginBase):
    """Handle the WPScan tool. Detects the output of the tool
    and adds the information to Faraday.
    """

    def __init__(self):
        """Initializes the plugin with some basic params.
        Right now the plugin doesn't support being executed from another
        folder, like /dir/wpscan.rb
        """
        core.PluginBase.__init__(self)
        self.id = "wpscan"
        self.name = "WPscan"
        self.plugin_version = "0.0.1"
        self.version = "2.9.1"
        # The ".rb" suffix dot is escaped: the previous pattern's bare "."
        # matched any character (e.g. "wpscanXrb").
        self._command_regex = re.compile(
            r"^((sudo )?(ruby )?(\.\/)?(wpscan)(\.rb)?)")

    def parseOutputString(self, output, debug=False):
        """Parses the output given as a string by the wpscan tool and creates
        the appropriate hosts, interface, service and vulnerabilities.
        Returns nothing.
        """
        service, base_url = self.__get_service_and_url_from_output(output)
        if base_url is None:
            # Not a recognizable WPScan report; nothing to import.
            return
        # NOTE(review): performs a DNS lookup; assumes base_url resolves.
        host_ip = socket.gethostbyname_ex(base_url)[2][0]
        host_id = self.createAndAddHost(host_ip)
        interface_id = self.createAndAddInterface(host_id, host_ip,
                                                  ipv4_address=host_ip,
                                                  hostname_resolution=base_url)

        service_id = self.createAndAddServiceToInterface(host_id, interface_id,
                                                         service, "tcp")

        # WPScan flags findings with a leading "[!]".
        potential_vulns = re.findall(r"(\[\!\].*)", output)
        for potential_vuln in potential_vulns:
            vuln_name, severity = self.__get_name_and_severity(potential_vuln)
            if vuln_name is not None:
                vuln = potential_vuln  # they grow up so fast
                path = self.__get_path_from_vuln(vuln)
                self.createAndAddVulnWebToService(host_id, service_id,
                                                  name=vuln_name,
                                                  website=base_url,
                                                  path=path, severity=severity)

    def __get_service_and_url_from_output(self, output):
        """Return the service (http or https) and the base URL (URL without
        protocol) from a given string, or (None, None) when no URL is found.
        In case more than one URL is found, return the service and base_url
        of the first one, ignore others.
        """
        search_url = re.search(r"\[\+\](.*?)URL: (https?)://(.*?)/", output)
        if search_url is None:
            # Guard: previously this raised AttributeError on group().
            return None, None
        return search_url.group(2), search_url.group(3)

    def __get_name_and_severity(self, potential_vuln):
        """Regex the potential_vuln string against a regex with all
        the vulnerabilities given by WPscan. Returns a tuple with
        the vulnerability's name and severity if the regex found something
        and (None, None) if the regex found nothing.
        """
        critical_search = re.search(r"Website is not fully configured|"
                                    r"Debug log file found|"
                                    r"wp-config\.php backup file has been found|"
                                    r"searchreplacedb2\.php has been found",
                                    potential_vuln)
        if critical_search:
            return critical_search.group(0), "critical"

        info_search = re.search(r"Directory listing is enabled|"
                                r"An error_log file has been found|"
                                r"file exists exposing a version number|"
                                r"Full Path Disclosure|"
                                r"Registration is enabled|"
                                r"(Upload|Includes) directory has directory listing enabled|"
                                r"Default first Wordpress username 'admin' is still used",
                                potential_vuln)
        if info_search:
            return info_search.group(0), "info"

        return None, None

    def __get_path_from_vuln(self, vuln):
        """Given a vuln as string, return the path as a string (empty string
        for path not found).
        """
        path_search = re.search(r"(?P<url>https?://[^\s]+)", vuln)
        path = path_search.group('url') if path_search else ""
        return path

    def processCommandString(self, username, current_path, command_string):
        # No rewriting of the command line is needed for wpscan.
        return None
115 | ||
def createPlugin():
    """Factory entry point required by Faraday's plugin loader."""
    plugin = WPScanPlugin()
    return plugin
3 | 3 | flask>=0.10.1 |
4 | 4 | twisted>=16.1.1 |
5 | 5 | sqlalchemy>=1.0.12 |
6 | pyopenssl>=16.0.0⏎ | |
6 | pyopenssl>=16.0.0 | |
7 | service_identity>=16.0.0 |
20 | 20 | __status__ = "Development" |
21 | 21 | |
22 | 22 | # Configuration |
# NOTE(security): never commit a real API key to the repository. Replace the
# placeholder locally (or load it from an environment variable) before use.
SHODAN_API_KEY = "INSERT YOUR SHODAN KEY HERE"
24 | 24 | |
25 | 25 | def strip_non_ascii(string): |
26 | 26 | ''' Returns the string without non ASCII characters''' |
28 | 28 | return ''.join(stripped) |
29 | 29 | |
30 | 30 | def send_faraday(result): |
31 | print 'IP: %s' % result['ip_str'] | |
31 | print 'IP: %s' % result['ip_str'] | |
32 | 32 | |
33 | 33 | if result['data'] is not None: |
34 | 34 | result['data'] = base64.b64encode(strip_non_ascii(str(result['data']))) #fix: to avoid non ascii caracters |
46 | 46 | "tcp",str(result['port']),"open",str(result['version']) if result.has_key('version') else "") |
47 | 47 | if result['data'] is not None: |
48 | 48 | n_id = api.createAndAddNoteToService(h_id,s_id,"shadon_response",str(result['data'])) |
49 | ||
49 | ||
50 | 50 | #Notes - Information geo/shadon |
51 | 51 | n_id = api.createAndAddNoteToHost(h_id,"geo_country",result['location']['country_name'] if result['location']['country_name'] is not None else "" ) |
52 | 52 | n_id = api.createAndAddNoteToHost(h_id,"geo_latitude",result['location']['latitude'] if result['location']['latitude'] is not None else "") |
75 | 75 | |
76 | 76 | results = shodan_api.search(args.shodan_query) |
77 | 77 | print 'Results found: %s, query "%s"' % (results['total'], args.shodan_query) |
78 | ||
78 | ||
79 | 79 | for r in shodan_api.search_cursor(args.shodan_query, minify=True, retries=5): |
80 | 80 | if args.count != "all" and c_page >= int(args.count): |
81 | 81 | break |
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | import flask | |
5 | ||
6 | from server.app import app | |
7 | from server.utils.logger import get_logger | |
8 | from server.utils.web import gzipped, validate_workspace, filter_request_args | |
9 | from server.dao.command import CommandDAO | |
10 | ||
# @app.route must be the outermost (top) decorator: decorators apply
# bottom-up, so with @gzipped on top Flask registered the un-gzipped
# function and the compression wrapper never ran. This order matches
# count_notes elsewhere in the codebase.
@app.route('/ws/<workspace>/commands', methods=['GET'])
@gzipped
def list_commands(workspace=None):
    """Return the executed commands of *workspace* as JSON,
    filtered by the request's query arguments."""
    validate_workspace(workspace)
    get_logger(__name__).debug(
        "Request parameters: {!r}".format(flask.request.args))

    commands_filter = filter_request_args()

    dao = CommandDAO(workspace)

    result = dao.list(command_filter=commands_filter)

    return flask.jsonify(result)
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | import flask | |
5 | ||
6 | from server.app import app | |
7 | from server.utils.logger import get_logger | |
8 | from server.utils.web import gzipped, validate_workspace, filter_request_args | |
9 | from server.dao.credential import CredentialDAO | |
10 | ||
11 | ||
# @app.route must be the outermost (top) decorator: decorators apply
# bottom-up, so with @gzipped on top Flask registered the un-gzipped
# function and the compression wrapper never ran.
@app.route('/ws/<workspace>/credentials', methods=['GET'])
@gzipped
def list_credentials(workspace=None):
    """Return the credentials of *workspace* as JSON,
    filtered by the request's query arguments."""
    validate_workspace(workspace)

    get_logger(__name__).debug(
        "Request parameters: {!r}".format(flask.request.args))

    cred_filter = filter_request_args()

    dao = CredentialDAO(workspace)
    result = dao.list(cred_filter=cred_filter)

    return flask.jsonify(result)
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | import flask, json | |
5 | import server.database | |
6 | import server.utils.logger | |
7 | ||
8 | from server.app import app | |
9 | from server.utils.web import validate_workspace | |
10 | from restkit.errors import RequestFailed, ResourceError | |
11 | ||
12 | logger = server.utils.logger.get_logger(__name__) | |
13 | ||
def build_bad_request_response(msg):
    """Build an HTTP 400 JSON response carrying *msg* as the error text."""
    resp = flask.jsonify({'error': msg})
    resp.status_code = 400
    return resp
18 | ||
@app.route('/ws/<workspace>/doc/<doc_id>', methods=['GET'])
def get_document(workspace, doc_id):
    """Fetch a raw CouchDB document from *workspace* and return it as JSON."""
    validate_workspace(workspace)
    couchdb_conn = server.database.get(workspace).couchdb
    return flask.jsonify(couchdb_conn.get_document(doc_id))
26 | ||
@app.route('/ws/<workspace>/doc/<doc_id>', methods=['PUT'])
def add_or_update_document(workspace, doc_id):
    """Create or update a CouchDB document in *workspace*, then mirror the
    change into the workspace's local database via DocumentImporter.

    Returns the CouchDB response as JSON; 400 on malformed input, and
    forwards CouchDB's own status code on save failures.
    """
    validate_workspace(workspace)

    try:
        document = json.loads(flask.request.data)
    except ValueError:
        return build_bad_request_response('invalid json')

    document['_id'] = doc_id # document dictionary does not have id, add it
    ws = server.database.get(workspace)
    couchdb_conn = ws.couchdb
    # A non-empty '_rev' means the client is updating an existing revision.
    is_update_request = bool(document.get('_rev', False))

    try:
        response = couchdb_conn.save_doc(document)
    except RequestFailed as e:
        # CouchDB replied with an error body; forward its payload and status.
        response = flask.jsonify(json.loads(e.msg))
        response.status_code = e.status_int
        return response
    except ResourceError as e:
        # NOTE(review): e.message is Python 2 only — confirm runtime version.
        response = flask.jsonify({'error': e.message})
        response.status_code = e.status_int
        return response

    if response.get('ok', False):
        # Keep the relational mirror in sync with what CouchDB accepted.
        doc_importer = server.database.DocumentImporter(ws.connector)
        if is_update_request:
            doc_importer.update_entity_from_doc(document)
        else:
            doc_importer.add_entity_from_doc(document)

    return flask.jsonify(response)
60 | ||
@app.route('/ws/<workspace>/doc/<doc_id>', methods=['DELETE'])
def delete_document(workspace, doc_id):
    """Delete a CouchDB document from *workspace* (revision taken from the
    'rev' query parameter), then remove the mirrored entity from the local
    database.

    Returns the CouchDB response as JSON; forwards CouchDB's own status
    code on failures.
    """
    validate_workspace(workspace)

    ws = server.database.get(workspace)
    couchdb_conn = ws.couchdb
    # CouchDB requires the current revision to delete a document.
    doc_rev = flask.request.args.get('rev', '')

    try:
        response = couchdb_conn.delete_doc({'_id': doc_id, '_rev': doc_rev})

    except RequestFailed as e:
        # CouchDB replied with an error body; forward its payload and status.
        response = flask.jsonify(json.loads(e.msg))
        response.status_code = e.status_int
        return response

    except ResourceError as e:
        # NOTE(review): e.message is Python 2 only — confirm runtime version.
        response = flask.jsonify({'error': e.message})
        response.status_code = e.status_int
        return response

    if response.get('ok', False):
        # Keep the relational mirror in sync with the deletion.
        doc_importer = server.database.DocumentImporter(ws.connector)
        doc_importer.delete_entity_from_doc_id(doc_id)

    return flask.jsonify(response)
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | import flask | |
5 | ||
6 | from server.app import app | |
7 | from server.utils.logger import get_logger | |
8 | from server.utils.web import gzipped, validate_workspace,\ | |
9 | get_integer_parameter, filter_request_args, get_mandatory_integer_parameter | |
10 | ||
11 | from server.dao.interface import InterfaceDAO | |
12 | ||
13 | ||
# @app.route must be the outermost (top) decorator: decorators apply
# bottom-up, so with @gzipped on top Flask registered the un-gzipped
# function and the compression wrapper never ran.
@app.route('/ws/<workspace>/interfaces', methods=['GET'])
@gzipped
def list_interfaces(workspace=None):
    """Return the interfaces of *workspace* as JSON,
    filtered by the request's query arguments."""
    validate_workspace(workspace)
    get_logger(__name__).debug(
        "Request parameters: {!r}".format(flask.request.args))

    dao = InterfaceDAO(workspace)
    result = dao.list(interface_filter=flask.request.args)

    return flask.jsonify(result)
25 |
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | from flask import request, jsonify, abort | |
5 | ||
6 | from server.app import app | |
7 | ||
8 | from server.utils.logger import get_logger | |
9 | from server.utils.web import gzipped, validate_workspace, filter_request_args | |
10 | ||
11 | from server.dao.note import NoteDAO | |
12 | ||
13 | ||
# @app.route must be the outermost (top) decorator: decorators apply
# bottom-up, so with @gzipped on top Flask registered the un-gzipped
# function and the compression wrapper never ran. count_notes below
# already uses the correct order.
@app.route('/ws/<workspace>/notes', methods=['GET'])
@gzipped
def list_notes(workspace=None):
    """Return the notes of *workspace* as JSON,
    filtered by the request's query arguments."""
    validate_workspace(workspace)
    get_logger(__name__).debug(
        "Request parameters: {!r}".format(request.args))

    note_filter = filter_request_args()

    dao = NoteDAO(workspace)

    result = dao.list(note_filter=note_filter)

    return jsonify(result)
29 | ||
@app.route('/ws/<workspace>/notes/count', methods=['GET'])
@gzipped
def count_notes(workspace=None):
    """Return the total number of notes in *workspace* as JSON (400 when
    the count cannot be computed)."""
    validate_workspace(workspace)
    get_logger(__name__).debug(
        "Request parameters: {!r}".format(request.args))

    note_dao = NoteDAO(workspace)
    total = note_dao.count()
    if total is None:
        abort(400)

    return jsonify(total)
16 | 16 | get_logger(__name__).debug("Request parameters: {!r}"\ |
17 | 17 | .format(flask.request.args)) |
18 | 18 | |
19 | port = get_integer_parameter('port', default=None) | |
19 | services_dao = ServiceDAO(workspace) | |
20 | 20 | |
21 | services_dao = ServiceDAO(workspace) | |
22 | services_by_host = services_dao.list(port) | |
21 | services = services_dao.list(service_filter=flask.request.args) | |
23 | 22 | |
24 | result = { 'hosts': services_by_host } | |
25 | ||
26 | return flask.jsonify(result) | |
23 | return flask.jsonify(services) | |
27 | 24 | |
28 | 25 | @app.route('/ws/<workspace>/services/count', methods=['GET']) |
29 | 26 | @gzipped |
9 | 9 | from server.dao.service import ServiceDAO |
10 | 10 | from server.dao.interface import InterfaceDAO |
11 | 11 | from server.dao.note import NoteDAO |
12 | from server.utils.web import gzipped, validate_workspace | |
13 | from server.couchdb import list_workspaces_as_user | |
12 | from server.utils.web import gzipped, validate_workspace, get_basic_auth | |
13 | from server.couchdb import list_workspaces_as_user, get_workspace, get_auth_info | |
14 | 14 | |
15 | 15 | |
16 | 16 | @app.route('/ws', methods=['GET']) |
17 | 17 | @gzipped |
18 | 18 | def workspace_list(): |
19 | return flask.jsonify(list_workspaces_as_user(flask.request.cookies)) | |
19 | return flask.jsonify( | |
20 | list_workspaces_as_user( | |
21 | flask.request.cookies, get_basic_auth())) | |
20 | 22 | |
21 | 23 | @app.route('/ws/<workspace>/summary', methods=['GET']) |
22 | 24 | @gzipped |
43 | 45 | |
44 | 46 | return flask.jsonify(response) |
45 | 47 | |
@app.route('/ws/<workspace>', methods=['GET'])
@gzipped
def workspace(workspace):
    """Return the workspace document for *workspace* as JSON.

    Responds 404 when the requesting user cannot see the workspace
    (previously this dereferenced None and crashed with a 500).
    """
    validate_workspace(workspace)
    workspaces = list_workspaces_as_user(
        flask.request.cookies, get_basic_auth())['workspaces']
    if workspace not in workspaces:
        flask.abort(404)
    ws = get_workspace(workspace, flask.request.cookies, get_basic_auth())
    # TODO: When the workspace DAO is ready, we have to remove these fallbacks
    if not ws.get('fdate'):
        ws['fdate'] = ws.get('duration').get('end')
    if not ws.get('description'):
        ws['description'] = ''
    return flask.jsonify(ws)
8 | 8 | from server.utils.logger import LOGGING_HANDLERS |
9 | 9 | |
10 | 10 | |
11 | def create_app(): | |
12 | app = flask.Flask(__name__) | |
13 | configure(app) | |
14 | return app | |
11 | app = flask.Flask(__name__) | |
15 | 12 | |
16 | def configure(app): | |
13 | def setup(): | |
17 | 14 | app.debug = server.config.is_debug_mode() |
18 | 15 | minify_json_output(app) |
19 | 16 | |
33 | 30 | app.json_encoder = MiniJSONEncoder |
34 | 31 | app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False |
35 | 32 | |
36 | app = create_app() | |
37 | ||
38 | 33 | # Load APIs |
39 | 34 | import server.api |
40 | 35 | import server.modules.info |
14 | 14 | LOGGING_LEVEL = INFO |
15 | 15 | |
16 | 16 | FARADAY_BASE = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) |
17 | FARADAY_SERVER_DBS_DIR = os.path.join(FARADAY_BASE, 'server/workspaces') | |
17 | 18 | FARADAY_SERVER_PID_FILE = os.path.join(FARADAY_BASE, 'server/.faraday-server.pid') |
18 | 19 | REQUIREMENTS_FILE = os.path.join(FARADAY_BASE, 'requirements_server.txt') |
19 | 20 | DEFAULT_CONFIG_FILE = os.path.join(FARADAY_BASE, 'server/default.ini') |
20 | 21 | VERSION_FILE = os.path.join(FARADAY_BASE, CONSTANTS.CONST_VERSION_FILE) |
21 | 22 | WEB_CONFIG_FILE = os.path.join(FARADAY_BASE, 'server/www/config/config.json') |
23 | REPORTS_VIEWS_DIR = os.path.join(FARADAY_BASE, 'views/reports') | |
22 | 24 | LOCAL_CONFIG_FILE = os.path.expanduser( |
23 | 25 | os.path.join(CONSTANTS.CONST_FARADAY_HOME_PATH, 'config/server.ini')) |
24 | 26 |
11 | 11 | from couchdbkit import Server |
12 | 12 | from couchdbkit.exceptions import ResourceNotFound |
13 | 13 | from couchdbkit.resource import CouchdbResource |
14 | from managers.all import ViewsManager | |
14 | 15 | from server import config |
15 | 16 | |
16 | 17 | |
28 | 29 | |
29 | 30 | def __authenticate(self): |
30 | 31 | user, passwd = config.couchdb.user, config.couchdb.password |
31 | if (all((user, passwd))): | |
32 | if all((user, passwd)): | |
32 | 33 | auth = restkit.BasicAuth(user, passwd) |
33 | 34 | self.__auth_resource = CouchdbResource(filters=[auth]) |
34 | 35 | else: |
43 | 44 | def get_workspace_handler(self, ws_name): |
44 | 45 | return self.__server.get_db(ws_name) |
45 | 46 | |
47 | def get_or_create_db(self, ws_name): | |
48 | return self.__server.get_or_create_db(ws_name) | |
49 | ||
46 | 50 | |
47 | 51 | class Workspace(object): |
48 | def __init__(self, ws_name): | |
49 | self.__server = CouchDBServer() | |
52 | def __init__(self, ws_name, couchdb_server_conn=None): | |
50 | 53 | self.__ws_name = ws_name |
54 | self.__server = couchdb_server_conn or CouchDBServer() | |
55 | self.__changes_monitor_thread = None | |
51 | 56 | self.__get_workspace() |
52 | self.__changes_monitor_thread = None | |
53 | 57 | |
54 | 58 | def __get_workspace(self): |
55 | 59 | self.__workspace = self.__server.get_workspace_handler(self.__ws_name) |
101 | 105 | def save_doc(self, document): |
102 | 106 | return self.__workspace.save_doc(document) |
103 | 107 | |
108 | def delete_doc(self, document): | |
109 | return self.__workspace.delete_doc(document) | |
110 | ||
104 | 111 | def create_doc(self, doc_content): |
105 | 112 | # Remember to add "_id" in the doc if you want |
106 | 113 | # to specify an arbitrary id |
146 | 153 | stream=True, auth=get_auth_info()) |
147 | 154 | |
148 | 155 | for raw_line in self.__response.iter_lines(): |
149 | line = self.__sanitize(raw_line) | |
156 | if self.__stop: | |
157 | break | |
158 | ||
159 | line = self.__sanitize(raw_line) | |
150 | 160 | if not line: |
151 | 161 | continue |
152 | 162 | |
157 | 167 | yield change |
158 | 168 | |
159 | 169 | except Exception, e: |
170 | # On workspace deletion, requests will probably | |
171 | # fail to perform the request or the connection | |
172 | # will be closed. Check if this was intentional | |
173 | # by checking on the __stop flag. | |
174 | if self.__stop: | |
175 | break | |
176 | ||
160 | 177 | import traceback |
161 | 178 | logger.debug(traceback.format_exc()) |
162 | 179 | |
163 | 180 | # Close everything but keep retrying |
164 | 181 | self.stop() |
165 | self.__stop = True | |
166 | ||
167 | logger.warning(u"Lost connection to CouchDB. Retrying in 5 seconds...") | |
168 | time.sleep(5) | |
182 | self.__stop = False | |
183 | ||
184 | logger.warning(u"Lost connection to CouchDB. Retrying in 3 seconds...") | |
185 | time.sleep(3) | |
169 | 186 | logger.info(u"Retrying...") |
170 | 187 | |
171 | 188 | def __sanitize(self, raw_line): |
181 | 198 | return None |
182 | 199 | |
183 | 200 | # Modify line cases |
184 | if line.startswith('"last_seq"'): | |
201 | if line.startswith('"last_seq"'): | |
185 | 202 | line = '{' + line |
186 | 203 | if line.endswith(","): |
187 | 204 | line = line[:-1] |
196 | 213 | return None |
197 | 214 | |
198 | 215 | def stop(self): |
216 | self.__stop = True | |
199 | 217 | if self.__response is not None: |
200 | 218 | self.__response.close() |
201 | 219 | self.__response = None |
202 | self.__stop = True | |
203 | 220 | |
204 | 221 | class Change(object): |
222 | REQUIRED_FIELDS = ('doc', 'changes', 'id', 'seq') | |
223 | ||
224 | @staticmethod | |
225 | def validate(change_doc): | |
226 | return all(map(lambda prop: prop in change_doc, Change.REQUIRED_FIELDS)) | |
227 | ||
205 | 228 | def __init__(self, change_doc): |
206 | 229 | self.change_doc = change_doc |
207 | 230 | self.doc = change_doc.get('doc') |
213 | 236 | self.updated = (int(self.revision.split('-')[0]) > 1) |
214 | 237 | self.added = (not self.deleted and not self.updated) |
215 | 238 | |
239 | ||
216 | 240 | class DBChange(object): |
241 | @staticmethod | |
242 | def validate(change_doc): | |
243 | return True | |
244 | ||
217 | 245 | def __init__(self, change_doc): |
218 | 246 | self.change_doc = change_doc |
219 | 247 | self.type = change_doc.get('type', None) |
232 | 260 | def run(self): |
233 | 261 | for change_doc in self.__stream: |
234 | 262 | try: |
235 | self.__changes_callback(self.CHANGE_CLS(change_doc)) | |
263 | if self.CHANGE_CLS.validate(change_doc): | |
264 | self.__changes_callback(self.CHANGE_CLS(change_doc)) | |
265 | else: | |
266 | logger.debug(u'Ignoring change: {}'.format(change_doc)) | |
267 | ||
236 | 268 | except Exception, e: |
237 | 269 | import traceback |
238 | 270 | logger.debug(traceback.format_exc()) |
248 | 280 | elif change_doc.get('reason') == 'no_db_file': |
249 | 281 | self.__stream.stop() |
250 | 282 | break |
251 | ||
283 | ||
252 | 284 | def stop(self): |
253 | 285 | self.__stream.stop() |
254 | 286 | |
273 | 305 | def is_usable_workspace(ws_name): |
274 | 306 | return not ws_name.startswith('_') and ws_name not in config.WS_BLACKLIST |
275 | 307 | |
276 | def list_workspaces_as_user(cookies): | |
308 | def list_workspaces_as_user(cookies, credentials=None): | |
277 | 309 | all_dbs_url = get_couchdb_url() + '/_all_dbs' |
278 | response = requests.get(all_dbs_url, verify=False, cookies=cookies) | |
310 | response = requests.get(all_dbs_url, verify=False, cookies=cookies, auth=credentials) | |
279 | 311 | if response.status_code != requests.codes.ok: |
280 | 312 | raise Exception("Couldn't obtain workspaces list") |
281 | 313 | |
282 | workspaces = filter(lambda ws_name: is_usable_workspace(ws_name) and has_permissions_for(ws_name, cookies),\ | |
283 | response.json()) | |
284 | ||
314 | def is_workspace_accessible_for_user(ws_name): | |
315 | return is_usable_workspace(ws_name) and\ | |
316 | has_permissions_for(ws_name, cookies, credentials) | |
317 | ||
318 | workspaces = filter(is_workspace_accessible_for_user, response.json()) | |
285 | 319 | return { 'workspaces': workspaces } |
286 | 320 | |
287 | def has_permissions_for(workspace_name, cookies=None, credentials=None): | |
321 | def server_has_access_to(ws_name): | |
322 | return has_permissions_for(ws_name, credentials=get_auth_info()) | |
323 | ||
324 | def get_workspace(workspace_name, cookies, credentials): | |
325 | workspace = _get_workspace_doc(workspace_name, cookies, credentials).json() | |
326 | ws_info_url = get_couchdb_url() + ('/%s' % (workspace_name)) | |
327 | response = requests.get(ws_info_url, verify=False, cookies=cookies, auth=credentials) | |
328 | workspace['last_seq'] = response.json()['update_seq'] | |
329 | return workspace | |
330 | ||
331 | def _get_workspace_doc(workspace_name, cookies, credentials): | |
288 | 332 | # TODO: SANITIZE WORKSPACE NAME IF NECESSARY. POSSIBLE SECURITY BUG |
289 | 333 | ws_url = get_couchdb_url() + ('/%s/%s' % (workspace_name, workspace_name)) |
290 | response = requests.get(ws_url, verify=False, cookies=cookies, auth=credentials) | |
291 | ||
334 | return requests.get(ws_url, verify=False, cookies=cookies, auth=credentials) | |
335 | ||
336 | def has_permissions_for(workspace_name, cookies=None, credentials=None): | |
337 | response = _get_workspace_doc(workspace_name, cookies, credentials) | |
292 | 338 | # Even if the document doesn't exist, CouchDB will |
293 | 339 | # respond 401 if it doesn't have access to it |
294 | 340 | return (response.status_code != requests.codes.unauthorized) |
301 | 347 | monitor_thread.start() |
302 | 348 | return monitor_thread |
303 | 349 | |
def push_reports():
    """Upload the reports design views into the 'reports' CouchDB database.

    Best-effort: failures (e.g. insufficient CouchDB privileges) are logged
    and swallowed so server startup is not interrupted.
    """
    vmanager = ViewsManager()
    try:
        logger.debug(u'Pushing Reports DB into CouchDB')
        couchdb_server = CouchDBServer()
        workspace = couchdb_server.get_or_create_db('reports')
        vmanager.addView(config.REPORTS_VIEWS_DIR, workspace)
    except Exception:
        # A bare 'except:' would also trap KeyboardInterrupt/SystemExit;
        # catch Exception so deliberate shutdowns still propagate.
        import traceback
        logger.debug(traceback.format_exc())
        logger.warning("Reports database couldn't be uploaded. You need to be an admin to do it")
361 |
14 | 14 | def __init__(self, workspace): |
15 | 15 | self._logger = server.utils.logger.get_logger(self) |
16 | 16 | ws_instance = server.database.get(workspace) |
17 | self._session = ws_instance.database.session | |
17 | self._session = ws_instance.session | |
18 | 18 | self._couchdb = ws_instance.couchdb |
19 | 19 | |
20 | 20 | def get_all(self): |
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | from sqlalchemy.orm.query import Bundle | |
5 | ||
6 | from server.dao.base import FaradayDAO | |
7 | from server.models import Command, EntityMetadata | |
8 | from server.utils.database import apply_search_filter | |
9 | ||
class CommandDAO(FaradayDAO):
    """Data-access object for Command entities of a workspace."""
    MAPPED_ENTITY = Command
    COLUMNS_MAP = {
        'couchid': [EntityMetadata.couchdb_id]
    }
    STRICT_FILTERING = ["couchid"]

    def list(self, search=None, command_filter=None):
        """Return {'commands': [...]} matching *search*/*command_filter*.

        *command_filter* defaults to no filtering; the previous mutable
        default argument ({}) was shared across calls.
        """
        results = self.__query_database(search, command_filter or {})

        rows = [self.__get_command_data(result.command) for result in results]

        return {'commands': rows}

    def __query_database(self, search=None, command_filter=None):
        """Build and run the filtered command query; returns raw rows."""
        command_filter = command_filter or {}
        command_bundle = Bundle('command',
                                Command.itime,
                                Command.ip,
                                Command.hostname,
                                Command.command,
                                Command.user,
                                Command.workspace,
                                Command.duration,
                                Command.params,
                                EntityMetadata.couchdb_id)

        query = self._session.query(command_bundle)\
            .outerjoin(EntityMetadata, EntityMetadata.id == Command.entity_metadata_id)

        # Apply filtering options to the query
        query = apply_search_filter(query, self.COLUMNS_MAP, None, command_filter, self.STRICT_FILTERING)

        return query.all()

    def __get_command_data(self, command):
        """Shape a command row into the CouchDB-style dict the UI expects."""
        return {
            'id': command.couchdb_id,
            'key': command.couchdb_id,
            'value': {
                "_id": command.couchdb_id,
                "itime": command.itime,
                "ip": command.ip,
                "hostname": command.hostname,
                "command": command.command,
                "user": command.user,
                "workspace": command.workspace,
                "duration": command.duration,
                "params": command.params}}
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | from sqlalchemy.orm.query import Bundle | |
5 | ||
6 | from server.dao.base import FaradayDAO | |
7 | from server.models import Credential, EntityMetadata | |
8 | from server.utils.database import apply_search_filter | |
9 | ||
class CredentialDAO(FaradayDAO):
    """Data-access object for Credential entities of a workspace."""

    MAPPED_ENTITY = Credential

    COLUMNS_MAP = {
        'couchid': [EntityMetadata.couchdb_id],
        'username': [Credential.username],
        'password': [Credential.password],
    }

    STRICT_FILTERING = ["couchid"]

    def list(self, search=None, cred_filter=None):
        """Return {'rows': [...]} matching *search*/*cred_filter*.

        *cred_filter* defaults to no filtering; the previous mutable
        default argument ({}) was shared across calls.
        """
        results = self.__query_database(search, cred_filter or {})

        rows = [self.__get_cred_data(result.cred) for result in results]

        return {'rows': rows}

    def __query_database(self, search=None, cred_filter=None):
        """Build and run the filtered credential query; returns raw rows."""
        cred_filter = cred_filter or {}
        creds_bundle = Bundle('cred', Credential.username, Credential.password, Credential.name,
                              Credential.description, Credential.owned, EntityMetadata.couchdb_id,
                              EntityMetadata.revision, EntityMetadata.update_time, EntityMetadata.update_user,
                              EntityMetadata.update_action, EntityMetadata.creator, EntityMetadata.create_time,
                              EntityMetadata.update_controller_action, EntityMetadata.owner)

        query = self._session.query(creds_bundle)\
            .outerjoin(EntityMetadata, EntityMetadata.id == Credential.entity_metadata_id)

        # Apply filtering options to the query
        query = apply_search_filter(query, self.COLUMNS_MAP, search, cred_filter, self.STRICT_FILTERING)

        return query.all()

    def __get_cred_data(self, cred):
        """Shape a credential row into the CouchDB-style dict the UI expects."""
        return {
            'id': cred.couchdb_id,
            'key': cred.couchdb_id,
            'value': {
                '_id': cred.couchdb_id,
                'username': cred.username,
                'password': cred.password,
                'owner': cred.owner,
                'owned': cred.owned,
                'description': cred.description,
                'name': cred.name,
                'metadata': {
                    'update_time': cred.update_time,
                    'update_user': cred.update_user,
                    'update_action': cred.update_action,
                    'creator': cred.creator,
                    'create_time': cred.create_time,
                    'update_controller_action': cred.update_controller_action,
                    'owner': cred.owner
                },
                'couchid': cred.couchdb_id}}
13 | 13 | class HostDAO(FaradayDAO): |
14 | 14 | MAPPED_ENTITY = Host |
15 | 15 | COLUMNS_MAP = { |
16 | "couchid": [EntityMetadata.couchdb_id], | |
16 | 17 | "name": [Host.name], |
17 | 18 | "service": [Service.name], |
18 | 19 | "services": ["open_services_count"], |
20 | 21 | "os": [Host.os], |
21 | 22 | "owned": [Host.owned], |
22 | 23 | } |
23 | STRICT_FILTERING = ["service"] | |
24 | STRICT_FILTERING = ["service", "couchid"] | |
24 | 25 | |
25 | 26 | def list(self, search=None, page=0, page_size=0, order_by=None, order_dir=None, host_filter={}): |
26 | 27 | results, count = self.__query_database(search, page, page_size, order_by, order_dir, host_filter) |
35 | 36 | return result |
36 | 37 | |
37 | 38 | def __query_database(self, search=None, page=0, page_size=0, order_by=None, order_dir=None, host_filter={}): |
38 | host_bundle = Bundle('host', Host.name, Host.os, Host.description, Host.owned, EntityMetadata.couchdb_id,\ | |
39 | host_bundle = Bundle('host', Host.id, Host.name, Host.os, Host.description, Host.owned,\ | |
40 | Host.default_gateway_ip, Host.default_gateway_mac, EntityMetadata.couchdb_id,\ | |
39 | 41 | EntityMetadata.revision, EntityMetadata.update_time, EntityMetadata.update_user,\ |
40 | 42 | EntityMetadata.update_action, EntityMetadata.creator, EntityMetadata.create_time,\ |
41 | EntityMetadata.update_controller_action,\ | |
43 | EntityMetadata.update_controller_action, EntityMetadata.owner, | |
44 | func.group_concat(distinct(Interface.id)).label('interfaces'),\ | |
42 | 45 | func.count(distinct(Vulnerability.id)).label('vuln_count'),\ |
43 | 46 | func.count(distinct(Service.id)).label('open_services_count')) |
44 | 47 | |
45 | 48 | query = self._session.query(host_bundle)\ |
46 | 49 | .outerjoin(EntityMetadata, EntityMetadata.id == Host.entity_metadata_id)\ |
50 | .outerjoin(Interface, Host.id == Interface.host_id)\ | |
47 | 51 | .outerjoin(Vulnerability, Host.id == Vulnerability.host_id)\ |
48 | .outerjoin(Service, (Host.id == Service.host_id) & (Service.status.in_(('open', 'running'))))\ | |
52 | .outerjoin(Service, (Host.id == Service.host_id) & (Service.status.in_(('open', 'running', 'opened'))))\ | |
49 | 53 | .group_by(Host.id) |
50 | 54 | |
51 | 55 | # Apply pagination, sorting and filtering options to the query |
64 | 68 | return { |
65 | 69 | 'id': host.couchdb_id, |
66 | 70 | 'key': host.couchdb_id, |
71 | '_id': host.id, | |
67 | 72 | 'value': { |
68 | 73 | '_id': host.couchdb_id, |
69 | 74 | '_rev': host.revision, |
70 | 75 | 'name': host.name, |
71 | 76 | 'os': host.os, |
72 | 77 | 'owned': host.owned, |
73 | 'owner': False, | |
78 | 'owner': host.owner, | |
74 | 79 | 'description': host.description, |
75 | 'default_gateway': None, | |
80 | 'default_gateway': [host.default_gateway_ip, host.default_gateway_mac], | |
76 | 81 | 'metadata': { |
77 | 82 | 'update_time': host.update_time, |
78 | 83 | 'update_user': host.update_user, |
80 | 85 | 'creator': host.creator, |
81 | 86 | 'create_time': host.create_time, |
82 | 87 | 'update_controller_action': host.update_controller_action, |
83 | 'owner': '' | |
88 | 'owner': host.owner | |
84 | 89 | }, |
85 | 90 | 'vulns': host.vuln_count, |
86 | 'services': host.open_services_count }} | |
91 | 'services': host.open_services_count, | |
92 | 'interfaces': map(int, host.interfaces.split(',')) if host.interfaces else [] }} | |
87 | 93 | |
88 | 94 | def count(self, group_by=None): |
89 | 95 | total_count = self._session.query(func.count(Host.id)).scalar() |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | from server.utils.database import apply_search_filter | |
5 | from server.dao.base import FaradayDAO | |
6 | from server.models import Interface, EntityMetadata | |
7 | from sqlalchemy.orm.query import Bundle | |
4 | 8 | from sqlalchemy.sql import func |
5 | from server.dao.base import FaradayDAO | |
6 | from server.models import Interface | |
7 | ||
8 | 9 | |
class InterfaceDAO(FaradayDAO):
    """Data-access object for network Interface entities.

    Provides counting and filtered listing of interfaces, serialized in
    the CouchDB-compatible row format the web API expects.
    """

    MAPPED_ENTITY = Interface

    # Columns exposed for filtering; both are strict (exact-match) filters.
    COLUMNS_MAP = {
        "host": [Interface.host_id],
        "couchid": [EntityMetadata.couchdb_id],
    }
    STRICT_FILTERING = ["host", "couchid"]

    def count(self):
        """Return {'total_count': n} with the number of stored interfaces."""
        total_count = self._session.query(func.count(Interface.id)).scalar()
        return {'total_count': total_count}

    def list(self, interface_filter=None):
        """Return {'interfaces': [...]} matching *interface_filter*.

        interface_filter: optional dict of column -> value pairs; only keys
        declared in COLUMNS_MAP are honored (strict matching).
        """
        # None sentinel instead of a shared mutable {} default (same behavior,
        # avoids the mutable-default-argument pitfall).
        if interface_filter is None:
            interface_filter = {}

        # Bundle interface + metadata columns so a single query retrieves
        # everything needed to serialize each row.
        interface_bundle = Bundle('interface',
            Interface.id, Interface.name, Interface.description, Interface.mac,
            Interface.owned, Interface.hostnames, Interface.network_segment, Interface.ipv4_address,
            Interface.ipv4_gateway, Interface.ipv4_dns, Interface.ipv4_mask, Interface.ipv6_address,
            Interface.ipv6_gateway, Interface.ipv6_dns, Interface.ipv6_prefix, Interface.ports_filtered,
            Interface.ports_opened, Interface.ports_closed, Interface.host_id, EntityMetadata.couchdb_id,
            EntityMetadata.revision, EntityMetadata.update_time, EntityMetadata.update_user,
            EntityMetadata.update_action, EntityMetadata.creator, EntityMetadata.create_time,
            EntityMetadata.update_controller_action, EntityMetadata.owner)

        query = self._session.query(interface_bundle).\
            outerjoin(EntityMetadata, EntityMetadata.id == Interface.entity_metadata_id)

        query = apply_search_filter(query, self.COLUMNS_MAP, None, interface_filter, self.STRICT_FILTERING)

        raw_interfaces = query.all()
        interfaces = [self.__get_interface_data(r.interface) for r in raw_interfaces]
        return {'interfaces': interfaces}

    def __get_interface_data(self, interface):
        """Serialize one interface row into the CouchDB-style response dict."""
        return {
            'id': interface.couchdb_id,
            'key': interface.couchdb_id,
            '_id': interface.id,
            'value': {
                '_id': interface.couchdb_id,
                '_rev': interface.revision,
                'name': interface.name,
                'description': interface.description,
                'mac': interface.mac,
                'owned': interface.owned,
                'owner': interface.owner,
                # CSV-stored columns are exposed as lists; empty/None -> [].
                'hostnames': interface.hostnames.split(',') if interface.hostnames else [],
                'network_segment': interface.network_segment,
                'ipv4': {'address': interface.ipv4_address,
                         'gateway': interface.ipv4_gateway,
                         'DNS': interface.ipv4_dns.split(',') if interface.ipv4_dns else [],
                         'mask': interface.ipv4_mask},
                'ipv6': {'address': interface.ipv6_address,
                         'gateway': interface.ipv6_gateway,
                         'DNS': interface.ipv6_dns.split(',') if interface.ipv6_dns else [],
                         'prefix': interface.ipv6_prefix},
                'ports': {'filtered': interface.ports_filtered,
                          'opened': interface.ports_opened,
                          'closed': interface.ports_closed},
                'metadata': {
                    'update_time': interface.update_time,
                    'update_user': interface.update_user,
                    'update_action': interface.update_action,
                    'creator': interface.creator,
                    'create_time': interface.create_time,
                    'update_controller_action': interface.update_controller_action,
                    'owner': interface.owner
                },
                'host_id': interface.host_id}
        }
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | 4 | from sqlalchemy.sql import func |
5 | from sqlalchemy.orm.query import Bundle | |
6 | ||
5 | 7 | from server.dao.base import FaradayDAO |
6 | from server.models import Note | |
7 | ||
8 | from server.models import Note, EntityMetadata | |
9 | from server.utils.database import apply_search_filter | |
8 | 10 | |
9 | 11 | class NoteDAO(FaradayDAO): |
10 | 12 | MAPPED_ENTITY = Note |
13 | COLUMNS_MAP = { | |
14 | 'couchid': [EntityMetadata.couchdb_id], | |
15 | 'name': [Note.name], | |
16 | 'text': [Note.text], | |
17 | 'description': [Note.description], | |
18 | } | |
19 | STRICT_FILTERING = ["couchid"] | |
20 | ||
def list(self, search=None, note_filter=None):
    """Return {'rows': [...]} of notes matching *search* and *note_filter*.

    search: free-text term applied to the searchable columns.
    note_filter: optional dict of column -> value filters (COLUMNS_MAP keys).
    """
    # None sentinel instead of a shared mutable {} default (same behavior,
    # avoids the mutable-default-argument pitfall).
    if note_filter is None:
        note_filter = {}
    results = self.__query_database(search, note_filter)
    rows = [self.__get_note_data(result.note) for result in results]
    return {'rows': rows}
31 | ||
def __query_database(self, search=None, note_filter={}):
    """Fetch note rows (with their entity metadata) matching search/filter."""
    # Bundle every column needed for serialization so one query suffices.
    note_bundle = Bundle('note', Note.id, Note.name, Note.text, Note.description, Note.owned,
                         EntityMetadata.couchdb_id, EntityMetadata.revision,
                         EntityMetadata.update_time, EntityMetadata.update_user,
                         EntityMetadata.update_action, EntityMetadata.creator,
                         EntityMetadata.create_time, EntityMetadata.update_controller_action,
                         EntityMetadata.owner)

    query = self._session.query(note_bundle)\
        .outerjoin(EntityMetadata, EntityMetadata.id == Note.entity_metadata_id)

    # Apply the free-text search and the column filters to the query.
    query = apply_search_filter(query, self.COLUMNS_MAP, search, note_filter, self.STRICT_FILTERING)

    return query.all()
47 | ||
def __get_note_data(self, note):
    """Serialize one note row into the CouchDB-style response dict."""
    metadata = {
        'update_time': note.update_time,
        'update_user': note.update_user,
        'update_action': note.update_action,
        'creator': note.creator,
        'create_time': note.create_time,
        'update_controller_action': note.update_controller_action,
        'owner': note.owner,
    }
    return {
        'id': note.couchdb_id,
        'key': note.couchdb_id,
        '_id': note.id,
        'value': {
            '_id': note.couchdb_id,
            'name': note.name,
            'text': note.text,
            'description': note.description,
            'owned': note.owned,
            'owner': note.owner,
            'metadata': metadata,
            'couchid': note.couchdb_id,
        },
    }
11 | 70 | |
12 | 71 | def count(self): |
13 | 72 | total_count = self._session.query(func.count(Note.id)).scalar() |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | from sqlalchemy import distinct | |
4 | 5 | from sqlalchemy.sql import func |
6 | from sqlalchemy.orm.query import Bundle | |
7 | ||
5 | 8 | from server.dao.base import FaradayDAO |
6 | from server.models import Host, Interface, Service | |
7 | ||
9 | from server.models import Host, Interface, Service, EntityMetadata, Vulnerability | |
10 | from server.utils.database import apply_search_filter | |
8 | 11 | |
9 | 12 | class ServiceDAO(FaradayDAO): |
10 | 13 | MAPPED_ENTITY = Service |
11 | 14 | COLUMNS_MAP = { |
12 | "name": Service.name, | |
13 | "protocol": Service.protocol, | |
14 | "version": Service.version, | |
15 | "status": Service.status, | |
16 | "owned": Service.owned | |
15 | "interface": [Service.interface_id], | |
16 | "couchid": [EntityMetadata.couchdb_id], | |
17 | 'id': [Service.id], | |
18 | "name": [Service.name], | |
19 | "protocol": [Service.protocol], | |
20 | "version": [Service.version], | |
21 | "status": [Service.status], | |
22 | "owned": [Service.owned], | |
23 | "hostid": [Host.id] | |
17 | 24 | } |
25 | STRICT_FILTERING = ["couchid", "interface", 'id', 'hostid'] | |
18 | 26 | |
def list(self, service_filter=None):
    """Return {'services': [...]} matching *service_filter*.

    service_filter: optional dict of column -> value filters; only keys
    declared in COLUMNS_MAP are honored (STRICT_FILTERING ones exactly).
    """
    # None sentinel instead of a shared mutable {} default (same behavior,
    # avoids the mutable-default-argument pitfall).
    if service_filter is None:
        service_filter = {}

    # Bundle the service, its vuln count and its metadata so one grouped
    # query serves the whole listing.
    service_bundle = Bundle('service',
        Service.id, Service.name, Service.description, Service.protocol,
        Service.status, Service.ports, Service.version, Service.owned,
        Service.interface_id,
        func.count(distinct(Vulnerability.id)).label('vuln_count'), EntityMetadata.couchdb_id,
        EntityMetadata.revision, EntityMetadata.update_time, EntityMetadata.update_user,
        EntityMetadata.update_action, EntityMetadata.creator, EntityMetadata.create_time,
        EntityMetadata.update_controller_action, EntityMetadata.owner)

    # Joins to Interface/Host exist so COLUMNS_MAP filters like "hostid"
    # can resolve. group_by(Service.id) is applied exactly once (the
    # previous version chained it twice, duplicating the GROUP BY column).
    query = self._session.query(service_bundle).\
        outerjoin(EntityMetadata, EntityMetadata.id == Service.entity_metadata_id).\
        outerjoin(Vulnerability, Service.id == Vulnerability.service_id).\
        outerjoin(Interface, Interface.id == Service.interface_id).\
        outerjoin(Host, Host.id == Interface.host_id).\
        group_by(Service.id)

    query = apply_search_filter(query, self.COLUMNS_MAP, None, service_filter, self.STRICT_FILTERING)

    raw_services = query.all()
    services = [self.__get_service_data(r.service) for r in raw_services]
    return {'services': services}
def __get_service_data(self, service):
    """Serialize one service row into the CouchDB-style response dict."""
    # Parse the CSV 'ports' column defensively: the previous comprehension
    # called .split() before testing service.ports, so a NULL column raised
    # AttributeError. None/'' now both yield [].
    if service.ports:
        ports = [int(port) for port in service.ports.split(',')]
    else:
        ports = []
    return {
        'id': service.couchdb_id,
        'key': service.couchdb_id,
        '_id': service.id,
        'value': {
            '_id': service.couchdb_id,
            '_rev': service.revision,
            'name': service.name,
            'description': service.description,
            'metadata': {
                'update_time': service.update_time,
                'update_user': service.update_user,
                'update_action': service.update_action,
                'creator': service.creator,
                'create_time': service.create_time,
                'update_controller_action': service.update_controller_action,
                'owner': service.owner
            },
            'protocol': service.protocol,
            'status': service.status,
            'ports': ports,
            'version': service.version,
            'owned': service.owned,
            'owner': service.owner
        },
        'vulns': service.vuln_count,
    }
49 | 79 | |
50 | 80 | def count(self, group_by=None): |
51 | 81 | total_count = self._session.query(func.count(Service.id)).scalar() |
58 | 88 | if group_by not in ServiceDAO.COLUMNS_MAP: |
59 | 89 | return None |
60 | 90 | |
61 | col = ServiceDAO.COLUMNS_MAP.get(group_by) | |
91 | col = ServiceDAO.COLUMNS_MAP.get(group_by)[0] | |
62 | 92 | query = self._session.query(col, func.count())\ |
63 | 93 | .filter(Service.status.in_(('open', 'running')))\ |
64 | 94 | .group_by(col) |
15 | 15 | class VulnerabilityDAO(FaradayDAO): |
16 | 16 | MAPPED_ENTITY = Vulnerability |
17 | 17 | COLUMNS_MAP = { |
18 | "couchid": [EntityMetadata.couchdb_id], | |
19 | "id": [Vulnerability.id], | |
18 | 20 | "date": [EntityMetadata.create_time], # TODO: fix search for this field |
19 | 21 | "confirmed": [Vulnerability.confirmed], |
20 | 22 | "name": [Vulnerability.name], |
42 | 44 | "pname": [Vulnerability.pname], |
43 | 45 | "query": [Vulnerability.query], |
44 | 46 | "response": [Vulnerability.response], |
47 | "hostid": [Host.id], | |
48 | "serviceid": [Service.id], | |
49 | "interfaceid": [Interface.id], | |
45 | 50 | "web": [], |
46 | 51 | "issuetracker": [] |
47 | 52 | } |
48 | ||
49 | STRICT_FILTERING = ["type", "service"] | |
53 | ||
54 | STRICT_FILTERING = ["type", "service", "couchid", "hostid", "serviceid", 'interfaceid', 'id'] | |
50 | 55 | |
51 | 56 | def list(self, search=None, page=0, page_size=0, order_by=None, order_dir=None, vuln_filter={}): |
52 | 57 | results, count = self.__query_database(search, page, page_size, order_by, order_dir, vuln_filter) |
63 | 68 | # Instead of using SQLAlchemy ORM facilities to fetch rows, we bundle involved columns for |
64 | 69 | # organizational and MAINLY performance reasons. Doing it this way, we improve retrieving |
65 | 70 | # times from large workspaces almost 2x. |
66 | vuln_bundle = Bundle('vuln', Vulnerability.name.label('v_name'), Vulnerability.confirmed, Vulnerability.data,\ | |
71 | vuln_bundle = Bundle('vuln', Vulnerability.id.label('server_id'),Vulnerability.name.label('v_name'),\ | |
72 | Vulnerability.confirmed, Vulnerability.data,\ | |
67 | 73 | Vulnerability.description, Vulnerability.easeofresolution, Vulnerability.impact_accountability,\ |
68 | 74 | Vulnerability.impact_availability, Vulnerability.impact_confidentiality, Vulnerability.impact_integrity,\ |
69 | 75 | Vulnerability.refs, Vulnerability.resolution, Vulnerability.severity, Vulnerability.owned,\ |
72 | 78 | EntityMetadata.couchdb_id, EntityMetadata.revision, EntityMetadata.create_time, EntityMetadata.creator,\ |
73 | 79 | EntityMetadata.owner, EntityMetadata.update_action, EntityMetadata.update_controller_action,\ |
74 | 80 | EntityMetadata.update_time, EntityMetadata.update_user, EntityMetadata.document_type, Vulnerability.attachments) |
75 | service_bundle = Bundle('service', Service.name.label('s_name'), Service.ports, Service.protocol) | |
81 | service_bundle = Bundle('service', Service.name.label('s_name'), Service.ports, Service.protocol, Service.id) | |
76 | 82 | host_bundle = Bundle('host', Host.name) |
77 | 83 | |
78 | 84 | # IMPORTANT: OUTER JOINS on those tables is IMPERATIVE. Changing them could result in loss of |
131 | 137 | return { |
132 | 138 | 'id': vuln.couchdb_id, |
133 | 139 | 'key': vuln.couchdb_id, |
140 | '_id': vuln.server_id, | |
134 | 141 | 'value': { |
135 | 142 | '_id': vuln.couchdb_id, |
136 | 143 | '_rev': vuln.revision, |
137 | 144 | 'confirmed': vuln.confirmed, |
138 | 145 | 'data': vuln.data, |
139 | 146 | 'desc': vuln.description, |
147 | 'description': vuln.description, | |
140 | 148 | 'easeofresolution': vuln.easeofresolution, |
141 | 149 | 'impact': { |
142 | 150 | 'accountability': vuln.impact_accountability, |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | import os, sys | |
4 | import os, sys, re | |
5 | 5 | import atexit |
6 | 6 | import logging |
7 | 7 | import threading |
8 | 8 | import server.models |
9 | 9 | import server.config |
10 | 10 | import server.couchdb |
11 | import server.importer | |
11 | 12 | import server.utils.logger |
12 | 13 | |
13 | 14 | from sqlalchemy import create_engine |
15 | from sqlalchemy.exc import IntegrityError | |
14 | 16 | from sqlalchemy.orm import scoped_session, sessionmaker |
15 | 17 | from sqlalchemy.orm.exc import MultipleResultsFound |
16 | 18 | from restkit.errors import RequestError, Unauthorized |
17 | 19 | |
18 | 20 | logger = server.utils.logger.get_logger(__name__) |
19 | workspace = {} | |
20 | ||
21 | ||
22 | class WorkspaceDatabase(object): | |
23 | LAST_SEQ_CONFIG = 'last_seq' | |
24 | MIGRATION_SUCCESS = 'migration' | |
25 | SCHEMA_VERSION = 'version' | |
26 | ||
27 | def __init__(self, name): | |
28 | self.__workspace = name | |
29 | ||
30 | self.database = Database(self.__workspace) | |
31 | self.couchdb = server.couchdb.Workspace(self.__workspace) | |
32 | ||
33 | self.__setup_database_synchronization() | |
34 | self.__open_or_create_database() | |
35 | self.__start_database_synchronization() | |
36 | ||
37 | def __open_or_create_database(self): | |
38 | if not self.database.exists(): | |
39 | self.create_database() | |
21 | ||
22 | ||
# Module-level singleton Manager; populated by initialize() at startup.
_db_manager = None


def initialize():
    """Create the global database Manager (call once at server startup)."""
    global _db_manager
    _db_manager = Manager()


def is_valid_workspace(workspace_name):
    """Return True when *workspace_name* is a currently-registered workspace."""
    return _db_manager.is_valid_workspace(workspace_name)


def get(workspace_name):
    """Return the Workspace registered under *workspace_name*."""
    return _db_manager.get_workspace(workspace_name)


def teardown_context():
    """ This is called by Flask to cleanup sessions created in the context of a request """
    _db_manager.close_sessions()
39 | ||
class Manager(object):
    """Registry of per-workspace databases, kept in sync with CouchDB.

    On construction it opens every workspace database already present both
    on disk and in CouchDB, subscribes to CouchDB's database monitor so
    workspaces created/deleted at runtime are picked up, and registers an
    atexit hook that closes all open databases on shutdown.
    """

    def __init__(self):
        self.__workspaces = {}

        # Open all existent databases on workspaces path
        self.__init_sessions()

        # Start CouchDB database monitor
        server.couchdb.start_dbs_monitor(self.__process_workspace_change)

        # Register database closing to be executed when process goes down
        atexit.register(self.close_databases)

    def __init_sessions(self):
        # Only load databases that are already created on disk AND are
        # present on the current CouchDB instance.
        for database_name in self.__list_databases() & self.__list_workspaces():
            self.__init_workspace(database_name)

    def __list_databases(self):
        """Return the set of valid database names found in the DBs directory."""
        dbs_dir = server.config.FARADAY_SERVER_DBS_DIR

        def is_a_valid_database(filename):
            # Must be a regular file whose name matches the workspace
            # naming rules with a .db extension.
            return os.path.isfile(os.path.join(dbs_dir, filename)) and \
                bool(re.match('^[a-z][a-z0-9_$()+/-]*\\.db$', filename))

        # Strip the .db extension so names are comparable with CouchDB's.
        return set(os.path.splitext(filename)[0]
                   for filename in os.listdir(dbs_dir)
                   if is_a_valid_database(filename))

    def __list_workspaces(self):
        """Return the set of workspace names known to CouchDB."""
        return set(server.couchdb.CouchDBServer().list_workspaces())

    def __init_workspace(self, ws_name, db_conn=None):
        # Idempotent: an already-registered workspace is left untouched.
        if ws_name not in self.__workspaces:
            new_workspace = Workspace(ws_name, db_conn=db_conn)
            new_workspace.start_sync_job()
            self.__workspaces[ws_name] = new_workspace

    def get_workspace(self, ws_name):
        """Return the Workspace for *ws_name* or raise WorkspaceNotFound."""
        try:
            return self.__workspaces[ws_name]
        except KeyError:
            raise WorkspaceNotFound(ws_name)

    def __process_workspace_change(self, change):
        # Callback fired by the CouchDB databases monitor.
        if change.created:
            logger.info(u'Workspace {} was created'.format(change.db_name))
            self.__process_new_workspace(change.db_name)
        elif change.deleted:
            logger.info(u'Workspace {} was deleted'.format(change.db_name))
            self.__process_delete_workspace(change.db_name)

    def __process_new_workspace(self, ws_name):
        if ws_name in self.__workspaces:
            logger.info(u"Workspace {} already exists. Ignoring change.".format(ws_name))
        elif not server.couchdb.server_has_access_to(ws_name):
            logger.error(u"Unauthorized access to CouchDB for Workspace {}. Make sure faraday-server's"\
                " configuration file has CouchDB admin's credentials set".format(ws_name))
        else:
            self.__create_and_import_workspace(ws_name)

    def __create_and_import_workspace(self, ws_name):
        new_db_conn = Connector(ws_name)

        if new_db_conn.exists():
            # TODO(mrocha): if somehow this happens, then we should check for integrity and reimport
            # if necessary. After that we should add it into the databases dict
            logger.warning(u"Workspace {} already exists but wasn't registered at startup".format(ws_name))
        else:
            server.importer.import_workspace_into_database(ws_name, new_db_conn)

        self.__init_workspace(ws_name, db_conn=new_db_conn)

    def __process_delete_workspace(self, ws_name):
        if ws_name not in self.__workspaces:
            logger.info(u"Workspace {} doesn't exist. Ignoring change.".format(ws_name))
        else:
            logger.info(u"Deleting workspace {} from Faraday Server".format(ws_name))
            self.__delete_workspace(ws_name)

    def __delete_workspace(self, ws_name):
        self.get_workspace(ws_name).delete()
        del self.__workspaces[ws_name]

    def is_valid_workspace(self, ws_name):
        """Return True when *ws_name* is a currently-registered workspace."""
        return ws_name in self.__workspaces

    def close_sessions(self):
        """Close every workspace's database session (per-request cleanup)."""
        for workspace in self.__workspaces.values():
            workspace.close_session()

    def close_databases(self):
        """Fully close every workspace (sync job, CouchDB conn, session)."""
        for workspace in self.__workspaces.values():
            workspace.close()
142 | ||
143 | ||
class Workspace(object):
    """Facade over one workspace: its local DB Connector, its CouchDB
    counterpart, and the Synchronizer that keeps the two in sync."""

    def __init__(self, db_name, db_conn=None, couchdb_conn=None, couchdb_server_conn=None):
        # Pre-built connections may be injected; otherwise build them here.
        self.__db_conn = db_conn or Connector(db_name)
        self.__couchdb_conn = couchdb_conn or server.couchdb.Workspace(db_name, couchdb_server_conn)
        self.__sync = Synchronizer(self.__db_conn, self.__couchdb_conn)

    @property
    def connector(self):
        """The underlying local database Connector."""
        return self.__db_conn

    @property
    def session(self):
        # TODO(mrocha): should we check if session is None here???
        return self.__db_conn.session

    @property
    def couchdb(self):
        """The CouchDB-side workspace connection."""
        return self.__couchdb_conn

    def start_sync_job(self):
        """Begin mirroring CouchDB changes into the local database."""
        self.__sync.start()

    def wait_until_sync(self, timeout):
        """Block until synchronization catches up or *timeout* elapses."""
        self.__sync.wait_until_sync(timeout)

    def close_session(self):
        self.__db_conn.close()

    def close(self):
        # Stop the sync job first, then close both connections.
        self.__sync.close()
        self.__couchdb_conn.close()
        self.close_session()

    def delete(self):
        """Close everything and remove the local database file."""
        self.close()
        self.__db_conn.delete()
181 | ||
class Connector(object):
    """Owns the SQLite database file and SQLAlchemy session for one workspace."""

    def __init__(self, db_name):
        self.db_name = db_name

        self.__db_path = self.__get_db_path()
        self.__db_conf = Configuration(self)
        self.__setup_engine()

        # Ready to open now; sessions only exist for databases already on disk.
        self.session = self.__open_session() if self.exists() else None

    def __get_db_path(self):
        """Absolute path of this workspace's .db file."""
        return os.path.join(server.config.FARADAY_SERVER_DBS_DIR, '%s.db' % self.db_name)

    def __setup_engine(self):
        self.__engine = create_engine('sqlite:///%s' % self.__db_path)  # XXX: is this safe?
        # TODO(mrocha): review this piece of code. i'm not sure what this implicates
        # when having multiple databases open using the same model
        server.models.Base.metadata.bind = self.__engine

    def __open_session(self):
        return scoped_session(sessionmaker(autocommit=False,
                                           autoflush=False,
                                           bind=self.__engine))

    def create(self):
        """Create the schema on a fresh database file and open its session."""
        if self.exists():
            raise RuntimeError("Cannot create new database. Database {} already exists".format(self.db_name))

        server.models.Base.metadata.create_all(self.__engine)
        self.session = self.__open_session()
        self.__db_conf.setup_new_database()

    def exists(self):
        """True when the database file is present on disk."""
        return os.path.exists(self.__db_path)

    def close(self):
        # TODO(mrocha): Detail how this works
        if self.session is not None:
            self.session.remove()

    def delete(self):
        """Close the session and remove the database file."""
        self.close()
        os.remove(self.__db_path)

    def is_integrous(self):
        """True when the DB was fully migrated and matches the current schema."""
        if not self.__db_conf.was_migration_successful():
            logger.info(u"Workspace {} wasn't migrated successfully".format(self.db_name))
            return False

        elif self.__db_conf.get_schema_version() != server.models.SCHEMA_VERSION:
            logger.info(u"Workspace {} has an old schema version ({} != {})".format(
                self.db_name, self.__db_conf.get_schema_version(), server.models.SCHEMA_VERSION))
            return False

        return True
241 | ||
242 | ||
243 | class Synchronizer(object): | |
244 | def __init__(self, db_conn, couchdb_conn): | |
245 | self.__db_conn = db_conn | |
246 | self.__db_conf = Configuration(db_conn) | |
247 | self.__doc_importer = self.__build_doc_importer() | |
248 | self.__couchdb_conn = couchdb_conn | |
128 | 249 | self.__sync_seq_milestone = 0 |
129 | 250 | |
130 | 251 | # As far as we know, before the changes monitor is |
133 | 254 | self.__data_sync_event = threading.Event() |
134 | 255 | self.__data_sync_event.set() |
135 | 256 | |
136 | def __start_database_synchronization(self): | |
137 | self.__last_seq = self.get_last_seq() | |
138 | logger.debug(u'Workspace {} last update: {}'.format(self.__workspace, self.__last_seq)) | |
139 | self.couchdb.start_changes_monitor(self.__process_change, last_seq=self.__last_seq) | |
140 | ||
141 | # CHA, CHA, CHA, CHANGESSSS | |
142 | def __process_change(self, change): | |
143 | logger.debug(u'New change for {}: {}'.format(self.__workspace, change.change_doc)) | |
144 | ||
145 | if change.deleted: | |
146 | logger.debug(u'Doc {} was deleted'.format(change.doc_id)) | |
147 | self.__process_del(change) | |
148 | ||
149 | elif change.updated: | |
150 | logger.debug(u'Doc {} was updated'.format(change.doc_id)) | |
151 | self.__process_update(change) | |
152 | ||
153 | elif change.added: | |
154 | logger.debug(u'Doc {} was added'.format(change.doc_id)) | |
155 | self.__process_add(change) | |
156 | ||
157 | self.__update_last_seq(change) | |
158 | ||
159 | def __process_del(self, change): | |
160 | """ | |
161 | ISSUES: | |
162 | * Delete child entities. Have not found cases where this is a problem. So far, | |
163 | clients are deleting all CouchDBs documents properly, and if they don't, the | |
164 | DBs still are consistent. Maybe use SQLAlchemy's cascades if this become a | |
165 | problem. Status: Somewhat OK | |
166 | ||
167 | * Doc ID maps to multiple elements. This could happen since the ID is a hash | |
168 | based in a few entity's properties which can be replicated. Status: TODO | |
169 | """ | |
170 | entity = self.__get_modified_entity(change) | |
171 | if entity is not None: | |
172 | self.database.session.delete(entity) | |
173 | self.database.session.commit() | |
174 | logger.info(u'A {} ({}) was deleted'.format( | |
175 | entity.entity_metadata.document_type, entity.name)) | |
176 | ||
177 | def __process_update(self, change): | |
178 | """ | |
179 | ISSUES: | |
180 | * Updated relationships are not taken into account. Status: TODO | |
181 | """ | |
182 | entity = self.__get_modified_entity(change) | |
183 | if entity is not None: | |
184 | entity.update_from_document(change.doc) | |
185 | entity.entity_metadata.update_from_document(change.doc) | |
186 | self.database.session.commit() | |
187 | logger.info(u'A {} ({}) was updated'.format( | |
188 | entity.entity_metadata.document_type, entity.name)) | |
189 | ||
190 | def __get_modified_entity(self, change): | |
191 | try: | |
192 | metadata = self.database.session.query(server.models.EntityMetadata)\ | |
193 | .filter(server.models.EntityMetadata.couchdb_id == change.doc_id)\ | |
194 | .one_or_none() | |
195 | except MultipleResultsFound: | |
196 | logger.warning(u'Multiple entities were found for doc {}.'\ | |
197 | 'Ignoring change'.format(change.doc_id)) | |
198 | return None | |
199 | ||
200 | if metadata is not None: | |
201 | # Obtain the proper table on which to perform the entity operation | |
202 | entity_cls = server.models.FaradayEntity.get_entity_class_from_type( | |
203 | metadata.document_type) | |
204 | ||
205 | entity = self.database.session.query(entity_cls)\ | |
206 | .join(server.models.EntityMetadata)\ | |
207 | .filter(server.models.EntityMetadata.couchdb_id == change.doc_id)\ | |
208 | .one() | |
209 | ||
210 | return entity | |
211 | ||
212 | else: | |
213 | logger.info(u'Doc {} was not found in the database'.format(change.doc_id)) | |
214 | return None | |
215 | ||
216 | def __process_add(self, change): | |
217 | """ | |
218 | ISSUES: | |
219 | * Other entities related to this new document may be not already | |
220 | include into the database (ie: these documents are added on future | |
221 | changes) | |
222 | """ | |
223 | entity = server.models.FaradayEntity.parse(change.doc) | |
224 | if entity is not None: | |
225 | entity.add_relationships_from_db(self.database.session) | |
226 | self.database.session.add(entity) | |
227 | self.database.session.commit() | |
228 | logger.info(u'New {} ({}) was added'.format( | |
229 | entity.entity_metadata.document_type, entity.name)) | |
230 | ||
231 | def get_last_seq(self): | |
232 | config = self.get_config(WorkspaceDatabase.LAST_SEQ_CONFIG) | |
233 | if config is None: | |
234 | return 0 | |
235 | ||
236 | last_seq = int(config.value) | |
237 | return last_seq | |
238 | ||
239 | def was_migration_successful(self): | |
240 | config = self.get_config(WorkspaceDatabase.MIGRATION_SUCCESS) | |
241 | return (config is not None and config.value == 'true') | |
242 | ||
243 | def get_schema_version(self): | |
244 | config = self.get_config(WorkspaceDatabase.SCHEMA_VERSION) | |
245 | return config.value if config is not None else None | |
246 | ||
247 | def get_config(self, option): | |
248 | query = self.database.session.query(server.models.DatabaseMetadata) | |
249 | query = query.filter(server.models.DatabaseMetadata.option == option) | |
250 | ||
251 | try: | |
252 | result = query.one_or_none() | |
253 | except MultipleResultsFound: | |
254 | msg = u'Database {} should not have the option {} defined multiple times'.format(self.__workspace, option) | |
255 | logger.error(msg) | |
256 | raise RuntimeError(msg) | |
257 | ||
258 | return result | |
259 | ||
260 | def set_last_seq(self, last_seq): | |
261 | self.set_config(WorkspaceDatabase.LAST_SEQ_CONFIG, last_seq) | |
262 | self.__last_seq = last_seq | |
263 | # Set sync event when the database is updated relative | |
264 | # to the milestone set | |
265 | if self.__last_seq >= self.__sync_seq_milestone: | |
266 | self.__data_sync_event.set() | |
267 | ||
268 | def set_migration_status(self, was_successful): | |
269 | self.set_config(WorkspaceDatabase.MIGRATION_SUCCESS, 'true' if was_successful else 'false') | |
270 | ||
271 | def set_schema_version(self): | |
272 | self.set_config(WorkspaceDatabase.SCHEMA_VERSION, server.models.SCHEMA_VERSION) | |
273 | ||
274 | def set_config(self, option, value): | |
275 | config = self.get_config(option) | |
276 | if config is None: | |
277 | config = server.models.DatabaseMetadata(option=option) | |
278 | config.value = value | |
279 | ||
280 | self.database.session.merge(config) | |
281 | self.database.session.commit() | |
282 | ||
283 | def __update_last_seq(self, change): | |
284 | if change.seq is not None: | |
285 | self.set_last_seq(change.seq) | |
257 | def __build_doc_importer(self): | |
258 | def post_change_cbk(change): | |
259 | self.__last_seq = change.seq | |
260 | # Set sync event when the database is updated relative | |
261 | # to the milestone set | |
262 | if self.__last_seq >= self.__sync_seq_milestone: | |
263 | self.__data_sync_event.set() | |
264 | ||
265 | return DocumentImporter(self.__db_conn, post_processing_change_cbk=post_change_cbk) | |
266 | ||
267 | def start(self): | |
268 | self.__last_seq = self.__db_conf.get_last_seq() | |
269 | logger.debug(u'Workspace {} last update: {}'.format(self.__db_conn.db_name, self.__last_seq)) | |
270 | self.__couchdb_conn.start_changes_monitor(self.__doc_importer.process_change, last_seq=self.__last_seq) | |
286 | 271 | |
287 | 272 | def close(self): |
288 | self.database.close() | |
289 | ||
290 | def delete(self): | |
291 | self.database.close() | |
292 | self.database.delete() | |
273 | self.__couchdb_conn.close() | |
293 | 274 | |
294 | 275 | def wait_until_sync(self, timeout): |
295 | 276 | """ |
347 | 328 | Set a milestone from where we can check if data is synchronized |
348 | 329 | between the server and CouchDB |
349 | 330 | """ |
350 | self.__sync_seq_milestone = self.couchdb.get_last_seq() | |
331 | self.__sync_seq_milestone = self.__couchdb_conn.get_last_seq() | |
351 | 332 | # Clear event if last database seq version is outdated |
352 | 333 | # relative to CouchDB |
353 | 334 | if self.__last_seq < self.__sync_seq_milestone: |
354 | 335 | self.__data_sync_event.clear() |
355 | 336 | |
356 | class Database(object): | |
357 | def __init__(self, db_name): | |
358 | self.__db_path = os.path.join(server.config.FARADAY_BASE, 'server/workspaces/%s.db' % db_name) | |
359 | self.__engine = create_engine('sqlite:///%s' % self.__db_path) # XXX: Is this safe? | |
360 | server.models.Base.metadata.bind = self.__engine | |
361 | ||
362 | def create(self): | |
363 | server.models.Base.metadata.create_all(self.__engine) | |
364 | ||
365 | def open_session(self): | |
366 | self.session = scoped_session(sessionmaker(autocommit=False, | |
367 | autoflush=False, | |
368 | bind=self.__engine)) | |
369 | ||
370 | def exists(self): | |
371 | return os.path.exists(self.__db_path) | |
372 | ||
373 | def close(self): | |
374 | pass | |
375 | ||
376 | def delete(self): | |
377 | os.remove(self.__db_path) | |
378 | ||
379 | def teardown_context(self): | |
380 | self.session.remove() | |
381 | ||
382 | def setup(): | |
383 | setup_workspaces() | |
384 | server.couchdb.start_dbs_monitor(process_db_change) | |
385 | ||
386 | def setup_workspaces(): | |
387 | try: | |
388 | couchdb = server.couchdb.CouchDBServer() | |
389 | workspaces_list = couchdb.list_workspaces() | |
390 | except RequestError: | |
391 | logger.error(u"CouchDB is not running at {}. Check faraday-server's"\ | |
392 | " configuration and make sure CouchDB is running".format( | |
393 | server.couchdb.get_couchdb_url())) | |
394 | sys.exit(1) | |
395 | except Unauthorized: | |
396 | logger.error(u"Unauthorized access to CouchDB. Make sure faraday-server's"\ | |
397 | " configuration file has CouchDB admin's credentials set") | |
398 | sys.exit(1) | |
399 | ||
400 | for ws in workspaces_list: | |
401 | setup_workspace(ws) | |
402 | ||
403 | atexit.register(server.database.close_databases) | |
404 | ||
405 | def setup_workspace(ws_name): | |
406 | logger.info(u'Setting up workspace {}'.format(ws_name)) | |
407 | check_admin_access_to(ws_name) | |
408 | workspace[ws_name] = WorkspaceDatabase(ws_name) | |
409 | ||
410 | def check_admin_access_to(ws_name): | |
411 | if not server.couchdb.has_permissions_for(ws_name, | |
412 | credentials=server.couchdb.get_auth_info()): | |
413 | logger.error(u"Unauthorized access to CouchDB. Make sure faraday-server's"\ | |
414 | " configuration file has CouchDB admin's credentials set") | |
415 | sys.exit(1) | |
416 | ||
417 | def close_databases(): | |
418 | for ws in workspace.values(): | |
419 | ws.close() | |
420 | ||
421 | def process_db_change(change): | |
422 | if change.created: | |
423 | logger.info(u'Workspace {} was created'.format(change.db_name)) | |
424 | process_new_workspace(change.db_name) | |
425 | elif change.deleted: | |
426 | logger.info(u'Workspace {} was deleted'.format(change.db_name)) | |
427 | process_delete_workspace(change.db_name) | |
428 | ||
429 | def process_new_workspace(ws_name): | |
430 | if ws_name in workspace: | |
431 | logger.info(u"Workspace {} was already migrated. Ignoring change.".format(ws_name)) | |
432 | else: | |
433 | setup_workspace(ws_name) | |
434 | ||
435 | def process_delete_workspace(ws_name): | |
436 | if ws_name not in workspace: | |
437 | logger.info(u"Workspace {} wasn't migrated at startup. Ignoring change.".format(ws_name)) | |
438 | else: | |
439 | logger.info(u"Deleting workspace {} from Faraday Server".format(ws_name)) | |
440 | delete_workspace(ws_name) | |
441 | ||
442 | def delete_workspace(ws_name): | |
443 | get(ws_name).delete() | |
444 | del workspace[ws_name] | |
445 | ||
446 | def teardown_context(): | |
447 | """ This is called by Flask to cleanup sessions created in the context of a request """ | |
448 | for ws in workspace.values(): | |
449 | ws.database.teardown_context() | |
450 | ||
451 | class WorkspaceNotFound(Exception): | |
452 | def __init__(self, workspace_name): | |
453 | super(WorkspaceNotFound, self).__init__('Workspace "%s" not found' % workspace_name) | |
454 | ||
455 | def is_valid_workspace(workspace_name): | |
456 | return workspace_name in workspace | |
457 | ||
458 | def get(ws_name): | |
459 | try: | |
460 | return workspace[ws_name] | |
461 | except KeyError: | |
462 | raise WorkspaceNotFound(ws_name) | |
337 | ||
class DocumentImporter(object):
    """Applies CouchDB document changes to the workspace relational database.

    Each add/update/delete change notification is translated into the
    corresponding SQLAlchemy entity operation. After a change is applied,
    its CouchDB sequence number is persisted so synchronization can resume
    from that point after a restart.
    """

    def __init__(self, db_conn, post_processing_change_cbk=None):
        """
        :param db_conn: workspace database connector exposing ``session``
            and ``db_name``
        :param post_processing_change_cbk: optional callable invoked with
            each change after its sequence number has been stored
        """
        self.__db_conn = db_conn
        self.__db_conf = Configuration(db_conn)
        self.__post_processing_change_cbk = post_processing_change_cbk

    def process_change(self, change):
        """Dispatch a CouchDB change to the add/update/delete handler."""
        logger.debug(u'New change for {}: {}'.format(self.__db_conn.db_name, change.change_doc))

        if change.deleted:
            logger.debug(u'Doc {} was deleted'.format(change.doc_id))
            self.delete_entity_from_doc_id(change.doc['_id'])

        elif change.updated:
            logger.debug(u'Doc {} was updated'.format(change.doc_id))
            self.update_entity_from_doc(change.doc)

        elif change.added:
            if self.add_entity_from_doc(change.doc):
                logger.debug(u'Doc {} was added'.format(change.doc_id))
            else:
                logger.debug(u"Doc {} was not added".format(change.doc_id))

        # Persist the sequence number so a restart resumes from this point,
        # then notify any observer of the fully processed change.
        if change.seq is not None:
            self.__db_conf.set_last_seq(change.seq)
            if self.__post_processing_change_cbk:
                self.__post_processing_change_cbk(change)

    def add_entity_from_doc(self, document):
        """Parse *document* into a new entity and insert it.

        Returns True on success; False when the document doesn't map to a
        known entity type or the insert hit an integrity conflict.

        ISSUES:
            * Other entities related to this new document may be not already
              included into the database (ie: these documents are added on
              future changes)
        """
        entity = server.models.FaradayEntity.parse(document)
        if entity is None:
            return False

        entity.add_relationships_from_db(self.__db_conn.session)
        self.__db_conn.session.add(entity)

        try:
            self.__db_conn.session.commit()
            logger.info(u'New {} ({}) was added in Workspace {}'.format(
                entity.entity_metadata.document_type,
                getattr(entity, 'name', '<no-name>'),
                self.__db_conn.db_name))

        except IntegrityError:
            # We silently rollback because this is an expected scenario
            # when we create documents from the server and its own change
            # notification arrives afterwards
            self.__db_conn.session.rollback()
            return False

        return True

    def delete_entity_from_doc_id(self, document_id):
        """Delete the entity mapped to *document_id*; returns True if deleted.

        ISSUES:
            * Delete child entities. Have not found cases where this is a
              problem. So far, clients are deleting all CouchDB documents
              properly, and if they don't, the DBs still are consistent.
              Maybe use SQLAlchemy's cascades if this becomes a problem.
              Status: Somewhat OK

            * Doc ID maps to multiple elements. This could happen since the
              ID is a hash based in a few entity's properties which can be
              replicated. Status: TODO
        """
        entity = self.__get_modified_entity(document_id)
        if entity is not None:
            self.__db_conn.session.delete(entity)
            self.__db_conn.session.commit()
            logger.info(u'A {} ({}) was deleted in Workspace {}'.format(
                entity.entity_metadata.document_type,
                getattr(entity, 'name', '<no-name>'),
                self.__db_conn.db_name))
            return True

        logger.debug(u'Document ({}) was not present in database to delete'.format(document_id))
        return False

    def update_entity_from_doc(self, document):
        """Refresh the entity mapped to *document*; returns True if updated.

        ISSUES:
            * Updated relationships are not taken into account. Status: TODO
        """
        entity = self.__get_modified_entity(document.get('_id'))
        if entity is not None:
            entity.update_from_document(document)
            entity.entity_metadata.update_from_document(document)
            self.__db_conn.session.commit()
            logger.info(u'A {} ({}) was updated in Workspace {}'.format(
                entity.entity_metadata.document_type,
                getattr(entity, 'name', '<no-name>'),
                self.__db_conn.db_name))
            return True

        logger.debug(u'Document ({}) was not present in database to update'.format(document.get('_id')))
        return False

    def __get_modified_entity(self, document_id):
        """Return the concrete entity referenced by *document_id*, or None."""
        metadata = self.get_document_metadata(document_id)
        if metadata is None:
            logger.info(u'Doc {} was not found in the database'.format(document_id))
            return None

        # Obtain the proper table on which to perform the entity operation
        entity_cls = server.models.FaradayEntity.get_entity_class_from_type(
            metadata.document_type)

        # TODO(mrocha): Add error handling here when no or more than one entities where found.
        entity = self.__db_conn.session.query(entity_cls)\
            .join(server.models.EntityMetadata)\
            .filter(server.models.EntityMetadata.couchdb_id == document_id)\
            .one()
        return entity

    def get_document_metadata(self, document_id):
        """Return the EntityMetadata row for *document_id*, or None.

        Also returns None (with a warning) when the doc id ambiguously maps
        to several metadata rows.
        """
        metadata = None
        try:
            metadata = self.__db_conn.session.query(server.models.EntityMetadata)\
                .filter(server.models.EntityMetadata.couchdb_id == document_id)\
                .one_or_none()
        except MultipleResultsFound:
            logger.warning(u'Multiple entities were found for doc {}. '
                           'Ignoring change'.format(document_id))
        return metadata
467 | ||
468 | ||
class Configuration(object):
    """Typed accessors over the per-workspace key/value metadata table."""

    # TODO(mrocha): use enums in database metadata table
    # instead of constants defined here
    LAST_SEQ_CONFIG = 'last_seq'
    MIGRATION_SUCCESS = 'migration'
    SCHEMA_VERSION = 'version'

    def __init__(self, db_conn):
        self.__db_conn = db_conn

    def setup_new_database(self, from_seq=0):
        """Initialize metadata for a freshly created workspace database."""
        self.set_last_seq(from_seq)
        self.set_migration_status(False)
        self.set_schema_version()

    def get_last_seq(self):
        """Return the last processed CouchDB sequence number (0 if unset)."""
        record = self.__get_config(Configuration.LAST_SEQ_CONFIG)
        return 0 if record is None else int(record.value)

    def set_last_seq(self, last_seq):
        """Persist the last processed CouchDB sequence number."""
        self.__set_config(Configuration.LAST_SEQ_CONFIG, last_seq)

    def was_migration_successful(self):
        """True when the initial import finished without errors."""
        record = self.__get_config(Configuration.MIGRATION_SUCCESS)
        if record is None:
            return False
        return record.value == 'true'

    def get_schema_version(self):
        """Return the stored schema version string, or None if unset."""
        record = self.__get_config(Configuration.SCHEMA_VERSION)
        return None if record is None else record.value

    def set_migration_status(self, was_successful):
        """Record whether the initial import completed successfully."""
        status = 'true' if was_successful else 'false'
        self.__set_config(Configuration.MIGRATION_SUCCESS, status)

    def set_schema_version(self):
        """Stamp the database with the current model schema version."""
        self.__set_config(Configuration.SCHEMA_VERSION, server.models.SCHEMA_VERSION)

    def __get_config(self, option):
        # A duplicated option row means the metadata table is corrupt;
        # fail loudly instead of silently picking one.
        query = self.__db_conn.session.query(server.models.DatabaseMetadata)
        query = query.filter(server.models.DatabaseMetadata.option == option)

        try:
            return query.one_or_none()
        except MultipleResultsFound:
            msg = u'Database {} should not have the option {} defined multiple times'.format(self.__db_conn.db_name, option)
            logger.error(msg)
            raise RuntimeError(msg)

    def __set_config(self, option, value):
        record = self.__get_config(option)
        if record is None:
            record = server.models.DatabaseMetadata(option=option)
        record.value = value

        self.__db_conn.session.merge(record)
        self.__db_conn.session.commit()
531 | ||
463 | 532 | |
464 | 533 | # |
465 | 534 | # Profile queries performance on debug mode |
477 | 546 | logger.debug(u"Parameters:\n{!r}".format(parameters)) |
478 | 547 | |
479 | 548 | @event.listens_for(Engine, "after_cursor_execute") |
480 | def after_cursor_execute(conn, cursor, statement, | |
549 | def after_cursor_execute(conn, cursor, statement, | |
481 | 550 | parameters, context, executemany): |
482 | 551 | total = time.time() - context._query_start_time |
483 | 552 | logger.debug(u"Query Complete. Total Time: {:.02f}ms".format(total*1000)) |
484 | 553 | |
554 | ||
555 | # | |
556 | # Exception definitions | |
557 | # | |
class WorkspaceNotFound(Exception):
    """Raised when a requested workspace is not registered on the server."""

    def __init__(self, workspace_name):
        message = 'Workspace "%s" not found' % workspace_name
        super(WorkspaceNotFound, self).__init__(message)
561 |
0 | 0 | [faraday_server] |
1 | port=5984 | |
1 | port=5985 | |
2 | 2 | bind_address=localhost |
3 | 3 | |
4 | 4 | [ssl] |
5 | port=6984 | |
5 | port=6985 | |
6 | 6 | certificate= |
7 | 7 | keyfile= |
8 | 8 | ;keyfile_pwd='' |
9 | 9 | |
10 | 10 | [couchdb] |
11 | 11 | host=localhost |
12 | port=5985 | |
13 | ssl_port=6985 | |
14 | user=faraday | |
15 | password=changeme | |
12 | port=5984 | |
13 | ssl_port=6984 | |
14 | user= | |
15 | password= | |
16 | 16 | protocol=http |
17 | 17 |
0 | # Faraday Penetration Test IDE | |
1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) | |
2 | # See the file 'doc/LICENSE' for the license information | |
3 | ||
4 | import sys | |
5 | import server.utils.logger | |
6 | import server.couchdb | |
7 | import server.database | |
8 | import server.models | |
9 | from restkit.errors import RequestError, Unauthorized | |
10 | ||
11 | logger = server.utils.logger.get_logger(__name__) | |
12 | ||
def import_workspaces():
    """Import every CouchDB workspace into its local relational database.

    Exits the process if any workspace is not readable with the
    server's configured CouchDB credentials.
    """
    server_conn, ws_names = _open_couchdb_conn()

    for ws_name in ws_names:
        logger.info(u'Setting up workspace {}'.format(ws_name))

        has_access = server.couchdb.server_has_access_to(ws_name)
        if not has_access:
            logger.error(u"Unauthorized access to CouchDB. Make sure faraday-server's"
                         " configuration file has CouchDB admin's credentials set")
            sys.exit(1)

        import_workspace_into_database(ws_name, couchdb_server_conn=server_conn)
25 | ||
def _open_couchdb_conn():
    """Connect to CouchDB and return (connection, workspace name list).

    Exits the process when CouchDB is unreachable or rejects the
    configured credentials.
    """
    try:
        conn = server.couchdb.CouchDBServer()
        ws_names = conn.list_workspaces()

    except RequestError:
        logger.error(u"CouchDB is not running at {}. Check faraday-server's"
                     " configuration and make sure CouchDB is running".format(
                         server.couchdb.get_couchdb_url()))
        sys.exit(1)

    except Unauthorized:
        logger.error(u"Unauthorized access to CouchDB. Make sure faraday-server's"
                     " configuration file has CouchDB admin's credentials set")
        sys.exit(1)

    return conn, ws_names
43 | ||
def import_workspace_into_database(workspace_name, db_conn=None, couchdb_conn=None, couchdb_server_conn=None):
    """Ensure *workspace_name* has a valid local database, importing if needed.

    A missing database is created and populated; an existing but
    corrupt/outdated one is wiped and re-imported. Returns the database
    connector used.
    """
    db_conn = db_conn or server.database.Connector(workspace_name)
    couchdb_conn = couchdb_conn or server.couchdb.Workspace(workspace_name, couchdb_server_conn)

    if not db_conn.exists():
        # Fresh workspace: build its database from scratch
        import_on_new_database(db_conn, couchdb_conn)
    elif not db_conn.is_integrous():
        # Existing but corrupt or schema-outdated: rebuild it
        reimport_on_database(db_conn, couchdb_conn)

    return db_conn
57 | ||
def import_on_new_database(db_conn, couchdb_conn):
    """Create a brand-new workspace database and populate it from CouchDB."""
    if not db_conn.exists():
        logger.info(u'Creating database for workspace {}'.format(db_conn.db_name))
        _create_and_import_db(db_conn, couchdb_conn)
    else:
        raise RuntimeError('Database {} already exists'.format(db_conn.db_name))
64 | ||
def reimport_on_database(db_conn, couchdb_conn):
    """Re-import an existing workspace database from CouchDB.

    WARNING: the existing database is truncated first — all data
    previously stored locally for this workspace is lost.
    """
    if db_conn.exists():
        logger.info(u'Importing workspace {} again'.format(db_conn.db_name))
        _truncate_and_import_db(db_conn, couchdb_conn)
    else:
        raise RuntimeError('Database {} does not exist'.format(db_conn.db_name))
74 | ||
def _create_and_import_db(db_conn, couchdb_conn):
    """Create the workspace database and import all CouchDB documents.

    CouchDB's current sequence number is recorded *before* importing so
    changes arriving during the import are replayed later. The migration
    is flagged successful only after the import completes; on failure the
    half-built database is deleted and the error is re-raised.
    """
    db_conn.create()
    db_conf = server.database.Configuration(db_conn)
    db_conf.set_last_seq(couchdb_conn.get_last_seq())

    try:
        _import_from_couchdb(db_conn, couchdb_conn)
    except Exception as e:
        import traceback
        logger.debug(traceback.format_exc())
        logger.error(u'Error while importing workspace {}: {!s}'.format(db_conn.db_name, e))
        # Remove the partially imported database so the next run starts clean
        db_conn.delete()
        # Bare raise preserves the original traceback (unlike `raise e`)
        raise

    # Reaching this far without errors means a successful migration
    db_conf.set_migration_status(True)
91 | ||
def _truncate_and_import_db(db_conn, couchdb_conn):
    """Drop the workspace database and rebuild it from CouchDB."""
    db_conn.delete()

    # Use a fresh connector for the recreated database file
    fresh_conn = server.database.Connector(db_conn.db_name)
    _create_and_import_db(fresh_conn, couchdb_conn)
97 | ||
def _import_from_couchdb(db_conn, couchdb_conn):
    """Stream every document of the workspace from CouchDB into the database.

    Documents are fetched in batches of 1000. Pending entities are
    committed roughly every 1% of progress, aligned to the next Host
    document so hosts and their children land in consistent commits and
    memory stays bounded on very large workspaces.
    """
    total_amount = couchdb_conn.get_total_amount_of_documents()
    processed_docs, progress = 0, 0
    should_flush_changes = False
    host_entities = {}

    def flush_changes():
        # Commit what we have and detach it from the session to cap memory
        host_entities.clear()
        db_conn.session.commit()
        db_conn.session.expunge_all()

    for doc in couchdb_conn.get_documents(per_request=1000):
        processed_docs += 1
        # max(..., 1) guards against a zero/stale document count from CouchDB
        current_progress = (processed_docs * 100) / max(total_amount, 1)
        if current_progress > progress:
            _show_progress(u' * Importing {} from CouchDB'.format(db_conn.db_name), progress)
            progress = current_progress
            should_flush_changes = True

        entity = server.models.FaradayEntity.parse(doc.get('doc'))
        if entity is not None:
            # Flush only on Host boundaries, so children added afterwards
            # can still resolve their parent through host_entities
            if isinstance(entity, server.models.Host) and should_flush_changes:
                flush_changes()
                should_flush_changes = False

            try:
                entity.add_relationships_from_dict(host_entities)
            except server.models.EntityNotFound:
                logger.warning(u"Ignoring {} entity ({}) because its parent wasn't found".format(
                    entity.entity_metadata.document_type, entity.entity_metadata.couchdb_id))
            else:
                host_entities[doc.get('key')] = entity
                db_conn.session.add(entity)

    logger.info(u'{} importation done!'.format(db_conn.db_name))
    flush_changes()
134 | ||
def _show_progress(msg, percentage):
    """Render an in-place progress line on stdout (carriage-return update)."""
    line = '%s: %s%%\r' % (msg, percentage)
    sys.stdout.write(line)
    sys.stdout.flush()
138 |
3 | 3 | |
4 | 4 | import json |
5 | 5 | |
6 | from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, Float, Text | |
6 | from sqlalchemy import Column, Integer, String, Boolean, ForeignKey, Float, Text, UniqueConstraint | |
7 | 7 | from sqlalchemy.orm import relationship |
8 | 8 | from sqlalchemy.ext.declarative import declarative_base |
9 | 9 | |
10 | 10 | |
11 | SCHEMA_VERSION = 'W.0.3' | |
11 | SCHEMA_VERSION = 'W.2.1.0' | |
12 | 12 | |
13 | 13 | Base = declarative_base() |
14 | 14 | |
67 | 67 | class EntityMetadata(Base): |
68 | 68 | # Table schema |
69 | 69 | __tablename__ = 'metadata' |
70 | __table_args__ = ( | |
71 | UniqueConstraint('couchdb_id'), | |
72 | ) | |
73 | ||
70 | 74 | id = Column(Integer, primary_key=True) |
71 | 75 | update_time = Column(Float, nullable=True) |
72 | 76 | update_user = Column(String(250), nullable=True) |
96 | 100 | self.revision=document.get('_rev') |
97 | 101 | self.document_type=document.get('type') |
98 | 102 | |
103 | if self.create_time is not None: | |
104 | self.create_time = self.__truncate_to_epoch_in_seconds(self.create_time) | |
105 | ||
106 | def __truncate_to_epoch_in_seconds(self, timestamp): | |
107 | """ In a not so elegant fashion, identifies and truncate | |
108 | epoch timestamps expressed in milliseconds to seconds""" | |
109 | limit = 32503680000 # 01 Jan 3000 00:00:00 GMT | |
110 | if timestamp > limit: | |
111 | return timestamp / 1000 | |
112 | else: | |
113 | return timestamp | |
114 | ||
99 | 115 | |
100 | 116 | class Host(FaradayEntity, Base): |
101 | 117 | DOC_TYPE = 'Host' |
118 | 134 | interfaces = relationship('Interface') |
119 | 135 | services = relationship('Service') |
120 | 136 | vulnerabilities = relationship('Vulnerability') |
137 | credentials = relationship('Credential') | |
121 | 138 | |
122 | 139 | def update_from_document(self, document): |
123 | 140 | default_gateway = self.__get_default_gateway(document) |
188 | 205 | self.ipv6_gateway=document.get('ipv6').get('gateway') |
189 | 206 | self.ipv6_dns=u','.join(document.get('ipv6').get('DNS')) |
190 | 207 | self.ipv6_prefix=str(document.get('ipv6').get('prefix')) |
191 | self.ports_filtered=document.get('ports').get('filtered') | |
192 | self.ports_opened=document.get('ports').get('opened') | |
193 | self.ports_closed=document.get('ports').get('closed') | |
208 | self.ports_filtered=document.get('ports',{}).get('filtered') | |
209 | self.ports_opened=document.get('ports',{}).get('opened') | |
210 | self.ports_closed=document.get('ports',{}).get('closed') | |
194 | 211 | |
195 | 212 | def add_relationships_from_dict(self, entities): |
196 | 213 | host_id = '.'.join(self.entity_metadata.couchdb_id.split('.')[:-1]) |
228 | 245 | interface = relationship('Interface', back_populates='services') |
229 | 246 | |
230 | 247 | vulnerabilities = relationship('Vulnerability') |
248 | credentials = relationship('Credential') | |
231 | 249 | |
232 | 250 | def update_from_document(self, document): |
233 | 251 | self.name=document.get('name') |
372 | 390 | name = Column(String(250), nullable=False) |
373 | 391 | text = Column(Text(), nullable=True) |
374 | 392 | description = Column(Text(), nullable=True) |
393 | owned = Column(Boolean) | |
375 | 394 | |
376 | 395 | entity_metadata = relationship(EntityMetadata, uselist=False, cascade="all, delete-orphan", single_parent=True) |
377 | 396 | entity_metadata_id = Column(Integer, ForeignKey(EntityMetadata.id), index=True) |
380 | 399 | self.name=document.get('name') |
381 | 400 | self.text=document.get('text', None) |
382 | 401 | self.description=document.get('description', None) |
383 | ||
402 | self.owned=document.get('owned', False) | |
403 | ||
class Credential(FaradayEntity, Base):
    """Credential captured during an assessment (CouchDB doc type 'Cred').

    May be attached to a host or to a service — both foreign keys are
    optional and independent.
    """
    DOC_TYPE = 'Cred'

    # Table schema
    __tablename__ = 'credential'
    id = Column(Integer, primary_key=True)
    username = Column(String(250), nullable=False)
    password = Column(Text(), nullable=False)
    owned = Column(Boolean)
    description = Column(Text(), nullable=True)
    name = Column(String(250), nullable=True)

    # One-to-one link to the CouchDB bookkeeping row; orphaned metadata is
    # removed together with the credential.
    entity_metadata = relationship(EntityMetadata, uselist=False, cascade="all, delete-orphan", single_parent=True)
    entity_metadata_id = Column(Integer, ForeignKey(EntityMetadata.id), index=True)

    host_id = Column(Integer, ForeignKey(Host.id), index=True)
    host = relationship('Host', back_populates='credentials')

    service_id = Column(Integer, ForeignKey(Service.id), index=True)
    service = relationship('Service', back_populates='credentials')

    def update_from_document(self, document):
        """Copy credential fields from a CouchDB *document* dict.

        Every field except 'username' falls back to an empty/False default
        when missing from the document.
        """
        self.username=document.get('username')
        self.password=document.get('password', '')
        self.owned=document.get('owned', False)
        self.description=document.get('description', '')
        self.name=document.get('name','')
431 | ||
class Command(FaradayEntity, Base):
    """Record of a command executed from a client workspace.

    Maps CouchDB documents of type 'CommandRunInformation'; every field is
    optional and mirrors what the client reported about the run.
    """
    DOC_TYPE = 'CommandRunInformation'

    # Table schema
    __tablename__ = 'command'
    id = Column(Integer, primary_key=True)
    command = Column(String(250), nullable=True)
    duration = Column(Float, nullable=True)
    itime = Column(Float, nullable=True)
    ip = Column(String(250), nullable=True)
    hostname = Column(String(250), nullable=True)
    params = Column(String(250), nullable=True)
    user = Column(String(250), nullable=True)
    workspace = Column(String(250), nullable=True)


    entity_metadata = relationship(EntityMetadata, uselist=False, cascade="all, delete-orphan", single_parent=True)
    entity_metadata_id = Column(Integer, ForeignKey(EntityMetadata.id), index=True)

    def update_from_document(self, document):
        """Copy command-run fields from a CouchDB *document* dict.

        Missing keys default to None for every column.
        """
        self.command = document.get('command', None)
        self.duration = document.get('duration', None)
        self.itime = document.get('itime', None)
        self.ip = document.get('ip', None)
        self.hostname = document.get('hostname', None)
        self.params = document.get('params', None)
        self.user = document.get('user', None)
        self.workspace = document.get('workspace', None)
460 |
1 | 1 | # Copyright (C) 2016 Infobyte LLC (http://www.infobytesec.com/) |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | import server.database | |
5 | 4 | import server.utils.logger |
6 | 5 | |
7 | 6 | from sqlalchemy import distinct, Boolean |
3 | 3 | |
4 | 4 | import os |
5 | 5 | import logging |
6 | import logging.handlers | |
6 | 7 | import server.config |
7 | 8 | import errno |
8 | 9 | |
72 | 73 | os.makedirs(os.path.dirname(LOG_FILE)) |
73 | 74 | except OSError as e: |
74 | 75 | if e.errno != errno.EEXIST: |
75 | raise⏎ | |
76 | raise | |
77 | ||
78 | setup_logging() | |
79 | ||
80 |
2 | 2 | # See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | 4 | import gzip |
5 | import functools | |
5 | import functools | |
6 | 6 | import server.database |
7 | 7 | import server.couchdb |
8 | 8 | |
15 | 15 | param = request.args.get(query_parameter) |
16 | 16 | try: |
17 | 17 | return int(param) if param is not None else default |
18 | except ValueError: | |
19 | abort(400) | |
20 | ||
def get_mandatory_integer_parameter(query_parameter):
    """Return the request query parameter *query_parameter* as an int.

    Aborts the request with a 400 response if the value is not a valid
    integer. A missing parameter raises KeyError from ``request.args[...]``
    (which Flask turns into a 400 as well)."""
    param = request.args[query_parameter]
    try:
        return int(param)
    except ValueError:
        abort(400)
20 | 29 | |
43 | 52 | 'Content-Encoding' in response.headers): |
44 | 53 | return response |
45 | 54 | gzip_buffer = IO() |
46 | gzip_file = gzip.GzipFile(mode='wb', | |
55 | gzip_file = gzip.GzipFile(mode='wb', | |
47 | 56 | fileobj=gzip_buffer) |
48 | 57 | gzip_file.write(response.data) |
49 | 58 | gzip_file.close() |
59 | 68 | |
60 | 69 | return view_func |
61 | 70 | |
def get_basic_auth():
    """Return the request's HTTP Basic Auth pair as ``(username, password)``,
    or None when the header is absent or either field is empty."""
    auth = request.authorization
    if not auth:
        return None
    user = auth.get('username')
    passwd = auth.get('password')
    if user and passwd:
        return (user, passwd)
    return None
77 | ||
def validate_workspace(workspace_name, timeout_sync=0.1):
    """Abort the current request unless *workspace_name* is a known,
    accessible workspace; otherwise wait briefly for DB sync."""
    # Unknown workspace -> 404.
    if not server.database.is_valid_workspace(workspace_name):
        abort(404)

    # Permission check against CouchDB, using the session cookies and —
    # if present — HTTP Basic Auth credentials; failure -> 401.
    if not server.couchdb.has_permissions_for(workspace_name, request.cookies, get_basic_auth()):
        abort(401)

    # Let the local DB catch up with CouchDB; timeout_sync is presumably
    # seconds — confirm in wait_for_ws_sync_with_couchdb.
    wait_for_ws_sync_with_couchdb(workspace_name, timeout_sync)
19 | 19 | def connectionLost(self, reason): |
20 | 20 | if not reason.check(error.ConnectionClosed): |
21 | 21 | logger.get_logger(__name__).error("Connection error: {}".format(reason.value)) |
22 | return proxy.ProxyClient.connectionLost(self, reason) | |
22 | ||
23 | try: | |
24 | proxy.ProxyClient.connectionLost(self, reason) | |
25 | ||
26 | except RuntimeError, e: | |
27 | # Dirty way to ignore this expected exception from twisted. It happens | |
28 | # when one endpoint of the connection is still transmitting data while | |
29 | # the other one is disconnected. | |
30 | ignore_error_msg = 'Request.finish called on a request after its connection was lost' | |
31 | if ignore_error_msg not in e.message: | |
32 | raise e | |
23 | 33 | |
24 | 34 | |
25 | 35 | class HTTPProxyClientFactory(proxy.ProxyClientFactory): |
107 | 107 | aside nav.index{padding-top:55px;} |
108 | 108 | aside nav a { |
109 | 109 | display: block; |
110 | padding: 12px 0; | |
110 | padding: 8px 0; | |
111 | 111 | color: #fff; |
112 | 112 | text-align: center; |
113 | 113 | transition: all .3s ease; |
55 | 55 | controller: 'workspacesCtrl', |
56 | 56 | title: 'Dashboard | ' |
57 | 57 | }). |
58 | when('/help', { | |
59 | templateUrl: 'scripts/help/partials/help.html', | |
60 | title: 'Help | ' | |
61 | }). | |
58 | 62 | when('/hosts/ws/:wsId/search/:search', { |
59 | 63 | templateUrl: 'scripts/hosts/partials/list.html', |
60 | 64 | controller: 'hostsCtrl', |
269 | 269 | }); |
270 | 270 | }; |
271 | 271 | |
    // Parse the URL-encoded, "&"-separated search segment of a route
    // (e.g. "severity=high&name=xss") into a {field: value} filter object.
    // Terms without "=" are ignored.
    commonsFact.parseSearchURL = function(searchParams) {
        var i = -1, searchFilter = {}, searchTerms = searchParams.split("&");

        searchTerms.forEach(function(term) {
            i = term.indexOf("=");
            if(i > 0) {
                var filterField = decodeURIComponent(term.slice(0, i));
                var filterValue = decodeURIComponent(term.slice(i+1));
                searchFilter[filterField] = filterValue;
            }
        });

        return trimSearchFilter(searchFilter);
    };

    // Parse a user-typed expression (e.g. "severity:high sql injection")
    // into a {field: value} filter object. Words without a "field:" prefix
    // are appended to the most recent field seen, defaulting to "search" —
    // this is what makes multi-word values (with spaces) work.
    commonsFact.parseSearchExpression = function(searchExpression) {
        var i = -1;
        var searchFilter = {};
        var lastFilterField = "search";
        var expressionTerms = searchExpression.split(" ");

        expressionTerms.forEach(function(term) {
            i = term.indexOf(":");
            if (i > 0) {
                var filterField = term.slice(0, i);
                var filterValueChunk = term.slice(i+1);
                searchFilter[filterField] = filterValueChunk;
                lastFilterField = filterField;
            } else {
                if (!searchFilter.hasOwnProperty(lastFilterField)) {
                    searchFilter[lastFilterField] = term;
                } else {
                    searchFilter[lastFilterField] += ' ' + term;
                }
            }
        });

        return trimSearchFilter(searchFilter);
    };

    // Strip leading/trailing whitespace from every value of a filter object
    // (in place); shared by both parsers above.
    var trimSearchFilter = function(searchFilter) {
        for (var filter in searchFilter) {
            if (searchFilter.hasOwnProperty(filter)) {
                searchFilter[filter] = searchFilter[filter].trim();
            }
        }
        return searchFilter;
    };

    // Inverse of parseSearchExpression: render a filter object back into a
    // "field:value" expression for the search box. The free-text "search"
    // value goes first; the "confirmed" flag is UI state and is excluded.
    commonsFact.searchFilterToExpression = function(searchFilter) {
        var searchExpression = "";

        if (searchFilter.hasOwnProperty("search")) {
            searchExpression += searchFilter.search;
        }

        for (var filter in searchFilter) {
            if (searchFilter.hasOwnProperty(filter)) {
                if (filter !== "search" && filter !== "confirmed") {
                    if (searchExpression != "") {
                        searchExpression += " ";
                    }
                    searchExpression += filter + ":" + searchFilter[filter];
                }
            }
        }

        return searchExpression.trim();
    };

    // Inverse of parseSearchURL: render a filter object into the
    // URL-encoded "&"-separated form used in routes; empty values are
    // dropped.
    commonsFact.searchFilterToURLParams = function(searchFilter) {
        var searchURLParams = "";
        for (var filter in searchFilter) {
            if (searchFilter.hasOwnProperty(filter)) {
                if (searchFilter[filter] != "") {
                    var paramName = encodeURIComponent(filter);
                    var paramValue = encodeURIComponent(searchFilter[filter]);
                    searchURLParams += "&" + paramName + "=" + paramValue;
                }
            }
        }
        return searchURLParams.slice(1);
    };
272 | 356 | return commonsFact; |
273 | 357 | }]); |
31 | 31 | }, true); |
32 | 32 | |
33 | 33 | scope.render = function(data) { |
34 | //remove existing treemap container, if any | |
35 | d3.select(ele[0]).select("#treemap_container").remove(); | |
34 | 36 | |
35 | 37 | if (!data || data.length == 0) return; |
36 | 38 | |
37 | var width = data.width || 160, | |
39 | var width = data.width || 160, | |
38 | 40 | height = data.height || 133; |
39 | 41 | |
40 | 42 | var div = d3.select(ele[0]) |
37 | 37 | $scope.cmdSortReverse = !$scope.cmdSortReverse; |
38 | 38 | } |
39 | 39 | |
40 | dashboardSrv.registerCallback(init); | |
41 | ||
40 | 42 | init(); |
41 | 43 | }]);⏎ |
3 | 3 | |
4 | 4 | angular.module('faradayApp') |
5 | 5 | .controller('compoundCtrl', |
6 | ['$scope', '$location', '$route', '$routeParams', '$uibModal', 'hostsManager', 'workspacesFact', | |
7 | function($scope, $location, $route, $routeParams, $uibModal, hostsManager, workspacesFact) { | |
6 | ['$scope', '$location', '$route', '$routeParams', '$uibModal', 'hostsManager', 'workspacesFact', 'dashboardSrv', | |
7 | function($scope, $location, $route, $routeParams, $uibModal, hostsManager, workspacesFact, dashboardSrv) { | |
8 | 8 | |
9 | 9 | init = function() { |
10 | 10 | // hosts list |
137 | 137 | } |
138 | 138 | }; |
139 | 139 | |
140 | dashboardSrv.registerCallback(loadHosts); | |
141 | ||
140 | 142 | init(); |
141 | 143 | }]);⏎ |
15 | 15 | workspacesFact.list().then(function(wss) { |
16 | 16 | $scope.workspaces = wss; |
17 | 17 | }); |
18 | dashboardSrv.setConfirmedFromCookie(); | |
19 | dashboardSrv.startTimer(); | |
18 | 20 | }; |
19 | 21 | |
20 | 22 | $scope.navigate = function(route) { |
25 | 27 | dashboardSrv.setConfirmed(); |
26 | 28 | }; |
27 | 29 | |
30 | $scope.$on('$destroy', function(){ | |
31 | dashboardSrv.stopTimer(); | |
32 | }) | |
33 | ||
34 | $scope.reload = function() { | |
35 | dashboardSrv.updateData(); | |
36 | } | |
37 | ||
28 | 38 | init(); |
29 | 39 | }]); |
61 | 61 | return total; |
62 | 62 | }; |
63 | 63 | |
64 | dashboardSrv.registerCallback($scope.loadData); | |
65 | ||
64 | 66 | init(); |
65 | 67 | }]);⏎ |
14 | 14 | if($routeParams.wsId != undefined) { |
15 | 15 | $scope.workspace = $routeParams.wsId; |
16 | 16 | |
17 | $scope.loadData(); | |
17 | $scope.loadData(true); | |
18 | 18 | |
19 | 19 | $scope.$watch(function() { |
20 | 20 | return dashboardSrv.props.confirmed; |
21 | 21 | }, function(newValue, oldValue) { |
22 | 22 | if (oldValue != newValue) |
23 | $scope.loadData(); | |
23 | $scope.loadData(true); | |
24 | 24 | }, true); |
25 | 25 | } |
26 | 26 | }; |
27 | 27 | |
28 | $scope.loadData = function() { | |
28 | $scope.loadData = function(animated) { | |
29 | 29 | dashboardSrv.getVulnerabilitiesCount($scope.workspace) |
30 | 30 | .then(function(vulns) { |
31 | $scope.data = {key: [], value: [], colors: [], options: {maintainAspectRatio: false}}; | |
31 | $scope.data = {key: [], value: [], colors: [], options: {maintainAspectRatio: false, animateRotate: animated}}; | |
32 | 32 | $scope.loaded = true; |
33 | 33 | SEVERITIES.forEach(function(severity, index) { |
34 | 34 | if(severity != "unclassified" && vulns[severity] != undefined) { |
37 | 37 | $scope.data.colors.push(dashboardSrv.vulnColors[index]); |
38 | 38 | } |
39 | 39 | }); |
40 | // angular.copy(tmp, $scope.data); | |
41 | // | |
40 | 42 | }); |
41 | 43 | }; |
42 | 44 | |
45 | dashboardSrv.registerCallback(function (){ | |
46 | $scope.loadData(false); | |
47 | }); | |
48 | ||
43 | 49 | init(); |
44 | 50 | }]);⏎ |
3 | 3 | |
4 | 4 | angular.module('faradayApp') |
5 | 5 | .controller('workspaceProgressCtrl', |
6 | ['$scope', '$routeParams', 'workspacesFact', | |
7 | function($scope, $routeParams, workspacesFact) { | |
6 | ['$scope', '$routeParams', 'workspacesFact', 'dashboardSrv', | |
7 | function($scope, $routeParams, workspacesFact, dashboardSrv) { | |
8 | 8 | |
9 | 9 | $scope.workspace; |
10 | 10 | $scope.wsDuration; |
49 | 49 | return progress; |
50 | 50 | }; |
51 | 51 | |
52 | dashboardSrv.registerCallback(init); | |
53 | ||
52 | 54 | init(); |
53 | 55 | }]);⏎ |
9 | 9 | Dashboard for {{ workspace }} ({{props["confirmed"] === false ? 'all vulns' : 'confirmed' }}) |
10 | 10 | </span><!-- WS name --> |
11 | 11 | <div id="ws-control" class="btn-group"> |
12 | <button id="refresh" type="button" class="btn btn-danger" title="Refresh current workspace" ng-click="location.reload()"> | |
12 | <button id="refresh" type="button" class="btn btn-danger" title="Refresh current workspace" ng-click="reload()"> | |
13 | 13 | <span class="glyphicon glyphicon-refresh"></span> |
14 | 14 | </button> |
15 | 15 | <button type="button" class="btn btn-danger dropdown-toggle" data-toggle="dropdown" title="Change current workspace"> |
24 | 24 | <tr ng-repeat="vuln in vulns"> |
25 | 25 | <td><span am-time-ago="vuln.metadata.create_time * 1000"></span></td> |
26 | 26 | <td> |
27 | {{vuln.target}} | |
27 | <a href="" ng-click="navigate('/status/ws/'+workspace+'/search/target='+vuln.target)">{{vuln.target}}</a> | |
28 | 28 | </td> |
29 | <td><span class="label vuln fondo-{{vuln.severity}}">{{vuln.severity | uppercase}}</span></td> | |
30 | <td class="wrapword">{{vuln.name}}</td> | |
29 | <td><a href="" ng-click="navigate('/status/ws/'+workspace+'/search/severity='+vuln.severity)"><span class="label vuln fondo-{{vuln.severity}}">{{vuln.severity | uppercase}}</span></a></td> | |
30 | <td class="wrapword"><a href="" ng-click="navigate('/status/ws/'+workspace+'/search/name='+vuln.name)">{{vuln.name}}</a></td> | |
31 | 31 | <td> |
32 | 32 | <span class="glyphicon glyphicon-ok" ng-show="vuln.web"></span> |
33 | 33 | <span class="glyphicon glyphicon-remove" ng-show="!vuln.web"></span> |
35 | 35 | </tr> |
36 | 36 | </tbody> |
37 | 37 | </table> |
38 | </article>⏎ | |
38 | </article> |
2 | 2 | // See the file 'doc/LICENSE' for the license information |
3 | 3 | |
4 | 4 | angular.module('faradayApp') |
5 | .factory('dashboardSrv', ['BASEURL', 'SEVERITIES', '$cookies', '$q', '$http', 'hostsManager', function(BASEURL, SEVERITIES, $cookies, $q, $http, hostsManager) { | |
5 | .factory('dashboardSrv', ['BASEURL', 'SEVERITIES', '$cookies', '$q', '$http', '$interval', 'hostsManager', | |
6 | function(BASEURL, SEVERITIES, $cookies, $q, $http, $interval, hostsManager) { | |
6 | 7 | var dashboardSrv = {}; |
7 | 8 | |
8 | 9 | dashboardSrv._getView = function(url) { |
19 | 20 | }; |
20 | 21 | |
21 | 22 | dashboardSrv.props = {}; |
22 | dashboardSrv.props["confirmed"] = ($cookies.get('confirmed') == undefined) ? false : JSON.parse($cookies.get('confirmed')); | |
23 | dashboardSrv.setConfirmedFromCookie = function() { | |
24 | dashboardSrv.props["confirmed"] = ($cookies.get('confirmed') == undefined) ? false : JSON.parse($cookies.get('confirmed')); | |
25 | } | |
23 | 26 | |
24 | 27 | dashboardSrv.setConfirmed = function(val) { |
25 | 28 | if(val == undefined) { |
310 | 313 | }); |
311 | 314 | }; |
312 | 315 | |
    // Handle to the $interval promise while the auto-refresh timer runs;
    // undefined when stopped.
    var timer = undefined;

    // Start the dashboard auto-refresh: fire every registered callback
    // once every 60 seconds.
    dashboardSrv.startTimer = function() {
        timer = $interval(function(){
            dashboardSrv.updateData();
        }, 60000)
    }

    // Callbacks registered by dashboard widgets; each is invoked on refresh.
    dashboardSrv._callbacks = [];

    dashboardSrv.registerCallback = function(callback) {
        dashboardSrv._callbacks.push(callback);
    }

    // Stop the auto-refresh and drop all registered callbacks (called when
    // the dashboard scope is destroyed).
    dashboardSrv.stopTimer = function() {
        dashboardSrv._callbacks = [];
        if (angular.isDefined(timer)) {
            $interval.cancel(timer);
            timer = undefined;
        }
    }

    // Run every registered callback; used by the timer and by the manual
    // refresh button.
    dashboardSrv.updateData = function() {
        for (var i = 0; i < dashboardSrv._callbacks.length; i++) {
            var callback = dashboardSrv._callbacks[i];
            callback();
        }
    }
343 | ||
313 | 344 | return dashboardSrv; |
314 | 345 | }]); |
13 | 13 | return function(scope, element, attr) { |
14 | 14 | |
15 | 15 | element.on('click', function(event) { |
16 | var a_href, content, extension, title, type, url, _ref; | |
17 | _ref = fn(scope), content = _ref.content, extension = _ref.extension, title = _ref.title, type = _ref.type; | |
16 | fn(scope).then(function (res){ | |
17 | var a_href, content, extension, title, type, url, _ref; | |
18 | _ref = res; | |
19 | content = _ref.content, extension = _ref.extension, title = _ref.title, type = _ref.type; | |
18 | 20 | |
19 | if (!(content != null) && !(extension != null) && !(title != null) && !(type != null)) { | |
20 | $log.warn("Invalid content, extension, title or type in file exporter : ", content, extension, title, type); | |
21 | return; | |
22 | } | |
23 | ||
24 | title = $blob.sanitizeFileName(title, extension); | |
25 | type = $blob.sanitizeFileType(type); | |
26 | url = $blob.fileToURL(content, type); | |
27 | ||
28 | element.append("<a download=\"" + title + "\" href=\"" + url + "\"></a>"); | |
29 | a_href = element.find('a')[0]; | |
30 | ||
31 | $click.on(a_href); | |
32 | $timeout(function() {$blob.revoke(url);}); | |
33 | ||
34 | element[0].removeChild(a_href); | |
21 | if (!(content != null) && !(extension != null) && !(title != null) && !(type != null)) { | |
22 | $log.warn("Invalid content, extension, title or type in file exporter : ", content, extension, title, type); | |
23 | return; | |
24 | } | |
25 | ||
26 | title = $blob.sanitizeFileName(title, extension); | |
27 | type = $blob.sanitizeFileType(type); | |
28 | url = $blob.fileToURL(content, type); | |
29 | ||
30 | element.append("<a download=\"" + title + "\" href=\"" + url + "\"></a>"); | |
31 | a_href = element.find('a')[0]; | |
32 | ||
33 | $click.on(a_href); | |
34 | $timeout(function() {$blob.revoke(url);}); | |
35 | ||
36 | element[0].removeChild(a_href); | |
37 | }); | |
35 | 38 | }); |
36 | 39 | }; |
37 | 40 | } |
0 | <!-- Faraday Penetration Test IDE --> | |
1 | <!-- Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) --> | |
2 | <!-- See the file 'doc/LICENSE' for the license information --> | |
3 | ||
4 | <section id="main" class="seccion clearfix"> | |
5 | ||
6 | <div class="right-main"><div id="reports-main" class="fila clearfix"> | |
7 | <h2 class="ws-label"> | |
8 | <span id="ws-name" title="Help">Relax! Help is coming your way!</span> | |
9 | </h2><!-- .ws-label --> | |
10 | <div class="reports col-md-10 col-sm-10 col-xs-12"> | |
11 | We strongly recommend that every Faraday user starts by reading our <a href="https://faradaysec.com/help/docs" target="_blank">official documentation</a>. | |
12 | <br/><br/> | |
13 | If after reading it you are still in the need of some assistance then we have some recommendations: | |
14 | <br/><br/> | |
15 | <div class="reports col-md-10 col-sm-10 col-xs-12"><ul> | |
16 | <li> | |
17 | * Maybe someone has already asked this question — have you tried the <a href="https://faradaysec.com/help/faq" target="_blank">frequently asked questions</a> section of our docs? | 
18 | </li> | |
19 | <li> | |
20 | * Not a question but a problem? It could help to take a look at the <a href="https://faradaysec.com/help/troubleshooting" target="_blank">troubleshooting guide</a>. | 
21 | </li> | |
22 | <li> | |
23 | * We also have a bunch of interesting <a href="https://faradaysec.com/help/demos" target="_blank">video demos</a> that may help | |
24 | </li> | |
25 | <li> | |
26 | * <a href="https://faradaysec.com/help/issues" target="_blank">Open a ticket</a> | |
27 | </li> | |
28 | </ul></div><!-- .reports .col-md-10 .col-sm-10 .col-xs-12 --> | |
29 | <br/><br/><br/><br/><br/> | |
30 | Still can't fix it? You can reach us and find other Faraday users on: | |
31 | <br/><br/> | |
32 | <div class="reports col-md-10 col-sm-10 col-xs-12"><ul> | |
33 | <li> | |
34 | * User forum - <a href="https://forum.faradaysec.com" target="_blank">forum.faradaysec.com</a> | |
35 | </li> | |
36 | <li> | |
37 | * IRC chat - <a href="https://faradaysec.com/help/irc" target="_blank">#faraday-dev in freenode</a> | |
38 | </li> | |
39 | <li> | |
40 | * Mailing list - <a href="https://faradaysec.com/help/googlegroup" target="_blank">groups.google.com/d/forum/faradaysec</a> | |
41 | </li> | |
42 | <li> | |
43 | * Twitter - <a href="https://faradaysec.com/help/twitter" target="_blank">twitter.com/faradaysec</a> | |
44 | </li> | |
45 | </ul></div><!-- .reports .col-md-10 .col-sm-10 .col-xs-12 --> | |
46 | <br/><br/><br/><br/><br/><br/> | |
47 | More info about Faraday at <a href="http://faradaysec.com" target="_blank">faradaysec.com</a> | |
48 | </div><!-- .reports --> | |
49 | </div><!-- #reports-main --></div><!-- .right-main --> | |
50 | </section><!-- #main --> |
4 | 4 | angular.module('faradayApp') |
5 | 5 | .controller('hostCtrl', |
6 | 6 | ['$scope', '$cookies', '$filter', '$location', '$route', '$routeParams', '$uibModal', '$q', |
7 | 'hostsManager', 'workspacesFact', 'dashboardSrv', 'servicesManager', | |
7 | 'hostsManager', 'workspacesFact', 'dashboardSrv', 'servicesManager', | |
8 | 8 | function($scope, $cookies, $filter, $location, $route, $routeParams, $uibModal, $q, |
9 | 9 | hostsManager, workspacesFact, dashboardSrv, servicesManager) { |
10 | 10 | |
16 | 16 | var hostId = $routeParams.hidId; |
17 | 17 | |
18 | 18 | $scope.services = []; |
19 | $scope.sortField = "name"; | |
19 | $scope.sortField = "ports"; | |
20 | 20 | $scope.reverse = false; |
21 | 21 | |
22 | 22 | $scope.loadedServices = false; |
53 | 53 | |
54 | 54 | $scope.loadedServices = true; |
55 | 55 | |
56 | return hostsManager.getAllVulnsCount($scope.workspace); | |
56 | return servicesManager.getServiceVulnCount($scope.workspace, $scope.services) | |
57 | 57 | }) |
58 | 58 | .then(function(vulns) { |
59 | var vulnsCount = {}; | |
60 | vulns.forEach(function(vuln) { | |
61 | vulnsCount[vuln.key] = vuln.value; | |
62 | }); | |
63 | 59 | $scope.services.forEach(function(service) { |
64 | service.vulns = vulnsCount[service._id] || 0; | |
60 | service.vulns = vulns[service._id] || 0; | |
65 | 61 | }); |
66 | 62 | }) |
67 | 63 | .catch(function(e) { |
69 | 65 | }); |
70 | 66 | |
71 | 67 | $scope.pageSize = 10; |
72 | $scope.currentPage = 0; | |
73 | $scope.newCurrentPage = 0; | |
68 | $scope.currentPage = 1; | |
69 | $scope.newCurrentPage = 1; | |
74 | 70 | |
75 | 71 | if(!isNaN(parseInt($cookies.pageSize))) $scope.pageSize = parseInt($cookies.pageSize); |
76 | 72 | $scope.newPageSize = $scope.pageSize; |
111 | 107 | }; |
112 | 108 | |
113 | 109 | $scope.go = function() { |
110 | if ($scope.newPageSize === undefined) | |
111 | $scope.newPageSize = 1; | |
114 | 112 | $scope.pageSize = $scope.newPageSize; |
115 | 113 | $cookies.pageSize = $scope.pageSize; |
116 | $scope.currentPage = 0; | |
117 | if($scope.newCurrentPage <= parseInt($scope.services.length/$scope.pageSize) | |
118 | && $scope.newCurrentPage > -1 && !isNaN(parseInt($scope.newCurrentPage))) { | |
114 | $scope.currentPage = 1; | |
115 | if ($scope.newCurrentPage <= $scope.pageCount() && $scope.newCurrentPage > 0 && | |
116 | !isNaN(parseInt($scope.newCurrentPage))) { | |
119 | 117 | $scope.currentPage = $scope.newCurrentPage; |
120 | 118 | } |
121 | 119 | }; |
353 | 351 | } |
354 | 352 | |
355 | 353 | filter = function(data) { |
354 | // this is going to be replaced by a server query | |
356 | 355 | var tmp_data = $filter('orderBy')(data, $scope.sortField, $scope.reverse); |
357 | 356 | tmp_data = $filter('filter')(tmp_data, $scope.expression); |
358 | tmp_data = tmp_data.splice($scope.pageSize * $scope.currentPage, $scope.pageSize); | |
359 | ||
357 | tmp_data = tmp_data.splice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize); | |
360 | 358 | return tmp_data; |
359 | }; | |
360 | ||
361 | // paging | |
362 | ||
363 | $scope.prevPage = function() { | |
364 | $scope.currentPage -= 1; | |
365 | }; | |
366 | ||
367 | $scope.prevPageDisabled = function() { | |
368 | return $scope.currentPage <= 1; | |
369 | }; | |
370 | ||
371 | $scope.nextPage = function() { | |
372 | $scope.currentPage += 1; | |
373 | }; | |
374 | ||
375 | $scope.nextPageDisabled = function() { | |
376 | return $scope.currentPage >= $scope.pageCount(); | |
377 | }; | |
378 | ||
379 | $scope.pageCount = function() { | |
380 | var tmp_services = $filter('orderBy')($scope.services, $scope.sortField, $scope.reverse); | |
381 | tmp_services = $filter('filter')(tmp_services, $scope.expression); | |
382 | return Math.ceil(tmp_services.length / $scope.pageSize); | |
361 | 383 | }; |
362 | 384 | |
363 | 385 | init(); |
3 | 3 | |
4 | 4 | angular.module('faradayApp') |
5 | 5 | .controller('hostsCtrl', |
6 | ['$scope', '$cookies', '$filter', '$location', '$route', '$routeParams', '$uibModal', 'hostsManager', 'workspacesFact', | |
7 | function($scope, $cookies, $filter, $location, $route, $routeParams, $uibModal, hostsManager, workspacesFact) { | |
6 | ['$scope', '$cookies', '$filter', '$location', '$route', '$routeParams', '$uibModal', 'hostsManager', 'workspacesFact', 'commonsFact', | |
7 | function($scope, $cookies, $filter, $location, $route, $routeParams, $uibModal, hostsManager, workspacesFact, commonsFact) { | |
8 | 8 | |
9 | 9 | init = function() { |
10 | 10 | $scope.selectall_hosts = false; |
31 | 31 | if(!isNaN(parseInt($cookies.pageSize))) $scope.pageSize = parseInt($cookies.pageSize); |
32 | 32 | $scope.newPageSize = $scope.pageSize; |
33 | 33 | |
34 | decodeSearchFromURL(); | |
35 | ||
36 | loadHosts(); | |
37 | }; | |
38 | ||
39 | var decodeSearchFromURL = function() { | |
34 | parseSearchQuery(); | |
35 | ||
36 | loadHosts(); | |
37 | }; | |
38 | ||
39 | var parseSearchQuery = function() { | |
40 | 40 | $scope.search = $routeParams.search; |
41 | 41 | $scope.searchParams = ""; |
42 | 42 | $scope.expression = {}; |
43 | 43 | |
44 | 44 | if($scope.search != "" && $scope.search != undefined && $scope.search.indexOf("=") > -1) { |
45 | // search expression for filter | |
46 | $scope.expression = $scope.decodeSearch($scope.search); | |
47 | // search params for search field, which shouldn't be used for filtering | |
48 | $scope.searchParams = $scope.stringSearch($scope.expression); | |
49 | // TODO: This sucks man | |
50 | $scope.expression = prepareFilter($scope.searchParams); | |
45 | $scope.expression = commonsFact.parseSearchURL($scope.search); | |
46 | $scope.searchParams = commonsFact.searchFilterToExpression($scope.expression); | |
51 | 47 | } |
52 | 48 | }; |
53 | 49 | |
86 | 82 | }); |
87 | 83 | }; |
88 | 84 | |
89 | var prepareFilter = function(searchText) { | |
90 | var params = searchText.split(" "); | |
91 | var chunks = {}; | |
92 | var i = -1; | |
93 | ||
94 | params.forEach(function(chunk) { | |
95 | i = chunk.indexOf(":"); | |
96 | if (i > 0) { | |
97 | chunks[chunk.slice(0, i)] = chunk.slice(i+1); | |
98 | } else { | |
99 | if (!chunks.hasOwnProperty("search")) { | |
100 | chunks.search = chunk; | |
101 | } else { | |
102 | chunks.search += ' ' + chunk; | |
103 | } | |
104 | } | |
105 | }); | |
106 | ||
107 | return chunks; | |
108 | }; | |
109 | ||
110 | 85 | // changes the URL according to search params |
111 | 86 | $scope.searchFor = function(search, params) { |
112 | 87 | if (search && params != "" && params != undefined) { |
113 | $scope.expression = prepareFilter(params); | |
88 | $scope.expression = commonsFact.parseSearchExpression(params); | |
114 | 89 | } else { |
115 | 90 | $scope.expression = {}; |
116 | 91 | } |
119 | 94 | }; |
120 | 95 | |
121 | 96 | $scope.go = function() { |
97 | if ($scope.newPageSize === undefined) | |
98 | $scope.newPageSize = 1; | |
122 | 99 | $scope.pageSize = $scope.newPageSize; |
123 | 100 | $cookies.pageSize = $scope.pageSize; |
124 | 101 | $scope.currentPage = 1; |
127 | 104 | $scope.currentPage = $scope.newCurrentPage; |
128 | 105 | } |
129 | 106 | loadHosts(); |
130 | }; | |
131 | ||
132 | // encodes search string in order to send it through URL | |
133 | $scope.encodeSearch = function(search) { | |
134 | var i = -1, | |
135 | encode = "", | |
136 | params = search.split(" "), | |
137 | chunks = {}; | |
138 | ||
139 | params.forEach(function(chunk) { | |
140 | i = chunk.indexOf(":"); | |
141 | if(i > 0) { | |
142 | chunks[chunk.slice(0, i)] = chunk.slice(i+1); | |
143 | } else { | |
144 | if(!chunks.hasOwnProperty("free")) { | |
145 | chunks.free = ""; | |
146 | } | |
147 | chunks.free += " ".concat(chunk); | |
148 | } | |
149 | }); | |
150 | ||
151 | if(chunks.hasOwnProperty("free")) { | |
152 | chunks.free = chunks.free.slice(1); | |
153 | } | |
154 | ||
155 | for(var prop in chunks) { | |
156 | if(chunks.hasOwnProperty(prop)) { | |
157 | if(chunks.prop != "") { | |
158 | encode += "&" + encodeURIComponent(prop) + "=" + encodeURIComponent(chunks[prop]); | |
159 | } | |
160 | } | |
161 | } | |
162 | return encode.slice(1); | |
163 | }; | |
164 | ||
165 | // decodes search parameters to object in order to use in filter | |
166 | $scope.decodeSearch = function(search) { | |
167 | var i = -1, | |
168 | decode = {}, | |
169 | params = search.split("&"); | |
170 | ||
171 | params.forEach(function(param) { | |
172 | i = param.indexOf("="); | |
173 | decode[decodeURIComponent(param.slice(0,i))] = decodeURIComponent(param.slice(i+1)); | |
174 | }); | |
175 | ||
176 | if(decode.hasOwnProperty("free")) { | |
177 | decode['$'] = decode.free; | |
178 | delete decode.free; | |
179 | } | |
180 | ||
181 | return decode; | |
182 | }; | |
183 | ||
184 | // converts current search object to string to be displayed in search field | |
185 | $scope.stringSearch = function(obj) { | |
186 | var search = ""; | |
187 | ||
188 | for(var prop in obj) { | |
189 | if(obj.hasOwnProperty(prop)) { | |
190 | if(search != "") { | |
191 | search += " "; | |
192 | } | |
193 | if(prop == "$") { | |
194 | search += obj[prop]; | |
195 | } else { | |
196 | search += prop + ":" + obj[prop]; | |
197 | } | |
198 | } | |
199 | } | |
200 | ||
201 | return search; | |
202 | 107 | }; |
203 | 108 | |
204 | 109 | $scope.remove = function(ids) { |
382 | 287 | |
383 | 288 | $scope.selectedHosts = function() { |
384 | 289 | selected = []; |
385 | ||
386 | tmp_hosts = filter($scope.hosts); | |
387 | tmp_hosts.forEach(function(host) { | |
290 | $scope.hosts.forEach(function(host) { | |
388 | 291 | if(host.selected === true) { |
389 | 292 | selected.push(host); |
390 | 293 | } |
394 | 297 | |
395 | 298 | $scope.checkAll = function() { |
396 | 299 | $scope.selectall_hosts = !$scope.selectall_hosts; |
397 | ||
398 | tmp_hosts = filter($scope.hosts); | |
399 | tmp_hosts.forEach(function(host) { | |
300 | $scope.hosts.forEach(function(host) { | |
400 | 301 | host.selected = $scope.selectall_hosts; |
401 | 302 | }); |
402 | 303 | }; |
421 | 322 | } |
422 | 323 | } |
423 | 324 | |
424 | filter = function(data) { | |
425 | var tmp_data = $filter('orderBy')(data, $scope.sortField, $scope.reverse); | |
426 | tmp_data = $filter('filter')(tmp_data, $scope.expression); | |
427 | tmp_data = tmp_data.splice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize); | |
428 | ||
429 | return tmp_data; | |
430 | }; | |
431 | ||
432 | 325 | // paging |
433 | 326 | $scope.prevPage = function() { |
434 | 327 | $scope.currentPage -= 1; |
107 | 107 | <input type="number" min="1" max="{{pageCount()}}" class="form-control" ng-model="newCurrentPage" placeholder="Go to page"/> |
108 | 108 | </div> |
109 | 109 | <button class="btn btn-default" ng-click="go()">GO</button> |
110 | <input type="number" min="0" class="form-control vuln_per_page" ng-model=newPageSize placeholder="Number page" /> | |
110 | <input type="number" min="1" class="form-control vuln_per_page" ng-model=newPageSize placeholder="Number page" /> | |
111 | 111 | </form> |
112 | 112 | </div> |
113 | 113 | </div><!-- .showPagination --> |
49 | 49 | save: function(ws, interfaceData) { |
50 | 50 | var self = this; |
51 | 51 | bulk = {docs:[self,interfaceData]}; |
52 | return $http.post(BASEURL + ws + "/_bulk_docs", JSON.stringify(bulk)).success(function(data){ | |
53 | if(data.id == self._id){ | |
54 | self._rev = data.rev; | |
55 | } else { | |
56 | interfaceData._rev = data.rev; | |
57 | } | |
52 | return $http.put(BASEURL + ws + "/" + self._id, JSON.stringify(self)).success(function(host_data){ | |
53 | $http.put(BASEURL + ws + "/" + interfaceData._id, JSON.stringify(interfaceData)).success(function(interface_data) { | |
54 | self._rev = host_data.rev; | |
55 | interfaceData._rev = interface_data.rev; | |
56 | }); | |
58 | 57 | }); |
59 | 58 | } |
60 | 59 | } |
53 | 53 | <i class="fa fa-certificate host"></i> |
54 | 54 | </a> |
55 | 55 | </li> |
56 | <li> | |
57 | <a href="#/help" class="executive-report" style="color: #ffffff !important" uib-tooltip="Help" tooltip-placement="right"> | |
58 | <i class="fa fa-question host"></i> | |
59 | </a> | |
60 | </li> | |
56 | 61 | </ul> |
57 | 62 | </nav> |
58 | 63 | <div ng-show="isIceweasel" class="alert alert-danger alert-dismissible"> |
103 | 103 | </tr> |
104 | 104 | </thead> |
105 | 105 | <tbody> |
106 | <tr ng-repeat="service in filtered = (services | filter:expression) | orderBy:sortField:reverse | startFrom:currentPage*pageSize | limitTo:pageSize" | |
106 | <tr ng-repeat="service in filtered = (services | filter:expression) | orderBy:sortField:reverse | startFrom:(currentPage-1)*pageSize | limitTo:pageSize" | |
107 | 107 | selection-model selection-model-type="checkbox" |
108 | 108 | selection-model-mode="multiple-additive" |
109 | 109 | selection-model-selected-class="multi-selected"> |
130 | 130 | </td> |
131 | 131 | <td ng-bind="service.status || '-'"></td> |
132 | 132 | <td> |
133 | <a ng-href="#/status/ws/{{workspace}}/search/service={{service.ports}}"> | |
133 | <a ng-href="#/status/ws/{{workspace}}/search/service={{service.ports}}&target={{host.name}}"> | |
134 | 134 | <span ng-bind="service.vulns"></span> |
135 | 135 | </a> |
136 | 136 | </td> |
140 | 140 | <div class="showPagination"> |
141 | 141 | <div class="form-group"> |
142 | 142 | <ul class="pagination"> |
143 | <li><a ng-hide="currentPage <= 0" ng-click="currentPage = currentPage - 1"><span aria-hidden="true">«</span><span class="sr-only">Previous</span></a></li> | |
144 | <li><a>{{currentPage}}/{{ ((filtered.length / pageSize) | integer)}}</a></li> | |
145 | <li><a ng-hide="currentPage >= ((filtered.length / pageSize) | integer)" ng-click="currentPage = currentPage + 1"><span aria-hidden="true">»</span><span class="sr-only">Next</span></a></li> | |
143 | <li><a ng-hide="prevPageDisabled()" ng-click="prevPage()"><span aria-hidden="true">«</span><span class="sr-only">Previous</span></a></li> | |
144 | <li><a>{{currentPage}}/{{pageCount()}}</a></li> | |
145 | <li><a ng-hide="nextPageDisabled()" ng-click="nextPage()"><span aria-hidden="true">»</span><span class="sr-only">Next</span></a></li> | |
146 | 146 | </ul> |
147 | 147 | <form name="goToPage" id="goToPageStatus"> |
148 | 148 | <div class="col-md-2"> |
149 | <input type="number" min="0" max="{{ (filtered.length / pageSize) | integer }}" class="form-control" ng-model="newCurrentPage" placeholder="Go to page"/> | |
149 | <input type="number" min="0" max="{{pageCount()}}" class="form-control" ng-model="newCurrentPage" placeholder="Go to page"/> | |
150 | 150 | </div> |
151 | 151 | <button class="btn btn-default" ng-click="go()">GO</button> |
152 | <input type="number" min="0" class="form-control vuln_per_page" ng-model=newPageSize placeholder="Number page" /> | |
152 | <input type="number" min="1" class="form-control vuln_per_page" ng-model=newPageSize placeholder="Number page" /> | |
153 | 153 | </form> |
154 | 154 | </div> |
155 | 155 | </div><!-- .showPagination --> |
103 | 103 | return deferred.promise; |
104 | 104 | } |
105 | 105 | |
servicesManager.getServiceVulnCount = function(ws, services) {
    // Resolve a map of service id -> vulnerability count for every service
    // given, querying the workspace API once per service in parallel.
    // Rejects with an empty list if any request fails.
    var deferred = $q.defer();
    var requests = services.map(function(svc) {
        return $http.get(BASEURL + "_api/ws/" + ws + "/services?couchid=" + svc._id);
    });
    $q.all(requests).then(function(responses) {
        var counts = {};
        responses.forEach(function(resp) {
            var info = resp.data.services[0];
            counts[info.id] = info.vulns;
        });
        deferred.resolve(counts);
    }, function() {
        deferred.reject([]);
    });
    return deferred.promise;
}
125 | ||
106 | 126 | servicesManager.createService = function(serviceData, ws) { |
107 | 127 | var deferred = $q.defer(); |
108 | 128 | var self = this; |
5 | 5 | .controller('statusReportCtrl', |
6 | 6 | ['$scope', '$filter', '$routeParams', |
7 | 7 | '$location', '$uibModal', '$cookies', '$q', '$window', 'BASEURL', |
8 | 'SEVERITIES', 'EASEOFRESOLUTION', 'hostsManager', | |
8 | 'SEVERITIES', 'EASEOFRESOLUTION', 'hostsManager', 'commonsFact', | |
9 | 9 | 'vulnsManager', 'workspacesFact', 'csvService', 'uiGridConstants', |
10 | 10 | function($scope, $filter, $routeParams, |
11 | 11 | $location, $uibModal, $cookies, $q, $window, BASEURL, |
12 | SEVERITIES, EASEOFRESOLUTION, hostsManager, | |
12 | SEVERITIES, EASEOFRESOLUTION, hostsManager, commonsFact, | |
13 | 13 | vulnsManager, workspacesFact, csvService, uiGridConstants) { |
14 | 14 | $scope.baseurl; |
15 | 15 | $scope.columns; |
16 | 16 | $scope.easeofresolution; |
17 | $scope.expression; | |
18 | 17 | $scope.interfaces; |
19 | 18 | $scope.reverse; |
20 | 19 | $scope.severities; |
132 | 131 | // current search |
133 | 132 | $scope.search = $routeParams.search; |
134 | 133 | $scope.searchParams = ""; |
135 | $scope.expression = {}; | |
136 | 134 | if($scope.confirmed === true) { |
137 | 135 | if($scope.search !== undefined) { |
138 | 136 | $scope.search = $scope.search.concat("&confirmed=true"); |
147 | 145 | } |
148 | 146 | |
149 | 147 | if($scope.search != "" && $scope.search != undefined && $scope.search.indexOf("=") > -1) { |
150 | search_obj = $scope.decodeSearch($scope.search); | |
151 | search_exp = $scope.stringSearch(search_obj); | |
152 | $scope.searchParams = search_exp; | |
153 | searchFilter = prepareFilter(search_exp); | |
148 | searchFilter = commonsFact.parseSearchURL($scope.search); | |
149 | $scope.searchParams = commonsFact.searchFilterToExpression(searchFilter); | |
154 | 150 | } |
155 | 151 | |
156 | 152 | $scope.columns = { |
169 | 165 | "request": false, |
170 | 166 | "refs": true, |
171 | 167 | "evidence": false, |
172 | "hostnames": false, | |
168 | "hostnames": true, | |
173 | 169 | "impact": false, |
174 | 170 | "method": false, |
175 | 171 | "params": false, |
236 | 232 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/severitycolumn.html', |
237 | 233 | headerCellTemplate: header, |
238 | 234 | type: 'string', |
239 | width: '110', | |
235 | width: '70', | |
240 | 236 | visible: $scope.columns["severity"], |
241 | 237 | sortingAlgorithm: compareSeverities |
242 | 238 | }); |
245 | 241 | headerCellTemplate: header, |
246 | 242 | width: '110', |
247 | 243 | visible: $scope.columns["service"] |
244 | }); | |
245 | $scope.gridOptions.columnDefs.push({ name : 'hostnames', | |
246 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/hostnamescolumn.html', | |
247 | headerCellTemplate: header, | |
248 | minWidth: '100', | |
249 | maxWidth: '200', | |
250 | visible: $scope.columns["hostnames"] | |
248 | 251 | }); |
249 | 252 | $scope.gridOptions.columnDefs.push({ name : 'target', |
250 | 253 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/targetcolumn.html', |
304 | 307 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/evidencecolumn.html', |
305 | 308 | headerCellTemplate: header, |
306 | 309 | visible: $scope.columns["evidence"] |
307 | }); | |
308 | $scope.gridOptions.columnDefs.push({ name : 'hostnames', | |
309 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/hostnamescolumn.html', | |
310 | headerCellTemplate: header, | |
311 | visible: $scope.columns["hostnames"] | |
312 | 310 | }); |
313 | 311 | $scope.gridOptions.columnDefs.push({ name : 'impact', |
314 | 312 | cellTemplate: 'scripts/statusReport/partials/ui-grid/columns/impactcolumn.html', |
451 | 449 | }; |
452 | 450 | |
453 | 451 | $scope.csv = function() { |
454 | tmp_vulns = $filter('filter')($scope.gridOptions.data, $scope.expression); | |
455 | return csvService.generator($scope.columns, tmp_vulns, $scope.workspace); | |
456 | }; | |
457 | ||
458 | $scope.toggleFilter = function(expression) { | |
459 | if(expression["confirmed"] === undefined) { | |
460 | expression["confirmed"] = true; | |
461 | $scope.expression = expression; | |
462 | $cookies.put('confirmed', $scope.expression.confirmed); | |
463 | $scope.confirmed = true; | |
464 | loadVulns(); | |
465 | } else { | |
466 | $scope.expression = {}; | |
467 | for(key in expression) { | |
468 | if(expression.hasOwnProperty(key)) { | |
469 | if(key !== "confirmed") { | |
470 | $scope.expression[key] = expression[key]; | |
471 | } | |
472 | } | |
473 | } | |
474 | $cookies.put('confirmed', $scope.expression.confirmed); | |
475 | $scope.confirmed = false; | |
476 | loadVulns(); | |
477 | } | |
452 | deferred = $q.defer(); | |
453 | delete searchFilter.confirmed; | |
454 | if ($scope.confirmed) | |
455 | searchFilter.confirmed = true; | |
456 | vulnsManager.getVulns($scope.workspace, | |
457 | null, | |
458 | null, | |
459 | searchFilter, | |
460 | null, | |
461 | null) | |
462 | .then(function(response) { | |
463 | deferred.resolve(csvService.generator($scope.columns, response.vulnerabilities, $scope.workspace)); | |
464 | }); | |
465 | return deferred.promise; | |
466 | }; | |
467 | ||
468 | $scope.toggleFilter = function() { | |
469 | $scope.confirmed = !$scope.confirmed; | |
470 | $cookies.put('confirmed', $scope.confirmed); | |
471 | console.log($scope.confirmed); | |
472 | loadVulns(); | |
478 | 473 | }; |
479 | 474 | |
480 | 475 | showMessage = function(msg) { |
843 | 838 | }); |
844 | 839 | }; |
845 | 840 | |
// encodes search string in order to send it through URL
// e.g. "sev:high foo bar" -> "sev=high&free=foo%20bar"
$scope.encodeSearch = function(search) {
    var i = -1,
    encode = "",
    params = search.split(" "),
    chunks = {};

    params.forEach(function(chunk) {
        i = chunk.indexOf(":");
        if(i > 0) {
            // "key:value" token
            chunks[chunk.slice(0, i)] = chunk.slice(i+1);
        } else {
            // free-text token: accumulate under the "free" key
            if(!chunks.hasOwnProperty("free")) {
                chunks.free = "";
            }
            chunks.free += " ".concat(chunk);
        }
    });

    // drop the leading space added by the accumulation above
    if(chunks.hasOwnProperty("free")) {
        chunks.free = chunks.free.slice(1);
    }

    for(var prop in chunks) {
        if(chunks.hasOwnProperty(prop)) {
            // BUG FIX: the original tested `chunks.prop` (always undefined,
            // so the guard never fired); test the actual value so empty
            // values are skipped as intended.
            if(chunks[prop] != "") {
                encode += "&" + encodeURIComponent(prop) + "=" + encodeURIComponent(chunks[prop]);
            }
        }
    }
    return encode.slice(1);
};
878 | ||
// decodes search parameters to object in order to use in filter
// e.g. "sev=high&free=foo" -> { sev: "high", $: "foo" }
$scope.decodeSearch = function(search) {
    var decoded = {};

    search.split("&").forEach(function(pair) {
        var sep = pair.indexOf("=");
        decoded[decodeURIComponent(pair.slice(0, sep))] = decodeURIComponent(pair.slice(sep + 1));
    });

    // Angular's filter uses "$" as the match-any-property key.
    if(decoded.hasOwnProperty("free")) {
        decoded['$'] = decoded.free;
        delete decoded.free;
    }

    return decoded;
};
897 | ||
// converts current search object to string to be displayed in search field
$scope.stringSearch = function(obj) {
    var parts = [];

    for(var key in obj) {
        if(!obj.hasOwnProperty(key)) continue;
        if(key == "$") {
            // free-text part is shown verbatim
            parts.push(obj[key]);
        } else if(key !== "confirmed") {
            // "confirmed" is an internal flag, never shown to the user
            parts.push(key + ":" + obj[key]);
        }
    }

    return parts.join(" ").trim();
};
919 | ||
// Split "key:value free words" into {key: "value", search: "free words"}.
var prepareFilter = function(searchText) {
    var chunks = {};

    searchText.split(" ").forEach(function(token) {
        var sep = token.indexOf(":");
        if (sep > 0) {
            chunks[token.slice(0, sep)] = token.slice(sep + 1);
        } else if (chunks.hasOwnProperty("search")) {
            chunks.search += ' ' + token;
        } else {
            chunks.search = token;
        }
    });

    return chunks;
};
940 | ||
941 | 841 | // changes the URL according to search params |
942 | 842 | $scope.searchFor = function(search, params) { |
943 | 843 | // TODO: It would be nice to find a way for changing |
949 | 849 | } |
950 | 850 | |
951 | 851 | if(search && params != "" && params != undefined) { |
952 | url += "/search/" + $scope.encodeSearch(params); | |
852 | var filter = commonsFact.parseSearchExpression(params); | |
853 | var URLParams = commonsFact.searchFilterToURLParams(filter); | |
854 | url += "/search/" + URLParams; | |
953 | 855 | } |
954 | 856 | |
955 | 857 | $location.path(url); |
14 | 14 | <button id="refresh" type="button" class="btn btn-danger" title="Refresh current workspace" ng-click="location.reload()"> |
15 | 15 | <span class="glyphicon glyphicon-refresh"></span> |
16 | 16 | </button> |
17 | <button type="button" class="btn btn-danger" title="{{ confirmed === true ? 'All vulns' : 'Confirmed vulns' }}" ng-click="toggleFilter(expression)"> | |
17 | <button type="button" class="btn btn-danger" title="{{ confirmed === true ? 'All vulns' : 'Confirmed vulns' }}" ng-click="toggleFilter()"> | |
18 | 18 | <span class="glyphicon glyphicon-filter" ng-style="{ 'opacity': (confirmed === true) ? '1' : '0.7' }"></span> |
19 | 19 | </button> |
20 | 20 | <button type="button" class="btn btn-danger dropdown-toggle" data-toggle="dropdown" title="Change current workspace"> |
0 | <div ng-if="row.entity._id != undefined"><div ng-if="!col.grouping || col.grouping.groupPriority === undefined || col.grouping.groupPriority === null || ( row.groupHeader && col.grouping.groupPriority === row.treeLevel )" class="ui-grid-cell-contents white-space">{{row.entity.metadata.create_time * 1000 | date:"MM/dd/yyyy"}}</div></div><div ng-if="row.groupHeader && col.grouping.groupPriority !== undefined" class="ui-grid-cell-contents white-space">{{row.entity.metadata.create_time.split(" ")[0] * 1000 | date:"MM/dd/yyyy"}}</div>⏎ | |
0 | <div ng-if="row.entity._id != undefined"> | |
1 | <div ng-if="!col.grouping || col.grouping.groupPriority === undefined || col.grouping.groupPriority === null || ( row.groupHeader && col.grouping.groupPriority === row.treeLevel )" class="ui-grid-cell-contents white-space"><span uib-tooltip="{{row.entity.metadata.create_time * 1000 | amTimeAgo}}">{{row.entity.metadata.create_time * 1000 | date:"MM/dd/yyyy"}}</span> | |
2 | </div> | |
3 | </div> | |
4 | <div ng-if="row.groupHeader && col.grouping.groupPriority !== undefined" class="ui-grid-cell-contents white-space">{{row.entity.metadata.create_time.split(" ")[0] * 1000 | date:"MM/dd/yyyy"}}</div> |
0 | 0 | <div ng-if="row.entity._id != undefined"> |
1 | 1 | <div class="ui-grid-cell-contents center"> |
2 | <p ng-repeat="hostname in COL_FIELD"><a href='//www.shodan.io/search?query={{hostname}}' class="pos-middle crop-text" uib-tooltip="Search in Shodan" target="_blank"><img src="images/shodan.png" height="15px" width="15px" style="margin-left:5px"/></a><a href="{{grid.appScope.hash}}/search/hostnames={{hostname}}">{{hostname}}</a></p> | |
2 | <p ng-repeat="hostname in COL_FIELD"><a href="{{grid.appScope.hash}}/search/hostnames={{hostname}}">{{hostname}}</a></p> | |
3 | 3 | </div> |
4 | 4 | </div> |
5 | 5 | <div ng-if="row.groupHeader && col.grouping.groupPriority !== undefined" class="ui-grid-cell-contents white-space">{{COL_FIELD.split('(')[0] !== ' ' ? COL_FIELD : 'EMPTY' + COL_FIELD}}</div> |
0 | <div ng-if='row.entity._id != undefined' class='ui-grid-cell-contents row-tooltip'><a ng-href="{{grid.appScope.hash}}/search/target={{row.entity.target}}">{{COL_FIELD CUSTOM_FILTERS}}</a><a ng-href="//www.shodan.io/search?query={{row.entity.target}}" uib-tooltip="Search in Shodan" target="_blank"><img ng-src="images/shodan.png" height="15px" width="15px" style='margin-left:5px'/></a></div><div ng-if="row.groupHeader && col.grouping.groupPriority !== undefined" class="ui-grid-cell-contents white-space">{{COL_FIELD CUSTOM_FILTERS}}</div> | |
0 | <div ng-if='row.entity._id != undefined' class='ui-grid-cell-contents row-tooltip'><a ng-href="{{grid.appScope.hash}}/search/target={{row.entity.target}}">{{COL_FIELD CUSTOM_FILTERS}}</a></div><div ng-if="row.groupHeader && col.grouping.groupPriority !== undefined" class="ui-grid-cell-contents white-space">{{COL_FIELD CUSTOM_FILTERS}}</div> |
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | import unittest | |
10 | import sys | |
11 | import os | |
12 | sys.path.append(os.path.abspath(os.getcwd())) | |
13 | ||
14 | from model.controller import ModelController | |
15 | from auth.manager import SecurityManager | |
16 | from managers.mapper_manager import MapperManager | |
17 | from managers.all import PluginManager | |
18 | import apis.rest.api as restapi | |
19 | from apis.rest.client import ModelRestApiClient | |
20 | ||
21 | from model.hosts import Host, Interface, Service | |
22 | ||
23 | from mockito import mock, when | |
24 | ||
25 | from config.configuration import getInstanceConfiguration | |
26 | CONF = getInstanceConfiguration() | |
27 | ||
28 | ||
class CreationModelObjectsApiRest(unittest.TestCase):
    """
    This suite tests the interaction between the rest api server,
    the model controller, the factory and the rest api client.
    The client is going to be used by the plugins (through PluginBase).
    """

    def setUp(self):
        # Mock out everything around the ModelController so only the
        # REST round-trip is exercised.
        self._security_manager = mock(SecurityManager())
        self._mappers_manager = mock(MapperManager())
        self._plugin_manager = mock(PluginManager)

        self._model_controller = ModelController(
            self._security_manager,
            self._mappers_manager)

        restapi.startAPIs(
            self._plugin_manager, self._model_controller,
            self._mappers_manager)

        # TODO: load host/port from the instance configuration file
        # instead of hard-coding them here.
        self.client = ModelRestApiClient("127.0.0.1", 9977)

    def tearDown(self):
        restapi.stopAPIs()

    @staticmethod
    def _default_interface_args():
        """Return the positional args for an all-zero default interface.

        Shared by the interface and service creation tests, which
        previously duplicated this whole field list inline.
        """
        return [
            "",                                          # name
            "00:00:00:00:00:00",                         # mac
            "0.0.0.0",                                   # ipv4 address
            "0.0.0.0",                                   # ipv4 mask
            "0.0.0.0",                                   # ipv4 gateway
            [],                                          # ipv4 dns
            "0000:0000:0000:0000:0000:0000:0000:0000",   # ipv6 address
            "00",                                        # ipv6 prefix
            "0000:0000:0000:0000:0000:0000:0000:0000",   # ipv6 gateway
            [],                                          # ipv6 dns
            "",                                          # network segment
            [],                                          # hostname resolution
        ]

    def test_host_creation(self):
        """A host created via the REST client has the same id as a local one."""
        name = "pepito"
        os_name = "Windows"  # renamed: 'os' shadowed the imported os module
        host_id = self.client.createHost(name, os_name)
        host = Host(name, os_name)

        self.assertEqual(host.getID(), host_id, "ids should be the same")

    def test_interface_creation(self):
        """An interface created via the REST client matches a local one."""
        host = Host("pepito", "Windows")

        when(self._model_controller).find(host.getID()).thenReturn(host)

        args = self._default_interface_args()
        interface_id = self.client.createInterface(*(args + [host.getID()]))

        interface = Interface(*args, parent_id=host.getID())

        self.assertNotEqual(
            interface_id, None, "interface created shouldn't be None")

        self.assertEqual(
            interface.getID(), interface_id, "ids should be the same")

    def test_service_creation(self):
        """A service created via the REST client matches a local one."""
        host = Host("pepito", "Windows")

        args = self._default_interface_args()
        interface = Interface(*args, parent_id=host.getID())

        when(self._model_controller).find(
            interface.getID()).thenReturn(interface)

        service_id = self.client.createService(
            "srv", "tcp", [], "running", "unknown", "",
            interface.getID())

        service = Service("srv", "tcp", [], "running", "unknown", "",
                          parent_id=interface.getID())

        self.assertNotEqual(
            service_id, None, "service created shouldn't be None")

        self.assertEqual(
            service.getID(), service_id, "ids should be the same")
143 | ||
144 | ||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | import unittest | |
8 | import sys | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from mockito import mock, verify, when, any | |
12 | from model.hosts import Host, Interface, Service | |
13 | from model.common import ModelObjectVuln, ModelObjectVulnWeb, ModelObjectNote, ModelObjectCred | |
14 | from urlparse import urlparse | |
15 | from config.configuration import getInstanceConfiguration | |
16 | CONF = getInstanceConfiguration() | |
17 | from utils.logs import getLogger | |
18 | from couchdbkit import Server, ChangesStream, Database | |
19 | from controllers.change import ChangeController | |
20 | from persistence.persistence_managers import CouchDbConnector, CouchDbManager, FileSystemManager, DBTYPE, DbManager | |
21 | ||
22 | ||
class ModelChanges(unittest.TestCase):
    """Thread-lifecycle tests for ChangeController against both backends.

    NOTE(review): testThreadStops needs a reachable CouchDB instance at
    CONF.getCouchURI(), and testThreadStopsInFS writes to the local
    filesystem store -- these are integration tests, not unit tests.
    Neither test appears to clean up the 'testWkspc' database it creates.
    """

    def testThreadStops(self):
        # watch() must start the CouchDB changes thread; unwatch() must
        # stop it again.
        changes_controller = ChangeController()
        mapper = mock()
        uri = CONF.getCouchURI()
        url = urlparse(uri)
        # Debug-log the credentials embedded in the couch URI.
        getLogger(self).debug(
            "Setting user,pass %s %s" % (url.username, url.password))
        self.cdbManager = CouchDbManager(uri=uri)

        dbCouchController = self.cdbManager.createDb('testWkspc')
        dbCouchController.saveDocument({'_id':'testwkspc',
                                        'type':'workspace' })

        changes_controller.watch(mapper, dbCouchController)
        self.assertTrue(changes_controller.isAlive())

        changes_controller.unwatch()
        self.assertFalse(changes_controller.isAlive())

    def testThreadStopsInFS(self):
        # Same lifecycle check against the filesystem persistence backend.
        # NOTE(review): _loadDbs is patched on the DbManager *class*, so
        # this monkey-patch leaks into any DbManager created later in the
        # same process.
        dbManagerClass = DbManager
        dbManagerClass._loadDbs = lambda x: None
        dbManager = DbManager()
        changes_controller = ChangeController()
        mapper = mock()
        fsController = dbManager.createDb('testWkspc', DBTYPE.FS)

        fsController.saveDocument({'_id':'testwkspc',
                                   'type':'workspace' })

        changes_controller.watch(mapper, fsController)
        self.assertTrue(changes_controller.isAlive())

        changes_controller.unwatch()
        self.assertFalse(changes_controller.isAlive())
59 | ||
60 | ||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
63 |
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | import unittest | |
10 | import sys | |
11 | import os | |
12 | sys.path.append(os.path.abspath(os.getcwd())) | |
13 | import random | |
14 | ||
15 | from mockito import mock, when | |
16 | import model.guiapi | |
17 | import time | |
18 | from model import api | |
19 | from gui.notifier import NotificationCenter | |
20 | import plugins.core as plcore | |
21 | import model.controller as controller | |
22 | from persistence.change import ChangeModelObject, ChangeCmd, Change | |
23 | ||
24 | from config.configuration import getInstanceConfiguration | |
25 | CONF = getInstanceConfiguration() | |
26 | ||
27 | ||
def new_random_workspace_name():
    """Return a random lowercase workspace name prefixed with 'aworkspace'.

    The suffix is 10 distinct letters sampled from 'A'-'Y', lowercased.
    """
    letters = [chr(code) for code in range(65, 90)]
    suffix = "".join(random.sample(letters, 10))
    return ("aworkspace" + suffix).lower()
31 | ||
32 | ||
class ChangesTestSuite(unittest.TestCase):
    """Integration tests for change notifications coming from CouchDB.

    Documents are saved straight into the active workspace database and the
    tests assert that the (patched) notification center received the
    matching Change objects: additions, deletions, modifications and
    command start/finish events.

    NOTE(review): WorkspaceManager, WorkspaceOnCouch and WorkspacePersister
    are referenced below but never imported in this module -- confirm an
    intended `from model.workspace import ...` line was not lost.
    NOTE(review): every test uses self.cdm, but the creation of cls.cdm in
    setUpClass is commented out, so these tests will fail with
    AttributeError as written.
    """

    @classmethod
    def setUpClass(cls):
        cls.model_controller = mock(controller.ModelController)
        cls.workspace_manager = mock()
        api.setUpAPIs(cls.model_controller, cls.workspace_manager)
        cls.couch_uri = CONF.getCouchURI()
        # cls.cdm = CouchdbManager(uri=cls.couch_uri)

        # Notification center stub that simply records every change received
        # so the tests can inspect them.
        class NotificationTest(NotificationCenter):
            def __init__(self, ui):
                self.changes = []

            def changeFromInstance(self, change):
                self.changes.append(change)

        cls.notifier = NotificationTest(None)
        model.guiapi.notification_center = cls.notifier
        cls._couchdb_workspaces = []
        cls.wm = WorkspaceManager(cls.model_controller,
                                  mock(plcore.PluginController))
        # Use a throwaway random workspace name so concurrent/aborted runs
        # don't collide in CouchDB.
        cls.workspace = cls.wm.createWorkspace(new_random_workspace_name(),
                                               workspaceClass=WorkspaceOnCouch)
        when(cls.workspace).load().thenReturn(True)
        cls._couchdb_workspaces.append(cls.workspace.name)
        cls.wm.setActiveWorkspace(cls.workspace)

    def setUp(self):
        # Each test starts from an empty change log.
        self.notifier.changes = []

    @classmethod
    def tearDownClass(cls):
        WorkspacePersister.stopThreads()
        # cls.cleanCouchDatabases()

    # @classmethod
    # def cleanCouchDatabases(cls):
    #     try:
    #         for wname in cls._couchdb_workspaces:
    #             cls.cdm.removeWorkspace(wname)
    #     except Exception as e:
    #         print(e)

    def test_model_objects_added(self):
        """Saving model-object docs must produce one addition change each."""
        d1 = {
            'type': 'Service'
        }
        d2 = {
            'type': 'Host'
        }
        d3 = {
            'type': 'Interface'
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)
        self.cdm._getDb(self.workspace.name).save_doc(d2, use_uuids=True,
                                                      force_update=True)
        self.cdm._getDb(self.workspace.name).save_doc(d3, use_uuids=True,
                                                      force_update=True)

        # presumably gives the asynchronous change watcher time to deliver
        # the notifications -- TODO confirm; a fixed sleep makes this flaky.
        time.sleep(1)

        self.assertEquals(len(self.notifier.changes), 3,
                          "Some changes weren't added")
        for change in self.notifier.changes:
            self.assertIsInstance(change, ChangeModelObject,
                                  "It should be a ChangeModelObject")
            self.assertNotIsInstance(change, ChangeCmd,
                                     "It shouldn't be a ChangeCmd")
            self.assertEquals(change.getAction(), Change.MODEL_OBJECT_ADDED,
                              "Change should be an addition")

    def test_model_objects_delete(self):
        """Deleting a previously saved doc must produce a removal change."""
        d1 = {
            '_id': '1',
            'type': 'Host',
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)

        # wait for the addition notification before deleting
        time.sleep(1)

        self.assertEquals(len(self.notifier.changes), 1,
                          "Some changes weren't added")

        self.assertEquals(self.notifier.changes[0].getAction(),
                          Change.MODEL_OBJECT_ADDED,
                          "First change should be an addition")

        self.cdm._getDb(self.workspace.name).delete_doc(d1['_id'])
        time.sleep(1)

        self.assertEquals(self.notifier.changes[1].getAction(),
                          Change.MODEL_OBJECT_DELETED,
                          "Second change should be a Removal")

    def test_model_objects_modified(self):
        """Re-saving a doc with new fields must produce a modification."""
        d1 = {
            '_id': '1',
            'type': 'Host',
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)
        # same _id, extra field -> this save is an update, not an addition
        d1 = {
            '_id': '1',
            'type': 'Host',
            'foo': 'bar'
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)

        time.sleep(1)

        self.assertEquals(len(self.notifier.changes), 2,
                          "Some changes weren't added")
        self.assertEquals(self.notifier.changes[0].getAction(),
                          Change.MODEL_OBJECT_ADDED,
                          "First change should be an addition")
        self.assertEquals(self.notifier.changes[1].getAction(),
                          Change.MODEL_OBJECT_MODIFIED,
                          "Second change should be a modification")

    def test_cmd_executed(self):
        """A CommandRunInformation doc must produce a command-executed change."""
        d1 = {
            'command': 'nmap',
            'params': '-A -T4 127.0.0.1',
            'type': 'CommandRunInformation',
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)

        time.sleep(1)

        self.assertEquals(len(self.notifier.changes), 1,
                          "The change wasn't added")
        change = self.notifier.changes[0]
        self.assertNotIsInstance(change, ChangeModelObject,
                                 "It shouldn't be a ChangeModelObject")
        self.assertIsInstance(change, ChangeCmd,
                              "It should be a ChangeCmd")
        self.assertEquals(change.getAction(), Change.CMD_EXECUTED,
                          "Change should be an executed command")

    def test_cmd_finished(self):
        """A command doc gaining a 'duration' field marks the cmd finished."""
        d1 = {
            'command': 'nmap',
            'params': '-A -T4 127.0.0.1',
            'type': 'CommandRunInformation',
        }
        self.cdm._getDb(self.workspace.name).save_doc(d1, use_uuids=True,
                                                      force_update=True)
        # presumably the presence of 'duration' is what flags completion --
        # TODO confirm against the change controller.
        d2 = {
            'command': 'nmap',
            'params': '-A -T4 127.0.0.1',
            'type': 'CommandRunInformation',
            'duration': '5'
        }
        self.cdm._getDb(self.workspace.name).save_doc(d2, use_uuids=True,
                                                      force_update=True)

        time.sleep(1)

        self.assertEquals(len(self.notifier.changes), 2,
                          "Some changes weren't added")
        change = self.notifier.changes[1]
        self.assertNotIsInstance(change, ChangeModelObject,
                                 "It shouldn't be a ChangeModelObject")
        self.assertIsInstance(change, ChangeCmd,
                              "It should be a ChangeCmd")
        self.assertEquals(change.getAction(), Change.CMD_FINISHED,
                          "Change should be a finished command")
205 | ||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | import unittest | |
8 | import sys | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from mockito import mock, verify, when, any | |
12 | from model.hosts import Host, Interface, Service | |
13 | from model.common import ModelObjectVuln, ModelObjectVulnWeb, ModelObjectNote, ModelObjectCred | |
14 | ||
15 | ||
16 | class ModelObjectControllerUnitTest(unittest.TestCase): | |
17 | # TODO: Notifier goes into mapper? | |
18 | ||
19 | def testAddHostGetsMapperDispatchSaveSYNC(self): | |
20 | host = Host('coco') | |
21 | ||
22 | mappersManager = self.createMapperMock() | |
23 | objectMapper = mock() | |
24 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
25 | when(objectMapper).save(host).thenReturn(True) | |
26 | ||
27 | model_controller = controller.ModelController(mock(), mappersManager) | |
28 | ||
29 | model_controller.addHostSYNC(host) | |
30 | verify(mappersManager).getMapper(host.class_signature) | |
31 | verify(objectMapper).save(host) | |
32 | ||
33 | def testAddHostGetsMapperDispatchSaveASYNC(self): | |
34 | host = Host('coco') | |
35 | ||
36 | mappersManager = self.createMapperMock() | |
37 | objectMapper = mock() | |
38 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
39 | when(objectMapper).save(host).thenReturn(True) | |
40 | ||
41 | model_controller = controller.ModelController(mock(), mappersManager) | |
42 | ||
43 | model_controller.addHostASYNC(host) | |
44 | model_controller.processAllPendingActions() | |
45 | ||
46 | verify(mappersManager).getMapper(host.class_signature) | |
47 | verify(objectMapper).save(host) | |
48 | ||
49 | def testAddInterfaceGetsMapperDispatchSaveSYNC(self): | |
50 | host = Host('coco') | |
51 | interface = Interface("int_mock0") | |
52 | ||
53 | mappersManager = self.createMapperMock() | |
54 | objectMapper = mock() | |
55 | when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper) | |
56 | when(objectMapper).save(interface).thenReturn(True) | |
57 | ||
58 | model_controller = controller.ModelController(mock(), mappersManager) | |
59 | ||
60 | model_controller.addInterfaceSYNC(host.getID(), interface) | |
61 | verify(mappersManager).getMapper(interface.class_signature) | |
62 | verify(objectMapper).save(interface) | |
63 | ||
64 | def testAddInterfaceGetsMapperDispatchSaveASYNC(self): | |
65 | host = Host('coco') | |
66 | interface = Interface("int_mock0") | |
67 | ||
68 | mappersManager = self.createMapperMock() | |
69 | objectMapper = mock() | |
70 | when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper) | |
71 | when(objectMapper).save(interface).thenReturn(True) | |
72 | ||
73 | model_controller = controller.ModelController(mock(), mappersManager) | |
74 | ||
75 | model_controller.addInterfaceASYNC(host.getID(), interface) | |
76 | model_controller.processAllPendingActions() | |
77 | ||
78 | verify(mappersManager).getMapper(interface.class_signature) | |
79 | verify(objectMapper).save(interface) | |
80 | ||
81 | def testAddObjectSavesChildInParent(self): | |
82 | host = Host('coco') | |
83 | interface = Interface("int_mock0") | |
84 | ||
85 | mappersManager = self.createMapperMock() | |
86 | objectMapper = mock() | |
87 | ||
88 | when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper) | |
89 | when(objectMapper).save(interface).thenReturn(True) | |
90 | when(mappersManager).find(host.getID()).thenReturn(host) | |
91 | ||
92 | model_controller = controller.ModelController(mock(), mappersManager) | |
93 | ||
94 | model_controller.addInterfaceSYNC(host.getID(), interface) | |
95 | verify(mappersManager).getMapper(interface.class_signature) | |
96 | verify(objectMapper).save(interface) | |
97 | ||
98 | self.assertEquals(interface, host.findChild(interface.getID()), | |
99 | "Orphan child, what happen papi?") | |
100 | ||
101 | def testAddServiceGetsMapperDispatchSaveSYNC(self): | |
102 | interface = Interface("int_mock0") | |
103 | service = Service("servi") | |
104 | ||
105 | mappersManager = self.createMapperMock() | |
106 | objectMapper = mock() | |
107 | when(mappersManager).getMapper(service.class_signature).thenReturn(objectMapper) | |
108 | when(objectMapper).save(service).thenReturn(True) | |
109 | ||
110 | model_controller = controller.ModelController(mock(), mappersManager) | |
111 | ||
112 | model_controller.addServiceToInterfaceSYNC(None, interface.getID(), service) | |
113 | ||
114 | verify(mappersManager).getMapper(service.class_signature) | |
115 | verify(objectMapper).save(service) | |
116 | ||
117 | def testAddServiceGetsMapperDispatchSaveASYNC(self): | |
118 | interface = Interface("int_mock0") | |
119 | service = Service("servi") | |
120 | ||
121 | mappersManager = self.createMapperMock() | |
122 | objectMapper = mock() | |
123 | when(mappersManager).getMapper(service.class_signature).thenReturn(objectMapper) | |
124 | when(objectMapper).save(service).thenReturn(True) | |
125 | ||
126 | model_controller = controller.ModelController(mock(), mappersManager) | |
127 | ||
128 | model_controller.addServiceToInterfaceASYNC(None, interface.getID(), service) | |
129 | model_controller.processAllPendingActions() | |
130 | ||
131 | verify(mappersManager).getMapper(service.class_signature) | |
132 | verify(objectMapper).save(service) | |
133 | ||
134 | def testAddVulnToServiceGetsMapperDispatchSaveSYNC(self): | |
135 | service = Service("servi") | |
136 | vuln = ModelObjectVuln("a_vuln") | |
137 | ||
138 | mappersManager = self.createMapperMock() | |
139 | objectMapper = mock() | |
140 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
141 | when(objectMapper).save(vuln).thenReturn(True) | |
142 | ||
143 | model_controller = controller.ModelController(mock(), mappersManager) | |
144 | ||
145 | model_controller.addVulnToServiceSYNC(None, service.getID(), vuln) | |
146 | ||
147 | verify(mappersManager).getMapper(vuln.class_signature) | |
148 | verify(objectMapper).save(vuln) | |
149 | ||
150 | def testAddVulnToServiceGetsMapperDispatchSaveASYNC(self): | |
151 | service = Service("servi") | |
152 | vuln = ModelObjectVuln("a_vuln") | |
153 | ||
154 | mappersManager = self.createMapperMock() | |
155 | objectMapper = mock() | |
156 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
157 | when(objectMapper).save(vuln).thenReturn(True) | |
158 | ||
159 | model_controller = controller.ModelController(mock(), mappersManager) | |
160 | ||
161 | model_controller.addVulnToServiceASYNC(None, service.getID(), vuln) | |
162 | model_controller.processAllPendingActions() | |
163 | ||
164 | verify(mappersManager).getMapper(vuln.class_signature) | |
165 | verify(objectMapper).save(vuln) | |
166 | ||
167 | def testAddVulnToInterfaceGetsMapperDispatchSaveSYNC(self): | |
168 | interface = Interface("int0") | |
169 | vuln = ModelObjectVuln("a_vuln") | |
170 | ||
171 | mappersManager = self.createMapperMock() | |
172 | objectMapper = mock() | |
173 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
174 | when(objectMapper).save(vuln).thenReturn(True) | |
175 | ||
176 | model_controller = controller.ModelController(mock(), mappersManager) | |
177 | ||
178 | model_controller.addVulnToInterfaceSYNC(None, interface.getID(), vuln) | |
179 | ||
180 | verify(mappersManager).getMapper(vuln.class_signature) | |
181 | verify(objectMapper).save(vuln) | |
182 | ||
183 | def testAddVulnToInterfaceGetsMapperDispatchSaveASYNC(self): | |
184 | interface = Interface("int0") | |
185 | vuln = ModelObjectVuln("a_vuln") | |
186 | ||
187 | mappersManager = self.createMapperMock() | |
188 | objectMapper = mock() | |
189 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
190 | when(objectMapper).save(vuln).thenReturn(True) | |
191 | ||
192 | model_controller = controller.ModelController(mock(), mappersManager) | |
193 | ||
194 | model_controller.addVulnToInterfaceASYNC(None, interface.getID(), vuln) | |
195 | model_controller.processAllPendingActions() | |
196 | ||
197 | verify(mappersManager).getMapper(vuln.class_signature) | |
198 | verify(objectMapper).save(vuln) | |
199 | ||
200 | def testAddVulnToHostGetsMapperDispatchSaveSYNC(self): | |
201 | host = Host("pepito") | |
202 | vuln = ModelObjectVuln("a_vuln") | |
203 | ||
204 | mappersManager = self.createMapperMock() | |
205 | objectMapper = mock() | |
206 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
207 | when(objectMapper).save(vuln).thenReturn(True) | |
208 | ||
209 | model_controller = controller.ModelController(mock(), mappersManager) | |
210 | ||
211 | model_controller.addVulnToHostSYNC(host.getID(), vuln) | |
212 | ||
213 | verify(mappersManager).getMapper(vuln.class_signature) | |
214 | verify(objectMapper).save(vuln) | |
215 | ||
216 | def testAddVulnToHostGetsMapperDispatchSaveASYNC(self): | |
217 | host = Host("pepito") | |
218 | vuln = ModelObjectVuln("a_vuln") | |
219 | ||
220 | mappersManager = self.createMapperMock() | |
221 | objectMapper = mock() | |
222 | when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper) | |
223 | when(objectMapper).save(vuln).thenReturn(True) | |
224 | ||
225 | model_controller = controller.ModelController(mock(), mappersManager) | |
226 | ||
227 | model_controller.addVulnToHostASYNC(host.getID(), vuln) | |
228 | model_controller.processAllPendingActions() | |
229 | ||
230 | verify(mappersManager).getMapper(vuln.class_signature) | |
231 | verify(objectMapper).save(vuln) | |
232 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveSYNC(self):
        # Adding a note to a service (SYNC) must save the note via its mapper.
        # NOTE(review): a method with this exact name is redefined verbatim
        # later in this class; that later definition shadows this one, so
        # this copy never runs under unittest discovery.
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, service.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
248 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveASYNC(self):
        # Adding a note to a service (ASYNC) must save the note via its mapper.
        # NOTE(review): a method with this exact name is redefined verbatim
        # later in this class; that later definition shadows this one, so
        # this copy never runs under unittest discovery.
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceASYNC(None, service.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
265 | ||
    def testAddNoteToVulnGetsMapperDispatchSave(self):
        # Adding a note to a vuln must save the note via its mapper.
        # NOTE(review): despite the name, this calls addNoteToServiceSYNC with
        # the vuln's id — it looks like a copy-paste from the service test.
        # Presumably it should call the vuln-specific controller method;
        # confirm against the ModelController API.
        vuln = ModelObjectVuln('a vuln')
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, vuln.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
281 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveSYNC(self):
        # Adding a note to a service (SYNC) must save the note via its mapper.
        # NOTE(review): this is a verbatim duplicate of an earlier method with
        # the same name; this later definition is the one unittest actually
        # runs. The duplicate pair should be collapsed into one.
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, service.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
297 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveASYNC(self):
        # Adding a note to a service (ASYNC) must save the note via its mapper.
        # NOTE(review): this is a verbatim duplicate of an earlier method with
        # the same name; this later definition is the one unittest actually
        # runs. The duplicate pair should be collapsed into one.
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceASYNC(None, service.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
314 | ||
    def testAddNoteToInterfaceGetsMapperDispatchSaveSYNC(self):
        # NOTE(review): despite the name, this calls addNoteToServiceSYNC with
        # the interface's id (copy-paste from the service test). A later
        # method in this class reuses this exact name and calls the proper
        # addNoteToInterfaceSYNC, so this definition is shadowed and never runs.
        interface = Interface("int0")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, interface.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
330 | ||
    def testAddNoteToInterfaceGetsMapperDispatchSaveASYNC(self):
        # NOTE(review): despite the name, this calls addNoteToServiceASYNC with
        # the interface's id (copy-paste from the service test). A later
        # method in this class reuses this exact name and calls the proper
        # addNoteToInterfaceASYNC, so this definition is shadowed and never runs.
        interface = Interface("int0")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceASYNC(None, interface.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
347 | ||
348 | def testAddNoteToHostGetsMapperDispatchSaveSYNC(self): | |
349 | host = Host("pepito") | |
350 | note = ModelObjectNote("a_note") | |
351 | ||
352 | mappersManager = self.createMapperMock() | |
353 | objectMapper = mock() | |
354 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
355 | when(objectMapper).save(note).thenReturn(True) | |
356 | ||
357 | model_controller = controller.ModelController(mock(), mappersManager) | |
358 | ||
359 | model_controller.addNoteToHostSYNC(host.getID(), note) | |
360 | ||
361 | verify(mappersManager).getMapper(note.class_signature) | |
362 | verify(objectMapper).save(note) | |
363 | ||
364 | def testAddNoteToHostGetsMapperDispatchSaveASYNC(self): | |
365 | host = Host("pepito") | |
366 | note = ModelObjectNote("a_note") | |
367 | ||
368 | mappersManager = self.createMapperMock() | |
369 | objectMapper = mock() | |
370 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
371 | when(objectMapper).save(note).thenReturn(True) | |
372 | ||
373 | model_controller = controller.ModelController(mock(), mappersManager) | |
374 | ||
375 | model_controller.addNoteToHostASYNC(host.getID(), note) | |
376 | model_controller.processAllPendingActions() | |
377 | ||
378 | verify(mappersManager).getMapper(note.class_signature) | |
379 | verify(objectMapper).save(note) | |
380 | ||
381 | def testAddNoteToInterfaceGetsMapperDispatchSaveSYNC(self): | |
382 | interface = Interface("pepito") | |
383 | note = ModelObjectNote("a_note") | |
384 | ||
385 | mappersManager = self.createMapperMock() | |
386 | objectMapper = mock() | |
387 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
388 | when(objectMapper).save(note).thenReturn(True) | |
389 | ||
390 | model_controller = controller.ModelController(mock(), mappersManager) | |
391 | ||
392 | model_controller.addNoteToInterfaceSYNC(None, interface.getID(), note) | |
393 | ||
394 | verify(mappersManager).getMapper(note.class_signature) | |
395 | verify(objectMapper).save(note) | |
396 | ||
397 | def testAddNoteToInterfaceGetsMapperDispatchSaveASYNC(self): | |
398 | interface = Interface("pepito") | |
399 | note = ModelObjectNote("a_note") | |
400 | ||
401 | mappersManager = self.createMapperMock() | |
402 | objectMapper = mock() | |
403 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
404 | when(objectMapper).save(note).thenReturn(True) | |
405 | ||
406 | model_controller = controller.ModelController(mock(), mappersManager) | |
407 | ||
408 | model_controller.addNoteToInterfaceASYNC(None, interface.getID(), note) | |
409 | model_controller.processAllPendingActions() | |
410 | ||
411 | verify(mappersManager).getMapper(note.class_signature) | |
412 | verify(objectMapper).save(note) | |
413 | ||
414 | def testAddNoteToNoteGetsMapperDispatchSaveSYNC(self): | |
415 | host = Host("pepito") | |
416 | note = ModelObjectNote("a_note") | |
417 | ||
418 | mappersManager = self.createMapperMock() | |
419 | objectMapper = mock() | |
420 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
421 | when(objectMapper).save(note).thenReturn(True) | |
422 | ||
423 | model_controller = controller.ModelController(mock(), mappersManager) | |
424 | ||
425 | model_controller.addNoteToNoteSYNC(note.getID(), note) | |
426 | ||
427 | verify(mappersManager).getMapper(note.class_signature) | |
428 | verify(objectMapper).save(note) | |
429 | ||
430 | def testAddNoteToNoteGetsMapperDispatchSaveASYNC(self): | |
431 | host = Host("pepito") | |
432 | note = ModelObjectNote("a_note") | |
433 | ||
434 | mappersManager = self.createMapperMock() | |
435 | objectMapper = mock() | |
436 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
437 | when(objectMapper).save(note).thenReturn(True) | |
438 | ||
439 | model_controller = controller.ModelController(mock(), mappersManager) | |
440 | ||
441 | model_controller.addNoteToNoteASYNC(None, None, note.getID(), note) | |
442 | model_controller.processAllPendingActions() | |
443 | ||
444 | verify(mappersManager).getMapper(note.class_signature) | |
445 | verify(objectMapper).save(note) | |
446 | ||
447 | def testAddSavesObjectNameInTrie(self): | |
448 | host = Host('coco') | |
449 | ||
450 | mappersManager = self.createMapperMock() | |
451 | objectMapper = mock() | |
452 | triemock = mock() | |
453 | ||
454 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
455 | when(objectMapper).save(host).thenReturn(True) | |
456 | when(triemock).addWord(host.getName()).thenReturn(True) | |
457 | ||
458 | model_controller = controller.ModelController(mock(), mappersManager) | |
459 | model_controller.treeWordsTries = triemock | |
460 | ||
461 | model_controller.addHostSYNC(host) | |
462 | ||
463 | verify(mappersManager).getMapper(host.class_signature) | |
464 | verify(objectMapper).save(host) | |
465 | verify(triemock).addWord(host.getName()) | |
466 | ||
467 | def testAddNoteToModelObjectSYNC(self): | |
468 | host = Host("pepito") | |
469 | note = ModelObjectNote("a_note") | |
470 | ||
471 | mappersManager = self.createMapperMock() | |
472 | objectMapper = mock() | |
473 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
474 | when(objectMapper).save(note).thenReturn(True) | |
475 | ||
476 | model_controller = controller.ModelController(mock(), mappersManager) | |
477 | ||
478 | model_controller.addNoteSYNC(host.getID(), note) | |
479 | ||
480 | verify(mappersManager).getMapper(note.class_signature) | |
481 | verify(objectMapper).save(note) | |
482 | ||
483 | def createMapperMock(self): | |
484 | map_mock = mock() | |
485 | when(map_mock).find(any()).thenReturn(mock()) | |
486 | when(map_mock).find(None).thenReturn(None) | |
487 | return map_mock | |
488 | ||
489 | def testAddCredGetsMapperDispatchSaveSYNC(self): | |
490 | host = Host("pepito") | |
491 | cred = ModelObjectCred("usr", "pass") | |
492 | ||
493 | mappersManager = self.createMapperMock() | |
494 | objectMapper = mock() | |
495 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
496 | when(objectMapper).save(cred).thenReturn(True) | |
497 | ||
498 | model_controller = controller.ModelController(mock(), mappersManager) | |
499 | ||
500 | model_controller.addCredSYNC(cred.getID(), cred) | |
501 | ||
502 | verify(mappersManager).getMapper(cred.class_signature) | |
503 | verify(objectMapper).save(cred) | |
504 | ||
505 | ||
506 | def testAddCredToServiceGetsMapperDispatchSaveSYNC(self): | |
507 | service = Service("pepito") | |
508 | cred = ModelObjectCred("usr", "pass") | |
509 | ||
510 | mappersManager = self.createMapperMock() | |
511 | objectMapper = mock() | |
512 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
513 | when(objectMapper).save(cred).thenReturn(True) | |
514 | ||
515 | model_controller = controller.ModelController(mock(), mappersManager) | |
516 | ||
517 | model_controller.addCredToServiceSYNC(None, cred.getID(), cred) | |
518 | ||
519 | verify(mappersManager).getMapper(cred.class_signature) | |
520 | verify(objectMapper).save(cred) | |
521 | ||
522 | def testAddCredToServiceGetsMapperDispatchSaveASYNC(self): | |
523 | service = Service("pepito") | |
524 | cred = ModelObjectCred("usr", "pass") | |
525 | ||
526 | mappersManager = self.createMapperMock() | |
527 | objectMapper = mock() | |
528 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
529 | when(objectMapper).save(cred).thenReturn(True) | |
530 | ||
531 | model_controller = controller.ModelController(mock(), mappersManager) | |
532 | ||
533 | model_controller.addCredToServiceASYNC(None, cred.getID(), cred) | |
534 | model_controller.processAllPendingActions() | |
535 | ||
536 | verify(mappersManager).getMapper(cred.class_signature) | |
537 | verify(objectMapper).save(cred) | |
538 | ||
539 | def testDeleteHostObjectDispatchRemoveSYNC(self): | |
540 | host = Host("coquito") | |
541 | ||
542 | mappersManager = self.createMapperMock() | |
543 | objectMapper = mock() | |
544 | when(mappersManager).find(host.getID()).thenReturn(host) | |
545 | when(mappersManager).remove(host.getID()).thenReturn(True) | |
546 | ||
547 | model_controller = controller.ModelController(mock(), mappersManager) | |
548 | model_controller.delHostSYNC(host.getID()) | |
549 | verify(mappersManager).remove(host.getID()) | |
550 | verify(mappersManager).find(host.getID()) | |
551 | ||
552 | def testDeleteHostObjectDispatchRemoveASYNC(self): | |
553 | host = Host("coquito") | |
554 | ||
555 | mappersManager = self.createMapperMock() | |
556 | objectMapper = mock() | |
557 | when(mappersManager).find(host.getID()).thenReturn(host) | |
558 | when(mappersManager).remove(host.getID()).thenReturn(True) | |
559 | ||
560 | model_controller = controller.ModelController(mock(), mappersManager) | |
561 | model_controller.delHostASYNC(host.getID()) | |
562 | model_controller.processAllPendingActions() | |
563 | ||
564 | verify(mappersManager).remove(host.getID()) | |
565 | ||
566 | def testDeleteModelObjectRemovesChildFromParentSYNC(self): | |
567 | host = Host('coco') | |
568 | interface = Interface("int_mock0") | |
569 | self.genericDelTest(host, interface, controller.ModelController.delInterfaceSYNC) | |
570 | ||
571 | def testDeleteModelObjectRemovesChildFromParentASYNC(self): | |
572 | host = Host('coco') | |
573 | interface = Interface("int_mock0") | |
574 | self.genericDelTest(host, interface, controller.ModelController.delInterfaceASYNC, process_pending=True) | |
575 | ||
    def testInterfaceFromHostRemovedSYNC(self):
        # NOTE(review): a later method in this class reuses this exact name,
        # so this definition is shadowed and never runs under unittest
        # discovery. It also duplicates
        # testDeleteModelObjectRemovesChildFromParentSYNC above.
        host = Host('coco')
        interface = Interface("int_mock0")
        self.genericDelTest(host, interface,
                            controller.ModelController.delInterfaceSYNC)
581 | ||
582 | def testInterfaceFromHostRemovedSYNC(self): | |
583 | service = Service('coco') | |
584 | interface = Interface("int_mock0") | |
585 | interface.addChild(service) | |
586 | self.genericDelTest(interface, service, | |
587 | controller.ModelController.delServiceFromInterfaceSYNC) | |
588 | ||
589 | def testInterfaceFromHostRemovedASYNC(self): | |
590 | service = Service('coco') | |
591 | interface = Interface("int_mock0") | |
592 | interface.addChild(service) | |
593 | self.genericDelTest(interface, service, | |
594 | controller.ModelController.delServiceFromInterfaceASYNC, process_pending=True) | |
595 | ||
596 | def testDelVulnFromHostSYNC(self): | |
597 | host = Host('coco') | |
598 | vuln = ModelObjectVuln("int_mock0") | |
599 | host.addChild(vuln) | |
600 | self.genericDelTest(host, vuln, | |
601 | controller.ModelController.delVulnFromHostSYNC) | |
602 | ||
603 | def testDelVulnFromHostASYNC(self): | |
604 | host = Host('coco') | |
605 | vuln = ModelObjectVuln("int_mock0") | |
606 | host.addChild(vuln) | |
607 | self.genericDelTest(host, vuln, | |
608 | controller.ModelController.delVulnFromHostASYNC, process_pending=True) | |
609 | ||
610 | def testDelVulnFromObjectSYNC(self): | |
611 | host = Host('coco') | |
612 | vuln = ModelObjectVuln("int_mock0") | |
613 | host.addChild(vuln) | |
614 | self.genericDelTest(host, vuln, | |
615 | controller.ModelController.delVulnSYNC) | |
616 | ||
617 | def testDelVulnFromServiceSYNC(self): | |
618 | service = Service('coco') | |
619 | vuln = ModelObjectVuln("int_mock0") | |
620 | service.addChild(vuln) | |
621 | self.genericDelTest(service, vuln, | |
622 | controller.ModelController.delVulnFromServiceSYNC) | |
623 | ||
624 | def testDelVulnFromServiceASYNC(self): | |
625 | service = Service('coco') | |
626 | vuln = ModelObjectVuln("int_mock0") | |
627 | service.addChild(vuln) | |
628 | self.genericDelTest(service, vuln, | |
629 | controller.ModelController.delVulnFromServiceASYNC, process_pending=True) | |
630 | ||
631 | # def delNoteFromInterfaceSYNC(self, hostname, intname, noteId): | |
632 | ||
633 | def testDelNoteFromInterfaceSYNC(self): | |
634 | interface = Interface('coco') | |
635 | note = ModelObjectNote("int_mock0") | |
636 | interface.addChild(note) | |
637 | self.genericDelTest(interface, note, | |
638 | controller.ModelController.delNoteFromInterfaceSYNC) | |
639 | ||
640 | def testDelNoteFromInterfaceASYNC(self): | |
641 | interface = Interface('coco') | |
642 | note = ModelObjectNote("int_mock0") | |
643 | interface.addChild(note) | |
644 | self.genericDelTest(interface, note, | |
645 | controller.ModelController.delNoteFromInterfaceASYNC, process_pending=True) | |
646 | ||
647 | ||
648 | def testDelNoteFromServiceSYNC(self): | |
649 | service = Service('coco') | |
650 | note = ModelObjectNote("int_mock0") | |
651 | service.addChild(note) | |
652 | self.genericDelTest(service, note, | |
653 | controller.ModelController.delNoteFromServiceSYNC) | |
654 | ||
655 | def testDelNoteFromServiceASYNC(self): | |
656 | service = Service('coco') | |
657 | note = ModelObjectNote("int_mock0") | |
658 | service.addChild(note) | |
659 | self.genericDelTest(service, note, | |
660 | controller.ModelController.delNoteFromServiceASYNC, process_pending=True) | |
661 | ||
662 | def testDelNoteFromHostSYNC(self): | |
663 | host = Host('coco') | |
664 | note = ModelObjectNote("int_mock0") | |
665 | host.addChild(note) | |
666 | self.genericDelTest(host, note, | |
667 | controller.ModelController.delNoteFromHostSYNC) | |
668 | ||
669 | def testDelNoteFromHostSYNC(self): | |
670 | host = Host('coco') | |
671 | note = ModelObjectNote("int_mock0") | |
672 | host.addChild(note) | |
673 | self.genericDelTest(host, note, | |
674 | controller.ModelController.delNoteFromHostASYNC, process_pending=True) | |
675 | ||
676 | def testDelNoteFromModelObjectSYNC(self): | |
677 | host = Host('coco') | |
678 | note = ModelObjectNote("int_mock0") | |
679 | host.addChild(note) | |
680 | self.genericDelTest(host, note, | |
681 | controller.ModelController.delNoteSYNC) | |
682 | ||
683 | def testDelCredentialFromServiceSYNC(self): | |
684 | service = Service('coco') | |
685 | cred = ModelObjectCred("int_mock0") | |
686 | service.addChild(cred) | |
687 | self.genericDelTest(service, cred, | |
688 | controller.ModelController.delCredFromServiceSYNC) | |
689 | ||
690 | def testDelCredentialFromServiceASYNC(self): | |
691 | service = Service('coco') | |
692 | cred = ModelObjectCred("int_mock0") | |
693 | service.addChild(cred) | |
694 | self.genericDelTest(service, cred, | |
695 | controller.ModelController.delCredFromServiceASYNC, process_pending=True) | |
696 | ||
697 | def testDelCredentialFromModelObjectSYNC(self): | |
698 | service = Service('coco') | |
699 | cred = ModelObjectCred("int_mock0") | |
700 | service.addChild(cred) | |
701 | self.genericDelTest(service, cred, | |
702 | controller.ModelController.delCredSYNC) | |
703 | ||
704 | def testDelRemovesObjectFromTrie(self): | |
705 | host = Host("coquito") | |
706 | ||
707 | mappersManager = self.createMapperMock() | |
708 | objectMapper = mock() | |
709 | triemock = mock() | |
710 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
711 | when(mappersManager).find(host.getID()).thenReturn(host) | |
712 | when(triemock).addWord(host.getName()).thenReturn(True) | |
713 | ||
714 | model_controller = controller.ModelController(mock(), mappersManager) | |
715 | model_controller.treeWordsTries = triemock | |
716 | model_controller.delHostSYNC(host.getID()) | |
717 | verify(mappersManager).remove(host.getID()) | |
718 | ||
719 | verify(triemock).removeWord(host.getName()) | |
720 | ||
    def genericDelTest(self, obj1, obj2, test_method, process_pending=False):
        """Drive a controller delete method on obj2 and verify mapper calls.

        obj1 is the parent object (kept for call-site symmetry; not used
        directly here), obj2 is the object to delete, and test_method is an
        unbound ModelController delete method. When process_pending is True
        the controller's pending-action queue is drained (ASYNC variants).
        """
        mappersManager = self.createMapperMock()
        objectMapper = mock()
        triemock = mock()
        when(mappersManager).find(obj2.getID()).thenReturn(obj2)
        when(objectMapper).delObject(obj2.getID()).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)
        model_controller.treeWordsTries = triemock

        # Delete methods differ in arity; retry with an extra positional arg.
        # NOTE(review): the bare except also swallows unrelated failures from
        # the first call — narrowing to TypeError would be safer; confirm.
        try:
            test_method(model_controller, None, obj2.getID())
        except:
            test_method(model_controller, None, None, obj2.getID())

        if process_pending:
            model_controller.processAllPendingActions()

        verify(mappersManager).find(obj2.getID())
        verify(mappersManager).remove(obj2.getID())
741 | ||
742 | def testEditHostSyncGetsMapperDispatchedSYNC(self): | |
743 | host = Host("coquito") | |
744 | ||
745 | mappersManager = self.createMapperMock() | |
746 | dataMapper = mock() | |
747 | objectMapper = mock() | |
748 | triemock = mock() | |
749 | when(mappersManager).getMapper(host.class_signature).thenReturn(dataMapper) | |
750 | when(dataMapper).save(host).thenReturn(True) | |
751 | ||
752 | model_controller = controller.ModelController(mock(), mappersManager) | |
753 | ||
754 | model_controller.editHostSYNC(host, 'new_name', 'new_desc', 'new_os', True) | |
755 | ||
756 | verify(dataMapper).save(host) | |
757 | ||
758 | self.assertEquals(host.getName(), 'new_name', "Name not updated") | |
759 | self.assertEquals(host.getDescription(), 'new_desc', "Description not updated") | |
760 | self.assertEquals(host.getOS(), 'new_os', "OS not updated") | |
761 | self.assertEquals(host.isOwned(), True, "Owned status not updated") | |
762 | ||
    def testEditServiceSyncGetsMapperDispatchedSYNC(self):
        """Edit a service synchronously and check the new values stick.

        NOTE(review): this method name is redefined later in this class,
        so this earlier definition is shadowed and never collected by
        unittest -- rename one of the two copies to re-enable it.
        """
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceSYNC)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
773 | ||
    def testEditServiceSyncGetsMapperDispatchedASYNC(self):
        """Edit a service asynchronously (pending actions are processed).

        NOTE(review): this method name is redefined later in this class,
        so this earlier definition is shadowed and never collected by
        unittest -- rename one of the two copies to re-enable it.
        """
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceASYNC,
                         process_pending=True)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
785 | ||
    def testEditServiceSyncGetsMapperDispatchedSYNC(self):
        """Edit a service synchronously and check the new values stick.

        NOTE(review): this redefines an identically named test earlier in
        the class; this later definition is the one unittest actually runs.
        The duplicate should be removed or renamed.
        """
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceSYNC)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
796 | ||
    def testEditServiceSyncGetsMapperDispatchedASYNC(self):
        """Edit a service asynchronously (pending actions are processed).

        NOTE(review): this redefines an identically named test earlier in
        the class; this later definition is the one unittest actually runs.
        The duplicate should be removed or renamed.
        """
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceASYNC, process_pending=True)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
807 | ||
808 | def testEditInterfaceSyncGetsMapperDispatchedSYNC(self): | |
809 | inter = Interface("coquito") | |
810 | ||
811 | params = ('new_name', 'new_desc', 'hostname1', "FF:AA:EE:11:00", None, | |
812 | None, None, None, None, None, True) | |
813 | ||
814 | self.genericEdit(inter, params, controller.ModelController.editInterfaceSYNC) | |
815 | ||
816 | self.assertEquals(inter.getName(), 'new_name', "Name not updated") | |
817 | self.assertEquals(inter.getDescription(), 'new_desc', "Description not updated") | |
818 | self.assertEquals(inter.isOwned(), True, "Owned status not updated") | |
819 | ||
820 | ||
821 | def testEditVulnSyncGetsMapperDispatchedSYNC(self): | |
822 | vuln = ModelObjectVuln("coquito") | |
823 | ||
824 | params = ('new_name', 'new_desc', 'high', "ref1") | |
825 | ||
826 | self.genericEdit(vuln, params, controller.ModelController.editVulnSYNC) | |
827 | ||
828 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
829 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
830 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
831 | ||
832 | def testEditVulnSyncGetsMapperDispatchedASYNC(self): | |
833 | vuln = ModelObjectVuln("coquito") | |
834 | ||
835 | params = ('new_name', 'new_desc', 'high', "ref1") | |
836 | ||
837 | self.genericEdit(vuln, params, controller.ModelController.editVulnASYNC, process_pending=True) | |
838 | ||
839 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
840 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
841 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
842 | ||
843 | def testEditVulnWebSyncGetsMapperDispatchedSYNC(self): | |
844 | vuln = ModelObjectVulnWeb("coquito") | |
845 | ||
846 | params = ('new_name', 'new_desc', 'www.goole.com', 'index.html', | |
847 | "ref1", 'high', None, None, 'GET', 'pepe', 'coco' , 'caca', | |
848 | None) | |
849 | ||
850 | self.genericEdit(vuln, params, controller.ModelController.editVulnWebSYNC) | |
851 | ||
852 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
853 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
854 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
855 | ||
856 | def testEditVulnWebSyncGetsMapperDispatchedASYNC(self): | |
857 | vuln = ModelObjectVulnWeb("coquito") | |
858 | ||
859 | params = ('new_name', 'new_desc', 'www.goole.com', 'index.html', | |
860 | "ref1", 'high', None, None, 'GET', 'pepe', 'coco' , 'caca', | |
861 | None) | |
862 | ||
863 | self.genericEdit(vuln, params, controller.ModelController.editVulnWebASYNC, process_pending=True) | |
864 | ||
865 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
866 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
867 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
868 | ||
869 | def testEditNoteSyncGetsMapperDispatchedSYNC(self): | |
870 | note = ModelObjectNote("coquito") | |
871 | ||
872 | params = ('new_name', 'new_desc') | |
873 | self.genericEdit(note, params, controller.ModelController.editNoteSYNC) | |
874 | self.assertEquals(note.getName(), 'new_name', "Name not updated") | |
875 | self.assertEquals(note.text, 'new_desc', "Description not updated") | |
876 | ||
877 | def testEditNoteSyncGetsMapperDispatchedASYNC(self): | |
878 | note = ModelObjectNote("coquito") | |
879 | ||
880 | params = ('new_name', 'new_desc') | |
881 | self.genericEdit(note, params, controller.ModelController.editNoteASYNC, process_pending=True) | |
882 | self.assertEquals(note.getName(), 'new_name', "Name not updated") | |
883 | self.assertEquals(note.text, 'new_desc', "Description not updated") | |
884 | ||
885 | def testEditCredSyncGetsMapperDispatchedSYNC(self): | |
886 | cred = ModelObjectCred("coquito") | |
887 | ||
888 | params = ('new_user', 'new_pass') | |
889 | self.genericEdit(cred, params, controller.ModelController.editCredSYNC) | |
890 | self.assertEquals(cred.getUsername(), 'new_user', "Username not updated") | |
891 | self.assertEquals(cred.getPassword(), 'new_pass', "Password not updated") | |
892 | ||
893 | def testEditCredSyncGetsMapperDispatchedASYNC(self): | |
894 | cred = ModelObjectCred("coquito") | |
895 | ||
896 | params = ('new_user', 'new_pass') | |
897 | self.genericEdit(cred, params, controller.ModelController.editCredASYNC, process_pending=True) | |
898 | self.assertEquals(cred.getUsername(), 'new_user', "Username not updated") | |
899 | self.assertEquals(cred.getPassword(), 'new_pass', "Password not updated") | |
900 | ||
901 | def testGetAllHosts(self): | |
902 | hosts = [ Host("coquito%i" % i ) for i in range(10)] | |
903 | ||
904 | mappersManager = self.createMapperMock() | |
905 | objectMapper = mock() | |
906 | when(mappersManager).getMapper(Host.__name__).thenReturn(objectMapper) | |
907 | when(objectMapper).getAll().thenReturn(hosts) | |
908 | ||
909 | model_controller = controller.ModelController(mock(), mappersManager) | |
910 | hosts_obt = model_controller.getAllHosts() | |
911 | verify(objectMapper).getAll() | |
912 | verify(mappersManager).getMapper(Host.__name__) | |
913 | ||
914 | self.assertListEqual(hosts, hosts_obt) | |
915 | ||
916 | def testGetHost(self): | |
917 | host = Host("coquito") | |
918 | ||
919 | mappersManager = self.createMapperMock() | |
920 | objectMapper = mock() | |
921 | when(mappersManager).getMapper(host.__class__.__name__).thenReturn(objectMapper) | |
922 | when(objectMapper).find(host.getName()).thenReturn(host) | |
923 | ||
924 | model_controller = controller.ModelController(mock(), mappersManager) | |
925 | ||
926 | host_obt = model_controller.getHost('coquito') | |
927 | ||
928 | verify(objectMapper).find(host.getName()) | |
929 | verify(mappersManager).getMapper(host.__class__.__name__) | |
930 | ||
931 | self.assertEqual(host, host_obt) | |
932 | ||
933 | def genericEdit(self, obj, params, callback, process_pending=False): | |
934 | mappersManager = self.createMapperMock() | |
935 | dataMapper = mock() | |
936 | objId = obj.getID() | |
937 | when(mappersManager).getMapper(obj.class_signature).thenReturn(dataMapper) | |
938 | when(dataMapper).save(obj).thenReturn(True) | |
939 | when(mappersManager).find(objId).thenReturn(obj) | |
940 | when(mappersManager).save(obj).thenReturn(True) | |
941 | model_controller = controller.ModelController(mock(), mappersManager) | |
942 | callback(model_controller, obj, *params) | |
943 | if process_pending: | |
944 | model_controller.processAllPendingActions() | |
945 | ||
946 | ||
# Allow running this test module directly with the Python interpreter.
if __name__ == '__main__':
    unittest.main()
949 |
0 | #!/usr/bin/python | |
1 | ||
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | ||
9 | from unittest import TestCase | |
10 | import unittest | |
11 | import sys | |
12 | sys.path.append('.') | |
13 | import model.controller as controller | |
14 | from mockito import mock, when | |
15 | from model import api | |
16 | from plugins.core import PluginBase, PluginController | |
17 | from model.workspace import Workspace | |
18 | from model.container import ModelObjectContainer | |
19 | from managers.all import CommandManager | |
20 | from time import time | |
21 | from model.commands_history import CommandRunInformation | |
22 | ||
23 | ||
class TestPluginCreateModelObject(TestCase):
    """Tests that objects created through the plugin API reach the model.

    A minimal PluginBase subclass creates hosts, interfaces, services,
    vulns, notes and creds; each test then flushes the simulated command
    and verifies the objects ended up in the ModelController.
    """

    def setUp(self):
        self._model_controller = controller.ModelController(mock())
        # Command manager mock: persisting a command always succeeds.
        self.cm = mock(CommandManager)
        when(self.cm).saveCommand().thenReturn(True)
        self._plugin_controller = PluginController("test", {}, self.cm)

        class PluginTest(PluginBase):
            # No-op plugin used only to reach the createAndAdd* API.
            def __init__(self):
                PluginBase.__init__(self)
                self.id = "Test"
                self.name = "Test"

            def parseOutputString(self, output, debug=False):
                pass

        self.workspace = mock(Workspace)
        when(self.workspace).getContainee().thenReturn(ModelObjectContainer())
        self._model_controller.setWorkspace(self.workspace)

        self.plugin = PluginTest()
        api.setUpAPIs(self._model_controller)

        self._plugin_controller.setActivePlugin(self.plugin)
        self.cmdinfo = CommandRunInformation(
            **{'workspace': 'test',
               'itime': time(),
               'command': 'test',
               'params': 'test'})

    def _process_command(self):
        """Flush the simulated command so pending model actions are applied.

        Every test repeated these three steps; factored out so the flow
        from plugin output to model state is in one place.
        """
        self._plugin_controller.last_command_information = self.cmdinfo
        self._plugin_controller.onCommandFinished()
        self._model_controller.processAllPendingActions()

    def test_create_host(self):
        """
        Testing the creation of one host
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        self._process_command()

        self.assertTrue(h is not None, "host should have an ID")
        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have one host")
        self.assertTrue(self._model_controller.getHost(h) is not None, "The host should be in the controller")

    def test_create_same_host_two_times(self):
        """
        Testing the creation of the same host, two times.
        This simulates two plugins creating the host with the same name
        We should end up with just one host in the controller
        """
        h1 = self.plugin.createAndAddHost("pepito", "linux")
        h2 = self.plugin.createAndAddHost("pepito", "linux")
        self._process_command()

        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have just one host")
        self.assertTrue(self._model_controller.getHost(h1) == self._model_controller.getHost(h2), "The host should be the same")

    def test_create_host_with_interface(self):
        """
        Testing the creation of one host, with one interface
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        self._process_command()

        self.assertTrue(i is not None, "interface should have an ID")
        host = self._model_controller.getHost(h)
        self.assertTrue(len(host.getAllInterfaces()) == 1, "Host should have one interface")
        self.assertTrue(host.getInterface(i) is not None, "The interface should be the one we've just create")

    def test_create_interface_two_times(self):
        """
        Testing the creation of the same interface, two times.
        This simulates two plugins creating the host with the same interface
        We should end up with just one interface in that host
        """
        h1 = self.plugin.createAndAddHost("pepito", "linux")
        i1 = self.plugin.createAndAddInterface(h1, "1.2.3.4")

        h2 = self.plugin.createAndAddHost("pepito", "linux")
        i2 = self.plugin.createAndAddInterface(h2, "1.2.3.4")

        self._process_command()

        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have just one host")
        self.assertTrue(len(self._model_controller.getHost(h1).getAllInterfaces()) == 1, "The host should have just one interface")

    def test_create_host_with_interface_with_service(self):
        """
        Testing the creation of one host, with one interface and one service on that interface
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        self.assertTrue(len(interface.getAllServices()) == 1, "The interface should have just one service")
        self.assertTrue(interface.getService(s) is not None, "The service should be the one we've just create")

    def test_create_two_services_different_names_equal_port(self):
        """
        Testing the creation of two services with different names but same protocol and port
        The result should be only one service created, since both have the same id
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "test", protocol="tcp", ports=['80'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        self.assertEqual(s1, s2, "Both services should have the same id")
        self.assertTrue(len(interface.getAllServices()) == 1, "The interface should have just one service")

    def test_create_two_services_same_names_different_port(self):
        """
        Testing the creation of two services with same names but different port
        The result should be two services created, since they have different ids
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        # Fixed message: this asserts the ids differ, not that they match.
        self.assertNotEqual(s1, s2, "Both services should have different ids")
        self.assertTrue(len(interface.getAllServices()) == 2, "The interface should have two services")

    def test_create_vuln_to_service(self):
        """
        Testing the creation of a vuln to a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        v = self.plugin.createAndAddVulnToService(h, s1, "vuln1", "descripcion")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        service2 = interface.getService(s2)
        self.assertTrue(len(service1.getVulns()) == 1, "The service should have one vuln")
        self.assertTrue(service1.getVuln(v) is not None, "The vuln should be the one we've just create")
        self.assertTrue(len(service2.getVulns()) == 0, "The service shouldn't have any vuln")

    def test_create_note_to_service(self):
        """
        Testing the creation of a note on a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        n = self.plugin.createAndAddNoteToService(h, s1, "note1", "desc1")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        service2 = interface.getService(s2)
        # Fixed messages: this test deals with notes, not vulns.
        self.assertTrue(len(service1.getNotes()) == 1, "The service should have one note")
        self.assertTrue(service1.getNote(n) is not None, "The note should be the one we've just create")
        self.assertTrue(len(service2.getNotes()) == 0, "The service shouldn't have any note")

    def test_create_note_to_note_service(self):
        """
        Testing the creation of a note nested inside another service note
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        n = self.plugin.createAndAddNoteToService(h, s1, "note1", "desc1")
        n2 = self.plugin.createAndAddNoteToNote(h, s1, n, "note2", "desc2")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        note1 = service1.getNote(n)
        self.assertTrue(service1.getNote(n) is not None, "The note should be the one we've just create")
        self.assertTrue(len(note1.getNotes()) == 1, "The note should have a nested note")

    def test_create_cred_to_service(self):
        """
        Testing the creation of a credential on a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        c = self.plugin.createAndAddCredToService(h, s1, "user", "pass")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        cred = service1.getCred(c)
        # Fixed messages: this test deals with credentials, not notes.
        self.assertTrue(cred is not None, "The cred should be the one we've just create")
        self.assertTrue(len(service1.getCreds()) == 1, "The service should have one credential")
251 | ||
# Allow running this test module directly with the Python interpreter.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | ||
8 | import unittest | |
9 | import sys | |
10 | sys.path.append('.') | |
11 | import model.controller | |
12 | import managers.mapper_manager | |
13 | from mockito import mock | |
14 | from persistence.mappers.abstract_mapper import NullPersistenceManager | |
15 | from model.hosts import Host, ModelObjectVuln | |
16 | from model.diff import ModelObjectDiff | |
17 | ||
18 | import test_cases.common as test_utils | |
19 | ||
20 | ||
class DiffTests(unittest.TestCase):
    """Tests for ModelObjectDiff and automatic update resolution on vulns."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_diff_between_equal_hosts(self):
        """
        This test case creates a host and the compares it
        with another equal host using the ModelObjectDiff class
        """
        first = Host(name='host1', os='Windows')
        second = Host(name='host1', os='Windows')

        self.assertFalse(ModelObjectDiff(first, second).existDiff())

    def test_diff_between_different_hosts(self):
        """
        This test case creates a host and the compares it
        with another different host using the ModelObjectDiff class
        """
        first = Host(name='host1', os='Windows')
        second = Host(name='host1', os='Linux')

        self.assertTrue(ModelObjectDiff(first, second).existDiff())

    def test_diff_between_equal_vulns_with_different_confirmed(self):
        """Merging an unconfirmed duplicate into a confirmed vuln keeps it confirmed."""
        confirmed_vuln = ModelObjectVuln(name="vuln1",
                                         desc="description",
                                         severity="high",
                                         confirmed=True)
        unconfirmed_vuln = ModelObjectVuln(name="vuln1",
                                           desc="description", severity="high")

        self.assertFalse(confirmed_vuln.addUpdate(unconfirmed_vuln),
                         "The conflict should be resolved automatically")
        self.assertTrue(confirmed_vuln.confirmed,
                        "The vuln should be still confirmed")
65 | ||
66 | ||
class UpdatesTests(unittest.TestCase):
    """Tests for update/conflict generation when the same host is re-added.

    Uses a real MapperManager over a NullPersistenceManager, so objects
    round-trip through the mappers without an external database.
    """

    def setUp(self):
        self._mappers_manager = managers.mapper_manager.MapperManager()
        self._persistence_manager = NullPersistenceManager()
        self._mappers_manager.createMappers(self._persistence_manager)
        self.model_controller = model.controller.ModelController(
            mock(), self._mappers_manager)

    def tearDown(self):
        pass

    def _find_host_checking_os(self, host_id, expected_os, msg):
        """Fetch the host by id from the mappers, assert its OS, return it.

        Re-fetching through the mapper ensures we assert on persisted
        state rather than on the object the test already holds.
        """
        host = self._mappers_manager.find(host_id)
        self.assertEquals(host.getOS(), expected_os, msg)
        return host

    def test_add_host_and_generate_solvable_update(self):
        """
        This test case creates a host within the Model Controller context
        and then creates another with the same key elements, but different
        non-key attributes with default value to generate an automatic
        solvable update
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='windows')

        self._find_host_checking_os(host1a.getID(), 'windows',
                                    'Host\'s OS should be windows')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='unknown')

        self.assertEquals(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # 'unknown' is a default value, so the update is resolved
        # automatically and no conflict remains.
        self.assertEquals(
            len(self.model_controller.getConflicts()),
            0,
            'Update was generated')

        self._find_host_checking_os(host1a.getID(), 'windows',
                                    'Host\'s OS should still be windows')

    def test_add_host_and_generate_solvable_update_with_edition(self):
        """
        This test case creates a host with a default value in a non-key
        attribute within the Model Controller context and then creates
        another with the same key elements, but different non-key
        attributes to generate an automatic solvable update
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='unknown')

        self._find_host_checking_os(host1a.getID(), 'unknown',
                                    'Host\'s OS should be unknown')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='windows')

        self.assertEquals(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # The stored value was the default, so the edition wins without
        # leaving a conflict behind.
        self.assertEquals(
            len(self.model_controller.getConflicts()),
            0,
            'Update was generated')

        self._find_host_checking_os(host1a.getID(), 'windows',
                                    'Host\'s OS should now be windows')

    def test_add_host_and_generate_unsolvable_update(self):
        """
        This test case creates a host within the Model Controller
        context and then creates another with the same key elements,
        but different non-key attributes to generate an update to
        be resolved by the user
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='windows')

        self._find_host_checking_os(host1a.getID(), 'windows',
                                    'Host\'s OS should be windows')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='linux')

        self.assertEquals(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # 'linux' vs 'windows' cannot be resolved automatically: a
        # conflict must be queued for the user.
        self.assertEquals(
            len(self.model_controller.getConflicts()),
            1,
            'Update was not generated')

        host = self._find_host_checking_os(host1a.getID(), 'windows',
                                           'Host\'s OS should still be windows')

        self.assertEquals(
            len(host.getUpdates()),
            1,
            'The host should have a pending update')
196 | ||
197 | ||
# Allow running this test module directly with the Python interpreter.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | import unittest | |
10 | import sys | |
11 | import os | |
12 | sys.path.append(os.path.abspath(os.getcwd())) | |
13 | import model.api | |
14 | from plugins.core import PluginController | |
15 | from managers.all import PluginManager | |
16 | import re | |
17 | from mockito import mock, when | |
18 | from model.controller import ModelController | |
19 | from config.configuration import getInstanceConfiguration | |
20 | CONF = getInstanceConfiguration() | |
21 | from model.workspace import WorkspaceOnCouch, WorkspaceManager | |
22 | from auth.manager import SecurityManager | |
23 | ||
24 | ||
class PluginControllerTestSuite(unittest.TestCase):
    """Tests for PluginController command sanitation and input processing.

    NOTE(review): setUp creates a real CouchDB-backed workspace
    (WorkspaceOnCouch), so these tests appear to require a reachable
    CouchDB instance -- confirm before wiring into CI.
    """

    def setUp(self):
        # Plugin repository shipped inside the source tree.
        self.plugin_repo_path = os.path.join(os.getcwd(), "plugins", "repo")
        self.plugin_manager = PluginManager(self.plugin_repo_path)

        controller = ModelController(mock(SecurityManager))

        # Wire a real workspace/controller pair and expose it to plugins
        # through the model API.
        wm = WorkspaceManager(controller, mock(PluginController))
        work = wm.createWorkspace('default', workspaceClass=WorkspaceOnCouch)
        work.setModelController(controller)
        controller.setWorkspace(work)
        model.api.setUpAPIs(controller)


        # Minimal workspace stand-in: createController only needs an
        # object carrying an ``id`` attribute here.
        class WorkspaceStub():
            def __init__(self):
                self.id = "test_space"
        self.controller = self.plugin_manager.createController(WorkspaceStub())

    def tearDown(self):
        pass

    def test_instantiation(self):
        """
        Generic test to verify that the object exists and can be
        instantiated without problems.
        """
        controller = PluginController("test", {}, mock())
        self.assertTrue(controller is not None)

    def test_sanitation_checker(self):
        """
        Verify that the plugin controller is able to detect and reject
        malicious commands sent by rogue plugins. The mechanism is not
        intended to be perfect, but it should give some amount of
        protection.
        """
        controller = PluginController("test", {}, mock())

        # Appending a shell pipe must be flagged as malformed.
        original_command = "nmap -v -iR 10000 -PN -p 80"
        modified_command = "nmap -v -iR 10000 -PN -p 80|"

        self.assertTrue(controller._is_command_malformed(original_command, modified_command),
                        'Modified command is malformed')

        # Injecting a '#' into an argument must be flagged as malformed.
        original_command = "nmap -v -iR 10000 -PN -p 80"
        modified_command = "nmap -v -i#R 10000 -PN -p 80"
        self.assertTrue(controller._is_command_malformed(original_command, modified_command),
                        'Modified command is malformed')

        # Injecting a '$' (shell expansion) must be flagged as malformed.
        original_command = "nmap -v -iR 10000 -PN -p 80"
        modified_command = "nmap -v -iR $10000 -PN -p 80"
        self.assertTrue(controller._is_command_malformed(original_command, modified_command),
                        'Modified command is malformed')

        # An unchanged command must pass the check.
        original_command = "nmap -v -iR 10000 -PN -p 80"
        modified_command = "nmap -v -iR 10000 -PN -p 80"

        self.assertTrue( not controller._is_command_malformed(original_command, modified_command),
                        "Original Command same as modified command but is malformed")

    def test_input_processing(self):
        """
        Feed command lines to the active plugin via processCommandInput
        and verify the rewritten command: an ``-oX`` output argument is
        injected when missing, and a user-supplied ``-oX`` target is
        replaced rather than kept.

        TODO: Use a generic plugin.
        """

        prompt = "fdeguzman@testserver:$"

        # No -oX given: the controller should add one.
        command_string = "nmap localhost"
        modified_string = self.controller.processCommandInput(prompt, "", "",
                                                             command_string, False)
        arg_search = re.match(r"^.*(-oX\s*[^\s]+).*$", modified_string)
        self.assertTrue(arg_search is not None)

        # -oX given by the user: the original target must not survive.
        command_string = "nmap -oX benito_camelas.xml localhost"
        modified_string = self.controller.processCommandInput(prompt, "", "", command_string, False)
        arg_search = re.match(r"^.*(-oX benito_camelas\.xml).*$", modified_string)
        self.assertTrue(arg_search is None)

    def test_process_command_keep_information(self):
        """Processing a command must record it as last_command_information."""

        prompt = "fdeguzman@testserver:$"

        command_string = "nmap -oX benito_camelas.xml localhost"
        modified_string = self.controller.processCommandInput(prompt, "", "", command_string, False)

        self.assertIsNotNone(self.controller.last_command_information, "Command Information not saved")
117 | ||
118 | ||
119 | ||
if __name__ == '__main__':
    # Run this module's tests directly with the default unittest runner.
    unittest.main()
122 |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | ||
7 | import unittest | |
8 | import os | |
9 | import requests | |
10 | import json | |
11 | import sys | |
12 | import base64 | |
13 | from mockito import mock, when | |
14 | ||
15 | sys.path.append('.') | |
16 | ||
17 | from managers.all import PluginManager | |
18 | import apis.rest.api as api | |
19 | import model.api | |
20 | import model.controller | |
21 | from model.workspace import Workspace | |
22 | from model.container import ModelObjectContainer | |
23 | from managers.all import PersistenceManager | |
24 | import test_cases.common as test_utils | |
25 | ||
26 | ||
class TestPluginControllerApi(unittest.TestCase):
    """
    Integration tests for the REST API served on 127.0.0.1:9977.

    Covers the command endpoints (/cmd/input, /cmd/output,
    /cmd/active-plugins) and the model edition endpoints
    (/model/edit/vulns, /model/del/vulns). The API is started once for the
    whole class and every test talks to it over HTTP via `requests`.
    """

    @classmethod
    def setUpClass(cls):
        # Boot the plugin/model APIs a single time for the whole suite.
        cls.model_controller = model.controller.ModelController(mock())
        plugin_repo_path = os.path.join(os.getcwd(), "plugins", "repo")
        plugin_manager = PluginManager(plugin_repo_path)
        api.startAPIs(plugin_manager, cls.model_controller)

    @classmethod
    def tearDownClass(cls):
        api.stopAPIs()

    def setUp(self):
        # Fresh mocked workspace per test so model state does not leak
        # between test cases.
        self.workspace = mock(Workspace)
        self.workspace.name = "default"
        self.workspace._dmanager = mock(PersistenceManager())
        when(self.workspace._dmanager).saveDocument().thenReturn(True)
        when(self.workspace).getContainee().thenReturn(ModelObjectContainer())
        self.model_controller.setWorkspace(self.workspace)

        model.api.setUpAPIs(self.model_controller)
        self.url_input = "http://127.0.0.1:9977/cmd/input"
        self.url_output = "http://127.0.0.1:9977/cmd/output"
        self.url_active_plugins = "http://127.0.0.1:9977/cmd/active-plugins"
        self.headers = {'Content-type': 'application/json', 'Accept': 'application/json'}

        self.url_model_edit_vulns = "http://127.0.0.1:9977/model/edit/vulns"
        self.url_model_del_vulns = "http://127.0.0.1:9977/model/del/vulns"

    def tearDown(self):
        # Deregister any plugin activated by the test that just ran.
        requests.delete(self.url_active_plugins)

    def _post_input(self, cmd):
        """POST a command string to /cmd/input and return the response."""
        return requests.post(self.url_input,
                             data=json.dumps({"cmd": cmd}),
                             headers=self.headers)

    def _read_fixture_b64(self, relative_path):
        """Return the base64-encoded content of a test fixture file.

        Uses a context manager so the file handle is closed
        deterministically (the original code leaked it).
        """
        with open(os.path.join(os.getcwd(), relative_path)) as fixture:
            return base64.b64encode(fixture.read())

    def test_cmd_input_ls(self):
        """A command no plugin handles is accepted with 204 No Content."""
        response = self._post_input("ls")
        self.assertEqual(response.status_code, 204,
                         "Status Code should be 204: No Content, but received: %d" % response.status_code)

    def test_cmd_input_ping(self):
        """A command with no rewriting returns null cmd/output fields."""
        response = self._post_input("ping 127.0.0.1")
        json_response = response.json()

        self.assertEqual(response.status_code, 200,
                         "Status Code should be 200: OK, but received: %d" % response.status_code)
        self.assertIn("cmd", json_response.keys(),
                      "Json response should have a cmd key")
        self.assertIn("custom_output_file", json_response.keys(),
                      "Json response should have a custom_output_file key")
        self.assertIsNone(json_response.get("cmd"), "cmd should be None")
        self.assertIsNone(json_response.get("custom_output_file"),
                          "custom_output_file should be None")

    def test_cmd_input_nmap(self):
        """nmap input is rewritten: cmd and custom_output_file are set."""
        response = self._post_input("nmap 127.0.0.1")
        json_response = response.json()

        self.assertEqual(response.status_code, 200,
                         "Status Code should be 200: OK, but received: %d" % response.status_code)
        self.assertIn("cmd", json_response.keys(),
                      "Json response should have a cmd key")
        self.assertIn("custom_output_file", json_response.keys(),
                      "Json response should have a custom_output_file key")
        self.assertIsNotNone(json_response.get("cmd"), "cmd shouldn't be None")
        self.assertIsNotNone(json_response.get("custom_output_file"),
                             "custom_output_file shouldn't be None")

    def test_cmd_input_get_instead_post(self):
        """GET is not allowed on /cmd/input: expect 405."""
        response = requests.get(self.url_input,
                                data=json.dumps({"cmd": "ls"}),
                                headers=self.headers)

        self.assertEqual(response.status_code, 405,
                         "Status code should be 405, but received: %d" % response.status_code)

    def test_cmd_output_nmap(self):
        """Output posted for a registered plugin is parsed into hosts."""
        # send input first to register the active plugin
        cmd = "nmap 127.0.0.1"
        self._post_input(cmd)

        # send output, using a fake nmap xml output
        output = self._read_fixture_b64('test_cases/data/nmap_plugin_with_api.xml')
        data = {"cmd": cmd, "output": output}
        response = requests.post(self.url_output,
                                 data=json.dumps(data),
                                 headers=self.headers)
        self.model_controller.processAllPendingActions()

        self.assertEqual(response.status_code, 200,
                         "Status Code should be 200: OK, but received: %d" % response.status_code)
        self.assertEqual(len(self.model_controller.getAllHosts()), 1,
                         "Controller should have 1 host")

    def test_cmd_output_plugin_not_active(self):
        """Output without a prior input registration is rejected with 400."""
        cmd = "nmap 127.0.0.1"
        output = self._read_fixture_b64('test_cases/data/nmap_plugin_with_api.xml')
        data = {"cmd": cmd, "output": output}
        response = requests.post(self.url_output,
                                 data=json.dumps(data),
                                 headers=self.headers)

        self.assertEqual(response.status_code, 400,
                         "Status Code should be 400: Bad Request, but received: %d" % response.status_code)

    def test_model_edit_host_vuln(self):
        """Editing a host vuln updates name, desc and severity."""
        host = test_utils.create_host(self)
        vuln = test_utils.create_host_vuln(self, host, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID(), 'name': 'coco',
                'desc': 'newdesc', 'severity': 'low'}

        response = requests.post(self.url_model_edit_vulns,
                                 data=json.dumps(data),
                                 headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        addedvuln = addedhost.getVuln(vuln.getID())

        self.assertEqual(addedvuln.name, 'coco', 'Name not updated')
        self.assertEqual(addedvuln.desc, 'newdesc', 'Desc not updated')
        self.assertEqual(addedvuln.severity, 'low', 'Severity not updated')

    def test_model_edit_int_vuln(self):
        """Editing an interface vuln updates name, desc and severity."""
        host = test_utils.create_host(self)
        inter = test_utils.create_interface(self, host)
        vuln = test_utils.create_int_vuln(self, host, inter, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID(), 'name': 'coco',
                'desc': 'newdesc', 'severity': 'low'}

        response = requests.post(self.url_model_edit_vulns,
                                 data=json.dumps(data),
                                 headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        addedInterface = addedhost.getInterface(inter.getID())
        addedvuln = addedInterface.getVuln(vuln.getID())

        self.assertEqual(addedvuln.name, 'coco', 'Name not updated')
        self.assertEqual(addedvuln.desc, 'newdesc', 'Desc not updated')
        self.assertEqual(addedvuln.severity, 'low', 'Severity not updated')

    def test_model_edit_serv_vuln(self):
        """Editing a service vuln updates name, desc and severity."""
        host = test_utils.create_host(self)
        inter = test_utils.create_interface(self, host)
        serv = test_utils.create_service(self, host, inter)
        vuln = test_utils.create_serv_vuln(self, host, serv, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID(), 'name': 'coco',
                'desc': 'newdesc', 'severity': 'low'}

        response = requests.post(self.url_model_edit_vulns,
                                 data=json.dumps(data),
                                 headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        addedInterface = addedhost.getInterface(inter.getID())
        addedService = addedInterface.getService(serv.getID())
        addedvuln = addedService.getVuln(vuln.getID())

        self.assertEqual(addedvuln.name, 'coco', 'Name not updated')
        self.assertEqual(addedvuln.desc, 'newdesc', 'Desc not updated')
        self.assertEqual(addedvuln.severity, 'low', 'Severity not updated')

    def test_model_remove_host_vuln(self):
        """DELETE on /model/del/vulns removes a host vulnerability."""
        host = test_utils.create_host(self)
        vuln = test_utils.create_host_vuln(self, host, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID(), 'name': 'coco',
                'desc': 'newdesc', 'severity': 'low'}

        response = requests.delete(self.url_model_del_vulns,
                                   data=json.dumps(data),
                                   headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        self.assertEqual(len(addedhost.getVulns()), 0,
                         'Vuln not removed from Host')

    def test_model_del_int_vuln(self):
        """DELETE on /model/del/vulns removes an interface vulnerability."""
        host = test_utils.create_host(self)
        inter = test_utils.create_interface(self, host)
        vuln = test_utils.create_int_vuln(self, host, inter, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID()}

        response = requests.delete(self.url_model_del_vulns,
                                   data=json.dumps(data),
                                   headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        addedInterface = addedhost.getInterface(inter.getID())
        self.assertEqual(len(addedInterface.getVulns()), 0,
                         'Interface vulns not deleted')

    def test_model_remove_serv_vuln(self):
        """DELETE on /model/del/vulns removes a service vulnerability."""
        host = test_utils.create_host(self)
        inter = test_utils.create_interface(self, host)
        serv = test_utils.create_service(self, host, inter)
        vuln = test_utils.create_serv_vuln(self, host, serv, 'vuln', 'desc', 'high')

        data = {"vulnid": vuln.getID(), "hostid": host.getID()}

        response = requests.delete(self.url_model_del_vulns,
                                   data=json.dumps(data),
                                   headers=self.headers)

        self.assertEqual(response.status_code, 200, "Status Code should be 200: OK")

        addedhost = self.model_controller.getHost(host.getID())
        addedInterface = addedhost.getInterface(inter.getID())
        addedService = addedInterface.getService(serv.getID())

        self.assertEqual(len(addedService.getVulns()), 0,
                         'Service vulns not removed')
263 | ||
264 | ||
if __name__ == '__main__':
    # Run this module's tests directly with the default unittest runner.
    unittest.main()
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | import unittest | |
10 | import sys | |
11 | import os | |
12 | from time import time | |
13 | sys.path.append(os.path.abspath(os.getcwd())) | |
14 | ||
15 | from mockito import mock, when, any | |
16 | ||
17 | from model.hosts import Host, Interface, Service | |
18 | from model.workspace import Workspace | |
19 | from model.commands_history import CommandRunInformation | |
20 | from model.common import Metadata, ModelObject | |
21 | from persistence.mappers.abstract_mapper import NullPersistenceManager | |
22 | from managers.mapper_manager import MapperManager | |
23 | from persistence.mappers.data_mappers import ModelObjectMapper, Mappers | |
24 | ||
from config.configuration import getInstanceConfiguration
# Load the instance configuration once at import time.
CONF = getInstanceConfiguration()
27 | ||
28 | ||
class HostMapperTestSuite(unittest.TestCase):
    """Persistence round-trip tests for the Host mapper."""

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.host_mapper = self.mapper_manager.getMapper(Host.__name__)

    def tearDown(self):
        pass

    def test_host_serialization(self):
        """Serializing a host must keep every basic attribute."""
        pepito = Host(name="pepito", os="linux")
        pepito.setDescription("Some description")
        pepito.setOwned(True)
        doc = self.host_mapper.serialize(pepito)
        # serialize() returns None on failure
        self.assertNotEqual(doc, None, "Serialized host shouldn't be None")
        # check every serialized attribute against the live object
        for key, expected, msg in [
                ("_id", pepito.getID(),
                 "Serialized ID is not the same as Host ID"),
                ("name", pepito.getName(),
                 "Serialized name is not the same as Host name"),
                ("os", pepito.getOS(),
                 "Serialized OS is not the same as Host OS"),
                ("description", pepito.getDescription(),
                 "Serialized description is not the same as Host description"),
                ("owned", pepito.isOwned(),
                 "Serialized owned flag is not the same as Host owned flag")]:
            self.assertEquals(doc.get(key), expected, msg)

    def test_host_creation(self):
        """A saved host can be found again and keeps its id."""
        pepito = Host(name="pepito", os="linux")
        self.host_mapper.save(pepito)
        found = self.host_mapper.find(pepito.getID())
        self.assertEquals(found, pepito,
                          "Host retrieved should be the same as persisted")
        self.assertEquals(found.getID(), pepito.getID(),
                          "Host retrieved's Id should be the same as persisted's Id")

    def test_load_nonexistent_host(self):
        self.assertEquals(self.host_mapper.load("1234"), None,
                          "Nonexistent host should return None")

    def test_find_not_loaded_host(self):
        """find() must deserialize a doc fetched from the persistence manager."""
        # Simulated document returned by the mocked persistence manager.
        fake_doc = {
            "type": "Host",
            "_id": "1234",
            "name": "pepito",
            "owned": False,
            "parent": None,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "os": "linux",
            "default_gateway": None
        }
        when(self.host_mapper.pmanager).getDocument("1234").thenReturn(fake_doc)

        loaded = self.host_mapper.find("1234")
        self.assertNotEquals(loaded, None,
                             "Existent host shouldn't return None")
        self.assertEquals(loaded.getName(), "pepito",
                          "Host name should be pepito")
        self.assertEquals(loaded.getOS(), "linux",
                          "Host os should be linux")

    def test_load_rubbish_host_doc(self):
        """A document with an unknown type must deserialize to None."""
        bad_doc = {
            "type": "RUBBISH",
            "_id": "1234",
            "name": "pepito",
            "owned": False,
            "parent": None,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "os": "linux",
            "default_gateway": None
        }
        fake_pmanager = mock(NullPersistenceManager)
        when(fake_pmanager).getDocument(any(str)).thenReturn(bad_doc)
        self.host_mapper.setPersistenceManager(fake_pmanager)

        self.assertEquals(self.host_mapper.find("1234"), None,
                          "Doc is malformed so we should get None")

    def test_host_create_and_delete(self):
        """Deleting a saved host removes it from the mapper."""
        pepito = Host(name="pepito", os="linux")
        self.host_mapper.save(pepito)
        host_id = pepito.getID()

        self.assertNotEquals(self.host_mapper.load(host_id), None,
                             "Host should be saved")

        self.host_mapper.delete(host_id)

        self.assertEquals(self.host_mapper.find(host_id), None,
                          "Host shouldn't exist anymore")
164 | ||
165 | ||
class InterfaceMapperTestSuite(unittest.TestCase):
    """Persistence round-trip tests for the Interface mapper."""

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.iface_mapper = self.mapper_manager.getMapper(Interface.__name__)

    def tearDown(self):
        pass

    @staticmethod
    def _sample_iface():
        """Build the fully-populated interface shared by several tests."""
        iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06")
        iface.setDescription("Some description")
        iface.setOwned(True)
        iface.addHostname("www.test.com")
        iface.setIPv4({
            "address": "192.168.10.168",
            "mask": "255.255.255.0",
            "gateway": "192.168.10.1",
            "DNS": "192.168.10.1"
        })
        iface.setPortsOpened(2)
        iface.setPortsClosed(3)
        iface.setPortsFiltered(4)
        return iface

    def test_interface_serialization(self):
        """Serialized interface must mirror the object's attributes."""
        iface = self._sample_iface()
        doc = self.iface_mapper.serialize(iface)
        # serialize() returns None on failure
        self.assertNotEqual(doc, None,
                            "Serialized interface shouldn't be None")
        for key, expected, msg in [
                ("_id", iface.getID(),
                 "Serialized ID is not the same as Interface ID"),
                ("name", iface.getName(),
                 "Serialized name is not the same as Interface name"),
                ("mac", iface.getMAC(),
                 "Serialized MAC is not the same as Interface MAC"),
                ("network_segment", iface.getNetworkSegment(),
                 "Serialized Network Segment is not the same as Interface Network Segment"),
                ("description", iface.getDescription(),
                 "Serialized description is not the same as Interface description"),
                ("owned", iface.isOwned(),
                 "Serialized owned flag is not the same as Interface owned flag")]:
            self.assertEquals(doc.get(key), expected, msg)

    def test_interface_creation(self):
        """A saved interface can be found again and keeps its id."""
        iface = self._sample_iface()
        self.iface_mapper.save(iface)
        found = self.iface_mapper.find(iface.getID())
        self.assertEquals(found, iface,
                          "Interface retrieved should be the same as persisted")
        self.assertEquals(found.getID(), iface.getID(),
                          "Interface retrieved's Id should be the same as persisted's Id")

    def test_load_nonexistent_interface(self):
        self.assertEquals(self.iface_mapper.load("1234"), None,
                          "Nonexistent interface should return None")

    def test_find_not_loaded_interface(self):
        """find() must deserialize a doc coming from the persistence layer."""
        fake_doc = {
            "type": "Interface",
            "_id": "1234",
            "name": "192.168.10.168",
            "owned": False,
            "parent": None,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "mac": "01:02:03:04:05:06",
            "network_segment": None,
            "hostnames": ["www.test.com"],
            "ipv4": {
                "address": "192.168.10.168",
                "mask": "255.255.255.0",
                "gateway": "192.168.10.1",
                "DNS": "192.168.10.1"
            },
            "ipv6": {},
            "ports": {
                "opened": 2,
                "closed": 3,
                "filtered": 4,
            }
        }
        when(self.iface_mapper.pmanager).getDocument("1234").thenReturn(fake_doc)

        loaded = self.iface_mapper.find("1234")
        self.assertNotEquals(loaded, None,
                             "Existent interface shouldn't return None")
        self.assertEquals(loaded.getName(), "192.168.10.168",
                          "Inteface name should be 192.168.10.168")
        self.assertEquals(loaded.getMAC(), "01:02:03:04:05:06",
                          "Interface MAC should be 01:02:03:04:05:06")

    def test_interface_create_and_delete(self):
        """Deleting a saved interface removes it from the mapper."""
        iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06")
        self.iface_mapper.save(iface)
        iface_id = iface.getID()

        self.assertNotEquals(self.iface_mapper.load(iface_id), None,
                             "Inteface should be saved")

        self.iface_mapper.delete(iface_id)

        self.assertEquals(self.iface_mapper.find(iface_id), None,
                          "Inteface shouldn't exist anymore")
315 | ||
316 | ||
class ServiceMapperTestSuite(unittest.TestCase):
    """Persistence round-trip tests for the Service mapper."""

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.srv_mapper = self.mapper_manager.getMapper(Service.__name__)

    def tearDown(self):
        pass

    @staticmethod
    def _sample_service():
        """Build the fully-populated service shared by several tests."""
        srv = Service(name="http")
        srv.setDescription("Some description")
        srv.setOwned(True)
        srv.setProtocol("tcp")
        srv.setPorts(80)
        srv.setStatus("open")
        srv.setVersion("Apache 2.4")
        return srv

    def test_service_serialization(self):
        """Serialized service must mirror the object's attributes."""
        srv = self._sample_service()
        doc = self.srv_mapper.serialize(srv)
        # serialize() returns None on failure
        self.assertNotEqual(doc, None,
                            "Serialized service shouldn't be None")
        for key, expected, msg in [
                ("_id", srv.getID(),
                 "Serialized ID is not the same as Service ID"),
                ("name", srv.getName(),
                 "Serialized name is not the same as Service name"),
                ("protocol", srv.getProtocol(),
                 "Serialized protocol is not the same as Service protocol"),
                ("status", srv.getStatus(),
                 "Serialized status is not the same as Service status"),
                ("ports", srv.getPorts(),
                 "Serialized ports is not the same as Service ports"),
                ("description", srv.getDescription(),
                 "Serialized description is not the same as Interface description"),
                ("owned", srv.isOwned(),
                 "Serialized owned flag is not the same as Interface owned flag")]:
            self.assertEquals(doc.get(key), expected, msg)

    def test_service_creation(self):
        """A saved service can be found again and keeps its id."""
        srv = self._sample_service()
        self.srv_mapper.save(srv)
        found = self.srv_mapper.find(srv.getID())
        self.assertEquals(found, srv,
                          "Service retrieved should be the same as persisted")
        self.assertEquals(found.getID(), srv.getID(),
                          "Service retrieved's Id should be the same as persisted's Id")

    def test_load_nonexistent_service(self):
        self.assertEquals(self.srv_mapper.load("1234"), None,
                          "Nonexistent service should return None")

    def test_find_not_loaded_service(self):
        """find() must deserialize a doc fetched from the persistence layer."""
        fake_doc = {
            "type": "Service",
            "_id": "1234",
            "name": "http",
            "owned": False,
            "parent": None,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "protocol": "tcp",
            "status": "open",
            "ports": [80],
            "version": "Apache 2.4"
        }
        when(self.srv_mapper.pmanager).getDocument("1234").thenReturn(fake_doc)

        loaded = self.srv_mapper.find("1234")
        self.assertNotEquals(loaded, None,
                             "Existent service shouldn't return None")
        self.assertEquals(loaded.getName(), "http",
                          "Service name should be http")
        self.assertEquals(loaded.getProtocol(), "tcp",
                          "Service protocol should be tcp")

    def test_service_create_and_delete(self):
        """Deleting a saved service removes it from the mapper."""
        srv = Service(name="http")
        self.srv_mapper.save(srv)
        srv_id = srv.getID()

        self.assertNotEquals(self.srv_mapper.load(srv_id), None,
                             "Service should be saved")

        self.srv_mapper.delete(srv_id)

        self.assertEquals(self.srv_mapper.find(srv_id), None,
                          "Service shouldn't exist anymore")
447 | ||
class ModelObjectMapperTestSuite(unittest.TestCase):
    """(De)serialization tests for the generic ModelObject mapper, focused
    on the embedded Metadata fields."""

    def setUp(self):
        # The generic ModelObject mapper is not registered by default;
        # register it before creating the mappers.
        Mappers[ModelObject.class_signature] = ModelObjectMapper
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.mapper = self.mapper_manager.getMapper(ModelObject.__name__)

    def tearDown(self):
        pass

    def test_metadata_deserialization(self):
        """Unserializing a document must restore the metadata attributes."""
        modelobject = ModelObject()
        # Force a deterministic id so the _id check is stable.
        modelobject.updateID = lambda *args: 'ModelObjectID'
        modelobjectserialized = {'name': '',
                                 'parent': None,
                                 'owner': '',
                                 '_id': None,
                                 'type': 'ModelObject',
                                 'metadata': {'update_time': 1417207650.761777,
                                              'update_user': '',
                                              'update_action': 0,
                                              'creator': '',
                                              'create_time': 1417207650.761777,
                                              'update_controller_action': 'No model controller call',
                                              'owner': ''},
                                 'owned': False,
                                 'description': ''}
        modelobject2 = self.mapper.unserialize(modelobject, modelobjectserialized)
        metadata = modelobject2._metadata
        metadataserialized = modelobjectserialized['metadata']
        # if unserialization fails, returns None
        self.assertNotEqual(
            modelobjectserialized,
            None,
            "Serialized ModelObejct shouldn't be None")
        # NOTE(review): a leftover `import ipdb; ipdb.set_trace()` debugger
        # breakpoint was removed here; it would hang any automated test run.
        self.assertEqual(
            modelobjectserialized.get("_id"),
            modelobject.getID(),
            "Serialized ID is not the same as metadata ID")
        self.assertEqual(
            metadataserialized.get("owner"),
            metadata.__getattribute__("owner"),
            "Serialized owner is not the same as metadata owner")
        self.assertEqual(
            metadataserialized.get("creator"),
            metadata.__getattribute__("creator"),
            "Serialized owner is not the same as metadata creator")

        # Timestamps are stored as floats (epoch seconds).
        self.assertTrue(
            isinstance(metadataserialized.get("create_time"), float),
            "Serialized create_time is not float")
        self.assertTrue(
            isinstance(metadataserialized.get("update_time"), float),
            "Serialized update_time is not float")

    def test_metadata_serialization(self):
        """Serializing a fresh ModelObject yields default Metadata values."""
        modelobject = ModelObject()
        # Force a deterministic id so the _id check is stable.
        modelobject.updateID = lambda *args: 'ModelObjectID'
        # Reference metadata with default values for comparison.
        metadata = Metadata('')

        modelobjectserialized = self.mapper.serialize(modelobject)
        metadataserialized = modelobjectserialized['metadata']
        # if serialization fails, returns None
        self.assertNotEqual(
            modelobjectserialized,
            None,
            "Serialized ModelObejct shouldn't be None")
        self.assertEqual(
            modelobjectserialized.get("_id"),
            modelobject.getID(),
            "Serialized ID is not the same as metadata ID")
        self.assertEqual(
            metadataserialized.get("creator"),
            metadata.__getattribute__("creator"),
            "Serialized owner is not the same as metadata creator")
        self.assertEqual(
            metadataserialized.get("owner"),
            metadata.__getattribute__("owner"),
            "Serialized owner is not the same as metadata owner")

        # Timestamps are stored as floats (epoch seconds).
        self.assertTrue(
            isinstance(metadataserialized.get("create_time"), float),
            "Serialized create_time is not float")
        self.assertTrue(
            isinstance(metadataserialized.get("update_time"), float),
            "Serialized update_time is not float")
550 | ||
551 | ||
class CommandRunInformationMapperTestSuite(unittest.TestCase):
    """Tests for the CommandRunInformation mapper.

    Backed by a NullPersistenceManager, so no real database is involved;
    where a persisted document is needed it is stubbed with mockito.
    """

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.mapper = self.mapper_manager.getMapper(CommandRunInformation.__name__)

    def tearDown(self):
        pass

    def _make_cmd(self):
        """Build the sample command run ('ping 127.0.0.1') used by the tests."""
        import time
        return CommandRunInformation(**{
            'workspace': 'fakews',
            'itime': time.time(),
            'command': "ping",
            'params': "127.0.0.1"})

    def test_cmd_serialization(self):
        """Serialized command must keep its id, command and params."""
        cmd = self._make_cmd()
        cmdserialized = self.mapper.serialize(cmd)
        # if serialization fails, serialize() returns None
        self.assertNotEqual(
            cmdserialized,
            None,
            "Serialized cmd shouldn't be None")
        # we check the cmd attributes
        self.assertEquals(
            cmdserialized.get("_id"),
            cmd.getID(),
            "Serialized ID is not the same as cmd ID")
        self.assertEquals(
            cmdserialized.get("command"),
            cmd.__getattribute__("command"),
            "Serialized command is not the same as cmd command")
        self.assertEquals(
            cmdserialized.get("params"),
            cmd.__getattribute__("params"),
            "Serialized params is not the same as cmd params")

    def test_cmd_creation(self):
        """A saved command can be retrieved back through the mapper."""
        cmd = self._make_cmd()

        self.mapper.save(cmd)
        c = self.mapper.find(cmd.getID())
        self.assertEquals(
            c,
            cmd,
            "Cmd retrieved should be the same as persisted")
        self.assertEquals(
            c.getID(),
            cmd.getID(),
            "Cmd retrieved's Id should be the same as persisted's Id")

    def test_load_nonexistent_cmd(self):
        """Loading an unknown id must return None."""
        self.assertEquals(
            self.mapper.load("1234"),
            None,
            "Nonexistent cmd should return None")

    def test_find_not_loaded_cmd(self):
        """find() must reconstruct a cmd straight from persistence."""
        # we need to mock the persistence manager first,
        # so we can return a simulated doc
        doc = {
            "_id": "1234",
            "itime": 1409856507.891718,
            "command": "ping",
            "workspace": "fakews",
            "duration": 0.6570961475372314,
            "params": "127.0.0.1",
            "type": "CommandRunInformation",
        }
        when(self.mapper.pmanager).getDocument("1234").thenReturn(doc)

        cmd = self.mapper.find("1234")
        self.assertNotEquals(
            cmd,
            None,
            "Existent cmd shouldn't return None")

        self.assertEquals(
            cmd.__getattribute__("command"),
            "ping",
            "Cmd command should be ping")

    def test_cmd_create_and_delete(self):
        """Deleting a saved command must make it unfindable."""
        cmd = self._make_cmd()

        self.mapper.save(cmd)
        cmd_id = cmd.getID()

        self.assertNotEquals(
            self.mapper.load(cmd_id),
            None,
            "Command should be saved")

        self.mapper.delete(cmd_id)

        self.assertEquals(
            self.mapper.find(cmd_id),
            None,
            "Command shouldn't exist anymore")
660 | ||
661 | ||
class WorkspaceMapperTestSuite(unittest.TestCase):
    """Tests for the Workspace mapper backed by a NullPersistenceManager."""

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())
        self.wmapper = self.mapper_manager.getMapper(Workspace.__name__)

    def tearDown(self):
        pass

    def test_workspace_serialization(self):
        """Serialized workspace keeps id, name, description and customer."""
        workspace = Workspace(name="workspace_test")
        workspace.setDescription("Some description")
        workspace.setCustomer("Infobyte")
        wserialized = self.wmapper.serialize(workspace)
        # if serialization fails, serialize() returns None
        self.assertNotEqual(
            wserialized,
            None,
            "Serialized workspace shouldn't be None")
        # we check the workspace attributes
        self.assertEquals(
            wserialized.get("_id"),
            workspace.getID(),
            "Serialized ID is not the same as workspace ID")
        self.assertEquals(
            wserialized.get("name"),
            workspace.getName(),
            "Serialized name is not the same as workspace name")
        self.assertEquals(
            wserialized.get("description"),
            workspace.getDescription(),
            "Serialized description is not the same as workspace description")
        self.assertEquals(
            wserialized.get("customer"),
            workspace.getCustomer(),
            "Serialized customer is not the same as workspace customer")

    def test_workspace_creation(self):
        """A saved workspace can be retrieved back through the mapper."""
        workspace = Workspace(name="workspace_test")
        self.wmapper.save(workspace)
        w = self.wmapper.find(workspace.getID())
        self.assertEquals(
            w,
            workspace,
            "Workspace retrieved should be the same as persisted")
        self.assertEquals(
            w.getID(),
            workspace.getID(),
            "Workspace retrieved's Id should be the same as persisted's Id")

    def test_load_nonexistent_workspace(self):
        """Loading an unknown id must return None."""
        self.assertEquals(
            self.wmapper.load("Nonexistent"),
            None,
            "Nonexistent workspace should return None")

    def test_find_not_loaded_workspace(self):
        """find() must reconstruct a workspace straight from persistence."""
        # we need to mock the persistence manager first,
        # so we can return a simulated doc
        doc = {
            "type": "Workspace",
            "_id": "workspace_test",
            "name": "workspace_test",
            "description": "some description",
            "customer": "Infobyte",
            "sdate": time(),
            "fdate": time()
        }
        when(self.wmapper.pmanager).getDocument("workspace_test").thenReturn(doc)

        workspace = self.wmapper.find("workspace_test")
        self.assertNotEquals(
            workspace,
            None,
            "Existent workspace shouldn't return None")

        self.assertEquals(
            workspace.getName(),
            "workspace_test",
            "Workspace name should be workspace_test")

        self.assertEquals(
            workspace.getCustomer(),
            "Infobyte",
            "Workspace customer should be Infobyte")

    def test_workspace_create_and_delete(self):
        """Deleting a saved workspace must make it unfindable."""
        workspace = Workspace(name="workspace_test")
        self.wmapper.save(workspace)
        w_id = workspace.getID()

        self.assertNotEquals(
            self.wmapper.load(w_id),
            None,
            "Workspace should be saved")

        self.wmapper.delete(w_id)

        self.assertEquals(
            self.wmapper.find(w_id),
            None,
            "Workspace shouldn't exist anymore")
764 | ||
765 | ||
class MapperManagerTestSuite(unittest.TestCase):
    """Smoke test for MapperManager save/find round-trips."""

    def setUp(self):
        self.mapper_manager = MapperManager()

    def tearDown(self):
        pass

    def test_create_and_retrieve_host(self):
        """A host saved through the manager must come back intact."""
        self.mapper_manager.createMappers(NullPersistenceManager())

        saved_host = Host(name="pepito", os="linux")
        saved_host.setDescription("Some description")
        saved_host.setOwned(True)
        self.mapper_manager.save(saved_host)

        retrieved_host = self.mapper_manager.find(saved_host.getID())

        self.assertNotEquals(
            retrieved_host,
            None,
            "Host retrieved shouldn't be None")

        self.assertEquals(
            saved_host,
            retrieved_host,
            "Host created should be the same as host retrieved")
791 | ||
792 | ||
class CompositeMapperTestSuite(unittest.TestCase):
    """Tests that mappers resolve composite (parent/child) relationships.

    The persistence layer is replaced by a mockito-stubbed
    NullPersistenceManager returning hand-built documents, so only the
    mapping logic is exercised — no real database is touched.
    """

    def setUp(self):
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(NullPersistenceManager())

    def tearDown(self):
        pass

    def create_host(self):
        """Build a sample Host model object."""
        host = Host(name="pepito", os="linux")
        host.setDescription("Some description")
        host.setOwned(True)
        return host

    def create_interface(self):
        """Build a sample Interface model object."""
        iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06")
        iface.setDescription("Some description")
        iface.setOwned(True)
        iface.addHostname("www.test.com")
        iface.setIPv4({
            "address": "192.168.10.168",
            "mask": "255.255.255.0",
            "gateway": "192.168.10.1",
            "DNS": "192.168.10.1"
        })
        iface.setPortsOpened(2)
        iface.setPortsClosed(3)
        iface.setPortsFiltered(4)
        return iface

    @staticmethod
    def _host_doc(doc_id, name="pepito", os="linux", parent=None):
        """Build a fake persisted Host document."""
        return {
            "type": "Host",
            "_id": doc_id,
            "name": name,
            "owned": False,
            "parent": parent,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "os": os,
            "default_gateway": None
        }

    @staticmethod
    def _interface_doc(doc_id, parent="1234"):
        """Build a fake persisted Interface document."""
        return {
            "type": "Interface",
            "_id": doc_id,
            "name": "192.168.10.168",
            "owned": False,
            "parent": parent,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "mac": "01:02:03:04:05:06",
            "network_segment": None,
            "hostnames": ["www.test.com"],
            "ipv4": {
                "address": "192.168.10.168",
                "mask": "255.255.255.0",
                "gateway": "192.168.10.1",
                "DNS": "192.168.10.1"
            },
            "ipv6": {},
            "ports": {
                "opened": 2,
                "closed": 3,
                "filtered": 4,
            }
        }

    @staticmethod
    def _service_doc(doc_id, name, port, version, parent="5678"):
        """Build a fake persisted Service document."""
        return {
            "type": "Service",
            "_id": doc_id,
            "name": name,
            "owned": False,
            "parent": parent,
            "owner": None,
            "description": "some description",
            "metadata": None,
            "protocol": "tcp",
            "status": "open",
            "ports": [port],
            "version": version
        }

    def test_find_composite_host(self):
        '''
        We are going to create a host, then save it.
        Next we create an interface and then add it
        to the host, and finally save it.
        '''
        # add host
        host = self.create_host()
        self.mapper_manager.save(host)
        # add interface
        interface = self.create_interface()
        host.addChild(interface)
        self.mapper_manager.save(interface)

        h = self.mapper_manager.find(host.getID())
        self.assertEquals(
            h.getAllInterfaces(),
            host.getAllInterfaces(),
            "Interfaces from original host should be equals to retrieved host's interfaces")

    def test_load_composite_one_host_one_interface(self):
        '''
        Load a host with one child interface straight from the (mocked)
        persistence layer and check both directions of the link.
        '''
        doc_host = self._host_doc("1234")
        doc_interface = self._interface_doc("5678")

        pmanager = mock(NullPersistenceManager)
        when(pmanager).getDocument("1234").thenReturn(doc_host)
        when(pmanager).getDocument("5678").thenReturn(doc_interface)
        when(pmanager).getChildren(any(str)).thenReturn([])
        when(pmanager).getChildren("1234").thenReturn([{'_id': "5678", 'type': "Interface"}])
        self.mapper_manager.createMappers(pmanager)

        host = self.mapper_manager.find("1234")
        self.assertNotEquals(
            host,
            None,
            "Existent host shouldn't be None")

        self.assertEquals(
            len(host.getAllInterfaces()),
            1,
            "Host should have one interface")

        iface = self.mapper_manager.find("5678")
        self.assertNotEquals(
            iface,
            None,
            "Existent interface shouldn't be None")

        self.assertEquals(
            host.getInterface("5678"),
            iface,
            "Interface inside host should be equals to retrieved interface")

        self.assertEquals(
            iface.getParent(),
            host,
            "Host should be the interface's parent")

    def test_load_composite_one_host_two_interfaces(self):
        """A host with two child interfaces must link both correctly."""
        doc_host = self._host_doc("1234")
        doc_interface1 = self._interface_doc("5678")
        doc_interface2 = self._interface_doc("6789")

        pmanager = mock(NullPersistenceManager)
        when(pmanager).getDocument("1234").thenReturn(doc_host)
        when(pmanager).getDocument("5678").thenReturn(doc_interface1)
        when(pmanager).getDocument("6789").thenReturn(doc_interface2)
        when(pmanager).getChildren(any(str)).thenReturn([])
        when(pmanager).getChildren("1234").thenReturn([{'_id': "5678", 'type': "Interface"}, {'_id': "6789", 'type': "Interface"}])
        self.mapper_manager.createMappers(pmanager)

        host = self.mapper_manager.find("1234")
        self.assertNotEquals(
            host,
            None,
            "Existent host shouldn't be None")

        self.assertEquals(
            len(host.getAllInterfaces()),
            2,
            "Host should have two interfaces")

        iface1 = self.mapper_manager.find("5678")
        self.assertNotEquals(
            iface1,
            None,
            "Existent interface1 shouldn't be None")

        self.assertEquals(
            host.getInterface("5678"),
            iface1,
            "Interface1 inside host should be equals to retrieved interface1")

        self.assertEquals(
            iface1.getParent(),
            host,
            "Host should be the interface1's parent")

        iface2 = self.mapper_manager.find("6789")
        self.assertNotEquals(
            iface2,
            None,
            "Existent interface2 shouldn't be None")

        self.assertEquals(
            host.getInterface("6789"),
            iface2,
            "Interface2 inside host should be equals to retrieved interface2")

        self.assertEquals(
            iface2.getParent(),
            host,
            "Host should be the interface2's parent")

    def test_load_composite_one_host_one_interface_two_services(self):
        """Finding an interface must materialize its parent host and its
        child services as well."""
        doc_host = self._host_doc("1234")
        doc_interface = self._interface_doc("5678")
        doc_service1 = self._service_doc("abcd", "http", 80, "Apache 2.4")
        doc_service2 = self._service_doc("efgh", "ssh", 22, "OpenSSH")

        pmanager = mock(NullPersistenceManager)
        when(pmanager).getDocument("1234").thenReturn(doc_host)
        when(pmanager).getDocument("5678").thenReturn(doc_interface)
        when(pmanager).getDocument("abcd").thenReturn(doc_service1)
        when(pmanager).getDocument("efgh").thenReturn(doc_service2)
        when(pmanager).getChildren(any(str)).thenReturn([])
        when(pmanager).getChildren("1234").thenReturn([{'_id': "5678", 'type': "Interface"}])
        when(pmanager).getChildren("5678").thenReturn([{'_id': "abcd", 'type': "Service"}, {'_id': "efgh", 'type': "Service"}])
        self.mapper_manager.createMappers(pmanager)

        iface = self.mapper_manager.find("5678")
        self.assertNotEquals(
            iface,
            None,
            "Existent interface shouldn't be None")

        # Lets make sure that the host was created
        host = iface.getParent()
        self.assertEquals(
            host.getID(),
            "1234",
            "Interface's parent id should be 1234")

        self.assertEquals(
            host,
            self.mapper_manager.find("1234"),
            "Interface1's parent should be equals to the host retrieved")

        self.assertEquals(
            len(iface.getAllServices()),
            2,
            "Interface should have two services")

        services_ids = [srv.getID() for srv in iface.getAllServices()]
        self.assertIn(
            "abcd",
            services_ids,
            "Service 'abcd' should be one of the interface's services")

        self.assertIn(
            "efgh",
            services_ids,
            "Service 'efgh' should be one of the interface's services")

    def test_load_composite_one_host_one_note_one_vuln_one_credential(self):
        """Notes, vulns and creds hanging off a host are loaded with it."""
        doc_host = self._host_doc("1234")

        doc_note = {
            "type": "Note",
            "_id": "note1",
            "name": "Note1",
            "owned": False,
            "parent": "1234",
            "owner": None,
            "description": "some description",
            "metadata": None,
            "text": "this is a note"
        }

        doc_vuln = {
            "type": "Vulnerability",
            "_id": "vuln1",
            "name": "Vuln1",
            "owned": False,
            "parent": "1234",
            "owner": None,
            "description": "some description",
            "metadata": None,
            "desc": "this is a vuln",
            "severity": "high",
            "refs": ["cve1", "cve2"]
        }

        doc_cred = {
            "type": "Cred",
            "_id": "cred1",
            "name": "Vuln1",
            "owned": False,
            "parent": "1234",
            "owner": None,
            "description": "some description",
            "metadata": None,
            "username": "infobyte",
            "password": "secret"
        }

        pmanager = mock(NullPersistenceManager)
        when(pmanager).getDocument("1234").thenReturn(doc_host)
        when(pmanager).getDocument("note1").thenReturn(doc_note)
        when(pmanager).getDocument("vuln1").thenReturn(doc_vuln)
        when(pmanager).getDocument("cred1").thenReturn(doc_cred)
        when(pmanager).getChildren(any(str)).thenReturn([])
        when(pmanager).getChildren("1234").thenReturn(
            [{'_id': "note1", 'type': "Note"},
             {'_id': "vuln1", 'type': "Vulnerability"},
             {'_id': "cred1", 'type': "Cred"}])

        self.mapper_manager.createMappers(pmanager)

        host = self.mapper_manager.find("1234")
        self.assertNotEquals(
            host,
            None,
            "Existent host shouldn't be None")

        self.assertEquals(
            len(host.getNotes()),
            1,
            "Host should have one note")

        self.assertEquals(
            len(host.getVulns()),
            1,
            "Host should have one vuln")

        self.assertEquals(
            len(host.getCreds()),
            1,
            "Host should have one cred")

    def test_delete_interface_from_composite_one_host_one_interface_two_services(self):
        """Removing an interface must cascade the removal to its services."""
        doc_host = self._host_doc("1234")
        doc_interface = self._interface_doc("5678")
        doc_service1 = self._service_doc("abcd", "http", 80, "Apache 2.4")
        doc_service2 = self._service_doc("efgh", "ssh", 22, "OpenSSH")

        self.pmanager = mock(NullPersistenceManager)
        when(self.pmanager).getDocument("1234").thenReturn(doc_host)
        when(self.pmanager).getDocument("5678").thenReturn(doc_interface)
        when(self.pmanager).getDocument("abcd").thenReturn(doc_service1)
        when(self.pmanager).getDocument("efgh").thenReturn(doc_service2)
        when(self.pmanager).getChildren(any(str)).thenReturn([])
        when(self.pmanager).getChildren("1234").thenReturn([{'_id': "5678", 'type': "Interface"}])
        when(self.pmanager).getChildren("5678").thenReturn([{'_id': "abcd", 'type': "Service"}, {'_id': "efgh", 'type': "Service"}])

        self.mapper_manager.createMappers(self.pmanager)

        # load the host first
        host = self.mapper_manager.find("1234")

        # then remove the interface
        iface_id = host.getInterface("5678").getID()
        host.deleteChild(iface_id)

        def fake_remove(id):
            # Once "removed", the document can no longer be fetched.
            when(self.pmanager).getDocument(id).thenReturn(None)
        # NOTE: fake_remove(...) is invoked eagerly on these lines; the
        # call itself is what re-stubs getDocument to None.
        when(self.pmanager).remove("5678").thenReturn(fake_remove("5678"))
        when(self.pmanager).remove("abcd").thenReturn(fake_remove("abcd"))
        when(self.pmanager).remove("efgh").thenReturn(fake_remove("efgh"))
        self.mapper_manager.remove(iface_id)

        # now we make sure that we have removed the interface
        # and the services

        self.assertEquals(
            len(host.getAllInterfaces()),
            0,
            "Host should have no interfaces")

        self.assertEquals(
            self.mapper_manager.find("5678"),
            None,
            "Interface 5678 shouldn't exist anymore")

        self.assertEquals(
            self.mapper_manager.find("abcd"),
            None,
            "Service abcd shouldn't exist anymore")

        self.assertEquals(
            self.mapper_manager.find("efgh"),
            None,
            "Service efgh shouldn't exist anymore")

    def test_load_composite_one_workspace_two_hosts(self):
        """A workspace document must list both of its child hosts."""
        doc_ws = {
            "type": "Workspace",
            "_id": "test_ws",
            "name": "test_ws",
            "description": "some description",
            "customer": "Infobyte",
            "sdate": None,
            "fdate": None
        }

        doc_host1 = self._host_doc("1234", parent="test_ws")
        doc_host2 = self._host_doc("5678", name="coquito", os="windows",
                                   parent="test_ws")

        pmanager = NullPersistenceManager()
        when(pmanager).getDocument("test_ws").thenReturn(doc_ws)
        when(pmanager).getDocument("1234").thenReturn(doc_host1)
        when(pmanager).getDocument("5678").thenReturn(doc_host2)
        when(pmanager).getDocsByFilter(any, any).thenReturn([])
        when(pmanager).getDocsByFilter(any(str), None).thenReturn([])
        when(pmanager).getDocsByFilter(None, None).thenReturn([])
        when(pmanager).getDocsByFilter("test_ws", None).thenReturn(
            [{'_id': "1234", 'type': "Host"},
             {'_id': "5678", 'type': "Host"}])

        self.mapper_manager.createMappers(pmanager)

        ws = self.mapper_manager.find("test_ws")
        self.assertNotEquals(
            ws,
            None,
            "Existent Workspace shouldn't be None")

        self.assertEquals(
            len(ws.getHosts()),
            2,
            "Workspace should have two hosts")

        hosts_ids = [host.getID() for host in ws.getHosts()]
        self.assertIn(
            "1234",
            hosts_ids,
            "Host '1234' should be one of the workspace's hosts")

        self.assertIn(
            "5678",
            hosts_ids,
            "Host '5678' should be one of the workspace's hosts")
1437 | ||
1438 | ||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | import unittest | |
10 | import sys | |
11 | import os | |
12 | import random | |
13 | sys.path.append(os.path.abspath(os.getcwd())) | |
14 | import time | |
15 | ||
16 | from persistence.persistence_managers import CouchDbManager, FileSystemManager | |
17 | from managers.mapper_manager import MapperManager | |
18 | ||
19 | from model.hosts import Host, Interface | |
20 | ||
21 | from config.configuration import getInstanceConfiguration | |
22 | CONF = getInstanceConfiguration() | |
23 | ||
24 | ||
25 | class MapperWithCouchDbManagerInegrationTest(unittest.TestCase): | |
    def setUp(self):
        """Create a fresh, randomly named CouchDB workspace for each test.

        NOTE(review): requires a reachable CouchDB instance at the URI
        returned by CONF.getCouchURI() — this is an integration test.
        """
        self.db_name = self.new_random_workspace_name()

        self.couchdbmanager = CouchDbManager(CONF.getCouchURI())

        # The connector is the per-database handle; mappers persist
        # through it.
        self.connector = self.couchdbmanager.createDb(self.db_name)
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(self.connector)
34 | ||
35 | def new_random_workspace_name(self): | |
36 | return ("aworkspace" + "".join(random.sample( | |
37 | [chr(i) for i in range(65, 90)], 10))).lower() | |
38 | ||
    def tearDown(self):
        """Drop the temporary workspace created in setUp."""
        self.couchdbmanager.deleteDb(self.db_name)
        # Give CouchDB time to finish the deletion so the next test's
        # createDb does not race with it — TODO confirm 3s is needed.
        time.sleep(3)
42 | ||
43 | def test_host_saving(self): | |
44 | host = Host(name="pepito", os="linux") | |
45 | host.setDescription("Some description") | |
46 | host.setOwned(True) | |
47 | self.mapper_manager.save(host) | |
48 | ||
49 | self.assertNotEquals( | |
50 | self.connector.getDocument(host.getID()), | |
51 | None, | |
52 | "Document shouldn't be None") | |
53 | ||
54 | self.assertEquals( | |
55 | self.connector.getDocument(host.getID()).get("name"), | |
56 | host.getName(), | |
57 | "Document should have the same host name") | |
58 | ||
59 | def test_load_nonexistent_host_using_manager_find(self): | |
60 | self.assertEquals( | |
61 | self.connector.getDocument("1234"), | |
62 | None, | |
63 | "Nonexistent host should return None document") | |
64 | ||
65 | self.assertEquals( | |
66 | self.mapper_manager.find("1234"), | |
67 | None, | |
68 | "Nonexistent host should return None object") | |
69 | ||
70 | def test_load_nonexistent_host_using_mapper_find(self): | |
71 | self.assertEquals( | |
72 | self.connector.getDocument("1234"), | |
73 | None, | |
74 | "Nonexistent host should return None document") | |
75 | ||
76 | self.assertEquals( | |
77 | self.mapper_manager.getMapper(Host.__name__).find("1234"), | |
78 | None, | |
79 | "Nonexistent host should return None object") | |
80 | ||
81 | def test_find_not_loaded_host(self): | |
82 | host = Host(name="pepito", os="linux") | |
83 | host.setDescription("Some description") | |
84 | host.setOwned(True) | |
85 | self.mapper_manager.save(host) | |
86 | ||
87 | #create a set of mappers, so we have a clean map | |
88 | self.mapper_manager = MapperManager() | |
89 | self.mapper_manager.createMappers(self.connector) | |
90 | ||
91 | h = self.mapper_manager.find(host.getID()) | |
92 | self.assertNotEquals( | |
93 | h, | |
94 | None, | |
95 | "Existent host shouldn't return None") | |
96 | ||
97 | self.assertEquals( | |
98 | h.getName(), | |
99 | "pepito", | |
100 | "Host name should be pepito") | |
101 | ||
102 | self.assertEquals( | |
103 | h.getOS(), | |
104 | "linux", | |
105 | "Host os should be linux") | |
106 | ||
107 | def test_host_create_and_delete(self): | |
108 | host = Host(name="coquito") | |
109 | self.mapper_manager.save(host) | |
110 | h_id = host.getID() | |
111 | ||
112 | self.assertNotEquals( | |
113 | self.mapper_manager.find(h_id), | |
114 | None, | |
115 | "Host should be in the mapper") | |
116 | ||
117 | self.assertNotEquals( | |
118 | self.connector.getDocument(h_id), | |
119 | None, | |
120 | "Host should be in the db") | |
121 | ||
122 | self.mapper_manager.remove(h_id) | |
123 | ||
124 | self.assertEquals( | |
125 | self.mapper_manager.find(h_id), | |
126 | None, | |
127 | "Host shouldn't exist anymore in the mapper") | |
128 | ||
129 | self.assertEquals( | |
130 | self.connector.getDocument(h_id), | |
131 | None, | |
132 | "Host shouldn't exist anymore in the db") | |
133 | ||
134 | def test_composite_host(self): | |
135 | # add host | |
136 | host = Host(name="pepito", os="linux") | |
137 | host.setDescription("Some description") | |
138 | host.setOwned(True) | |
139 | self.mapper_manager.save(host) | |
140 | # add inteface | |
141 | iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06") | |
142 | iface.setDescription("Some description") | |
143 | iface.setOwned(True) | |
144 | iface.addHostname("www.test.com") | |
145 | iface.setIPv4({ | |
146 | "address": "192.168.10.168", | |
147 | "mask": "255.255.255.0", | |
148 | "gateway": "192.168.10.1", | |
149 | "DNS": "192.168.10.1" | |
150 | }) | |
151 | iface.setPortsOpened(2) | |
152 | iface.setPortsClosed(3) | |
153 | iface.setPortsFiltered(4) | |
154 | host.addChild(iface) | |
155 | self.mapper_manager.save(iface) | |
156 | ||
157 | h = self.mapper_manager.find(host.getID()) | |
158 | self.assertEquals( | |
159 | len(h.getAllInterfaces()), | |
160 | len(host.getAllInterfaces()), | |
161 | "Interfaces from original host should be equals to retrieved host's interfaces") | |
162 | ||
163 | i = self.mapper_manager.find(h.getAllInterfaces()[0].getID()) | |
164 | self.assertEquals( | |
165 | i.getID(), | |
166 | iface.getID(), | |
167 | "Interface's id' from original host should be equals to retrieved host's interface's id") | |
168 | ||
169 | def test_load_not_loaded_composite_host(self): | |
170 | # add host | |
171 | host = Host(name="pepito", os="linux") | |
172 | host.setDescription("Some description") | |
173 | host.setOwned(True) | |
174 | self.mapper_manager.save(host) | |
175 | # add inteface | |
176 | iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06") | |
177 | iface.setDescription("Some description") | |
178 | iface.setOwned(True) | |
179 | iface.addHostname("www.test.com") | |
180 | iface.setIPv4({ | |
181 | "address": "192.168.10.168", | |
182 | "mask": "255.255.255.0", | |
183 | "gateway": "192.168.10.1", | |
184 | "DNS": "192.168.10.1" | |
185 | }) | |
186 | iface.setPortsOpened(2) | |
187 | iface.setPortsClosed(3) | |
188 | iface.setPortsFiltered(4) | |
189 | host.addChild(iface) | |
190 | self.mapper_manager.save(iface) | |
191 | ||
192 | #create a set of mappers, so we have a clean map | |
193 | self.mapper_manager = MapperManager() | |
194 | self.mapper_manager.createMappers(self.connector) | |
195 | ||
196 | h = self.mapper_manager.find(host.getID()) | |
197 | self.assertEquals( | |
198 | len(h.getAllInterfaces()), | |
199 | len(host.getAllInterfaces()), | |
200 | "Interfaces from original host should be equals to retrieved host's interfaces") | |
201 | ||
202 | i = self.mapper_manager.find(h.getAllInterfaces()[0].getID()) | |
203 | self.assertEquals( | |
204 | i.getID(), | |
205 | iface.getID(), | |
206 | "Interface's id' from original host should be equals to retrieved host's interface's id") | |
207 | ||
208 | ||
class MapperWithFileSystemManagerInegrationTest(unittest.TestCase):
    """Integration tests: MapperManager persisting through the filesystem.

    Mirrors MapperWithCouchDbManagerInegrationTest method-for-method with a
    FileSystemManager backend (no sleep needed on cleanup).
    NOTE(review): "Inegration" is a typo for "Integration"; the near-total
    duplication with the CouchDB class suggests a shared base/mixin
    parameterized on the persistence manager — left as-is in this review.
    """

    def setUp(self):
        # A fresh workspace DB per test isolates tests from one another.
        self.db_name = self.new_random_workspace_name()

        self.fsmanager = FileSystemManager()

        self.connector = self.fsmanager.createDb(self.db_name)
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(self.connector)

    def new_random_workspace_name(self):
        # Fixed prefix + 10 distinct uppercase letters, then lowercased.
        # NOTE(review): range(65, 90) excludes 'Z' (chr(90)) — presumably
        # unintended, but harmless for uniqueness.
        return ("aworkspace" + "".join(random.sample(
            [chr(i) for i in range(65, 90)], 10))).lower()

    def tearDown(self):
        self.fsmanager.deleteDb(self.db_name)

    def test_host_saving(self):
        """A saved host becomes a retrievable document with the same name."""
        host = Host(name="pepito", os="linux")
        host.setDescription("Some description")
        host.setOwned(True)
        self.mapper_manager.save(host)

        self.assertNotEquals(
            self.connector.getDocument(host.getID()),
            None,
            "Document shouldn't be None")

        self.assertEquals(
            self.connector.getDocument(host.getID()).get("name"),
            host.getName(),
            "Document should have the same host name")

    def test_load_nonexistent_host_using_manager_find(self):
        """Looking up an unknown id through the manager yields None."""
        self.assertEquals(
            self.connector.getDocument("1234"),
            None,
            "Nonexistent host should return None document")

        self.assertEquals(
            self.mapper_manager.find("1234"),
            None,
            "Nonexistent host should return None object")

    def test_load_nonexistent_host_using_mapper_find(self):
        """Looking up an unknown id through the Host mapper yields None."""
        self.assertEquals(
            self.connector.getDocument("1234"),
            None,
            "Nonexistent host should return None document")

        self.assertEquals(
            self.mapper_manager.getMapper(Host.__name__).find("1234"),
            None,
            "Nonexistent host should return None object")

    def test_find_not_loaded_host(self):
        """A host saved earlier can be re-loaded by a brand-new mapper set."""
        host = Host(name="pepito", os="linux")
        host.setDescription("Some description")
        host.setOwned(True)
        self.mapper_manager.save(host)

        # create a set of mappers, so we have a clean map (forces a disk read
        # instead of returning the cached in-memory object)
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(self.connector)

        h = self.mapper_manager.find(host.getID())
        self.assertNotEquals(
            h,
            None,
            "Existent host shouldn't return None")

        self.assertEquals(
            h.getName(),
            "pepito",
            "Host name should be pepito")

        self.assertEquals(
            h.getOS(),
            "linux",
            "Host os should be linux")

    def test_host_create_and_delete(self):
        """Removing a host clears it from both the mapper and the DB."""
        host = Host(name="coquito")
        self.mapper_manager.save(host)
        h_id = host.getID()

        self.assertNotEquals(
            self.mapper_manager.find(h_id),
            None,
            "Host should be in the mapper")

        self.assertNotEquals(
            self.connector.getDocument(h_id),
            None,
            "Host should be in the db")

        self.mapper_manager.remove(h_id)

        self.assertEquals(
            self.mapper_manager.find(h_id),
            None,
            "Host shouldn't exist anymore in the mapper")

        self.assertEquals(
            self.connector.getDocument(h_id),
            None,
            "Host shouldn't exist anymore in the db")

    def test_composite_host(self):
        """A host saved with an interface child keeps the relation on reload."""
        # add host
        host = Host(name="pepito", os="linux")
        host.setDescription("Some description")
        host.setOwned(True)
        self.mapper_manager.save(host)
        # add interface
        iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06")
        iface.setDescription("Some description")
        iface.setOwned(True)
        iface.addHostname("www.test.com")
        iface.setIPv4({
            "address": "192.168.10.168",
            "mask": "255.255.255.0",
            "gateway": "192.168.10.1",
            "DNS": "192.168.10.1"
        })
        iface.setPortsOpened(2)
        iface.setPortsClosed(3)
        iface.setPortsFiltered(4)
        host.addChild(iface)
        self.mapper_manager.save(iface)

        h = self.mapper_manager.find(host.getID())
        self.assertEquals(
            len(h.getAllInterfaces()),
            len(host.getAllInterfaces()),
            "Interfaces from original host should be equals to retrieved host's interfaces")

        i = self.mapper_manager.find(h.getAllInterfaces()[0].getID())
        self.assertEquals(
            i.getID(),
            iface.getID(),
            "Interface's id' from original host should be equals to retrieved host's interface's id")

    def test_load_not_loaded_composite_host(self):
        """Same as test_composite_host, but re-loaded through fresh mappers."""
        # add host
        host = Host(name="pepito", os="linux")
        host.setDescription("Some description")
        host.setOwned(True)
        self.mapper_manager.save(host)
        # add interface
        iface = Interface(name="192.168.10.168", mac="01:02:03:04:05:06")
        iface.setDescription("Some description")
        iface.setOwned(True)
        iface.addHostname("www.test.com")
        iface.setIPv4({
            "address": "192.168.10.168",
            "mask": "255.255.255.0",
            "gateway": "192.168.10.1",
            "DNS": "192.168.10.1"
        })
        iface.setPortsOpened(2)
        iface.setPortsClosed(3)
        iface.setPortsFiltered(4)
        host.addChild(iface)
        self.mapper_manager.save(iface)

        # create a set of mappers, so we have a clean map (forces disk reads)
        self.mapper_manager = MapperManager()
        self.mapper_manager.createMappers(self.connector)

        h = self.mapper_manager.find(host.getID())
        self.assertEquals(
            len(h.getAllInterfaces()),
            len(host.getAllInterfaces()),
            "Interfaces from original host should be equals to retrieved host's interfaces")

        i = self.mapper_manager.find(h.getAllInterfaces()[0].getID())
        self.assertEquals(
            i.getID(),
            iface.getID(),
            "Interface's id' from original host should be equals to retrieved host's interface's id")
391 | ||
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | from unittest import TestCase | |
8 | import unittest | |
9 | import sys | |
10 | sys.path.append('.') | |
11 | import model.controller as controller | |
12 | import plugins.core as plcore | |
13 | from mockito import mock, verify, when, any | |
14 | from model import api | |
15 | from model.hosts import Host, Interface, Service | |
16 | from model.common import ModelObjectVuln, ModelObjectVulnWeb, ModelObjectNote, ModelComposite, ModelObjectCred | |
17 | import random | |
18 | ||
19 | from model.visitor import VulnsLookupVisitor | |
20 | import test_cases.common as test_utils | |
21 | ||
22 | ||
class ModelObjectComposite(unittest.TestCase):
    """Checks the parent/child composite relations between model objects."""

    def testAddInterfaceToHost(self):
        """A host exposes an added interface via childs and getAllInterfaces."""
        iface = Interface('cuca')
        owner = Host('coco')
        owner.addChild(iface)

        self.assertIn(iface, owner.childs.values(), 'Interface not in childs')
        self.assertIn(iface, owner.getAllInterfaces(), 'Interface not accessible')

    def testAddServiceToInterface(self):
        """An interface exposes an added service via childs and getAllServices."""
        svc = Service('cuca')
        iface = Interface('coco')
        iface.addChild(svc)

        self.assertIn(svc, iface.childs.values(), 'Service not in childs')
        self.assertIn(svc, iface.getAllServices(), 'Service not accessible')

    def testAddVulnToInterface(self):
        """A service exposes an added vulnerability via childs and getVulns."""
        flaw = ModelObjectVuln('vuln')
        svc = Service('cuca')
        svc.addChild(flaw)

        self.assertIn(flaw, svc.childs.values(), 'Vuln not in childs')
        self.assertIn(flaw, svc.getVulns(), 'Vuln not accessible')

    def testHostWithMultipleChildTypes(self):
        """A host keeps interface and vuln children in separate typed views."""
        owner = Host('coco')
        flaw = ModelObjectVuln('vuln')
        owner.addChild(Interface('cuca'))
        owner.addChild(flaw)

        self.assertEquals(len(owner.getVulns()), 1, "Vulns added is not 1")
        self.assertIn(flaw, owner.getVulns(), "Vuln not accessible")
        self.assertEquals(len(owner.getAllInterfaces()), 1, "Interfaces added is not 1")

    def testInterfaceWithMultipleChildTypes(self):
        """An interface keeps service and vuln children in separate typed views."""
        iface = Interface('coco')
        flaw = ModelObjectVuln('vuln')
        iface.addChild(Service('cuca'))
        iface.addChild(flaw)

        self.assertEquals(len(iface.getVulns()), 1, "Vulns added is not 1")
        self.assertIn(flaw, iface.getVulns(), "Vuln not accessible")
        self.assertEquals(len(iface.getAllServices()), 1, "Services added is not 1")

    def testServiceWithMultipleChildTypes(self):
        """A service keeps note and vuln children in separate typed views."""
        flaw = ModelObjectVuln('vuln')
        memo = ModelObjectNote('nota')
        svc = Service('cuca')
        svc.addChild(memo)
        svc.addChild(flaw)

        self.assertEquals(len(svc.getVulns()), 1, "Vulns added is not 1")
        self.assertIn(flaw, svc.getVulns(), "Vuln not accessible")
        self.assertEquals(len(svc.getNotes()), 1, "Notes added is not 1")
        self.assertIn(memo, svc.getNotes(), "Note not accessible")

    def testHostWithCredentials(self):
        """A credential added to a host is reachable through getCreds."""
        secret = ModelObjectCred('coco', 'coco123')
        owner = Host('coco')
        owner.addChild(secret)
        self.assertEquals(len(owner.getCreds()), 1, "Creds added is not 1")
        self.assertIn(secret, owner.getCreds(), "Cred not accessible")

    def testInterfaceSetServices(self):
        """setServices registers every service from the given id->service map."""
        iface = Interface('coco')
        built = [Service('cuca%s' % port, ports=[port]) for port in range(50, 60)]
        services = dict((s.getID(), s) for s in built)
        iface.setServices(services)

        self.assertEquals(len(iface.getChildsByType(Service.__name__)), 10, "not all services added")
        for svc in services.values():
            self.assertIn(svc, iface.getChildsByType(Service.__name__), "what happened with services?")

    def testHostSetInterfaces(self):
        """setInterfaces registers every interface from the given id->iface map."""
        owner = Host('coco')
        built = [Interface('cuca%s' % n, ipv4_address="192.168.0.%d" % n)
                 for n in range(50, 60)]
        interfaces = dict((i.getID(), i) for i in built)
        owner.setInterfaces(interfaces)

        self.assertEquals(len(owner.getChildsByType(Interface.__name__)), 10, "not all interfaces added")
        for iface in interfaces.values():
            self.assertIn(iface, owner.getChildsByType(Interface.__name__), "what happened with interfaces?")
113 | ||
114 | ||
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
117 | ||
118 | ||
119 | ||
120 |
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | import unittest | |
8 | import sys | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from mockito import mock, verify, when, any | |
12 | from model.hosts import Host, Interface, Service | |
13 | from model.common import ModelObjectVuln, ModelObjectVulnWeb, ModelObjectNote, ModelObjectCred | |
14 | ||
15 | ||
16 | class ModelObjectControllerUnitTest(unittest.TestCase): | |
17 | # TODO: Notifier goes into mapper? | |
18 | ||
    def testAddHostGetsMapperDispatchSaveSYNC(self):
        """addHostSYNC routes the host to its class mapper and saves it."""
        host = Host('coco')

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(host).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addHostSYNC(host)
        verify(mappersManager).getMapper(host.class_signature)
        verify(objectMapper).save(host)
32 | ||
    def testAddHostGetsMapperDispatchSaveASYNC(self):
        """addHostASYNC queues the save; it runs on processAllPendingActions."""
        host = Host('coco')

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(host).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addHostASYNC(host)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(host.class_signature)
        verify(objectMapper).save(host)
48 | ||
    def testAddInterfaceGetsMapperDispatchSaveSYNC(self):
        """addInterfaceSYNC routes the interface to its mapper and saves it."""
        host = Host('coco')
        interface = Interface("int_mock0")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(interface).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addInterfaceSYNC(host.getID(), interface)
        verify(mappersManager).getMapper(interface.class_signature)
        verify(objectMapper).save(interface)
63 | ||
    def testAddInterfaceGetsMapperDispatchSaveASYNC(self):
        """addInterfaceASYNC queues the save; runs on processAllPendingActions."""
        host = Host('coco')
        interface = Interface("int_mock0")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(interface).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addInterfaceASYNC(host.getID(), interface)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(interface.class_signature)
        verify(objectMapper).save(interface)
80 | ||
    def testAddObjectSavesChildInParent(self):
        """Adding a child also attaches it to the parent looked up via find."""
        host = Host('coco')
        interface = Interface("int_mock0")

        mappersManager = self.createMapperMock()
        objectMapper = mock()

        when(mappersManager).getMapper(interface.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(interface).thenReturn(True)
        # Parent lookup must resolve so the controller can link the child.
        when(mappersManager).find(host.getID()).thenReturn(host)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addInterfaceSYNC(host.getID(), interface)
        verify(mappersManager).getMapper(interface.class_signature)
        verify(objectMapper).save(interface)

        self.assertEquals(interface, host.findChild(interface.getID()),
                          "Orphan child, what happen papi?")
100 | ||
    def testAddServiceGetsMapperDispatchSaveSYNC(self):
        """addServiceToInterfaceSYNC routes the service to its mapper and saves it."""
        interface = Interface("int_mock0")
        service = Service("servi")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(service.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(service).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addServiceToInterfaceSYNC(None, interface.getID(), service)

        verify(mappersManager).getMapper(service.class_signature)
        verify(objectMapper).save(service)
116 | ||
    def testAddServiceGetsMapperDispatchSaveASYNC(self):
        """addServiceToInterfaceASYNC queues the save until actions are processed."""
        interface = Interface("int_mock0")
        service = Service("servi")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(service.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(service).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addServiceToInterfaceASYNC(None, interface.getID(), service)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(service.class_signature)
        verify(objectMapper).save(service)
133 | ||
    def testAddVulnToServiceGetsMapperDispatchSaveSYNC(self):
        """addVulnToServiceSYNC routes the vuln to its mapper and saves it."""
        service = Service("servi")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToServiceSYNC(None, service.getID(), vuln)

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
149 | ||
    def testAddVulnToServiceGetsMapperDispatchSaveASYNC(self):
        """addVulnToServiceASYNC queues the save until actions are processed."""
        service = Service("servi")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToServiceASYNC(None, service.getID(), vuln)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
166 | ||
    def testAddVulnToInterfaceGetsMapperDispatchSaveSYNC(self):
        """addVulnToInterfaceSYNC routes the vuln to its mapper and saves it."""
        interface = Interface("int0")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToInterfaceSYNC(None, interface.getID(), vuln)

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
182 | ||
    def testAddVulnToInterfaceGetsMapperDispatchSaveASYNC(self):
        """addVulnToInterfaceASYNC queues the save until actions are processed."""
        interface = Interface("int0")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToInterfaceASYNC(None, interface.getID(), vuln)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
199 | ||
    def testAddVulnToHostGetsMapperDispatchSaveSYNC(self):
        """addVulnToHostSYNC routes the vuln to its mapper and saves it."""
        host = Host("pepito")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToHostSYNC(host.getID(), vuln)

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
215 | ||
    def testAddVulnToHostGetsMapperDispatchSaveASYNC(self):
        """addVulnToHostASYNC queues the save until actions are processed."""
        host = Host("pepito")
        vuln = ModelObjectVuln("a_vuln")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(vuln.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(vuln).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addVulnToHostASYNC(host.getID(), vuln)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(vuln.class_signature)
        verify(objectMapper).save(vuln)
232 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveSYNC(self):
        """addNoteToServiceSYNC routes the note to its mapper and saves it.

        NOTE(review): re-defined verbatim later in this class; Python keeps
        the later definition, so this copy never runs — remove one of them.
        """
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, service.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
248 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveASYNC(self):
        """addNoteToServiceASYNC queues the save until actions are processed.

        NOTE(review): re-defined verbatim later in this class; Python keeps
        the later definition, so this copy never runs — remove one of them.
        """
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceASYNC(None, service.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
265 | ||
    def testAddNoteToVulnGetsMapperDispatchSave(self):
        """Saving a note against a vuln id dispatches to the note's mapper.

        NOTE(review): despite the name, this calls addNoteToServiceSYNC with
        a vuln id. A dedicated addNoteToVuln* API isn't visible from here, so
        the call is left unchanged — confirm against model.controller.
        """
        vuln = ModelObjectVuln('a vuln')
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, vuln.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
281 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveSYNC(self):
        """addNoteToServiceSYNC routes the note to its mapper and saves it.

        NOTE(review): verbatim duplicate of an earlier method of the same
        name in this class; this later definition is the one that runs.
        """
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceSYNC(None, service.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
297 | ||
    def testAddNoteToServiceGetsMapperDispatchSaveASYNC(self):
        """addNoteToServiceASYNC queues the save until actions are processed.

        NOTE(review): verbatim duplicate of an earlier method of the same
        name in this class; this later definition is the one that runs.
        """
        service = Service("servi")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToServiceASYNC(None, service.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
314 | ||
315 | def testAddNoteToInterfaceGetsMapperDispatchSaveSYNC(self): | |
316 | interface = Interface("int0") | |
317 | note = ModelObjectNote("a_note") | |
318 | ||
319 | mappersManager = self.createMapperMock() | |
320 | objectMapper = mock() | |
321 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
322 | when(objectMapper).save(note).thenReturn(True) | |
323 | ||
324 | model_controller = controller.ModelController(mock(), mappersManager) | |
325 | ||
326 | model_controller.addNoteToServiceSYNC(None, interface.getID(), note) | |
327 | ||
328 | verify(mappersManager).getMapper(note.class_signature) | |
329 | verify(objectMapper).save(note) | |
330 | ||
331 | def testAddNoteToInterfaceGetsMapperDispatchSaveASYNC(self): | |
332 | interface = Interface("int0") | |
333 | note = ModelObjectNote("a_note") | |
334 | ||
335 | mappersManager = self.createMapperMock() | |
336 | objectMapper = mock() | |
337 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
338 | when(objectMapper).save(note).thenReturn(True) | |
339 | ||
340 | model_controller = controller.ModelController(mock(), mappersManager) | |
341 | ||
342 | model_controller.addNoteToServiceASYNC(None, interface.getID(), note) | |
343 | model_controller.processAllPendingActions() | |
344 | ||
345 | verify(mappersManager).getMapper(note.class_signature) | |
346 | verify(objectMapper).save(note) | |
347 | ||
    def testAddNoteToHostGetsMapperDispatchSaveSYNC(self):
        """addNoteToHostSYNC routes the note to its mapper and saves it."""
        host = Host("pepito")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToHostSYNC(host.getID(), note)

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
363 | ||
    def testAddNoteToHostGetsMapperDispatchSaveASYNC(self):
        """addNoteToHostASYNC queues the save until actions are processed."""
        host = Host("pepito")
        note = ModelObjectNote("a_note")

        mappersManager = self.createMapperMock()
        objectMapper = mock()
        when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper)
        when(objectMapper).save(note).thenReturn(True)

        model_controller = controller.ModelController(mock(), mappersManager)

        model_controller.addNoteToHostASYNC(host.getID(), note)
        model_controller.processAllPendingActions()

        verify(mappersManager).getMapper(note.class_signature)
        verify(objectMapper).save(note)
380 | ||
381 | def testAddNoteToInterfaceGetsMapperDispatchSaveSYNC(self): | |
382 | interface = Interface("pepito") | |
383 | note = ModelObjectNote("a_note") | |
384 | ||
385 | mappersManager = self.createMapperMock() | |
386 | objectMapper = mock() | |
387 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
388 | when(objectMapper).save(note).thenReturn(True) | |
389 | ||
390 | model_controller = controller.ModelController(mock(), mappersManager) | |
391 | ||
392 | model_controller.addNoteToInterfaceSYNC(None, interface.getID(), note) | |
393 | ||
394 | verify(mappersManager).getMapper(note.class_signature) | |
395 | verify(objectMapper).save(note) | |
396 | ||
397 | def testAddNoteToInterfaceGetsMapperDispatchSaveASYNC(self): | |
398 | interface = Interface("pepito") | |
399 | note = ModelObjectNote("a_note") | |
400 | ||
401 | mappersManager = self.createMapperMock() | |
402 | objectMapper = mock() | |
403 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
404 | when(objectMapper).save(note).thenReturn(True) | |
405 | ||
406 | model_controller = controller.ModelController(mock(), mappersManager) | |
407 | ||
408 | model_controller.addNoteToInterfaceASYNC(None, interface.getID(), note) | |
409 | model_controller.processAllPendingActions() | |
410 | ||
411 | verify(mappersManager).getMapper(note.class_signature) | |
412 | verify(objectMapper).save(note) | |
413 | ||
414 | def testAddNoteToNoteGetsMapperDispatchSaveSYNC(self): | |
415 | host = Host("pepito") | |
416 | note = ModelObjectNote("a_note") | |
417 | ||
418 | mappersManager = self.createMapperMock() | |
419 | objectMapper = mock() | |
420 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
421 | when(objectMapper).save(note).thenReturn(True) | |
422 | ||
423 | model_controller = controller.ModelController(mock(), mappersManager) | |
424 | ||
425 | model_controller.addNoteToNoteSYNC(note.getID(), note) | |
426 | ||
427 | verify(mappersManager).getMapper(note.class_signature) | |
428 | verify(objectMapper).save(note) | |
429 | ||
430 | def testAddNoteToNoteGetsMapperDispatchSaveASYNC(self): | |
431 | host = Host("pepito") | |
432 | note = ModelObjectNote("a_note") | |
433 | ||
434 | mappersManager = self.createMapperMock() | |
435 | objectMapper = mock() | |
436 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
437 | when(objectMapper).save(note).thenReturn(True) | |
438 | ||
439 | model_controller = controller.ModelController(mock(), mappersManager) | |
440 | ||
441 | model_controller.addNoteToNoteASYNC(None, None, note.getID(), note) | |
442 | model_controller.processAllPendingActions() | |
443 | ||
444 | verify(mappersManager).getMapper(note.class_signature) | |
445 | verify(objectMapper).save(note) | |
446 | ||
447 | def testAddSavesObjectNameInTrie(self): | |
448 | host = Host('coco') | |
449 | ||
450 | mappersManager = self.createMapperMock() | |
451 | objectMapper = mock() | |
452 | triemock = mock() | |
453 | ||
454 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
455 | when(objectMapper).save(host).thenReturn(True) | |
456 | when(triemock).addWord(host.getName()).thenReturn(True) | |
457 | ||
458 | model_controller = controller.ModelController(mock(), mappersManager) | |
459 | model_controller.treeWordsTries = triemock | |
460 | ||
461 | model_controller.addHostSYNC(host) | |
462 | ||
463 | verify(mappersManager).getMapper(host.class_signature) | |
464 | verify(objectMapper).save(host) | |
465 | verify(triemock).addWord(host.getName()) | |
466 | ||
467 | def testAddNoteToModelObjectSYNC(self): | |
468 | host = Host("pepito") | |
469 | note = ModelObjectNote("a_note") | |
470 | ||
471 | mappersManager = self.createMapperMock() | |
472 | objectMapper = mock() | |
473 | when(mappersManager).getMapper(note.class_signature).thenReturn(objectMapper) | |
474 | when(objectMapper).save(note).thenReturn(True) | |
475 | ||
476 | model_controller = controller.ModelController(mock(), mappersManager) | |
477 | ||
478 | model_controller.addNoteSYNC(host.getID(), note) | |
479 | ||
480 | verify(mappersManager).getMapper(note.class_signature) | |
481 | verify(objectMapper).save(note) | |
482 | ||
    def createMapperMock(self):
        """Return a mappers-manager mock whose find() yields a fresh mock
        for any id and None when the id itself is None."""
        map_mock = mock()
        when(map_mock).find(any()).thenReturn(mock())
        when(map_mock).find(None).thenReturn(None)
        return map_mock
488 | ||
489 | def testAddCredGetsMapperDispatchSaveSYNC(self): | |
490 | host = Host("pepito") | |
491 | cred = ModelObjectCred("usr", "pass") | |
492 | ||
493 | mappersManager = self.createMapperMock() | |
494 | objectMapper = mock() | |
495 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
496 | when(objectMapper).save(cred).thenReturn(True) | |
497 | ||
498 | model_controller = controller.ModelController(mock(), mappersManager) | |
499 | ||
500 | model_controller.addCredSYNC(cred.getID(), cred) | |
501 | ||
502 | verify(mappersManager).getMapper(cred.class_signature) | |
503 | verify(objectMapper).save(cred) | |
504 | ||
505 | ||
506 | def testAddCredToServiceGetsMapperDispatchSaveSYNC(self): | |
507 | service = Service("pepito") | |
508 | cred = ModelObjectCred("usr", "pass") | |
509 | ||
510 | mappersManager = self.createMapperMock() | |
511 | objectMapper = mock() | |
512 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
513 | when(objectMapper).save(cred).thenReturn(True) | |
514 | ||
515 | model_controller = controller.ModelController(mock(), mappersManager) | |
516 | ||
517 | model_controller.addCredToServiceSYNC(None, cred.getID(), cred) | |
518 | ||
519 | verify(mappersManager).getMapper(cred.class_signature) | |
520 | verify(objectMapper).save(cred) | |
521 | ||
522 | def testAddCredToServiceGetsMapperDispatchSaveASYNC(self): | |
523 | service = Service("pepito") | |
524 | cred = ModelObjectCred("usr", "pass") | |
525 | ||
526 | mappersManager = self.createMapperMock() | |
527 | objectMapper = mock() | |
528 | when(mappersManager).getMapper(cred.class_signature).thenReturn(objectMapper) | |
529 | when(objectMapper).save(cred).thenReturn(True) | |
530 | ||
531 | model_controller = controller.ModelController(mock(), mappersManager) | |
532 | ||
533 | model_controller.addCredToServiceASYNC(None, cred.getID(), cred) | |
534 | model_controller.processAllPendingActions() | |
535 | ||
536 | verify(mappersManager).getMapper(cred.class_signature) | |
537 | verify(objectMapper).save(cred) | |
538 | ||
539 | def testDeleteHostObjectDispatchRemoveSYNC(self): | |
540 | host = Host("coquito") | |
541 | ||
542 | mappersManager = self.createMapperMock() | |
543 | objectMapper = mock() | |
544 | when(mappersManager).find(host.getID()).thenReturn(host) | |
545 | when(mappersManager).remove(host.getID()).thenReturn(True) | |
546 | ||
547 | model_controller = controller.ModelController(mock(), mappersManager) | |
548 | model_controller.delHostSYNC(host.getID()) | |
549 | verify(mappersManager).remove(host.getID()) | |
550 | verify(mappersManager).find(host.getID()) | |
551 | ||
552 | def testDeleteHostObjectDispatchRemoveASYNC(self): | |
553 | host = Host("coquito") | |
554 | ||
555 | mappersManager = self.createMapperMock() | |
556 | objectMapper = mock() | |
557 | when(mappersManager).find(host.getID()).thenReturn(host) | |
558 | when(mappersManager).remove(host.getID()).thenReturn(True) | |
559 | ||
560 | model_controller = controller.ModelController(mock(), mappersManager) | |
561 | model_controller.delHostASYNC(host.getID()) | |
562 | model_controller.processAllPendingActions() | |
563 | ||
564 | verify(mappersManager).remove(host.getID()) | |
565 | ||
566 | def testDeleteModelObjectRemovesChildFromParentSYNC(self): | |
567 | host = Host('coco') | |
568 | interface = Interface("int_mock0") | |
569 | self.genericDelTest(host, interface, controller.ModelController.delInterfaceSYNC) | |
570 | ||
571 | def testDeleteModelObjectRemovesChildFromParentASYNC(self): | |
572 | host = Host('coco') | |
573 | interface = Interface("int_mock0") | |
574 | self.genericDelTest(host, interface, controller.ModelController.delInterfaceASYNC, process_pending=True) | |
575 | ||
    def testInterfaceFromHostRemovedSYNC(self):
        # Deleting an interface via delInterfaceSYNC must remove it from
        # the model (shared scaffolding lives in genericDelTest).
        # NOTE(review): a later method in this class appears to reuse this
        # exact name; if so, Python keeps only the last definition and this
        # test is silently skipped — confirm and rename one of them.
        host = Host('coco')
        interface = Interface("int_mock0")
        self.genericDelTest(host, interface,
                            controller.ModelController.delInterfaceSYNC)
581 | ||
582 | def testInterfaceFromHostRemovedSYNC(self): | |
583 | service = Service('coco') | |
584 | interface = Interface("int_mock0") | |
585 | interface.addChild(service) | |
586 | self.genericDelTest(interface, service, | |
587 | controller.ModelController.delServiceFromInterfaceSYNC) | |
588 | ||
589 | def testInterfaceFromHostRemovedASYNC(self): | |
590 | service = Service('coco') | |
591 | interface = Interface("int_mock0") | |
592 | interface.addChild(service) | |
593 | self.genericDelTest(interface, service, | |
594 | controller.ModelController.delServiceFromInterfaceASYNC, process_pending=True) | |
595 | ||
596 | def testDelVulnFromHostSYNC(self): | |
597 | host = Host('coco') | |
598 | vuln = ModelObjectVuln("int_mock0") | |
599 | host.addChild(vuln) | |
600 | self.genericDelTest(host, vuln, | |
601 | controller.ModelController.delVulnFromHostSYNC) | |
602 | ||
603 | def testDelVulnFromHostASYNC(self): | |
604 | host = Host('coco') | |
605 | vuln = ModelObjectVuln("int_mock0") | |
606 | host.addChild(vuln) | |
607 | self.genericDelTest(host, vuln, | |
608 | controller.ModelController.delVulnFromHostASYNC, process_pending=True) | |
609 | ||
610 | def testDelVulnFromObjectSYNC(self): | |
611 | host = Host('coco') | |
612 | vuln = ModelObjectVuln("int_mock0") | |
613 | host.addChild(vuln) | |
614 | self.genericDelTest(host, vuln, | |
615 | controller.ModelController.delVulnSYNC) | |
616 | ||
617 | def testDelVulnFromServiceSYNC(self): | |
618 | service = Service('coco') | |
619 | vuln = ModelObjectVuln("int_mock0") | |
620 | service.addChild(vuln) | |
621 | self.genericDelTest(service, vuln, | |
622 | controller.ModelController.delVulnFromServiceSYNC) | |
623 | ||
624 | def testDelVulnFromServiceASYNC(self): | |
625 | service = Service('coco') | |
626 | vuln = ModelObjectVuln("int_mock0") | |
627 | service.addChild(vuln) | |
628 | self.genericDelTest(service, vuln, | |
629 | controller.ModelController.delVulnFromServiceASYNC, process_pending=True) | |
630 | ||
631 | # def delNoteFromInterfaceSYNC(self, hostname, intname, noteId): | |
632 | ||
633 | def testDelNoteFromInterfaceSYNC(self): | |
634 | interface = Interface('coco') | |
635 | note = ModelObjectNote("int_mock0") | |
636 | interface.addChild(note) | |
637 | self.genericDelTest(interface, note, | |
638 | controller.ModelController.delNoteFromInterfaceSYNC) | |
639 | ||
640 | def testDelNoteFromInterfaceASYNC(self): | |
641 | interface = Interface('coco') | |
642 | note = ModelObjectNote("int_mock0") | |
643 | interface.addChild(note) | |
644 | self.genericDelTest(interface, note, | |
645 | controller.ModelController.delNoteFromInterfaceASYNC, process_pending=True) | |
646 | ||
647 | ||
648 | def testDelNoteFromServiceSYNC(self): | |
649 | service = Service('coco') | |
650 | note = ModelObjectNote("int_mock0") | |
651 | service.addChild(note) | |
652 | self.genericDelTest(service, note, | |
653 | controller.ModelController.delNoteFromServiceSYNC) | |
654 | ||
655 | def testDelNoteFromServiceASYNC(self): | |
656 | service = Service('coco') | |
657 | note = ModelObjectNote("int_mock0") | |
658 | service.addChild(note) | |
659 | self.genericDelTest(service, note, | |
660 | controller.ModelController.delNoteFromServiceASYNC, process_pending=True) | |
661 | ||
    def testDelNoteFromHostSYNC(self):
        # delNoteFromHostSYNC must drop the note from its parent host.
        # NOTE(review): a later method in this class appears to reuse this
        # exact name; if so, Python keeps only the last definition and this
        # test is silently skipped — confirm and rename one of them.
        host = Host('coco')
        note = ModelObjectNote("int_mock0")
        host.addChild(note)
        self.genericDelTest(host, note,
                            controller.ModelController.delNoteFromHostSYNC)
668 | ||
669 | def testDelNoteFromHostSYNC(self): | |
670 | host = Host('coco') | |
671 | note = ModelObjectNote("int_mock0") | |
672 | host.addChild(note) | |
673 | self.genericDelTest(host, note, | |
674 | controller.ModelController.delNoteFromHostASYNC, process_pending=True) | |
675 | ||
676 | def testDelNoteFromModelObjectSYNC(self): | |
677 | host = Host('coco') | |
678 | note = ModelObjectNote("int_mock0") | |
679 | host.addChild(note) | |
680 | self.genericDelTest(host, note, | |
681 | controller.ModelController.delNoteSYNC) | |
682 | ||
683 | def testDelCredentialFromServiceSYNC(self): | |
684 | service = Service('coco') | |
685 | cred = ModelObjectCred("int_mock0") | |
686 | service.addChild(cred) | |
687 | self.genericDelTest(service, cred, | |
688 | controller.ModelController.delCredFromServiceSYNC) | |
689 | ||
690 | def testDelCredentialFromServiceASYNC(self): | |
691 | service = Service('coco') | |
692 | cred = ModelObjectCred("int_mock0") | |
693 | service.addChild(cred) | |
694 | self.genericDelTest(service, cred, | |
695 | controller.ModelController.delCredFromServiceASYNC, process_pending=True) | |
696 | ||
697 | def testDelCredentialFromModelObjectSYNC(self): | |
698 | service = Service('coco') | |
699 | cred = ModelObjectCred("int_mock0") | |
700 | service.addChild(cred) | |
701 | self.genericDelTest(service, cred, | |
702 | controller.ModelController.delCredSYNC) | |
703 | ||
704 | def testDelRemovesObjectFromTrie(self): | |
705 | host = Host("coquito") | |
706 | ||
707 | mappersManager = self.createMapperMock() | |
708 | objectMapper = mock() | |
709 | triemock = mock() | |
710 | when(mappersManager).getMapper(host.class_signature).thenReturn(objectMapper) | |
711 | when(mappersManager).find(host.getID()).thenReturn(host) | |
712 | when(triemock).addWord(host.getName()).thenReturn(True) | |
713 | ||
714 | model_controller = controller.ModelController(mock(), mappersManager) | |
715 | model_controller.treeWordsTries = triemock | |
716 | model_controller.delHostSYNC(host.getID()) | |
717 | verify(mappersManager).remove(host.getID()) | |
718 | ||
719 | verify(triemock).removeWord(host.getName()) | |
720 | ||
721 | def genericDelTest(self, obj1, obj2, test_method, process_pending=False): | |
722 | mappersManager = self.createMapperMock() | |
723 | objectMapper = mock() | |
724 | triemock = mock() | |
725 | when(mappersManager).find(obj2.getID()).thenReturn(obj2) | |
726 | when(objectMapper).delObject(obj2.getID()).thenReturn(True) | |
727 | ||
728 | model_controller = controller.ModelController(mock(), mappersManager) | |
729 | model_controller.treeWordsTries = triemock | |
730 | ||
731 | try: | |
732 | test_method(model_controller, None, obj2.getID()) | |
733 | except: | |
734 | test_method(model_controller, None, None, obj2.getID()) | |
735 | ||
736 | if process_pending: | |
737 | model_controller.processAllPendingActions() | |
738 | ||
739 | verify(mappersManager).find(obj2.getID()) | |
740 | verify(mappersManager).remove(obj2.getID()) | |
741 | ||
742 | def testEditHostSyncGetsMapperDispatchedSYNC(self): | |
743 | host = Host("coquito") | |
744 | ||
745 | mappersManager = self.createMapperMock() | |
746 | dataMapper = mock() | |
747 | objectMapper = mock() | |
748 | triemock = mock() | |
749 | when(mappersManager).getMapper(host.class_signature).thenReturn(dataMapper) | |
750 | when(dataMapper).save(host).thenReturn(True) | |
751 | ||
752 | model_controller = controller.ModelController(mock(), mappersManager) | |
753 | ||
754 | model_controller.editHostSYNC(host, 'new_name', 'new_desc', 'new_os', True) | |
755 | ||
756 | verify(dataMapper).save(host) | |
757 | ||
758 | self.assertEquals(host.getName(), 'new_name', "Name not updated") | |
759 | self.assertEquals(host.getDescription(), 'new_desc', "Description not updated") | |
760 | self.assertEquals(host.getOS(), 'new_os', "OS not updated") | |
761 | self.assertEquals(host.isOwned(), True, "Owned status not updated") | |
762 | ||
    def testEditServiceSyncGetsMapperDispatchedSYNC(self):
        # editServiceSYNC must persist the service and apply the edits.
        # NOTE(review): a later method in this class appears to repeat this
        # definition under the same name; if so, Python keeps only the last
        # one and this copy is dead — confirm and delete the duplicate.
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceSYNC)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
773 | ||
    def testEditServiceSyncGetsMapperDispatchedASYNC(self):
        # Async editService must apply the edits once pending actions run.
        # NOTE(review): a later method in this class appears to repeat this
        # definition under the same name; if so, Python keeps only the last
        # one and this copy is dead — confirm and delete the duplicate.
        service = Service("coquito")

        params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True)
        self.genericEdit(service, params, controller.ModelController.editServiceASYNC,
                         process_pending=True)

        self.assertEquals(service.getName(), 'new_name', "Name not updated")
        self.assertEquals(service.getDescription(), 'new_desc', "Description not updated")
        self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated")
        self.assertEquals(service.isOwned(), True, "Owned status not updated")
785 | ||
786 | def testEditServiceSyncGetsMapperDispatchedSYNC(self): | |
787 | service = Service("coquito") | |
788 | ||
789 | params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True) | |
790 | self.genericEdit(service, params, controller.ModelController.editServiceSYNC) | |
791 | ||
792 | self.assertEquals(service.getName(), 'new_name', "Name not updated") | |
793 | self.assertEquals(service.getDescription(), 'new_desc', "Description not updated") | |
794 | self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated") | |
795 | self.assertEquals(service.isOwned(), True, "Owned status not updated") | |
796 | ||
797 | def testEditServiceSyncGetsMapperDispatchedASYNC(self): | |
798 | service = Service("coquito") | |
799 | ||
800 | params = ('new_name', 'new_desc', 'upd', 9000, 'closed', '2.1', True) | |
801 | self.genericEdit(service, params, controller.ModelController.editServiceASYNC, process_pending=True) | |
802 | ||
803 | self.assertEquals(service.getName(), 'new_name', "Name not updated") | |
804 | self.assertEquals(service.getDescription(), 'new_desc', "Description not updated") | |
805 | self.assertEquals(service.getProtocol(), 'upd', "Protocol not updated") | |
806 | self.assertEquals(service.isOwned(), True, "Owned status not updated") | |
807 | ||
808 | def testEditInterfaceSyncGetsMapperDispatchedSYNC(self): | |
809 | inter = Interface("coquito") | |
810 | ||
811 | params = ('new_name', 'new_desc', 'hostname1', "FF:AA:EE:11:00", None, | |
812 | None, None, None, None, None, True) | |
813 | ||
814 | self.genericEdit(inter, params, controller.ModelController.editInterfaceSYNC) | |
815 | ||
816 | self.assertEquals(inter.getName(), 'new_name', "Name not updated") | |
817 | self.assertEquals(inter.getDescription(), 'new_desc', "Description not updated") | |
818 | self.assertEquals(inter.isOwned(), True, "Owned status not updated") | |
819 | ||
820 | ||
821 | def testEditVulnSyncGetsMapperDispatchedSYNC(self): | |
822 | vuln = ModelObjectVuln("coquito") | |
823 | ||
824 | params = ('new_name', 'new_desc', 'high', "ref1") | |
825 | ||
826 | self.genericEdit(vuln, params, controller.ModelController.editVulnSYNC) | |
827 | ||
828 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
829 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
830 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
831 | ||
832 | def testEditVulnSyncGetsMapperDispatchedASYNC(self): | |
833 | vuln = ModelObjectVuln("coquito") | |
834 | ||
835 | params = ('new_name', 'new_desc', 'high', "ref1") | |
836 | ||
837 | self.genericEdit(vuln, params, controller.ModelController.editVulnASYNC, process_pending=True) | |
838 | ||
839 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
840 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
841 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
842 | ||
843 | def testEditVulnWebSyncGetsMapperDispatchedSYNC(self): | |
844 | vuln = ModelObjectVulnWeb("coquito") | |
845 | ||
846 | params = ('new_name', 'new_desc', 'www.goole.com', 'index.html', | |
847 | "ref1", 'high', None, None, 'GET', 'pepe', 'coco' , 'caca', | |
848 | None) | |
849 | ||
850 | self.genericEdit(vuln, params, controller.ModelController.editVulnWebSYNC) | |
851 | ||
852 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
853 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
854 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
855 | ||
856 | def testEditVulnWebSyncGetsMapperDispatchedASYNC(self): | |
857 | vuln = ModelObjectVulnWeb("coquito") | |
858 | ||
859 | params = ('new_name', 'new_desc', 'www.goole.com', 'index.html', | |
860 | "ref1", 'high', None, None, 'GET', 'pepe', 'coco' , 'caca', | |
861 | None) | |
862 | ||
863 | self.genericEdit(vuln, params, controller.ModelController.editVulnWebASYNC, process_pending=True) | |
864 | ||
865 | self.assertEquals(vuln.getName(), 'new_name', "Name not updated") | |
866 | self.assertEquals(vuln.getDescription(), 'new_desc', "Description not updated") | |
867 | self.assertEquals(vuln.getSeverity(), 'high', "Severity not updated") | |
868 | ||
869 | def testEditNoteSyncGetsMapperDispatchedSYNC(self): | |
870 | note = ModelObjectNote("coquito") | |
871 | ||
872 | params = ('new_name', 'new_desc') | |
873 | self.genericEdit(note, params, controller.ModelController.editNoteSYNC) | |
874 | self.assertEquals(note.getName(), 'new_name', "Name not updated") | |
875 | self.assertEquals(note.text, 'new_desc', "Description not updated") | |
876 | ||
877 | def testEditNoteSyncGetsMapperDispatchedASYNC(self): | |
878 | note = ModelObjectNote("coquito") | |
879 | ||
880 | params = ('new_name', 'new_desc') | |
881 | self.genericEdit(note, params, controller.ModelController.editNoteASYNC, process_pending=True) | |
882 | self.assertEquals(note.getName(), 'new_name', "Name not updated") | |
883 | self.assertEquals(note.text, 'new_desc', "Description not updated") | |
884 | ||
885 | def testEditCredSyncGetsMapperDispatchedSYNC(self): | |
886 | cred = ModelObjectCred("coquito") | |
887 | ||
888 | params = ('new_user', 'new_pass') | |
889 | self.genericEdit(cred, params, controller.ModelController.editCredSYNC) | |
890 | self.assertEquals(cred.getUsername(), 'new_user', "Username not updated") | |
891 | self.assertEquals(cred.getPassword(), 'new_pass', "Password not updated") | |
892 | ||
893 | def testEditCredSyncGetsMapperDispatchedASYNC(self): | |
894 | cred = ModelObjectCred("coquito") | |
895 | ||
896 | params = ('new_user', 'new_pass') | |
897 | self.genericEdit(cred, params, controller.ModelController.editCredASYNC, process_pending=True) | |
898 | self.assertEquals(cred.getUsername(), 'new_user', "Username not updated") | |
899 | self.assertEquals(cred.getPassword(), 'new_pass', "Password not updated") | |
900 | ||
901 | def testGetAllHosts(self): | |
902 | hosts = [ Host("coquito%i" % i ) for i in range(10)] | |
903 | ||
904 | mappersManager = self.createMapperMock() | |
905 | objectMapper = mock() | |
906 | when(mappersManager).getMapper(Host.__name__).thenReturn(objectMapper) | |
907 | when(objectMapper).getAll().thenReturn(hosts) | |
908 | ||
909 | model_controller = controller.ModelController(mock(), mappersManager) | |
910 | hosts_obt = model_controller.getAllHosts() | |
911 | verify(objectMapper).getAll() | |
912 | verify(mappersManager).getMapper(Host.__name__) | |
913 | ||
914 | self.assertListEqual(hosts, hosts_obt) | |
915 | ||
916 | def testGetHost(self): | |
917 | host = Host("coquito") | |
918 | ||
919 | mappersManager = self.createMapperMock() | |
920 | objectMapper = mock() | |
921 | when(mappersManager).getMapper(host.__class__.__name__).thenReturn(objectMapper) | |
922 | when(objectMapper).find(host.getName()).thenReturn(host) | |
923 | ||
924 | model_controller = controller.ModelController(mock(), mappersManager) | |
925 | ||
926 | host_obt = model_controller.getHost('coquito') | |
927 | ||
928 | verify(objectMapper).find(host.getName()) | |
929 | verify(mappersManager).getMapper(host.__class__.__name__) | |
930 | ||
931 | self.assertEqual(host, host_obt) | |
932 | ||
933 | def genericEdit(self, obj, params, callback, process_pending=False): | |
934 | mappersManager = self.createMapperMock() | |
935 | dataMapper = mock() | |
936 | objId = obj.getID() | |
937 | when(mappersManager).getMapper(obj.class_signature).thenReturn(dataMapper) | |
938 | when(dataMapper).save(obj).thenReturn(True) | |
939 | when(mappersManager).find(objId).thenReturn(obj) | |
940 | when(mappersManager).save(obj).thenReturn(True) | |
941 | model_controller = controller.ModelController(mock(), mappersManager) | |
942 | callback(model_controller, obj, *params) | |
943 | if process_pending: | |
944 | model_controller.processAllPendingActions() | |
945 | ||
946 | ||
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
949 |
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | import unittest | |
8 | import sys | |
9 | import os | |
10 | sys.path.append(os.path.abspath(os.getcwd())) | |
11 | ||
12 | from model.common import ModelObjectVuln | |
13 | ||
14 | ||
class VulnerabilityCreationTests(unittest.TestCase):
    """Checks that ModelObjectVuln normalizes its severity on creation
    and on attribute update (heavy copy/paste collapsed into case tables)."""

    def _newVuln(self, severity):
        # Helper: build a vuln differing only in the severity under test.
        return ModelObjectVuln(name='VulnTest', desc='TestDescription',
                               severity=severity)

    def testStandarizeNumericVulnSeverity(self):
        """ Verifies numeric severity transformed into 'info, low, high,
        critical' severity"""
        cases = [(0, 'info'), (1, 'low'), (2, 'med'), (3, 'high'),
                 (4, 'critical'), (5, 'unclassified'), (-1, 'unclassified')]
        for raw, expected in cases:
            vuln = self._newVuln(raw)
            self.assertEquals(vuln.severity, expected,
                    'Vulnerability severity not transformed correctly')

    def testStandarizeShortnameVulnSeverity(self):
        """ Verifies longname severity transformed into 'info, low, high,
        critical' severity (informational -> info)"""
        cases = [('informational', 'info'), ('medium', 'med'),
                 ('highest', 'high'), ('criticalosiuos', 'critical'),
                 ('tuvieja', 'unclassified')]
        for raw, expected in cases:
            vuln = self._newVuln(raw)
            self.assertEquals(vuln.severity, expected,
                    'Vulnerability severity not transformed correctly')

    def testStandarizeUpdatedSeverity(self):
        """updateAttributes must re-normalize the severity as well."""
        vuln = self._newVuln('informational')

        self.assertEquals(vuln.severity, 'info',
                'Vulnerability severity not transformed correctly')

        vuln.updateAttributes(severity='3')
        self.assertEquals(vuln.severity, 'high',
                'Vulnerability severity not transformed correctly')
108 | ||
109 | ||
class VulnerabiltyEdtionTests(unittest.TestCase):
    """Description-edit tests.

    Until the model has a single description attribute, every write path
    must keep the legacy backing field (_desc) and the public getter in
    sync.  (Class-name typo kept as-is: external references may use it.)
    """

    def _assertDescIs(self, vuln, expected):
        # Both the public getter and the legacy backing field must agree.
        self.assertEquals(vuln.getDescription(), expected,
                'Vulnerability desc wasn\'t updated correctly')
        self.assertEquals(vuln._desc, expected,
                'Vulnerability desc wasn\'t updated correctly')

    def testChangeVulnDescription(self):
        """setDescription must write through to _desc."""
        vuln = ModelObjectVuln(
            name='VulnTest', desc='TestDescription', severity='info')
        self.assertEquals(vuln._desc, 'TestDescription',
                'Vulnerability desc should be the given during creation')

        vuln.setDescription("new description")
        self._assertDescIs(vuln, 'new description')

    def testChangeVulnDescriptionUsingUpdateAttributesMethod(self):
        """updateAttributes(desc=...) must write through to _desc."""
        vuln = ModelObjectVuln(
            name='VulnTest', desc='TestDescription', severity='info')
        self.assertEquals(vuln._desc, 'TestDescription',
                'Vulnerability desc should be the given during creation')

        vuln.updateAttributes(desc="new description")
        self._assertDescIs(vuln, 'new description')
148 | ||
149 | ||
# Run the tests when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
152 |
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | import unittest | |
7 | import sys | |
8 | import os | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from model.workspace import Workspace | |
12 | from model.container import ModelObjectContainer | |
13 | import model.api as api | |
14 | #from model import controller | |
15 | #from model import api | |
16 | from plugins.repo.netsparker import plugin | |
17 | from plugins.core import PluginControllerForApi | |
18 | from mockito import mock, when | |
19 | from managers.all import CommandManager | |
20 | ||
21 | ||
class NetsparkerPluginTest(unittest.TestCase):
    """Feeds a recorded Netsparker XML report through the plugin API and
    checks that the parsed hosts reach the model controller."""

    def setUp(self):
        """
        Generic test to verify that the object exists and can be
        instantiated without problems.
        """
        # Controller backed by a mocked workspace/command manager, so no
        # real persistence is involved.
        self.model_controller = controller.ModelController(mock())
        self.workspace = mock(Workspace)
        when(self.workspace).getContainee().thenReturn(ModelObjectContainer())
        self.cm = mock(CommandManager)
        when(self.cm).saveCommand().thenReturn(True)
        self.model_controller.setWorkspace(self.workspace)
        self._plugin_controller = PluginControllerForApi("test", {"netsparker": plugin.NetsparkerPlugin()}, self.cm)
        api.setUpAPIs(self.model_controller)

    def test_report(self):
        """Parsing the canned report must add exactly one host to the model."""
        # 'with' guarantees the fixture handle is closed even if an
        # assertion below fails (previously the file was never closed).
        with open(os.path.join(os.getcwd(), 'test_cases/data/netsparker_plugin_with_api.xml')) as output_file:
            output = output_file.read()
        self._plugin_controller.processCommandInput("./netsparker report")
        self._plugin_controller.onCommandFinished("./netsparker report", output)
        self.model_controller.processAllPendingActions()
        self.assertEqual(len(self.model_controller.getAllHosts()), 1,
                         "Not all hosts added to model")
47 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | ||
10 | import unittest | |
11 | import sys | |
12 | import os | |
13 | import shutil | |
14 | import json | |
15 | sys.path.append(os.path.abspath(os.getcwd())) | |
16 | from couchdbkit import Server, ResourceNotFound | |
17 | import time | |
18 | ||
19 | from persistence.persistence_managers import CouchDbConnector, FileSystemConnector | |
20 | import random | |
21 | ||
22 | from config.configuration import getInstanceConfiguration | |
23 | CONF = getInstanceConfiguration() | |
24 | ||
25 | ||
def new_random_workspace_name():
    """Build a disposable workspace name: 'aworkspace' plus ten random letters."""
    # Same population as always: uppercase letters chr(65)..chr(89).
    letters = [chr(code) for code in range(65, 90)]
    suffix = "".join(random.sample(letters, 10))
    return ("aworkspace" + suffix).lower()
28 | ||
29 | ||
class DbConnectorCouchTestSuite(unittest.TestCase):
    """CouchDbConnector CRUD and filter queries against a throwaway CouchDB db."""

    def setUp(self):
        # One freshly named database per test so runs never collide.
        self.couch_srv = Server(uri=CONF.getCouchURI())
        self.db_name = new_random_workspace_name()
        self.db = self.couch_srv.create_db(self.db_name)

    def tearDown(self):
        self.couch_srv.delete_db(self.db_name)
        # Give CouchDB a moment to finish dropping the database.
        time.sleep(3)

    def test_save_Document(self):
        """A saved document must be visible through the raw db handle."""
        connector = CouchDbConnector(self.db)
        connector.saveDocument({'_id': '123', 'data': 'some data'})

        stored = self.db.get('123')

        self.assertNotEqual(stored, None, "Document should be retrieved")
        self.assertEqual(stored.get('data'), 'some data',
                         "Data retrieved should be the same as data saved")

    def test_get_Document(self):
        """getDocument must return exactly what saveDocument stored."""
        connector = CouchDbConnector(self.db)
        connector.saveDocument({'_id': '123', 'data': 'some data'})

        fetched = connector.getDocument('123')

        self.assertNotEqual(fetched, None, "Document should be retrieved")
        self.assertEqual(fetched.get('data'), 'some data',
                         "Data retrieved should be the same as data saved")

    def test_remove_Document(self):
        """remove must delete the document from the underlying database."""
        connector = CouchDbConnector(self.db)
        connector.saveDocument({'_id': '123', 'data': 'some data'})

        connector.remove('123')

        try:
            leftover = self.db.get('123')
        except ResourceNotFound:
            leftover = None

        self.assertEqual(leftover, None, "Document should be None")

    def test_get_by_parent_and_type(self):
        """getDocsByFilter must match on both the parent id and the type."""
        connector = CouchDbConnector(self.db)
        for record in ({'_id': '123', 'type': 'father', 'parent': None},
                       {'_id': '456', 'type': 'child', 'parent': '123'},
                       {'_id': '789', 'type': 'child', 'parent': '123'}):
            connector.saveDocument(record)

        ids = connector.getDocsByFilter(parentId='123', type='child')

        self.assertEqual(len(ids), 2,
                         "There should be two 'childs' with parent '123'")
        self.assertIn('456', ids, "Child '456' should be in the list of childs")
        self.assertIn('789', ids, "Child '789' should be in the list of childs")

        # A matching parent but a non-existent type must yield nothing...
        self.assertEqual(len(connector.getDocsByFilter(parentId='123', type='son')), 0,
                         "There shouldn't be any 'son' with parent '123'")

        # ...and so must a valid type under the wrong parent.
        self.assertEqual(len(connector.getDocsByFilter(parentId='456', type='child')), 0,
                         "There shouldn't be any 'child' with parent '456'")
154 | ||
class DbConnectorFileSystemTestSuite(unittest.TestCase):
    """FileSystemConnector CRUD and filter queries against a scratch directory."""

    def setUp(self):
        # Scratch workspace directory under the configured persistence path.
        self.path = CONF.getPersistencePath()
        self.db_path = os.path.join(self.path, new_random_workspace_name())
        os.mkdir(self.db_path)

    def tearDown(self):
        shutil.rmtree(self.db_path)

    def test_save_Document(self):
        """A saved document must land on disk as <id>.json."""
        fsConnector = FileSystemConnector(self.db_path)
        doc = {
            '_id': '123',
            'data': 'some data'
        }
        fsConnector.saveDocument(doc)

        # 'with' guarantees the handle is closed even if an assertion fails
        # (previously the file object was never closed).
        with open(os.path.join(self.db_path, '%s.json' % '123'), 'r') as stored:
            doc_from_db = json.loads(stored.read())

        self.assertNotEqual(
            doc_from_db,
            None,
            "Document should be retrieved")

        self.assertEqual(
            doc_from_db.get('data'),
            'some data',
            "Data retrieved should be the same as data saved")

    def test_get_Document(self):
        """getDocument must return exactly what saveDocument stored."""
        fsConnector = FileSystemConnector(self.db_path)
        doc = {
            '_id': '123',
            'data': 'some data'
        }
        fsConnector.saveDocument(doc)

        doc_retrieved = fsConnector.getDocument('123')

        self.assertNotEqual(
            doc_retrieved,
            None,
            "Document should be retrieved")

        self.assertEqual(
            doc_retrieved.get('data'),
            'some data',
            "Data retrieved should be the same as data saved")

    def test_remove_Document(self):
        """remove must delete the backing JSON file."""
        fsConnector = FileSystemConnector(self.db_path)
        doc = {
            '_id': '123',
            'data': 'some data'
        }
        fsConnector.saveDocument(doc)

        fsConnector.remove('123')

        # A missing file raises IOError (Python 2), meaning removal worked.
        try:
            with open(os.path.join(self.db_path, '%s.json' % '123'), 'r') as stored:
                doc_from_db = json.loads(stored.read())
        except IOError:
            doc_from_db = None

        self.assertEqual(
            doc_from_db,
            None,
            "Document should be None")

    def test_get_by_parent_and_type(self):
        """getDocsByFilter must match on both the parent id and the type."""
        fsConnector = FileSystemConnector(self.db_path)
        doc = {
            '_id': '123',
            'type': 'father',
            'parent': None,
        }
        fsConnector.saveDocument(doc)

        doc = {
            '_id': '456',
            'type': 'child',
            'parent': '123',
        }
        fsConnector.saveDocument(doc)

        doc = {
            '_id': '789',
            'type': 'child',
            'parent': '123',
        }
        fsConnector.saveDocument(doc)

        ids = fsConnector.getDocsByFilter(parentId='123', type='child')

        self.assertEqual(
            len(ids),
            2,
            "There should be two 'childs' with parent '123'")

        self.assertIn(
            '456',
            ids,
            "Child '456' should be in the list of childs")

        self.assertIn(
            '789',
            ids,
            "Child '789' should be in the list of childs")

        # A matching parent with a non-existent type must yield nothing.
        ids = fsConnector.getDocsByFilter(parentId='123', type='son')

        self.assertEqual(
            len(ids),
            0,
            "There shouldn't be any 'son' with parent '123'")

        # A valid type under the wrong parent must also yield nothing.
        ids = fsConnector.getDocsByFilter(parentId='456', type='child')

        self.assertEqual(
            len(ids),
            0,
            "There shouldn't be any 'child' with parent '456'")
279 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/env python | |
1 | # -*- coding: utf-8 -*- | |
2 | ||
3 | ''' | |
4 | Faraday Penetration Test IDE | |
5 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
6 | See the file 'doc/LICENSE' for the license information | |
7 | ||
8 | ''' | |
9 | ||
10 | import unittest | |
11 | import sys | |
12 | import os | |
13 | sys.path.append(os.path.abspath(os.getcwd())) | |
14 | import random | |
15 | from couchdbkit import Server | |
16 | ||
17 | from persistence.persistence_managers import CouchDbManager, FileSystemManager | |
18 | ||
19 | from config.configuration import getInstanceConfiguration | |
20 | CONF = getInstanceConfiguration() | |
21 | ||
22 | ||
def new_random_workspace_name():
    """Build a disposable workspace name: 'aworkspace' plus ten random letters."""
    # Same population as always: uppercase letters chr(65)..chr(89).
    letters = [chr(code) for code in range(65, 90)]
    suffix = "".join(random.sample(letters, 10))
    return ("aworkspace" + suffix).lower()
25 | ||
26 | ||
class CouchDbManagerTestSuite(unittest.TestCase):
    """CouchDbManager database lifecycle: create, look up and delete."""

    def setUp(self):
        self.dbname = new_random_workspace_name()

    def tearDown(self):
        # Drop the database in case a failing test left it behind.
        srv = Server(uri=CONF.getCouchURI())
        if self.dbname in srv.all_dbs():
            srv.delete_db(self.dbname)

    def test_create_and_get_db(self):
        """After createDb the database is retrievable and listed server-side."""
        manager = CouchDbManager(uri=CONF.getCouchURI())
        manager.createDb(self.dbname)

        self.assertNotEqual(manager.getDb(self.dbname), None,
                            "Db %s shouldn't be None" % self.dbname)

        srv = Server(uri=CONF.getCouchURI())
        self.assertIn(self.dbname, srv.all_dbs(),
                      "Db %s should be in the db list" % self.dbname)

    def test_delete_db(self):
        """deleteDb must make the database unreachable and unlisted."""
        manager = CouchDbManager(uri=CONF.getCouchURI())
        manager.createDb(self.dbname)

        self.assertNotEqual(manager.getDb(self.dbname), None,
                            "Db %s shouldn't be None" % self.dbname)

        manager.deleteDb(self.dbname)

        self.assertEqual(manager.getDb(self.dbname), None,
                         "Db %s should be None" % self.dbname)

        srv = Server(uri=CONF.getCouchURI())
        self.assertNotIn(self.dbname, srv.all_dbs(),
                         "Db %s shouldn't be in the db list" % self.dbname)
71 | ||
72 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | ||
2 | ''' | |
3 | Faraday Penetration Test IDE | |
4 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | ||
7 | ''' | |
8 | ||
9 | from unittest import TestCase | |
10 | import unittest | |
11 | import sys | |
12 | sys.path.append('.') | |
13 | import model.controller as controller | |
14 | from mockito import mock, when | |
15 | from model import api | |
16 | from plugins.core import PluginBase, PluginController | |
17 | from model.workspace import Workspace | |
18 | from model.container import ModelObjectContainer | |
19 | from managers.all import CommandManager | |
20 | from time import time | |
21 | from model.commands_history import CommandRunInformation | |
22 | ||
23 | ||
class TestPluginCreateModelObject(TestCase):
    """Integration tests for PluginBase's createAndAdd* helpers.

    Objects created through a plugin must reach the model controller once
    the command is flagged as finished and pending actions are processed.
    """

    def setUp(self):
        # Controller wired to a mocked event dispatcher; commands are
        # "persisted" by a mocked CommandManager so no real storage is hit.
        self._model_controller = controller.ModelController(mock())
        self.cm = mock(CommandManager)
        when(self.cm).saveCommand().thenReturn(True)
        self._plugin_controller = PluginController("test", {}, self.cm)

        class PluginTest(PluginBase):
            # Minimal concrete plugin: only the inherited createAndAdd*
            # helpers are exercised by these tests.
            def __init__(self):
                PluginBase.__init__(self)
                self.id = "Test"
                self.name = "Test"

            def parseOutputString(self, output, debug=False):
                pass

        self.workspace = mock(Workspace)
        when(self.workspace).getContainee().thenReturn(ModelObjectContainer())
        self._model_controller.setWorkspace(self.workspace)

        self.plugin = PluginTest()
        api.setUpAPIs(self._model_controller)

        self._plugin_controller.setActivePlugin(self.plugin)
        self.cmdinfo = CommandRunInformation(
            **{'workspace': 'test',
               'itime': time(),
               'command': 'test',
               'params': 'test'})

    def _process_command(self):
        # Shared tail of every test: mark the command as finished and let
        # the model controller apply everything the plugin queued.
        self._plugin_controller.last_command_information = self.cmdinfo
        self._plugin_controller.onCommandFinished()
        self._model_controller.processAllPendingActions()

    def test_create_host(self):
        """
        Testing the creation of one host
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        self._process_command()

        self.assertTrue(h is not None, "host should have an ID")
        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have one host")
        self.assertTrue(self._model_controller.getHost(h) is not None, "The host should be in the controller")

    def test_create_same_host_two_times(self):
        """
        Testing the creation of the same host, two times.
        This simulates two plugins creating the host with the same name
        We should end up with just one host in the controller
        """
        h1 = self.plugin.createAndAddHost("pepito", "linux")
        h2 = self.plugin.createAndAddHost("pepito", "linux")
        self._process_command()

        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have just one host")
        self.assertTrue(self._model_controller.getHost(h1) == self._model_controller.getHost(h2), "The host should be the same")

    def test_create_host_with_interface(self):
        """
        Testing the creation of one host, with one interface
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        self._process_command()

        self.assertTrue(i is not None, "interface should have an ID")
        host = self._model_controller.getHost(h)
        self.assertTrue(len(host.getAllInterfaces()) == 1, "Host should have one interface")
        self.assertTrue(host.getInterface(i) is not None, "The interface should be the one we've just created")

    def test_create_interface_two_times(self):
        """
        Testing the creation of the same interface, two times.
        This simulates two plugins creating the host with the same interface
        We should end up with just one interface in that host
        """
        h1 = self.plugin.createAndAddHost("pepito", "linux")
        i1 = self.plugin.createAndAddInterface(h1, "1.2.3.4")

        h2 = self.plugin.createAndAddHost("pepito", "linux")
        i2 = self.plugin.createAndAddInterface(h2, "1.2.3.4")

        self._process_command()

        self.assertTrue(len(self._model_controller.getAllHosts()) == 1, "The controller should have just one host")
        self.assertTrue(len(self._model_controller.getHost(h1).getAllInterfaces()) == 1, "The host should have just one interface")

    def test_create_host_with_interface_with_service(self):
        """
        Testing the creation of one host, with one interface and one service on that interface
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        self.assertTrue(len(interface.getAllServices()) == 1, "The interface should have just one service")
        self.assertTrue(interface.getService(s) is not None, "The service should be the one we've just created")

    def test_create_two_services_different_names_equal_port(self):
        """
        Testing the creation of two services with different names but same protocol and port
        The result should be only one service created, since both share the same id
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "test", protocol="tcp", ports=['80'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        self.assertEqual(s1, s2, "Both services should have the same id")
        self.assertTrue(len(interface.getAllServices()) == 1, "The interface should have just one service")

    def test_create_two_services_same_names_different_port(self):
        """
        Testing the creation of two services with same names but different port
        The result should be two services created, since their ids differ
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        # The previous message claimed the ids should be equal; the
        # assertion (correctly) demands they differ.
        self.assertNotEqual(s1, s2, "Both services should have different ids")
        self.assertTrue(len(interface.getAllServices()) == 2, "The interface should have two services")

    def test_create_vuln_to_service(self):
        """
        Testing the creation of a vuln to a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        v = self.plugin.createAndAddVulnToService(h, s1, "vuln1", "descripcion")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        service2 = interface.getService(s2)
        self.assertTrue(len(service1.getVulns()) == 1, "The service should have one vuln")
        self.assertTrue(service1.getVuln(v) is not None, "The vuln should be the one we've just created")
        self.assertTrue(len(service2.getVulns()) == 0, "The service shouldn't have any vuln")

    def test_create_note_to_service(self):
        """
        Testing the creation of a note to a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        s2 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        n = self.plugin.createAndAddNoteToService(h, s1, "note1", "desc1")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        service2 = interface.getService(s2)
        # Messages previously talked about "vulns" — this test is about notes.
        self.assertTrue(len(service1.getNotes()) == 1, "The service should have one note")
        self.assertTrue(service1.getNote(n) is not None, "The note should be the one we've just created")
        self.assertTrue(len(service2.getNotes()) == 0, "The service shouldn't have any note")

    def test_create_note_to_note_service(self):
        """
        Testing the creation of a note nested inside a service note
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['443'])
        n = self.plugin.createAndAddNoteToService(h, s1, "note1", "desc1")
        self.plugin.createAndAddNoteToNote(h, s1, n, "note2", "desc2")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        note1 = service1.getNote(n)
        self.assertTrue(note1 is not None, "The note should be the one we've just created")
        self.assertTrue(len(note1.getNotes()) == 1, "The note should have a nested note")

    def test_create_cred_to_service(self):
        """
        Testing the creation of a credential on a service
        """
        h = self.plugin.createAndAddHost("pepito", "linux")
        i = self.plugin.createAndAddInterface(h, "1.2.3.4")
        s1 = self.plugin.createAndAddServiceToInterface(h, i, "unknown", protocol="tcp", ports=['80'])
        c = self.plugin.createAndAddCredToService(h, s1, "user", "pass")
        self._process_command()

        host = self._model_controller.getHost(h)
        interface = host.getInterface(i)
        service1 = interface.getService(s1)
        cred = service1.getCred(c)
        self.assertTrue(cred is not None, "The cred should be the one we've just created")
        self.assertTrue(len(service1.getCreds()) == 1, "The service should have one cred")
251 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | import unittest | |
7 | import sys | |
8 | import os | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from model.workspace import Workspace | |
12 | from model.container import ModelObjectContainer | |
13 | import model.api as api | |
14 | #from model import controller | |
15 | #from model import api | |
16 | from managers.model_managers import WorkspaceManager | |
17 | from plugins.repo.nmap import plugin as nmap_plugin | |
18 | from plugins.repo.nessus import plugin as nessus_plugin | |
19 | from plugins.core import PluginControllerForApi | |
20 | from mockito import mock, when, any | |
21 | ||
22 | from persistence.persistence_managers import DBTYPE | |
23 | ||
24 | from managers.mapper_manager import MapperManager | |
25 | from managers.reports_managers import ReportManager | |
26 | from persistence.persistence_managers import DbManager | |
27 | ||
class PluginsToModelControllerIntegration(unittest.TestCase):
    """End-to-end check: parsing recorded nmap/nessus reports must populate
    the model controller through a real filesystem-backed workspace."""

    def setUp(self):
        """
        Generic test to verify that the object exists and can be
        instantiated without problems.
        """
        # Only the pieces not under test are mocked; dbManager/mappersManager
        # are real (the old code mocked dbManager and then immediately
        # overwrote the mock with a real DbManager — dead assignment removed).
        self.changesController = mock()
        self.reportManager = mock()

        self.dbManager = DbManager()
        self.mappersManager = MapperManager()

        self.model_controller = controller.ModelController(mock(), self.mappersManager)
        self.workspace_manager = WorkspaceManager(self.dbManager,
                                                 self.mappersManager,
                                                 self.changesController,
                                                 self.reportManager)
        self.workspace_manager.createWorkspace('temp_workspace', 'desc', DBTYPE.FS)
        self.workspace_manager.openWorkspace('temp_workspace')

        self._plugin_controller = PluginControllerForApi("test", {"nmap": nmap_plugin.NmapPlugin(),
                                                                  "nessus": nessus_plugin.NessusPlugin()}, mock())

        api.setUpAPIs(self.model_controller, self.workspace_manager)

    def tearDown(self):
        self.workspace_manager.removeWorkspace('temp_workspace')

    def test_nmap_scan_saves_host(self):
        """An nmap XML report yields one host with one interface carrying
        three open services."""
        # 'with' guarantees the fixture handle is closed (was leaked before).
        with open(os.path.join(os.getcwd(), 'test_cases/data/nmap_plugin_with_api.xml')) as output_file:
            output = output_file.read()
        self._plugin_controller.processCommandInput("nmap localhost")
        self._plugin_controller.onCommandFinished("nmap localhost", output)
        self.model_controller.processAllPendingActions()
        self.assertEqual(len(self.model_controller.getAllHosts()), 1,
                         "Not all hosts added to model")

        host = self.model_controller.getAllHosts()[0]
        self.assertEqual(len(host.getAllInterfaces()), 1,
                         "Not all interfaces added to model")

        interface = host.getAllInterfaces()[0]
        self.assertEqual(len(interface.getAllServices()), 3,
                         "Not all services added to model")

        services = interface.getAllServices()
        self.assertTrue(all(s.getStatus() == 'open' for s in services),
                        "Port status not saved correctly")

    def test_nessus_scan_saves_host(self):
        """A nessus report yields all seven hosts it contains."""
        with open(os.path.join(os.getcwd(), 'test_cases/data/nessus_plugin_with_api.nessus')) as output_file:
            output = output_file.read()
        self._plugin_controller.processCommandInput("./nessus report")
        self._plugin_controller.onCommandFinished("./nessus report", output)
        self.model_controller.processAllPendingActions()
        self.assertEqual(len(self.model_controller.getAllHosts()), 7,
                         "Not all hosts added to model")
88 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | """ | |
2 | Faraday Penetration Test IDE. | |
3 | ||
4 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
5 | See the file 'doc/LICENSE' for the license information | |
6 | """ | |
7 | ||
8 | import mock | |
9 | import unittest | |
10 | import sys | |
11 | sys.path.append('.') | |
12 | ||
13 | from plugins.core import PluginControllerForApi | |
14 | from managers.reports_managers import ReportProcessor | |
15 | from managers.reports_managers import ReportParser | |
16 | ||
class UnitTestReportParser(unittest.TestCase):
    """ReportProcessor._sendReport dispatch, with the plugin layer mocked."""

    def testSendReportWithPlugin(self):
        """When the controller accepts the command the report is processed."""
        plugin_controller = mock.Mock(spec=PluginControllerForApi)
        plugin_controller.processCommandInput.return_value = (True, None, None)
        report_processor = ReportProcessor(plugin_controller)

        # Fake report file handle (Python 2 'file' type).
        file_mock = mock.MagicMock(spec=file)
        file_mock.read.return_value = 'Stringreturned'

        with mock.patch('__builtin__.open', create=True) as mock_open:
            # file_mock was previously created but never wired in, so the
            # processor read from a bare MagicMock; hook it up for real.
            mock_open.return_value = file_mock
            res = report_processor._sendReport("nmap", 'strings')
            self.assertTrue(res, "The plugin should be executed")

    def testSendReportWithoutPlugin(self):
        """When no plugin matches, _sendReport must report failure."""
        plugin_controller = mock.Mock(spec=PluginControllerForApi)
        plugin_controller.processCommandInput.return_value = (False, None, None)
        report_processor = ReportProcessor(plugin_controller)

        file_mock = mock.MagicMock(spec=file)
        file_mock.read.return_value = 'Stringreturned'

        with mock.patch('__builtin__.open', create=True) as mock_open:
            mock_open.return_value = file_mock
            res = report_processor._sendReport("nmap", 'strings')
            self.assertFalse(res, "The plugin should not be executed")
44 | ||
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
2 | 2 | # nosetests2 --no-byte-compile --with-coverage --cover-html --cover-html-dir=cover --cover-package=auth --cover-package=bin --cover-package=config --cover-package=exporters --cover-package=external --cover-package=gui --cover-package=managers --cover-package=model --cover-package=persistence --cover-package=plugins --cover-package=shell --cover-package=utils test_cases/model_controller.py |
3 | 3 | # nosetests2 --no-byte-compile --with-coverage --cover-html --cover-html-dir=cover --cover-package=auth --cover-package=bin --cover-package=config --cover-package=exporters --cover-package=external --cover-package=gui --cover-package=managers --cover-package=model --cover-package=persistence --cover-package=plugins --cover-package=shell --cover-package=utils test_cases/model_controller.py |
4 | 4 | # nosetests2 --with-coverage --cover-html --cover-html-dir=cover --cover-package=model test_cases/*.py |
# Run every test module except the REST-controller-APIs suite and any file
# marked "dont_run".  The obsolete duplicate `nosetests2` invocation left
# behind by the merge was removed; only the current runner is kept.
nosetests --ignore-files='.*dont_run_rest_controller_apis.*' --no-byte-compile -v `find test_cases -name '*.py' | grep -v dont_run`
6 | 6 |
0 | import responses | |
1 | import requests | |
2 | import unittest | |
3 | from persistence.server import server | |
4 | from persistence.server import utils | |
5 | from mock import MagicMock, patch | |
6 | ||
# Force the client into "offline" mode: these are unit tests and must
# never contact a live Faraday server.
server.FARADAY_UP = False
# Dummy base URL; the URL-building helpers below are asserted against it.
server.SERVER_URL = "http://s:p"
# Generic endpoint used by the responses-mocked HTTP tests.
example_url = "http://just_some_url"
class ClientServerAPITests(unittest.TestCase):
    """Unit tests for the ``persistence.server.server`` HTTP helpers.

    All traffic is intercepted with the ``responses`` library, so no real
    Faraday server is ever contacted (FARADAY_UP is forced off above).
    """

    def setUp(self):
        self.ws_name = "a_ws"
        self.server_api_url = "http://s:p/_api"

    def test_get_base_server_url(self):
        s = server._get_base_server_url()
        self.assertEqual(server.SERVER_URL, s)

    def test_create_server_api_url(self):
        s = server._create_server_api_url()
        self.assertEqual("{0}/_api".format(server.SERVER_URL), s)

    def test_create_server_get_url(self):
        obj_name = "hosts"
        s = server._create_server_get_url(self.ws_name, obj_name)
        self.assertEqual("{0}/_api/ws/{1}/{2}".format(server.SERVER_URL, self.ws_name, obj_name), s)

    def test_create_serve_post_url(self):
        objid = "123456"
        server_post_url = server._create_server_post_url(self.ws_name, objid)
        self.assertEqual(self.server_api_url + '/ws/' + self.ws_name + '/doc/' + objid, server_post_url)

    def test_create_server_get_ws_names_url(self):
        # With no object name the GET url points at the workspace itself.
        s = server._create_server_get_url(self.ws_name)
        self.assertEqual("{0}/_api/ws/{1}".format(server.SERVER_URL, self.ws_name), s)

    @responses.activate
    def test_raise_conflict_in_database(self):
        """A 409 response must raise ConflictInDatabase."""
        url = "http://just_raise_conflict.com"
        responses.add(responses.PUT, url, body='{"name": "betcha"}', status=409,
                      content_type="application/json", json={'error': 'conflict'})
        with self.assertRaises(server.ConflictInDatabase):
            server._unsafe_io_with_server(requests.put, 200, url, json={"name": "betcha"})

    @responses.activate
    def test_raise_resource_does_not_exist(self):
        """A 404 response must raise ResourceDoesNotExist."""
        url = "http://dont_exist.com"
        responses.add(responses.GET, url, body='{"name": "betcha"}', status=404)
        with self.assertRaises(server.ResourceDoesNotExist):
            server._unsafe_io_with_server(requests.get, 200, url, json={"name": "betcha"})

    @responses.activate
    def test_raise_unauthorized(self):
        """Both 403 and 401 status codes must map to Unauthorized."""
        url = "http://nope.com"
        responses.add(responses.GET, url, body='{"name": "betcha"}', status=403)
        with self.assertRaises(server.Unauthorized):
            server._unsafe_io_with_server(requests.get, 200, url, json={"name": "betcha"})
        url2 = "http://nope2.com"
        responses.add(responses.GET, url2, body='{"name": "betcha"}', status=401)
        with self.assertRaises(server.Unauthorized):
            # BUG FIX: this call previously re-used ``url`` (the 403
            # endpoint), so the 401 branch was never actually exercised.
            server._unsafe_io_with_server(requests.get, 200, url2, json={"name": "betcha"})

    @responses.activate
    def test_raise_cant_comm_with_server_on_wrong_response_code(self):
        """Any status other than the expected one raises CantCommunicate..."""
        url = "http://yes.com"
        responses.add(responses.GET, url, status=204)
        with self.assertRaises(server.CantCommunicateWithServerError):
            server._unsafe_io_with_server(requests.get, 200, url)

    @responses.activate
    def test_server_with_okey_request(self):
        """On the expected status code the raw response is returned as-is."""
        url = "http://this-is-ok.com"
        responses.add(responses.GET, url, body='{"name": "betcha"}', status=200)
        responses.add(responses.PUT, url, body='{"ok": "true"}', status=200)
        response_get = server._unsafe_io_with_server(requests.get, 200, url)
        response_put = server._unsafe_io_with_server(requests.put, 200, url)
        self.assertEqual(response_get.text, requests.get(url).text)
        self.assertEqual(response_put.text, requests.put(url).text)

    @responses.activate
    def test_json_parsing(self):
        """_parse_json returns a dict for valid JSON and {} for invalid."""
        url = "http://give_me_json.com"
        responses.add(responses.GET, url, body='{"some": "valid", "json": "string"}')
        url2 = "http://give_me_invalid_json.com"
        responses.add(responses.GET, url2, body='{"this is not", "valid": "json"}')
        json_as_dict = server._parse_json(requests.get(url))
        json_as_empty_dict = server._parse_json(requests.get(url2))
        self.assertEqual({'some': 'valid', 'json': 'string'}, json_as_dict)
        self.assertEqual({}, json_as_empty_dict)

    @responses.activate
    def test_get(self):
        url = "http://get_url"
        responses.add(responses.GET, url, body='{"some": "object"}')
        expected_json = server._get(url)
        self.assertEqual(expected_json, {"some": "object"})

    @responses.activate
    def test_put_with_no_update(self):
        responses.add(responses.PUT, example_url, body='{"ok": "true"}', status=200)
        self.assertEqual(server._put(example_url, expected_response=200), {"ok": "true"})

    @responses.activate
    def test_put_with_update(self):
        # update=True must first GET the current doc to pick up its _rev.
        responses.add(responses.GET, example_url, body='{"_rev": "1-asf"}')
        responses.add(responses.PUT, example_url, body='{"ok": "true"}', status=200)
        server._put(example_url, update=True, expected_response=200)
        self.assertIn("_rev", responses.calls[0].response.text)

    @responses.activate
    def test_delete_object(self):
        # _delete must GET the doc first (for _rev) and then DELETE it.
        responses.add(responses.GET, example_url, body='{"_rev": "1-asf"}')
        responses.add(responses.DELETE, example_url, body='{"ok": "true"}', status=200)
        server._delete(example_url)
        self.assertIn("_rev", responses.calls[0].response.text)
        self.assertEqual(responses.calls[1].request.method, 'DELETE')

    def test_faraday_dictionary_dispatcher_result(self):
        """full_table=False yields only the 'value' dicts; True yields rows."""
        mock_raw_hosts = MagicMock()
        mock_raw_hosts.return_value = {'rows': [{'a': 'host', 'value': {'stuff': 'other_stuff'}}], 'total_rows': 4}
        with patch('persistence.server.server._get_raw_hosts', mock_raw_hosts):
            list_of_dicts = server._get_faraday_ready_dictionaries('some_workspace', 'hosts', 'rows', full_table=False)
        with patch('persistence.server.server._get_raw_hosts', mock_raw_hosts):
            full_list_of_dicts = server._get_faraday_ready_dictionaries('some_workspace', 'hosts',
                                                                       'rows', full_table=True)
        self.assertTrue(len(list_of_dicts) == 1 == len(full_list_of_dicts))
        self.assertEqual(list_of_dicts, [mock_raw_hosts.return_value['rows'][0]['value']])
        self.assertEqual(full_list_of_dicts, mock_raw_hosts.return_value['rows'])

    @patch('persistence.server.server._get_raw_hosts')
    @patch('persistence.server.server._get_raw_vulns')
    @patch('persistence.server.server._get_raw_interfaces')
    @patch('persistence.server.server._get_raw_services')
    @patch('persistence.server.server._get_raw_notes')
    @patch('persistence.server.server._get_raw_credentials')
    @patch('persistence.server.server._get_raw_commands')
    def test_faraday_dictionary_dispatcher_calls(self, mock_commands, mock_credentials,
                                                 mock_notes, mock_services,
                                                 mock_interfaces, mock_vulns,
                                                 mock_hosts):
        """The dispatcher must route each object signature to its getter.

        BUG FIX: @patch decorators inject mocks bottom-up (the decorator
        closest to the function supplies the first argument), so the old
        parameter names were reversed — mock_hosts actually received the
        _get_raw_commands mock, and so on.  The names now match the mocks
        they really receive.
        """
        server._get_faraday_ready_dictionaries('a', 'hosts', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'interfaces', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'vulns', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'services', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'notes', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'credentials', 'whatever')
        server._get_faraday_ready_dictionaries('a', 'commands', 'whatever')
        mock_hosts.assert_called_once_with('a')
        mock_vulns.assert_called_once_with('a')
        mock_interfaces.assert_called_once_with('a')
        mock_services.assert_called_once_with('a')
        mock_notes.assert_called_once_with('a')
        mock_credentials.assert_called_once_with('a')
        mock_commands.assert_called_once_with('a')

    @patch('persistence.server.server.get_hosts', return_value='hosts')
    @patch('persistence.server.server.get_vulns', return_value='vulns')
    @patch('persistence.server.server.get_interfaces', return_value='interfaces')
    @patch('persistence.server.server.get_services', return_value='services')
    @patch('persistence.server.server.get_credentials', return_value='CREDENTIAL')
    @patch('persistence.server.server.get_notes', return_value='NOTE')
    @patch('persistence.server.server.get_commands', return_value='COMMAND')
    def test_get_objects(self, not_command, not_note, not_credential, not_service,
                         not_interface, not_vuln, not_host):
        """get_objects routes by signature; unknown signatures raise.

        Parameter order here is already correct for bottom-up injection.
        """
        obj_sign_to_mock = {'hosts': not_host, 'vulns': not_vuln, 'interfaces': not_interface,
                            'services': not_service, 'credentials': not_credential,
                            'notes': not_note, 'commands': not_command}
        for obj_sign in obj_sign_to_mock.keys():
            server.get_objects('a', obj_sign)
            obj_sign_to_mock[obj_sign].assert_called_once_with('a')
        with self.assertRaises(utils.WrongObjectSignature):
            server.get_objects('a', 'not a signature')
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | ||
8 | import unittest | |
9 | import sys | |
10 | sys.path.append('.') | |
11 | import model.controller | |
12 | import managers.mapper_manager | |
13 | from mockito import mock | |
14 | from persistence.mappers.abstract_mapper import NullPersistenceManager | |
15 | from model.hosts import Host, ModelObjectVuln | |
16 | from model.diff import ModelObjectDiff | |
17 | ||
18 | import test_cases.common as test_utils | |
19 | ||
20 | ||
class DiffTests(unittest.TestCase):
    """Tests for ModelObjectDiff and automatic vuln-update resolution."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_diff_between_equal_hosts(self):
        """A diff of two identical hosts reports no differences."""
        host_a = Host(name='host1', os='Windows')
        host_b = Host(name='host1', os='Windows')
        self.assertFalse(ModelObjectDiff(host_a, host_b).existDiff())

    def test_diff_between_different_hosts(self):
        """A diff of two hosts that differ in OS reports a difference."""
        host_a = Host(name='host1', os='Windows')
        host_b = Host(name='host1', os='Linux')
        self.assertTrue(ModelObjectDiff(host_a, host_b).existDiff())

    def test_diff_between_equal_vulns_with_different_confirmed(self):
        """Merging an unconfirmed copy into a confirmed vuln keeps it confirmed."""
        confirmed_vuln = ModelObjectVuln(name="vuln1",
                                         desc="description",
                                         severity="high",
                                         confirmed=True)
        unconfirmed_vuln = ModelObjectVuln(name="vuln1",
                                           desc="description",
                                           severity="high")

        self.assertFalse(confirmed_vuln.addUpdate(unconfirmed_vuln),
                         "The conflict should be resolved automatically")
        self.assertTrue(confirmed_vuln.confirmed,
                        "The vuln should be still confirmed")
65 | ||
66 | ||
class UpdatesTests(unittest.TestCase):
    """Tests for automatic vs. user-resolved updates when the same host is
    added twice with conflicting attributes.

    Deprecated ``assertEquals`` calls were replaced with ``assertEqual``.
    """

    def setUp(self):
        # Controller backed by a null persistence layer: everything stays
        # in memory, nothing is written to disk.
        self._mappers_manager = managers.mapper_manager.MapperManager()
        self._persistence_manager = NullPersistenceManager()
        self._mappers_manager.createMappers(self._persistence_manager)
        self.model_controller = model.controller.ModelController(
            mock(), self._mappers_manager)

    def tearDown(self):
        pass

    def test_add_host_and_generate_solvable_update(self):
        """
        This test case creates a host within the Model Controller context
        and then creates another with the same key elements, but different
        non-key attributes with default value to generate an automatic
        solvable update
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='windows')

        host = self._mappers_manager.find(host1a.getID())
        self.assertEqual(
            host.getOS(),
            'windows',
            'Host\'s OS should be windows')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='unknown')

        self.assertEqual(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # No conflict expected: the incoming value is a default ('unknown'),
        # so the update resolves automatically.
        self.assertEqual(
            len(self.model_controller.getConflicts()),
            0,
            'Update was generated')

        host = self._mappers_manager.find(host1a.getID())

        self.assertEqual(
            host.getOS(),
            'windows',
            'Host\'s OS should still be windows')

    def test_add_host_and_generate_solvable_update_with_edition(self):
        """
        This test case creates a host with a default value in a non-key
        attrribute within the Model Controller context and then creates
        another with the same key elements, but different non-key
        attributes to generate an automatic solvable update
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='unknown')

        host = self._mappers_manager.find(host1a.getID())

        self.assertEqual(
            host.getOS(),
            'unknown',
            'Host\'s OS should be unknown')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='windows')

        self.assertEqual(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # The stored value was the default, so the edit applies silently.
        self.assertEqual(
            len(self.model_controller.getConflicts()),
            0,
            'Update was generated')

        host = self._mappers_manager.find(host1a.getID())

        self.assertEqual(
            host.getOS(),
            'windows',
            'Host\'s OS should now be windows')

    def test_add_host_and_generate_unsolvable_update(self):
        """
        This test case creates a host within the Model Controller
        context and then creates another with the same key elements,
        but different non-key attributes to generate an update to
        be resolved by the user
        """
        # When
        hostname = 'host'
        host1a = test_utils.create_host(self, host_name=hostname, os='windows')

        host = self._mappers_manager.find(host1a.getID())

        self.assertEqual(
            host.getOS(),
            'windows',
            'Host\'s OS should be windows')

        # Then, we generate an update
        host1b = test_utils.create_host(self, host_name=hostname, os='linux')

        self.assertEqual(
            host1a.getID(),
            host1b.getID(),
            'Both hosts should have the same id')

        # Two non-default values clash: the conflict must be left for the
        # user to resolve.
        self.assertEqual(
            len(self.model_controller.getConflicts()),
            1,
            'Update was not generated')

        host = self._mappers_manager.find(host1a.getID())

        self.assertEqual(
            host.getOS(),
            'windows',
            'Host\'s OS should still be windows')

        self.assertEqual(
            len(host.getUpdates()),
            1,
            'The host should have a pending update')
196 | ||
197 | ||
# Allow running this test module directly (``python <thisfile>.py``).
if __name__ == '__main__':
    unittest.main()
0 | ''' | |
1 | Faraday Penetration Test IDE | |
2 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
3 | See the file 'doc/LICENSE' for the license information | |
4 | ||
5 | ''' | |
6 | import unittest | |
7 | import sys | |
8 | import os | |
9 | sys.path.append('.') | |
10 | import model.controller as controller | |
11 | from model.workspace import Workspace | |
12 | from model.container import ModelObjectContainer | |
13 | import model.api as api | |
14 | #from model import controller | |
15 | #from model import api | |
16 | from plugins.repo.w3af import plugin | |
17 | from plugins.core import PluginControllerForApi | |
18 | from mockito import mock, when | |
19 | from managers.all import CommandManager | |
20 | ||
21 | ||
class W3afPluginTest(unittest.TestCase):
    """End-to-end test of the w3af plugin through the plugin-controller API."""

    def setUp(self):
        """
        Generic test to verify that the object exists and can be
        instantiated without problems.
        """
        self.model_controller = controller.ModelController(mock())
        self.workspace = mock(Workspace)
        when(self.workspace).getContainee().thenReturn(ModelObjectContainer())
        self.cm = mock(CommandManager)
        when(self.cm).saveCommand().thenReturn(True)
        self.model_controller.setWorkspace(self.workspace)
        self._plugin_controller = PluginControllerForApi("test", {"w3af": plugin.W3afPlugin()}, self.cm)
        api.setUpAPIs(self.model_controller)

    def test_report(self):
        """Feed a recorded w3af XML report through the plugin and check the
        resulting host count in the model."""
        report_path = os.path.join(os.getcwd(), 'test_cases/data/w3af_plugin_with_api.xml')
        # BUG FIX: the report file was previously opened and never closed
        # (leaked file descriptor); a context manager closes it promptly.
        with open(report_path) as output_file:
            output = output_file.read()
        self._plugin_controller.processCommandInput("./w3af report")
        self._plugin_controller.onCommandFinished("./w3af report", output)
        self.model_controller.processAllPendingActions()
        self.assertEqual(len(self.model_controller.getAllHosts()), 1,
                         "Not all hosts added to model")
46 | ||
47 | ||
# Allow running this test module directly (``python <thisfile>.py``).
if __name__ == '__main__':
    unittest.main()
0 | #!/usr/bin/python | |
1 | ''' | |
2 | Faraday Penetration Test IDE | |
3 | Copyright (C) 2013 Infobyte LLC (http://www.infobytesec.com/) | |
4 | See the file 'doc/LICENSE' for the license information | |
5 | ||
6 | ''' | |
7 | import unittest | |
8 | import sys | |
9 | sys.path.append('.') | |
10 | ||
11 | from config.configuration import getInstanceConfiguration | |
12 | from model.workspace import Workspace | |
13 | from managers.model_managers import WorkspaceManager | |
14 | from persistence.persistence_managers import DBTYPE | |
15 | from mockito import mock, verify, when, any | |
# Instantiate the shared configuration singleton at import time.
# NOTE(review): CONF is not referenced in this module — presumably kept for
# its import-time side effect on the code under test; TODO confirm.
CONF = getInstanceConfiguration()

class UnitTestWorkspaceManager(unittest.TestCase):
    """ Unit tests for WorkspaceManager.

    All collaborators (db manager, mappers manager, changes controller,
    report manager) are mockito mocks; only interactions are verified.
    Deprecated ``assertEquals`` calls were replaced with ``assertEqual``
    and unused mock locals were removed.
    """

    def testCreateWorkspaceManager(self):
        workspace_manager = WorkspaceManager(mock(), mock(), mock(), mock())
        self.assertIsNotNone(workspace_manager)

    def testOpenWorkspaceChangesAndReportManagerWatch(self):
        """Opening a workspace must start the report-folder watch and the
        DB-changes watch, and return the workspace document."""
        reportManager = mock()
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()
        workspaceMapper = mock()

        workspace = Workspace('test_workspace', 'a desc')

        when(dbManager).getAllDbNames().thenReturn(['test_workspace'])
        when(dbManager).getConnector('test_workspace').thenReturn(dbConnector)
        when(mappersManager).createMappers(dbConnector).thenReturn(True)
        when(mappersManager).getMapper(Workspace.__name__).thenReturn(workspaceMapper)
        when(workspaceMapper).find('test_workspace').thenReturn(workspace)

        workspace_manager = WorkspaceManager(dbManager,
                                             mappersManager,
                                             changesController,
                                             reportManager)

        opened_workspace = workspace_manager.openWorkspace('test_workspace')

        verify(reportManager).watch('test_workspace')
        verify(changesController).watch(mappersManager, dbConnector)
        self.assertEqual(opened_workspace.getName(), 'test_workspace')

    def testCreateWorkspaceDBManagerInteract(self):
        """createWorkspace must delegate DB creation to the db manager."""
        dbManager = mock()
        dbConnector = mock()
        changesController = mock()

        when(dbManager).createDb('test_workspace', DBTYPE.FS).thenReturn(dbConnector)
        workspace_manager = WorkspaceManager(dbManager, mock(), changesController, mock())
        workspace_manager.createWorkspace('test_workspace', 'a test workspace',
                                          DBTYPE.FS)
        verify(dbManager).createDb('test_workspace', DBTYPE.FS)

    def testCreateWorkspaceCreateMappersAndWorkspace(self):
        """createWorkspace must build mappers on the new connector and
        persist the workspace document."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()
        workspaceMapper = mock()

        when(mappersManager).getMapper(Workspace.__name__).thenReturn(workspaceMapper)
        when(mappersManager).save(any()).thenReturn(True)
        when(dbManager).createDb('test_workspace', DBTYPE.FS).thenReturn(dbConnector)
        when(mappersManager).createMappers(dbConnector).thenReturn(True)

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())
        workspace = workspace_manager.createWorkspace('test_workspace', 'a test workspace',
                                                      DBTYPE.FS)

        verify(mappersManager).createMappers(dbConnector)
        verify(mappersManager).save(any())

        self.assertTrue(workspace, 'workspace not instantiated')
        self.assertEqual(workspace.name, 'test_workspace',
                         'Workspace name not set, is it valid?')

    def testCreateExistingWorkspaceReturnsFalse(self):
        """If the DB already exists, neither mappers nor the workspace
        document may be created."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()

        when(mappersManager).save(any()).thenReturn(True)
        when(dbManager).createDb('test_workspace', DBTYPE.FS).thenReturn(False)
        when(mappersManager).createMappers(dbConnector).thenReturn(True)

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())
        workspace_manager.createWorkspace('test_workspace', 'a test workspace',
                                          DBTYPE.FS)

        verify(dbManager).createDb('test_workspace', DBTYPE.FS)
        verify(mappersManager, times=0).createMappers(dbConnector)
        verify(mappersManager, times=0).save(any())

    def testOpenWorkspace(self):
        """openWorkspace must fetch the connector, build mappers and load
        the workspace document."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()
        workspaceMapper = mock()

        workspace = Workspace('test_workspace', 'a desc')

        when(dbManager).getConnector('test_workspace').thenReturn(dbConnector)
        when(mappersManager).getMapper(Workspace.__name__).thenReturn(workspaceMapper)
        when(dbManager).getAllDbNames().thenReturn(['test_workspace'])
        when(mappersManager).createMappers(dbConnector).thenReturn(True)
        when(workspaceMapper).find('test_workspace').thenReturn(workspace)

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())

        opened_workspace = workspace_manager.openWorkspace('test_workspace')

        verify(dbManager).getConnector('test_workspace')
        verify(mappersManager).createMappers(dbConnector)
        verify(workspaceMapper).find('test_workspace')
        self.assertEqual(opened_workspace.getName(), 'test_workspace')

    def testOpenWorkspaceSetsChangesCallback(self):
        """openWorkspace must register the mappers with the changes watcher."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()
        workspaceMapper = mock()

        workspace = Workspace('test_workspace', 'a desc')

        when(dbManager).getConnector('test_workspace').thenReturn(dbConnector)
        when(mappersManager).getMapper(Workspace.__name__).thenReturn(workspaceMapper)
        when(dbManager).getAllDbNames().thenReturn(['test_workspace'])
        when(mappersManager).createMappers(dbConnector).thenReturn(True)
        when(workspaceMapper).find('test_workspace').thenReturn(workspace)

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())

        workspace_manager.openWorkspace('test_workspace')

        verify(changesController).watch(mappersManager, dbConnector)

    def testCreateWorkspaceSetsChangesCallback(self):
        """createWorkspace must register the mappers with the changes watcher."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()

        when(mappersManager).save(any()).thenReturn(True)
        when(dbManager).createDb('test_workspace', DBTYPE.FS).thenReturn(dbConnector)
        when(mappersManager).createMappers(dbConnector).thenReturn(True)

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())
        workspace_manager.createWorkspace('test_workspace', 'a test workspace',
                                          DBTYPE.FS)

        verify(changesController).watch(mappersManager, dbConnector)

    def testOpenWorkspaceNoneExisting(self):
        """Opening a workspace whose DB does not exist returns a falsy value
        and never touches the mappers."""
        dbManager = mock()
        mappersManager = mock()
        dbConnector = mock()
        changesController = mock()

        when(dbManager).getAllDbNames().thenReturn([])

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())
        opened_workspace = workspace_manager.openWorkspace('test_workspace')

        verify(mappersManager, times=0).createMappers(dbConnector)
        verify(mappersManager, times=0).find('test_workspace')
        self.assertFalse(opened_workspace, 'Workspace retrieved but non existing')

    def testRemoveWorkspace(self):
        """removeWorkspace must delegate to dbManager.removeDb."""
        dbManager = mock()
        mappersManager = mock()
        changesController = mock()

        when(dbManager).removeDb('test_workspace').thenReturn(True)
        when(dbManager).getAllDbNames().thenReturn(['test_workspace'])

        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())
        remove_ret = workspace_manager.removeWorkspace('test_workspace')

        verify(dbManager).removeDb('test_workspace')
        self.assertTrue(remove_ret, 'bbdd not removed')

    def testSetActiveWorkspace(self):
        work = Workspace('testname')
        dbManager = mock()
        mappersManager = mock()
        changesController = mock()
        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())

        workspace_manager.setActiveWorkspace(work)

        self.assertEqual(workspace_manager.active_workspace, work,
                         'active workspace not set')
        self.assertTrue(workspace_manager.isActive(work.getName()),
                        'could not retrive as active workspace')

    def testGetWorkspaceTypeCouchDb(self):
        work = Workspace('testname')
        dbManager = mock()
        mappersManager = mock()
        changesController = mock()
        when(dbManager).getDbType('testname').thenReturn(DBTYPE.COUCHDB)
        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())

        wtype = workspace_manager.getWorkspaceType(work.getName())
        self.assertEqual(wtype, 'CouchDB', 'Workspace type not returning correct value')

    def testGetWorkspaceTypeFS(self):
        work = Workspace('testname')
        dbManager = mock()
        mappersManager = mock()
        changesController = mock()
        when(dbManager).getDbType('testname').thenReturn(DBTYPE.FS)
        workspace_manager = WorkspaceManager(dbManager, mappersManager, changesController, mock())

        wtype = workspace_manager.getWorkspaceType(work.getName())
        self.assertEqual(wtype, 'FS', 'Workspace type not returning correct value')

    def testGetAvailableWorkspaceTypes(self):
        dbManager = mock()
        workspace_manager = WorkspaceManager(dbManager,
                                             mock(),
                                             mock(),
                                             mock())
        when(dbManager).getAvailableDBs().thenReturn([DBTYPE.COUCHDB, DBTYPE.FS])
        retrievedTypes = workspace_manager.getAvailableWorkspaceTypes()

        self.assertListEqual(['CouchDB', 'FS'], retrievedTypes,
                             "Workspaces available Types not set")

    def testCloseWorkspace(self):
        """closeWorkspace must stop the changes watcher."""
        changesController = mock()

        workspace_manager = WorkspaceManager(mock(),
                                             mock(),
                                             changesController,
                                             mock())

        workspace_manager.closeWorkspace()
        verify(changesController).unwatch()

    def testResourceManager(self):
        """resource() must trigger a db-manager config reload."""
        dbManager = mock()

        workspace_manager = WorkspaceManager(dbManager,
                                             mock(),
                                             mock(),
                                             mock())

        workspace_manager.resource()

        verify(dbManager).reloadConfig()
287 | ||
288 | ||
289 | ||
290 | ||
# Allow running this test module directly (``python <thisfile>.py``).
if __name__ == '__main__':
    unittest.main()
293 |
8 | 8 | import subprocess |
9 | 9 | import couchdbkit |
10 | 10 | import model.workspace |
11 | import persistence.mappers.data_mappers as dm | |
12 | 11 | from utils.logs import getLogger |
13 | 12 | from config.globals import * |
14 | 13 | logger = getLogger('Updater') |
20 | 19 | import os |
21 | 20 | import shutil |
22 | 21 | from managers.all import ViewsManager |
22 | from persistence.server.models import create_workspace | |
23 | 23 | |
24 | 24 | class Updater(object): |
25 | 25 | def doUpdates(self): |
65 | 65 | dbs = filter(lambda x: not x.startswith("_") and 'backup' not in x and x not in CONST_BLACKDBS, serv.all_dbs()) |
66 | 66 | logger.info('Dbs to upgrade: %s' % (', '.join(dbs))) |
67 | 67 | |
68 | ||
69 | 68 | logger.info('Preparing updates on Couchdbs') |
70 | 69 | processed = 0 |
71 | 70 | views_uploader = ViewsManager() |
102 | 101 | |
103 | 102 | # Crear documento 'workspace' |
104 | 103 | logger.info('Creating workspace document') |
105 | workspace = model.workspace.Workspace(db_name, | |
106 | 'Migrated Workspace ') | |
107 | ||
108 | dict_workspace = dm.WorkspaceMapper(None).serialize(workspace) | |
109 | db_source.save_doc(dict_workspace, force_update = True) | |
104 | ||
105 | create_workspace(db_name, | |
106 | 'Migrated Workspace', | |
107 | int(time.time() * 1000), | |
108 | int(time.time() * 1000), | |
109 | "") | |
110 | 110 | types = {} |
111 | 111 | |
112 | 112 | logger.info('Updating modelobject documents') |