Skip to content

Commit

Permalink
Some more cleanups discovered by PyCharm
Browse files Browse the repository at this point in the history
  • Loading branch information
brad-sp committed Apr 20, 2015
1 parent 5b54478 commit bd2690c
Show file tree
Hide file tree
Showing 14 changed files with 68 additions and 40 deletions.
2 changes: 1 addition & 1 deletion analyzer/windows/analyzer.py
Original file line number Diff line number Diff line change
Expand Up @@ -291,7 +291,7 @@ def hookdll_encode(names):
filepath = proc.get_filepath()
filename = os.path.basename(filepath)

if not protected_filename(filename):
if not in_protected_path(filename):
add_pid(process_id)
log.info("Announce process name : %s", filename)
PROCESS_LOCK.release()
Expand Down
2 changes: 1 addition & 1 deletion analyzer/windows/lib/api/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ def __init__(self, pid=0, h_process=0, thread_id=0, h_thread=0, suspended=False)
self.thread_id = thread_id
self.h_thread = h_thread
self.suspended = suspended
self.system_info = SYSTEM_INFO()

def __del__(self):
"""Close open handles."""
Expand All @@ -77,7 +78,6 @@ def __del__(self):

def get_system_info(self):
"""Get system information."""
self.system_info = SYSTEM_INFO()
KERNEL32.GetSystemInfo(byref(self.system_info))

def open(self):
Expand Down
2 changes: 1 addition & 1 deletion analyzer/windows/modules/packages/swf.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,4 +11,4 @@ class SWF(Package):
"""

def start(self, path):
return self.execute(path="bin/flashplayer.exe", path, path)
return self.execute("bin/flashplayer.exe", path, path)
4 changes: 2 additions & 2 deletions data/html/sections/behavior.html
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,7 @@ <h4><a href="javascript:showHide('process_{{process.process_id}}');">{{process.p
<td>{{call.timestamp[11:]}}</td>
<td>{{call.thread_id}}</td>
<td><span class="mono">{{call.api}}</span></td>
<td style="word-wrap: break-word">
<td style="word-wrap: break-word;">
{% for argument in call.arguments %}
{% if argument.pretty_value %}
{{argument.name}} => <span class="mono">{{argument.pretty_value}}</span><br />
Expand All @@ -221,7 +221,7 @@ <h4><a href="javascript:showHide('process_{{process.process_id}}');">{{process.p
</td>
<td>{% if call.status %}SUCCESS{% else %}FAILURE{% endif %}</td>
{% if call.pretty_return %}
<td style="word-wrap: break-word">{{call.pretty_return}}</td>
<td style="word-wrap: break-word;">{{call.pretty_return}}</td>
{% else %}
<td>{{call.return}}</td>
{% endif %}
Expand Down
1 change: 1 addition & 0 deletions docs/book/src/installation/guest/network.rst
Original file line number Diff line number Diff line change
Expand Up @@ -64,3 +64,4 @@ your subnet address)::
And adding IP forwarding::

sysctl -w net.ipv4.ip_forward=1

1 change: 1 addition & 0 deletions docs/book/src/installation/upgrade.rst
Original file line number Diff line number Diff line change
Expand Up @@ -79,3 +79,4 @@ configuration may corrupt your data, backup should save kittens!
Run the database migrations with::

alembic upgrade head

1 change: 1 addition & 0 deletions docs/book/src/usage/packages.rst
Original file line number Diff line number Diff line change
Expand Up @@ -167,3 +167,4 @@ specify the package name whenever possible.
For example, to launch a malware and specify some options you can do::

$ ./utils/submit.py --package dll --options function=FunctionName,loader=explorer.exe /path/to/malware.dll

1 change: 1 addition & 0 deletions docs/book/src/usage/utilities.rst
Original file line number Diff line number Diff line change
Expand Up @@ -178,3 +178,4 @@ Following are the available options::
--snapshot SNAPSHOT Specific Virtual Machine Snapshot to use.
--resultserver RESULTSERVER
IP:Port of the Result Server.

1 change: 1 addition & 0 deletions modules/feeds/bad_ssl_certs.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ class AbuseCH_SSL(Feed):
enabled = False

def __init__(self):
Feed.__init__(self)
# Location of the feed to be fetched
self.downloadurl = "https://sslbl.abuse.ch/downloads/ssl_extended.csv"
# Used in creating the file path on disk
Expand Down
73 changes: 46 additions & 27 deletions modules/processing/network.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,15 @@

try:
import GeoIP

IS_GEOIP = True
gi = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
except ImportError:
IS_GEOIP = False

try:
import dpkt

IS_DPKT = True
except ImportError:
IS_DPKT = False
Expand All @@ -46,6 +48,7 @@

log = logging.getLogger(__name__)


class Pcap:
"""Reads network data from PCAP file."""

Expand Down Expand Up @@ -167,7 +170,7 @@ def _add_hosts(self, connection):
pass

def _enrich_hosts(self, unique_hosts):
enriched_hosts=[]
enriched_hosts = []
while unique_hosts:
ip = unique_hosts.pop()
inaddrarpa = ""
Expand Down Expand Up @@ -214,7 +217,7 @@ def _check_icmp(self, icmp_data):
"""
try:
return isinstance(icmp_data, dpkt.icmp.ICMP) and \
len(icmp_data.data) > 0
len(icmp_data.data) > 0
except:
return False

Expand Down Expand Up @@ -264,8 +267,8 @@ def _add_dns(self, udpdata):
query = {}

if dns.rcode == dpkt.dns.DNS_RCODE_NOERR or \
dns.qr == dpkt.dns.DNS_R or \
dns.opcode == dpkt.dns.DNS_QUERY or True:
dns.qr == dpkt.dns.DNS_R or \
dns.opcode == dpkt.dns.DNS_QUERY or True:
# DNS question.
try:
q_name = dns.qd[0].name
Expand Down Expand Up @@ -327,12 +330,12 @@ def _add_dns(self, udpdata):
elif answer.type == dpkt.dns.DNS_SOA:
ans["type"] = "SOA"
ans["data"] = ",".join([answer.mname,
answer.rname,
str(answer.serial),
str(answer.refresh),
str(answer.retry),
str(answer.expire),
str(answer.minimum)])
answer.rname,
str(answer.serial),
str(answer.refresh),
str(answer.retry),
str(answer.expire),
str(answer.minimum)])
elif answer.type == dpkt.dns.DNS_HINFO:
ans["type"] = "HINFO"
ans["data"] = " ".join(answer.text)
Expand All @@ -344,7 +347,7 @@ def _add_dns(self, udpdata):
query["answers"].append(ans)

if dns.rcode == dpkt.dns.DNS_RCODE_NXDOMAIN:
ans = { }
ans = {}
ans["type"] = "NXDOMAIN"
ans["data"] = ""
query["answers"].append(ans)
Expand Down Expand Up @@ -391,7 +394,7 @@ def _check_http(self, tcpdata):
r.unpack(tcpdata)
except dpkt.dpkt.UnpackError:
if r.method is not None or r.version is not None or \
r.uri is not None:
r.uri is not None:
return True
return False

Expand All @@ -415,7 +418,9 @@ def _add_http(self, conn, tcpdata):
try:
entry = {"count": 1}

if "host" in http.headers and re.match(r'^([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9])(\.([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9]))+(:[0-9]{1,5})?$', http.headers["host"], re.IGNORECASE):
if "host" in http.headers and re.match(
r'^([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9])(\.([A-Z0-9]|[A-Z0-9][A-Z0-9\-]{0,61}[A-Z0-9]))+(:[0-9]{1,5})?$',
http.headers["host"], re.IGNORECASE):
entry["host"] = convert_to_printable(http.headers["host"])
else:
entry["host"] = conn["dst"]
Expand Down Expand Up @@ -491,15 +496,15 @@ def _add_irc(self, conn, tcpdata):
reqc = ircMessage()
reqs = ircMessage()
filters_sc = ["266"]
client = reqc.getClientMessages(tcpdata)
for message in client:
message.update(conn)
server = reqs.getServerMessagesFilter(tcpdata, filters_sc)
for message in server:
message.update(conn)
client = reqc.getClientMessages(tcpdata)
for message in client:
message.update(conn)
server = reqs.getServerMessagesFilter(tcpdata, filters_sc)
for message in server:
message.update(conn)
self.irc_requests = self.irc_requests + \
client + \
server
client + \
server
except Exception:
return False

Expand Down Expand Up @@ -575,9 +580,11 @@ def run(self):
connection["dport"] = tcp.dport
self._tcp_dissect(connection, tcp.data)

src, sport, dst, dport = (connection["src"], connection["sport"], connection["dst"], connection["dport"])
if not ((dst, dport, src, sport) in self.tcp_connections_seen or (src, sport, dst, dport) in self.tcp_connections_seen):
self.tcp_connections.append((src, sport, dst, dport, offset, ts-first_ts))
src, sport, dst, dport = (
connection["src"], connection["sport"], connection["dst"], connection["dport"])
if not ((dst, dport, src, sport) in self.tcp_connections_seen or (
src, sport, dst, dport) in self.tcp_connections_seen):
self.tcp_connections.append((src, sport, dst, dport, offset, ts - first_ts))
self.tcp_connections_seen.add((src, sport, dst, dport))

elif ip.p == dpkt.ip.IP_PROTO_UDP:
Expand All @@ -590,9 +597,11 @@ def run(self):
connection["dport"] = udp.dport
self._udp_dissect(connection, udp.data)

src, sport, dst, dport = (connection["src"], connection["sport"], connection["dst"], connection["dport"])
if not ((dst, dport, src, sport) in self.udp_connections_seen or (src, sport, dst, dport) in self.udp_connections_seen):
self.udp_connections.append((src, sport, dst, dport, offset, ts-first_ts))
src, sport, dst, dport = (
connection["src"], connection["sport"], connection["dst"], connection["dport"])
if not ((dst, dport, src, sport) in self.udp_connections_seen or (
src, sport, dst, dport) in self.udp_connections_seen):
self.udp_connections.append((src, sport, dst, dport, offset, ts - first_ts))
self.udp_connections_seen.add((src, sport, dst, dport))

elif ip.p == dpkt.ip.IP_PROTO_ICMP:
Expand Down Expand Up @@ -628,6 +637,7 @@ def run(self):

return self.results


class NetworkAnalysis(Processing):
"""Network analysis."""

Expand Down Expand Up @@ -662,6 +672,7 @@ def run(self):

return results


def iplayer_from_raw(raw, linktype=1):
"""Converts a raw packet to a dpkt packet regarding of link type.
@param raw: raw packet
Expand All @@ -676,13 +687,15 @@ def iplayer_from_raw(raw, linktype=1):
raise CuckooProcessingError("unknown PCAP linktype")
return ip


def conn_from_flowtuple(ft):
    """Convert the flow tuple into a dictionary (suitable for JSON).

    @param ft: 6-tuple of (src_ip, src_port, dst_ip, dst_port, offset, rel_ts).
    @return: dict keyed by src/sport/dst/dport/offset/time.
    """
    # The tuple's positional order matches this key order exactly.
    keys = ("src", "sport", "dst", "dport", "offset", "time")
    return dict(zip(keys, ft))


# input_iterator should be a class that also supports writing so we can use
# it for the temp files
# this code is mostly taken from some SO post, can't remember the url though
Expand Down Expand Up @@ -717,6 +730,7 @@ def batch_sort(input_iterator, output_path, buffer_size=32000, output_class=None
except Exception:
pass


# magic
class SortCap(object):
"""SortCap is a wrapper around the packet lib (dpkt) that allows us to sort pcaps
Expand Down Expand Up @@ -766,12 +780,14 @@ def next(self):
self.conns.add(flowtuple)
return Keyed((flowtuple, ts, self.ctr), rpkt)


def sort_pcap(inpath, outpath):
    """Use SortCap class together with batch_sort to sort a pcap.

    @param inpath: path of the pcap to read.
    @param outpath: path the sorted pcap is written to.
    @return: 0 on completion.
    """
    reader = SortCap(inpath)

    def make_writer(path):
        # Output files must carry the same link type as the input capture.
        return SortCap(path, linktype=reader.linktype)

    batch_sort(reader, outpath, output_class=make_writer)
    return 0


def flowtuple_from_raw(raw, linktype=1):
"""Parse a packet from a pcap just enough to gain a flow description tuple"""
ip = iplayer_from_raw(raw, linktype)
Expand All @@ -793,6 +809,7 @@ def flowtuple_from_raw(raw, linktype=1):
flowtuple = (sip, dip, sport, dport, proto)
return flowtuple


def payload_from_raw(raw, linktype=1):
"""Get the payload from a packet, the data below TCP/UDP basically"""
ip = iplayer_from_raw(raw, linktype)
Expand All @@ -801,6 +818,7 @@ def payload_from_raw(raw, linktype=1):
except:
return ""


def next_connection_packets(piter, linktype=1):
"""Extract all packets belonging to the same flow from a pcap packet iterator"""
first_ft = None
Expand All @@ -820,6 +838,7 @@ def next_connection_packets(piter, linktype=1):
"direction": first_ft == ft,
}


def packets_for_stream(fobj, offset):
"""Open a PCAP, seek to a packet offset, then get all packets belonging to the same connection"""
pcap = dpkt.pcap.Reader(fobj)
Expand Down
12 changes: 8 additions & 4 deletions modules/processing/strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,14 @@ def run(self):
minchars = self.options.get("minchars", 5)

if nulltermonly:
strings = re.findall("([\x20-\x7e]{" + str(minchars) + ",})\x00", data)
strings += [str(ws.decode("utf-16le")) for ws in re.findall("((?:[\x20-\x7e][\x00]){" + str(minchars) + ",})\x00\x00", data)]
apat = "([\x20-\x7e]{" + str(minchars) + ",})\x00"
strings = re.findall(apat, data)
upat = "((?:[\x20-\x7e][\x00]){" + str(minchars) + ",})\x00\x00"
strings += [str(ws.decode("utf-16le")) for ws in re.findall(upat, data)]
else:
strings = re.findall("([\x20-\x7e]{" + str(minchars) + ",})\x00", data)
strings += [str(ws.decode("utf-16le")) for ws in re.findall("(?:[\x20-\x7e][\x00]){" + str(minchars) + ",}", data)]
apat = "([\x20-\x7e]{" + str(minchars) + ",})\x00"
strings = re.findall(apat, data)
upat = "(?:[\x20-\x7e][\x00]){" + str(minchars) + ",}"
strings += [str(ws.decode("utf-16le")) for ws in re.findall(upat, data)]

return strings
2 changes: 1 addition & 1 deletion modules/processing/suricata.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def run(self):
d = json.loads(l)
except:
log.warning("failed to load JSON from file log")
next
continue
if d["stored"]==True:
src_file = "%s/file.%s" % (SURICATA_FILES_DIR_FULL_PATH,d["id"])
if SURICATA_FILE_COPY_MAGIC_RE and SURICATA_FILE_COPY_DST_DIR and os.path.exists(SURICATA_FILE_COPY_DST_DIR):
Expand Down
4 changes: 2 additions & 2 deletions modules/reporting/moloch.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def run(self,results):
self.pcap_path = os.path.join(self.analysis_path, "dump.pcap")
self.MOLOCH_URL = self.options.get("base",None)

m = re.search(r"\/(?P<task_id>\d+)\/dump.pcap$",self.pcap_path)
m = re.search(r"/(?P<task_id>\d+)/dump.pcap$",self.pcap_path)
if m == None:
log.warning("Unable to find task id from %s" % (self.pcap_path))
return results
Expand Down Expand Up @@ -118,7 +118,7 @@ def run(self,results):

if results.has_key('suricata'):
if results["suricata"].has_key("alerts"):
afastre = re.compile(r".+\[1\:(?P<sid>\d+)\:\d+\].+\{(?P<proto>UDP|TCP|ICMP|(PROTO\:)?\d+)\}\s(?P<src>\d+\.\d+\.\d+\.\d+)(:(?P<sport>\d+))?\s.+\s(?P<dst>\d+\.\d+\.\d+\.\d+)(:(?P<dport>\d+))?")
afastre = re.compile(r".+\[1:(?P<sid>\d+):\d+\].+\{(?P<proto>UDP|TCP|ICMP|(PROTO:)?\d+)\}\s(?P<src>\d+\.\d+\.\d+\.\d+)(:(?P<sport>\d+))?\s.+\s(?P<dst>\d+\.\d+\.\d+\.\d+)(:(?P<dport>\d+))?")
for alert in results["suricata"]["alerts"]:
m = afastre.match(alert)
if m:
Expand Down
2 changes: 1 addition & 1 deletion utils/machine.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,11 +76,11 @@ def main():
log.setLevel(logging.DEBUG)

db = Database()
conf = Config()

if args.resultserver:
resultserver_ip, resultserver_port = args.resultserver.split(":")
else:
conf = Config()
resultserver_ip = conf.resultserver.ip
resultserver_port = conf.resultserver.port

Expand Down

0 comments on commit bd2690c

Please sign in to comment.