Skip to content
Permalink
Browse files

Merge pull request #1112 from mpurzynski/adjust_proxy_alerts

Adjust proxy alerts
  • Loading branch information...
pwnbus committed Feb 28, 2019
2 parents e9566f6 + 607850f commit f0c45c26482a23421332a9163192731cb576b22f
@@ -12,28 +12,28 @@

class AlertProxyDropExecutable(AlertTask):
    """Alert on proxy TCP_DENIED events whose destination looks like an
    executable download (extension list comes from config).

    Reconstructed from diff residue: the source here was a unified diff with
    old and new revisions interleaved; this is the post-commit ("proxy"
    category, double-quoted) side.
    """

    def main(self):
        # 'extensions' is a comma-separated list of file extensions to alert on.
        self.parse_config("proxy_drop_executable.conf", ["extensions"])

        search_query = SearchQuery(minutes=20)

        search_query.add_must(
            [
                TermMatch("category", "proxy"),
                TermMatch("details.proxyaction", "TCP_DENIED"),
            ]
        )

        # Only notify on certain file extensions from config.
        # Comma-separated config list becomes a regex alternation, e.g.
        # "exe,dll" -> "/.*\.(exe|dll)/" (Lucene regex query syntax).
        filename_regex = "/.*\.({0})/".format(self.config.extensions.replace(",", "|"))
        search_query.add_must(
            [QueryStringMatch("details.destination: {}".format(filename_regex))]
        )

        self.filtersManual(search_query)

        # Aggregate on source IP, keep at most 10 sample events per bucket.
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        # Alert on every aggregation bucket (threshold=1): any single drop
        # of an executable is worth notifying on.
        self.walkAggregations(threshold=1)

    def onAggregation(self, aggreg):
        # aggreg['count']: number of items in the aggregation
        # aggreg['value']: value of the aggregation field (the source IP here)
        # aggreg['events']: sampled events; aggreg['allevents']: all events
        category = "squid"
        tags = ["squid", "proxy"]
        severity = "WARNING"

        # Deduplicate destinations so the summary stays readable.
        dropped_urls = set()
        for event in aggreg["allevents"]:
            dropped_urls.add(event["_source"]["details"]["destination"])

        summary = "Suspicious Proxy DROP event(s) detected from {0} to the following executable file destination(s): {1}".format(
            aggreg["value"], ",".join(sorted(dropped_urls))
        )

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg["events"], severity)
@@ -8,54 +8,64 @@

from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
import re
import netaddr


class AlertProxyDropIP(AlertTask):
    """Alert on proxy TCP_DENIED events whose requested host is a raw IP
    address rather than a hostname.

    Reconstructed from diff residue (post-commit side). Bug fix: the original
    counted matches with ``i += i``, which leaves the counter at 0 forever, so
    ``final_aggr["count"]`` was always 0; the count is now derived from the
    list of matched events.
    """

    def main(self):
        search_query = SearchQuery(minutes=20)

        search_query.add_must(
            [
                TermMatch("category", "proxy"),
                TermMatch("details.proxyaction", "TCP_DENIED"),
            ]
        )

        # Match on everything that looks like the first octet of either the
        # IPv4 or the IPv6 address. This will over-match, but gets weeded out
        # in onAggregation with a real address parse.
        ip_regex = "/[0-9a-fA-F]{1,4}.*/"

        search_query.add_must([QueryStringMatch("details.host: {}".format(ip_regex))])

        self.filtersManual(search_query)
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        self.walkAggregations(threshold=1)

    # Set alert properties
    def onAggregation(self, aggreg):
        category = "squid"
        tags = ["squid", "proxy"]
        severity = "WARNING"

        dropped_destinations = set()
        # Rebuild the aggregation with only the events whose host parses as a
        # genuine IP address; the Lucene query above over-matches.
        final_aggr = {}
        final_aggr["value"] = aggreg["value"]
        final_aggr["allevents"] = []
        final_aggr["events"] = []

        for event in aggreg["allevents"]:
            host = event["_source"]["details"]["host"]
            try:
                # netaddr accepts both IPv4 and IPv6 literals.
                netaddr.IPAddress(host)
            except (netaddr.core.AddrFormatError, ValueError):
                continue
            dropped_destinations.add(host)
            final_aggr["allevents"].append(event)
            final_aggr["events"].append(event)

        # FIX: was `i += i` (always 0); count the events actually kept.
        final_aggr["count"] = len(final_aggr["allevents"])

        # If it's all over-matches, don't throw the alert
        if len(dropped_destinations) == 0:
            return None

        summary = "Suspicious Proxy DROP event(s) detected from {0} to the following IP-based destination(s): {1}".format(
            final_aggr["value"], ",".join(sorted(dropped_destinations))
        )

        return self.createAlertDict(
            summary, category, tags, final_aggr["allevents"], severity
        )
@@ -7,52 +7,46 @@


from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
from mozdef_util.query_models import SearchQuery, TermMatch


class AlertProxyDropNonStandardPort(AlertTask):
    """Alert on proxy TCP_DENIED CONNECT attempts to non-standard ports
    (standard ports are excluded via config).

    Reconstructed from diff residue: the source here was a unified diff with
    old and new revisions interleaved; this is the post-commit side, which
    replaces the old port-regex must_not with per-port TermMatch exclusions.
    """

    def main(self):
        # 'excludedports' is a comma-separated list of ports NOT to alert on.
        self.parse_config("proxy_drop_non_standard_port.conf", ["excludedports"])

        search_query = SearchQuery(minutes=20)

        search_query.add_must(
            [
                TermMatch("category", "proxy"),
                TermMatch("details.proxyaction", "TCP_DENIED"),
                TermMatch("details.method", "CONNECT"),
            ]
        )
        # Exclude each allowed port with an exact term match instead of a
        # regex over the destination string.
        for port in self.config.excludedports.split(","):
            search_query.add_must_not([TermMatch("details.destinationport", port)])

        self.filtersManual(search_query)

        # Aggregate on source IP, keep at most 10 sample events per bucket.
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        # Alert on every aggregation bucket (threshold=1).
        self.walkAggregations(threshold=1)

    def onAggregation(self, aggreg):
        category = "squid"
        tags = ["squid", "proxy"]
        severity = "WARNING"

        # Deduplicate destinations so the summary stays readable.
        destinations = set()
        for event in aggreg["allevents"]:
            destinations.add(event["_source"]["details"]["destination"])

        summary = "Suspicious Proxy DROP event(s) detected from {0} to the following non-std port destination(s): {1}".format(
            aggreg["value"], ",".join(sorted(destinations))
        )

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg["events"], severity)
@@ -6,34 +6,26 @@
# Copyright (c) 2014 Mozilla Corporation


from urlparse import urlparse
from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch
from mozdef_util.query_models import SearchQuery, TermMatch


class AlertProxyExfilDomains(AlertTask):
    """Alert on proxy events whose host matches a configured list of known
    exfiltration domains.

    Reconstructed from diff residue: the source here was a unified diff with
    old and new revisions interleaved; this is the post-commit side, which
    replaces the old destination-regex match (and its urlparse fallback) with
    exact TermMatch lookups on details.host.
    """

    def main(self):
        # 'exfil_domains' is a comma-separated list of domains to watch.
        self.parse_config("proxy_exfil_domains.conf", ["exfil_domains"])

        search_query = SearchQuery(minutes=20)

        search_query.add_must([TermMatch("category", "proxy")])

        # Only notify on domains listed in the config; add_should makes the
        # domains an OR set on top of the must clause above.
        for domain in self.config.exfil_domains.split(","):
            search_query.add_should([TermMatch("details.host", domain)])

        self.filtersManual(search_query)

        # Aggregate on source IP, keep at most 10 sample events per bucket.
        self.searchEventsAggregated("details.sourceipaddress", samplesLimit=10)
        # Alert on every aggregation bucket (threshold=1).
        self.walkAggregations(threshold=1)

    def onAggregation(self, aggreg):
        # aggreg['count']: number of items in the aggregation
        # aggreg['value']: value of the aggregation field (the source IP here)
        # aggreg['events']: sampled events; aggreg['allevents']: all events
        category = "squid"
        tags = ["squid", "proxy"]
        severity = "WARNING"

        # Deduplicate the matched hosts for the summary.
        exfil_domains = set()
        for event in aggreg["allevents"]:
            exfil_domains.add(event["_source"]["details"]["host"])

        summary = "Suspicious Proxy event(s) detected from {0} to the following exfil domain(s): {1}".format(
            aggreg["value"], ",".join(sorted(exfil_domains))
        )

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg["events"], severity)
Oops, something went wrong.

0 comments on commit f0c45c2

Please sign in to comment.
You can’t perform that action at this time.
You signed in with another tab or window. Reload to refresh your session. You signed out in another tab or window. Reload to refresh your session.