
Proxy Drop Alert on IP Destinations #778

Merged (12 commits) on Nov 6, 2018
61 changes: 61 additions & 0 deletions alerts/proxy_drop_ip.py
@@ -0,0 +1,61 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation


from lib.alerttask import AlertTask
from mozdef_util.query_models import QueryStringMatch, SearchQuery, TermMatch
import re


class AlertProxyDropIP(AlertTask):
def main(self):
search_query = SearchQuery(minutes=20)

search_query.add_must([
TermMatch('category', 'squid'),
TermMatch('tags', 'squid'),
TermMatch('details.proxyaction', 'TCP_DENIED/-')
])

# Match on 1.1.1.1, http://1.1.1.1, or https://1.1.1.1
# This will over-match on domains with short (1-3 character) labels,
# e.g. foo.bar.baz.com, but those over-matches are weeded out in onAggregation() below
ip_regex = r'/.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'
search_query.add_must([
QueryStringMatch('details.destination: {}'.format(ip_regex))
])

self.filtersManual(search_query)
self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
self.walkAggregations(threshold=1)

# Set alert properties
def onAggregation(self, aggreg):
category = 'squid'
tags = ['squid', 'proxy']
severity = 'WARNING'

# The Lucene regex above can over-match, so we double-check with this pattern
# to ensure a destination is truly an IP before adding it to the dropped list
pattern = r'^(http:\/\/|https:\/\/|)\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'

dropped_destinations = set()

for event in aggreg['allevents']:
if re.search(pattern, event['_source']['details']['destination']):
dropped_destinations.add(
event['_source']['details']['destination'])

# If all matches were over-matches, don't raise the alert
if len(dropped_destinations) == 0:
return None

summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following IP-based destination(s): {1}'.format(
aggreg['value'],
",".join(sorted(dropped_destinations))
)

return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
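
A minimal standalone sketch (not part of this PR) of the two-stage filtering described in the comments above: the broad Lucene regex in main() intentionally over-matches domains with short labels, and the stricter pattern in onAggregation() keeps only true IP-based destinations. The candidate destinations below are illustrative examples, not data from the PR.

import re

# Same validation pattern as onAggregation() above
ip_pattern = r'^(http:\/\/|https:\/\/|)\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'

candidates = [
    'http://1.2.3.4/somepath',    # kept: IP destination
    '1.2.3.4:443',                # kept: bare IP with port
    'https://foo.bar.baz.com/x',  # dropped: short-label domain over-match
    'foo.bar.baz.com:80',         # dropped: short-label domain over-match
]

dropped_destinations = {d for d in candidates if re.search(ip_pattern, d)}
print(sorted(dropped_destinations))
# ['1.2.3.4:443', 'http://1.2.3.4/somepath']
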
124 changes: 124 additions & 0 deletions tests/alerts/test_proxy_drop_ip.py
@@ -0,0 +1,124 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite


class TestAlertProxyDropIP(AlertTestSuite):
alert_filename = "proxy_drop_ip"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
"_type": "event",
"_source": {
"category": "squid",
"tags": ["squid"],
"details": {
"sourceipaddress": "1.2.3.4",
"destination": "http://1.2.3.5/",
"proxyaction": "TCP_DENIED/-",
}
}
}

# This event is an alternate destination that we'd want to aggregate
default_event2 = AlertTestSuite.copy(default_event)
default_event2["_source"]["details"]["destination"] = "1.2.3.5:1337"

# This event is the default negative event that will not cause the
# alert to trigger
default_negative_event = AlertTestSuite.copy(default_event)
default_negative_event["_source"]["details"]["destination"] = "http://foo.mozilla.com"

# This alert is the expected result from running this task
default_alert = {
"category": "squid",
"tags": ['squid', 'proxy'],
"severity": "WARNING",
"summary": 'Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following IP-based destination(s): http://1.2.3.5/',
}

# This alert is the expected result from this task against multiple matching events
default_alert_aggregated = AlertTestSuite.copy(default_alert)
default_alert_aggregated[
"summary"] = 'Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following IP-based destination(s): 1.2.3.5:1337,http://1.2.3.5/'

test_cases = []

test_cases.append(
PositiveAlertTestCase(
description="Positive test with default events and default alert expected",
events=AlertTestSuite.create_events(default_event, 1),
expected_alert=default_alert
)
)

test_cases.append(
PositiveAlertTestCase(
description="Positive test with default events and default alert expected - dedup",
events=AlertTestSuite.create_events(default_event, 2),
expected_alert=default_alert
)
)

events1 = AlertTestSuite.create_events(default_event, 1)
events2 = AlertTestSuite.create_events(default_event2, 1)
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default events and default alert expected - different dests",
events=events1 + events2,
expected_alert=default_alert_aggregated
)
)

test_cases.append(
NegativeAlertTestCase(
description="Negative test with default negative event",
events=AlertTestSuite.create_events(default_negative_event, 1),
)
)

events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['category'] = 'bad'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with incorrect category",
events=events,
)
)

events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['tags'] = 'bad tag example'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with incorrect tags",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 241})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 241})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
)
)

events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['details']['destination'] = 'http://evil.com/evil.pdf'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with non blacklisted extension",
events=events,
)
)
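
An aside on default_alert_aggregated above (not part of the test file): the alert joins the sorted destination set with commas, and digits sort before letters in ASCII, which is why '1.2.3.5:1337' precedes 'http://1.2.3.5/' in the expected summary. A one-liner to confirm the ordering:

dropped_destinations = {'http://1.2.3.5/', '1.2.3.5:1337'}
print(",".join(sorted(dropped_destinations)))
# 1.2.3.5:1337,http://1.2.3.5/
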
21 changes: 21 additions & 0 deletions tests/mozdef_util/query_models/test_query_string_match.py
@@ -9,6 +9,9 @@
hostname_test_regex = 'hostname: /(.*\.)*(groupa|groupb)\.(.*\.)*subdomain\.(.*\.)*.*/'
filename_matcher = 'summary: /.*\.(exe|sh)/'

# Note that this has potential for over-matching on foo.bar.baz.com, which needs further validation in alerts
ip_matcher = 'destination: /.*\..{1,3}\..{1,3}\..{1,3}(:.*|\/.*)/'


class TestQueryStringMatchPositiveTestSuite(PositiveTestSuite):
def query_tests(self):
@@ -42,6 +45,18 @@ def query_tests(self):
{'summary': 'test.exe'},
{'summary': 'test.sh'},
],

QueryStringMatch(ip_matcher): [
{'destination': 'http://1.2.3.4/somepath'},
{'destination': 'https://1.2.3.4/somepath'},
{'destination': '1.2.3.4/somepath'},
{'destination': '1.2.3.4/somepath'},
{'destination': '1.2.3.4:443'},
{'destination': '1.2.3.4:80'},
# Over-match examples (which need to be validated further in alerts)
{'destination': 'https://foo.bar.baz.com/somepath'},
{'destination': 'foo.bar.baz.com:80'},
]
}
return tests

@@ -83,5 +98,11 @@ def query_tests(self):
{'summary': '.exe.test'},
],

QueryStringMatch(ip_matcher): [
{'destination': 'https://foo.bar.mozilla.com/somepath'},
{'destination': 'foo.bar.mozilla.com:80'},
{'destination': 'http://example.com/somepath'},
{'destination': 'example.com:443'}
],
}
return tests
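
To make the over-match note on ip_matcher concrete, here is a hedged approximation (not part of the test file) of the Lucene regex using Python's re. Lucene regexp queries are anchored to the whole value, so fullmatch() emulates that; the sample destinations mirror the positive and negative cases above.

import re

# Body of ip_matcher's Lucene regex with the surrounding slashes stripped;
# '\/' is only an escaped '/' in the query string, so a plain '/' here
lucene_body = r'.*\..{1,3}\..{1,3}\..{1,3}(:.*|/.*)'

for destination in ['http://1.2.3.4/somepath',
                    '1.2.3.4:443',
                    'foo.bar.baz.com:80',
                    'http://example.com/somepath']:
    print(destination, bool(re.fullmatch(lucene_body, destination)))
# http://1.2.3.4/somepath True
# 1.2.3.4:443 True
# foo.bar.baz.com:80 True   <- over-match: labels of 1-3 characters look like octets
# http://example.com/somepath False
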