This repository has been archived by the owner on Nov 3, 2021. It is now read-only.
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #902 from mozilla/proxy_drop_exfil_domain
Create an alert on attempts to use known exfil domains
- Loading branch information
Showing
3 changed files
with
179 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,2 @@ | ||
[options] | ||
exfil_domains = pastebin.com,www.sendspace.com |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
#!/usr/bin/env python | ||
|
||
# This Source Code Form is subject to the terms of the Mozilla Public | ||
# License, v. 2.0. If a copy of the MPL was not distributed with this | ||
# file, You can obtain one at http://mozilla.org/MPL/2.0/. | ||
# Copyright (c) 2014 Mozilla Corporation | ||
|
||
|
||
from lib.alerttask import AlertTask | ||
from mozdef_util.query_models import SearchQuery, TermMatch, QueryStringMatch, ExistsMatch, PhraseMatch, WildcardMatch | ||
|
||
|
||
class AlertProxyDropExfilDomains(AlertTask):
    """Alert on squid proxy DENY events whose destination is a known
    exfiltration domain (configured in proxy_drop_exfil_domains.conf)."""

    def main(self):
        # exfil_domains is a comma-separated list, e.g.
        # "pastebin.com,www.sendspace.com" (see the .conf file).
        self.parse_config('proxy_drop_exfil_domains.conf', ['exfil_domains'])

        search_query = SearchQuery(minutes=20)

        search_query.add_must([
            TermMatch('category', 'squid'),
            TermMatch('tags', 'squid'),
            TermMatch('details.proxyaction', "TCP_DENIED/-")
        ])

        # Only notify on the domains listed in the config.
        # Escape each domain so regex metacharacters (notably '.') match
        # literally; otherwise "pastebin.com" would also match
        # "pastebinXcom" and similar lookalikes.
        import re
        escaped_domains = [
            re.escape(domain)
            for domain in self.config.exfil_domains.split(',')
        ]
        domain_regex = "/({0}).*/".format('|'.join(escaped_domains))
        search_query.add_must([
            QueryStringMatch('details.destination: {}'.format(domain_regex))
        ])

        self.filtersManual(search_query)

        # Aggregate on the source IP address, keeping at most 10 sample
        # events per aggregation.
        self.searchEventsAggregated('details.sourceipaddress', samplesLimit=10)
        # Alert on every aggregation: any single matching event is
        # suspicious enough to notify on.
        self.walkAggregations(threshold=1)

    def onAggregation(self, aggreg):
        """Build the alert dict for one aggregation.

        aggreg['count']: number of items in the aggregation
        aggreg['value']: value of the aggregation field (the source IP)
        aggreg['events']: sample events; aggreg['allevents']: all events
        """
        category = 'squid'
        tags = ['squid', 'proxy']
        severity = 'WARNING'

        # Collect the distinct destination domains, dropping any :port
        # suffix from the destination value.
        exfil_domains = set()
        for event in aggreg['allevents']:
            domain = event['_source']['details']['destination'].split(':')
            exfil_domains.add(domain[0])

        summary = 'Suspicious Proxy DROP event(s) detected from {0} to the following exfil domain(s): {1}'.format(
            aggreg['value'],
            ",".join(sorted(exfil_domains))
        )

        # Create the alert object based on these properties
        return self.createAlertDict(summary, category, tags, aggreg['events'], severity)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,116 @@ | ||
# This Source Code Form is subject to the terms of the Mozilla Public | ||
# License, v. 2.0. If a copy of the MPL was not distributed with this | ||
# file, You can obtain one at http://mozilla.org/MPL/2.0/. | ||
# Copyright (c) 2017 Mozilla Corporation | ||
from positive_alert_test_case import PositiveAlertTestCase | ||
from negative_alert_test_case import NegativeAlertTestCase | ||
from alert_test_suite import AlertTestSuite | ||
|
||
|
||
class TestProxyDropExfilDomains(AlertTestSuite):
    """Test suite for the AlertProxyDropExfilDomains alert."""

    alert_filename = "proxy_drop_exfil_domains"
    alert_classname = "AlertProxyDropExfilDomains"

    # Baseline event: matches every filter, so the alert should fire.
    default_event = {
        "_type": "event",
        "_source": {
            "category": "squid",
            "tags": ["squid"],
            "details": {
                "sourceipaddress": "1.2.3.4",
                "destination": "pastebin.com",
                "proxyaction": "TCP_DENIED/-",
            }
        }
    }

    # Same event aimed at the second configured exfil domain; used to
    # exercise aggregation across destinations.
    default_event2 = AlertTestSuite.copy(default_event)
    default_event2["_source"]["details"]["destination"] = "www.sendspace.com"

    # Destination not on the exfil list: must not trigger the alert.
    default_negative_event = AlertTestSuite.copy(default_event)
    default_negative_event["_source"]["details"]["destination"] = "foo.mozilla.com"

    # Expected alert for hits against a single exfil domain.
    default_alert = {
        "category": "squid",
        "tags": ['squid', 'proxy'],
        "severity": "WARNING",
        "summary": 'Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com',
    }

    # Expected alert when events for both domains aggregate together.
    default_alert_aggregated = AlertTestSuite.copy(default_alert)
    default_alert_aggregated["summary"] = (
        'Suspicious Proxy DROP event(s) detected from 1.2.3.4 to the following exfil domain(s): pastebin.com,www.sendspace.com'
    )

    test_cases = []

    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected",
            events=AlertTestSuite.create_events(default_event, 1),
            expected_alert=default_alert
        )
    )

    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected - dedup",
            events=AlertTestSuite.create_events(default_event, 2),
            expected_alert=default_alert
        )
    )

    pastebin_hits = AlertTestSuite.create_events(default_event, 1)
    sendspace_hits = AlertTestSuite.create_events(default_event2, 1)
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default events and default alert expected - different destinations",
            events=pastebin_hits + sendspace_hits,
            expected_alert=default_alert_aggregated
        )
    )

    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test with default negative event",
            events=AlertTestSuite.create_events(default_negative_event, 1),
        )
    )

    bad_category_events = AlertTestSuite.create_events(default_event, 10)
    for an_event in bad_category_events:
        an_event['_source']['category'] = 'bad'
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with incorrect category",
            events=bad_category_events,
        )
    )

    bad_tag_events = AlertTestSuite.create_events(default_event, 10)
    for an_event in bad_tag_events:
        an_event['_source']['tags'] = 'bad tag example'
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with incorrect tags",
            events=bad_tag_events,
        )
    )

    # Events older than the alert's search window must not trigger it.
    stale_events = AlertTestSuite.create_events(default_event, 10)
    for an_event in stale_events:
        for ts_field in ('utctimestamp', 'receivedtimestamp'):
            an_event['_source'][ts_field] = AlertTestSuite.subtract_from_timestamp_lambda({
                'minutes': 241})
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with old timestamp",
            events=stale_events,
        )
    )