Skip to content
Permalink
Browse files

Merge remote-tracking branch 'origin/master' into testing_kibanaurl

  • Loading branch information...
pwnbus committed May 14, 2019
2 parents 5f9ccef + 9d33494 commit 48ec5f26579410be0d788d9555925d316d129b3d
Showing with 1,396 additions and 661 deletions.
  1. +5 −6 CODEOWNERS
  2. +2 −2 Makefile
  3. +38 −33 alerts/celeryconfig.py
  4. +37 −0 alerts/cloudtrail_excessive_describe.py
  5. +1 −0 alerts/get_watchlist.conf
  6. +10 −14 alerts/get_watchlist.py
  7. +153 −107 alerts/lib/alerttask.py
  8. +1 −1 cloudy_mozdef/ci/docker_tag_or_push
  9. +23 −0 cloudy_mozdef/cloudformation/base-iam.yml
  10. +178 −20 cloudy_mozdef/cloudformation/mozdef-instance.yml
  11. +43 −10 cloudy_mozdef/cloudformation/mozdef-parent.yml
  12. +7 −2 cloudy_mozdef/cloudformation/mozdef-security-group.yml
  13. +33 −0 cloudy_mozdef/cloudformation/mozdef-sqs.yml
  14. +2 −0 cloudy_mozdef/lambda_layer/Makefile
  15. +85 −15 docker/compose/docker-compose-cloudy-mozdef.yml
  16. +8 −1 docker/compose/docker-compose.yml
  17. +1 −0 docker/compose/mozdef_alerts/Dockerfile
  18. +1 −0 docker/compose/mozdef_alerts/files/config.py
  19. +5 −0 docker/compose/mozdef_alerts/files/get_watchlist.conf
  20. +7 −0 docker/compose/mozdef_base/Dockerfile
  21. +5 −0 docker/compose/mozdef_cognito_proxy/Dockerfile
  22. +4 −0 docker/compose/mozdef_cognito_proxy/README.md
  23. +104 −0 docker/compose/mozdef_cognito_proxy/files/default.conf
  24. +1 −0 docker/compose/mozdef_cognito_proxy/files/htpasswd.example
  25. +16 −0 docker/compose/mozdef_cognito_proxy/files/nginx.conf
  26. +1 −2 meteor/client/menu.html
  27. +3 −2 meteor/client/mozdef.html
  28. +2 −2 meteor/client/mozdef.js
  29. +15 −4 meteor/imports/themes/classic/mozdef.css
  30. +15 −5 meteor/imports/themes/dark/mozdef.css
  31. +21 −4 meteor/imports/themes/light/mozdef.css
  32. +477 −429 meteor/package-lock.json
  33. +1 −1 meteor/package.json
  34. BIN meteor/public/images/moz_defense-platform_01.png
  35. +2 −0 requirements.txt
  36. +6 −1 tests/alerts/alert_test_suite.py
  37. +83 −0 tests/alerts/test_cloudtrail_excessive_describe.py
@@ -4,10 +4,9 @@
* @pwnbus @mpurzynski @Phrozyn @tristanweir

# Allow review by Gene or Andrew for cloudy MozDef code
/cloudy_mozdef/ @gene1wood @andrewkrug
/cloudy_mozdef/ @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug

# Anyone in EIS can review documentation
# https://github.com/orgs/mozilla/teams/enterprise-information-security/members
/README.md @mozilla/enterprise-information-security
/CHANGELOG @mozilla/enterprise-information-security
/docs/ @mozilla/enterprise-information-security
# Entire set can review certain documentation files
/README.md @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
/CHANGELOG @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
/docs/ @pwnbus @mpurzynski @Phrozyn @tristanweir @gene1wood @andrewkrug
@@ -7,7 +7,7 @@
ROOT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
DKR_IMAGES := mozdef_alertactions mozdef_alerts mozdef_base mozdef_bootstrap mozdef_meteor mozdef_rest \
mozdef_mq_worker mozdef_loginput mozdef_cron mozdef_elasticsearch mozdef_mongodb \
mozdef_syslog mozdef_nginx mozdef_tester mozdef_rabbitmq mozdef_kibana
mozdef_syslog mozdef_nginx mozdef_tester mozdef_rabbitmq mozdef_kibana mozdef_cognito_proxy
BUILD_MODE := build ## Pass `pull` in order to pull images instead of building them
NAME := mozdef
VERSION := 0.1
@@ -65,7 +65,7 @@ build: build-from-cwd

.PHONY: build-from-cwd
build-from-cwd: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching)
docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(NO_CACHE) $(BUILD_MODE)
docker-compose -f docker/compose/docker-compose.yml -p $(NAME) $(NO_CACHE) $(BUILD_MODE) --parallel

.PHONY: build-from-github
build-from-github: ## Build local MozDef images from the github branch (use make NO_CACHE=--no-cache build to disable caching).
@@ -1,3 +1,4 @@
import os
from celery import Celery
from importlib import import_module
from lib.config import ALERTS, LOGGING, RABBITMQ
@@ -6,42 +7,47 @@
# Alert files to include
alerts_include = []
for alert in ALERTS.keys():
alerts_include.append('.'.join((alert).split('.')[:-1]))
alerts_include.append(".".join((alert).split(".")[:-1]))
alerts_include = list(set(alerts_include))

BROKER_URL = 'amqp://{0}:{1}@{2}:{3}//'.format(
RABBITMQ['mquser'],
RABBITMQ['mqpassword'],
RABBITMQ['mqserver'],
RABBITMQ['mqport']
)
# XXX TBD this should get wrapped into an object that provides pyconfig
if os.getenv("OPTIONS_MQPROTOCOL", "amqp") == "sqs":
BROKER_URL = "sqs://@"
BROKER_TRANSPORT_OPTIONS = {'region': os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('.')[1]}
CELERY_RESULT_BACKEND = None
alert_queue_name = os.getenv('OPTIONS_ALERTSQSQUEUEURL').split('/')[4]
CELERY_DEFAULT_QUEUE = alert_queue_name
CELERY_QUEUES = {
alert_queue_name: {"exchange": alert_queue_name, "binding_key": alert_queue_name}
}
else:
BROKER_URL = "amqp://{0}:{1}@{2}:{3}//".format(
RABBITMQ["mquser"], RABBITMQ["mqpassword"], RABBITMQ["mqserver"], RABBITMQ["mqport"]
)
CELERY_QUEUES = {
"celery-default": {"exchange": "celery-default", "binding_key": "celery-default"}
}
CELERY_DEFAULT_QUEUE = 'celery-default'

CELERY_DISABLE_RATE_LIMITS = True
CELERYD_CONCURRENCY = 1
CELERY_IGNORE_RESULT = True
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_DEFAULT_QUEUE = 'celery-default'
CELERY_QUEUES = {
'celery-default': {
"exchange": "celery-default",
"binding_key": "celery-default",
},
}

CELERY_ACCEPT_CONTENT = ["json"]
CELERY_TASK_SERIALIZER = "json"
CELERYBEAT_SCHEDULE = {}

# Register frequency of the tasks in the scheduler
for alert in ALERTS.keys():
CELERYBEAT_SCHEDULE[alert] = {
'task': alert,
'schedule': ALERTS[alert]['schedule'],
'options': {'queue': 'celery-default', "exchange": "celery-default"},
"task": alert,
"schedule": ALERTS[alert]["schedule"],
"options": {"queue": CELERY_DEFAULT_QUEUE, "exchange": CELERY_DEFAULT_QUEUE},
}
# add optional parameters:
if 'args' in ALERTS[alert]:
CELERYBEAT_SCHEDULE[alert]['args']=ALERTS[alert]['args']
if 'kwargs' in ALERTS[alert]:
CELERYBEAT_SCHEDULE[alert]['kwargs']=ALERTS[alert]['kwargs']
if "args" in ALERTS[alert]:
CELERYBEAT_SCHEDULE[alert]["args"] = ALERTS[alert]["args"]
if "kwargs" in ALERTS[alert]:
CELERYBEAT_SCHEDULE[alert]["kwargs"] = ALERTS[alert]["kwargs"]

# Load logging config
dictConfig(LOGGING)
@@ -52,27 +58,26 @@
# app.conf.update(
# CELERY_TASK_RESULT_EXPIRES=3600,
# )
app = Celery('alerts',
include=alerts_include)
app.config_from_object('celeryconfig', force=True)
app = Celery("alerts", include=alerts_include)
app.config_from_object("celeryconfig", force=True)

# As a result of celery 3 to celery 4, we need to dynamically
# register all of the alert tasks specifically
for alert_namespace in CELERYBEAT_SCHEDULE:
try:
alert_tokens = alert_namespace.split('.')
alert_tokens = alert_namespace.split(".")
alert_module_name = alert_tokens[0]
alert_classname = alert_tokens[1]
alert_module = import_module(alert_module_name)
alert_class = getattr(alert_module, alert_classname)
app.register_task(alert_class())
except ImportError as e:
print "Error importing {}".format(alert_namespace)
print e
print("Error importing {}".format(alert_namespace))
print(e)
pass
except Exception as e:
print "Error addding alert"
print e
print("Error adding alert")
print(e)

if __name__ == '__main__':
if __name__ == "__main__":
app.start()
@@ -0,0 +1,37 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation


from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch, ExistsMatch


class AlertCloudtrailExcessiveDescribe(AlertTask):
    """Alert when a single AWS service source issues an excessive number of
    Describe* cloudtrail calls within a short window."""

    def main(self):
        """Query the last 20 minutes of cloudtrail events and aggregate them."""
        # Look back over a 20 minute window
        query = SearchQuery(minutes=20)

        # Restrict to cloudtrail Describe events that carry a source field
        filters = [
            TermMatch('source', 'cloudtrail'),
            TermMatch('details.eventverb', 'Describe'),
            ExistsMatch('details.source'),
        ]
        query.add_must(filters)

        self.filtersManual(query)
        # Bucket results by details.source (the AWS service name),
        # keeping up to 2 sample events per bucket
        self.searchEventsAggregated('details.source', samplesLimit=2)
        # Fire onAggregation only for buckets with more than 50 events
        self.walkAggregations(threshold=50)

    def onAggregation(self, aggreg):
        """Build the alert dict for one aggregation bucket.

        aggreg contains 'value' (the details.source bucket key),
        'count' (events in the bucket) and 'events' (sample events).
        """
        summary = "Excessive Describe calls on {0} ({1})".format(aggreg['value'], aggreg['count'])

        # Assemble the alert from the aggregated properties
        return self.createAlertDict(
            summary,
            'access',
            ['cloudtrail'],
            aggreg['events'],
            'WARNING',
        )
@@ -2,3 +2,4 @@
# set the following to your protected endpoint api_url
api_url = http://localhost:8081/getwatchlist
jwt_secret = secret
use_auth = false
@@ -17,27 +17,23 @@

class AlertWatchList(AlertTask):
def main(self):
self.parse_config('get_watchlist.conf', ['api_url', 'jwt_secret'])
self.parse_config('get_watchlist.conf', ['api_url', 'jwt_secret', 'use_auth'])

jwt_token = JWTAuth(self.config.jwt_secret)
jwt_token.set_header_format('Bearer %s')
jwt_token = None
if self.config.use_auth.lower() != 'false':
jwt_token = JWTAuth(self.config.jwt_secret)
jwt_token.set_header_format('Bearer %s')

# Connect to rest api and grab response
r = requests.get(self.config.api_url, auth=jwt_token)
status = r.status_code
index = 0
if status == 200:
status = r.status_code
# Connect to rest api and grab response
if r.ok:
response = r.text
terms_list = json.loads(response)
while index < len(terms_list):
term = terms_list[index]
term = '"{}"'.format(term)
for term in terms_list:
self.watchterm = term
index += 1
self.process_alert(term)
self.process_alert()
else:
logger.error('The watchlist request failed. Status {0}.\n'.format(status))
logger.error('The watchlist request failed. Status {0}.\n'.format(r))

def process_alert(self, term):
search_query = SearchQuery(minutes=20)
Oops, something went wrong.

0 comments on commit 48ec5f2

Please sign in to comment.
You can’t perform that action at this time.