Skip to content
Browse files

Merge remote-tracking branch 'origin/infosec_workweek' into virtualen…

  • Loading branch information...
pwnbus committed Oct 24, 2018
2 parents 17a0750 + 1adc758 commit 663fd76ab2783b266cbaa27d0c32fe9e3f6b0b3f
Showing 422 changed files with 12,081 additions and 74,635 deletions.
42 .flake8
@@ -0,0 +1,42 @@
exclude =
ignore =
E114 # indentation is not a multiple of four (comment)
E116 # unexpected indentation (comment)
E121 # continuation line under-indented for hanging indent
E122 # continuation line missing indentation or outdented
E123 # closing bracket does not match indentation of opening bracket's line
E124 # closing bracket does not match visual indentation
E125 # continuation line with same indent as next logical line
E126 # continuation line over-indented for hanging indent
E127 # continuation line over-indented for visual indent
E128 # continuation line under-indented for visual indent
E129 # visually indented line with same indent as next logical line
E131 # continuation line unaligned for hanging indent
E222 # multiple spaces after operator
E225 # missing whitespace around operator
E226 # missing whitespace around arithmetic operator
E228 # missing whitespace around modulo operator
E231 # missing whitespace after ','
E241 # multiple spaces after ','
E261 # at least two spaces before inline comment
E265 # block comment should start with '# '
E266 # too many leading '#' for block comment
E301 # expected 1 blank line
E302 # expected 2 blank lines, found 1
E305 # expected 2 blank lines after class or function definition
E402 # module level import not at top of file
E501 # line too long
E711 # comparison to None should be 'if cond is not None'
E712 # comparison to True should be 'if cond is True'
E713 # test for membership should be 'not in'
E722 # do not use bare 'except'
F401 # library imported but unused
F601 # dictionary key 'tags' repeated with different values
F811 # redefinition of unused 'datetime' from line 10
F821 # undefined name 'SysLogHandler'
F841 # local variable 'CIDR' is assigned to but never used
W503 # line break before binary operator
@@ -1,5 +1,6 @@
@@ -12,3 +13,4 @@ alerts/generic_alerts
@@ -1,17 +1,19 @@
language: python
- '2.7.11'
- ES_VERSION=5.6.7; curl -O https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-${ES_VERSION}.deb && sudo dpkg -i --force-confnew elasticsearch-${ES_VERSION}.deb && sudo service elasticsearch restart
- sudo ln -fs /usr/share/zoneinfo/UTC /etc/localtime
- sudo dpkg-reconfigure --frontend noninteractive tzdata
sudo: required
- rabbitmq
- docker
# Restrict push builds to only master
- master
# Fail immediately on any error
- set -e
- "pip install -r requirements.txt"
- "pip install -r tests/requirements_tests.txt"
- sleep 5
- export BOTO_CONFIG=/dev/null
# Build containers
# Choose nobuild if you prefer pulling existing images
- make build-tests
#- make nobuild-tests
- py.test --delete_indexes --delete_queues tests
- make test
# - make test-fast
187 Makefile
@@ -4,109 +4,84 @@
# Copyright (c) 2014 Mozilla Corporation

# usage:
# make single-build - build new single image from Dockerfile
# make single-build-no-cache - build new single image from Dockerfile from scratch
# make single-debug - debug run already created image by tag
# make single-run - run a single instance of MozDef
# make single-stop - stop a single instance of MozDef
# make single-rebuild - build, stop and run a new single instance of MozDef
# make multiple-build - build new mozdef environment in multiple containers
# make multiple-build-tests - build new mozdef environment for tests in multiple containers
# make multiple-build-no-cache - build new mozdef environment in multiple containers from scratch
# make multiple-run - run new mozdef environment in multiple containers
# make multiple-run-tests - run new mozdef environment for tests in multiple containers
# make multiple-stop - stop new mozdef environment in multiple containers
# make multiple-stop-tests - stop new mozdef environment for tests in multiple containers
# make multiple-rm - stop new mozdef environment in multiple containers and detach volumes
# make multiple-rm-tests - stop new mozdef tests environment in multiple containers and detach volumes
# make multiple-rebuild - build, stop and run new mozdef environment in multiple containers
# make multiple-rebuild-new - build, stop/rm and run new mozdef environment in multiple containers
# make multiple-rebuild-tests - build, stop/rm and run new mozdef environment for tests in multiple containers
# make multiple-rebuild-tests-new - build, stop/rm and run new mozdef environment for tests in multiple containers


docker build -f docker/Dockerfile -t $(NAME):$(VERSION) .

docker build -f docker/Dockerfile --no-cache -t $(NAME):$(VERSION) .

docker run \
-e TZ=UTC \
-p 80:80 \
-p 9090:9090 \
-p 8080:8080 \
-p 8081:8081 \
-p 9200:9200 \
-p 5672:5672 \
-v mozdef-elasticsearch:/var/lib/elasticsearch \
-v mozdef-mongodb:/var/lib/mongo \
-v mozdef-rabbitmq:/var/lib/rabbitmq \
-v mozdef-data:/opt/mozdef/envs/mozdef/data \
-h $(NAME) --name $(NAME) -d $(NAME):$(VERSION)

docker run \
-e TZ=UTC \
-p 80:80 \
-p 9090:9090 \
-p 8080:8080 \
-p 8081:8081 \
-p 3002:3002 \
-p 5672:5672 \
-p 15672:15672 \
-p 9200:9200 \
-v mozdef-elasticsearch:/var/lib/elasticsearch \
-v mozdef-mongodb:/var/lib/mongo \
-v mozdef-rabbitmq:/var/lib/rabbitmq \
-v mozdef-data:/opt/mozdef/envs/mozdef/data \
-h $(NAME) -t -i $(NAME):$(VERSION) /bin/bash

-docker rm -f $(NAME)

single-rebuild: single-build single-stop single-run

.PHONY: single-build single-build-no-cache single-run single-debug single-stop single-rebuild

docker-compose -f docker/compose/docker-compose.yml -p $(NAME) up -d

docker-compose -f docker/compose/docker-compose-tests.yml -p $(NAME) up -d --remove-orphans

docker-compose -f docker/compose/docker-compose.yml -p $(NAME) build

docker-compose -f docker/compose/docker-compose-tests.yml -p $(NAME) build

docker-compose -f docker/compose/docker-compose.yml -p $(NAME) build --no-cache

-docker-compose -f docker/compose/docker-compose.yml -p $(NAME) stop

-docker-compose -f docker/compose/docker-compose-tests.yml -p $(NAME) stop

-docker-compose -f docker/compose/docker-compose.yml -p $(NAME) down -v --remove-orphans

-docker-compose -f docker/compose/docker-compose-tests.yml -p $(NAME) down -v --remove-orphans

multiple-rebuild: multiple-build multiple-stop multiple-run

multiple-rebuild-new: multiple-build multiple-rm multiple-run

multiple-rebuild-tests: multiple-build-tests multiple-stop-tests multiple-run-tests

multiple-rebuild-tests-new: multiple-build-tests multiple-rm-tests multiple-run-tests

.PHONY: multiple-build multiple-run multiple-stop multiple-rebuild
ROOT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
DKR_IMAGES := mozdef_alertplugins mozdef_alerts mozdef_base mozdef_bootstrap mozdef_meteor mozdef_rest \
mozdef_mq_eventtask mozdef_loginput mozdef_cron mozdef_elasticsearch mozdef_mongodb \
mozdef_syslog mozdef_nginx mozdef_tester mozdef_rabbitmq mozdef_kibana
USE_DKR_IMAGES := docker/compose/docker-compose-rebuild.yml ## Pass docker/compose/docker-compose-norebuild.yml to use images
NAME := mozdef
VERSION := 0.1
NO_CACHE := ## Pass `--no-cache` in order to disable Docker cache
GITHASH := $(shell git rev-parse --short HEAD) ## Pass `latest` to tag docker hub images as latest instead

@echo 'Available make targets:'
@grep '^[^#[:space:]^\.PHONY.*].*:' Makefile

.PHONY: run run-only
run: build ## Run all MozDef containers
docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) up -d

docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) up -d

.PHONY: run-cloudy-mozdef restart-cloudy-mozdef
run-cloudy-mozdef: ## Run the MozDef containers necessary to run in AWS (`cloudy-mozdef`). This is used by the CloudFormation-initiated setup.
$(shell test -f docker/compose/cloudy_mozdef.env || touch docker/compose/cloudy_mozdef.env)
$(shell test -f docker/compose/cloudy_mozdef_kibana.env || touch docker/compose/cloudy_mozdef_kibana.env)
docker-compose -f docker/compose/docker-compose-cloudy-mozdef.yml -p $(NAME) pull
docker-compose -f docker/compose/docker-compose-cloudy-mozdef.yml -p $(NAME) up -d

docker-compose -f docker/compose/docker-compose-cloudy-mozdef.yml -p $(NAME) restart

# TODO? add custom test targets for individual tests (what used to be `multiple-tests` for example
# The docker files are still in docker/compose/docker*test*
.PHONY: test tests run-tests
test: build-tests run-tests ## Running tests from locally-built images
tests: build-tests run-tests

docker-compose -f $(USE_DKR_IMAGES) -f tests/docker-compose.yml -p $(NAME) up -d
@echo "Waiting for the instance to come up..."
sleep 10
@echo "Running flake8.."
docker run -it mozdef_tester bash -c "source /opt/mozdef/envs/python/bin/activate && flake8 --config .flake8 ./"
@echo "Running py.test..."
docker run -it --network=mozdef_default mozdef_tester bash -c "source /opt/mozdef/envs/python/bin/activate && py.test --delete_indexes --delete_queues tests"

.PHONY: build
build: ## Build local MozDef images (use make NO_CACHE=--no-cache build to disable caching)
docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) $(NO_CACHE) build base
docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) $(NO_CACHE) build

.PHONY: build-tests nobuild-tests
docker-compose -f $(USE_DKR_IMAGES) -f tests/docker-compose.yml -p $(NAME) $(NO_CACHE) build base
docker-compose -f $(USE_DKR_IMAGES) -f tests/docker-compose.yml -p $(NAME) $(NO_CACHE) build

.PHONY: stop down
stop: down
down: ## Shutdown all services we started with docker-compose
docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) stop

.PHONY: docker-push docker-get hub hub-get
docker-push: hub
hub: ## Upload locally built MozDef images tagged as the current git head (
docker login
@echo "Tagging current docker images with git HEAD shorthash..."
$(foreach var,$(DKR_IMAGES),docker tag $(var) mozdef/$(var):$(GITHASH);)
@echo "Uploading images to docker..."
$(foreach var,$(DKR_IMAGES),docker push mozdef/$(var):$(GITHASH);)

docker-get: hub-get
hub-get: ## Download all pre-built images (
$(foreach var,$(DKR_IMAGES),docker pull mozdef/$(var):$(GITHASH);)

.PHONY: clean
clean: ## Cleanup all docker volumes and shutdown all related services
-docker-compose -f $(USE_DKR_IMAGES) -f docker/compose/docker-compose.yml -p $(NAME) down -v --remove-orphans
# Shorthands
.PHONY: rebuild
rebuild: clean build
@@ -12,7 +12,7 @@ The Mozilla Defense Platform (MozDef) seeks to automate the security incident ha
## Goals:

* Provide a platform for use by defenders to rapidly discover and respond to security incidents.
* Automate interfaces to other systems like bunker, banhammer, mig
* Automate interfaces to other systems like bunker, cymon, mig
* Provide metrics for security events and incidents
* Facilitate real-time collaboration amongst incident handlers
* Facilitate repeatable, predictable processes for incident handling
@@ -18,8 +18,7 @@
from lib.alert_plugin_set import AlertPluginSet
from lib.config import ALERT_PLUGINS

sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib'))
from utilities.logger import logger, initLogger
from mozdef_util.utilities.logger import logger, initLogger

class alertConsumer(ConsumerMixin):
@@ -0,0 +1,2 @@
commands = command1,command2
@@ -0,0 +1,48 @@
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
from mozdef_util.query_models import SearchQuery, TermMatch

class AlertAuditdCommands(AlertTask):
def main(self):
self.parse_config('auditd_commands.conf', ['commands'])
search_query = SearchQuery(minutes=30)

auditd_match = TermMatch('category', 'auditd')
auditd_match |= TermMatch('tags', 'audit')

command_names_matcher = None
for name in self.config.commands.split(","):
if command_names_matcher is None:
command_names_matcher = TermMatch('details.processname', name)
command_names_matcher |= TermMatch('details.processname', name)



def onEvent(self, event):
category = 'auditd'
tags = ['auditd_command']
severity = 'WARNING'

user = event['_source']['details']['originaluser']
host = event['_source']['hostname']
command = event['_source']['details']['processname']
summary = "{user} on {host} executed {command}".format(

return self.createAlertDict(summary, category, tags, [event], severity)
@@ -6,7 +6,7 @@
# Copyright (c) 2014 Mozilla Corporation

from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch
from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch

class AlertSFTPEvent(AlertTask):
@@ -34,9 +34,9 @@ def onEvent(self, event):
username = 'unknown'
directory = 'unknown'
x = event['_source']
if 'hostname' in x:
srchost = x['hostname']
if 'details' in x:
if 'hostname' in x['details']:
srchost = x['details']['hostname']
if 'originaluser' in x['details']:
username = x['details']['originaluser']
if 'cwd' in x['details']:
@@ -6,7 +6,7 @@
# Copyright (c) 2017 Mozilla Corporation

from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch, PhraseMatch, TermsMatch
from mozdef_util.query_models import SearchQuery, TermMatch, PhraseMatch, TermsMatch

class AlertBruteforceSsh(AlertTask):
@@ -18,7 +18,7 @@ def main(self):
PhraseMatch('summary', 'failed'),
TermMatch('details.program', 'sshd'),
TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries'])
TermsMatch('summary', ['login', 'invalid', 'ldap_count_entries', 'publickey'])

for ip_address in self.config.skiphosts.split():
@@ -42,7 +42,7 @@ def onAggregation(self, aggreg):
severity = 'NOTICE'

summary = ('{0} ssh bruteforce attempts by {1}'.format(aggreg['count'], aggreg['value']))
hosts = self.mostCommon(aggreg['allevents'], '_source.details.hostname')
hosts = self.mostCommon(aggreg['allevents'], '_source.hostname')
for i in hosts[:5]:
summary += ' {0} ({1} hits)'.format(i[0], i[1])

Oops, something went wrong.

0 comments on commit 663fd76

Please sign in to comment.
You can’t perform that action at this time.