
Commit

Merge 6e697a7 into f0cc08d
jacknagz committed Aug 16, 2017
2 parents f0cc08d + 6e697a7 commit 2f28475
Showing 23 changed files with 450 additions and 229 deletions.
49 changes: 33 additions & 16 deletions conf/logs.json
@@ -691,7 +691,10 @@
"cloudtrail:events": {
"schema": {
"additionalEventData": {},
"apiVersion": "string",
"awsRegion": "string",
"errorCode": "string",
"errorMessage": "string",
"eventID": "string",
"eventName": "string",
"eventSource": "string",
@@ -704,28 +707,42 @@
"requestParameters": {},
"resources": [],
"responseElements": {},
"sharedEventID": "string",
"sourceIPAddress": "string",
"userAgent": "string",
"userIdentity": {}
},
"parser": "json"
"parser": "json",
"configuration": {
"json_path": "Records[*]",
"optional_top_level_keys": [
"additionalEventData",
"apiVersion",
"errorCode",
"errorMessage",
"readOnly",
"resources",
"sharedEventID"
]
}
},
"cloudtrail:api_events": {
"cloudtrail:digest": {
"schema": {
"awsRegion": "string",
"eventID": "string",
"eventName": "string",
"eventSource": "string",
"eventTime": "string",
"eventType": "string",
"eventVersion": "string",
"recipientAccountId": "string",
"requestID": "string",
"requestParameters": {},
"responseElements": {},
"sourceIPAddress": "string",
"userAgent": "string",
"userIdentity": {}
"awsAccountId": "string",
"digestStartTime": "string",
"digestEndTime": "string",
"digestS3Bucket": "string",
"digestS3Object": "string",
"digestPublicKeyFingerprint": "string",
"digestSignatureAlgorithm": "string",
"newestEventTime": "string",
"oldestEventTime": "string",
"previousDigestS3Bucket": "string",
"previousDigestS3Object": "string",
"previousDigestHashValue": "string",
"previousDigestHashAlgorithm": "string",
"previousDigestSignature": "string",
"logFiles": []
},
"parser": "json"
},
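For context on the cloudtrail:events change above: the new configuration block tells the JSON parser to pull each element of Records[*] out of the raw payload and to treat several top-level keys as optional. A minimal standalone sketch of that behavior follows (not StreamAlert's actual parser; the placeholder defaults are assumptions):

import json

# Keys the updated schema marks as optional, with assumed placeholder defaults
OPTIONAL_TOP_LEVEL_KEYS = {
    'additionalEventData': {},
    'apiVersion': '',
    'errorCode': '',
    'errorMessage': '',
    'readOnly': '',
    'resources': [],
    'sharedEventID': ''
}

def parse_cloudtrail_events(raw_payload):
    """Yield one CloudTrail record per Records[*] entry, backfilling optional keys."""
    envelope = json.loads(raw_payload)
    for record in envelope.get('Records', []):   # json_path: "Records[*]"
        for key, default in OPTIONAL_TOP_LEVEL_KEYS.items():
            record.setdefault(key, default)      # optional_top_level_keys
        yield record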
10 changes: 5 additions & 5 deletions conf/outputs.json
@@ -1,17 +1,17 @@
{
"aws-s3": {
"sample_bucket": "sample.bucket.name"
"sample-bucket": "sample.bucket.name"
},
"aws-lambda": {
"sample_lambda": "function-name:qualifier"
"sample-lambda": "function-name:qualifier"
},
"pagerduty": [
"sample_integration"
"sample-integration"
],
"phantom": [
"sample_integration"
"sample-integration"
],
"slack": [
"sample_channel"
"sample-channel"
]
}
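The renamed descriptors above are what rules reference as 'service:descriptor' strings in their outputs lists (see the rule changes below). A rough sketch of that convention, assuming the file lives at conf/outputs.json; the resolver itself is illustrative, not StreamAlert's dispatcher:

import json

def resolve_output(output_string, outputs_conf_path='conf/outputs.json'):
    """Split 'service:descriptor' and look the descriptor up in conf/outputs.json."""
    service, descriptor = output_string.split(':', 1)
    with open(outputs_conf_path) as conf_file:
        outputs_conf = json.load(conf_file)
    configured = outputs_conf.get(service, {})
    if isinstance(configured, dict):
        # e.g. 'aws-s3:sample-bucket' -> ('aws-s3', 'sample.bucket.name')
        return service, configured.get(descriptor)
    # list-style services (pagerduty, phantom, slack) only validate membership
    return service, descriptor if descriptor in configured else None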
8 changes: 2 additions & 6 deletions conf/sources.json
@@ -2,17 +2,13 @@
"kinesis": {
"prefix_cluster1_stream_alert_kinesis": {
"logs": [
"osquery",
"cloudwatch",
"cloudtrail"
"cloudwatch"
]
}
},
"s3": {
"example_s3_bucket": {
"prefix.cluster.sample.bucket": {
"logs": [
"carbonblack",
"osquery",
"cloudtrail"
]
}
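After this change the Kinesis stream entity only attempts the cloudwatch schema, and the renamed S3 bucket entity only attempts cloudtrail. An illustrative helper (assumed file layout, not StreamAlert's loader) showing how conf/sources.json scopes which schemas are tried per entity:

import json

def allowed_logs(service, entity, sources_path='conf/sources.json'):
    """Return the log names configured for a given service/entity pair."""
    with open(sources_path) as sources_file:
        sources = json.load(sources_file)
    return sources.get(service, {}).get(entity, {}).get('logs', [])

# allowed_logs('s3', 'prefix.cluster.sample.bucket')              -> ['cloudtrail']
# allowed_logs('kinesis', 'prefix_cluster1_stream_alert_kinesis') -> ['cloudwatch']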
@@ -5,12 +5,12 @@
disable = StreamRules.disable()


@rule(logs=['cloudtrail:api_events'],
@rule(logs=['cloudtrail:events'],
matchers=[],
outputs=['aws-s3:sample_bucket', 'aws-lambda:sample_lambda',
'pagerduty:sample_integration', 'phantom:sample_integration',
'slack:sample_channel'])
def cloudtrail_critical_api(rec):
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'])
def cloudtrail_critical_api_calls(rec):
"""
author: airbnb_csirt
description: Alert on AWS API calls that stop or delete security/infrastructure logs.
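The body of cloudtrail_critical_api_calls is collapsed above. Purely as a hypothetical illustration of the rule description ('API calls that stop or delete security/infrastructure logs'), a check of this shape would fit the cloudtrail:events schema; the event-name set below is a guess, not the rule's actual list:

# Hypothetical sketch only -- these event names are examples, not the rule's real set
_LOG_TAMPERING_CALLS = {
    'StopLogging',     # CloudTrail
    'DeleteTrail',     # CloudTrail
    'UpdateTrail',     # CloudTrail
    'DeleteFlowLogs'   # VPC Flow Logs
}

def looks_like_log_tampering(rec):
    return rec.get('eventName') in _LOG_TAMPERING_CALLS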
7 changes: 3 additions & 4 deletions rules/community/cloudtrail/cloudtrail_put_bucket_acl.py
@@ -1,14 +1,13 @@
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule
disable = StreamRules.disable()


@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample_bucket', 'aws-lambda:sample_lambda',
'pagerduty:sample_integration', 'phantom:sample_integration',
'slack:sample_channel'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['requestParameters', 'eventName']})
def cloudtrail_put_bucket_acl(rec):
"""
8 changes: 4 additions & 4 deletions rules/community/cloudtrail/cloudtrail_put_object_acl.py
@@ -1,13 +1,13 @@
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule
disable = StreamRules.disable()


@rule(logs=['cloudtrail:events'],
matchers=[],
outputs=['aws-s3:sample_bucket', 'aws-lambda:sample_lambda',
'pagerduty:sample_integration', 'phantom:sample_integration',
'slack:sample_channel'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'requestParameters': ['accessControlList']})
def cloudtrail_put_object_acl(rec):
"""
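Both ACL rules above rely on req_subkeys. The assumed semantics (sketched below, not StreamAlert's implementation) are that a record is only handed to the rule function when every listed subkey exists under the named top-level key:

def passes_req_subkeys(rec, req_subkeys):
    """Return True when each required subkey is present under its parent key."""
    return all(
        key in rec and all(subkey in rec[key] for subkey in subkeys)
        for key, subkeys in req_subkeys.items()
    )

passes_req_subkeys(
    {'requestParameters': {'accessControlList': {'grants': []}}},
    {'requestParameters': ['accessControlList']}
)  # -> True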
@@ -1,16 +1,15 @@
from stream_alert.rule_processor.rules_engine import StreamRules

rule = StreamRules.rule
disable = StreamRules.disable()


@rule(logs=['cloudwatch:events'],
matchers=[],
outputs=['aws-s3:sample_bucket', 'aws-lambda:sample_lambda',
'pagerduty:sample_integration', 'phantom:sample_integration',
'slack:sample_channel'],
outputs=['aws-s3:sample-bucket',
'pagerduty:sample-integration',
'slack:sample-channel'],
req_subkeys={'detail': ['userIdentity', 'eventType']})
def cloudtrail_root_account(rec):
def cloudtrail_root_account_usage(rec):
"""
author: airbnb_csirt
description: Root AWS credentials are being used;
@@ -20,8 +19,7 @@ def cloudtrail_root_account(rec):
playbook: (a) identify who is using the Root account
(b) ping the individual to determine if intentional and/or legitimate
"""

# see reference_1 for details
# reference_1 contains details on logic below
return (
rec['detail']['userIdentity']['type'] == 'Root' and
rec['detail']['userIdentity'].get('invokedBy') is None and
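For reference, a hypothetical CloudWatch Events record (heavily abridged, values invented) that satisfies the two conditions visible above; the rest of the rule's return expression is collapsed in this view:

sample_root_usage_event = {
    'detail-type': 'AWS API Call via CloudTrail',
    'detail': {
        'eventType': 'AwsApiCall',
        'eventName': 'CreateUser',
        'userIdentity': {
            'type': 'Root',                            # passes the Root check
            'arn': 'arn:aws:iam::123456789012:root'
            # no 'invokedBy' key, so .get('invokedBy') is None
        }
    }
}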
8 changes: 3 additions & 5 deletions stream_alert/alert_processor/outputs.py
@@ -36,8 +36,6 @@
# {cls.__service__: <cls>}
STREAM_OUTPUTS = {}

DEFAULT_RULE_DESCRIPTION = 'No rule description provided'

def output(cls):
"""Class decorator to register all stream outputs"""
STREAM_OUTPUTS[cls.__service__] = cls
@@ -105,7 +103,7 @@ def dispatch(self, **kwargs):
return self._log_status(False)

message = 'StreamAlert Rule Triggered - {}'.format(kwargs['rule_name'])
rule_desc = kwargs['alert']['rule_description'] or DEFAULT_RULE_DESCRIPTION
rule_desc = kwargs['alert']['rule_description']
details = {
'rule_description': rule_desc,
'record': kwargs['alert']['record']
@@ -213,7 +211,7 @@ def dispatch(self, **kwargs):
return self._log_status(False)

headers = {"ph-auth-token": creds['ph_auth_token']}
rule_desc = kwargs['alert']['rule_description'] or DEFAULT_RULE_DESCRIPTION
rule_desc = kwargs['alert']['rule_description']
container_id = self._setup_container(kwargs['rule_name'], rule_desc,
creds['url'], headers)

@@ -320,7 +318,7 @@ def _format_message(cls, rule_name, alert):
rule_desc = ''
# Only print the rule description on the first attachment
if index == 0:
rule_desc = alert['rule_description'] or DEFAULT_RULE_DESCRIPTION
rule_desc = alert['rule_description']
rule_desc = '*Rule Description:*\n{}\n'.format(rule_desc)

# Add this attachment to the full message array of attachments
2 changes: 1 addition & 1 deletion stream_alert/athena_partition_refresh/main.py
@@ -23,7 +23,7 @@
import backoff
import boto3

logging.basicConfig()
logging.basicConfig(format='%(name)s [%(levelname)s]: [%(module)s.%(funcName)s] %(message)s')
LEVEL = os.environ.get('LOGGER_LEVEL', 'INFO')
LOGGER = logging.getLogger('StreamAlertAthena')
LOGGER.setLevel(LEVEL.upper())
3 changes: 2 additions & 1 deletion stream_alert/rule_processor/__init__.py
@@ -8,7 +8,8 @@
if LEVEL.isdigit():
LEVEL = int(LEVEL)

logging.basicConfig()
logging.basicConfig(format='%(name)s [%(levelname)s]: [%(module)s.%(funcName)s] %(message)s')

LOGGER = logging.getLogger('StreamAlert')
try:
LOGGER.setLevel(LEVEL)
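Both files above now pass an explicit format string to logging.basicConfig, adding the logger name, level, module, and function to every line. A quick self-contained demonstration (the message and file name are made up):

import logging

logging.basicConfig(
    format='%(name)s [%(levelname)s]: [%(module)s.%(funcName)s] %(message)s')
LOGGER = logging.getLogger('StreamAlertAthena')
LOGGER.setLevel('INFO')

def refresh_partitions():
    LOGGER.info('added 2 partitions')

refresh_partitions()
# In a file named example.py this prints:
# StreamAlertAthena [INFO]: [example.refresh_partitions] added 2 partitions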
20 changes: 14 additions & 6 deletions stream_alert/rule_processor/classifier.py
@@ -13,6 +13,7 @@
See the License for the specific language governing permissions and
limitations under the License.
'''
import json
from collections import namedtuple, OrderedDict

from stream_alert.rule_processor import LOGGER
@@ -166,13 +167,14 @@ def _check_schema_match(schema_matches):
matches = []
for i, schema_match in enumerate(schema_matches):
log_patterns = schema_match.parser.options.get('log_patterns', {})
LOGGER.debug('Log patterns: %s', log_patterns)
if (all(schema_match.parser.matched_log_pattern(data, log_patterns)
for data in schema_match.parsed_data)):
matches.append(schema_matches[i])
else:
LOGGER.debug(
'Log pattern matching failed for schema: %s',
schema_match.root_schema)
'Log pattern matching failed for:\n%s',
json.dumps(schema_match.parsed_data, indent=2))

if matches:
if len(matches) > 1:
@@ -217,12 +219,15 @@ def _process_log_schemas(self, payload):
parser = parser_class(options)

# Get a list of parsed records
LOGGER.debug('Trying schema: %s', log_name)
parsed_data = parser.parse(schema, payload.pre_parsed_record)

LOGGER.debug('Schema: %s', schema)
if not parsed_data:
continue

LOGGER.debug('Parsed %d records with schema %s',
len(parsed_data), log_name)

if SUPPORT_MULTIPLE_SCHEMA_MATCHING:
schema_matches.append(schema_match(log_name, schema, parser, parsed_data))
continue
@@ -252,10 +257,13 @@ def _parse(self, payload):
if not schema_matches:
return False

LOGGER.debug('Schema Matched Records:\n%s', json.dumps(
[schema_match.parsed_data for schema_match in schema_matches], indent=2))

schema_match = self._check_schema_match(schema_matches)

LOGGER.debug('Log name: %s', schema_match.log_name)
LOGGER.debug('Parsed data: %s', schema_match.parsed_data)
LOGGER.debug('Parsed data:\n%s', json.dumps(schema_match.parsed_data, indent=2))

for parsed_data_value in schema_match.parsed_data:
# Convert data types per the schema
@@ -288,7 +296,6 @@ def _convert_type(cls, payload, schema):
Returns:
parsed dict payload with typed values
"""
# check for list types here
for key, value in schema.iteritems():
key = str(key)
# if the schema value is declared as string
@@ -321,7 +328,8 @@ def _convert_type(cls, payload, schema):

# Skip the values for the 'streamalert:envelope_keys' key that we've
# added during parsing if they do not conform to being a dict
if key == 'streamalert:envelope_keys' and not isinstance(payload[key], dict):
if key == 'streamalert:envelope_keys' and not isinstance(
payload[key], dict):
continue

cls._convert_type(payload[key], schema[key])
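For context on the debug statements added above: judging from the construction call schema_match(log_name, schema, parser, parsed_data) and the attributes accessed in this diff, the match object appears to be a namedtuple along these lines (field names inferred, not copied from the source):

from collections import namedtuple

# Inferred shape -- the actual definition sits outside the visible hunks
SchemaMatch = namedtuple(
    'SchemaMatch', ['log_name', 'root_schema', 'parser', 'parsed_data'])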
3 changes: 2 additions & 1 deletion stream_alert/rule_processor/handler.py
@@ -155,7 +155,8 @@ def _process_alerts(self, payload):
self._failed_record_count += 1
continue

LOGGER.debug('Payload: %s', record)
LOGGER.debug('Classified and Parsed Payload: <Valid: %s, Log Source: %s, Entity: %s>',
record.valid, record.log_source, record.entity)

record_alerts = StreamRules.process(record)

