Skip to content

Commit

Permalink
Merge pull request #1 from hubblestack/develop
Browse files Browse the repository at this point in the history
Sync with head
  • Loading branch information
jettero committed Aug 18, 2017
2 parents f10735b + 23bab90 commit e790716
Show file tree
Hide file tree
Showing 54 changed files with 2,101 additions and 63 deletions.
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,10 @@ cd hubble/pkg
Package will be in the `hubble/pkg/dist/` directory. There's no guarantee of
glibc compatibility.

## Using released packages

Various pre-built packages targeting several popular operating systems can be found under [Releases](/hubblestack/hubble/releases).

# Usage

A config template has been placed in `/etc/hubble/hubble`. Modify it to your
Expand Down
6 changes: 6 additions & 0 deletions conf/hubble
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,12 @@

gitfs_remotes:
- https://github.com/hubblestack/hubblestack_data.git
## To use locally stored hubblestack_data, comment out the gitfs_remotes
## section above, then uncomment the following
# file_roots:
# base:
# - /srv/salt/hubblestack_data
## In the above example, hubblestack_data is cloned into /srv/salt/.
fileserver_backend:
- roots
- git
Expand Down
2 changes: 1 addition & 1 deletion hubblestack/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '2.1.7'
__version__ = '2.2.1'
48 changes: 46 additions & 2 deletions hubblestack/daemon.py
Original file line number Diff line number Diff line change
Expand Up @@ -308,14 +308,58 @@ def load_config():
elif __opts__['verbose'] >= 3:
__opts__['log_level'] = 'debug'

# Setup module dirs
# Setup module/grain/returner dirs
module_dirs = __opts__.get('module_dirs', [])
module_dirs.append(os.path.join(os.path.dirname(__file__), 'extmods'))
module_dirs.append(os.path.join(os.path.dirname(__file__), 'extmods', 'modules'))
__opts__['module_dirs'] = module_dirs
grains_dirs = __opts__.get('grains_dirs', [])
grains_dirs.append(os.path.join(os.path.dirname(__file__), 'extmods', 'grains'))
__opts__['grains_dirs'] = grains_dirs
returner_dirs = __opts__.get('returner_dirs', [])
returner_dirs.append(os.path.join(os.path.dirname(__file__), 'extmods', 'returners'))
__opts__['returner_dirs'] = returner_dirs
fileserver_dirs = __opts__.get('fileserver_dirs', [])
fileserver_dirs.append(os.path.join(os.path.dirname(__file__), 'extmods', 'fileserver'))
__opts__['fileserver_dirs'] = fileserver_dirs
__opts__['file_roots']['base'].insert(0, os.path.join(os.path.dirname(__file__), 'files'))
if 'roots' not in __opts__['fileserver_backend']:
__opts__['fileserver_backend'].append('roots')

# Disable all of salt's boto modules, they give nothing but trouble to the loader
disable_modules = __opts__.get('disable_modules', [])
disable_modules.extend([
'boto3_elasticache',
'boto3_route53',
'boto_apigateway',
'boto_asg',
'boto_cfn',
'boto_cloudtrail',
'boto_cloudwatch_event',
'boto_cloudwatch',
'boto_cognitoidentity',
'boto_datapipeline',
'boto_dynamodb',
'boto_ec2',
'boto_efs',
'boto_elasticache',
'boto_elasticsearch_domain',
'boto_elb',
'boto_elbv2',
'boto_iam',
'boto_iot',
'boto_kinesis',
'boto_kms',
'boto_lambda',
'boto_rds',
'boto_route53',
'boto_s3_bucket',
'boto_secgroup',
'boto_sns',
'boto_sqs',
'boto_vpc',
])
__opts__['disable_modules'] = disable_modules

# Setup logging
salt.log.setup.setup_console_logger(__opts__['log_level'])
salt.log.setup.setup_logfile_logger(__opts__['log_file'],
Expand Down
Empty file.
6 changes: 4 additions & 2 deletions hubblestack/extmods/grains/osqueryinfo.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-

import salt.utils
import salt.modules.cmdmod

__salt__ = { 'cmd.run': salt.modules.cmdmod._run_quiet }

Expand All @@ -14,13 +15,14 @@ def osquerygrain():
grains = {}
option = '--version'

osqueryipaths = ('osqueryi', '/usr/bin/osqueryi', '/opt/osquery/osqueryi')
# Prefer our /opt/osquery/osqueryi if present
osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi')
for path in osqueryipaths:
if salt.utils.which(path):
for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split():
if item[:1].isdigit():
grains['osqueryversion'] = item
grains['osquerybinpath'] = path
grains['osquerybinpath'] = salt.utils.which(path)
break
break
return grains
26 changes: 24 additions & 2 deletions hubblestack/extmods/modules/nebula_osquery.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def queries(query_group,
query_file = 'salt://hubblestack_nebula/hubblestack_nebula_win_queries.yaml'
else:
query_file = 'salt://hubblestack_nebula/hubblestack_nebula_queries.yaml'
if not salt.utils.which('osqueryi'):
if 'osquerybinpath' not in __grains__:
if query_group == 'day':
log.warning('osquery not installed on this host. Returning baseline data')
# Match the formatting of normal osquery results. Not super
Expand Down Expand Up @@ -170,7 +170,7 @@ def queries(query_group,
'result': True,
}

cmd = ['osqueryi', '--read_max', MAX_FILE_SIZE, '--json', query_sql]
cmd = [__grains__['osquerybinpath'], '--read_max', MAX_FILE_SIZE, '--json', query_sql]
res = __salt__['cmd.run_all'](cmd)
if res['retcode'] == 0:
query_ret['data'] = json.loads(res['stdout'])
Expand Down Expand Up @@ -198,6 +198,28 @@ def queries(query_group,
return ret


def fields(*args):
    '''
    Use config.get to retrieve custom data based on the keys in the `*args`
    list.

    Arguments:

    *args
        List of keys to retrieve
    '''
    # Look up every requested key via config.get, namespacing each result
    # under a 'custom_' prefix so it is recognizable in the returner output.
    gathered = {'custom_{0}'.format(key): __salt__['config.get'](key)
                for key in args}
    if not gathered:
        return []
    # Wrap the collected values in the structure nebula query results use,
    # so downstream returners can treat them like any other query output.
    return [{'custom_fields': {
        'data': [gathered],
        'result': True
    }}]


def version():
'''
Report version of this module
Expand Down
1 change: 1 addition & 0 deletions hubblestack/extmods/modules/nova_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -1911,6 +1911,7 @@ def _load_module(self, name):
data = yaml.safe_load(fh_)
except Exception as exc:
self.__missing_data__[name] = str(exc)
log.exception('Error loading yaml {0}'.format(fh_))
return False

self.__data__[name] = data
Expand Down
36 changes: 36 additions & 0 deletions hubblestack/extmods/modules/pulsar.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,3 +414,39 @@ def _dict_update(dest, upd, recursive_update=True, merge_lists=False):
for k in upd:
dest[k] = upd[k]
return dest


def top(topfile='salt://hubblestack_pulsar/top.pulsar',
        verbose=False):
    '''
    Resolve the given pulsar topfile into a list of pulsar config files and
    process them, returning the result of ``process()``.

    topfile
        Salt fileserver path of the topfile to evaluate.
    verbose
        Passed through to ``process()``.
    '''
    # Each topfile entry is dotted notation relative to the
    # hubblestack_pulsar directory; translate it to a yaml file path.
    yaml_paths = []
    for entry in get_top_data(topfile):
        yaml_paths.append('salt://hubblestack_pulsar/' + entry.replace('.', '/') + '.yaml')

    return process(yaml_paths, verbose=verbose)


def get_top_data(topfile):
    '''
    Cache and parse a pulsar topfile, returning the flattened list of targets
    from every ``pulsar`` entry whose compound match expression matches this
    minion.

    topfile
        Salt fileserver path of the topfile to fetch and parse.

    Raises CommandExecutionError if the file cannot be loaded as yaml or is
    not a mapping with a ``pulsar`` dict inside it.
    '''
    topfile = __salt__['cp.cache_file'](topfile)

    try:
        with open(topfile) as handle:
            topdata = yaml.safe_load(handle)
    except Exception as e:
        raise CommandExecutionError('Could not load topfile: {0}'.format(e))

    if not isinstance(topdata, dict) or 'pulsar' not in topdata or \
            not isinstance(topdata['pulsar'], dict):
        raise CommandExecutionError('Pulsar topfile not formatted correctly')

    topdata = topdata['pulsar']

    ret = []

    # Use .items() rather than the py2-only .iteritems() so this function
    # keeps working under Python 3; iteration behavior is identical.
    for match, data in topdata.items():
        if __salt__['match.compound'](match):
            ret.extend(data)

    return ret
37 changes: 37 additions & 0 deletions hubblestack/extmods/modules/win_pulsar.py
Original file line number Diff line number Diff line change
Expand Up @@ -564,3 +564,40 @@ def _dict_update(dest, upd, recursive_update=True, merge_lists=False):
for k in upd:
dest[k] = upd[k]
return dest


def top(topfile='salt://hubblestack_pulsar/win_top.pulsar',
        verbose=False):
    '''
    Resolve the given pulsar topfile into a list of pulsar config files and
    process them, returning the result of ``process()``.

    topfile
        Salt fileserver path of the topfile to evaluate.
    verbose
        Passed through to ``process()``.
    '''
    # Each topfile entry is dotted notation relative to the
    # hubblestack_pulsar directory; translate it to a yaml file path.
    yaml_paths = []
    for entry in get_top_data(topfile):
        yaml_paths.append('salt://hubblestack_pulsar/' + entry.replace('.', '/') + '.yaml')

    return process(yaml_paths, verbose=verbose)


def get_top_data(topfile):
    '''
    Cache and parse a pulsar topfile, returning the flattened list of targets
    from every ``pulsar`` entry whose compound match expression matches this
    minion.

    topfile
        Salt fileserver path of the topfile to fetch and parse.

    Raises CommandExecutionError if the file cannot be loaded as yaml or is
    not a mapping with a ``pulsar`` dict inside it.
    '''
    topfile = __salt__['cp.cache_file'](topfile)

    try:
        with open(topfile) as handle:
            topdata = yaml.safe_load(handle)
    except Exception as e:
        raise CommandExecutionError('Could not load topfile: {0}'.format(e))

    if not isinstance(topdata, dict) or 'pulsar' not in topdata or \
            not isinstance(topdata['pulsar'], dict):
        raise CommandExecutionError('Pulsar topfile not formatted correctly')

    topdata = topdata['pulsar']

    ret = []

    # Use .items() rather than the py2-only .iteritems() so this function
    # keeps working under Python 3; iteration behavior is identical.
    for match, data in topdata.items():
        if __salt__['match.compound'](match):
            ret.extend(data)

    return ret

18 changes: 18 additions & 0 deletions hubblestack/extmods/returners/splunk_nebula_return.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,14 @@ def returner(ret):
timeout = opts['timeout']
custom_fields = opts['custom_fields']

# Set up the fields to be extracted at index time. The field values must be strings.
# Note that these fields will also still be available in the event data
index_extracted_fields = ['aws_instance_id', 'aws_account_id', 'azure_vmId']
try:
index_extracted_fields.extend(opts['index_extracted_fields'])
except TypeError:
pass

# Set up the collector
hec = http_event_collector(http_event_collector_key, http_event_collector_host, http_event_port=http_event_collector_port, http_event_server_ssl=hec_ssl, proxy=proxy, timeout=timeout)

Expand Down Expand Up @@ -132,6 +140,14 @@ def returner(ret):
payload.update({'sourcetype': opts['sourcetype']})
payload.update({'event': event})

# Potentially add metadata fields:
fields = {}
for item in index_extracted_fields:
if item in payload['event'] and not isinstance(payload['event'][item], (list, dict, tuple)):
fields[item] = str(payload['event'][item])
if fields:
payload.update({'fields': fields})

# If the osquery query includes a field called 'time' it will be checked.
# If it's within the last year, it will be used as the eventtime.
event_time = query_result.get('time', '')
Expand Down Expand Up @@ -165,6 +181,7 @@ def _get_options():
processed['http_event_server_ssl'] = opt.get('hec_ssl', True)
processed['proxy'] = opt.get('proxy', {})
processed['timeout'] = opt.get('timeout', 9.05)
processed['index_extracted_fields'] = opt.get('index_extracted_fields', [])
splunk_opts.append(processed)
return splunk_opts
else:
Expand All @@ -182,6 +199,7 @@ def _get_options():
splunk_opts['http_event_server_ssl'] = hec_ssl
splunk_opts['proxy'] = __salt__['config.get']('hubblestack:nebula:returner:splunk:proxy', {})
splunk_opts['timeout'] = __salt__['config.get']('hubblestack:nebula:returner:splunk:timeout', 9.05)
splunk_opts['index_extracted_fields'] = __salt__['config.get']('hubblestack:nebula:returner:splunk:index_extracted_fields', [])

return [splunk_opts]

Expand Down
36 changes: 36 additions & 0 deletions hubblestack/extmods/returners/splunk_nova_return.py
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,13 @@ def returner(ret):
timeout = opts['timeout']
custom_fields = opts['custom_fields']

# Set up the fields to be extracted at index time. The field values must be strings.
# Note that these fields will also still be available in the event data
index_extracted_fields = ['aws_instance_id', 'aws_account_id', 'azure_vmId']
try:
index_extracted_fields.extend(opts['index_extracted_fields'])
except TypeError:
pass

# Set up the collector
hec = http_event_collector(http_event_collector_key, http_event_collector_host, http_event_port=http_event_collector_port, http_event_server_ssl=hec_ssl, proxy=proxy, timeout=timeout)
Expand Down Expand Up @@ -139,6 +146,15 @@ def returner(ret):
payload.update({'index': opts['index']})
payload.update({'sourcetype': opts['sourcetype']})
payload.update({'event': event})

# Potentially add metadata fields:
fields = {}
for item in index_extracted_fields:
if item in payload['event'] and not isinstance(payload['event'][item], (list, dict, tuple)):
fields[item] = str(payload['event'][item])
if fields:
payload.update({'fields': fields})

hec.batchEvent(payload)

for suc in data.get('Success', []):
Expand Down Expand Up @@ -175,6 +191,15 @@ def returner(ret):
payload.update({'sourcetype': opts['sourcetype']})
payload.update({'index': opts['index']})
payload.update({'event': event})

# Potentially add metadata fields:
fields = {}
for item in index_extracted_fields:
if item in payload['event'] and not isinstance(payload['event'][item], (list, dict, tuple)):
fields[item] = str(payload['event'][item])
if fields:
payload.update({'fields': fields})

hec.batchEvent(payload)

if data.get('Compliance', None):
Expand Down Expand Up @@ -203,6 +228,15 @@ def returner(ret):
payload.update({'sourcetype': opts['sourcetype']})
payload.update({'index': opts['index']})
payload.update({'event': event})

# Potentially add metadata fields:
fields = {}
for item in index_extracted_fields:
if item in payload['event'] and not isinstance(payload['event'][item], (list, dict, tuple)):
fields[item] = str(payload['event'][item])
if fields:
payload.update({'fields': fields})

hec.batchEvent(payload)

hec.flushBatch()
Expand Down Expand Up @@ -245,6 +279,7 @@ def _get_options():
processed['http_event_server_ssl'] = opt.get('hec_ssl', True)
processed['proxy'] = opt.get('proxy', {})
processed['timeout'] = opt.get('timeout', 9.05)
processed['index_extracted_fields'] = opt.get('index_extracted_fields', [])
splunk_opts.append(processed)
return splunk_opts
else:
Expand All @@ -262,6 +297,7 @@ def _get_options():
splunk_opts['http_event_server_ssl'] = hec_ssl
splunk_opts['proxy'] = __salt__['config.get']('hubblestack:nova:returner:splunk:proxy', {})
splunk_opts['timeout'] = __salt__['config.get']('hubblestack:nova:returner:splunk:timeout', 9.05)
splunk_opts['index_extracted_fields'] = __salt__['config.get']('hubblestack:nova:returner:splunk:index_extracted_fields', [])

return [splunk_opts]

Expand Down
Loading

0 comments on commit e790716

Please sign in to comment.