
Fixup block comments not having a space after hash

pwnbus committed Dec 14, 2018
1 parent 77e93f3 commit df84a1942d129e2d6c9d4cc4129a44a8c0a698f9
@@ -14,7 +14,6 @@ per-file-ignores =
 ignore =
     E225 # missing whitespace around operator
     E231 # missing whitespace after ','
-    E265 # block comment should start with '# '
     E402 # module level import not at top of file
     E501 # line too long
     E722 # do not use bare except'
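E265 is the pycodestyle rule that a block comment must start with '# ' (hash, then one space). With the comments fixed in the hunks below, the suppression can come out of the ignore list and flake8 enforces the rule from here on. A minimal illustration of what E265 flags:

    #flagged by E265: block comment with no space after the hash
    # passes E265: hash followed by a single space
    x = 1  # inline comments are covered by E261/E262 instead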
@@ -40,9 +40,9 @@ def onAggregation(self, aggreg):
         severity = 'INFO'

         summary = ('{0} fxa account creation attempts by {1}'.format(aggreg['count'], aggreg['value']))
-        emails = self.mostCommon(aggreg['allevents'],'_source.details.email')
-        #did they try to create more than one email account?
-        #or just retry an existing one
+        emails = self.mostCommon(aggreg['allevents'], '_source.details.email')
+        # did they try to create more than one email account?
+        # or just retry an existing one
         if len(emails) > 1:
             for i in emails[:5]:
                 summary += ' {0} ({1} hits)'.format(i[0], i[1])
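This hunk and the two that follow all call self.mostCommon(...) to rank a field's values across the aggregated events. The helper's implementation is not part of this diff; a plausible sketch using collections.Counter, where getValueByPath is a hypothetical dotted-path lookup and the real MozDef code may differ:

    from collections import Counter

    def getValueByPath(event, path):
        # walk a dotted path like '_source.details.email' into a nested dict
        # (illustrative helper; the real project may resolve paths differently)
        for key in path.split('.'):
            if not isinstance(event, dict) or key not in event:
                return None
            event = event[key]
        return event

    def mostCommon(events, path):
        # return [(value, count), ...] sorted by descending count,
        # mirroring how the alert summaries above consume the result
        values = (getValueByPath(e, path) for e in events)
        return Counter(v for v in values if v is not None).most_common()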
@@ -33,9 +33,9 @@ def onAggregation(self, aggreg):
         tags = ['audit']

         summary = ('{0} instances of Strace or Ptrace executed on a system by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
         if len(hostnames) > 1:
             for i in hostnames[:5]:
                 summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -34,9 +34,9 @@ def onAggregation(self, aggreg):
         tags = ['audit']

         summary = ('{0} Filesystem write(s) to an auditd path by {1}'.format(aggreg['count'], aggreg['value'], ))
-        hostnames = self.mostCommon(aggreg['allevents'],'_source.hostname')
-        #did they modify more than one host?
-        #or just modify an existing configuration more than once?
+        hostnames = self.mostCommon(aggreg['allevents'], '_source.hostname')
+        # did they modify more than one host?
+        # or just modify an existing configuration more than once?
        if len(hostnames) > 1:
            for i in hostnames[:5]:
                summary += ' on {0} ({1} hosts)'.format(i[0], i[1])
@@ -21,8 +21,8 @@

 httpsession = FuturesSession(max_workers=5)
 httpsession.trust_env=False # turns of needless .netrc check for creds
-#a = requests.adapters.HTTPAdapter(max_retries=2)
-#httpsession.mount('http://', a)
+# a = requests.adapters.HTTPAdapter(max_retries=2)
+# httpsession.mount('http://', a)


 logger = logging.getLogger(sys.argv[0])
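FuturesSession comes from the requests-futures package and wraps requests in a thread pool, so each HTTP call returns a future instead of blocking. A minimal sketch of the pattern this module relies on (the URL and payload are placeholders):

    from requests_futures.sessions import FuturesSession

    session = FuturesSession(max_workers=5)
    session.trust_env = False  # skip .netrc credential lookups, as the module above does

    future = session.post('http://localhost:8080/events', json={'msg': 'test'})
    response = future.result()  # blocks until the worker thread finishes
    print(response.status_code)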
@@ -32,8 +32,8 @@


 def postLogs(logcache):
-    #post logs asynchronously with requests workers and check on the results
-    #expects a queue object from the multiprocessing library
+    # post logs asynchronously with requests workers and check on the results
+    # expects a queue object from the multiprocessing library
     posts=[]
     try:
         while not logcache.empty():
@@ -50,10 +50,10 @@ def postLogs(logcache):
         try:
             if p.result().status_code >= 500:
                 logger.error("exception posting to %s %r [will retry]\n" % (url, p.result().status_code))
-                #try again later when the next message in forces other attempts at posting.
+                # try again later when the next message in forces other attempts at posting.
                 logcache.put(postdata)
         except ClosedPoolError as e:
-            #logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
+            # logger.fatal("Closed Pool Error exception posting to %s %r %r [will retry]\n"%(url,e,postdata))
             logcache.put(postdata)
         except Exception as e:
             logger.fatal("exception posting to %s %r %r [will not retry]\n" % (url, e, postdata))
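The retry logic above re-queues a payload only on 5xx responses or a closed connection pool, on the theory that those failures are transient while anything else would fail again. A condensed sketch of the same pattern, assuming posts holds (url, postdata, future) tuples like the surrounding code:

    from urllib3.exceptions import ClosedPoolError

    def check_results(posts, logcache, logger):
        # posts: list of (url, postdata, future) tuples from a FuturesSession batch
        for url, postdata, future in posts:
            try:
                if future.result().status_code >= 500:
                    logcache.put(postdata)  # server-side error: assume transient, retry later
            except ClosedPoolError:
                logcache.put(postdata)  # pool torn down mid-request: also safe to retry
            except Exception as e:
                # anything else is treated as permanent: log it and drop the payload
                logger.fatal("exception posting to %s %r %r [will not retry]\n" % (url, e, postdata))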
@@ -67,7 +67,7 @@ def postLogs(logcache):
     sh=logging.StreamHandler(sys.stdout)
     sh.setFormatter(formatter)
     logger.addHandler(sh)
-    #create a list of logs we can append json to and call for a post when we want.
+    # create a list of logs we can append json to and call for a post when we want.
     logcache=Queue()
     try:
         for i in range(0,10):
@@ -48,7 +48,7 @@ def loop(self):
         while not self._stop:
             now = time.time()
             if ((now - last) > self.interval):
-                #Add all the actions you want to do with bugzilla here ;)
+                # Add all the actions you want to do with bugzilla here ;)
                 self.bugzilla_search()
                 last = now
             time.sleep(1)
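The loop() method above is a simple poll-with-interval pattern: wake once a second, and only run the expensive work when the configured interval has elapsed. A standalone sketch of the same pattern (the interval default and stop behavior are illustrative):

    import time

    def poll_loop(task, interval=300):
        # run task() roughly every `interval` seconds, waking once a second
        # so an interrupt (e.g. KeyboardInterrupt) is honored promptly
        last = 0.0
        while True:
            now = time.time()
            if (now - last) > interval:
                task()
                last = now
            time.sleep(1)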
@@ -258,10 +258,10 @@ def searchMongoAlerts(mozdefdb):
         # summarize the alert categories
         # returns list of tuples: [(u'bruteforce', 8)]
         categoryCounts= mostCommon(matchingalerts,'category')
-        #are the alerts all the same category?
+        # are the alerts all the same category?

         if len(categoryCounts) == 1:
-            #is the alert category mapped to an attacker category?
+            # is the alert category mapped to an attacker category?
             for category in options.categorymapping:
                 if category.keys()[0] == categoryCounts[0][0]:
                     attacker['category'] = category[category.keys()[0]]
@@ -60,7 +60,7 @@ def readOUIFile(ouifilename):
     for i in ouifile.readlines()[0::]:
         i=i.strip()
         if '(hex)' in i:
-            #print(i)
+            # print(i)
             fields=i.split('\t')
             macprefix=fields[0][0:8].replace('-',':').lower()
             entity=fields[2]
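readOUIFile() walks the IEEE OUI registry, keeping only the '(hex)' lines that map a MAC prefix to a vendor. A sketch of the same parse on a single line, where the sample line mimics the tab-separated registry format assumed by the code above:

    # a registry '(hex)' line is tab-separated: prefix+label, empty field, vendor name
    line = "00-1A-2B   (hex)\t\tSOMEVENDOR CORP"
    if '(hex)' in line:
        fields = line.split('\t')
        macprefix = fields[0][0:8].replace('-', ':').lower()  # '00:1a:2b'
        entity = fields[2]  # 'SOMEVENDOR CORP'
        print(macprefix, entity)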
@@ -199,10 +199,10 @@ def initConfig():
     # for detailed information on delegating a service account for use in gathering google admin sdk reports
     #

-    #google's json credential file exported from the project/admin console
+    # google's json credential file exported from the project/admin console
     options.jsoncredentialfile=getConfig('jsoncredentialfile','/path/to/filename.json',options.configfile)

-    #email of admin to impersonate as a service account
+    # email of admin to impersonate as a service account
     options.impersonate = getConfig('impersonate', 'someone@yourcompany.com', options.configfile)


@@ -64,15 +64,15 @@ def main():
     logger.addHandler(sh)

     logger.debug('started')
-    #logger.debug(options)
+    # logger.debug(options)
     try:
         es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
         s = requests.Session()
         s.headers.update({'Accept': 'application/json'})
         s.headers.update({'Content-type': 'application/json'})
         s.headers.update({'Authorization': 'SSWS {0}'.format(options.apikey)})

-        #capture the time we start running so next time we catch any events created while we run.
+        # capture the time we start running so next time we catch any events created while we run.
         state = State(options.state_file)
         lastrun = toUTC(datetime.now()).isoformat()
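Recording lastrun before the fetch means any events created while the poller is running get picked up on the next pass instead of being skipped. A minimal sketch of that checkpoint pattern, assuming a JSON state file; this State class is illustrative, not the project's actual implementation:

    import json
    from datetime import datetime, timezone

    class State(object):
        # tiny JSON-backed checkpoint: remembers when the last poll started
        def __init__(self, filename):
            self.filename = filename

        def read_lastrun(self, default):
            try:
                with open(self.filename) as f:
                    return json.load(f).get('lastrun', default)
            except (IOError, ValueError):
                return default

        def write_lastrun(self, lastrun):
            with open(self.filename, 'w') as f:
                json.dump({'lastrun': lastrun}, f)

    # capture the start time first, then query from the previous checkpoint
    lastrun = datetime.now(timezone.utc).isoformat()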

@@ -115,7 +115,7 @@ def main():
     mdEvent.debug = True
     mdEvent.fire_and_forget_mode = False

-    #connect to mysql
+    # connect to mysql
     db=MySQLdb.connect(host=options.hostname, user=options.username,passwd=options.password,db=options.database)
     c=db.cursor(MySQLdb.cursors.DictCursor)

@@ -138,7 +138,7 @@ def main():
         duration = call['LeaveTime'] - call['JoinTime']
         call['CallDuration'] = duration.seconds

-        #fix up the data for json
+        # fix up the data for json
         for k in call.keys():
             # convert datetime objects to isoformat for json serialization
             if isinstance(call[k], datetime):
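datetime values are not JSON serializable, so the loop above rewrites them as ISO 8601 strings before the event is posted. A standalone sketch of the same conversion using json.dumps with a default hook (the sample call dict is illustrative):

    import json
    from datetime import datetime

    def isoformat_handler(obj):
        # called by json.dumps for any object it cannot serialize natively
        if isinstance(obj, datetime):
            return obj.isoformat()
        raise TypeError("unserializable type: %r" % type(obj))

    call = {'ConferenceName': 'standup', 'JoinTime': datetime(2018, 12, 14, 9, 30)}
    print(json.dumps(call, default=isoformat_handler))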