YAPF run 2 (#299)

* yapf run 2
aarontp authored and Onager committed Nov 14, 2018
1 parent 013e00f commit 8b3b923464e59cbd527a71684c025cc120d2e1de
Showing with 334 additions and 473 deletions.
  1. +5 −0 turbinia/config/style.yapf → .style.yapf
  2. +3 −4 turbinia/celery.py
  3. +23 −54 turbinia/client.py
  4. +4 −4 turbinia/client_test.py
  5. +5 −5 turbinia/config/__init__.py
  6. +0 −3 turbinia/config/turbinia_config.py
  7. +9 −6 turbinia/evidence.py
  8. +1 −1 turbinia/jobs/hadoop.py
  9. +9 −7 turbinia/jobs/http_access_logs.py
  10. +2 −2 turbinia/jobs/jenkins.py
  11. +4 −6 turbinia/jobs/manager.py
  12. +6 −11 turbinia/jobs/manager_test.py
  13. +3 −2 turbinia/jobs/plaso.py
  14. +0 −2 turbinia/jobs/psort.py
  15. +4 −2 turbinia/jobs/sshd.py
  16. +0 −2 turbinia/jobs/strings.py
  17. +4 −2 turbinia/jobs/tomcat.py
  18. +0 −1 turbinia/jobs/worker_stat.py
  19. +16 −15 turbinia/lib/google_cloud.py
  20. +23 −33 turbinia/lib/libcloudforensics.py
  21. +1 −3 turbinia/lib/utils.py
  22. +2 −2 turbinia/message.py
  23. +25 −22 turbinia/output_manager.py
  24. +13 −11 turbinia/processors/google_cloud.py
  25. +6 −5 turbinia/processors/mount_local.py
  26. +8 −8 turbinia/pubsub.py
  27. +1 −2 turbinia/pubsub_test.py
  28. +10 −8 turbinia/state_manager.py
  29. +1 −2 turbinia/state_manager_test.py
  30. +4 −6 turbinia/task_manager.py
  31. +88 −171 turbinia/turbiniactl.py
  32. +22 −37 turbinia/workers/__init__.py
  33. +5 −6 turbinia/workers/analysis/jenkins.py
  34. +1 −2 turbinia/workers/analysis/jenkins_test.py
  35. +3 −2 turbinia/workers/analysis/wordpress.py
  36. +1 −1 turbinia/workers/artifact.py
  37. +2 −4 turbinia/workers/hadoop.py
  38. +3 −2 turbinia/workers/plaso.py
  39. +3 −2 turbinia/workers/psort.py
  40. +6 −7 turbinia/workers/tomcat.py
  41. +8 −8 turbinia/workers/workers_test.py
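
To preview what a run like this would change without editing files in place, the same style file can be used with yapf's diff mode (these are standard yapf flags; this command is illustrative, not part of the commit). Swapping -d for -i, as in the comment at the top of .style.yapf below, applies the changes in place:

    yapf -d -r --style .style.yapf ./turbinia/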
turbinia/config/style.yapf → .style.yapf
@@ -1,6 +1,11 @@
 #
 # To run yapf for this project, invoke as such from the base directory:
 # yapf -i -r --style .style.yapf ./turbinia/
 #
 [style]
 based_on_style = chromium
+COALESCE_BRACKETS = True
+SPLIT_BEFORE_FIRST_ARGUMENT = True
+SPLIT_PENALTY_AFTER_OPENING_BRACKET = 0
+SPLIT_PENALTY_FOR_ADDED_LINE_SPLIT = 30
+SPLIT_BEFORE_NAMED_ASSIGNS = False
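
These overrides account for most of the churn in this diff: based_on_style = chromium gives 2-space indents, while SPLIT_BEFORE_FIRST_ARGUMENT together with SPLIT_PENALTY_AFTER_OPENING_BRACKET = 0 makes yapf break immediately after an opening bracket instead of aligning continuation lines under the first argument. A minimal before/after sketch of that behavior (hypothetical names, not code from this commit):

    # Before: continuation lines aligned under the first argument.
    result = some_function(first_argument,
                           second_argument,
                           third_keyword=None)

    # After: split straight after the bracket; arguments packed per line.
    result = some_function(
        first_argument, second_argument, third_keyword=None)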
turbinia/celery.py
@@ -68,20 +68,19 @@ def _fexec(self):
     Returns:
       function: the fexec() function.
     """
 
     @self.app.task(name='fexec')
     def fexec(f, *args, **kwargs):
       """Lets us pass in an arbitrary function without Celery annotations"""
       return f(*args, **kwargs)
 
     return fexec
 
   def setup(self):
     """Set up Celery"""
     config.LoadConfig()
     self.app = celery.Celery(
-        'turbinia',
-        broker=config.CELERY_BROKER,
-        backend=config.CELERY_BACKEND
-    )
+        'turbinia', broker=config.CELERY_BROKER, backend=config.CELERY_BACKEND)
     self.app.conf.update(
         task_default_queue=config.INSTANCE_ID,
         # TODO(ericzinnikas): pickle is not secure, we need to replace it with
turbinia/client.py
@@ -42,7 +42,6 @@
 from turbinia.workers.tomcat import TomcatAnalysisTask
 from turbinia.workers.worker_stat import StatTask
 
-
 # TODO(aarontp): Remove this map after
 # https://github.com/google/turbinia/issues/278 is fixed.
 TASK_MAP = {
@@ -60,7 +59,6 @@
     'stattask': StatTask,
 }
 
-
 config.LoadConfig()
 if config.TASK_MANAGER == 'PSQ':
   import psq
@@ -139,7 +137,6 @@ def create_task(self, task_name):
       raise TurbiniaException('No Task named {0:s} found'.format(task_name))
     return task_obj()
 
-
   def list_jobs(self):
     """List the available jobs."""
     # TODO(aarontp): Refactor this out so that we don't need to depend on
@@ -148,13 +145,9 @@ def list_jobs(self):
     for job in self.task_manager.jobs:
       log.info('\t{0:s}'.format(job.name))
 
-  def wait_for_request(self,
-                       instance,
-                       project,
-                       region,
-                       request_id=None,
-                       user=None,
-                       poll_interval=60):
+  def wait_for_request(
+      self, instance, project, region, request_id=None, user=None,
+      poll_interval=60):
     """Polls and waits for Turbinia Request to complete.
 
     Args:
@@ -187,15 +180,9 @@ def wait_for_request(self,
 
     log.info('All {0:d} Tasks completed'.format(len(task_results)))
 
-  def get_task_data(self,
-                    instance,
-                    project,
-                    region,
-                    days=0,
-                    task_id=None,
-                    request_id=None,
-                    user=None,
-                    function_name='gettasks'):
+  def get_task_data(
+      self, instance, project, region, days=0, task_id=None, request_id=None,
+      user=None, function_name='gettasks'):
     """Gets task data from Google Cloud Functions.
 
     Args:
@@ -248,15 +235,9 @@ def get_task_data(self,
 
     return results[0]
 
-  def format_task_status(self,
-                         instance,
-                         project,
-                         region,
-                         days=0,
-                         task_id=None,
-                         request_id=None,
-                         user=None,
-                         all_fields=False):
+  def format_task_status(
+      self, instance, project, region, days=0, task_id=None, request_id=None,
+      user=None, all_fields=False):
     """Formats the recent history for Turbinia Tasks.
 
     Args:
@@ -274,8 +255,8 @@ def format_task_status(self,
     Returns:
       String of task status
     """
-    task_results = self.get_task_data(instance, project, region, days, task_id,
-                                      request_id, user)
+    task_results = self.get_task_data(
+        instance, project, region, days, task_id, request_id, user)
     num_results = len(task_results)
     results = []
     if not num_results:
@@ -295,17 +276,18 @@ def format_task_status(self,
       status = task.get('status', 'No task status')
       if all_fields:
         results.append(
-            '{0:s} request: {1:s} task: {2:s} {3:s} {4:s} {5:s} {6:s}: {7:s}'.
-            format(
+            '{0:s} request: {1:s} task: {2:s} {3:s} {4:s} {5:s} {6:s}: {7:s}'
+            .format(
                 task.get('last_update'), task.get('request_id'), task.get('id'),
                 task.get('name'), task.get('user'), task.get('worker_name'),
                 success, status))
         saved_paths = task.get('saved_paths', [])
         for path in saved_paths:
           results.append('\t{0:s}'.format(path))
       else:
-        results.append('{0:s} {1:s} {2:s}: {3:s}'.format(
-            task.get('last_update'), task.get('name'), success, status))
+        results.append(
+            '{0:s} {1:s} {2:s}: {3:s}'.format(
+                task.get('last_update'), task.get('name'), success, status))
 
     return '\n'.join(results)
 
@@ -337,14 +319,9 @@ def send_request(self, request):
     """
     self.task_manager.server_pubsub.send_request(request)
 
-  def close_tasks(self,
-                  instance,
-                  project,
-                  region,
-                  request_id=None,
-                  task_id=None,
-                  user=None,
-                  requester=None):
+  def close_tasks(
+      self, instance, project, region, request_id=None, task_id=None, user=None,
+      requester=None):
     """Close Turbinia Tasks based on Request ID.
 
     Args:
@@ -394,14 +371,9 @@ def send_request(self, request):
     self.task_manager.kombu.send_request(request)
 
   # pylint: disable=arguments-differ
-  def get_task_data(self,
-                    instance,
-                    _,
-                    __,
-                    days=0,
-                    task_id=None,
-                    request_id=None,
-                    function_name=None):
+  def get_task_data(
+      self, instance, _, __, days=0, task_id=None, request_id=None,
+      function_name=None):
     """Gets task data from Redis.
 
     We keep the same function signature, but ignore arguments passed for GCP.
@@ -483,10 +455,7 @@ def __init__(self, *_, **__):
     datastore_client = datastore.Client(project=config.PROJECT)
     try:
       self.psq = psq.Queue(
-          psq_publisher,
-          psq_subscriber,
-          config.PROJECT,
-          name=config.PSQ_TOPIC,
+          psq_publisher, psq_subscriber, config.PROJECT, name=config.PSQ_TOPIC,
           storage=psq.DatastoreStorage(datastore_client))
     except exceptions.GoogleCloudError as e:
       msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
turbinia/client_test.py
@@ -60,8 +60,8 @@ def testTurbiniaClientGetTaskDataNoResults(self, _, __, mock_cloud_function):
     """Test for exception after empty results from cloud functions."""
     mock_cloud_function.return_value = {}
     client = TurbiniaClient()
-    self.assertRaises(TurbiniaException,
-                      client.get_task_data, "inst", "proj", "reg")
+    self.assertRaises(
+        TurbiniaException, client.get_task_data, "inst", "proj", "reg")
 
   @mock.patch('turbinia.client.GoogleCloudFunction.ExecuteFunction')
   @mock.patch('turbinia.client.task_manager.PSQTaskManager._backend_setup')
@@ -71,8 +71,8 @@ def testTurbiniaClientGetTaskDataInvalidJson(
     """Test for exception after bad json results from cloud functions."""
     mock_cloud_function.return_value = {'result': None}
     client = TurbiniaClient()
-    self.assertRaises(TurbiniaException,
-                      client.get_task_data, "inst", "proj", "reg")
+    self.assertRaises(
+        TurbiniaException, client.get_task_data, "inst", "proj", "reg")
 
 
 class TestTurbiniaServer(unittest.TestCase):
turbinia/config/__init__.py
@@ -29,9 +29,9 @@
 # Look in homedir first, then /etc/turbinia, and finally in the source
 # config dir for config files
 CONFIGPATH = [
-    os.path.expanduser('~'),
-    '/etc/turbinia',
-    os.path.dirname(os.path.abspath(__file__))]
+    os.path.expanduser('~'), '/etc/turbinia',
+    os.path.dirname(os.path.abspath(__file__))
+]
 # Config vars that we expect to exist in the configuration
 CONFIGVARS = [
     # Turbinia Config
@@ -65,7 +65,8 @@
     'CELERY_BACKEND',
     'KOMBU_BROKER',
     'KOMBU_CHANNEL',
-    'KOMBU_DURABLE',]
+    'KOMBU_DURABLE',
+]
 # Environment variable to look for path data in
 ENVCONFIGVAR = 'TURBINIA_CONFIG_PATH'
 
@@ -92,7 +93,6 @@ def LoadConfig():
   if ENVCONFIGVAR in os.environ:
     configpath = os.environ[ENVCONFIGVAR].split(':')
 
-
   config_file = None
   # Load first file found
   for _dir, _file in itertools.product(configpath, CONFIGFILES):
turbinia/config/turbinia_config.py
@@ -16,7 +16,6 @@
 
 from __future__ import unicode_literals
 
-
 # Turbinia Role as 'server' or 'psqworker'
 ROLE = 'server'
 
@@ -69,7 +68,6 @@
 # problems.
 DEBUG_TASKS = False
 
-
 ###############################
 # Google Cloud Platform (GCP) #
 ###############################
@@ -106,7 +104,6 @@
 # Which state manager to use
 STATE_MANAGER = 'Datastore'
 
-
 ##########
 # CELERY #
 ##########
turbinia/evidence.py
@@ -30,6 +30,7 @@
 if config.TASK_MANAGER == 'PSQ':
   from turbinia.processors import google_cloud
 
+
 def evidence_decode(evidence_dict):
   """Decode JSON into appropriate Evidence object.
@@ -92,8 +93,9 @@ class Evidence(object):
     request_id: The id of the request this evidence came from, if any
   """
 
-  def __init__(self, name=None, description=None, source=None, local_path=None,
-               tags=None, request_id=None):
+  def __init__(
+      self, name=None, description=None, source=None, local_path=None,
+      tags=None, request_id=None):
     """Initialization for Evidence."""
     self.copyable = False
     self.config = {}
@@ -173,8 +175,8 @@ class RawDisk(Evidence):
     size: The size of the disk in bytes.
   """
 
-  def __init__(self, mount_path=None, mount_partition=None, size=None, *args,
-               **kwargs):
+  def __init__(
+      self, mount_path=None, mount_partition=None, size=None, *args, **kwargs):
     """Initialization for raw disk evidence object."""
     self.loopdevice_path = None
     self.mount_path = mount_path
@@ -192,8 +194,9 @@ class EncryptedDisk(RawDisk):
     unencrypted_path: A string to the unencrypted local path
   """
 
-  def __init__(self, encryption_type=None, encryption_key=None,
-               unencrypted_path=None, *args, **kwargs):
+  def __init__(
+      self, encryption_type=None, encryption_key=None, unencrypted_path=None,
+      *args, **kwargs):
     """Initialization for Encrypted disk evidence objects."""
     # TODO(aarontp): Make this an enum, or limited list
     self.encryption_type = encryption_type
turbinia/jobs/hadoop.py
@@ -33,7 +33,6 @@ class HadoopAnalysisJob(interface.TurbiniaJob):
 
   NAME = 'HadoopAnalysisJob'
 
-
   def create_tasks(self, evidence):
     """Create task.
@@ -46,4 +45,5 @@ def create_tasks(self, evidence):
     tasks = [HadoopAnalysisTask() for _ in evidence]
     return tasks
 
+
 manager.JobsManager.RegisterJob(HadoopAnalysisJob)
turbinia/jobs/http_access_logs.py
@@ -28,22 +28,21 @@
 from turbinia.workers.analysis import wordpress
 
 ACCESS_LOG_ARTIFACTS = [
-    'DockerContainerLogs',
-    'NginxAccessLogs',
-    'ApacheAccessLogs'
+    'DockerContainerLogs', 'NginxAccessLogs', 'ApacheAccessLogs'
 ]
 
 
 class HTTPAccessLogExtractionJob(interface.TurbiniaJob):
   """HTTP Access log extraction job."""
 
   evidence_input = [
-      Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded]
+      Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded
+  ]
 
   evidence_output = [ExportedFileArtifact]
 
   NAME = 'HTTPAccessLogExtractionJob'
 
-
   def create_tasks(self, evidence):
     """Create task.
@@ -55,10 +54,12 @@ def create_tasks(self, evidence):
     """
     tasks = []
     for artifact_name in ACCESS_LOG_ARTIFACTS:
-      tasks.extend([artifact.FileArtifactExtractionTask(artifact_name) for _
-                    in evidence])
+      tasks.extend([
+          artifact.FileArtifactExtractionTask(artifact_name) for _ in evidence
+      ])
     return tasks
 
+
 class HTTPAccessLogAnalysisJob(interface.TurbiniaJob):
   """HTTP Access log analysis job."""
 
@@ -77,5 +78,6 @@ def create_tasks(self, evidence):
     evidence = [e for e in evidence if e.artifact_name in ACCESS_LOG_ARTIFACTS]
     return [wordpress.WordpressAccessLogAnalysisTask() for _ in evidence]
 
+
 manager.JobsManager.RegisterJobs(
     [HTTPAccessLogExtractionJob, HTTPAccessLogAnalysisJob])
turbinia/jobs/jenkins.py
@@ -29,12 +29,12 @@ class JenkinsAnalysisJob(interface.TurbiniaJob):
   """Jenkins analysis job."""
 
   evidence_input = [
-      Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded]
+      Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded
+  ]
   evidence_output = [ReportText]
 
   NAME = 'JenkinsAnalysisJob'
 
-
   def create_tasks(self, evidence):
     """Create task for Jenkins analysis job.
 