YAPF run 2 (#299)
* yapf run 2
aarontp authored and Onager committed Nov 14, 2018
1 parent 013e00f commit 8b3b923
Showing 41 changed files with 334 additions and 473 deletions.
5 changes: 5 additions & 0 deletions turbinia/config/style.yapf → .style.yapf
@@ -1,6 +1,11 @@
#
# To run yapf for this project, invoke as such from the base directory:
# yapf -i -r --style .style.yapf ./turbinia/
#
[style]
based_on_style = chromium
COALESCE_BRACKETS = True
SPLIT_BEFORE_FIRST_ARGUMENT = True
SPLIT_PENALTY_AFTER_OPENING_BRACKET = 0
SPLIT_PENALTY_FOR_ADDED_LINE_SPLIT = 30
SPLIT_BEFORE_NAMED_ASSIGNS = False
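
Taken together, these knobs produce most of the churn in this commit: when a call or signature no longer fits on one line, yapf now breaks immediately after the opening bracket and packs the arguments into dense continuation lines. A rough before/after sketch (the function and argument names are hypothetical, not from this repository):

# Previous formatting in this repo: arguments aligned under the opening bracket.
copy_artifact(source_path,
              destination_path=dest,
              preserve_times=True)

# Under the style above: SPLIT_BEFORE_FIRST_ARGUMENT drops all arguments
# below the opening bracket, the zero SPLIT_PENALTY_AFTER_OPENING_BRACKET
# makes that break cheap, and SPLIT_BEFORE_NAMED_ASSIGNS = False lets
# keyword arguments share continuation lines.
copy_artifact(
    source_path, destination_path=dest, preserve_times=True)
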
7 changes: 3 additions & 4 deletions turbinia/celery.py
@@ -68,20 +68,19 @@ def _fexec(self):
Returns:
function: the fexec() function.
"""

@self.app.task(name='fexec')
def fexec(f, *args, **kwargs):
"""Lets us pass in an arbitrary function without Celery annotations"""
return f(*args, **kwargs)

return fexec

def setup(self):
"""Set up Celery"""
config.LoadConfig()
self.app = celery.Celery(
'turbinia',
broker=config.CELERY_BROKER,
backend=config.CELERY_BACKEND
)
'turbinia', broker=config.CELERY_BROKER, backend=config.CELERY_BACKEND)
self.app.conf.update(
task_default_queue=config.INSTANCE_ID,
# TODO(ericzinnikas): pickle is not secure, we need to replace it with
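
The fexec() task above exists so callers can run an arbitrary function through Celery without decorating it at import time. A minimal usage sketch, assuming the enclosing class is instantiated and set up as shown (the class name TurbiniaCelery is an assumption, not confirmed by this diff):

import os

celery_client = TurbiniaCelery()  # hypothetical name of the enclosing class
celery_client.setup()             # builds self.app from the loaded config
fexec = celery_client._fexec()    # registers the generic 'fexec' task
# Ship any plain callable plus its arguments to a worker:
async_result = fexec.delay(os.path.getsize, '/evidence/disk.dd')
print(async_result.get(timeout=60))  # blocks until the worker returns
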
77 changes: 23 additions & 54 deletions turbinia/client.py
@@ -42,7 +42,6 @@
from turbinia.workers.tomcat import TomcatAnalysisTask
from turbinia.workers.worker_stat import StatTask


# TODO(aarontp): Remove this map after
# https://github.com/google/turbinia/issues/278 is fixed.
TASK_MAP = {
@@ -60,7 +59,6 @@
'stattask': StatTask,
}


config.LoadConfig()
if config.TASK_MANAGER == 'PSQ':
import psq
@@ -139,7 +137,6 @@ def create_task(self, task_name):
raise TurbiniaException('No Task named {0:s} found'.format(task_name))
return task_obj()


def list_jobs(self):
"""List the available jobs."""
# TODO(aarontp): Refactor this out so that we don't need to depend on
@@ -148,13 +145,9 @@ def list_jobs(self):
for job in self.task_manager.jobs:
log.info('\t{0:s}'.format(job.name))

def wait_for_request(self,
instance,
project,
region,
request_id=None,
user=None,
poll_interval=60):
def wait_for_request(
self, instance, project, region, request_id=None, user=None,
poll_interval=60):
"""Polls and waits for Turbinia Request to complete.
Args:
@@ -187,15 +180,9 @@

log.info('All {0:d} Tasks completed'.format(len(task_results)))

def get_task_data(self,
instance,
project,
region,
days=0,
task_id=None,
request_id=None,
user=None,
function_name='gettasks'):
def get_task_data(
self, instance, project, region, days=0, task_id=None, request_id=None,
user=None, function_name='gettasks'):
"""Gets task data from Google Cloud Functions.
Args:
@@ -248,15 +235,9 @@ def get_task_data(self,

return results[0]

def format_task_status(self,
instance,
project,
region,
days=0,
task_id=None,
request_id=None,
user=None,
all_fields=False):
def format_task_status(
self, instance, project, region, days=0, task_id=None, request_id=None,
user=None, all_fields=False):
"""Formats the recent history for Turbinia Tasks.
Args:
@@ -274,8 +255,8 @@ def format_task_status(self,
Returns:
String of task status
"""
task_results = self.get_task_data(instance, project, region, days, task_id,
request_id, user)
task_results = self.get_task_data(
instance, project, region, days, task_id, request_id, user)
num_results = len(task_results)
results = []
if not num_results:
@@ -295,17 +276,18 @@
status = task.get('status', 'No task status')
if all_fields:
results.append(
'{0:s} request: {1:s} task: {2:s} {3:s} {4:s} {5:s} {6:s}: {7:s}'.
format(
'{0:s} request: {1:s} task: {2:s} {3:s} {4:s} {5:s} {6:s}: {7:s}'
.format(
task.get('last_update'), task.get('request_id'), task.get('id'),
task.get('name'), task.get('user'), task.get('worker_name'),
success, status))
saved_paths = task.get('saved_paths', [])
for path in saved_paths:
results.append('\t{0:s}'.format(path))
else:
results.append('{0:s} {1:s} {2:s}: {3:s}'.format(
task.get('last_update'), task.get('name'), success, status))
results.append(
'{0:s} {1:s} {2:s}: {3:s}'.format(
task.get('last_update'), task.get('name'), success, status))

return '\n'.join(results)

@@ -337,14 +319,9 @@ def send_request(self, request):
"""
self.task_manager.server_pubsub.send_request(request)

def close_tasks(self,
instance,
project,
region,
request_id=None,
task_id=None,
user=None,
requester=None):
def close_tasks(
self, instance, project, region, request_id=None, task_id=None, user=None,
requester=None):
"""Close Turbinia Tasks based on Request ID.
Args:
@@ -394,14 +371,9 @@ def send_request(self, request):
self.task_manager.kombu.send_request(request)

# pylint: disable=arguments-differ
def get_task_data(self,
instance,
_,
__,
days=0,
task_id=None,
request_id=None,
function_name=None):
def get_task_data(
self, instance, _, __, days=0, task_id=None, request_id=None,
function_name=None):
"""Gets task data from Redis.
We keep the same function signature, but ignore arguments passed for GCP.
@@ -483,10 +455,7 @@ def __init__(self, *_, **__):
datastore_client = datastore.Client(project=config.PROJECT)
try:
self.psq = psq.Queue(
psq_publisher,
psq_subscriber,
config.PROJECT,
name=config.PSQ_TOPIC,
psq_publisher, psq_subscriber, config.PROJECT, name=config.PSQ_TOPIC,
storage=psq.DatastoreStorage(datastore_client))
except exceptions.GoogleCloudError as e:
msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
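
For orientation, the reflowed client methods all share the (instance, project, region) prefix and are typically driven like this (a sketch; the instance, project, and request ID values are made up):

from turbinia.client import TurbiniaClient

client = TurbiniaClient()
# Poll until every task spawned for a request has finished:
client.wait_for_request(
    'turbinia-prod', 'my-gcp-project', 'us-central1',
    request_id='abc123', poll_interval=60)
# Then render the recent task history for the same request:
print(
    client.format_task_status(
        'turbinia-prod', 'my-gcp-project', 'us-central1',
        request_id='abc123', all_fields=True))
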
8 changes: 4 additions & 4 deletions turbinia/client_test.py
@@ -60,8 +60,8 @@ def testTurbiniaClientGetTaskDataNoResults(self, _, __, mock_cloud_function):
"""Test for exception after empty results from cloud functions."""
mock_cloud_function.return_value = {}
client = TurbiniaClient()
self.assertRaises(TurbiniaException,
client.get_task_data, "inst", "proj", "reg")
self.assertRaises(
TurbiniaException, client.get_task_data, "inst", "proj", "reg")

@mock.patch('turbinia.client.GoogleCloudFunction.ExecuteFunction')
@mock.patch('turbinia.client.task_manager.PSQTaskManager._backend_setup')
@@ -71,8 +71,8 @@ def testTurbiniaClientGetTaskDataInvalidJson(
"""Test for exception after bad json results from cloud functions."""
mock_cloud_function.return_value = {'result': None}
client = TurbiniaClient()
self.assertRaises(TurbiniaException,
client.get_task_data, "inst", "proj", "reg")
self.assertRaises(
TurbiniaException, client.get_task_data, "inst", "proj", "reg")


class TestTurbiniaServer(unittest.TestCase):
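
Both GetTaskData tests above follow the same shape: stub the Cloud Function call and the PSQ backend setup, then assert that the client surfaces a TurbiniaException. Condensed into one sketch of a method inside the client test case class (the method name and stubbed return value are illustrative):

@mock.patch('turbinia.client.GoogleCloudFunction.ExecuteFunction')
@mock.patch('turbinia.client.task_manager.PSQTaskManager._backend_setup')
def testGetTaskDataBadResponse(self, _, mock_cloud_function):
  # An empty response from Cloud Functions should raise, not return.
  mock_cloud_function.return_value = {}
  client = TurbiniaClient()
  self.assertRaises(
      TurbiniaException, client.get_task_data, "inst", "proj", "reg")
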
10 changes: 5 additions & 5 deletions turbinia/config/__init__.py
@@ -29,9 +29,9 @@
# Look in homedir first, then /etc/turbinia, and finally in the source
# config dir for config files
CONFIGPATH = [
os.path.expanduser('~'),
'/etc/turbinia',
os.path.dirname(os.path.abspath(__file__))]
os.path.expanduser('~'), '/etc/turbinia',
os.path.dirname(os.path.abspath(__file__))
]
# Config vars that we expect to exist in the configuration
CONFIGVARS = [
# Turbinia Config
@@ -65,7 +65,8 @@
'CELERY_BACKEND',
'KOMBU_BROKER',
'KOMBU_CHANNEL',
'KOMBU_DURABLE',]
'KOMBU_DURABLE',
]
# Environment variable to look for path data in
ENVCONFIGVAR = 'TURBINIA_CONFIG_PATH'

@@ -92,7 +93,6 @@ def LoadConfig():
if ENVCONFIGVAR in os.environ:
configpath = os.environ[ENVCONFIGVAR].split(':')


config_file = None
# Load first file found
for _dir, _file in itertools.product(configpath, CONFIGFILES):
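
LoadConfig() walks the search path above (or the colon-separated override in TURBINIA_CONFIG_PATH) and loads the first config file it finds, after which the names listed in CONFIGVARS are readable off the config module, as the other files in this commit do. A short usage sketch (the override path is hypothetical):

# Optional: point Turbinia at a custom config location first, e.g.
#   export TURBINIA_CONFIG_PATH=/opt/turbinia/etc:/etc/turbinia
from turbinia import config

config.LoadConfig()
# Declared config vars are then available as attributes:
print(config.TASK_MANAGER, config.INSTANCE_ID)
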
3 changes: 0 additions & 3 deletions turbinia/config/turbinia_config.py
@@ -16,7 +16,6 @@

from __future__ import unicode_literals


# Turbinia Role as 'server' or 'psqworker'
ROLE = 'server'

@@ -69,7 +68,6 @@
# problems.
DEBUG_TASKS = False


###############################
# Google Cloud Platform (GCP) #
###############################
@@ -106,7 +104,6 @@
# Which state manager to use
STATE_MANAGER = 'Datastore'


##########
# CELERY #
##########
15 changes: 9 additions & 6 deletions turbinia/evidence.py
@@ -30,6 +30,7 @@
if config.TASK_MANAGER == 'PSQ':
from turbinia.processors import google_cloud


def evidence_decode(evidence_dict):
"""Decode JSON into appropriate Evidence object.
@@ -92,8 +93,9 @@ class Evidence(object):
request_id: The id of the request this evidence came from, if any
"""

def __init__(self, name=None, description=None, source=None, local_path=None,
tags=None, request_id=None):
def __init__(
self, name=None, description=None, source=None, local_path=None,
tags=None, request_id=None):
"""Initialization for Evidence."""
self.copyable = False
self.config = {}
@@ -173,8 +175,8 @@ class RawDisk(Evidence):
size: The size of the disk in bytes.
"""

def __init__(self, mount_path=None, mount_partition=None, size=None, *args,
**kwargs):
def __init__(
self, mount_path=None, mount_partition=None, size=None, *args, **kwargs):
"""Initialization for raw disk evidence object."""
self.loopdevice_path = None
self.mount_path = mount_path
@@ -192,8 +194,9 @@ class EncryptedDisk(RawDisk):
unencrypted_path: A string to the unencrypted local path
"""

def __init__(self, encryption_type=None, encryption_key=None,
unencrypted_path=None, *args, **kwargs):
def __init__(
self, encryption_type=None, encryption_key=None, unencrypted_path=None,
*args, **kwargs):
"""Initialization for Encrypted disk evidence objects."""
# TODO(aarontp): Make this an enum, or limited list
self.encryption_type = encryption_type
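
The constructors reflowed above keep their keyword-argument interfaces, with RawDisk and EncryptedDisk passing extra arguments up to Evidence via *args/**kwargs. Constructing them looks roughly like this (paths and key material are hypothetical):

from turbinia.evidence import EncryptedDisk, RawDisk

disk = RawDisk(
    name='disk1', source='case-42', local_path='/evidence/disk1.dd',
    mount_partition=1)
locked = EncryptedDisk(
    encryption_type='dm-crypt', encryption_key='/evidence/disk2.key',
    local_path='/evidence/disk2.dd')
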
2 changes: 1 addition & 1 deletion turbinia/jobs/hadoop.py
@@ -33,7 +33,6 @@ class HadoopAnalysisJob(interface.TurbiniaJob):

NAME = 'HadoopAnalysisJob'


def create_tasks(self, evidence):
"""Create task.
@@ -46,4 +45,5 @@ def create_tasks(self, evidence):
tasks = [HadoopAnalysisTask() for _ in evidence]
return tasks


manager.JobsManager.RegisterJob(HadoopAnalysisJob)
16 changes: 9 additions & 7 deletions turbinia/jobs/http_access_logs.py
@@ -28,22 +28,21 @@
from turbinia.workers.analysis import wordpress

ACCESS_LOG_ARTIFACTS = [
'DockerContainerLogs',
'NginxAccessLogs',
'ApacheAccessLogs'
'DockerContainerLogs', 'NginxAccessLogs', 'ApacheAccessLogs'
]


class HTTPAccessLogExtractionJob(interface.TurbiniaJob):
"""HTTP Access log extraction job."""

evidence_input = [
Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded]
Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded
]

evidence_output = [ExportedFileArtifact]

NAME = 'HTTPAccessLogExtractionJob'


def create_tasks(self, evidence):
"""Create task.
@@ -55,10 +54,12 @@ def create_tasks(self, evidence):
"""
tasks = []
for artifact_name in ACCESS_LOG_ARTIFACTS:
tasks.extend([artifact.FileArtifactExtractionTask(artifact_name) for _
in evidence])
tasks.extend([
artifact.FileArtifactExtractionTask(artifact_name) for _ in evidence
])
return tasks


class HTTPAccessLogAnalysisJob(interface.TurbiniaJob):
"""HTTP Access log analysis job."""

Expand All @@ -77,5 +78,6 @@ def create_tasks(self, evidence):
evidence = [e for e in evidence if e.artifact_name in ACCESS_LOG_ARTIFACTS]
return [wordpress.WordpressAccessLogAnalysisTask() for _ in evidence]


manager.JobsManager.RegisterJobs(
[HTTPAccessLogExtractionJob, HTTPAccessLogAnalysisJob])
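
The two jobs above, like the Hadoop job before them and the Jenkins job below, all follow one pattern: declare evidence_input and evidence_output types, set a NAME, fan out one task per evidence item in create_tasks(), and register with the JobsManager. As a skeleton (the class and artifact names are invented):

class MyLogExtractionJob(interface.TurbiniaJob):
  """Example job showing the shared structure."""

  evidence_input = [Directory, RawDisk]
  evidence_output = [ExportedFileArtifact]

  NAME = 'MyLogExtractionJob'

  def create_tasks(self, evidence):
    # One extraction task per evidence object, as in the jobs above.
    return [
        artifact.FileArtifactExtractionTask('MyAppLogs') for _ in evidence
    ]


manager.JobsManager.RegisterJob(MyLogExtractionJob)
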
4 changes: 2 additions & 2 deletions turbinia/jobs/jenkins.py
@@ -29,12 +29,12 @@ class JenkinsAnalysisJob(interface.TurbiniaJob):
"""Jenkins analysis job."""

evidence_input = [
Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded]
Directory, RawDisk, GoogleCloudDisk, GoogleCloudDiskRawEmbedded
]
evidence_output = [ReportText]

NAME = 'JenkinsAnalysisJob'


def create_tasks(self, evidence):
"""Create task for Jenkins analysis job.
