Skip to content

Commit

Permalink
Bug 1348829 - Remove deadcode found using vulture
Browse files Browse the repository at this point in the history
The logparser parts are leftover from bug 1302844. The SETA parts appear
to have been unused at the time of the original landing.
  • Loading branch information
Ed Morley committed Mar 20, 2017
1 parent c1dfddc commit 4fcd020
Show file tree
Hide file tree
Showing 8 changed files with 1 addition and 124 deletions.
13 changes: 0 additions & 13 deletions tests/conftest.py
Expand Up @@ -407,19 +407,6 @@ def classified_failures(test_job, text_log_errors_failure_lines, test_matcher,
return classified_failures


@pytest.fixture
def retriggered_job(test_job, eleven_job_blobs):
    """Store and return a copy of test_job under a new guid, simulating a retrigger."""
    from treeherder.model.models import Job

    retrigger_guid = "f1c75261017c7c5ce3000931dce4c442fe0a129a"
    retriggered_blob = copy.deepcopy(eleven_job_blobs[0])
    retriggered_blob['job']['job_guid'] = retrigger_guid

    # persist the duplicate so it is fetchable as a real Job row
    store_job_data(test_job.repository, [retriggered_blob])

    return Job.objects.get(guid=retrigger_guid)


@pytest.fixture
def test_user(request, transactional_db):
# a user *without* sheriff/staff permissions
Expand Down
17 changes: 0 additions & 17 deletions tests/log_parser/test_step_parser.py

This file was deleted.

37 changes: 0 additions & 37 deletions tests/log_parser/test_tasks.py
@@ -1,9 +1,4 @@
import gzip
import urllib2

import pytest
from django.conf import settings
from django.utils.six import BytesIO

from treeherder.etl.jobs import store_job_data
from treeherder.etl.resultset import store_result_set_data
Expand All @@ -30,38 +25,6 @@ def jobs_with_local_log():
return [job]


@pytest.fixture
def jobs_with_local_mozlog_log():
    """Return sample job data whose first log reference points at a local mozlog file."""
    log_name = "plain-chunked_raw.log"
    sample_data = SampleData()
    local_url = "file://{0}".format(
        sample_data.get_log_path("{0}.gz".format(log_name)))

    # sample url to test with a real log, during development
    # url = "http://mozilla-releng-blobs.s3.amazonaws.com/blobs/try/sha512/6a690d565effa5a485a9385cc62eccd59feaa93fa6bb167073f012a105dc33aeaa02233daf081426b5363cd9affd007e42aea2265f47ddbc334a4493de1879b5"
    job = sample_data.job_data[0]

    # redirect the job's first log reference to the local file
    log_reference = job['job']['log_references'][0]
    log_reference['url'] = local_url
    log_reference['name'] = 'mozlog_json'
    return [job]


@pytest.fixture
def mock_mozlog_get_log_handler(monkeypatch):
    """Patch MozlogArtifactBuilder.get_log_handle to read gzipped logs over urllib2.

    The replacement fetches the log url directly and wraps the response in a
    GzipFile so callers receive a decompressed file-like handle.
    """

    def _get_log_handle(mockself, url):
        response = urllib2.urlopen(
            url,
            timeout=settings.REQUESTS_TIMEOUT
        )
        return gzip.GzipFile(fileobj=BytesIO(response.read()))

    # Import the module that actually owns the patched attribute. The original
    # imported treeherder.etl.common instead, which only worked if some other
    # code had already imported treeherder.log_parser.artifactbuilders.
    import treeherder.log_parser.artifactbuilders
    monkeypatch.setattr(treeherder.log_parser.artifactbuilders.MozlogArtifactBuilder,
                        'get_log_handle', _get_log_handle)


def test_parse_log(test_repository, failure_classifications, jobs_with_local_log, sample_resultset):
"""
check that 2 job_artifacts get inserted when running a parse_log task for
Expand Down
4 changes: 0 additions & 4 deletions treeherder/etl/seta.py
Expand Up @@ -53,10 +53,6 @@ def query_jobtypes(self):
"""Query all available jobtypes and return it as list"""
return self.jobtypes

def query_jobnames(self):
    """Query all jobnames including buildtype and groupcode, then return them as list

    NOTE(review): simply returns self.jobnames — presumably populated at
    construction time; confirm against the enclosing class's __init__.
    """
    return self.jobnames


def is_job_blacklisted(testtype):
if not testtype:
Expand Down
9 changes: 0 additions & 9 deletions treeherder/log_parser/parsers.py
@@ -1,4 +1,3 @@
import datetime
import json
import logging
import re
Expand Down Expand Up @@ -231,14 +230,6 @@ def finish_parse(self, last_lineno_seen):
# end of the log.
self.end_step(last_lineno_seen)

def parsetime(self, match):
    """Parse a timestamp string into a datetime using self.DATE_FORMAT.

    DATE_FORMAT expects fractional seconds, so a ".0" suffix is appended
    when the input has no decimal point to keep strptime from failing.
    """
    timestamp = match if "." in match else "{0}.0".format(match)
    return datetime.datetime.strptime(timestamp, self.DATE_FORMAT)

@property
def steps(self):
"""Return the list of steps in the artifact"""
Expand Down
5 changes: 0 additions & 5 deletions treeherder/seta/analyze_failures.py
Expand Up @@ -16,11 +16,6 @@
SETA_UNSUPPORTED_PLATFORMS)
from treeherder.seta.update_job_priority import update_job_priority_table

HEADERS = {
'Accept': 'application/json',
'User-Agent': 'treeherder-seta',
}

logger = logging.getLogger(__name__)


Expand Down
31 changes: 0 additions & 31 deletions treeherder/seta/high_value_jobs.py
Expand Up @@ -66,37 +66,6 @@ def build_removals(active_jobs, failures, target):
return low_value_jobs, failures_root_cause


def remove_root_cause_failures(failures, failures_root_cause):
    """Delete every revision listed in failures_root_cause from failures.

    Mutates the failures mapping in place and returns it; raises KeyError
    if a listed revision is absent (same as the original ``del``).
    """
    for root_cause_revision in failures_root_cause:
        failures.pop(root_cause_revision)
    return failures


def invert_index(failures, active_jobs):
    """Find the active job matching the most failure revisions and drop them.

    Builds an inverted index from stringified job -> list of revisions whose
    jobtypes it matches, then removes from ``failures`` (in place) the
    revisions covered by the job with the highest match count (ties broken by
    sorted job-string order). Returns (failures, max_job_string), or
    (failures, None) when no job matches more than one revision.
    """
    revisions_by_job = {}
    for revision, jobtypes in failures.iteritems():
        for active_job in active_jobs:
            if is_matched(active_job, jobtypes):
                revisions_by_job.setdefault(str(active_job), []).append(revision)

    # Pick the first (in sorted order) job whose match count strictly exceeds 1.
    best_job = None
    best_count = 1
    for jobtype in sorted(revisions_by_job):
        match_count = len(revisions_by_job[jobtype])
        if match_count > best_count:
            best_count = match_count
            best_job = jobtype

    if best_job is None:
        return failures, None

    for revision in revisions_by_job[best_job]:
        del failures[revision]

    return failures, best_job


def get_high_value_jobs(fixed_by_commit_jobs, target=100):
"""
fixed_by_commit_jobs:
Expand Down
9 changes: 1 addition & 8 deletions treeherder/webapp/api/seta.py
@@ -1,18 +1,11 @@
from rest_framework import (serializers,
status,
from rest_framework import (status,
viewsets)
from rest_framework.response import Response

from treeherder.etl.seta import Treecodes
from treeherder.seta.analyze_failures import get_failures_fixed_by_commit
from treeherder.seta.job_priorities import (SetaError,
seta_job_scheduling)
from treeherder.seta.models import JobPriority


class SetaJobPrioritySerializer(serializers.HyperlinkedModelSerializer):
    # DRF serializer over the JobPriority model.
    # NOTE(review): Meta declares neither `fields` nor `exclude`, which DRF's
    # ModelSerializer requires -- presumably this serializer was never
    # instantiated; confirm before reuse.
    class Meta:
        model = JobPriority


class SetaJobPriorityViewSet(viewsets.ViewSet):
Expand Down

0 comments on commit 4fcd020

Please sign in to comment.