Bug 1059814 - Non-whitespace pep8 fixes using autopep8 aggressive mode
Generated using:
autopep8 --in-place --recursive --aggressive --aggressive
--max-line-length 999 --exclude='.git,__pycache__,.vagrant,build,vendor,
0001_initial.py,models.py,test_note_api.py,test_bug_job_map_api.py' .

autopep8's aggressive mode, unlike standard mode, makes non-whitespace
changes. It also uses lib2to3 to correct deprecated code (W690), some of
which isn't strictly a pep8 failure. Some of these changes are more
dubious than others, but rather than disable W690 completely, I've just
excluded the files where the unwanted changes would have been made, so
we can benefit from the rest.
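
For illustration, here is a minimal before/after sketch of the rewrite
patterns aggressive mode applied in this commit; the function and
variable names are hypothetical, but each pattern appears in the hunks
below:

    # Before: constructs flagged by pep8/lib2to3 (hypothetical Python 2 code)
    def lookup(key, cache):
        if not key in cache:         # E713: use "not in" for membership tests
            while 1:                 # rewritten to "while True"
                break
        if type(cache) == dict:      # E721: use isinstance() for type checks
            try:
                return cache[key];   # E703: statement ends with a semicolon
            except KeyError, e:      # W690: deprecated except syntax (lib2to3)
                return None

    # After: the equivalent code autopep8 --aggressive emits
    def lookup(key, cache):
        if key not in cache:
            while True:
                break
        if isinstance(cache, dict):
            try:
                return cache[key]
            except KeyError as e:
                return None

A plausible way to confirm what remains is to rerun the checker that
autopep8 wraps with the same exclusions, e.g. pep8 --max-line-length=999 .
(assuming the pep8 package is installed).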
Ed Morley committed Mar 3, 2015
1 parent d90a2e1 · commit 30ad99c
Showing 18 changed files with 84 additions and 74 deletions.
20 changes: 12 additions & 8 deletions tests/conftest.py
@@ -60,6 +60,7 @@ def pytest_sessionstart(session):
     settings.PULSE_URI = settings.BROKER_URL
     settings.PULSE_EXCHANGE_NAMESPACE = 'test'
 
+
 def pytest_sessionfinish(session):
     """Tear down the test environment, including databases."""
     session.django_runner.teardown_test_environment()
@@ -155,7 +156,7 @@ def add_test_procs_file(dhub, key, filename):
     )
     del dhub.procs[key]
     proclist = dhub.data_sources[key]["procs"]
-    if not test_proc_file in proclist:
+    if test_proc_file not in proclist:
         proclist.append(test_proc_file)
     dhub.data_sources[key]["procs"] = proclist
     dhub.load_procs(key)
@@ -296,6 +297,7 @@ def mock_message_broker(monkeypatch):
     from django.conf import settings
     monkeypatch.setattr(settings, 'BROKER_URL', 'memory://')
 
+
 @pytest.fixture
 def resultset_with_three_jobs(jm, sample_data, sample_resultset):
     """
@@ -328,7 +330,6 @@ def resultset_with_three_jobs(jm, sample_data, sample_resultset):
     return resultset_creation['inserted_result_set_ids'][0]
 
 
-
 @pytest.fixture
 def eleven_jobs_stored(jm, sample_data, sample_resultset):
     """stores a list of 11 job samples"""
@@ -419,6 +420,7 @@ def fin():

     request.addfinalizer(fin)
 
+
 def pulse_consumer(exchange, request):
     from django.conf import settings
 
@@ -430,19 +432,19 @@ def pulse_consumer(exchange, request):
     connection = kombu.Connection(settings.PULSE_URI)
 
     exchange = kombu.Exchange(
-        name = exchange_name,
+        name=exchange_name,
         type='topic'
     )
 
     queue = kombu.Queue(
         no_ack=True,
-        exchange=exchange, # Exchange name
-        routing_key='#', # Bind to all messages
-        auto_delete=True, # Delete after each test
-        exclusive=False) # Disallow multiple consumers
+        exchange=exchange,  # Exchange name
+        routing_key='#',  # Bind to all messages
+        auto_delete=True,  # Delete after each test
+        exclusive=False)  # Disallow multiple consumers
 
     simpleQueue = connection.SimpleQueue(
-        name = queue,
+        name=queue,
         channel=connection,
         no_ack=True)

@@ -452,10 +454,12 @@ def fin():
     request.addfinalizer(fin)
     return simpleQueue
 
+
 @pytest.fixture
 def pulse_resultset_consumer(request):
     return pulse_consumer('new-result-set', request)
 
+
 @pytest.fixture
 def pulse_action_consumer(request):
     return pulse_consumer('job-actions', request)
8 changes: 6 additions & 2 deletions tests/webapp/api/test_jobs_api.py
@@ -8,6 +8,7 @@

 import json
 
+
 def test_job_list(webapp, eleven_jobs_processed, jm):
     """
     test retrieving a list of ten json blobs from the jobs-list
@@ -131,8 +132,9 @@ def test_job_retrigger_unauthorized(webapp, eleven_jobs_processed, jm):
kwargs={"project": jm.project, "pk": job["id"]})
webapp.post(url, status=403)


def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
pulse_action_consumer):
pulse_action_consumer):
"""
Validate that only authenticated users can hit this endpoint.
"""
@@ -155,8 +157,9 @@ def test_job_retrigger_authorized(webapp, eleven_jobs_processed, jm,
     assert content['requester'] == email
     user.delete()
 
+
 def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
-        pulse_action_consumer):
+                               pulse_action_consumer):
     """
     Validate that only authenticated users can hit this endpoint.
     """
@@ -179,6 +182,7 @@ def test_job_cancel_authorized(webapp, eleven_jobs_processed, jm,
     assert content['requester'] == email
     user.delete()
 
+
 def test_job_detail_bad_project(webapp, eleven_jobs_processed, jm):
     """
     test retrieving a single job from the jobs-detail
5 changes: 3 additions & 2 deletions tests/webapp/api/test_resultset_api.py
@@ -285,6 +285,7 @@ def test_resultset_with_bad_key(sample_resultset, jm, initial_data):
     assert resp.json['response'] == "access_denied"
     assert resp.json['detail'] == "oauth_consumer_key does not match project, {0}, credentials".format(jm.project)
 
+
 def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consumer):
     """
     Issue cancellation of a resultset with three unfinished jobs.
@@ -300,7 +301,7 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
         assert job['state'] == 'pending'
 
     url = reverse("resultset-cancel-all",
-                  kwargs={"project": jm.project, "pk": resultset_with_three_jobs })
+                  kwargs={"project": jm.project, "pk": resultset_with_three_jobs})
     resp = client.post(url)
 
     # Ensure all jobs are pending..
@@ -316,4 +317,4 @@ def test_resultset_cancel_all(jm, resultset_with_three_jobs, pulse_action_consum
     assert content['action'] == 'cancel'
     assert content['project'] == jm.project
 
-    user.delete();
+    user.delete()
6 changes: 3 additions & 3 deletions treeherder/etl/buildapi.py
@@ -64,7 +64,7 @@ def find_job_guid(self, build):
         request_ids_str = ",".join(map(str, request_ids))
         request_time_list = []
 
-        if type(request_times) == dict:
+        if isinstance(request_times, dict):
             for request_id in request_ids:
                 request_time_list.append(
                     request_times[str(request_id)])
@@ -79,7 +79,7 @@ def find_job_guid(self, build):
             # coallesced job detected, generate the coalesced
             # job guids
             for index, r_id in enumerate(request_ids):
-                #skip if buildbot doesn't have a matching number of ids and times
+                # skip if buildbot doesn't have a matching number of ids and times
                 if len(request_time_list) > index:
                     job_guid_data['coalesced'].append(
                         common.generate_job_guid(
@@ -848,7 +848,7 @@ def write_report(self):
         # Write out display report
         for k, v in sorted(
                 self.report_obj['analyzers'][analyzer]['data'].iteritems(),
-                key=lambda (k, v): (v['first_seen'], k)):
+                key=lambda k_v: (k_v[1]['first_seen'], k_v[0])):
 
             if k in self.blacklist:
                 continue
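
The lambda change above is one of the lib2to3-driven (W690) rewrites:
Python 3 removed tuple parameter unpacking from function signatures
(PEP 3113), so lambda (k, v): ... must become a single-argument lambda
that indexes into the pair. A minimal sketch of the equivalence, using
hypothetical data:

    data = {'a': {'first_seen': 2}, 'b': {'first_seen': 1}}

    # Python 2 only: the lambda signature unpacks each (key, value) tuple
    print sorted(data.iteritems(), key=lambda (k, v): (v['first_seen'], k))

    # Portable form produced by autopep8: one argument, indexed explicitly
    print sorted(data.iteritems(), key=lambda k_v: (k_v[1]['first_seen'], k_v[0]))

    # Both print: [('b', {'first_seen': 1}), ('a', {'first_seen': 2})]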
8 changes: 4 additions & 4 deletions treeherder/etl/daemon.py
@@ -38,7 +38,7 @@ def daemonize(self):
             if pid > 0:
                 # exit first parent
                 sys.exit(0)
-        except OSError, e:
+        except OSError as e:
             sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
             sys.exit(1)
 
@@ -53,7 +53,7 @@ def daemonize(self):
             if pid > 0:
                 # exit from second parent
                 sys.exit(0)
-        except OSError, e:
+        except OSError as e:
             sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
             sys.exit(1)
 
@@ -118,10 +118,10 @@ def stop(self):

         # Try killing the daemon process
         try:
-            while 1:
+            while True:
                 os.kill(pid, SIGTERM)
                 time.sleep(0.1)
-        except OSError, err:
+        except OSError as err:
             err = str(err)
             if err.find("No such process") > 0:
                 if os.path.exists(self.pidfile):
2 changes: 1 addition & 1 deletion treeherder/etl/oauth_utils.py
@@ -53,7 +53,7 @@ def set_credentials(cls, credentials={}):

             logger.error(msg)
 
-        except Exception, e:
+        except Exception as e:
             logger.error(e)
             raise e
 
4 changes: 2 additions & 2 deletions treeherder/etl/pulse.py
@@ -265,7 +265,7 @@ def process_raw_data_dict(self, attr_table, pulse_data, data):
             if cb:
                 cb(attr, pulse_value, data)
             else:
-                if (type(pulse_value) == list) and (len(pulse_value) > 0):
+                if (isinstance(pulse_value, list)) and (len(pulse_value) > 0):
                     data[attr] = pulse_value[0]
                 else:
                     data[attr] = pulse_value
@@ -292,7 +292,7 @@ def process_property_list(self, attr_table, pulse_data, data):

     def process_sourcestamp_changes_list(self, attr_table, pulse_data, data):
         """Process sourcestamp changes list"""
-        if (type(pulse_data) == list) and (len(pulse_data) > 0):
+        if (isinstance(pulse_data, list)) and (len(pulse_data) > 0):
             self.process_raw_data_dict(attr_table, pulse_data[0], data)
 
     def adapt_data(self, data):
2 changes: 1 addition & 1 deletion treeherder/etl/pushlog.py
@@ -90,7 +90,7 @@ def run(self, source_url, repository, changeset=None):
                 extracted_content = self.extract(
                     source_url + "&fromchange=" + last_push
                 )
-            except requests.exceptions.HTTPError, e:
+            except requests.exceptions.HTTPError as e:
                 # in case of a 404 error, delete the cache key
                 # and try it without any parameter
                 if e.response.status_code == 404:
4 changes: 2 additions & 2 deletions treeherder/etl/tasks/tbpl_tasks.py
@@ -19,7 +19,7 @@ def submit_star_comment(project, job_id, bug_id, submit_timestamp, who):
         req = OrangeFactorBugRequest(project, job_id, bug_id, submit_timestamp, who)
         req.generate_request_body()
         req.send_request()
-    except Exception, e:
+    except Exception as e:
         # Initially retry after 1 minute, then for each subsequent retry
         # lengthen the retry time by another minute.
         submit_star_comment.retry(exc=e, countdown=(1 + submit_star_comment.request.retries) * 60)
@@ -38,7 +38,7 @@ def submit_bug_comment(project, job_id, bug_id, who):
         req = BugzillaBugRequest(project, job_id, bug_id, who)
         req.generate_request_body()
         req.send_request()
-    except Exception, e:
+    except Exception as e:
         # Initially retry after 1 minute, then for each subsequent retry
         # lengthen the retry time by another minute.
         submit_bug_comment.retry(exc=e, countdown=(1 + submit_bug_comment.request.retries) * 60)
2 changes: 1 addition & 1 deletion treeherder/log_parser/tasks.py
@@ -75,7 +75,7 @@ def parse_log(project, job_log_url, job_guid, check_errors=False):

         logger.debug("Finished posting artifact for guid '%s'" % job_guid)
 
-    except Exception, e:
+    except Exception as e:
         # send an update to job_log_url
         # the job_log_url status changes from pending/running to failed
         logger.warn("Failed to download and/or parse artifact for guid '%s'" %
4 changes: 2 additions & 2 deletions treeherder/log_parser/utils.py
@@ -169,7 +169,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):

             # collect open recent and all other bugs suggestions
             if search_term:
-                if not search_term in terms_requested:
+                if search_term not in terms_requested:
                     # retrieve the list of suggestions from the api
                     bugs = get_bugs_for_search_term(
                         search_term,
@@ -185,7 +185,7 @@ def extract_log_artifacts(log_url, job_guid, check_errors):
             # the crash signature as search term
             crash_signature = get_crash_signature(clean_line)
             if crash_signature:
-                if not crash_signature in terms_requested:
+                if crash_signature not in terms_requested:
                     bugs = get_bugs_for_search_term(
                         crash_signature,
                         bugscache_uri
6 changes: 2 additions & 4 deletions treeherder/model/derived/jobs.py
@@ -250,11 +250,10 @@ def get_job_reference_data(self, signature):
         # Retrieve associated data in reference_data_signatures
         result = self.refdata_model.get_reference_data([signature])
         if result and signature in result:
-            return result[signature];
+            return result[signature]
 
         return None
 
-
     def get_job_list(self, offset, limit,
                      conditions=None, exclusion_profile=None):
         """
@@ -389,7 +388,6 @@ def _job_action_event(self, job, action, requester):
             routing_key='high_priority'
         )
 
-
     def retrigger(self, requester, job):
         """
         Issue a retrigger to the given job
@@ -2314,7 +2312,7 @@ def load_job_artifacts(self, artifact_data, job_id_lookup):
             job_id = None
             job_guid = None
 
-            if type(artifact) is list:
+            if isinstance(artifact, list):
 
                 job_guid = artifact[0]
                 job_id = job_id_lookup.get(job_guid, {}).get('id', None)
4 changes: 2 additions & 2 deletions treeherder/model/derived/refdata.py
@@ -245,7 +245,7 @@ def add_reference_data_signature(self, name, build_system_type,
         # No reference_data_name was provided use the signature
         # in it's place, in the case of buildbot this will be the
         # buildername
-        if name == None:
+        if name is None:
             name = signature
 
         placeholders = [name, signature]
@@ -1415,7 +1415,7 @@ def get_reference_data(self, signatures):

         if signatures:
 
-            reference_data_signatures_where_in_clause = [ ','.join( ['%s'] * len(signatures) ) ]
+            reference_data_signatures_where_in_clause = [','.join(['%s'] * len(signatures))]
 
             reference_data = self.execute(
                 proc="reference.selects.get_reference_data",
23 changes: 11 additions & 12 deletions treeherder/model/exchanges.py
@@ -33,26 +33,25 @@ class TreeherderPublisher(PulsePublisher):
     )
 
     job_action = Exchange(
-        exchange = "job-actions",
-        title = "Actions issued by jobs",
-        description = """
+        exchange="job-actions",
+        title="Actions issued by jobs",
+        description="""
             There are a number of actions which can be done to a job
             (retrigger/cancel) they are published on this exchange
             """,
-        routing_keys = [
+        routing_keys=[
             Key(
-                name = "build_system_type",
-                summary = "Build system which created job (i.e. buildbot)"
+                name="build_system_type",
+                summary="Build system which created job (i.e. buildbot)"
             ),
             Key(
-                name = "project",
-                summary = "Project (i.e. try) which this job belongs to"
+                name="project",
+                summary="Project (i.e. try) which this job belongs to"
            ),
             Key(
-                name = "action",
-                summary = "Type of action issued (i.e. cancel)"
+                name="action",
+                summary="Type of action issued (i.e. cancel)"
             )
         ],
-        schema = "https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
+        schema="https://treeherder.mozilla.org/schemas/v1/job-action-message.json#"
     )

2 changes: 1 addition & 1 deletion treeherder/model/management/commands/init_master_db.py
@@ -63,7 +63,7 @@ def handle(self, *args, **options):
         try:
             rendered_sql = sql.format(engine=options['engine'])
             cursor.execute(rendered_sql)
-        except Exception, e:
+        except Exception as e:
             print "Error on sql execution:{0}".format(e)
         finally:
             cursor.close()
