Merge pull request mozilla#259 from adusca/all-coalesced
All coalesced
adusca committed Jun 16, 2015
2 parents 552523a + ade5761 commit 9564516
Showing 3 changed files with 61 additions and 21 deletions.
29 changes: 28 additions & 1 deletion mozci/scripts/trigger.py
@@ -5,9 +5,11 @@

from mozci.mozci import backfill_revlist, trigger_range, \
query_repo_name_from_buildername, query_repo_url_from_buildername, query_builders
from mozci.sources.buildapi import find_all_by_status, make_retrigger_request, COALESCED
from mozci.sources.pushlog import query_revisions_range_from_revision_and_delta
from mozci.sources.pushlog import query_revisions_range, query_revision_info, query_pushid_range


logging.basicConfig(format='%(asctime)s %(levelname)s:\t %(message)s',
datefmt='%m/%d/%Y %I:%M:%S')
LOG = logging.getLogger()
@@ -20,7 +22,6 @@ def parse_args(argv=None):
# Required arguments
parser.add_argument('-b', "--buildername",
dest="buildernames",
required=True,
type=str,
help="Comma-separated buildernames used in Treeherder.")

@@ -86,12 +87,28 @@ def parse_args(argv=None):
help="We will trigger jobs starting from --rev in reverse chronological "
"order until we find the last revision where there was a good job.")

parser.add_argument("--coalesced",
action="store_true",
dest="coalesced",
help="Trigger every coalesced job on revision --rev "
"and repo --repo-name.")

parser.add_argument("--repo-name",
dest="repo_name",
help="Branch name")

options = parser.parse_args(argv)
return options


def validate_options(options):
error_message = ""
if not options.buildernames and not options.coalesced:
error_message = "A buildername is mandatory for all modes except --coalesced. " \
"Use --buildername."
if options.coalesced and not options.repo_name:
error_message = "A branch name is mandatory with --coalesced. Use --repo-name."

if options.back_revisions:
if options.backfill or options.delta or options.from_rev:
error_message = "You should not pass --backfill, --delta or --end-rev " \
@@ -184,6 +201,16 @@ def main():
# requests is too noisy and adds no value
logging.getLogger("requests").setLevel(logging.WARNING)

if options.coalesced:
request_ids = find_all_by_status(options.repo_name, options.rev, COALESCED)

for request_id in request_ids:
make_retrigger_request(repo_name=options.repo_name,
request_id=request_id,
dry_run=options.dry_run)

return

options.buildernames = sanitize_buildernames(options.buildernames)
repo_url = query_repo_url_from_buildername(options.buildernames[0])

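For reference, here is a minimal sketch of what the new --coalesced path in main() boils down to, using only the functions imported above. The branch name and revision are placeholder values and dry_run is forced on, so treat this as an illustration rather than a tested invocation:

from mozci.sources.buildapi import COALESCED, find_all_by_status, make_retrigger_request

repo_name = "mozilla-inbound"   # placeholder branch; passed via --repo-name on the CLI
revision = "0123456789ab"       # placeholder revision; passed via --rev on the CLI

# Collect the request ids of every coalesced job on that revision...
for request_id in find_all_by_status(repo_name, revision, COALESCED):
    # ...and ask buildapi to retrigger each of them (dry_run avoids real requests).
    make_retrigger_request(repo_name=repo_name,
                           request_id=request_id,
                           dry_run=True)

The equivalent command-line call would be along the lines of python mozci/scripts/trigger.py --coalesced --repo-name <branch> --rev <revision>.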
11 changes: 11 additions & 0 deletions mozci/sources/buildapi.py
@@ -321,3 +321,14 @@ def query_repositories(clobber=False):
json.dump(REPOSITORIES, fd)

return REPOSITORIES


def find_all_by_status(repo_name, revision, status):
"""
Find all coalesced jobs in a given branch and revision.
Returns a list with the request ids of the coalesced jobs.
"""
all_jobs = query_jobs_schedule(repo_name, revision)
return [job["requests"][0]["request_id"] for job in all_jobs
if BuildapiJobStatus(job).get_status() == status]
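To make the shape of the data concrete, here is a small self-contained illustration of the filtering that find_all_by_status() performs. The job dictionaries and the status helper are stand-ins for what query_jobs_schedule() and BuildapiJobStatus provide; only the "requests"/"request_id" layout is taken from the code above:

# Illustrative data only; real entries come from query_jobs_schedule().
COALESCED = "coalesced"  # placeholder value; the real constant lives in buildapi.py

sample_jobs = [
    {"requests": [{"request_id": 111}], "status": COALESCED},
    {"requests": [{"request_id": 222}], "status": "complete"},
]

def job_status(job):
    # Stand-in for BuildapiJobStatus(job).get_status()
    return job["status"]

# Same shape as the list comprehension in find_all_by_status():
coalesced_ids = [job["requests"][0]["request_id"]
                 for job in sample_jobs
                 if job_status(job) == COALESCED]
assert coalesced_ids == [111]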
42 changes: 22 additions & 20 deletions mozci/sources/buildjson.py
@@ -5,6 +5,7 @@
"""
import logging
import os
import time

from mozci.utils.tzone import utc_dt, utc_time, utc_day
from mozci.utils.transfer import load_file, path_to_file
@@ -16,7 +17,7 @@
BUILDS_DAY_FILE = "builds-%s.js"

# This helps us read into memory and load less from disk
BUILDS_DAY_INDEX = {}
BUILDS_CACHE = {}


class BuildjsonException(Exception):
@@ -31,6 +32,9 @@ def _fetch_data(filename):
Returns all jobs inside of this buildjson file.
"""
global BUILDS_CACHE
if filename in BUILDS_CACHE:
return BUILDS_CACHE[filename]
url = "%s/%s.gz" % (BUILDJSON_DATA, filename)

if not os.path.isabs(filename):
@@ -40,7 +44,7 @@

# If the file exists and is valid we won't download it again
json_contents = load_file(filepath, url)

BUILDS_CACHE[filename] = json_contents["builds"]
return json_contents["builds"]


@@ -124,11 +128,11 @@ def query_job_data(complete_at, request_id):
This means that from 4pm to midnight we generate the same file again and again
without adding any new data.
"""
global BUILDS_CACHE

assert type(request_id) is int
assert type(complete_at) is int

global BUILDS_DAY_INDEX

date = utc_day(complete_at)
LOG.debug("Job identified with complete_at value: %d run on %s UTC." % (complete_at, date))

Expand All @@ -141,29 +145,27 @@ def query_job_data(complete_at, request_id):
# We might be able to grab information about pending and running jobs
# from builds-running.js and builds-pending.js
filename = BUILDS_4HR_FILE
job = _find_job(request_id, _fetch_data(filename), filename)
else:
filename = BUILDS_DAY_FILE % date
if utc_day() == date:
# XXX: We could read from memory if we tracked last modified time
# in BUILDS_DAY_INDEX
job = _find_job(request_id, _fetch_data(filename), filename)
else:
if date in BUILDS_DAY_INDEX:
LOG.debug("%s is loaded on memory; reading from there." % date)
else:
# Let's load the jobs into memory
jobs = _fetch_data(filename)
BUILDS_DAY_INDEX[date] = jobs

job = _find_job(request_id, BUILDS_DAY_INDEX[date], filename)
job = _find_job(request_id, _fetch_data(filename), filename)

if job:
return job

# If we have not found the job, our cache might be stale. We clear
# the cache and try one more time after 60 seconds. If that also
# fails, we raise an exception.
BUILDS_CACHE = {}
LOG.info("The request %d is not yet on %s. We are going to wait for a new %s."
% (request_id, filename, filename))
time.sleep(60)

job = _find_job(request_id, _fetch_data(filename), filename)
if job:
return job

raise BuildjsonException(
"We have not found the job. If you see this problem please grep "
"in %s for %d and run again with --debug and --dry-run. If you report "
"this issue please upload the mentioned file somewhere for "
"inspection. Thanks!" % (filename, request_id)
)
"inspection. Thanks!" % (filename, request_id))
