Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Merging from default

changeset:   2942:88d4e6cc2057
parent:      2940:4d5f49af57ab
user:        Rail Aliiev <rail@mozilla.com>
date:        Mon Jan 21 14:39:22 2013 -0500
files:       common.py process/factory.py
description:
Bug 820238 - PuppetAgain config for new Linux64 testing reference platform. r=armenzg


changeset:   2943:c65fd7add8d6
user:        Chris AtLee <catlee@mozilla.com>
date:        Mon Jan 21 19:45:01 2013 -0500
files:       bin/hgpoller.py bin/log_uploader.py bin/postrun.py bin/try_mailer.py bin/update_from_files.py changes/ftppoller.py changes/hgpoller.py changes/mobileftppoller.py changes/tinderboxpoller.py common.py env.py l10n.py log.py misc.py misc_scheduler.py process/factory.py process/release.py scheduler.py status/db/jsoncol.py status/db/model.py status/db/status.py status/errors.py status/generators.py status/log_handlers.py status/mail.py status/pulse.py status/queued_command.py status/tinderboxmailnotifier.py steps/base.py steps/l10n.py steps/misc.py steps/mobile.py steps/mock.py steps/release.py steps/signing.py steps/source.py steps/talos.py steps/test.py steps/unittest.py steps/updates.py test/test_hgpoller.py test/test_misc_important.py test/test_misc_nextslaves.py test/test_misc_scheduler_nightly.py test/test_misc_scheduler_propfuncs.py test/test_misc_scheduler_propscheduler.py test/test_process_factory.py test/test_test_order.py test/test_try_parser.py try_parser.py
description:
nobug: PEP8! r=bitrotallthethings


changeset:   2944:4c7c7df48595
user:        Steve Fink <sfink@mozilla.com>
date:        Tue Jan 22 12:50:07 2013 -0800
files:       misc.py
description:
Bug 830466 - Turn off tipsOnly for try. r=catlee


changeset:   2945:78189628f1e2
tag:         tip
user:        Armen Zambrano Gasparnian <armenzg@mozilla.com>
date:        Tue Jan 22 16:03:32 2013 -0500
files:       misc.py process/factory.py
description:
Bug 710840: Track peak virtual memory usage of link.exe process during libxul
PGO link on graph server. r=bhearsum

--HG--
branch : production-0.8
  • Loading branch information...
commit 97050edc161868c6bc66a7eee7605ca05a1d171f 2 parents a470055 + 53cbf81
@kmoir kmoir authored
Showing with 4,659 additions and 3,621 deletions.
  1. +30 −17 bin/hgpoller.py
  2. +70 −47 bin/log_uploader.py
  3. +59 −42 bin/postrun.py
  4. +28 −16 bin/try_mailer.py
  5. +52 −25 bin/update_from_files.py
  6. +23 −18 changes/ftppoller.py
  7. +27 −15 changes/hgpoller.py
  8. +39 −34 changes/mobileftppoller.py
  9. +58 −46 changes/tinderboxpoller.py
  10. +11 −4 common.py
  11. +49 −47 env.py
  12. +20 −13 l10n.py
  13. +10 −2 log.py
  14. +512 −377 misc.py
  15. +49 −34 misc_scheduler.py
  16. +2,141 −1,885 process/factory.py
  17. +278 −227 process/release.py
  18. +51 −33 scheduler.py
  19. +1 −0  status/db/jsoncol.py
  20. +122 −67 status/db/model.py
  21. +50 −26 status/db/status.py
  22. +9 −7 status/errors.py
  23. +1 −2  status/generators.py
  24. +10 −6 status/log_handlers.py
  25. +20 −16 status/mail.py
  26. +31 −28 status/pulse.py
  27. +11 −7 status/queued_command.py
  28. +12 −10 status/tinderboxmailnotifier.py
  29. +7 −3 steps/base.py
  30. +13 −6 steps/l10n.py
  31. +72 −44 steps/misc.py
  32. +11 −8 steps/mobile.py
  33. +18 −17 steps/mock.py
  34. +9 −6 steps/release.py
  35. +3 −3 steps/signing.py
  36. +1 −1  steps/source.py
  37. +21 −13 steps/talos.py
  38. +46 −32 steps/test.py
  39. +92 −58 steps/unittest.py
  40. +16 −13 steps/updates.py
  41. +92 −44 test/test_hgpoller.py
  42. +14 −7 test/test_misc_important.py
  43. +29 −15 test/test_misc_nextslaves.py
  44. +51 −26 test/test_misc_scheduler_nightly.py
  45. +24 −11 test/test_misc_scheduler_propfuncs.py
  46. +12 −5 test/test_misc_scheduler_propscheduler.py
  47. +3 −0  test/test_process_factory.py
  48. +43 −25 test/test_test_order.py
  49. +243 −199 test/test_try_parser.py
  50. +65 −34 try_parser.py
View
47 bin/hgpoller.py
@@ -1,16 +1,22 @@
#!/usr/bin/env python
-import urlparse, urllib, time
+import urlparse
+import urllib
+import time
try:
import json
except:
import simplejson as json
-import httplib, urllib2, socket, ssl
+import httplib
+import urllib2
+import socket
+import ssl
import subprocess
from buildbotcustom.changes.hgpoller import _parse_changes
import logging as log
+
def buildValidatingOpener(ca_certs):
class VerifiedHTTPSConnection(httplib.HTTPSConnection):
def connect(self):
@@ -45,6 +51,7 @@ def https_open(self, req):
return url_opener
+
def validating_https_open(url, ca_certs, username=None, password=None):
url_opener = buildValidatingOpener(ca_certs)
req = urllib2.Request(url)
@@ -55,17 +62,18 @@ def validating_https_open(url, ca_certs, username=None, password=None):
req.add_header("Authorization", "Basic %s" % pw)
return url_opener.open(req)
+
def getChanges(base_url, last_changeset=None, tips_only=False, ca_certs=None,
- username=None, password=None):
+ username=None, password=None):
bits = urlparse.urlparse(base_url)
if bits.scheme == 'https':
assert ca_certs, "you must specify ca_certs"
params = [('full', '1')]
if last_changeset:
- params.append( ('fromchange', last_changeset) )
+ params.append(('fromchange', last_changeset))
if tips_only:
- params.append( ('tipsonly', '1') )
+ params.append(('tipsonly', '1'))
url = "%s/json-pushes?%s" % (base_url, urllib.urlencode(params))
log.debug("Fetching %s", url)
@@ -78,22 +86,25 @@ def getChanges(base_url, last_changeset=None, tips_only=False, ca_certs=None,
data = handle.read()
return _parse_changes(data)
+
def sendchange(master, branch, change):
- log.info("Sendchange %s to %s on branch %s", change['changeset'], master, branch)
+ log.info("Sendchange %s to %s on branch %s", change['changeset'],
+ master, branch)
cmd = ['retry.py', '-r', '5', '-s', '5', '-t', '30',
- '--stdout-regexp', 'change sent successfully']
+ '--stdout-regexp', 'change sent successfully']
cmd.extend(
- ['buildbot', 'sendchange',
+ ['buildbot', 'sendchange',
'--master', master,
'--branch', branch,
'--comments', change['comments'].encode('ascii', 'replace'),
'--revision', change['changeset'],
'--user', change['author'].encode('ascii', 'replace'),
'--when', str(change['updated']),
- ])
+ ])
cmd.extend(change['files'])
subprocess.check_call(cmd)
+
def processBranch(branch, state, config):
log.debug("Processing %s", branch)
master = config.get('main', 'master')
@@ -116,13 +127,14 @@ def processBranch(branch, state, config):
try:
changes = getChanges(url, tips_only=tips_only,
- last_changeset=last_changeset, ca_certs=ca_certs,
- username=username, password=password)
+ last_changeset=last_changeset, ca_certs=ca_certs,
+ username=username, password=password)
# Do sendchanges!
for c in changes:
# Ignore off-default branches
if c['branch'] != 'default' and config.getboolean(branch, 'default_branch_only'):
- log.info("Skipping %s on branch %s", c['changeset'], c['branch'])
+ log.info(
+ "Skipping %s on branch %s", c['changeset'], c['branch'])
continue
# Change the comments to include the url to the revision
c['comments'] += ' %s/rev/%s' % (url, c['changeset'])
@@ -150,11 +162,12 @@ def processBranch(branch, state, config):
parser = OptionParser()
parser.set_defaults(
- config_file="hgpoller.ini",
- verbosity=log.INFO,
- )
+ config_file="hgpoller.ini",
+ verbosity=log.INFO,
+ )
parser.add_option("-f", "--config-file", dest="config_file")
- parser.add_option("-v", "--verbose", dest="verbosity", action="store_const", const=log.DEBUG)
+ parser.add_option("-v", "--verbose", dest="verbosity",
+ action="store_const", const=log.DEBUG)
options, args = parser.parse_args()
@@ -168,7 +181,7 @@ def processBranch(branch, state, config):
'interval': 300,
'state_file': 'state.json',
'default_branch_only': "yes",
- })
+ })
config.read(options.config_file)
try:
View
117 bin/log_uploader.py
@@ -3,7 +3,10 @@
Uploads logs from build to the given host.
"""
-import os, cPickle, gzip, subprocess
+import os
+import cPickle
+import gzip
+import subprocess
from datetime import datetime
import time
@@ -17,13 +20,14 @@
retries = 5
retry_sleep = 30
+
def do_cmd(cmd):
"Runs the command, and returns output"
devnull = open(os.devnull)
proc = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- stdin=devnull,)
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ stdin=devnull,)
retcode = proc.wait()
output = proc.stdout.read().strip()
@@ -32,13 +36,15 @@ def do_cmd(cmd):
raise Exception("Command %s returned non-zero exit code %i:\n%s" % (
cmd, retcode, output))
+
def ssh(user, identity, host, remote_cmd, port=22):
cmd = ['ssh', '-l', user]
if identity:
cmd.extend(['-i', identity])
cmd.extend(['-p', str(port), host, remote_cmd])
- return retry(do_cmd, attempts=retries+1, sleeptime=retry_sleep, args=(cmd,))
+ return retry(do_cmd, attempts=retries + 1, sleeptime=retry_sleep, args=(cmd,))
+
def scp(user, identity, host, files, remote_dir, port=22):
cmd = ['scp']
@@ -50,6 +56,7 @@ def scp(user, identity, host, files, remote_dir, port=22):
return retry(do_cmd, attempts=retries, sleeptime=retry_sleep, args=(cmd,))
+
def getBuild(builder_path, build_number):
build_path = os.path.join(builder_path, build_number)
@@ -64,6 +71,7 @@ class FakeBuilder:
build.builder = FakeBuilder()
return build
+
def getAuthor(build):
props = build.getProperties()
if 'who' in props:
@@ -73,6 +81,7 @@ def getAuthor(build):
if changes:
return changes[0].who
+
def getBuildId(build):
try:
buildid = build.getProperty('buildid')
@@ -90,6 +99,7 @@ def getBuildId(build):
return buildid
+
def isNightly(build):
try:
if build.getProperty('nightly_build'):
@@ -97,6 +107,7 @@ def isNightly(build):
except:
return False
+
def formatLog(tmpdir, build, master_name, builder_suffix=''):
"""
Returns a filename with the contents of the build log
@@ -104,9 +115,11 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
"""
builder_name = build.builder.name
if master_name:
- build_name = "%s%s-%s-build%s.txt.gz" % (builder_name, builder_suffix, master_name, build_number)
+ build_name = "%s%s-%s-build%s.txt.gz" % (
+ builder_name, builder_suffix, master_name, build_number)
else:
- build_name = "%s%s-build%s.txt.gz" % (builder_name, builder_suffix, build_number)
+ build_name = "%s%s-build%s.txt.gz" % (
+ builder_name, builder_suffix, build_number)
logFile = gzip.GzipFile(os.path.join(tmpdir, build_name), "w")
@@ -136,7 +149,6 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
logFile.write("\n")
-
# Steps
for step in build.getSteps():
times = step.getTimes()
@@ -151,9 +163,11 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
if results == (None, []):
results = "not started"
- shortText = ' '.join(step.getText()) + ' (results: %s, elapsed: %s)' % (results, elapsed)
+ shortText = ' '.join(step.getText(
+ )) + ' (results: %s, elapsed: %s)' % (results, elapsed)
if times and times[0]:
- logFile.write("========= Started %s (at %s) =========\n" % (shortText, datetime.fromtimestamp(times[0])))
+ logFile.write("========= Started %s (at %s) =========\n" %
+ (shortText, datetime.fromtimestamp(times[0])))
else:
logFile.write("========= Skipped %s =========\n" % shortText)
continue
@@ -165,7 +179,8 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
logFile.write("\n")
if times and times[1]:
- logFile.write("========= Finished %s (at %s) =========\n\n" % (shortText, datetime.fromtimestamp(times[1])))
+ logFile.write("========= Finished %s (at %s) =========\n\n" %
+ (shortText, datetime.fromtimestamp(times[1])))
else:
logFile.write("========= Finished %s =========\n\n" % shortText)
logFile.close()
@@ -173,38 +188,41 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
if __name__ == "__main__":
from optparse import OptionParser
- import tempfile, shutil
+ import tempfile
+ import shutil
parser = OptionParser(__doc__)
parser.set_defaults(
- nightly=False,
- release=None,
- trybuild=False,
- shadowbuild=False,
- l10n=False,
- user=os.environ.get("USER"),
- product="firefox",
- retries=retries,
- retry_sleep=retry_sleep,
- master_name=None,
- )
+ nightly=False,
+ release=None,
+ trybuild=False,
+ shadowbuild=False,
+ l10n=False,
+ user=os.environ.get("USER"),
+ product="firefox",
+ retries=retries,
+ retry_sleep=retry_sleep,
+ master_name=None,
+ )
parser.add_option("-u", "--user", dest="user", help="upload user name")
parser.add_option("-i", "--identity", dest="identity", help="ssh identity")
parser.add_option("-b", "--branch", dest="branch", help="branch")
parser.add_option("-p", "--platform", dest="platform", help="platform")
- parser.add_option("-r", "--retries", dest="retries", help="number of times to try", type="int")
- parser.add_option("-t", "--retrytime", dest="retry_sleep", help="time to sleep between tries", type="int")
+ parser.add_option("-r", "--retries", dest="retries",
+ help="number of times to try", type="int")
+ parser.add_option("-t", "--retrytime", dest="retry_sleep",
+ help="time to sleep between tries", type="int")
parser.add_option("--product", dest="product", help="product directory")
parser.add_option("--nightly", dest="nightly", action="store_true",
- help="upload to nightly dir")
+ help="upload to nightly dir")
parser.add_option("--release", dest="release",
- help="upload to release candidates dir")
+ help="upload to release candidates dir")
parser.add_option("--l10n", dest="l10n", action="store_true",
- help="include locale value in log filename")
+ help="include locale value in log filename")
parser.add_option("--try", dest="trybuild", action="store_true",
- help="upload to try build directory")
+ help="upload to try build directory")
parser.add_option("--shadow", dest="shadowbuild", action="store_true",
- help="upload to shadow build directory")
+ help="upload to shadow build directory")
parser.add_option("--master-name", dest="master_name")
options, args = parser.parse_args()
@@ -233,27 +251,30 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
suffix = '-%s' % build.getProperty('locale')
except KeyError:
suffix = '-unknown'
- logfile = formatLog(local_tmpdir, build, options.master_name, suffix)
+ logfile = formatLog(
+ local_tmpdir, build, options.master_name, suffix)
else:
logfile = formatLog(local_tmpdir, build, options.master_name)
# Now....upload it!
- remote_tmpdir = ssh(user=options.user, identity=options.identity, host=host,
- remote_cmd="mktemp -d")
+ remote_tmpdir = ssh(
+ user=options.user, identity=options.identity, host=host,
+ remote_cmd="mktemp -d")
try:
# Release logs go into the 'logs' directory
if options.release:
# Create the logs directory
ssh(user=options.user, identity=options.identity, host=host,
- remote_cmd="mkdir -p %s/logs" % remote_tmpdir)
+ remote_cmd="mkdir -p %s/logs" % remote_tmpdir)
scp(user=options.user, identity=options.identity, host=host,
- files=[logfile], remote_dir='%s/logs' % remote_tmpdir)
+ files=[logfile], remote_dir='%s/logs' % remote_tmpdir)
remote_files = [os.path.join(remote_tmpdir, 'logs', os.path.basename(f)) for f in [logfile]]
else:
scp(user=options.user, identity=options.identity, host=host,
- files=[logfile], remote_dir=remote_tmpdir)
+ files=[logfile], remote_dir=remote_tmpdir)
- remote_files = [os.path.join(remote_tmpdir, os.path.basename(f)) for f in [logfile]]
+ remote_files = [os.path.join(
+ remote_tmpdir, os.path.basename(f)) for f in [logfile]]
uploadArgs = dict(
branch=options.branch,
@@ -275,7 +296,7 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
who=getAuthor(build),
revision=build.getProperty('revision')[:12],
builddir="%s-%s" % (options.branch, platform),
- ))
+ ))
else:
buildid = getBuildId(build)
@@ -294,22 +315,24 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
# Don't upload to the latest directory - the logs are
# already in the dated directory and we should keep the
# latest-* directory clean.
- #uploadArgs['to_latest'] = True
+ # uploadArgs['to_latest'] = True
else:
uploadArgs['to_tinderbox_builds'] = True
uploadArgs['upload_dir'] = uploadArgs['branch']
else:
- uploadArgs['upload_dir'] = "%s-%s" % (options.branch, platform)
+ uploadArgs[
+ 'upload_dir'] = "%s-%s" % (options.branch, platform)
if options.nightly or isNightly(build):
uploadArgs['to_dated'] = True
# Don't upload to the latest directory - the logs are
# already in the dated directory and we should keep the
# latest-* directory clean.
- #uploadArgs['to_latest'] = True
+ # uploadArgs['to_latest'] = True
if 'mobile' in options.product:
- uploadArgs['branch'] = options.branch + '-' + platform
+ uploadArgs[
+ 'branch'] = options.branch + '-' + platform
else:
uploadArgs['branch'] = options.branch
@@ -325,17 +348,17 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
props = build.getProperties()
if props.getProperty('got_revision') is not None:
- revision=props['got_revision']
+ revision = props['got_revision']
elif props.getProperty('revision') is not None:
- revision=props['revision']
+ revision = props['revision']
else:
- revision=None
+ revision = None
uploadArgs.update(dict(
to_try=False,
who=None,
revision=revision,
buildid=buildid,
- ))
+ ))
post_upload_cmd = postUploadCmdPrefix(**uploadArgs)
post_upload_cmd += [remote_tmpdir]
post_upload_cmd += remote_files
@@ -346,7 +369,7 @@ def formatLog(tmpdir, build, master_name, builder_suffix=''):
print ssh(user=options.user, identity=options.identity, host=host, remote_cmd=post_upload_cmd)
finally:
ssh(user=options.user, identity=options.identity, host=host,
- remote_cmd="rm -rf %s" % remote_tmpdir)
+ remote_cmd="rm -rf %s" % remote_tmpdir)
finally:
shutil.rmtree(local_tmpdir)
View
101 bin/postrun.py
@@ -23,7 +23,8 @@
command queue entries.
"""
-import os, sys
+import os
+import sys
import re
import cPickle as pickle
from datetime import datetime
@@ -41,6 +42,7 @@
from util.commands import get_output
+
class PostRunner(object):
def __init__(self, config):
self.config = config
@@ -57,12 +59,14 @@ def uploadLog(self, build):
product = info['product']
platform = info['platform']
- upload_args = ['-r', '2', '-t', '10', '--master-name', self.config['statusdb.master_name']]
+ upload_args = ['-r', '2', '-t', '10', '--master-name',
+ self.config['statusdb.master_name']]
if "nightly" in builder.name:
upload_args.append("--nightly")
if builder.name.startswith("release-"):
upload_args.append("--release")
- upload_args.append("%s/%s" % (info.get('version'), info.get('build_number')))
+ upload_args.append(
+ "%s/%s" % (info.get('version'), info.get('build_number')))
if branch and 'try' in branch:
upload_args.append("--try")
@@ -106,7 +110,8 @@ def mailResults(self, build, log_url):
"--log-url", log_url,
]
- cmd.extend(['-f', self.config.get('mail_notifier_sender', 'tryserver@build.mozilla.org')])
+ cmd.extend(['-f', self.config.get(
+ 'mail_notifier_sender', 'tryserver@build.mozilla.org')])
if self.config.get('mail_real_author'):
cmd.append('--to-author')
@@ -190,12 +195,12 @@ def getBuildInfo(self, build):
def writePulseMessage(self, options, build, build_id):
builder_name = build.builder.name
msg = {
- 'event': 'build.%s.%s.log_uploaded' % (builder_name, build.number),
- 'payload': {"build": build.asDict()},
- 'master_name': options.master_name,
- 'master_incarnation': options.master_incarnation,
- 'id': build_id,
- }
+ 'event': 'build.%s.%s.log_uploaded' % (builder_name, build.number),
+ 'payload': {"build": build.asDict()},
+ 'master_name': options.master_name,
+ 'master_incarnation': options.master_incarnation,
+ 'id': build_id,
+ }
self.pulse_queue.add(json.dumps([msg]))
def updateStatusDB(self, build, request_ids):
@@ -219,15 +224,16 @@ def updateStatusDB(self, build, request_ids):
log.debug("searching for build")
q = session.query(model.Build).filter_by(
- master_id=master.id,
- builder=db_builder,
- buildnumber=build.number,
- starttime=starttime,
- )
+ master_id=master.id,
+ builder=db_builder,
+ buildnumber=build.number,
+ starttime=starttime,
+ )
db_build = q.first()
if not db_build:
log.debug("creating new build")
- db_build = model.Build.fromBBBuild(session, build, builder_name, master.id)
+ db_build = model.Build.fromBBBuild(
+ session, build, builder_name, master.id)
else:
log.debug("updating old build")
db_build.updateFromBBBuild(session, build)
@@ -241,23 +247,24 @@ def updateStatusDB(self, build, request_ids):
for i in request_ids:
# See if we already have this row
q = model.schedulerdb_requests.select()
- q = q.where(model.schedulerdb_requests.c.status_build_id==db_build.id)
- q = q.where(model.schedulerdb_requests.c.scheduler_request_id==i)
+ q = q.where(
+ model.schedulerdb_requests.c.status_build_id == db_build.id)
+ q = q.where(model.schedulerdb_requests.c.scheduler_request_id == i)
q = q.limit(1).execute()
if not q.fetchone():
# Find the schedulerdb build id for this
bid = schedulerdb.execute(
- sa.text('select id from builds where brid=:brid and number=:number'),
- brid=i, number=build.number
- ).fetchone()
+ sa.text('select id from builds where brid=:brid and number=:number'),
+ brid=i, number=build.number
+ ).fetchone()
if bid is not None:
bid = bid[0]
log.debug("bid for %s is %s", i, bid)
model.schedulerdb_requests.insert().execute(
- status_build_id=db_build.id,
- scheduler_request_id=i,
- scheduler_build_id=bid,
- )
+ status_build_id=db_build.id,
+ scheduler_request_id=i,
+ scheduler_build_id=bid,
+ )
log.debug("build id is %s", db_build.id)
return db_build.id
@@ -267,9 +274,11 @@ def getRequestTimes(self, request_ids):
schedulerdb = sa.create_engine(self.config['schedulerdb.url'])
retval = {}
for i in request_ids:
- submitted_at = schedulerdb.execute(sa.text("select submitted_at from buildrequests where id=:brid"),
- brid=i,
- ).fetchone()
+ submitted_at = schedulerdb.execute(
+ sa.text(
+ "select submitted_at from buildrequests where id=:brid"),
+ brid=i,
+ ).fetchone()
if submitted_at is not None:
retval[i] = submitted_at[0]
return retval
@@ -294,11 +303,14 @@ def processBuild(self, options, build_path, request_ids):
log_url = None
log.debug("adding properties")
build.properties.setProperty('log_url', log_url, 'postrun.py')
- build.properties.setProperty('request_ids', [int(i) for i in request_ids], 'postrun.py')
- build.properties.setProperty('request_times', self.getRequestTimes(request_ids), 'postrun.py')
+ build.properties.setProperty(
+ 'request_ids', [int(i) for i in request_ids], 'postrun.py')
+ build.properties.setProperty('request_times', self.getRequestTimes(
+ request_ids), 'postrun.py')
build_id = self.updateStatusDB(build, request_ids)
- cmd = [sys.executable] + sys.argv + ["--statusdb-id", str(build_id)]
+ cmd = [sys.executable] + sys.argv + [
+ "--statusdb-id", str(build_id)]
self.command_queue.add(json.dumps(cmd))
else:
log.info("publishing to pulse")
@@ -306,24 +318,29 @@ def processBuild(self, options, build_path, request_ids):
build_id = options.statusdb_id
build.properties.setProperty('log_url', log_url, 'postrun.py')
build.properties.setProperty('statusdb_id', build_id, 'postrun.py')
- build.properties.setProperty('request_ids', [int(i) for i in request_ids], 'postrun.py')
- build.properties.setProperty('request_times', self.getRequestTimes(request_ids), 'postrun.py')
+ build.properties.setProperty(
+ 'request_ids', [int(i) for i in request_ids], 'postrun.py')
+ build.properties.setProperty('request_times', self.getRequestTimes(
+ request_ids), 'postrun.py')
self.writePulseMessage(options, build, build_id)
+
def main():
from optparse import OptionParser
parser = OptionParser()
parser.set_defaults(
- config=None,
- loglevel=logging.INFO,
- log_url=None,
- statusdb_id=None,
- master_name=None,
- master_incarnation=None,
- )
+ config=None,
+ loglevel=logging.INFO,
+ log_url=None,
+ statusdb_id=None,
+ master_name=None,
+ master_incarnation=None,
+ )
parser.add_option("-c", "--config", dest="config")
- parser.add_option("-v", "--verbose", dest="loglevel", const=logging.DEBUG, action="store_const")
- parser.add_option("-q", "--quiet", dest="loglevel", const=logging.WARNING, action="store_const")
+ parser.add_option("-v", "--verbose", dest="loglevel",
+ const=logging.DEBUG, action="store_const")
+ parser.add_option("-q", "--quiet", dest="loglevel",
+ const=logging.WARNING, action="store_const")
parser.add_option("--log-url", dest="log_url")
parser.add_option("--statusdb-id", dest="statusdb_id", type="int")
parser.add_option("--master-name", dest="master_name")
View
44 bin/try_mailer.py
@@ -3,13 +3,17 @@
Uploads logs to the given host, and then sends an email to the build's owner
"""
-import subprocess, sys, os, re
+import subprocess
+import sys
+import os
+import re
import cPickle
from email.message import Message
from email.utils import formatdate
from buildbot.status.builder import SUCCESS, WARNINGS, FAILURE, EXCEPTION, RETRY
+
def getBuild(builder_path, build_number):
build_path = os.path.join(builder_path, build_number)
@@ -24,6 +28,7 @@ class FakeBuilder:
build.builder = FakeBuilder()
return build
+
def uploadLog(args):
"""Uploads the build log, and returns the URL to it"""
my_dir = os.path.abspath(os.path.dirname(__file__))
@@ -33,9 +38,9 @@ def uploadLog(args):
print "Running", cmd
proc = subprocess.Popen(cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- stdin=devnull,)
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ stdin=devnull,)
retcode = proc.wait()
output = proc.stdout.read().strip()
@@ -47,6 +52,7 @@ def uploadLog(args):
return url.group(), retcode
return None, retcode
+
def makeTryMessage(build, log_url):
builder = build.builder.name
@@ -64,7 +70,7 @@ def makeTryMessage(build, log_url):
branch = props['branch']
tree = "Try"
if 'comm' in branch:
- tree = "Thunderbird-Try"
+ tree = "Thunderbird-Try"
if 'got_revision' in props:
revision = props['got_revision'][:12]
@@ -125,8 +131,8 @@ def makeTryMessage(build, log_url):
text = re.sub("\n", "<br>\n", text)
headers = {"In-Reply-To": "<%(branch)s-%(revision)s>" % locals(),
- "References": "<%(branch)s-%(revision)s>" % locals(),
- }
+ "References": "<%(branch)s-%(revision)s>" % locals(),
+ }
return dict(
subject=subject,
@@ -134,7 +140,8 @@ def makeTryMessage(build, log_url):
headers=headers,
author=who,
type='html',
- )
+ )
+
def formatMessage(msgdict, from_, to):
m = Message()
@@ -144,7 +151,7 @@ def formatMessage(msgdict, from_, to):
m['Subject'] = msgdict['subject']
m['From'] = from_
m['To'] = ", ".join(to)
- for k,v in msgdict['headers'].items():
+ for k, v in msgdict['headers'].items():
if k not in m:
m[k] = v
return m
@@ -153,16 +160,19 @@ def formatMessage(msgdict, from_, to):
from argparse import ArgumentParser
from smtplib import SMTP
parser = ArgumentParser()
- parser.add_argument("-f", "--from", dest="from_", help="from email address", required=True)
- parser.add_argument("-t", "--to", dest="to", help="to email address", action='append')
+ parser.add_argument("-f", "--from", dest="from_",
+ help="from email address", required=True)
+ parser.add_argument(
+ "-t", "--to", dest="to", help="to email address", action='append')
parser.add_argument("--to-author", dest="to_author", help="send mail to build's owner", action="store_true")
- parser.add_argument("--log-url", dest="log_url", help="url to uploaded log")
+ parser.add_argument(
+ "--log-url", dest="log_url", help="url to uploaded log")
parser.set_defaults(
to_author=False,
to=[],
from_=None,
log_url=None
- )
+ )
options, args = parser.parse_known_args()
@@ -177,12 +187,13 @@ def formatMessage(msgdict, from_, to):
print
tm_parser = ArgumentParser()
- tm_parser.add_argument("-e", "--all-emails", dest="all_emails", help="request all emails", action="store_true")
+ tm_parser.add_argument("-e", "--all-emails", dest="all_emails",
+ help="request all emails", action="store_true")
tm_parser.add_argument("-f", "--failure-emails", dest="failure", help="request failure emails only", action="store_true")
tm_parser.set_defaults(
all_emails=False,
failure=False,
- )
+ )
builder_path, build_number = args[-2:]
build = getBuild(builder_path, build_number)
@@ -191,7 +202,8 @@ def formatMessage(msgdict, from_, to):
match = re.search("try: ", build.source.changes[-1].comments)
comment_args = ""
if match:
- comment_args = build.source.changes[-1].comments.split("try: ")[1].split()
+ comment_args = build.source.changes[-1].comments.split(
+ "try: ")[1].split()
tm_options, args = tm_parser.parse_known_args(comment_args)
# Let's check the results to see if we need the message
View
77 bin/update_from_files.py
@@ -1,34 +1,46 @@
import buildbotcustom.status.db.model as model
-import cPickle, os, re, time, sys
+import cPickle
+import os
+import re
+import time
+import sys
from datetime import datetime
from buildbot.status.builder import BuilderStatus, BuildStepStatus
# Monkey patching!
# These are various replacement functions for __setstate__, which is
# called when unpickling files.
+
+
def monkeypatch(orig, new):
orig_setstate = orig.__setstate__
+
def wrapper(self, state):
return new(self, orig_setstate, state)
orig.__setstate__ = wrapper
+
def builder_setstate_noevents(self, orig, state):
slaves = state['slavenames']
state['events'] = []
orig(self, state)
self.slavenames = slaves
+
def builder_setstate_events(self, orig, state):
slaves = state['slavenames']
orig(self, state)
self.slavenames = slaves
+
def buildstep_setstate(self, orig, state):
state['logs'] = []
orig(self, state)
+
def getBuildNumbers(builder, last_time):
files = os.listdir(builder)
+
def _sortfunc(x):
try:
return int(x)
@@ -45,16 +57,19 @@ def _sortfunc(x):
retval.append(f)
return retval
+
def getBuild(builder, number):
try:
return cPickle.load(open(os.path.join(builder, number)))
except:
return None
+
def getBuilder(builder):
builder = cPickle.load(open(os.path.join(builder, 'builder')))
return builder
+
def updateBuilderSlaves(session, builder, db_builder):
bb_slaves = set(s for s in builder.slavenames)
db_slaves = set()
@@ -69,7 +84,8 @@ def updateBuilderSlaves(session, builder, db_builder):
old_slaves = db_slaves - bb_slaves
for s in new_slaves:
- bs = model.BuilderSlave(added=datetime.now(), slave=model.Slave.get(session, s))
+ bs = model.BuilderSlave(
+ added=datetime.now(), slave=model.Slave.get(session, s))
db_builder.slaves.append(bs)
session.add(bs)
@@ -80,6 +96,7 @@ def updateBuilderSlaves(session, builder, db_builder):
session.commit()
+
def updateSlaveTimes(session, master, builder, db_builder, last_time):
db_slaves = {}
for builder_slave in db_builder.slaves:
@@ -87,10 +104,12 @@ def updateSlaveTimes(session, master, builder, db_builder, last_time):
# Fetch all the events from the database for these slaves
events = session.query(model.MasterSlave).\
- filter(model.MasterSlave.slave_id.in_([slave.slave.id for slave in db_builder.slaves]))
+ filter(model.MasterSlave.slave_id.in_(
+ [slave.slave.id for slave in db_builder.slaves]))
if last_time:
- events = events.filter(model.MasterSlave.connected > datetime.utcfromtimestamp(last_time))
+ events = events.filter(model.MasterSlave.connected >
+ datetime.utcfromtimestamp(last_time))
events = events.order_by(model.MasterSlave.connected.asc()).all()
@@ -114,7 +133,8 @@ def updateSlaveTimes(session, master, builder, db_builder, last_time):
t = datetime.utcfromtimestamp(int(e.started))
if e.text[0] == "connect":
# This slave just connected to this builder
- # Check if we've got an entry earlier than this that hasn't been disconnected yet
+ # Check if we've got an entry earlier than this that hasn't
+ # been disconnected yet
found = False
for event in reversed(slave_events):
if event.connected < t and not event.disconnected:
@@ -130,17 +150,19 @@ def updateSlaveTimes(session, master, builder, db_builder, last_time):
if event:
print t
for e in reversed(slave_events):
- print e.connected, e.connected-t, e.connected == t
+ print e.connected, e.connected - t, e.connected == t
raise ValueError("Shouldn't be here!")
- event = model.MasterSlave(connected=t, slave=db_slaves[name].slave, master=master)
+ event = model.MasterSlave(connected=t, slave=db_slaves[
+ name].slave, master=master)
session.add(event)
slave_events.append(event)
events.append(event)
- slave_events.sort(key=lambda x:x.connected)
- events.sort(key=lambda x:x.connected)
+ slave_events.sort(key=lambda x: x.connected)
+ events.sort(key=lambda x: x.connected)
else:
- # If this is a disconnect event, find the last connect event and mark it as disconnected
+ # If this is a disconnect event, find the last connect event
+ # and mark it as disconnected
found = False
for event in reversed(slave_events):
if event.connected < t:
@@ -186,7 +208,8 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
updateBuilderSlaves(session, bb_builder, db_builder)
if update_times:
- updateSlaveTimes(session, master, bb_builder, db_builder, last_time)
+ updateSlaveTimes(
+ session, master, bb_builder, db_builder, last_time)
for j, buildNumber in enumerate(builds):
master = session.merge(master)
@@ -196,8 +219,8 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
eta = 0
else:
eta = (time.time() - s) / (complete)
- eta = (1-complete) * eta
- print builder, buildNumber, "%i/%i" % (j+1, bn), "%.2f%% complete" % (100* complete), "ETA in %i seconds" % eta
+ eta = (1 - complete) * eta
+ print builder, buildNumber, "%i/%i" % (j + 1, bn), "%.2f%% complete" % (100 * complete), "ETA in %i seconds" % eta
i += 1
build = getBuild(builder, buildNumber)
if not build:
@@ -207,14 +230,15 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
starttime = datetime.utcfromtimestamp(build.started)
q = session.query(model.Build).filter_by(
- master_id=master.id,
- builder=db_builder,
- buildnumber=build.number,
- starttime=starttime,
- )
+ master_id=master.id,
+ builder=db_builder,
+ buildnumber=build.number,
+ starttime=starttime,
+ )
db_build = q.first()
if not db_build:
- db_build = model.Build.fromBBBuild(session, build, builder_name, master.id)
+ db_build = model.Build.fromBBBuild(
+ session, build, builder_name, master.id)
else:
db_build.updateFromBBBuild(session, build)
session.commit()
@@ -226,10 +250,12 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
parser = OptionParser("%prog [options] builders")
parser.add_option("-d", "--database", dest="database", help="database url")
- parser.add_option("-m", "--master", dest="master", help="master url (buildbotURL in the master.cfg file)")
- parser.add_option("-n", "--description", dest="name", help="human friendly name for master")
+ parser.add_option("-m", "--master", dest="master",
+ help="master url (buildbotURL in the master.cfg file)")
+ parser.add_option("-n", "--description", dest="name",
+ help="human friendly name for master")
parser.add_option("", "--times", dest="times", help="update slave connect/disconnect times", action="store_true", default=False)
- parser.add_option("-c", "--config", dest="config",
+ parser.add_option("-c", "--config", dest="config",
help="read configurations from a file")
options, args = parser.parse_args()
@@ -249,7 +275,7 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
for param in supported_params:
# Rewrite empty CLI params if we have them in config
if not getattr(options, param, None) and \
- config.has_option('DEFAULT', param):
+ config.has_option('DEFAULT', param):
setattr(options, param, config.get('DEFAULT', param))
except (ParsingError, MissingSectionHeaderError):
@@ -294,10 +320,11 @@ def updateFromFiles(session, master_url, master_name, builders, last_time, updat
except:
last_time = 0
- print "\n" + "-"*75
+ print "\n" + "-" * 75
print "Starting update at", time.ctime(started)
- updated = updateFromFiles(session, options.master, options.name, builders, last_time, options.times)
+ updated = updateFromFiles(session, options.master, options.name,
+ builders, last_time, options.times)
print "Updated", updated, "builds in:"
View
41 changes/ftppoller.py
@@ -8,13 +8,14 @@
from buildbot.changes import base, changes
from buildbotcustom.l10n import ParseLocalesFile
+
class FtpPollerBase(base.ChangeSource):
"""This source will poll an ftp directory searching for a specific file and when found
trigger a change to the change master."""
compare_attrs = ["ftpURLs", "pollInterval", "branch"]
- parent = None # filled in when we're added
+ parent = None # filled in when we're added
loop = None
volatile = ['loop']
working = 0
@@ -72,11 +73,11 @@ def _get_changes(self, url):
def _process_changes(self, pageContents, url):
if self.parseContents(pageContents):
- c = changes.Change(who = url,
- comments = "success",
- files = [],
- properties={'who': url},
- branch = self.branch)
+ c = changes.Change(who=url,
+ comments="success",
+ files=[],
+ properties={'who': url},
+ branch=self.branch)
self.parent.addChange(c)
@@ -99,7 +100,8 @@ def __init__(self, searchString="", **kwargs):
def parseContents(self, pageContents):
""" Check through lines to see if file exists """
# scenario 1:
- # buildbot restarts or file already exists, so we don't want to trigger anything
+ # buildbot restarts or file already exists, so we don't want to trigger
+ # anything
if self.gotFile == 1:
if re.search(self.searchString, pageContents):
self.stopService()
@@ -151,7 +153,8 @@ def _get_ftp(self, locales, url):
"""Poll the ftp page with the given url. Return the page as a string
along with the list of locales from the previous callback"""
d = self._get_page(url)
- d.addCallback(lambda result: {'pageContents': result, 'locales': locales})
+ d.addCallback(
+ lambda result: {'pageContents': result, 'locales': locales})
return d
def poll(self):
@@ -174,18 +177,19 @@ def _get_locales(self, pageContents):
parsedLocales = ParseLocalesFile(pageContents)
return [re.compile(re.escape("%s/" % l)) for l in parsedLocales if len(parsedLocales[l]) == 0 or self.sl_platform_map[self.platform] in parsedLocales[l]]
-
def searchAllStrings(self, pageContents, locales):
"""match the ftp page against the locales list"""
req_matches = len(locales)
- #count number of strings with at least one positive match
- matches = sum([1 for regex in locales if re.search(regex, pageContents)])
+ # count number of strings with at least one positive match
+ matches = sum(
+ [1 for regex in locales if re.search(regex, pageContents)])
return matches == req_matches
def parseContents(self, pageContents, locales):
""" Check through lines to see if file exists """
# scenario 1:
- # buildbot restarts or all files already exist, so we don't want to trigger anything
+ # buildbot restarts or all files already exist, so we don't want to
+ # trigger anything
if self.gotAllFiles:
if self.searchAllStrings(pageContents, locales):
self.stopService()
@@ -207,14 +211,15 @@ def _process_changes(self, results, url):
pageContents = results['pageContents']
locales = results['locales']
if self.parseContents(pageContents, locales):
- c = changes.Change(who = url,
- comments = "success",
- files = [],
- branch = self.branch)
+ c = changes.Change(who=url,
+ comments="success",
+ files=[],
+ branch=self.branch)
self.parent.addChange(c)
- #return the locales list for the next ftp poller in the callback chain
+ # return the locales list for the next ftp poller in the callback chain
return locales
+
class UrlPoller(FtpPollerBase):
compare_attrs = FtpPollerBase.compare_attrs + ['url']
gotFile = True
@@ -225,7 +230,7 @@ def __init__(self, url, **kwargs):
def poll(self):
if self.working > 0:
- log.msg("Not polling UrlPoller because last poll is still working (%s)" \
+ log.msg("Not polling UrlPoller because last poll is still working (%s)"
% self.working)
else:
self.working = self.working + 1
View
42 changes/hgpoller.py
@@ -116,19 +116,23 @@ def __init__(self, d):
self.d = defer.Deferred()
self.dead = False
d.addCallbacks(self.succeeded, self.failed)
+
def succeeded(self, result):
if self.dead:
log.msg("Dead pluggable got called")
else:
self.d.callback(result)
- def failed(self, fail = None):
+
+ def failed(self, fail=None):
if self.dead:
log.msg("Dead pluggable got errbacked")
else:
self.d.errback(fail)
+
class BasePoller(object):
attemptLimit = 3
+
def __init__(self):
self.attempts = 0
self.startLoad = 0
@@ -142,7 +146,8 @@ def poll(self):
log.msg("dropping the ball on %s, starting new" % self)
else:
self.attempts += 1
- log.msg("Not polling %s because last poll is still working" % self)
+ log.msg("Not polling %s because last poll is still working" %
+ self)
reactor.callLater(0, self.pollDone, None)
return
self.attempts = 1
@@ -238,15 +243,15 @@ def _make_url(self):
def dataFailed(self, res):
# XXX: disabled for bug 774862
- #if hasattr(res.value, 'status') and res.value.status == '500' and \
+ # if hasattr(res.value, 'status') and res.value.status == '500' and \
#'unknown revision' in res.value.response:
## Indicates that the revision can't be found. The repo has most
## likely been reset. Forget about our lastChangeset, and set
## emptyRepo to True so we can trigger builds for new changes there
- #if self.verbose:
- #log.msg("%s has been reset" % self.baseURL)
- #self.lastChangeset = None
- #self.emptyRepo = True
+ # if self.verbose:
+ # log.msg("%s has been reset" % self.baseURL)
+ # self.lastChangeset = None
+ # self.emptyRepo = True
return self.super_class.dataFailed(self, res)
def processData(self, query):
@@ -434,7 +439,9 @@ def __str__(self):
def changeHook(self, change):
if self.storeRev:
- change.properties.setProperty(self.storeRev, change.revision, 'HgPoller')
+ change.properties.setProperty(
+ self.storeRev, change.revision, 'HgPoller')
+
class HgLocalePoller(BaseHgPoller):
"""This helper class for HgAllLocalesPoller polls a single locale and
@@ -444,14 +451,15 @@ class HgLocalePoller(BaseHgPoller):
verbose = False
def __init__(self, locale, parent, branch, hgURL):
- BaseHgPoller.__init__(self, hgURL, branch, tree = locale)
+ BaseHgPoller.__init__(self, hgURL, branch, tree=locale)
self.locale = locale
self.parent = parent
self.branch = branch
def changeHook(self, change):
change.properties.setProperty('locale', self.locale, 'HgLocalePoller')
- change.properties.setProperty('l10n_revision', change.revision, 'HgLocalePoller')
+ change.properties.setProperty(
+ 'l10n_revision', change.revision, 'HgLocalePoller')
def pollDone(self, res):
self.parent.localeDone(self.locale)
@@ -459,6 +467,7 @@ def pollDone(self, res):
def __str__(self):
return "<HgLocalePoller for %s>" % self.baseURL
+
class HgAllLocalesPoller(base.ChangeSource, BasePoller):
"""Poll all localization repositories from an index page.
@@ -518,9 +527,9 @@ def describe(self):
def getData(self):
log.msg("Polling all locales at %s/%s/" % (self.hgURL,
- self.repositoryIndex))
+ self.repositoryIndex))
return getPage(self.hgURL + '/' + self.repositoryIndex + '/?style=raw',
- timeout = self.timeout)
+ timeout=self.timeout)
def getLocalePoller(self, locale, branch):
if (locale, branch) not in self.localePollers:
@@ -533,6 +542,7 @@ def getLocalePoller(self, locale, branch):
def processData(self, data):
locales = filter(None, data.split())
# get locales and branches
+
def brancher(link):
steps = filter(None, link.split('/'))
loc = steps.pop()
@@ -559,7 +569,8 @@ def pollNextLocale(self):
self.activeRequests -= 1
if not self.activeRequests:
msg = "%s done with all locales" % str(self)
- loadTimes = map(lambda p: p.loadTime, self.localePollers.values())
+ loadTimes = map(
+ lambda p: p.loadTime, self.localePollers.values())
goodTimes = filter(lambda t: t is not None, loadTimes)
if not goodTimes:
msg += ". All %d locale pollers failed" % len(loadTimes)
@@ -568,7 +579,8 @@ def pollNextLocale(self):
(min(goodTimes), max(goodTimes),
sum(goodTimes) / len(goodTimes))
if len(loadTimes) > len(goodTimes):
- msg += ", %d failed" % (len(loadTimes) - len(goodTimes))
+ msg += ", %d failed" % (
+ len(loadTimes) - len(goodTimes))
log.msg(msg)
log.msg("Total time: %.1f" % (time.time() - self.startLoad))
return
@@ -583,4 +595,4 @@ def localeDone(self, loc):
def __str__(self):
return "<HgAllLocalesPoller for %s/%s/>" % (self.hgURL,
- self.repositoryIndex)
+ self.repositoryIndex)
View
73 changes/mobileftppoller.py
@@ -9,20 +9,22 @@
from buildbot.changes import base, changes
-
class InvalidResultError(Exception):
def __init__(self, value="InvalidResultError"):
self.value = value
+
def __str__(self):
return repr(self.value)
+
class EmptyResult(Exception):
pass
+
class MobileFtpParser:
"""I parse the web page for possible builds to test"""
findBuildDirs = re.compile('^.*"(\d{10})\/".*$')
-
+
def __init__(self, query, searchString):
url = query.geturl()
pageContents = query.read()
@@ -30,19 +32,20 @@ def __init__(self, query, searchString):
self.dirs = []
self.dates = []
lines = pageContents.split('\n')
- #for parsing lists of directories
+ # for parsing lists of directories
for line in lines:
- if line == "": continue
+ if line == "":
+ continue
match = re.match(self.findBuildDirs, line)
if match:
- self.dirs.append(match.group(1))
- #for parsing lists of files
+ self.dirs.append(match.group(1))
+ # for parsing lists of files
findLastDate = re.compile('^.*"([^"]*' + searchString + ')".*(\d\d-[a-zA-Z]{3}-\d\d\d\d \d\d:\d\d).*$')
for line in lines:
match = re.match(findLastDate, line)
if match:
self.dates.append([match.group(1), url + match.group(1), time.mktime(time.strptime(match.group(2), "%d-%b-%Y %H:%M"))])
-
+
def getDirs(self):
return self.dirs
@@ -51,17 +54,19 @@ def getDates(self):
# Differentiate between the release FtpPoller and the Mobile
# FtpPoller
+
+
class MobileFtpPoller(base.ChangeSource):
"""This source will poll an ftp directory for changes and submit
them to the change master."""
-
+
compare_attrs = ["ftpURLs", "pollInterval", "tree", "branch"]
-
- parent = None # filled in when we're added
+
+ parent = None # filled in when we're added
loop = None
volatile = ['loop']
working = 0
-
+
def __init__(self, branch="", tree="Firefox", pollInterval=30,
ftpURLs=[], searchString="", idleTimeout=None):
"""
@@ -69,25 +74,25 @@ def __init__(self, branch="", tree="Firefox", pollInterval=30,
@param ftpURLs: The ftp directories to monitor
@type tree: string
- @param tree: The tree to look for changes in.
+ @param tree: The tree to look for changes in.
For example, Firefox trunk is 'Firefox'
@type branch: string
@param branch: The branch to look for changes in. This must
match the 'branch' option for the Scheduler.
@type pollInterval: int
- @param pollInterval: The time (in seconds) between queries for
+ @param pollInterval: The time (in seconds) between queries for
changes
@type searchString: string
@param searchString: file type of the build we are looking for
"""
-
+
self.ftpURLs = ftpURLs
self.tree = tree
self.branch = branch
self.pollInterval = pollInterval
self.lastChanges = {}
for url in self.ftpURLs:
- self.lastChanges[url] = time.time()
+ self.lastChanges[url] = time.time()
self.searchString = searchString
self.idleTimeout = idleTimeout
self.idleTimer = None
@@ -119,29 +124,29 @@ def doIdleBuild(self):
def startService(self):
self.loop = LoopingCall(self.poll)
base.ChangeSource.startService(self)
-
+
reactor.callLater(0, self.loop.start, self.pollInterval)
-
+
def stopService(self):
self.loop.stop()
return base.ChangeSource.stopService(self)
-
+
def describe(self):
string = ""
string += "Getting changes from directory %s " \
- % str(self.ftpURLs)
+ % str(self.ftpURLs)
string += "<br>Using tree: %s, branch %s" % (self.tree, self.branch)
return string
-
+
def poll(self):
if self.working > 0:
log.msg("Not polling Tinderbox because last poll is still working (%s)" % (str(self.working)))
else:
for url in self.ftpURLs:
- self.working = self.working + 1
- d = self._get_changes(url)
- d.addCallback(self._process_changes, 0)
- d.addBoth(self._finished)
+ self.working = self.working + 1
+ d = self._get_changes(url)
+ d.addCallback(self._process_changes, 0)
+ d.addBoth(self._finished)
return
def _finished(self, res):
@@ -150,9 +155,9 @@ def _finished(self, res):
def _get_changes(self, url):
log.msg("Polling dir %s" % url)
return defer.maybeDeferred(urlopen, url)
-
+
def _process_changes(self, query, forceDate):
-
+
try:
url = query.geturl()
parser = MobileFtpParser(query, self.searchString)
@@ -164,7 +169,7 @@ def _process_changes(self, query, forceDate):
except EmptyResult:
return
- #figure out if there is a new directory that needs searching
+ # figure out if there is a new directory that needs searching
for dir in dirList:
buildDate = int(dir)
if self.lastChanges[url] >= buildDate:
@@ -179,20 +184,20 @@ def _process_changes(self, query, forceDate):
d.addBoth(self._process_changes, buildDate)
d.addBoth(self._finished)
- #if we have a new browser to test, test it
+ # if we have a new browser to test, test it
for buildname, fullpath, buildDate in dateList:
if (url in self.lastChanges):
if (self.lastChanges[url] >= buildDate):
# change too old
- continue
+ continue
if forceDate > 0:
buildDate = forceDate
else:
self.lastChanges[url] = buildDate
- c = changes.Change(who = url,
- comments = "success",
- files = [fullpath,],
- branch = self.branch,
- when = buildDate,)
+ c = changes.Change(who=url,
+ comments="success",
+ files=[fullpath, ],
+ branch=self.branch,
+ when=buildDate,)
self.parent.addChange(c)
log.msg("found a browser to test (%s)" % (fullpath))
View
104 changes/tinderboxpoller.py
@@ -7,85 +7,94 @@
from buildbot.changes import base, changes
+
class InvalidResultError(Exception):
def __init__(self, value="InvalidResultError"):
self.value = value
+
def __str__(self):
return repr(self.value)
+
class EmptyResult(Exception):
pass
+
class NoMoreBuildNodes(Exception):
pass
+
class NoMoreFileNodes(Exception):
pass
+
class TinderboxResult:
"""I hold a list of dictionaries representing build nodes
items = hostname, status and date of change"""
-
+
nodes = []
-
+
def __init__(self, nodes):
self.nodes = nodes
-
+
def __eq__(self, other):
if len(self.nodes) != len(other.nodes):
return False
for i in range(len(self.nodes)):
if self.nodes[i] != other.nodes[i]:
return False
-
+
return True
-
+
def nodeForHostname(self, nameString):
"""returnt the node for a nameString"""
for node in self.nodes:
if nameString in node['hostname']:
return node
-
+
return None
-
+
class TinderboxParser:
"""I parse the pipe-delimited result from a Tinderbox quickparse query."""
-
+
def __init__(self, s):
nodes = []
lines = s.split('\n')
for line in lines:
- if line == "": continue
+ if line == "":
+ continue
elements = line.split('|')
- if elements[0] == 'State': continue
- items = {'hostname': elements[2], 'status': elements[3], 'date': elements[4]}
+ if elements[0] == 'State':
+ continue
+ items = {'hostname': elements[2], 'status':
+ elements[3], 'date': elements[4]}
nodes.append(items)
self.tinderboxResult = TinderboxResult(nodes)
-
+
def getData(self):
return self.tinderboxResult
-
+
class TinderboxPoller(base.ChangeSource):
"""This source will poll a tinderbox server for changes and submit
them to the change master."""
-
+
compare_attrs = ["tinderboxURL", "pollInterval", "tree", "branch"]
-
- parent = None # filled in when we're added
+
+ parent = None # filled in when we're added
loop = None
volatile = ['loop']
working = False
debug = False
-
+
def __init__(self, tinderboxURL, branch, tree="Firefox", machine="", pollInterval=30):
"""
@type tinderboxURL: string
@param tinderboxURL: The base URL of the Tinderbox server
(ie. http://tinderbox.mozilla.org)
@type tree: string
- @param tree: The tree to look for changes in.
+ @param tree: The tree to look for changes in.
For example, Firefox trunk is 'Firefox'
@type branch: string
@param branch: The branch to look for changes in. This must
@@ -95,10 +104,10 @@ def __init__(self, tinderboxURL, branch, tree="Firefox", machine="", pollInterva
only register for machines that match the
substring "machine"
@type pollInterval: int
- @param pollInterval: The time (in seconds) between queries for
+ @param pollInterval: The time (in seconds) between queries for
changes
"""
-
+
self.tinderboxURL = tinderboxURL
self.tree = tree
self.branch = branch
@@ -107,24 +116,25 @@ def __init__(self, tinderboxURL, branch, tree="Firefox", machine="", pollInterva
self.previousChange = ''
self.lastPoll = time.time()
self.lastChange = time.time()
-
+
def startService(self):
self.loop = LoopingCall(self.poll)
base.ChangeSource.startService(self)
-
+
reactor.callLater(0, self.loop.start, self.pollInterval)
-
+
def stopService(self):
self.loop.stop()
return base.ChangeSource.stopService(self)
-
+
def describe(self):
str = ""
str += "Getting changes from the Tinderbox service running at %s " \
- % self.tinderboxURL
- str += "<br>Using tree: %s, branch %s, hostname %s" % (self.tree, self.branch, self.machine)
+ % self.tinderboxURL
+ str += "<br>Using tree: %s, branch %s, hostname %s" % (
+ self.tree, self.branch, self.machine)
return str
-
+
def poll(self):
if self.working:
log.msg("Not polling Tinderbox because last poll is still working")
@@ -133,38 +143,38 @@ def poll(self):
d = self._get_changes()
d.addCallbacks(self._gotPage, self._gotError)
return
-
+
def _gotPage(self, content):
if self.debug:
- log.msg("_gotPage: %s" % content.split('\n',1)[0])
+ log.msg("_gotPage: %s" % content.split('\n', 1)[0])
self._process_changes(content)
self._finished()
pass
-
+
def _gotError(self, error):
log.msg("quickparse.txt failed to load: %s" % error)
self._finished()
-
+
def _finished(self):
assert self.working
self.working = False
-
+
def _make_url(self):
# build the tinderbox URL
url = self.tinderboxURL
url += "/" + self.tree
url += "/" + "quickparse.txt"
-
+
return url
-
+
def _get_changes(self):
url = self._make_url()
log.msg("Polling Tinderbox tree at %s" % url)
-
+
self.lastPoll = time.time()
# send of the page load request
return getPage(url, timeout=self.pollInterval)
-
+
def _process_changes(self, content):
try:
tp = TinderboxParser(content)
@@ -174,7 +184,7 @@ def _process_changes(self, content):
return
except EmptyResult:
return
-
+
# check machine substring in result set
if self.machine:
node = result.nodeForHostname(self.machine)
@@ -182,7 +192,7 @@ def _process_changes(self, content):
result = TinderboxResult([node])
else:
return
-
+
# see if there are any new changes
if self.previousChange:
if (self.previousChange == result.nodes):
@@ -196,7 +206,7 @@ def _process_changes(self, content):
else:
self.previousChange = result.nodes
return
-
+
allBuildDates = []
for buildNode in result.nodes:
buildDate = int(buildNode['date'])
@@ -205,13 +215,13 @@ def _process_changes(self, content):
log.msg("dropping old build from %s" % buildNode['hostname'])
continue
allBuildDates.append(buildDate)
- c = changes.Change(who = buildNode['hostname'],
- files = ['TODO: filename goes here'],
- comments = buildNode['status'],
- branch = self.branch,
- when = buildDate)
+ c = changes.Change(who=buildNode['hostname'],
+ files=['TODO: filename goes here'],
+ comments=buildNode['status'],
+ branch=self.branch,
+ when=buildDate)
self.parent.addChange(c)
-
+
# do not allow repeats - count the last change as the largest
# build start time that has been seen
if allBuildDates:
@@ -225,12 +235,14 @@ def _process_changes(self, content):
tb.debug = True
from datetime import datetime
+
def timestamp2iso(n):
ts = datetime.fromtimestamp(n)
return ts.isoformat(' ')
class dummyParent:
needsShutDown = True
+
def addChange(self, change):
log.msg("Found new build, : %s, %s" % (change.who,
timestamp2iso(change.when)))
@@ -242,5 +254,5 @@ def addChange(self, change):
tb.parent = dummyParent()
tb.startService()
-
+
reactor.run()
View
15 common.py
@@ -1,7 +1,9 @@
-import time, uuid
+import time
+import uuid
+
def getSupportedPlatforms():
- return ('linux', 'linuxqt','linux64',
+ return ('linux', 'linuxqt', 'linux64',
'win32', 'macosx', 'macosx64',
'win64', 'android',
'ics_armv7a_gecko',
@@ -11,6 +13,7 @@ def getSupportedPlatforms():
'macosx64_gecko_localizer', 'win32_gecko_localizer',
'linux64_gecko_localizer')
+
def getPlatformFtpDir(platform):
platform_ftp_map = {
'linux': 'linux-i686',
@@ -23,20 +26,24 @@ def getPlatformFtpDir(platform):
}
return platform_ftp_map.get(platform)
+
def genBuildID(now=None):
"""Return a buildid based on the current time"""
if not now:
now = time.time()
return time.strftime("%Y%m%d%H%M%S", time.localtime(now))
+
def genBuildUID():
"""Return a unique build uid"""
return uuid.uuid4().hex
+
def incrementBuildID(buildID):
"""Add 1 second to a buildID, handling rollovers to next minute/hour/etc"""
epoch = time.mktime(time.strptime(buildID, "%Y%m%d%H%M%S"))
- return genBuildID(epoch+1)
+ return genBuildID(epoch + 1)
+
def reallyShort(name, product=None):
# FIXME: hacky workaround to fix thunderbird windows builds
@@ -85,6 +92,7 @@ def reallyShort(name, product=None):
'snowleopard': 'snow',
'fedora': 'fed',
'fedora64': 'fed64',
+ 'ubuntu64': 'ub64',
'repack': 'rpk',
'alder': 'a',
'holly': 'h',
@@ -111,4 +119,3 @@ def reallyShort(name, product=None):
else:
new_words.append(word)
return prefix + '-'.join(new_words)
-
View
96 env.py
@@ -35,35 +35,35 @@
"FrameworkSDKDir": 'D:\\msvs8\\SDK\\v2.0',
"DevEnvDir": "D:\\msvs8\\VC\\Common7\\IDE",
"MSVCDir": 'D:\\msvs8\\VC',
- "PATH": 'd:\\sdks\\v6.0\\bin;' + \
- 'D:\\msvs8\\Common7\\IDE;' + \
- 'D:\\msvs8\\VC\\bin;' + \
- 'C:\\msvs8\\SDK\\bin;' + \
- 'D:\\msvs8\\VC;' + \
- 'D:\\msvs8\\Common7\\Tools;' + \
- 'D:\\msvs8\\Common7\\Tools\\bin;' + \
- 'D:\\mozilla-build\\buildbotve\\scripts;' +\
- 'd:\\mozilla-build\\hg;' + \
- 'd:\\mozilla-build\\moztools\\bin;' + \
- 'd:\\mozilla-build\\msys\\local\\bin;' + \
- 'd:\\mozilla-build\\msys\\bin;' + \
- 'd:\\mozilla-build\\7zip;' + \
- 'd:\\mozilla-build\\upx203w;' + \
- 'd:\\mozilla-build\\python25;' + \
- 'd:\\mozilla-build\\blat261\\full;' + \
- 'd:\\mozilla-build\\info-zip;' + \
- 'd:\\mozilla-build\\wget;' + \
+ "PATH": 'd:\\sdks\\v6.0\\bin;' +
+ 'D:\\msvs8\\Common7\\IDE;' +
+ 'D:\\msvs8\\VC\\bin;' +
+ 'C:\\msvs8\\SDK\\bin;' +
+ 'D:\\msvs8\\VC;' +
+ 'D:\\msvs8\\Common7\\Tools;' +
+ 'D:\\msvs8\\Common7\\Tools\\bin;' +
+ 'D:\\mozilla-build\\buildbotve\\scripts;' +
+ 'd:\\mozilla-build\\hg;' +
+ 'd:\\mozilla-build\\moztools\\bin;' +
+ 'd:\\mozilla-build\\msys\\local\\bin;' +
+ 'd:\\mozilla-build\\msys\\bin;' +
+ 'd:\\mozilla-build\\7zip;' +
+ 'd:\\mozilla-build\\upx203w;' +
+ 'd:\\mozilla-build\\python25;' +
+ 'd:\\mozilla-build\\blat261\\full;' +
+ 'd:\\mozilla-build\\info-zip;' +
+ 'd:\\mozilla-build\\wget;' +
'd:\\mozilla-build\\nsis-2.22;',
- 'D:\\mozilla-build\\nsis-2.33u;' + \
+ 'D:\\mozilla-build\\nsis-2.33u;' +
'd:\\sdks\\v6.0\\bin'
- "INCLUDE": 'D:\\sdks\\v6.0\\include;' + \
- 'D:\\sdks\\v6.0\\include\\atl;' + \
- 'D:\\msvs8\\VC\\ATLMFC\\INCLUDE;' + \
- 'D:\\msvs8\\VC\\INCLUDE;' + \
+ "INCLUDE": 'D:\\sdks\\v6.0\\include;' +
+ 'D:\\sdks\\v6.0\\include\\atl;' +
+ 'D:\\msvs8\\VC\\ATLMFC\\INCLUDE;' +
+ 'D:\\msvs8\\VC\\INCLUDE;' +
'D:\\msvs8\\VC\\PlatformSDK\\include',
- "LIB": 'D:\\sdks\\v6.0\\lib;' + \
- 'D:\\msvs8\\VC\\ATLMFC\\LIB;' + \
- 'D:\\msvs8\\VC\\LIB;' + \
+ "LIB": 'D:\\sdks\\v6.0\\lib;' +
+ 'D:\\msvs8\\VC\\ATLMFC\\LIB;' +
+ 'D:\\msvs8\\VC\\LIB;' +
'D:\\msvs8\\VC\\PlatformSDK\\lib',
"SDKDIR": 'D:\\sdks\\v6.0'
}
@@ -80,9 +80,11 @@
"MOZ_NODE_PATH": "/home/cltbld/bin/node.exe"
}
-MozillaEnvironments['linux64-unittest'] = MozillaEnvironments['linux-unittest'].copy()
+MozillaEnvironments['linux64-unittest'] = MozillaEnvironments[
+ 'linux-unittest'].copy()
# Not sure if I need this environment or not :(
-MozillaEnvironments['linux-mobile-unittest'] = MozillaEnvironments['linux-unittest'].copy()
+MozillaEnvironments['linux-mobile-unittest'] = MozillaEnvironments[
+ 'linux-unittest'].copy()
MozillaEnvironments['macosx-unittest'] = {
"MOZ_NO_REMOTE": '1',
@@ -92,7 +94,8 @@
"MOZ_HIDE_RESULTS_TABLE": '1'
}
-MozillaEnvironments['macosx64-unittest'] = MozillaEnvironments['macosx-unittest'].copy()
+MozillaEnvironments['macosx64-unittest'] = MozillaEnvironments[
+ 'macosx-unittest'].copy()
MozillaEnvironments['win32-unittest'] = {
"MOZ_NO_REMOTE": '1',
@@ -114,7 +117,7 @@
"FrameworkSDKDir": 'D:\\msvs8\\SDK\\v2.0',
"MSVCDir": 'D:\\msvs8\\VC',
"DevEnvDir": "D:\\msvs8\\Common7\\IDE",
- "LIBPATH": 'C:\\WINDOWS\\Microsoft.NET\\Framework\\v2.0.50727;' + \
+ "LIBPATH": 'C:\\WINDOWS\\Microsoft.NET\\Framework\\v2.0.50727;' +
'D:\\msvs8\\VC\\ATLMFC\\LIB',
"MOZ_HIDE_RESULTS_TABLE": '1'
}
@@ -137,11 +140,11 @@
"NO_EM_RESTART": '1',
"XPCOM_DEBUG_BREAK": 'warn',
"CYGWINBASE": 'C:\\cygwin',
- "PATH": 'C:\\Python24;' + \
- 'C:\\Python24\\Scripts;' + \
- 'C:\\cygwin\\bin;' + \
- 'C:\\WINDOWS\\System32;' + \
- 'C:\\program files\\gnuwin32\\bin;' + \
+ "PATH": 'C:\\Python24;' +
+ 'C:\\Python24\\Scripts;' +
+ 'C:\\cygwin\\bin;' +
+ 'C:\\WINDOWS\\System32;' +
+