From 64d835038a936761716e79f43eecfc8d5b71a6ab Mon Sep 17 00:00:00 2001 From: Pierre Tardy Date: Sun, 26 Feb 2017 14:00:51 +0100 Subject: [PATCH] move contrib into buildbot-contrib repository --- Makefile | 2 - master/contrib/README.txt | 47 -- master/contrib/bash/buildbot | 59 --- master/contrib/bitbucket_buildbot.py | 211 -------- master/contrib/bk_buildbot.py | 166 ------ master/contrib/buildbot_cvs_mail.py | 267 ---------- master/contrib/bzr_buildbot.py | 485 ------------------ master/contrib/check_buildbot.py | 99 ---- master/contrib/check_smtp.py | 41 -- master/contrib/coverage2text.py | 122 ----- master/contrib/darcs_buildbot.py | 200 -------- master/contrib/docker/master/README.md | 11 - master/contrib/docker/master/buildbot.tac | 18 - .../contrib/docker/master/start_buildbot.sh | 56 -- .../docker/pythonnode_worker/Dockerfile | 20 - master/contrib/fakechange.py | 87 ---- master/contrib/generate_changelog.py | 75 --- master/contrib/git_buildbot.py | 472 ----------------- master/contrib/github_buildbot.py | 421 --------------- master/contrib/hgbuildbot.py | 268 ---------- .../contrib/init-scripts/buildmaster.default | 12 - .../contrib/init-scripts/buildmaster.init.sh | 182 ------- master/contrib/libvirt/network.xml | 16 - master/contrib/libvirt/vmbuilder | 147 ------ master/contrib/os-x/README | 23 - .../net.sourceforge.buildbot.master.plist | 42 -- master/contrib/post_build_request.py | 245 --------- master/contrib/run_maxq.py | 52 -- master/contrib/svn_buildbot.py | 290 ----------- master/contrib/svn_watcher.py | 238 --------- master/contrib/svnpoller.py | 103 ---- master/contrib/systemd/buildbot.service | 17 - master/contrib/viewcvspoll.py | 102 ---- master/contrib/zsh/_buildbot | 30 -- master/docs/conf.py | 1 + master/docs/manual/cfg-changesources.rst | 37 +- master/docs/manual/cfg-reporters.rst | 2 +- master/docs/manual/cfg-workers-docker.rst | 4 +- master/docs/manual/cfg-workers-libvirt.rst | 2 +- master/docs/manual/cfg-wwwhooks.rst | 6 +- master/docs/manual/deploy.rst | 4 +- master/docs/relnotes/0.8.9.rst | 2 +- master/docs/relnotes/0.9.0.rst | 4 +- master/docs/relnotes/0.9.0b1.rst | 2 +- master/docs/relnotes/0.9.0b6.rst | 2 +- worker/contrib/README.txt | 11 - worker/contrib/bash/buildbot-worker | 50 -- .../init-scripts/buildbot-worker.default | 11 - .../init-scripts/buildbot-worker.init.sh | 210 -------- worker/contrib/os-x/README | 23 - .../net.sourceforge.buildbot.worker.plist | 36 -- .../contrib/systemd/buildbot-worker.service | 17 - worker/contrib/zsh/_buildbot-worker | 30 -- 53 files changed, 33 insertions(+), 5047 deletions(-) delete mode 100644 master/contrib/README.txt delete mode 100644 master/contrib/bash/buildbot delete mode 100755 master/contrib/bitbucket_buildbot.py delete mode 100755 master/contrib/bk_buildbot.py delete mode 100755 master/contrib/buildbot_cvs_mail.py delete mode 100644 master/contrib/bzr_buildbot.py delete mode 100755 master/contrib/check_buildbot.py delete mode 100755 master/contrib/check_smtp.py delete mode 100755 master/contrib/coverage2text.py delete mode 100755 master/contrib/darcs_buildbot.py delete mode 100644 master/contrib/docker/master/README.md delete mode 100644 master/contrib/docker/master/buildbot.tac delete mode 100755 master/contrib/docker/master/start_buildbot.sh delete mode 100644 master/contrib/docker/pythonnode_worker/Dockerfile delete mode 100755 master/contrib/fakechange.py delete mode 100755 master/contrib/generate_changelog.py delete mode 100755 master/contrib/git_buildbot.py delete mode 100755 
master/contrib/github_buildbot.py delete mode 100644 master/contrib/hgbuildbot.py delete mode 100644 master/contrib/init-scripts/buildmaster.default delete mode 100755 master/contrib/init-scripts/buildmaster.init.sh delete mode 100644 master/contrib/libvirt/network.xml delete mode 100755 master/contrib/libvirt/vmbuilder delete mode 100644 master/contrib/os-x/README delete mode 100644 master/contrib/os-x/net.sourceforge.buildbot.master.plist delete mode 100755 master/contrib/post_build_request.py delete mode 100755 master/contrib/run_maxq.py delete mode 100755 master/contrib/svn_buildbot.py delete mode 100755 master/contrib/svn_watcher.py delete mode 100755 master/contrib/svnpoller.py delete mode 100644 master/contrib/systemd/buildbot.service delete mode 100755 master/contrib/viewcvspoll.py delete mode 100644 master/contrib/zsh/_buildbot delete mode 100644 worker/contrib/README.txt delete mode 100644 worker/contrib/bash/buildbot-worker delete mode 100644 worker/contrib/init-scripts/buildbot-worker.default delete mode 100755 worker/contrib/init-scripts/buildbot-worker.init.sh delete mode 100644 worker/contrib/os-x/README delete mode 100644 worker/contrib/os-x/net.sourceforge.buildbot.worker.plist delete mode 100644 worker/contrib/systemd/buildbot-worker.service delete mode 100644 worker/contrib/zsh/_buildbot-worker diff --git a/Makefile b/Makefile index f4f38d26fd2..d0b8663dad9 100644 --- a/Makefile +++ b/Makefile @@ -55,8 +55,6 @@ docker: docker-buildbot-worker docker-buildbot-worker-node docker-buildbot-maste echo done docker-buildbot-worker: $(DOCKERBUILD) -t buildbot/buildbot-worker:master worker -docker-buildbot-worker-node: - $(DOCKERBUILD) -t buildbot/buildbot-worker-node:master master/contrib/docker/pythonnode_worker docker-buildbot-master: $(DOCKERBUILD) -t buildbot/buildbot-master:master master docker-buildbot-master-ubuntu: diff --git a/master/contrib/README.txt b/master/contrib/README.txt deleted file mode 100644 index d8ea32d6b9e..00000000000 --- a/master/contrib/README.txt +++ /dev/null @@ -1,47 +0,0 @@ -Utility scripts, things contributed by users but not strictly a part of -buildbot: - -fakechange.py: connect to a running bb and submit a fake change to trigger - builders - -generate_changelog.py: generated changelog entry using git. Requires git to - be installed. - -run_maxq.py: a builder-helper for running maxq under buildbot - -svn_buildbot.py: a script intended to be run from a subversion hook-script - which submits changes to svn (requires python 2.3) - -svnpoller.py: this script is intended to be run from a cronjob, and uses 'svn - log' to poll a (possibly remote) SVN repository for changes. - For each change it finds, it runs 'buildbot sendchange' to - deliver them to a waiting PBChangeSource on a (possibly remote) - buildmaster. Modify the svnurl to point at your own SVN - repository, and of course the user running the script must have - read permissions to that repository. It keeps track of the last - revision in a file, change 'fname' to set the location of this - state file. Modify the --master argument to the 'buildbot - sendchange' command to point at your buildmaster. Contributed - by John Pye. Note that if there are multiple changes within a - single polling interval, this will miss all but the last one. - -svn_watcher.py: adapted from svnpoller.py by Niklaus Giger to add options and - run under windows. Runs as a standalone script (it loops - internally rather than expecting to run from a cronjob), - polls an SVN repository every 10 minutes. 
It expects the - svnurl and buildmaster location as command-line arguments. - -viewcvspoll.py: a standalone script which loops every 60 seconds and polls a - (local?) MySQL database (presumably maintained by ViewCVS?) - for information about new CVS changes, then delivers them - over PB to a remote buildmaster's PBChangeSource. Contributed - by Stephen Kennedy. - -zsh/_buildbot: zsh tab-completion file for 'buildbot' command. Put it in one - of the directories appearing in $fpath to enable tab-completion - in zsh. - -bash/buildbot: bash tab-completion file for 'buildbot' command. Source this - file to enable completions in your bash session. This is - typically accomplished by placing the file into the - appropriate 'bash_completion.d' directory. diff --git a/master/contrib/bash/buildbot b/master/contrib/bash/buildbot deleted file mode 100644 index e395fe8cb38..00000000000 --- a/master/contrib/bash/buildbot +++ /dev/null @@ -1,59 +0,0 @@ -# -# This file installs BASH completions for 'buildbot' command. -# - -_buildbot() -{ - local buildbot_subcommands=" - create-master upgrade-master start stop restart reconfig sighup user - sendchange try tryserver checkconfig" - - local cur=${COMP_WORDS[COMP_CWORD]} - local subcommand= - local subcommand_args= - local i=1 - - # - # 'parse' the command line so far - # figure out if we have subcommand specified and any arguments to it - # - - # skip global options - while [[ "${COMP_WORDS[$i]}" == -* ]]; - do - i=$(($i+1)) - done - - # save subcommand - subcommand=${COMP_WORDS[$i]} - i=$(($i+1)) - - # skip subcommand options - while [[ "${COMP_WORDS[$i]}" == -* ]]; - do - i=$(($i+1)) - done - - # save subcommand arguments - subcommand_args=${COMP_WORDS[@]:$i:${#COMP_WORDS[@]}} - - if [ "$cur" == "$subcommand" ]; then - # suggest buildbot subcommands - COMPREPLY=( $(compgen -W "$buildbot_subcommands" $cur) ) - elif [ "$cur" == "$subcommand_args" ]; then - # we are at first subcommand argument - case $subcommand in - # these command take base directory as first argument, - # suggest directories - upgrade-master|create-master|start|stop|restart|reconfig|sighup) - COMPREPLY=( $(compgen -A directory $cur) ) - ;; - # checkconfig takes a filename or directory as first argument - checkconfig) - COMPREPLY=( $(compgen -A file $cur) ) - ;; - esac - fi -} - -complete -F _buildbot buildbot diff --git a/master/contrib/bitbucket_buildbot.py b/master/contrib/bitbucket_buildbot.py deleted file mode 100755 index 151e1441ad6..00000000000 --- a/master/contrib/bitbucket_buildbot.py +++ /dev/null @@ -1,211 +0,0 @@ -#!/usr/bin/env python -"""Change source forwarder for bitbucket.org POST service. - -bitbucket_buildbot.py will determine the repository information from -the JSON HTTP POST it receives from bitbucket.org and build the -appropriate repository. 
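A typical invocation is sketched below; the port, buildmaster address, and log level shown are simply this script's own option defaults (see the OptionParser setup further down), so adjust them for your deployment:

    python bitbucket_buildbot.py --port 4000 --buildmaster localhost:9989 --level warn

Bitbucket's POST service hook is then pointed at http://<this host>:4000/ so that the JSON payloads reach the listener.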
- -If your bitbucket repository is private, you must add a ssh key to the -bitbucket repository for the user who initiated bitbucket_buildbot.py - -bitbucket_buildbot.py is based on github_buildbot.py -""" - -from __future__ import absolute_import -from __future__ import print_function -from future.utils import iteritems - -import logging -import sys -import tempfile -import traceback -from optparse import OptionParser - -from twisted.cred import credentials -from twisted.internet import reactor -from twisted.spread import pb -from twisted.web import resource -from twisted.web import server - -try: - import json -except ImportError: - import simplejson as json - - -class BitBucketBuildBot(resource.Resource): - - """ - BitBucketBuildBot creates the webserver that responds to the - BitBucket POST Service Hook. - """ - isLeaf = True - bitbucket = None - master = None - port = None - private = False - - def render_POST(self, request): - """ - Responds only to POST events and starts the build process - - :arguments: - request - the http request object - """ - try: - payload = json.loads(request.args['payload'][0]) - logging.debug("Payload: " + str(payload)) - self.process_change(payload) - except Exception: - logging.error("Encountered an exception:") - for msg in traceback.format_exception(*sys.exc_info()): - logging.error(msg.strip()) - - def process_change(self, payload): - """ - Consumes the JSON as a python object and actually starts the build. - - :arguments: - payload - Python Object that represents the JSON sent by Bitbucket POST - Service Hook. - """ - if self.private: - repo_url = 'ssh://hg@%s%s' % ( - self.bitbucket, - payload['repository']['absolute_url'], - ) - else: - repo_url = 'http://%s%s' % ( - self.bitbucket, - payload['repository']['absolute_url'], - ) - changes = [] - for commit in payload['commits']: - files = [file_info['file'] for file_info in commit['files']] - revlink = 'http://%s%s/changeset/%s/' % ( - self.bitbucket, - payload['repository']['absolute_url'], - commit['node'], - ) - change = { - 'revision': commit['node'], - 'revlink': revlink, - 'comments': commit['message'], - 'who': commit['author'], - 'files': files, - 'repository': repo_url, - 'properties': dict(), - } - changes.append(change) - # Submit the changes, if any - if not changes: - logging.warning("No changes found") - return - host, port = self.master.split(':') - port = int(port) - factory = pb.PBClientFactory() - deferred = factory.login(credentials.UsernamePassword("change", - "changepw")) - logging.debug('Trying to connect to: %s:%d', host, port) - reactor.connectTCP(host, port, factory) - deferred.addErrback(self.connectFailed) - deferred.addCallback(self.connected, changes) - - def connectFailed(self, error): - """ - If connection is failed. Logs the error. - """ - logging.error("Could not connect to master: %s", - error.getErrorMessage()) - return error - - def addChange(self, dummy, remote, changei, src='hg'): - """ - Sends changes from the commit to the buildmaster. 
- """ - logging.debug("addChange %s, %s", repr(remote), repr(changei)) - try: - change = changei.next() - except StopIteration: - remote.broker.transport.loseConnection() - return None - - logging.info("New revision: %s", change['revision'][:8]) - for key, value in iteritems(change): - logging.debug(" %s: %s", key, value) - - change['src'] = src - deferred = remote.callRemote('addChange', change) - deferred.addCallback(self.addChange, remote, changei, src) - return deferred - - def connected(self, remote, changes): - """ - Responds to the connected event. - """ - return self.addChange(None, remote, changes.__iter__()) - - -def main(): - """ - The main event loop that starts the server and configures it. - """ - usage = "usage: %prog [options]" - parser = OptionParser(usage) - parser.add_option( - "-p", "--port", - help="Port the HTTP server listens to for the Bitbucket Service Hook" - " [default: %default]", default=4000, type=int, dest="port") - parser.add_option( - "-m", "--buildmaster", - help="Buildbot Master host and port. ie: localhost:9989 [default:" - + " %default]", default="localhost:9989", dest="buildmaster") - parser.add_option( - "-l", "--log", - help="The absolute path, including filename, to save the log to" - " [default: %default]", - default=tempfile.gettempdir() + "/bitbucket_buildbot.log", - dest="log") - parser.add_option( - "-L", "--level", - help="The logging level: debug, info, warn, error, fatal [default:" - " %default]", default='warn', dest="level") - parser.add_option( - "-g", "--bitbucket", - help="The bitbucket serve [default: %default]", - default='bitbucket.org', - dest="bitbucket") - parser.add_option( - '-P', '--private', - help='Use SSH to connect, for private repositories.', - dest='private', - default=False, - action='store_true', - ) - (options, _) = parser.parse_args() - # Set up logging. - levels = { - 'debug': logging.DEBUG, - 'info': logging.INFO, - 'warn': logging.WARNING, - 'error': logging.ERROR, - 'fatal': logging.FATAL, - } - filename = options.log - log_format = "%(asctime)s - %(levelname)s - %(message)s" - logging.basicConfig(filename=filename, format=log_format, - level=levels[options.level]) - # Start listener. - bitbucket_bot = BitBucketBuildBot() - bitbucket_bot.bitbucket = options.bitbucket - bitbucket_bot.master = options.buildmaster - bitbucket_bot.private = options.private - site = server.Site(bitbucket_bot) - reactor.listenTCP(options.port, site) - reactor.run() - - -if __name__ == '__main__': - main() diff --git a/master/contrib/bk_buildbot.py b/master/contrib/bk_buildbot.py deleted file mode 100755 index 6bbddd5b9b0..00000000000 --- a/master/contrib/bk_buildbot.py +++ /dev/null @@ -1,166 +0,0 @@ -#!/usr/local/bin/python -# -# BitKeeper hook script. -# -# svn_buildbot.py was used as a base for this file, if you find any bugs or -# errors please email me. 
-# -# Amar Takhar - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import commands -import sys - -from twisted.cred import credentials -from twisted.internet import reactor -from twisted.python import usage -from twisted.spread import pb - -''' -/path/to/bk_buildbot.py --repository "$REPOS" --revision "$REV" --branch \ -"" --bbserver localhost --bbport 9989 -''' - - -# We have hackish "-d" handling here rather than in the Options -# subclass below because a common error will be to not have twisted in -# PYTHONPATH; we want to be able to print that error to the log if -# debug mode is on, so we set it up before the imports. - -DEBUG = None - -if '-d' in sys.argv: - i = sys.argv.index('-d') - DEBUG = sys.argv[i + 1] - del sys.argv[i] - del sys.argv[i] - -if DEBUG: - f = open(DEBUG, 'a') - sys.stderr = f - sys.stdout = f - - -class Options(usage.Options): - optParameters = [ - ['repository', 'r', None, - "The repository that was changed."], - ['revision', 'v', None, - "The revision that we want to examine (default: latest)"], - ['branch', 'b', None, - "Name of the branch to insert into the branch field. (REQUIRED)"], - ['category', 'c', None, - "Schedular category."], - ['bbserver', 's', 'localhost', - "The hostname of the server that buildbot is running on"], - ['bbport', 'p', 8007, - "The port that buildbot is listening on"] - ] - optFlags = [ - ['dryrun', 'n', "Do not actually send changes"], - ] - - def __init__(self): - usage.Options.__init__(self) - - def postOptions(self): - if self['repository'] is None: - raise usage.error("You must pass --repository") - - -class ChangeSender: - - def getChanges(self, opts): - """Generate and stash a list of Change dictionaries, ready to be sent - to the buildmaster's PBChangeSource.""" - - # first we extract information about the files that were changed - repo = opts['repository'] - print("Repo:", repo) - rev_arg = '' - if opts['revision']: - rev_arg = '-r"%s"' % (opts['revision'], ) - changed = commands.getoutput("bk changes -v %s -d':GFILE:\\n' '%s'" % ( - rev_arg, repo)).split('\n') - - # Remove the first line, it's an info message you can't remove - # (annoying) - del changed[0] - - change_info = commands.getoutput("bk changes %s -d':USER:\\n$each(:C:){(:C:)\\n}' '%s'" % ( - rev_arg, repo)).split('\n') - - # Remove the first line, it's an info message you can't remove - # (annoying) - del change_info[0] - - who = change_info.pop(0) - branch = opts['branch'] - message = '\n'.join(change_info) - revision = opts.get('revision') - - changes = {'who': who, - 'branch': branch, - 'files': changed, - 'comments': message, - 'revision': revision} - - if opts.get('category'): - changes['category'] = opts.get('category') - - return changes - - def sendChanges(self, opts, changes): - pbcf = pb.PBClientFactory() - reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf) - d = pbcf.login(credentials.UsernamePassword('change', 'changepw')) - d.addCallback(self.sendAllChanges, changes) - return d - - def sendAllChanges(self, remote, changes): - dl = remote.callRemote('addChange', changes) - return dl - - def run(self): - opts = Options() - try: - opts.parseOptions() - if not opts['branch']: - print("You must supply a branch with -b or --branch.") - sys.exit(1) - - except usage.error as ue: - print(opts) - print("%s: %s" % (sys.argv[0], ue)) - sys.exit() - - changes = self.getChanges(opts) - if opts['dryrun']: - for k in changes.keys(): - print("[%10s]: %s" % (k, changes[k])) - 
print("*NOT* sending any changes") - return - - d = self.sendChanges(opts, changes) - - def quit(*why): - print("quitting! because", why) - reactor.stop() - - @d.addErrback(failed) - def failed(f): - print("FAILURE: %s" % f) - reactor.stop() - - d.addCallback(quit, "SUCCESS") - reactor.callLater(60, quit, "TIMEOUT") - - reactor.run() - - -if __name__ == '__main__': - s = ChangeSender() - s.run() diff --git a/master/contrib/buildbot_cvs_mail.py b/master/contrib/buildbot_cvs_mail.py deleted file mode 100755 index 4d06be3768c..00000000000 --- a/master/contrib/buildbot_cvs_mail.py +++ /dev/null @@ -1,267 +0,0 @@ -#!/usr/bin/env python -# -# Buildbot CVS Mail -# -# This script was derrived from syncmail, -# Copyright (c) 2002-2006 Barry Warsaw, Fred Drake, and contributors -# -# http://cvs-syncmail.cvs.sourceforge.net -# -# The script was re-written with the sole pupose of providing updates to -# Buildbot master by Andy Howell -# -# Options handling done right by djmitche - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals - -import optparse -import os -import re -import smtplib -import socket -import sys -import textwrap -import time -from email.utils import formataddr -from io import StringIO - -""" - -t - --testing - Construct message and send to stdout for testing - -The rest of the command line arguments are: - - %%{sVv} - CVS %%{sVv} loginfo expansion. When invoked by CVS, this will be a single - string containing the files that are changing. - -""" -__version__ = '$Revision: 1.3 $' - - -try: - import pwd -except ImportError: - # pwd is not available on Windows.. - pwd = None - -COMMASPACE = ', ' - -PROGRAM = sys.argv[0] - - -class SmtplibMock: - - """I stand in for smtplib for testing purposes. - """ - class SMTP: - - """I stand in for smtplib.SMTP connection for testing purposes. - I copy the message to stdout. 
- """ - - def close(self): - pass - - def connect(self, mailhost, mailport): - pass - - def sendmail(self, address, email, msg): - sys.stdout.write(msg) - - -rfc822_specials_re = re.compile(r'[\(\)<>@,;:\\\"\.\[\]]') - - -def quotename(name): - if name and rfc822_specials_re.search(name): - return '"%s"' % name.replace('"', '\\"') - else: - return name - - -def send_mail(options): - # Create the smtp connection to the localhost - conn = options.smtplib.SMTP() - conn.connect(options.mailhost, options.mailport) - if pwd: - pwinfo = pwd.getpwuid(os.getuid()) - user = pwinfo[0] - name = pwinfo[4] - else: - user = 'cvs' - name = 'CVS' - - domain = options.fromhost - if not domain: - # getfqdn is not good for use in unit tests - if options.amTesting: - domain = 'testing.com' - else: - domain = socket.getfqdn() - address = '%s@%s' % (user, domain) - s = StringIO() - datestamp = time.strftime('%a, %d %b %Y %H:%M:%S +0000', - time.gmtime(time.time())) - fileList = ' '.join(map(str, options.files)) - - vars = {'author': formataddr((name, address)), - 'email': options.email, - 'subject': 'cvs update for project %s' % options.project, - 'version': __version__, - 'date': datestamp, - } - print('''\ -From: %(author)s -To: %(email)s''' % vars, file=s) - if options.replyto: - print('Reply-To: %s' % options.replyto, file=s) - print('''\ -Subject: %(subject)s -Date: %(date)s -X-Mailer: Python buildbot-cvs-mail %(version)s -''' % vars, file=s) - print('Cvsmode: %s' % options.cvsmode, file=s) - print('Category: %s' % options.category, file=s) - print('CVSROOT: %s' % options.cvsroot, file=s) - print('Files: %s' % fileList, file=s) - if options.path: - print('Path: %s' % options.path, file=s) - print('Project: %s' % options.project, file=s) - cvs_input = sys.stdin.read() - - # On Python 2, sys.stdin.read() returns bytes, but - # on Python 3, it returns unicode str. - if isinstance(cvs_input, bytes): - cvs_input = cvs_input.decode("utf-8") - - s.write(cvs_input) - print('', file=s) - conn.sendmail(address, options.email, s.getvalue()) - conn.close() - - -def fork_and_send_mail(options): - # cannot wait for child process or that will cause parent to retain cvs - # lock for too long. Urg! - if not os.fork(): - # in the child - # give up the lock you cvs thang! - time.sleep(2) - send_mail(options) - os._exit(0) - -description = """ -This script is used to provide email notifications of changes to the CVS -repository to a buildbot master. It is invoked via a CVS loginfo file (see -$CVSROOT/CVSROOT/loginfo). See the Buildbot manual for more information. -""" -usage = "%prog [options] %{sVv}" -parser = optparse.OptionParser(description=description, - usage=usage, - add_help_option=True, - version=__version__) - -parser.add_option("-C", "--category", dest='category', metavar="CAT", - help=textwrap.dedent("""\ - Category for change. This becomes the Change.category attribute, which - can be used within the buildmaster to filter changes. - """)) -parser.add_option("-c", "--cvsroot", dest='cvsroot', metavar="PATH", - help=textwrap.dedent("""\ - CVSROOT for use by buildbot workers to checkout code. - This becomes the Change.repository attribute. - Exmaple: :ext:myhost:/cvsroot - """)) -parser.add_option("-e", "--email", dest='email', metavar="EMAIL", - help=textwrap.dedent("""\ - Email address of the buildbot. - """)) -parser.add_option("-f", "--fromhost", dest='fromhost', metavar="HOST", - help=textwrap.dedent("""\ - The hostname that email messages appear to be coming from. 
The From: - header of the outgoing message will look like user@hostname. By - default, hostname is the machine's fully qualified domain name. - """)) -parser.add_option("-m", "--mailhost", dest='mailhost', metavar="HOST", - default="localhost", - help=textwrap.dedent("""\ - The hostname of an available SMTP server. The default is - 'localhost'. - """)) -parser.add_option("--mailport", dest='mailport', metavar="PORT", - default=25, type="int", - help=textwrap.dedent("""\ - The port number of SMTP server. The default is '25'. - """)) -parser.add_option("-q", "--quiet", dest='verbose', action="store_false", - default=True, - help=textwrap.dedent("""\ - Don't print as much status to stdout. - """)) -parser.add_option("-p", "--path", dest='path', metavar="PATH", - help=textwrap.dedent("""\ - The path for the files in this update. This comes from the %p parameter - in loginfo for CVS version 1.12.x. Do not use this for CVS version 1.11.x - """)) -parser.add_option("-P", "--project", dest='project', metavar="PROJ", - help=textwrap.dedent("""\ - The project for the source. Often set to the CVS module being modified. This becomes - the Change.project attribute. - """)) -parser.add_option("-R", "--reply-to", dest='replyto', metavar="ADDR", - help=textwrap.dedent("""\ - Add a "Reply-To: ADDR" header to the email message. - """)) -parser.add_option( - "-t", "--testing", action="store_true", dest="amTesting", default=False) -parser.set_defaults(smtplib=smtplib) - - -def get_options(): - options, args = parser.parse_args() - - # rest of command line are the files. - options.files = args - if options.path is None: - options.cvsmode = '1.11' - else: - options.cvsmode = '1.12' - - if options.cvsroot is None: - parser.error('--cvsroot is required') - if options.email is None: - parser.error('--email is required') - - # set up for unit tests - if options.amTesting: - options.verbose = 0 - options.smtplib = SmtplibMock - - return options - -# scan args for options - - -def main(): - options = get_options() - - if options.verbose: - print('Mailing %s...' % options.email) - print('Generating notification message...') - if options.amTesting: - send_mail(options) - else: - fork_and_send_mail(options) - - if options.verbose: - print('Generating notification message... done.') - return 0 - -if __name__ == '__main__': - ret = main() - sys.exit(ret) diff --git a/master/contrib/bzr_buildbot.py b/master/contrib/bzr_buildbot.py deleted file mode 100644 index acc42ff8beb..00000000000 --- a/master/contrib/bzr_buildbot.py +++ /dev/null @@ -1,485 +0,0 @@ -# Copyright (C) 2008-2009 Canonical -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -"""\ -bzr buildbot integration -======================== - -This file contains both bzr commit/change hooks and a bzr poller. - ------------- -Requirements ------------- - -This has been tested with buildbot 0.7.9, bzr 1.10, and Twisted 8.1.0. It -should work in subsequent releases. 
- -For the hook to work, Twisted must be installed in the same Python that bzr -uses. - ------ -Hooks ------ - -To install, put this file in a bzr plugins directory (e.g., -~/.bazaar/plugins). Then, in one of your bazaar conf files (e.g., -~/.bazaar/locations.conf), set the location you want to connect with buildbot -with these keys: - -- buildbot_on: one of 'commit', 'push, or 'change'. Turns the plugin on to - report changes via commit, changes via push, or any changes to the trunk. - 'change' is recommended. - -- buildbot_server: (required to send to a buildbot master) the URL of the - buildbot master to which you will connect (as of this writing, the same - server and port to which workers connect). - -- buildbot_port: (optional, defaults to 9989) the port of the buildbot master - to which you will connect (as of this writing, the same server and port to - which workers connect) - -- buildbot_auth: (optional, defaults to change:changepw) the credentials - expected by the change source configuration in the master. Takes the - "user:password" form. - -- buildbot_pqm: (optional, defaults to not pqm) Normally, the user that - commits the revision is the user that is responsible for the change. When - run in a pqm (Patch Queue Manager, see https://launchpad.net/pqm) - environment, the user that commits is the Patch Queue Manager, and the user - that committed the *parent* revision is responsible for the change. To turn - on the pqm mode, set this value to any of (case-insensitive) "Yes", "Y", - "True", or "T". - -- buildbot_dry_run: (optional, defaults to not a dry run) Normally, the - post-commit hook will attempt to communicate with the configured buildbot - server and port. If this parameter is included and any of (case-insensitive) - "Yes", "Y", "True", or "T", then the hook will simply print what it would - have sent, but not attempt to contact the buildbot master. - -- buildbot_send_branch_name: (optional, defaults to not sending the branch - name) If your buildbot's bzr source build step uses a repourl, do - *not* turn this on. If your buildbot's bzr build step uses a baseURL, then - you may set this value to any of (case-insensitive) "Yes", "Y", "True", or - "T" to have the buildbot master append the branch name to the baseURL. - -Note: The bzr smart server (as of version 2.2.2) doesn't know how to resolve -bzr:// urls into absolute paths so any paths in locations.conf won't match, -hence no change notifications will be sent to Buildbot. Setting configuration -parameters globally or in-branch might still work. - -When buildbot no longer has a hardcoded password, it will be a configuration -option here as well. - ------- -Poller ------- - -See the Buildbot manual. - -------------------- -Contact Information -------------------- - -Maintainer/author: gary.poster@canonical.com -""" - -from __future__ import absolute_import -from __future__ import print_function - -# Work around Twisted bug. 
-# See http://twistedmatrix.com/trac/ticket/3591 -import operator -# -import socket - -import twisted.cred.credentials -import twisted.internet.base -import twisted.internet.reactor -import twisted.internet.selectreactor -import twisted.internet.task -import twisted.internet.threads -import twisted.python.log -import twisted.spread.pb -from twisted.internet import defer -from twisted.python import failure - -import bzrlib.branch -import bzrlib.errors -import bzrlib.trace - -try: - import buildbot.util - import buildbot.changes.base - import buildbot.changes.changes -except ImportError: - DEFINE_POLLER = False -else: - DEFINE_POLLER = True - - -# -# This is the code that the poller and the hooks share. - -def generate_change(branch, - old_revno=None, old_revid=None, - new_revno=None, new_revid=None, - blame_merge_author=False): - """Return a dict of information about a change to the branch. - - Dict has keys of "files", "who", "comments", and "revision", as used by - the buildbot Change (and the PBChangeSource). - - If only the branch is given, the most recent change is returned. - - If only the new_revno is given, the comparison is expected to be between - it and the previous revno (new_revno -1) in the branch. - - Passing old_revid and new_revid is only an optimization, included because - bzr hooks usually provide this information. - - blame_merge_author means that the author of the merged branch is - identified as the "who", not the person who committed the branch itself. - This is typically used for PQM. - """ - change = {} # files, who, comments, revision; NOT branch (= branch.nick) - if new_revno is None: - new_revno = branch.revno() - if new_revid is None: - new_revid = branch.get_rev_id(new_revno) - # TODO: This falls over if this is the very first revision - if old_revno is None: - old_revno = new_revno - 1 - if old_revid is None: - old_revid = branch.get_rev_id(old_revno) - repository = branch.repository - new_rev = repository.get_revision(new_revid) - if blame_merge_author: - # this is a pqm commit or something like it - change['who'] = repository.get_revision( - new_rev.parent_ids[-1]).get_apparent_authors()[0] - else: - change['who'] = new_rev.get_apparent_authors()[0] - # maybe useful to know: - # name, email = bzrtools.config.parse_username(change['who']) - change['comments'] = new_rev.message - change['revision'] = new_revno - files = change['files'] = [] - changes = repository.revision_tree(new_revid).changes_from( - repository.revision_tree(old_revid)) - for (collection, name) in ((changes.added, 'ADDED'), - (changes.removed, 'REMOVED'), - (changes.modified, 'MODIFIED')): - for info in collection: - path = info[0] - kind = info[2] - files.append(' '.join([path, kind, name])) - for info in changes.renamed: - oldpath, newpath, id, kind, text_modified, meta_modified = info - elements = [oldpath, kind, 'RENAMED', newpath] - if text_modified or meta_modified: - elements.append('MODIFIED') - files.append(' '.join(elements)) - return change - -# -# poller - -# We don't want to make the hooks unnecessarily depend on buildbot being -# installed locally, so we conditionally create the BzrPoller class. -if DEFINE_POLLER: - - FULL = object() - SHORT = object() - - class BzrPoller(buildbot.changes.base.PollingChangeSource, - buildbot.util.ComparableMixin): - - compare_attrs = ('url') - - def __init__(self, url, poll_interval=10 * 60, blame_merge_author=False, - branch_name=None, category=None): - # poll_interval is in seconds, so default poll_interval is 10 - # minutes. 
- # bzr+ssh://bazaar.launchpad.net/~launchpad-pqm/launchpad/devel/ - # works, lp:~launchpad-pqm/launchpad/devel/ doesn't without help. - if url.startswith('lp:'): - url = 'bzr+ssh://bazaar.launchpad.net/' + url[3:] - self.url = url - self.poll_interval = poll_interval - self.loop = twisted.internet.task.LoopingCall(self.poll) - self.blame_merge_author = blame_merge_author - self.branch_name = branch_name - self.category = category - - def startService(self): - twisted.python.log.msg("BzrPoller(%s) starting" % self.url) - if self.branch_name is FULL: - ourbranch = self.url - elif self.branch_name is SHORT: - # We are in a bit of trouble, as we cannot really know what our - # branch is until we have polled new changes. - # Seems we would have to wait until we polled the first time, - # and only then do the filtering, grabbing the branch name from - # whatever we polled. - # For now, leave it as it was previously (compare against - # self.url); at least now things work when specifying the - # branch name explicitly. - ourbranch = self.url - else: - ourbranch = self.branch_name - for change in reversed(self.parent.changes): - if change.branch == ourbranch: - self.last_revision = change.revision - break - else: - self.last_revision = None - buildbot.changes.base.PollingChangeSource.startService(self) - - def stopService(self): - twisted.python.log.msg("BzrPoller(%s) shutting down" % self.url) - return buildbot.changes.base.PollingChangeSource.stopService(self) - - def describe(self): - return "BzrPoller watching %s" % self.url - - @defer.inlineCallbacks - def poll(self): - # On a big tree, even individual elements of the bzr commands - # can take awhile. So we just push the bzr work off to a - # thread. - try: - changes = yield twisted.internet.threads.deferToThread( - self.getRawChanges) - except (SystemExit, KeyboardInterrupt): - raise - except Exception: - # we'll try again next poll. Meanwhile, let's report. 
- twisted.python.log.err() - else: - for change_kwargs in changes: - yield self.addChange(change_kwargs) - self.last_revision = change_kwargs['revision'] - - def getRawChanges(self): - branch = bzrlib.branch.Branch.open_containing(self.url)[0] - if self.branch_name is FULL: - branch_name = self.url - elif self.branch_name is SHORT: - branch_name = branch.nick - else: # presumably a string or maybe None - branch_name = self.branch_name - changes = [] - change = generate_change( - branch, blame_merge_author=self.blame_merge_author) - if (self.last_revision is None or - change['revision'] > self.last_revision): - change['branch'] = branch_name - change['category'] = self.category - changes.append(change) - if self.last_revision is not None: - while self.last_revision + 1 < change['revision']: - change = generate_change( - branch, new_revno=change['revision'] - 1, - blame_merge_author=self.blame_merge_author) - change['branch'] = branch_name - changes.append(change) - changes.reverse() - return changes - - def addChange(self, change_kwargs): - d = defer.Deferred() - - def _add_change(): - d.callback( - self.master.data.updates.addChange(src='bzr', - **change_kwargs)) - twisted.internet.reactor.callLater(0, _add_change) - return d - -# -# hooks - -HOOK_KEY = 'buildbot_on' -SERVER_KEY = 'buildbot_server' -PORT_KEY = 'buildbot_port' -AUTH_KEY = 'buildbot_auth' -DRYRUN_KEY = 'buildbot_dry_run' -PQM_KEY = 'buildbot_pqm' -SEND_BRANCHNAME_KEY = 'buildbot_send_branch_name' - -PUSH_VALUE = 'push' -COMMIT_VALUE = 'commit' -CHANGE_VALUE = 'change' - - -def _is_true(config, key): - val = config.get_user_option(key) - return val is not None and val.lower().strip() in ( - 'y', 'yes', 't', 'true') - - -def _installed_hook(branch): - value = branch.get_config().get_user_option(HOOK_KEY) - if value is not None: - value = value.strip().lower() - if value not in (PUSH_VALUE, COMMIT_VALUE, CHANGE_VALUE): - raise bzrlib.errors.BzrError( - '%s, if set, must be one of %s, %s, or %s' % ( - HOOK_KEY, PUSH_VALUE, COMMIT_VALUE, CHANGE_VALUE)) - return value - - -# replaces twisted.internet.thread equivalent - - -def _putResultInDeferred(reactor, deferred, f, args, kwargs): - """ - Run a function and give results to a Deferred. - """ - try: - result = f(*args, **kwargs) - except Exception: - f = failure.Failure() - reactor.callFromThread(deferred.errback, f) - else: - reactor.callFromThread(deferred.callback, result) - -# would be a proposed addition. deferToThread could use it - - -def deferToThreadInReactor(reactor, f, *args, **kwargs): - """ - Run function in thread and return result as Deferred. 
- """ - d = defer.Deferred() - reactor.callInThread(_putResultInDeferred, reactor, d, f, args, kwargs) - return d - -# uses its own reactor for the threaded calls, unlike Twisted's - - -class ThreadedResolver(twisted.internet.base.ThreadedResolver): - - def getHostByName(self, name, timeout=(1, 3, 11, 45)): - if timeout: - timeoutDelay = reduce(operator.add, timeout) - else: - timeoutDelay = 60 - userDeferred = defer.Deferred() - lookupDeferred = deferToThreadInReactor( - self.reactor, socket.gethostbyname, name) - cancelCall = self.reactor.callLater( - timeoutDelay, self._cleanup, name, lookupDeferred) - self._runningQueries[lookupDeferred] = (userDeferred, cancelCall) - lookupDeferred.addBoth(self._checkTimeout, name, lookupDeferred) - return userDeferred -# - - -def send_change(branch, old_revno, old_revid, new_revno, new_revid, hook): - config = branch.get_config() - server = config.get_user_option(SERVER_KEY) - if not server: - bzrlib.trace.warning( - 'bzr_buildbot: ERROR. If %s is set, %s must be set', - HOOK_KEY, SERVER_KEY) - return - change = generate_change( - branch, old_revno, old_revid, new_revno, new_revid, - blame_merge_author=_is_true(config, PQM_KEY)) - if _is_true(config, SEND_BRANCHNAME_KEY): - change['branch'] = branch.nick - # as of this writing (in Buildbot 0.7.9), 9989 is the default port when - # you make a buildbot master. - port = int(config.get_user_option(PORT_KEY) or 9989) - # if dry run, stop. - if _is_true(config, DRYRUN_KEY): - bzrlib.trace.note("bzr_buildbot DRY RUN " - "(*not* sending changes to %s:%d on %s)", - server, port, hook) - keys = sorted(change.keys()) - for k in keys: - bzrlib.trace.note("[%10s]: %s", k, change[k]) - return - # We instantiate our own reactor so that this can run within a server. - reactor = twisted.internet.selectreactor.SelectReactor() - # See other reference to http://twistedmatrix.com/trac/ticket/3591 - # above. This line can go away with a release of Twisted that addresses - # this issue. 
- reactor.resolver = ThreadedResolver(reactor) - pbcf = twisted.spread.pb.PBClientFactory() - reactor.connectTCP(server, port, pbcf) - auth = config.get_user_option(AUTH_KEY) - if auth: - user, passwd = [s.strip() for s in auth.split(':', 1)] - else: - user, passwd = ('change', 'changepw') - deferred = pbcf.login( - twisted.cred.credentials.UsernamePassword(user, passwd)) - - @deferred.addCallback - def sendChanges(remote): - """Send changes to buildbot.""" - bzrlib.trace.mutter("bzrbuildout sending changes: %s", change) - change['src'] = 'bzr' - return remote.callRemote('addChange', change) - - def quit(ignore, msg): - bzrlib.trace.note("bzrbuildout: %s", msg) - reactor.stop() - - deferred.addCallback(quit, "SUCCESS") - - @deferred.addErrback - def failed(failure): - bzrlib.trace.warning("bzrbuildout: FAILURE\n %s", failure) - reactor.stop() - - reactor.callLater(60, quit, None, "TIMEOUT") - bzrlib.trace.note( - "bzr_buildbot: SENDING CHANGES to buildbot master %s:%d on %s", - server, port, hook) - reactor.run(installSignalHandlers=False) # run in a thread when in server - - -def post_commit(local_branch, master_branch, # branch is the master_branch - old_revno, old_revid, new_revno, new_revid): - if _installed_hook(master_branch) == COMMIT_VALUE: - send_change(master_branch, - old_revid, old_revid, new_revno, new_revid, COMMIT_VALUE) - - -def post_push(result): - if _installed_hook(result.target_branch) == PUSH_VALUE: - send_change(result.target_branch, - result.old_revid, result.old_revid, - result.new_revno, result.new_revid, PUSH_VALUE) - - -def post_change_branch_tip(result): - if _installed_hook(result.branch) == CHANGE_VALUE: - send_change(result.branch, - result.old_revid, result.old_revid, - result.new_revno, result.new_revid, CHANGE_VALUE) - -bzrlib.branch.Branch.hooks.install_named_hook( - 'post_commit', post_commit, - 'send change to buildbot master') -bzrlib.branch.Branch.hooks.install_named_hook( - 'post_push', post_push, - 'send change to buildbot master') -bzrlib.branch.Branch.hooks.install_named_hook( - 'post_change_branch_tip', post_change_branch_tip, - 'send change to buildbot master') diff --git a/master/contrib/check_buildbot.py b/master/contrib/check_buildbot.py deleted file mode 100755 index 48abca6b24e..00000000000 --- a/master/contrib/check_buildbot.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import lrange - -import sys -import urllib - -"""check_buildbot.py -H hostname -p httpport [options] - -nagios check for buildbot. - -requires that both metrics and web status enabled. 
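For example (the hostname and port are placeholders for wherever your web status listens):

    check_buildbot.py -H buildbot.example.com -p 8010 -v

The script exits with the usual Nagios codes (OK, Warning, Critical, Unknown), derived from the alarms in the metrics JSON.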
- -Both hostname and httpport must be set, or alternatively use url which -should be the full url to the metrics json resource""" -try: - import simplejson as json -except ImportError: - import json - - -OK, WARNING, CRITICAL, UNKNOWN = lrange(4) -STATUS_TEXT = ["OK", "Warning", "Critical", "Unknown"] -STATUS_CODES = dict(OK=OK, WARNING=WARNING, CRIT=CRITICAL) - - -def exit(level, msg): - print("%s: %s" % (STATUS_TEXT[level], msg)) - sys.exit(level) - - -def main(): - from optparse import OptionParser - parser = OptionParser(__doc__) - parser.set_defaults( - hostname=None, - httpport=None, - url=None, - verbosity=0 - ) - parser.add_option("-H", "--host", dest="hostname", - help="Hostname") - parser.add_option("-p", "--port", dest="httpport", - type="int", help="WebStatus port") - parser.add_option("-u", "--url", dest="url", - help="Metrics url") - parser.add_option("-v", "--verbose", dest="verbosity", - action="count", help="Increase verbosity") - options, args = parser.parse_args() - - if options.hostname and options.httpport: - url = "http://%s:%s/json/metrics" % (options.hostname, - options.httpport) - elif options.url: - url = options.url - else: - exit(UNKNOWN, "You must specify both hostname and httpport, or just url") - - try: - data = urllib.urlopen(url).read() - except Exception: - exit(CRITICAL, "Error connecting to %s" % url) - - try: - data = json.loads(data) - except ValueError: - exit(CRITICAL, "Could not parse output of %s as json" % url) - - if not data: - exit(WARNING, "%s returned null; are metrics disabled?" % url) - - alarms = data['alarms'] - status = OK - messages = [] - for alarm_name, alarm_state in alarms.items(): - if options.verbosity >= 2: - messages.append("%s: %s" % (alarm_name, alarm_state)) - - try: - alarm_code = STATUS_CODES[alarm_state[0]] - except (KeyError, IndexError): - status = UNKNOWN - messages.append("%s has unknown alarm state %s" % - (alarm_name, alarm_state)) - continue - - status = max(status, alarm_code) - if alarm_code > OK and options.verbosity < 2: - messages.append("%s: %s" % (alarm_name, alarm_state)) - - if not messages and status == OK: - messages.append("no problems") - exit(status, ";".join(messages)) - -if __name__ == '__main__': - main() diff --git a/master/contrib/check_smtp.py b/master/contrib/check_smtp.py deleted file mode 100755 index 0734213b42b..00000000000 --- a/master/contrib/check_smtp.py +++ /dev/null @@ -1,41 +0,0 @@ -#! /usr/bin/python -tt - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -from getpass import getpass -from smtplib import SMTP - -""" -This script helps to check that the SMTP_HOST (see below) would accept STARTTLS -command, and if LOCAL_HOST is acceptable for it, would check the requested user -name and password would allow to send e-mail through it. 
-""" - - -SMTP_HOST = 'the host you want to send e-mail through' -LOCAL_HOST = 'hostname that the SMTP_HOST would accept' - - -def main(): - """ - entry point - """ - - server = SMTP(SMTP_HOST) - - server.starttls() - - print(server.ehlo(LOCAL_HOST)) - - user = raw_input('user: ') - password = getpass('password: ') - - print(server.login(user, password)) - server.close() - -if __name__ == '__main__': - main() - -# vim:ts=4:sw=4:et:tw=80 diff --git a/master/contrib/coverage2text.py b/master/contrib/coverage2text.py deleted file mode 100755 index 3376426367d..00000000000 --- a/master/contrib/coverage2text.py +++ /dev/null @@ -1,122 +0,0 @@ -#!/usr/bin/env python - -from __future__ import absolute_import -from __future__ import print_function - -import sys - -from coverage import coverage -from coverage.results import Numbers -from coverage.summary import SummaryReporter - -from twisted.python import usage - - -# this is an adaptation of the code behind "coverage report", modified to -# display+sortby "lines uncovered", which (IMHO) is more important of a -# metric than lines covered or percentage covered. Concentrating on the files -# with the most uncovered lines encourages getting the tree and test suite -# into a state that provides full line-coverage on all files. - -# much of this code was adapted from coverage/summary.py in the 'coverage' -# distribution, and is used under their BSD license. - - -class Options(usage.Options): - optParameters = [ - ("sortby", "s", "uncovered", "how to sort: uncovered, covered, name"), - ] - - -class MyReporter(SummaryReporter): - - def report(self, outfile=None, sortby="uncovered"): - self.find_code_units(None, ["/System", "/Library", "/usr/lib", - "buildbot/test", "simplejson"]) - - # Prepare the formatting strings - max_name = max([len(cu.name) for cu in self.code_units] + [5]) - fmt_name = "%%- %ds " % max_name - fmt_err = "%s %s: %s\n" - header1 = (fmt_name % "") + " Statements " - header2 = (fmt_name % "Name") + " Uncovered Covered" - fmt_coverage = fmt_name + "%9d %7d " - if self.branches: - header1 += " Branches " - header2 += " Found Excutd" - fmt_coverage += " %6d %6d" - header1 += " Percent" - header2 += " Covered" - fmt_coverage += " %7d%%" - if self.show_missing: - header1 += " " - header2 += " Missing" - fmt_coverage += " %s" - rule = "-" * len(header1) + "\n" - header1 += "\n" - header2 += "\n" - fmt_coverage += "\n" - - if not outfile: - outfile = sys.stdout - - # Write the header - outfile.write(header1) - outfile.write(header2) - outfile.write(rule) - - total = Numbers() - total_uncovered = 0 - - lines = [] - for cu in self.code_units: - try: - analysis = self.coverage._analyze(cu) - nums = analysis.numbers - uncovered = nums.n_statements - nums.n_executed - total_uncovered += uncovered - args = (cu.name, uncovered, nums.n_executed) - if self.branches: - args += (nums.n_branches, nums.n_executed_branches) - args += (nums.pc_covered,) - if self.show_missing: - args += (analysis.missing_formatted(),) - if sortby == "covered": - sortkey = nums.pc_covered - elif sortby == "uncovered": - sortkey = uncovered - else: - sortkey = cu.name - lines.append((sortkey, fmt_coverage % args)) - total += nums - except Exception: - if not self.ignore_errors: - typ, msg = sys.exc_info()[:2] - outfile.write(fmt_err % (cu.name, typ.__name__, msg)) - lines.sort() - if sortby in ("uncovered", "covered"): - lines.reverse() - for sortkey, line in lines: - outfile.write(line) - - if total.n_files > 1: - outfile.write(rule) - args = ("TOTAL", total_uncovered, 
total.n_executed) - if self.branches: - args += (total.n_branches, total.n_executed_branches) - args += (total.pc_covered,) - if self.show_missing: - args += ("",) - outfile.write(fmt_coverage % args) - - -def report(o): - c = coverage() - c.load() - r = MyReporter(c, show_missing=False, ignore_errors=False) - r.report(sortby=o['sortby']) - -if __name__ == '__main__': - o = Options() - o.parseOptions() - report(o) diff --git a/master/contrib/darcs_buildbot.py b/master/contrib/darcs_buildbot.py deleted file mode 100755 index 86e14e11c40..00000000000 --- a/master/contrib/darcs_buildbot.py +++ /dev/null @@ -1,200 +0,0 @@ -#! /usr/bin/python - -# This is a script which delivers Change events from Darcs to the buildmaster -# each time a patch is pushed into a repository. Add it to the 'apply' hook -# on your canonical "central" repository, by putting something like the -# following in the _darcs/prefs/defaults file of that repository: -# -# apply posthook /PATH/TO/darcs_buildbot.py BUILDMASTER:PORT -# apply run-posthook -# -# (the second command is necessary to avoid the usual "do you really want to -# run this hook" prompt. Note that you cannot have multiple 'apply posthook' -# lines: if you need this, you must create a shell script to run all your -# desired commands, then point the posthook at that shell script.) -# -# Note that both Buildbot and Darcs must be installed on the repository -# machine. You will also need the Python/XML distribution installed (the -# "python2.3-xml" package under debian). - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import commands -import os -import sys -import xml -from xml.dom import minidom - -from twisted.internet import defer -from twisted.internet import reactor - -from buildbot.clients import sendchange - - -def getText(node): - return "".join([cn.data - for cn in node.childNodes - if cn.nodeType == cn.TEXT_NODE]) - - -def getTextFromChild(parent, childtype): - children = parent.getElementsByTagName(childtype) - if not children: - return "" - return getText(children[0]) - - -def makeChange(p): - author = p.getAttribute("author") - revision = p.getAttribute("hash") - comments = (getTextFromChild(p, "name") + "\n" + - getTextFromChild(p, "comment")) - - summary = p.getElementsByTagName("summary")[0] - files = [] - for filenode in summary.childNodes: - if filenode.nodeName in ("add_file", "modify_file", "remove_file"): - filename = getText(filenode).strip() - files.append(filename) - elif filenode.nodeName == "move": - to_name = filenode.getAttribute("to") - files.append(to_name) - - # note that these are all unicode. Because PB can't handle unicode, we - # encode them into ascii, which will blow up early if there's anything we - # can't get to the far side. When we move to something that *can* handle - # unicode (like newpb), remove this. - author = author.encode("ascii", "replace") - comments = comments.encode("ascii", "replace") - files = [f.encode("ascii", "replace") for f in files] - revision = revision.encode("ascii", "replace") - - change = { - # note: this is more likely to be a full email address, which would - # make the left-hand "Changes" column kind of wide. The buildmaster - # should probably be improved to display an abbreviation of the - # username. 
- 'username': author, - 'revision': revision, - 'comments': comments, - 'files': files, - } - return change - - -def getChangesFromCommand(cmd, count): - out = commands.getoutput(cmd) - try: - doc = minidom.parseString(out) - except xml.parsers.expat.ExpatError as e: - print("failed to parse XML") - print(str(e)) - print("purported XML is:") - print("--BEGIN--") - print(out) - print("--END--") - sys.exit(1) - - c = doc.getElementsByTagName("changelog")[0] - changes = [] - for i, p in enumerate(c.getElementsByTagName("patch")): - if i >= count: - break - changes.append(makeChange(p)) - return changes - - -def getSomeChanges(count): - cmd = "darcs changes --last=%d --xml-output --summary" % count - return getChangesFromCommand(cmd, count) - - -LASTCHANGEFILE = ".darcs_buildbot-lastchange" - - -def findNewChanges(): - if os.path.exists(LASTCHANGEFILE): - f = open(LASTCHANGEFILE, "r") - lastchange = f.read() - f.close() - else: - return getSomeChanges(1) - lookback = 10 - while True: - changes = getSomeChanges(lookback) - # getSomeChanges returns newest-first, so changes[0] is the newest. - # we want to scan the newest first until we find the changes we sent - # last time, then deliver everything newer than that (and send them - # oldest-first). - for i, c in enumerate(changes): - if c['revision'] == lastchange: - newchanges = changes[:i] - newchanges.reverse() - return newchanges - if 2 * lookback > 100: - raise RuntimeError("unable to find our most recent change " - "(%s) in the last %d changes" % (lastchange, - lookback)) - lookback = 2 * lookback - - -def sendChanges(master): - changes = findNewChanges() - s = sendchange.Sender(master) - - d = defer.Deferred() - reactor.callLater(0, d.callback, None) - - if not changes: - print("darcs_buildbot.py: weird, no changes to send") - return - elif len(changes) == 1: - print("sending 1 change to buildmaster:") - else: - print("sending %d changes to buildmaster:" % len(changes)) - - # the Darcs Source class expects revision to be a context, not a - # hash of a patch (which is what we have in c['revision']). For - # the moment, we send None for everything but the most recent, because getting - # contexts is Hard. 
- - # get the context for the most recent change - latestcontext = commands.getoutput("darcs changes --context") - changes[-1]['context'] = latestcontext - - def _send(res, c): - branch = None - print(" %s" % c['revision']) - return s.send(branch, c.get('context'), c['comments'], c['files'], - c['username'], vc='darcs') - for c in changes: - d.addCallback(_send, c) - - def printSuccess(res): - num_changes = len(changes) - if num_changes > 1: - print("%d changes sent successfully" % num_changes) - elif num_changes == 1: - print("change sent successfully") - else: - print("no changes to send") - - def printFailure(why): - print("change(s) NOT sent, something went wrong: " + str(why)) - - d.addCallbacks(printSuccess, printFailure) - d.addBoth(lambda _: reactor.stop) - reactor.run() - - if changes: - lastchange = changes[-1]['revision'] - f = open(LASTCHANGEFILE, "w") - f.write(lastchange) - f.close() - - -if __name__ == '__main__': - MASTER = sys.argv[1] - sendChanges(MASTER) diff --git a/master/contrib/docker/master/README.md b/master/contrib/docker/master/README.md deleted file mode 100644 index 87249810ec0..00000000000 --- a/master/contrib/docker/master/README.md +++ /dev/null @@ -1,11 +0,0 @@ -Buildbot-Master docker container -================================ - -[Buildbot](http://buildbot.net) is a continuous integration framework written and configured in python. - -You can look at the [tutorial](http://docs.buildbot.net/latest/tutorial/docker.html) to learn how to use it. - -This container is based on alpine linux, and thus very lightweight. Another version based on ubuntu exists if you need more custom environment. - -The container expects a /var/lib/buildbot volume to store its configuration, and will open port 8010 for web server, and 9989 for worker connection. -It is also expecting a postgresql container attached for storing state. diff --git a/master/contrib/docker/master/buildbot.tac b/master/contrib/docker/master/buildbot.tac deleted file mode 100644 index 8aa02134ad2..00000000000 --- a/master/contrib/docker/master/buildbot.tac +++ /dev/null @@ -1,18 +0,0 @@ -import sys - -from twisted.application import service -from twisted.python.log import FileLogObserver -from twisted.python.log import ILogObserver - -from buildbot.master import BuildMaster - -basedir = '/var/lib/buildbot' -configfile = 'master.cfg' - -# note: this line is matched against to check that this is a buildmaster -# directory; do not edit it. -application = service.Application('buildmaster') -application.setComponent(ILogObserver, FileLogObserver(sys.stdout).emit) - -m = BuildMaster(basedir, configfile, umask=None) -m.setServiceParent(application) diff --git a/master/contrib/docker/master/start_buildbot.sh b/master/contrib/docker/master/start_buildbot.sh deleted file mode 100755 index 8bd4f815676..00000000000 --- a/master/contrib/docker/master/start_buildbot.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/sh - -# startup script for purely stateless master - -# we download the config from an arbitrary curl accessible tar.gz file (which github can generate for us) - -B=`pwd` - -if [ -z "$BUILDBOT_CONFIG_URL" ] -then - if [ ! -f "$B/master.cfg" ] - then - echo No master.cfg found nor $$BUILDBOT_CONFIG_URL ! 
- echo Please provide a master.cfg file in $B or provide a $$BUILDBOT_CONFIG_URL variable via -e - exit 1 - fi - -else - BUILDBOT_CONFIG_DIR=${BUILDBOT_CONFIG_DIR:-config} - mkdir -p $B/$BUILDBOT_CONFIG_DIR - # if it ends with .tar.gz then its a tarball, else its directly the file - if echo "$BUILDBOT_CONFIG_URL" | grep '.tar.gz$' >/dev/null - then - until curl -sL $BUILDBOT_CONFIG_URL | tar -xz --strip-components=1 --directory=$B/$BUILDBOT_CONFIG_DIR - do - echo "Can't download from \$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" - sleep 1 - done - - ln -sf $B/$BUILDBOT_CONFIG_DIR/master.cfg $B/master.cfg - - if [ -f $B/$BUILDBOT_CONFIG_DIR/buildbot.tac ] - then - ln -sf $B/$BUILDBOT_CONFIG_DIR/buildbot.tac $B/buildbot.tac - fi - else - until curl -sL $BUILDBOT_CONFIG_URL > $B/master.cfg - do - echo "Can't download from $$BUILDBOT_CONFIG_URL: $BUILDBOT_CONFIG_URL" - done - fi -fi -# copy the default buildbot.tac if not provided by the config -if [ ! -f $B/buildbot.tac ] -then - cp /usr/src/buildbot/contrib/docker/master/buildbot.tac $B -fi -# wait for db to start by trying to upgrade the master -until buildbot upgrade-master $B -do - echo "Can't upgrade master yet. Waiting for database ready?" - sleep 1 -done - -# we use exec so that twistd use the pid 1 of the container, and so that signals are properly forwarded -exec twistd -ny $B/buildbot.tac diff --git a/master/contrib/docker/pythonnode_worker/Dockerfile b/master/contrib/docker/pythonnode_worker/Dockerfile deleted file mode 100644 index 8644ca24459..00000000000 --- a/master/contrib/docker/pythonnode_worker/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# buildbot/buildbot-worker-python-node - -# This example docker file show how to customize the base worker docker image -# to add build dependencies to build the python+nodejs buildbot_www package - -FROM buildbot/buildbot-worker:master -MAINTAINER Buildbot maintainers - -# This will make apt-get install without question -ARG DEBIAN_FRONTEND=noninteractive - -user root -# Install required npm packages -RUN curl -sL https://deb.nodesource.com/setup_6.x | bash - && \ - apt-get update && apt-get install -y -o APT::Install-Recommends=false -o APT::Install-Suggests=false \ - nodejs \ - git && \ - rm -rf /var/lib/apt/lists/* - -user buildbot diff --git a/master/contrib/fakechange.py b/master/contrib/fakechange.py deleted file mode 100755 index 8e02012c518..00000000000 --- a/master/contrib/fakechange.py +++ /dev/null @@ -1,87 +0,0 @@ -#! /usr/bin/python - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import commands -import os.path -import random -import sys - -from twisted.cred import credentials -from twisted.internet import reactor -from twisted.python import log -from twisted.spread import pb - - -""" -This is an example of how to use the remote ChangeMaster interface, which is -a port that allows a remote program to inject Changes into the buildmaster. - -The buildmaster can either pull changes in from external sources (see -buildbot.changes.changes.ChangeMaster.addSource for an example), or those -changes can be pushed in from outside. This script shows how to do the -pushing. - -Changes are just dictionaries with three keys: - - 'who': a simple string with a username. Responsibility for this change will - be assigned to the named user (if something goes wrong with the build, they - will be blamed for it). - - 'files': a list of strings, each with a filename relative to the top of the - source tree. 
- - 'comments': a (multiline) string with checkin comments. - -Each call to .addChange injects a single Change object: each Change -represents multiple files, all changed by the same person, and all with the -same checkin comments. - -The port that this script connects to is the same 'workerPort' that the -workers and other debug tools use. The ChangeMaster service will only be -available on that port if 'change' is in the list of services passed to -buildbot.master.makeApp (this service is turned ON by default). -""" - - -def done(*args): - reactor.stop() - - -users = ('zaphod', 'arthur', 'trillian', 'marvin', 'sbfast') -dirs = ('src', 'doc', 'tests') -sources = ('foo.c', 'bar.c', 'baz.c', 'Makefile') -docs = ('Makefile', 'index.html', 'manual.texinfo') - - -def makeFilename(): - d = random.choice(dirs) - if d in ('src', 'tests'): - f = random.choice(sources) - else: - f = random.choice(docs) - return os.path.join(d, f) - - -def send_change(remote): - who = random.choice(users) - if len(sys.argv) > 1: - files = sys.argv[1:] - else: - files = [makeFilename()] - comments = commands.getoutput("fortune") - change = {'who': who, 'files': files, 'comments': comments} - d = remote.callRemote('addChange', change) - d.addCallback(done) - print("%s: %s" % (who, " ".join(files))) - - -f = pb.PBClientFactory() -d = f.login(credentials.UsernamePassword("change", "changepw")) -reactor.connectTCP("localhost", 8007, f) -err = lambda f: (log.err(), reactor.stop()) -d.addCallback(send_change).addErrback(err) - -reactor.run() diff --git a/master/contrib/generate_changelog.py b/master/contrib/generate_changelog.py deleted file mode 100755 index 14bbf6ee60f..00000000000 --- a/master/contrib/generate_changelog.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2008 -# Steve 'Ashcrow' Milner -# -# This software may be freely redistributed under the terms of the GNU -# general public license. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os -import sys - -""" -Generates changelog information using git. -""" - -__docformat__ = 'restructuredtext' - - -def print_err(msg): - """ - Wrapper to make printing to stderr nicer. - - :Parameters: - - `msg`: the message to print. - """ - sys.stderr.write(msg) - sys.stderr.write('\n') - - -def usage(): - """ - Prints out usage information to stderr. - """ - print_err('Usage: %s git-binary since' % sys.argv[0]) - print_err(('Example: %s /usr/bin/git f5067523dfae9c7cdefc82' - '8721ec593ac7be62db' % sys.argv[0])) - - -def main(args): - """ - Main entry point. 
- - :Parameters: - - `args`: same as sys.argv[1:] - """ - # Make sure we have the arguments we need, else show usage - try: - git_bin = args[0] - since = args[1] - except IndexError: - usage() - return 1 - - if not os.access(git_bin, os.X_OK): - print_err('Can not access %s' % git_bin) - return 1 - - # Open a pipe and force the format - pipe = os.popen((git_bin + ' log --pretty="format:%ad %ae%n' - ' * %s" ' + since + '..')) - print(pipe.read()) - pipe.close() - return 0 - - -if __name__ == '__main__': - raise SystemExit(main(sys.argv[1:])) diff --git a/master/contrib/git_buildbot.py b/master/contrib/git_buildbot.py deleted file mode 100755 index 55dbe873581..00000000000 --- a/master/contrib/git_buildbot.py +++ /dev/null @@ -1,472 +0,0 @@ -#!/usr/bin/env python - -# This script expects one line for each new revision on the form -# -# -# For example: -# aa453216d1b3e49e7f6f98441fa56946ddcd6a20 -# 68f7abf4e6f922807889f52bc043ecd31b79f814 refs/heads/master -# -# Each of these changes will be passed to the buildbot server along -# with any other change information we manage to extract from the -# repository. -# -# This script is meant to be run from hooks/post-receive in the git -# repository. It can also be run at client side with hooks/post-merge -# after using this wrapper: - -# !/bin/sh -# PRE=$(git rev-parse 'HEAD@{1}') -# POST=$(git rev-parse HEAD) -# SYMNAME=$(git rev-parse --symbolic-full-name HEAD) -# echo "$PRE $POST $SYMNAME" | git_buildbot.py -# -# Largely based on contrib/hooks/post-receive-email from git. - -from __future__ import absolute_import -from __future__ import print_function -from future.utils import iteritems -from future.utils import text_type - -import commands -import logging -import os -import re -import subprocess -import sys -from optparse import OptionParser - -from twisted.cred import credentials -from twisted.internet import defer -from twisted.internet import reactor -from twisted.spread import pb - -# Modify this to fit your setup, or pass in --master server:port on the -# command line - -master = "localhost:9989" - -# When sending the notification, send this category if (and only if) -# it's set (via --category) - -category = None - -# When sending the notification, send this repository if (and only if) -# it's set (via --repository) - -repository = None - -# When sending the notification, send this project if (and only if) -# it's set (via --project) - -project = None - -# When sending the notification, send this codebase. If this is None, no -# codebase will be sent. This can also be set via --codebase - -codebase = None - -# Username portion of PB login credentials to send the changes to the master -username = "change" - -# Password portion of PB login credentials to send the changes to the master -auth = "changepw" - -# When converting strings to unicode, assume this encoding. -# (set with --encoding) - -encoding = 'utf8' - -# If true, takes only the first parent commits. This controls if we want to -# trigger builds for merged in commits (when False). - -first_parent = False - -# The GIT_DIR environment variable must have been set up so that any -# git commands that are executed will operate on the repository we're -# installed in. 
- -changes = [] - - -def connectFailed(error): - logging.error( - "Could not connect to %s: %s", master, error.getErrorMessage()) - return error - - -def addChanges(remote, changei, src='git'): - logging.debug("addChanges %s, %s", repr(remote), repr(changei)) - - def addChange(c): - logging.info("New revision: %s", c['revision'][:8]) - for key, value in iteritems(c): - logging.debug(" %s: %s", key, value) - - c['src'] = src - d = remote.callRemote('addChange', c) - return d - - finished_d = defer.Deferred() - - def iter(): - try: - c = changei.next() - d = addChange(c) - # handle successful completion by re-iterating, but not immediately - # as that will blow out the Python stack - - def cb(_): - reactor.callLater(0, iter) - d.addCallback(cb) - # and pass errors along to the outer deferred - d.addErrback(finished_d.errback) - except StopIteration: - remote.broker.transport.loseConnection() - finished_d.callback(None) - - iter() - return finished_d - - -def connected(remote): - return addChanges(remote, changes.__iter__()) - - -def grab_commit_info(c, rev): - # Extract information about committer and files using git show - f = os.popen("git show --raw --pretty=full %s" % rev, 'r') - - files = [] - comments = [] - - while True: - line = f.readline() - if not line: - break - - if line.startswith(4 * ' '): - comments.append(line[4:]) - - m = re.match(r"^:.*[MAD]\s+(.+)$", line) - if m: - logging.debug("Got file: %s", m.group(1)) - files.append(text_type(m.group(1), encoding=encoding)) - continue - - m = re.match(r"^Author:\s+(.+)$", line) - if m: - logging.debug("Got author: %s", m.group(1)) - c['who'] = text_type(m.group(1), encoding=encoding) - - if re.match(r"^Merge: .*$", line): - files.append('merge') - - c['comments'] = ''.join(comments) - c['files'] = files - status = f.close() - if status: - logging.warning("git show exited with status %d", status) - - -def gen_changes(input, branch): - while True: - line = input.readline() - if not line: - break - - logging.debug("Change: %s", line) - - m = re.match(r"^([0-9a-f]+) (.*)$", line.strip()) - c = {'revision': m.group(1), - 'branch': text_type(branch, encoding=encoding), - } - - if category: - c['category'] = text_type(category, encoding=encoding) - - if repository: - c['repository'] = text_type(repository, encoding=encoding) - - if project: - c['project'] = text_type(project, encoding=encoding) - - if codebase: - c['codebase'] = text_type(codebase, encoding=encoding) - - grab_commit_info(c, m.group(1)) - changes.append(c) - - -def gen_create_branch_changes(newrev, refname, branch): - # A new branch has been created. Generate changes for everything - # up to `newrev' which does not exist in any branch but `refname'. - # - # Note that this may be inaccurate if two new branches are created - # at the same time, pointing to the same commit, or if there are - # commits that only exists in a common subset of the new branches. - - logging.info("Branch `%s' created", branch) - - f = os.popen("git rev-parse --not --branches" - + "| grep -v $(git rev-parse %s)" % refname - + - "| git rev-list --reverse --pretty=oneline --stdin %s" % newrev, - 'r') - - gen_changes(f, branch) - - status = f.close() - if status: - logging.warning("git rev-list exited with status %d", status) - - -def gen_create_tag_changes(newrev, refname, tag): - # A new tag has been created. Generate one change for the commit - # a tag may or may not coincide with the head of a branch, so - # the "branch" attribute will hold the tag name. 
- - logging.info("Tag `%s' created", tag) - f = os.popen("git log -n 1 --pretty=oneline %s" % newrev, 'r') - gen_changes(f, tag) - status = f.close() - if status: - logging.warning("git log exited with status %d", status) - - -def gen_update_branch_changes(oldrev, newrev, refname, branch): - # A branch has been updated. If it was a fast-forward update, - # generate Change events for everything between oldrev and newrev. - # - # In case of a forced update, first generate a "fake" Change event - # rewinding the branch to the common ancestor of oldrev and - # newrev. Then, generate Change events for each commit between the - # common ancestor and newrev. - - logging.info( - "Branch `%s' updated %s .. %s", branch, oldrev[:8], newrev[:8]) - - mergebasecommand = subprocess.Popen( - ["git", "merge-base", oldrev, newrev], stdout=subprocess.PIPE) - (baserev, err) = mergebasecommand.communicate() - baserev = baserev.strip() # remove newline - - logging.debug("oldrev=%s newrev=%s baserev=%s", oldrev, newrev, baserev) - if baserev != oldrev: - c = {'revision': baserev, - 'comments': "Rewind branch", - 'branch': text_type(branch, encoding=encoding), - 'who': "dummy", - } - logging.info("Branch %s was rewound to %s", branch, baserev[:8]) - files = [] - f = os.popen("git diff --raw %s..%s" % (oldrev, baserev), 'r') - while True: - line = f.readline() - if not line: - break - - file = re.match(r"^:.*[MAD]\s+(.+)$", line).group(1) - logging.debug(" Rewound file: %s", file) - files.append(text_type(file, encoding=encoding)) - - status = f.close() - if status: - logging.warning("git diff exited with status %d", status) - - if category: - c['category'] = text_type(category, encoding=encoding) - - if repository: - c['repository'] = text_type(repository, encoding=encoding) - - if project: - c['project'] = text_type(project, encoding=encoding) - - if codebase: - c['codebase'] = text_type(codebase, encoding=encoding) - - if files: - c['files'] = files - changes.append(c) - - if newrev != baserev: - # Not a pure rewind - options = "--reverse --pretty=oneline" - if first_parent: - # Add the --first-parent to avoid adding the merge commits which - # have already been tested. - options += ' --first-parent' - f = os.popen("git rev-list %s %s..%s" % - (options, baserev, newrev), 'r') - gen_changes(f, branch) - - status = f.close() - if status: - logging.warning("git rev-list exited with status %d", status) - - -def cleanup(res): - reactor.stop() - - -def process_branch_change(oldrev, newrev, refname, branch): - # Find out if the branch was created, deleted or updated. 
- if re.match(r"^0*$", newrev): - logging.info("Branch `%s' deleted, ignoring", branch) - elif re.match(r"^0*$", oldrev): - gen_create_branch_changes(newrev, refname, branch) - else: - gen_update_branch_changes(oldrev, newrev, refname, branch) - - -def process_tag_change(oldrev, newrev, refname, tag): - # Process a new tag, or ignore a deleted tag - if re.match(r"^0*$", newrev): - logging.info("Tag `%s' deleted, ignoring", tag) - elif re.match(r"^0*$", oldrev): - gen_create_tag_changes(newrev, refname, tag) - - -def process_change(oldrev, newrev, refname): - # Identify the change as a branch, tag or other, and process it - m = re.match(r"^refs/(heads|tags)/(.+)$", refname) - if not m: - logging.info("Ignoring refname `%s': Not a branch or tag", refname) - return - - if m.group(1) == 'heads': - branch = m.group(2) - process_branch_change(oldrev, newrev, refname, branch) - elif m.group(1) == 'tags': - tag = m.group(2) - process_tag_change(oldrev, newrev, refname, tag) - - -def process_changes(): - # Read branch updates from stdin and generate Change events - while True: - line = sys.stdin.readline() - line = line.rstrip() - if not line: - break - - [oldrev, newrev, refname] = line.split(None, 2) - process_change(oldrev, newrev, refname) - - -def send_changes(): - # Submit the changes, if any - if not changes: - logging.warning("No changes found") - return - - host, port = master.split(':') - port = int(port) - - f = pb.PBClientFactory() - d = f.login(credentials.UsernamePassword(username, auth)) - reactor.connectTCP(host, port, f) - - d.addErrback(connectFailed) - d.addCallback(connected) - d.addBoth(cleanup) - - reactor.run() - - -def parse_options(): - parser = OptionParser() - parser.add_option("-l", "--logfile", action="store", type="string", - help="Log to the specified file") - parser.add_option("-v", "--verbose", action="count", - help="Be more verbose. Ignored if -l is not specified.") - master_help = ("Build master to push to. Default is %(master)s" % - {'master': master}) - parser.add_option("-m", "--master", action="store", type="string", - help=master_help) - parser.add_option("-c", "--category", action="store", - type="string", help="Scheduler category to notify.") - parser.add_option("-r", "--repository", action="store", - type="string", help="Git repository URL to send.") - parser.add_option("-p", "--project", action="store", - type="string", help="Project to send.") - parser.add_option("--codebase", action="store", - type="string", help="Codebase to send.") - encoding_help = ("Encoding to use when converting strings to " - "unicode. Default is %(encoding)s." % - {"encoding": encoding}) - parser.add_option("-e", "--encoding", action="store", type="string", - help=encoding_help) - username_help = ("Username used in PB connection auth, defaults to " - "%(username)s." % {"username": username}) - parser.add_option("-u", "--username", action="store", type="string", - help=username_help) - auth_help = ("Password used in PB connection auth, defaults to " - "%(auth)s." % {"auth": auth}) - # 'a' instead of 'p' due to collisions with the project short option - parser.add_option("-a", "--auth", action="store", type="string", - help=auth_help) - first_parent_help = ("If set, don't trigger builds for merged in commits") - parser.add_option("--first-parent", action="store_true", - help=first_parent_help) - options, args = parser.parse_args() - return options - - -# Log errors and critical messages to stderr. Optionally log -# information to a file as well (we'll set that up later.) 
-stderr = logging.StreamHandler(sys.stderr) -fmt = logging.Formatter("git_buildbot: %(levelname)s: %(message)s") -stderr.setLevel(logging.ERROR) -stderr.setFormatter(fmt) -logging.getLogger().addHandler(stderr) -logging.getLogger().setLevel(logging.DEBUG) - -try: - options = parse_options() - level = logging.WARNING - if options.verbose: - level -= 10 * options.verbose - if level < 0: - level = 0 - - if options.logfile: - logfile = logging.FileHandler(options.logfile) - logfile.setLevel(level) - fmt = logging.Formatter("%(asctime)s %(levelname)s: %(message)s") - logfile.setFormatter(fmt) - logging.getLogger().addHandler(logfile) - - if options.master: - master = options.master - - if options.category: - category = options.category - - if options.repository: - repository = options.repository - - if options.project: - project = options.project - - if options.codebase: - codebase = options.codebase - - if options.username: - username = options.username - - if options.auth: - auth = options.auth - - if options.encoding: - encoding = options.encoding - - if options.first_parent: - first_parent = options.first_parent - - process_changes() - send_changes() -except Exception: - logging.exception("Unhandled exception") - sys.exit(1) diff --git a/master/contrib/github_buildbot.py b/master/contrib/github_buildbot.py deleted file mode 100755 index 7c39fda22f2..00000000000 --- a/master/contrib/github_buildbot.py +++ /dev/null @@ -1,421 +0,0 @@ -#!/usr/bin/env python -""" -github_buildbot.py is based on git_buildbot.py. Last revised on 2014-02-20. - -github_buildbot.py will determine the repository information from the JSON -HTTP POST it receives from github.com and build the appropriate repository. -If your github repository is private, you must add a ssh key to the github -repository for the user who initiated the build on the worker. - -This version of github_buildbot.py parses v3 of the github webhook api, with the -"application.vnd.github.v3+json" payload. Configure *only* "push" and/or -"pull_request" events to trigger this webhook. - -""" - -from __future__ import absolute_import -from __future__ import print_function -from future.utils import iteritems - -import hmac -import logging -import os -import re -import sys -from hashlib import sha1 -from httplib import ACCEPTED -from httplib import BAD_REQUEST -from httplib import INTERNAL_SERVER_ERROR -from httplib import OK -from optparse import OptionParser - -from twisted.cred import credentials -from twisted.internet import reactor -from twisted.spread import pb -from twisted.web import resource -from twisted.web import server - -try: - import json -except ImportError: - import simplejson as json - - -class GitHubBuildBot(resource.Resource): - - """ - GitHubBuildBot creates the webserver that responds to the GitHub Service - Hook. - """ - isLeaf = True - master = None - port = None - - def render_POST(self, request): - """ - Responds only to POST events and starts the build process - - :arguments: - request - the http request object - """ - - # All responses are application/json - request.setHeader("Content-Type", "application/json") - - content = request.content.read() - - # Verify the message if a secret was provided - # - # NOTE: We always respond with '400 BAD REQUEST' if we can't - # validate the message. This is done to prevent malicious - # requests from learning about why they failed to POST data - # to us. 
- if self.secret is not None: - signature = request.getHeader("X-Hub-Signature") - - if signature is None: - logging.error("Rejecting request. Signature is missing.") - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - try: - hash_type, hexdigest = signature.split("=") - - except ValueError: - logging.error("Rejecting request. Bad signature format.") - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - else: - # sha1 is hard coded into github's source code so it's - # unlikely this will ever change. - if hash_type != "sha1": - logging.error("Rejecting request. Unexpected hash type.") - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - mac = hmac.new(self.secret, msg=content, digestmod=sha1) - if mac.hexdigest() != hexdigest: - logging.error("Rejecting request. Hash mismatch.") - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - event_type = request.getHeader("X-GitHub-Event") - logging.debug("X-GitHub-Event: %r", event_type) - - handler = getattr(self, 'handle_%s' % event_type, None) - - if handler is None: - logging.info( - "Rejecting request. Received unsupported event %r.", - event_type) - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - try: - content_type = request.getHeader("Content-Type") - - if content_type == "application/json": - payload = json.loads(content) - elif content_type == "application/x-www-form-urlencoded": - payload = json.loads(request.args["payload"][0]) - else: - logging.info( - "Rejecting request. Unknown 'Content-Type', received %r", - content_type) - request.setResponseCode(BAD_REQUEST) - return json.dumps({"error": "Bad Request."}) - - logging.debug("Payload: %r", payload) - repo = payload['repository']['full_name'] - repo_url = payload['repository']['html_url'] - changes = handler(payload, repo, repo_url) - self.send_changes(changes, request) - return server.NOT_DONE_YET - - except Exception as e: - logging.exception(e) - request.setResponseCode(INTERNAL_SERVER_ERROR) - return json.dumps({"error": str(e)}) - - def process_change(self, change, branch, repo, repo_url): - files = change['added'] + change['removed'] + change['modified'] - who = "" - if 'username' in change['author']: - who = change['author']['username'] - else: - who = change['author']['name'] - if 'email' in change['author']: - who = "%s <%s>" % (who, change['author']['email']) - - comments = change['message'] - if len(comments) > 1024: - trim = " ... (trimmed, commit message exceeds 1024 characters)" - comments = comments[:1024 - len(trim)] + trim - - info_change = {'revision': change['id'], - 'revlink': change['url'], - 'who': who, - 'comments': comments, - 'repository': repo_url, - 'files': files, - 'project': repo, - 'branch': branch} - - if self.category: - info_change['category'] = self.category - - return info_change - - def handle_ping(self, *_): - return None - - def handle_push(self, payload, repo, repo_url): - """ - Consumes the JSON as a python object and actually starts the build. - - :arguments: - payload - Python Object that represents the JSON sent by GitHub Service - Hook. 
- """ - changes = None - refname = payload['ref'] - - m = re.match(r"^refs/(heads|tags)/(.+)$", refname) - if not m: - logging.info( - "Ignoring refname `%s': Not a branch or a tag", refname) - return changes - - refname = m.group(2) - - if payload['deleted'] is True: - logging.info("%r deleted, ignoring", refname) - else: - changes = [] - for change in payload['commits']: - if (self.head_commit or m.group(1) == 'tags') \ - and change['id'] != payload['head_commit']['id']: - continue - changes.append(self.process_change( - change, refname, repo, repo_url)) - return changes - - def handle_pull_request(self, payload, repo, repo_url): - """ - Consumes the JSON as a python object and actually starts the build. - - :arguments: - payload - Python Object that represents the JSON sent by GitHub Service - Hook. - """ - changes = None - - branch = "refs/pull/{}/head".format(payload['number']) - - if payload['action'] not in ("opened", "synchronize"): - logging.info("PR %r %r, ignoring", - payload['number'], payload['action']) - return None - else: - changes = [] - - # Create a synthetic change - change = { - 'id': payload['pull_request']['head']['sha'], - 'message': payload['pull_request']['body'], - 'timestamp': payload['pull_request']['updated_at'], - 'url': payload['pull_request']['html_url'], - 'author': { - 'username': payload['pull_request']['user']['login'], - }, - 'added': [], - 'removed': [], - 'modified': [], - } - - changes.append(self.process_change( - change, branch, repo, repo_url)) - return changes - - def send_changes(self, changes, request): - """ - Submit the changes, if any - """ - if not changes: - logging.warning("No changes found") - request.setResponseCode(OK) - request.write(json.dumps({"result": "No changes found."})) - request.finish() - return - - host, port = self.master.split(':') - port = int(port) - - if self.auth is not None: - auth = credentials.UsernamePassword(*self.auth.split(":")) - else: - auth = credentials.Anonymous() - - factory = pb.PBClientFactory() - deferred = factory.login(auth) - reactor.connectTCP(host, port, factory) - deferred.addErrback(self.connectFailed, request) - deferred.addCallback(self.connected, changes, request) - - def connectFailed(self, error, request): - """ - If connection is failed. Logs the error. - """ - logging.error("Could not connect to master: %s", - error.getErrorMessage()) - request.setResponseCode(INTERNAL_SERVER_ERROR) - request.write( - json.dumps({"error": "Failed to connect to buildbot master."})) - request.finish() - return error - - def addChange(self, _, remote, changei, src='git'): - """ - Sends changes from the commit to the buildmaster. - """ - logging.debug("addChange %r, %r", remote, changei) - try: - change = changei.next() - except StopIteration: - remote.broker.transport.loseConnection() - return None - - logging.info("New revision: %s", change['revision'][:8]) - for key, value in iteritems(change): - logging.debug(" %s: %s", key, value) - - change['src'] = src - deferred = remote.callRemote('addChange', change) - deferred.addCallback(self.addChange, remote, changei, src) - return deferred - - def connected(self, remote, changes, request): - """ - Responds to the connected event. 
- """ - # By this point we've connected to buildbot so - # we don't really need to keep github waiting any - # longer - request.setResponseCode(ACCEPTED) - request.write(json.dumps({"result": "Submitting changes."})) - request.finish() - - return self.addChange(None, remote, changes.__iter__()) - - -def setup_options(): - """ - The main event loop that starts the server and configures it. - """ - usage = "usage: %prog [options]" - parser = OptionParser(usage) - - parser.add_option("-p", "--port", - help="Port the HTTP server listens to for the GitHub " - "Service Hook [default: %default]", - default=9001, type=int, dest="port") - - parser.add_option("-m", "--buildmaster", - help="Buildbot Master host and port. ie: localhost:9989 " - "[default: %default]", - default="localhost:9989", dest="buildmaster") - - parser.add_option("--auth", - help="The username and password, separated by a colon, " - "to use when connecting to buildbot over the " - "perspective broker.", - default="change:changepw", dest="auth") - - parser.add_option("--head-commit", action="store_true", - help="If set, only trigger builds for commits at head") - - parser.add_option("--secret", - help="If provided then use the X-Hub-Signature header " - "to verify that the request is coming from " - "github. [default: %default]", - default=None, dest="secret") - - parser.add_option("-l", "--log", - help="The absolute path, including filename, to save the " - "log to [default: %default]. This may also be " - "'stdout' indicating logs should output directly to " - "standard output instead.", - default="github_buildbot.log", dest="log") - - parser.add_option("-L", "--level", - help="The logging level: debug, info, warn, error, " - "fatal [default: %default]", default='warn', - dest="level", - choices=("debug", "info", "warn", "error", "fatal")) - - parser.add_option("-g", "--github", - help="The github server. Changing this is useful if" - " you've specified a specific HOST handle in " - "~/.ssh/config for github [default: %default]", - default='github.com', dest="github") - - parser.add_option("--pidfile", - help="Write the process identifier (PID) to this " - "file on start. The file is removed on clean " - "exit. 
[default: %default]", - default=None, dest="pidfile") - - parser.add_option("--category", - help="Category for the build change", - default=None, dest="category") - - (options, _) = parser.parse_args() - - if options.auth is not None and ":" not in options.auth: - parser.error("--auth did not contain ':'") - - if options.pidfile: - with open(options.pidfile, 'w') as f: - f.write(str(os.getpid())) - - filename = options.log - log_format = "%(asctime)s - %(levelname)s - %(message)s" - if options.log != "stdout": - logging.basicConfig(filename=filename, format=log_format, - level=logging._levelNames[options.level.upper()]) - else: - logging.basicConfig(format=log_format, - handlers=[ - logging.StreamHandler(stream=sys.stdout)], - level=logging._levelNames[options.level.upper()]) - - return options - - -def run_hook(options): - github_bot = GitHubBuildBot() - github_bot.github = options.github - github_bot.master = options.buildmaster - github_bot.secret = options.secret - github_bot.auth = options.auth - github_bot.head_commit = options.head_commit - github_bot.category = options.category - - site = server.Site(github_bot) - reactor.listenTCP(options.port, site) - - reactor.run() - - -def main(): - options = setup_options() - - run_hook(options) - - -if __name__ == '__main__': - main() diff --git a/master/contrib/hgbuildbot.py b/master/contrib/hgbuildbot.py deleted file mode 100644 index 9059ff167cd..00000000000 --- a/master/contrib/hgbuildbot.py +++ /dev/null @@ -1,268 +0,0 @@ -# This file is part of Buildbot. Buildbot is free software: you can -# redistribute it and/or modify it under the terms of the GNU General Public -# License as published by the Free Software Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Portions Copyright Buildbot Team Members -# Portions Copyright 2007 Frederic Leroy -# Portions Copyright 2016 Louis Opter -# -# -# Documentation -# ============= -# -# Mercurial "changegroup" hook that notifies Buildbot when a number of -# changsets is brought into the repository from elsewhere. -# -# Your Buildmaster needs to define a base ChangeHook, you should configure it -# behind a reverse proxy that does TLS and authentication for you and/or keep -# it behind a firewall. See the docs for more details: -# -# http://docs.buildbot.net/latest/manual/cfg-wwwhooks.html -# -# Copy this file to ".hg/hgbuildbot.py" in the repository that should notify -# Buildbot. -# -# Add it to the "[hooks]" section of ".hg/hgrc". Also add a "[hgbuildbot]" -# section with additional parameters, for example: -# -# [hooks] -# changegroup.buildbot = python:.hg/hgbuiltbot.py:hook -# -# [hgbuildbot] -# venv = /home/buildbot/.virtualenvs/builtbot/lib/python2.7/site-packages -# master = http://localhost:8020/change_hook/base -# -# -# Available parmeters -# ------------------- -# -# venv -# The hook needs the Python package "requests". You can optionally point to -# virtualenv if it is not installed globally: -# -# Optional; default: None -# -# Example: -# -# venv = /path/to/venv/lib/pythonX.Y/site-packages -# -# master -# URLs of the Buildmaster(s) to notify. 
-# Can be a single entry or a comma-separated list. -# -# Mandatory. -# -# Examples: -# -# master = localhost:8020/change_hook/base -# master = bm1.example.org:8020/change_hook/base,bm2.example.org:8020/change_hook/base -# -# user -# User for connecting to the Buildmaster. (Basic auth will be used). -# -# Optional. -# -# passwd -# Password for connecting to the Buildmaster. -# -# Optional. -# -# branchtype -# The branchmodel you use: "inrepo" for named branches (managed by -# "hg branch") or "dirname" for directory based branches (the last component -# of the repository's directory will then be used as branch name). -# -# Optional; default: inrepo -# -# branch -# Explicitly specify a branchname instead of using the repo's basename when -# using "branchtype = dirname". -# -# Optional. -# -# baseurl -# Prefix for the repository URL sent to the Buildmaster. See below for -# details. -# -# Optional. The hook will also check the [web] section for this parameter. -# -# strip -# Strip as many slashes from the repo dir before appending it to baseurl. -# See below for details. -# -# Optional; default: 0; The hook will also check the [notify] section for -# this parameter. -# -# category -# Category to assign to all change sets. -# -# Optional. -# -# project -# Project that the repo belongs to. -# -# Optional. -# -# codebase -# Codebase name for the repo. -# -# Optional. -# -# -# Repository URLs -# --------------- -# -# The hook sends a repository URL to the Buildmasters. It can be used by -# schedulers (e.g., for filtering) and is also used in the webview to create -# a link to the corresponding changeset. -# -# By default, the absolute repository path (e.g., "/home/hg/repos/myrepo") will -# be used. The webview will in this case simply append the path to its own -# hostname in order to create a link to that change (e.g., -# "http://localhost:8010/home/hg/repos/myrepo"). -# -# You can alternatively strip some of the repo path's components and prepend -# a custom base URL instead. For example, if you want to create an URL like -# "https://code.company.com/myrepo", you must specify the following parameters: -# -# baseurl = https://code.company.com/ -# strip = 4 -# -# This would strip everything until (and including) the 4th "/" in the repo's -# path leaving only "myrepo" left. This would then be append to the base URL. 
- -from __future__ import absolute_import -from __future__ import print_function -from future.builtins import range - -import json -import os -import os.path - -import requests - -from mercurial.encoding import fromlocal -from mercurial.node import hex -from mercurial.node import nullid - - -def hook(ui, repo, hooktype, node=None, source=None, **kwargs): - if hooktype != 'changegroup': - ui.status('hgbuildbot: hooktype %s not supported.\n' % hooktype) - return - - # Read config parameters - masters = ui.configlist('hgbuildbot', 'master') - if not masters: - ui.write('* You must add a [hgbuildbot] section to .hg/hgrc in ' - 'order to use the Buildbot hook\n') - return - - # - virtualenv - venv = ui.config('hgbuildbot', 'venv', None) - if venv is not None: - if not os.path.isdir(venv): - ui.write('* Virtualenv "%s" does not exist.\n' % venv) - else: - activate_this = os.path.join(venv, "bin/activate_this.py") - with open(activate_this) as f: - activateThisScript = f.read() - exec(activateThisScript, dict(__file__=activate_this)) - - # - auth - username = ui.config('hgbuildbot', 'user') - password = ui.config('hgbuildbot', 'passwd') - if username is not None and password is not None: - auth = requests.auth.HTTPBasicAuth(username, password) - else: - auth = None - - # - branch - branchtype = ui.config('hgbuildbot', 'branchtype', 'inrepo') - branch = ui.config('hgbuildbot', 'branch', None) - - # - repo URL - baseurl = ui.config('hgbuildbot', 'baseurl', - ui.config('web', 'baseurl', '')) - stripcount = int(ui.config('hgbuildbot', 'strip', - ui.config('notify', 'strip', 0))) - - # - category, project and codebase - category = ui.config('hgbuildbot', 'category', None) - project = ui.config('hgbuildbot', 'project', '') - codebase = ui.config('hgbuildbot', 'codebase', '') - - # Process changesets - if branch is None and branchtype == 'dirname': - branch = os.path.basename(repo.root) - # If branchtype == 'inrepo', update "branch" for each commit later. 
- - repository = strip(repo.root, stripcount) - repository = baseurl + repository - - start = repo[node].rev() - end = len(repo) - - for rev in range(start, end): - # send changeset - node = repo.changelog.node(rev) - log = repo.changelog.read(node) - manifest, user, (time, timezone), files, desc, extra = log - parents = [p for p in repo.changelog.parents(node) if p != nullid] - - if branchtype == 'inrepo': - branch = extra['branch'] - if branch: - branch = fromlocal(branch) - - is_merge = len(parents) > 1 - # merges don't always contain files, but at least one file is - # required by buildbot - if is_merge and not files: - files = ["merge"] - properties = {'is_merge': is_merge} - - change = { - # 'master': master, - 'branch': branch, - 'revision': hex(node), - 'comments': fromlocal(desc), - 'files': json.dumps(files), - 'author': fromlocal(user), - 'category': category, - 'when': time, - 'properties': json.dumps(properties), - 'repository': repository, - 'project': project, - 'codebase': codebase, - } - for master in masters: - response = requests.post( - master, - auth=auth, - params=change, - headers={"Content-Type": "application/x-www-form-urlencoded"}, - ) - if not response.ok: - ui.warn("couldn't notify buildbot about {}: {} {}".format( - hex(node)[:12], response.status_code, response.reason - )) - else: - ui.status("notified buildbot about {}".format(hex(node)[:12])) - - -def strip(path, count): - """Strip the count first slash of the path""" - # First normalize it - path = '/'.join(path.split(os.sep)) - # and strip the *count* first slash - return path.split('/', count)[-1] diff --git a/master/contrib/init-scripts/buildmaster.default b/master/contrib/init-scripts/buildmaster.default deleted file mode 100644 index 60b6c4ae042..00000000000 --- a/master/contrib/init-scripts/buildmaster.default +++ /dev/null @@ -1,12 +0,0 @@ -MASTER_RUNNER=/usr/bin/buildbot - -# NOTE: MASTER_ENABLED has changed its behaviour in version 0.8.4. Use -# 'true|yes|1' to enable instance and 'false|no|0' to disable. Other -# values will be considered as syntax error. - -MASTER_ENABLED[1]=0 # 1-enabled, 0-disabled -MASTER_NAME[1]="buildmaster #1" # short name printed on start/stop -MASTER_USER[1]="buildbot" # user to run master as -MASTER_BASEDIR[1]="" # basedir to master (absolute path) -MASTER_OPTIONS[1]="" # buildbot options -MASTER_PREFIXCMD[1]="" # prefix command, i.e. nice, linux32, dchroot diff --git a/master/contrib/init-scripts/buildmaster.init.sh b/master/contrib/init-scripts/buildmaster.init.sh deleted file mode 100755 index bc55119f703..00000000000 --- a/master/contrib/init-scripts/buildmaster.init.sh +++ /dev/null @@ -1,182 +0,0 @@ -#!/bin/bash - -### Maintain compatibility with chkconfig -# chkconfig: 2345 83 17 -# description: buildmaster - -### BEGIN INIT INFO -# Provides: buildmaster -# Required-Start: $remote_fs -# Required-Stop: $remote_fs -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Buildbot master init script -# Description: This file allows running buildbot master instances at -# startup -### END INIT INFO - -PATH=/sbin:/bin:/usr/sbin:/usr/bin -MASTER_RUNNER=/usr/bin/buildbot - -. /lib/lsb/init-functions - -# Source buildmaster configuration -[[ -r /etc/default/buildmaster ]] && . /etc/default/buildmaster -#[[ -r /etc/sysconfig/buildmaster ]] && . 
/etc/sysconfig/buildmaster - -# Or define/override the configuration here -#MASTER_ENABLED[1]=0 # 1-enabled, 0-disabled -#MASTER_NAME[1]="buildmaster #1" # short name printed on start/stop -#MASTER_USER[1]="buildbot" # user to run master as -#MASTER_BASEDIR[1]="" # basedir to master (absolute path) -#MASTER_OPTIONS[1]="" # buildbot options -#MASTER_PREFIXCMD[1]="" # prefix command, i.e. nice, linux32, dchroot - -if [[ ! -x ${MASTER_RUNNER} ]]; then - log_failure_msg "does not exist or not an executable file: ${MASTER_RUNNER}" - exit 1 -fi - -function is_enabled() { - ANSWER=`echo $1|tr "[:upper:]" "[:lower:]"` - [[ "$ANSWER" == "yes" ]] || [[ "$ANSWER" == "true" ]] || [[ "$ANSWER" == "1" ]] - return $? -} - -function is_disabled() { - ANSWER=`echo $1|tr "[:upper:]" "[:lower:]"` - [[ "$ANSWER" == "no" ]] || [[ "$ANSWER" == "false" ]] || [[ "$ANSWER" == "0" ]] - return $? -} - - -function master_config_valid() { - # Function validates buildmaster instance startup variables based on array - # index - local errors=0 - local index=$1 - - if ! is_enabled "${MASTER_ENABLED[$index]}" && ! is_disabled "${MASTER_ENABLED[$index]}" ; then - log_warning_msg "buildmaster #${i}: invalid enabled status" - errors=$(($errors+1)) - fi - - if [[ -z ${MASTER_NAME[$index]} ]]; then - log_failure_msg "buildmaster #${i}: no name" - errors=$(($errors+1)) - fi - - if [[ -z ${MASTER_USER[$index]} ]]; then - log_failure_msg "buildmaster #${i}: no run user specified" - errors=$( ($errors+1) ) - elif ! getent passwd ${MASTER_USER[$index]} >/dev/null; then - log_failure_msg "buildmaster #${i}: unknown user ${MASTER_USER[$index]}" - errors=$(($errors+1)) - fi - - if [[ ! -d "${MASTER_BASEDIR[$index]}" ]]; then - log_failure_msg "buildmaster ${i}: basedir does not exist ${MASTER_BASEDIR[$index]}" - errors=$(($errors+1)) - fi - - return $errors -} - -function check_config() { - itemcount="${#MASTER_ENABLED[@]} - ${#MASTER_NAME[@]} - ${#MASTER_USER[@]} - ${#MASTER_BASEDIR[@]} - ${#MASTER_OPTIONS[@]} - ${#MASTER_PREFIXCMD[@]}" - - if [[ $(echo "$itemcount" | tr -d ' ' | sort -u | wc -l) -ne 1 ]]; then - log_failure_msg "MASTER_* arrays must have an equal number of elements!" - return 1 - fi - - errors=0 - for i in $( seq ${#MASTER_ENABLED[@]} ); do - if is_disabled "${MASTER_ENABLED[$i]}" ; then - log_warning_msg "buildmaster #${i}: disabled" - continue - fi - master_config_valid $i - errors=$(($errors+$?)) - done - - [[ $errors == 0 ]]; return $? -} - -check_config || exit $? - -function iscallable () { type $1 2>/dev/null | grep -q 'shell function'; } - -function master_op () { - op=$1 ; mi=$2 - - ${MASTER_PREFIXCMD[$mi]} \ - su -s /bin/sh \ - -c "$MASTER_RUNNER $op ${MASTER_OPTIONS[$mi]} ${MASTER_BASEDIR[$mi]} > /dev/null" \ - - ${MASTER_USER[$mi]} - return $? -} - -function do_op () { - errors=0 - for i in $( seq ${#MASTER_ENABLED[@]} ); do - if [ -n "$4" ] && [ "$4" != "${MASTER_NAME[$i]}" ] ; then - continue - elif is_disabled "${MASTER_ENABLED[$i]}" && [ -z "$4" ] ; then - continue - fi - - # Some rhels don't come with all the lsb goodies - if iscallable log_daemon_msg; then - log_daemon_msg "$3 \"${MASTER_NAME[$i]}\"" - if eval $1 $2 $i; then - log_end_msg 0 - else - log_end_msg 1 - errors=$(($errors+1)) - fi - else - if eval $1 $2 $i; then - log_success_msg "$3 \"${MASTER_NAME[$i]}\"" - else - log_failure_msg "$3 \"${MASTER_NAME[$i]}\"" - errors=$(($errors+1)) - fi - fi - done - return $errors -} - -case "$1" in - start) - do_op "master_op" "start" "Starting buildmaster" "$2" - exit $? 
- ;; - stop) - do_op "master_op" "stop" "Stopping buildmaster" "$2" - exit $? - ;; - reload) - do_op "master_op" "reconfig" "Reloading buildmaster" "$2" - exit $? - ;; - restart|force-reload) - do_op "master_op" "restart" "Restarting buildmaster" "$2" - exit $? - ;; - upgrade) - do_op "master_op" "upgrade-master" "Upgrading buildmaster" "$2" - exit $? - ;; - *) - echo "Usage: $0 {start|stop|restart|reload|force-reload|upgrade}" - exit 1 - ;; -esac - -exit 0 diff --git a/master/contrib/libvirt/network.xml b/master/contrib/libvirt/network.xml deleted file mode 100644 index 825a1865a19..00000000000 --- a/master/contrib/libvirt/network.xml +++ /dev/null @@ -1,16 +0,0 @@ - - buildbot-network - - - - - - - - - - - - - - diff --git a/master/contrib/libvirt/vmbuilder b/master/contrib/libvirt/vmbuilder deleted file mode 100755 index 5fa106df6dd..00000000000 --- a/master/contrib/libvirt/vmbuilder +++ /dev/null @@ -1,147 +0,0 @@ -#! /usr/bin/env python - -""" -This script can be used to generate an Ubuntu VM that is suitable for use by the libvirt backend of buildbot. - -It creates a buildbot slave and then changes the buildbot.tac to get its username from the hostname. The hostname is set by -changing the DHCP script. - -See network.xml for how to map a MAC address to an IP address and a hostname. You can load that configuration on to your master by running:: - - virsh net-define network.xml - -Note that the VM's also need their MAC address set, and configuring to use the new network, or this won't work.. -""" - -import os, platform, tempfile - -if platform.machine() == "x86_64": - arch = "amd64" -else: - arch = "i386" - -postboot = """\ -#!/bin/sh -chroot $1 update-rc.d -f buildbot remove - -chroot $1 addgroup --system minion -chroot $1 adduser --system --home /var/local/buildbot --shell /bin/bash --ingroup zope --disabled-password --disabled-login minion - -mkdir -p $1/var/local/buildbot -chroot $1 chown minion: /var/local/buildbot - -chroot $1 sudo -u minion /usr/bin/buildbot create-slave /var/local/buildbot %(master_host)s:%(master_port)s %(slave)s %(slave_password)s - -cat > $1/etc/default/buildbot << HERE -BB_NUMBER[0]=0 -BB_NAME[0]="minion" -BB_USER[0]="minion" -BB_BASEDIR[0]="/var/local/buildbot" -BB_OPTIONS[0]="" -BB_PREFIXCMD[0]="" -HERE - -cat > $1/var/local/buildbot/buildbot.tac << HERE -from twisted.application import service -from buildbot.slave.bot import BuildSlave -import socket - -basedir = r'/var/local/buildbot' -buildmaster_host = '%(master_host)s' -port = %(master_port)s -slavename = socket.gethostname() -passwd = "%(slave_password)s" -keepalive = 600 -usepty = 0 -umask = None -maxdelay = 300 -rotateLength = 1000000 -maxRotatedFiles = None - -application = service.Application('buildslave') -s = BuildSlave(buildmaster_host, port, slavename, passwd, basedir, - keepalive, usepty, umask=umask, maxdelay=maxdelay) -s.setServiceParent(application) -HERE - -cat > $1/etc/dhcp3/dhclient-exit-hooks.d/update-hostname << HERE -if [ x\$reason != xBOUND ] && [ x\$reason != xREBIND ] && [ x\$reason != xREBOOT ]; then exit; fi -echo Updating hostname: \$new_host_name -hostname \$new_host_name -echo Starting buildbot -/etc/init.d/buildbot stop || true -/etc/init.d/buildbot start -HERE - -cat > $1/etc/udev/rules.d/virtio.rules << HERE -KERNEL=="vda*", SYMLINK+="sda%%n" -HERE -""" - -class VMBuilder: - - """ Class that executes ubuntu-vm-builder with appropriate options """ - - postboot = postboot - - defaults = { - "rootsize": 8192, - "mem": 1024, - "domain": 'yourdomain.com', - "hostname": 
"ubuntu", - "arch": arch, - "variant": "minbase", - "components": "main,universe,multiverse,restricted", - "lang": "en_GB.UTF-8", - "timezone": "Europe/London", - "execscript": os.path.realpath(os.path.join(os.curdir, "postboot.sh")), - "addpkg": [ - "standard^", "server^", "gpgv", "openssh-server", "buildbot", "subversion", - ], - } - - def __init__(self, hypervisor="kvm", suite="karmic", destdir="ubuntu", **kw): - self.hypervisor = hypervisor - self.suite = suite - self.destdir = destdir - self.options = self.defaults.copy() - self.options.update(**kw) - f = tempfile.NamedTemporaryFile(delete=False, prefix="/var/tmp/") - print >>f, self.postboot % { - 'master_host': '192.168.201.1', - 'master_port': '8081', - 'slave': 'slave', - 'slave_password': 'password', - } - f.close() - os.chmod(f.name, 0755) - self.options['execscript'] = f.name - - def build(self): - optstring = [] - for k, v in self.options.items(): - if type(v) == type([]): - for i in v: - if i: - optstring.append("--%s=%s" % (k, i)) - else: - if v: - optstring.append("--%s=%s" % (k, v)) - execute=("ubuntu-vm-builder %s %s -d%s %s" % ( - self.hypervisor, - self.suite, - self.destdir, - " ".join(optstring))) - print execute - os.system(execute) - -if __name__ == "__main__": - import sys, socket, optparse - - parser = optparse.OptionParser(usage="%prog [options] project") - parser.add_option("-p", "--proxy", help="http proxy URL") - (options, args) = parser.parse_args() - builder = VMBuilder(proxy=options.proxy) - builder.build() - - diff --git a/master/contrib/os-x/README b/master/contrib/os-x/README deleted file mode 100644 index 6cc1d640eaa..00000000000 --- a/master/contrib/os-x/README +++ /dev/null @@ -1,23 +0,0 @@ -Mark Pauley contributed the two launchd plist files for OS-X (10.4+) to start -a buildmaster or buildslave automatically at startup: - - contrib/OS-X/net.sourceforge.buildbot.master.plist - contrib/OS-X/net.sourceforge.buildbot.slave.plist - -His email message is as follows: - - Message-Id: - From: Mark Pauley - To: buildbot-devel - Date: Wed, 24 Jan 2007 11:05:44 -0800 - Subject: [Buildbot-devel] Sample buildbot launchd plists for MacOS 10.4+ - - Hi guys, - I've had these kicking around for a while and thought that maybe - someone would like to see them. Installing either of these two to / - Library/LaunchDaemons will cause the bulidbot slave or master to auto- - start as whatever user you like on launch. This is the "right way to - do this" going forward, startupitems are deprecated. Please note that - this means any tests that require a windowserver connection on os x - won't work. 
- diff --git a/master/contrib/os-x/net.sourceforge.buildbot.master.plist b/master/contrib/os-x/net.sourceforge.buildbot.master.plist deleted file mode 100644 index 3ba239563bb..00000000000 --- a/master/contrib/os-x/net.sourceforge.buildbot.master.plist +++ /dev/null @@ -1,42 +0,0 @@ - - - - - Label - net.sourceforge.buildbot.slave - - - UserName - buildbot - - - WorkingDirectory - /Users/buildbot/Buildbot_Master - - ProgramArguments - - /usr/bin/twistd - --nodaemon - --python=buildbot.tac - --logfile=buildbot.log - --prefix=master - - - - QueueDirectories - / - - KeepAlive - - SuccessfulExit - - - - RunAtLoad - - - StandardErrorPath - /var/log/build_master.log - - diff --git a/master/contrib/post_build_request.py b/master/contrib/post_build_request.py deleted file mode 100755 index 070e104531c..00000000000 --- a/master/contrib/post_build_request.py +++ /dev/null @@ -1,245 +0,0 @@ -#!/usr/bin/env python - -# This file is part of Buildbot. Buildbot is free software: you can -# redistribute it and/or modify it under the terms of the GNU General Public -# License as published by the Free Software Foundation, version 2. -# -# This program is distributed in the hope that it will be useful, but WITHOUT -# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS -# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more -# details. -# -# You should have received a copy of the GNU General Public License along with -# this program; if not, write to the Free Software Foundation, Inc., 51 -# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# Portions Copyright Buildbot Team Members -# Portions Copyright 2013 OpenGamma Inc. and the OpenGamma group of companies - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import getpass -import httplib -import optparse -import os -import textwrap -import urllib - -# Find a working json module. 
Code is from -# Paul Wise : -# http://lists.debian.org/debian-python/2010/02/msg00016.html -try: - import json # python 2.6 - assert json # silence pyflakes -except ImportError: - import simplejson as json # python 2.4 to 2.5 -try: - _tmp = json.loads -except AttributeError: - import warnings - import sys - warnings.warn("Use simplejson, not the old json module.") - sys.modules.pop('json') # get rid of the bad json module - import simplejson as json - -# Make a dictionary with options from command line - - -def buildURL(options): - urlDict = {} - if options.author: - author = options.author - else: - author = getpass.getuser() - - urlDict['author'] = author - - if options.files: - urlDict['files'] = json.dumps(options.files) - - if options.comments: - urlDict['comments'] = options.comments - else: - # A comment is required by the buildbot DB - urlDict['comments'] = 'post_build_request submission' - - if options.revision: - urlDict['revision'] = options.revision - - if options.when: - urlDict['when'] = options.when - - if options.branch: - urlDict['branch'] = options.branch - - if options.category: - urlDict['category'] = options.category - - if options.revlink: - urlDict['revlink'] = options.revlink - - if options.properties: - urlDict['properties'] = json.dumps(options.properties) - - if options.repository: - urlDict['repository'] = options.repository - - if options.project: - urlDict['project'] = options.project - - return urlDict - - -def propertyCB(option, opt, value, parser): - pdict = eval(value) - for key in pdict.keys(): - parser.values.properties[key] = pdict[key] - -__version__ = '0.1' - -description = "" - -usage = """%prog [options] - -This script is used to submit a change to the buildbot master using the -/change_hook web interface. Options are url encoded and submitted -using a HTTP POST. The repository and project must be specified. - -This can be used to force a build. For example, create a scheduler that -listens for changes on a category 'release': - -releaseFilt = ChangeFilter(category="release") -s=Scheduler(name="Release", change_filter=releaseFilt, - treeStableTimer=10, - builderNames=["UB10.4 x86_64 Release"])) -c['schedulers'].append(s) - -Then run this script with the options: - ---repository --project --category release -""" - -parser = optparse.OptionParser(description=description, - usage=usage, - add_help_option=True, - version=__version__) - -parser.add_option("-w", "--who", dest='author', metavar="AUTHOR", - help=textwrap.dedent("""\ - Who is submitting this request. - This becomes the Change.author attribute. - This defaults to the name of the user running this script - """)) -parser.add_option("-f", "--file", dest='files', action="append", metavar="FILE", - help=textwrap.dedent("""\ - Add a file to the change request. - This is added to the Change.files attribute. - NOTE: Setting the file URL is not supported - """)) -parser.add_option("-c", "--comments", dest='comments', metavar="COMMENTS", - help=textwrap.dedent("""\ - Comments for the change. This becomes the Change.comments attribute - """)) -parser.add_option("-R", "--revision", dest='revision', metavar="REVISION", - help=textwrap.dedent("""\ - This is the revision of the change. - This becomes the Change.revision attribute. - """)) -parser.add_option("-W", "--when", dest='when', metavar="WHEN", - help=textwrap.dedent("""\ - This this the date of the change. - This becomes the Change.when attribute. 
- """)) -parser.add_option("-b", "--branch", dest='branch', metavar="BRANCH", - help=textwrap.dedent("""\ - This this the branch of the change. - This becomes the Change.branch attribute. - """)) -parser.add_option("-C", "--category", dest='category', metavar="CAT", - help=textwrap.dedent("""\ - Category for change. This becomes the Change.category attribute, which - can be used within the buildmaster to filter changes. - """)) -parser.add_option("--revlink", dest='revlink', metavar="REVLINK", - help=textwrap.dedent("""\ - This this the revlink of the change. - This becomes the Change.revlink. - """)) -parser.add_option("-p", "--property", dest='properties', action="callback", callback=propertyCB, - type="string", metavar="PROP", - help=textwrap.dedent("""\ - This adds a single property. This can be specified multiple times. - The argument is a string representing python dictionary. For example, - {'foo' : [ 'bar', 'baz' ]} - This becomes the Change.properties attribute. - """)) -parser.add_option("-r", "--repository", dest='repository', metavar="PATH", - help=textwrap.dedent("""\ - Repository for use by buildbot workers to checkout code. - This becomes the Change.repository attribute. - Exmaple: :ext:myhost:/cvsroot - """)) -parser.add_option("-P", "--project", dest='project', metavar="PROJ", - help=textwrap.dedent("""\ - The project for the source. Often set to the CVS module being modified. This becomes - the Change.project attribute. - """)) -parser.add_option("-v", "--verbose", dest='verbosity', action="count", - help=textwrap.dedent("""\ - Print more detail. Shows the response status and reason received from the master. If - specified twice, it also shows the raw response. - """)) -parser.add_option("-H", "--host", dest='host', metavar="HOST", - default='localhost:8010', - help=textwrap.dedent("""\ - Host and optional port of buildbot. For example, bbhost:8010 - Defaults to %default - """)) -parser.add_option("-u", "--urlpath", dest='urlpath', metavar="URLPATH", - default='/change_hook/base', - help=textwrap.dedent("""\ - Path portion of URL. Defaults to %default - """)) -parser.add_option("-t", "--testing", action="store_true", dest="amTesting", default=False, - help=textwrap.dedent("""\ - Just print values and exit. 
- """)) -parser.set_defaults(properties={}) - -(options, args) = parser.parse_args() - -if options.repository is None: - print("repository must be specified") - parser.print_usage() - os._exit(2) - -if options.project is None: - print("project must be specified") - parser.print_usage() - os._exit(2) - -urlDict = buildURL(options) - -params = urllib.urlencode(urlDict) -headers = {"Content-type": "application/x-www-form-urlencoded", - "Accept": "text/plain"} -if options.amTesting: - print("params: %s" % params) - print("host: %s" % options.host) - print("urlpath: %s" % options.urlpath) -else: - conn = httplib.HTTPConnection(options.host) - conn.request("POST", options.urlpath, params, headers) - response = conn.getresponse() - data = response.read() - exitCode = 0 - if response.status is not 202: - exitCode = 1 - if options.verbosity >= 1: - print(response.status, response.reason) - if options.verbosity >= 2: - print("Raw response: %s" % (data)) - conn.close() - os._exit(exitCode) diff --git a/master/contrib/run_maxq.py b/master/contrib/run_maxq.py deleted file mode 100755 index 26afde7228a..00000000000 --- a/master/contrib/run_maxq.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env jython - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import glob -import sys - -testdir = sys.argv[1] - -orderfiles = glob.glob(testdir + '/*.tests') - -# wee. just be glad I didn't make this one gigantic nested listcomp. -# anyway, this builds a once-nested list of files to test. - -# open! -files = [open(fn) for fn in orderfiles] - -# create prelim list of lists of files! -files = [f.readlines() for f in files] - -# shwack newlines and filter out empties! -files = [filter(None, [fn.strip() for fn in fs]) for fs in files] - -# prefix with testdir -files = [[testdir + '/' + fn.strip() for fn in fs] for fs in files] - -print("Will run these tests:", files) - -i = 0 - -for testlist in files: - - print("===========================") - print("running tests from testlist", orderfiles[i]) - print("---------------------------") - i = i + 1 - - for test in testlist: - print("running test", test) - - try: - with open(test) as f: - exec(f.read(), globals().copy()) - - except Exception: - ei = sys.exc_info() - print("TEST FAILURE:", ei[1]) - - else: - print("SUCCESS") diff --git a/master/contrib/svn_buildbot.py b/master/contrib/svn_buildbot.py deleted file mode 100755 index 7af8754417f..00000000000 --- a/master/contrib/svn_buildbot.py +++ /dev/null @@ -1,290 +0,0 @@ -#!/usr/bin/python - -# this requires python >=2.3 for the 'sets' module. - -# The sets.py from python-2.3 appears to work fine under python2.2 . To -# install this script on a host with only python2.2, copy -# /usr/lib/python2.3/sets.py from a newer python into somewhere on your -# PYTHONPATH, then edit the #! line above to invoke python2.2 - -# python2.1 is right out - -# If you run this program as part of your SVN post-commit hooks, it will -# deliver Change notices to a buildmaster that is running a PBChangeSource -# instance. 
- -# edit your svn-repository/hooks/post-commit file, and add lines that look -# like this: - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function -from future.utils import text_type - -import commands -import os -import re -import sets -import sys - -from twisted.cred import credentials -from twisted.internet import defer -from twisted.internet import reactor -from twisted.python import usage -from twisted.spread import pb - -''' -# set up PYTHONPATH to contain Twisted/buildbot perhaps, if not already -# installed site-wide -. ~/.environment - -/path/to/svn_buildbot.py --repository "$REPOS" --revision "$REV" \ ---bbserver localhost --bbport 9989 --username myuser --auth passwd -''' - - -# We have hackish "-d" handling here rather than in the Options -# subclass below because a common error will be to not have twisted in -# PYTHONPATH; we want to be able to print that error to the log if -# debug mode is on, so we set it up before the imports. - -DEBUG = None - -if '-d' in sys.argv: - i = sys.argv.index('-d') - DEBUG = sys.argv[i + 1] - del sys.argv[i] - del sys.argv[i] - -if DEBUG: - f = open(DEBUG, 'a') - sys.stderr = f - sys.stdout = f - - -class Options(usage.Options): - optParameters = [ - ['repository', 'r', None, - "The repository that was changed."], - ['worker-repo', 'c', None, - "In case the repository differs for the workers."], - ['revision', 'v', None, - "The revision that we want to examine (default: latest)"], - ['bbserver', 's', 'localhost', - "The hostname of the server that buildbot is running on"], - ['bbport', 'p', 8007, - "The port that buildbot is listening on"], - ['username', 'u', 'change', - "Username used in PB connection auth"], - ['auth', 'a', 'changepw', - "Password used in PB connection auth"], - ['include', 'f', None, - '''\ -Search the list of changed files for this regular expression, and if there is -at least one match notify buildbot; otherwise buildbot will not do a build. -You may provide more than one -f argument to try multiple -patterns. If no filter is given, buildbot will always be notified.'''], - ['filter', 'f', None, "Same as --include. (Deprecated)"], - ['exclude', 'F', None, - '''\ -The inverse of --filter. Changed files matching this expression will never -be considered for a build. -You may provide more than one -F argument to try multiple -patterns. Excludes override includes, that is, patterns that match both an -include and an exclude will be excluded.'''], - ['encoding', 'e', "utf8", - "The encoding of the strings from subversion (default: utf8)"], - ['project', 'P', None, "The project for the source."] - ] - optFlags = [ - ['dryrun', 'n', "Do not actually send changes"], - ] - - def __init__(self): - usage.Options.__init__(self) - self._includes = [] - self._excludes = [] - self['includes'] = None - self['excludes'] = None - - def opt_include(self, arg): - self._includes.append('.*%s.*' % (arg, )) - - opt_filter = opt_include - - def opt_exclude(self, arg): - self._excludes.append('.*%s.*' % (arg, )) - - def postOptions(self): - if self['repository'] is None: - raise usage.error("You must pass --repository") - if self._includes: - self['includes'] = '(%s)' % ('|'.join(self._includes), ) - if self._excludes: - self['excludes'] = '(%s)' % ('|'.join(self._excludes), ) - - -def split_file_dummy(changed_file): - """Split the repository-relative filename into a tuple of (branchname, - branch_relative_filename). If you have no branches, this should just - return (None, changed_file). 
- """ - return (None, changed_file) - - -# this version handles repository layouts that look like: -# trunk/files.. -> trunk -# branches/branch1/files.. -> branches/branch1 -# branches/branch2/files.. -> branches/branch2 -# - - -def split_file_branches(changed_file): - pieces = changed_file.split(os.sep) - if pieces[0] == 'branches': - return (os.path.join(*pieces[:2]), - os.path.join(*pieces[2:])) - if pieces[0] == 'trunk': - return (pieces[0], os.path.join(*pieces[1:])) - # there are other sibilings of 'trunk' and 'branches'. Pretend they are - # all just funny-named branches, and let the Schedulers ignore them. - # return (pieces[0], os.path.join(*pieces[1:])) - - raise RuntimeError("cannot determine branch for '%s'" % changed_file) - - -split_file = split_file_dummy - - -class ChangeSender: - - def getChanges(self, opts): - """Generate and stash a list of Change dictionaries, ready to be sent - to the buildmaster's PBChangeSource.""" - - # first we extract information about the files that were changed - repo = opts['repository'] - worker_repo = opts['worker-repo'] or repo - print("Repo:", repo) - rev_arg = '' - if opts['revision']: - rev_arg = '-r %s' % (opts['revision'], ) - changed = commands.getoutput('svnlook changed %s "%s"' % ( - rev_arg, repo)).split('\n') - # the first 4 columns can contain status information - changed = [x[4:] for x in changed] - - message = commands.getoutput('svnlook log %s "%s"' % (rev_arg, repo)) - who = commands.getoutput('svnlook author %s "%s"' % (rev_arg, repo)) - revision = opts.get('revision') - if revision is not None: - revision = str(int(revision)) - - # see if we even need to notify buildbot by looking at filters first - changestring = '\n'.join(changed) - fltpat = opts['includes'] - if fltpat: - included = sets.Set(re.findall(fltpat, changestring)) - else: - included = sets.Set(changed) - - expat = opts['excludes'] - if expat: - excluded = sets.Set(re.findall(expat, changestring)) - else: - excluded = sets.Set([]) - if len(included.difference(excluded)) == 0: - print(changestring) - print("""\ - Buildbot was not interested, no changes matched any of these filters:\n %s - or all the changes matched these exclusions:\n %s\ - """ % (fltpat, expat)) - sys.exit(0) - - # now see which branches are involved - files_per_branch = {} - for f in changed: - branch, filename = split_file(f) - if branch in files_per_branch.keys(): - files_per_branch[branch].append(filename) - else: - files_per_branch[branch] = [filename] - - # now create the Change dictionaries - changes = [] - encoding = opts['encoding'] - for branch in files_per_branch.keys(): - d = {'who': text_type(who, encoding=encoding), - 'repository': text_type(worker_repo, encoding=encoding), - 'comments': text_type(message, encoding=encoding), - 'revision': revision, - 'project': text_type(opts['project'] or "", encoding=encoding), - 'src': 'svn', - } - if branch: - d['branch'] = text_type(branch, encoding=encoding) - else: - d['branch'] = branch - - files = [] - for file in files_per_branch[branch]: - files.append(text_type(file, encoding=encoding)) - d['files'] = files - - changes.append(d) - - return changes - - def sendChanges(self, opts, changes): - pbcf = pb.PBClientFactory() - reactor.connectTCP(opts['bbserver'], int(opts['bbport']), pbcf) - creds = credentials.UsernamePassword(opts['username'], opts['auth']) - d = pbcf.login(creds) - d.addCallback(self.sendAllChanges, changes) - return d - - def sendAllChanges(self, remote, changes): - dl = [remote.callRemote('addChange', change) - for 
change in changes] - return defer.gatherResults(dl, consumeErrors=True) - - def run(self): - opts = Options() - try: - opts.parseOptions() - except usage.error as ue: - print(opts) - print("%s: %s" % (sys.argv[0], ue)) - sys.exit() - - changes = self.getChanges(opts) - if opts['dryrun']: - for i, c in enumerate(changes): - print("CHANGE #%d" % (i + 1)) - keys = sorted(c.keys()) - for k in keys: - print("[%10s]: %s" % (k, c[k])) - print("*NOT* sending any changes") - return - - d = self.sendChanges(opts, changes) - - def quit(*why): - print("quitting! because", why) - reactor.stop() - - d.addCallback(quit, "SUCCESS") - - @d.addErrback - def failed(f): - print("FAILURE") - print(f) - reactor.stop() - - reactor.callLater(60, quit, "TIMEOUT") - reactor.run() - - -if __name__ == '__main__': - s = ChangeSender() - s.run() diff --git a/master/contrib/svn_watcher.py b/master/contrib/svn_watcher.py deleted file mode 100755 index 5e647fe3765..00000000000 --- a/master/contrib/svn_watcher.py +++ /dev/null @@ -1,238 +0,0 @@ -#!/usr/bin/python - -# This is a program which will poll a (remote) SVN repository, looking for -# new revisions. It then uses the 'buildbot sendchange' command to deliver -# information about the Change to a (remote) buildmaster. It can be run from -# a cron job on a periodic basis, or can be told (with the 'watch' option) to -# automatically repeat its check every 10 minutes. - -# This script does not store any state information, so to avoid spurious -# changes you must use the 'watch' option and let it run forever. - -# You will need to provide it with the location of the buildmaster's -# PBChangeSource port (in the form hostname:portnum), and the svnurl of the -# repository to watch. - - -# 15.03.06 by John Pye -# 29.03.06 by Niklaus Giger, added support to run under windows, -# added invocation option -# 22.03.10 by Johnnie Pittman, added support for category and interval -# options. - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import subprocess -import sys -import time -import xml.dom.minidom -from optparse import OptionParser -from xml.parsers.expat import ExpatError - -if sys.platform == 'win32': - import win32pipe - - -def getoutput(cmd): - p = subprocess.Popen(cmd, stdout=subprocess.PIPE) - return p.stdout.read() - - -def sendchange_cmd(master, revisionData): - cmd = [ - "buildbot", - "sendchange", - "--master=%s" % master, - "--revision=%s" % revisionData['revision'], - "--username=%s" % revisionData['author'], - "--comments=%s" % revisionData['comments'], - "--vc=%s" % 'svn', - ] - if opts.category: - cmd.append("--category=%s" % opts.category) - for path in revisionData['paths']: - cmd.append(path) - - if opts.verbose: - print(cmd) - - return cmd - - -def parseChangeXML(raw_xml): - """Parse the raw xml and return a dict with key pairs set. - - Commmand we're parsing: - - svn log --non-interactive --xml --verbose --limit=1 - - With an output that looks like this: - - - - - mwiggins - 2009-11-11T17:16:48.012357Z - - /tags/Latest - - Updates/latest - - - """ - - data = dict() - - # parse the xml string and grab the first log entry. 
- try: - doc = xml.dom.minidom.parseString(raw_xml) - except ExpatError: - print("\nError: Got an empty response with an empty changeset.\n") - raise - log_entry = doc.getElementsByTagName("logentry")[0] - - # grab the appropriate meta data we need - data['revision'] = log_entry.getAttribute("revision") - data['author'] = "".join([t.data for t in - log_entry.getElementsByTagName("author")[0].childNodes]) - data['comments'] = "".join([t.data for t in - log_entry.getElementsByTagName("msg")[0].childNodes]) - - # grab the appropriate file paths that changed. - pathlist = log_entry.getElementsByTagName("paths")[0] - paths = [] - for path in pathlist.getElementsByTagName("path"): - paths.append("".join([t.data for t in path.childNodes])) - data['paths'] = paths - - return data - - -def checkChanges(repo, master, oldRevision=-1): - cmd = ["svn", "log", "--non-interactive", "--xml", "--verbose", - "--limit=1", repo] - - if opts.verbose: - print("Getting last revision of repository: " + repo) - - if sys.platform == 'win32': - f = win32pipe.popen(cmd) - xml1 = ''.join(f.readlines()) - f.close() - else: - xml1 = getoutput(cmd) - - if opts.verbose: - print("XML\n-----------\n" + xml1 + "\n\n") - - revisionData = parseChangeXML(xml1) - - if opts.verbose: - print("PATHS") - print(revisionData['paths']) - - if revisionData['revision'] != oldRevision: - - cmd = sendchange_cmd(master, revisionData) - - if sys.platform == 'win32': - f = win32pipe.popen(cmd) - pretty_time = time.strftime("%H.%M.%S ") - print("%s Revision %s: %s" % (pretty_time, revisionData['revision'], - ''.join(f.readlines()))) - f.close() - else: - xml1 = getoutput(cmd) - else: - pretty_time = time.strftime("%H.%M.%S ") - print("%s nothing has changed since revision %s" % (pretty_time, - revisionData['revision'])) - - return revisionData['revision'] - - -def build_parser(): - usagestr = "%prog [options] " - parser = OptionParser(usage=usagestr) - - parser.add_option( - "-c", "--category", dest="category", action="store", default="", - help="""Store a category name to be associated with sendchange msg.""" - ) - - parser.add_option( - "-i", "--interval", dest="interval", action="store", default=0, - help="Implies watch option and changes the time in minutes to the value specified.", - ) - - parser.add_option( - "-v", "--verbose", dest="verbose", action="store_true", default=False, - help="Enables more information to be presented on the command line.", - ) - - parser.add_option( - "", "--watch", dest="watch", action="store_true", default=False, - help="Automatically check the repo url every 10 minutes.", - ) - - return parser - - -def validate_args(args): - """Validate our arguments and exit if we don't have what we want.""" - if not args: - print("\nError: No arguments were specified.\n") - parser.print_help() - sys.exit(1) - elif len(args) > 2: - print("\nToo many arguments specified.\n") - parser.print_help() - sys.exit(2) - - -if __name__ == '__main__': - - # build our parser and validate our args - parser = build_parser() - (opts, args) = parser.parse_args() - validate_args(args) - if opts.interval: - try: - int(opts.interval) - except ValueError: - print("\nError: Value of the interval option must be a number.") - parser.print_help() - sys.exit(3) - - # grab what we need - repo_url = args[0] - bbmaster = args[1] - - # if watch is specified, run until stopped - if opts.watch or opts.interval: - oldRevision = -1 - print("Watching for changes in repo %s for master %s." 
% - (repo_url, bbmaster)) - while True: - try: - oldRevision = checkChanges(repo_url, bbmaster, oldRevision) - except ExpatError: - # had an empty changeset. Trapping the exception and moving - # on. - pass - try: - if opts.interval: - # Check the repository every interval in minutes the user - # specified. - time.sleep(int(opts.interval) * 60) - else: - # Check the repository every 10 minutes - time.sleep(10 * 60) - except KeyboardInterrupt: - print("\nReceived interrupt via keyboard. Shutting Down.") - sys.exit(0) - - # default action if watch isn't specified - checkChanges(repo_url, bbmaster) diff --git a/master/contrib/svnpoller.py b/master/contrib/svnpoller.py deleted file mode 100755 index b654facd9a6..00000000000 --- a/master/contrib/svnpoller.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/python -""" - svn.py - Script for BuildBot to monitor a remote Subversion repository. - Copyright (C) 2006 John Pye -""" -# This script is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License, or (at your option) any later version. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 -# USA - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import commands -import ConfigParser -import os.path -import xml.dom.minidom - -# change these settings to match your project -svnurl = "https://pse.cheme.cmu.edu/svn/ascend/code/trunk" -statefilename = "~/changemonitor/config.ini" -buildmaster = "buildbot.example.org:9989" # connects to a PBChangeSource - -xml1 = commands.getoutput( - "svn log --non-interactive --verbose --xml --limit=1 " + svnurl) -# print "XML\n-----------\n"+xml1+"\n\n" - -try: - doc = xml.dom.minidom.parseString(xml1) - el = doc.getElementsByTagName("logentry")[0] - revision = el.getAttribute("revision") - author = "".join([t.data for t in el.getElementsByTagName( - "author")[0].childNodes]) - comments = "".join([t.data for t in el.getElementsByTagName( - "msg")[0].childNodes]) - - pathlist = el.getElementsByTagName("paths")[0] - paths = [] - for p in pathlist.getElementsByTagName("path"): - paths.append("".join([t.data for t in p.childNodes])) - # print "PATHS" - # print paths -except xml.parsers.expat.ExpatError as e: - print("FAILED TO PARSE 'svn log' XML:") - print(str(e)) - print("----") - print("RECEIVED TEXT:") - print(xml1) - import sys - sys.exit(1) - -fname = statefilename -fname = os.path.expanduser(fname) -ini = ConfigParser.SafeConfigParser() - -try: - ini.read(fname) -except Exception: - print("Creating changemonitor config.ini:", fname) - ini.add_section("CurrentRevision") - ini.set("CurrentRevision", -1) - -try: - lastrevision = ini.get("CurrentRevision", "changeset") -except ConfigParser.NoOptionError: - print("NO OPTION FOUND") - lastrevision = -1 -except ConfigParser.NoSectionError: - print("NO SECTION FOUND") - lastrevision = -1 - -if lastrevision != revision: - - # comments = codecs.encodings.unicode_escape.encode(comments) - cmd = "buildbot 
sendchange --master=" + buildmaster + " --branch=trunk \ ---revision=\"" + revision + "\" --username=\"" + author + "\" --vc=\"svn\" \ ---comments=\"" + comments + "\" " + " ".join(paths) - - # print cmd - res = commands.getoutput(cmd) - - print("SUBMITTING NEW REVISION", revision) - if not ini.has_section("CurrentRevision"): - ini.add_section("CurrentRevision") - try: - ini.set("CurrentRevision", "changeset", revision) - f = open(fname, "w") - ini.write(f) - # print "WROTE CHANGES TO",fname - except Exception: - print("FAILED TO RECORD INI FILE") diff --git a/master/contrib/systemd/buildbot.service b/master/contrib/systemd/buildbot.service deleted file mode 100644 index 94f9b8ca270..00000000000 --- a/master/contrib/systemd/buildbot.service +++ /dev/null @@ -1,17 +0,0 @@ -[Unit] -Description=Buildbot Master -Wants=network.target -After=network.target - -[Service] -Type=forking -PIDFile=/srv/buildbot/master/twistd.pid -WorkingDirectory=/srv/buildbot -ExecStart=/usr/bin/buildbot start master -ExecReload=/usr/bin/buildbot reconfig master -ExecStop=/usr/bin/buildbot stop master -Restart=always -User=buildbot - -[Install] -WantedBy=multi-user.target diff --git a/master/contrib/viewcvspoll.py b/master/contrib/viewcvspoll.py deleted file mode 100755 index 8cc8ef5944a..00000000000 --- a/master/contrib/viewcvspoll.py +++ /dev/null @@ -1,102 +0,0 @@ -#! /usr/bin/python - -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -import os.path -import time - -import MySQLdb # @UnresolvedImport - -from twisted.cred import credentials -from twisted.internet import reactor -from twisted.python import log -from twisted.spread import pb - -"""Based on the fakechanges.py contrib script""" - - -class ViewCvsPoller: - - def __init__(self): - - def _load_rc(): - import user - ret = {} - for line in open(os.path.join( - user.home, ".cvsblamerc")).readlines(): - if line.find("=") != -1: - key, val = line.split("=") - ret[key.strip()] = val.strip() - return ret - # maybe add your own keys here db=xxx, user=xxx, passwd=xxx - self.cvsdb = MySQLdb.connect("cvs", **_load_rc()) - # self.last_checkin = "2005-05-11" # for testing - self.last_checkin = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime()) - - def get_changes(self): - changes = [] - - def empty_change(): - return {'who': None, 'files': [], 'comments': None} - change = empty_change() - - cursor = self.cvsdb.cursor() - cursor.execute("""SELECT whoid, descid, fileid, dirid, branchid, \ -ci_when FROM checkins WHERE ci_when>='%s'""" % self.last_checkin) - last_checkin = None - for whoid, descid, fileid, dirid, branchid, ci_when in \ - cursor.fetchall(): - if branchid != 1: # only head - continue - cursor.execute("""SELECT who from people where id=%s""" % whoid) - who = cursor.fetchone()[0] - cursor.execute("""SELECT description from descs where id=%s""" % ( - descid)) - desc = cursor.fetchone()[0] - cursor.execute("""SELECT file from files where id=%s""" % fileid) - filename = cursor.fetchone()[0] - cursor.execute("""SELECT dir from dirs where id=%s""" % dirid) - dirname = cursor.fetchone()[0] - if who == change["who"] and desc == change["comments"]: - change["files"].append("%s/%s" % (dirname, filename)) - elif change["who"]: - changes.append(change) - change = empty_change() - else: - change["who"] = who - change["files"].append("%s/%s" % (dirname, filename)) - change["comments"] = desc - if last_checkin is None or ci_when > last_checkin: - last_checkin = ci_when - if last_checkin: - self.last_checkin = 
last_checkin - return changes - - -poller = ViewCvsPoller() - - -def error(*args): - log.err() - reactor.stop() - - -def poll_changes(remote): - print("GET CHANGES SINCE", poller.last_checkin, end=' ') - changes = poller.get_changes() - for change in changes: - print(change["who"], "\n *", "\n * ".join(change["files"])) - change['src'] = 'cvs' - remote.callRemote('addChange', change).addErrback(error) - print() - reactor.callLater(60, poll_changes, remote) - - -factory = pb.PBClientFactory() -reactor.connectTCP("localhost", 9999, factory) -deferred = factory.login(credentials.UsernamePassword("change", "changepw")) -deferred.addCallback(poll_changes).addErrback(error) - -reactor.run() diff --git a/master/contrib/zsh/_buildbot b/master/contrib/zsh/_buildbot deleted file mode 100644 index 18e1e674084..00000000000 --- a/master/contrib/zsh/_buildbot +++ /dev/null @@ -1,30 +0,0 @@ -#compdef buildbot -# -# This is the ZSH completion file for 'buildbot' command. It calls 'buildbot' -# command with the special "--_shell-completion" option which is handled -# by twisted.python.usage. t.p.usage then generates zsh code on stdout to -# handle the completions. -# -# This file is derived from twisted/python/twisted-completion.zsh from twisted -# distribution. -# - -# redirect stderr to /dev/null otherwise deprecation warnings may get puked all -# over the user's terminal if completing options for a deprecated command. -# Redirect stderr to a file to debug errors. -local cmd output -cmd=("$words[@]" --_shell-completion zsh:$CURRENT) -output=$("$cmd[@]" 2>/dev/null) - -if [[ $output == "#compdef "* ]]; then - # Looks like we got a valid completion function - so eval it to produce - # the completion matches. - eval $output -else - echo "\nCompletion error running command:" ${(qqq)cmd} - echo -n "If output below is unhelpful you may need to edit this file and " - echo "redirect stderr to a file." - echo "Expected completion function, but instead got:" - echo $output - return 1 -fi diff --git a/master/docs/conf.py b/master/docs/conf.py index 8d9bf0c83f7..bec9e7fefe2 100755 --- a/master/docs/conf.py +++ b/master/docs/conf.py @@ -158,6 +158,7 @@ # "pretty" reference that looks like relative path in Buildbot source tree # by default. 'src': ('https://github.com/buildbot/buildbot/blob/master/%s', ''), + 'contrib-src': ('https://github.com/buildbot/buildbot-contrib/blob/master/%s', ''), } # Sphinx' link checker. diff --git a/master/docs/manual/cfg-changesources.rst b/master/docs/manual/cfg-changesources.rst index 0fbcb8cdb2e..0c740e70e17 100644 --- a/master/docs/manual/cfg-changesources.rst +++ b/master/docs/manual/cfg-changesources.rst @@ -24,31 +24,31 @@ Choosing a Change Source There are a variety of :class:`ChangeSource` classes available, some of which are meant to be used in conjunction with other tools to deliver :class:`Change` events from the VC repository to the buildmaster. As a quick guide, here is a list of VC systems and the :class:`ChangeSource`\s that might be useful with them. -Note that some of these modules are in Buildbot's :src:`master/contrib` directory, meaning that they have been offered by other users in hopes they may be useful, and might require some additional work to make them functional. +Note that some of these modules are in Buildbot's :contrib-src:`master/contrib` directory, meaning that they have been offered by other users in hopes they may be useful, and might require some additional work to make them functional. 
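Most of the contrib scripts listed below deliver their changes to a :bb:chsrc:`PBChangeSource`, either over a direct PB connection or via ``buildbot sendchange``, so the master-side counterpart is usually a one-liner. A minimal sketch, using the port and the ``change``/``changepw`` credentials that the contrib scripts assume by default:

.. code-block:: python

    from buildbot.plugins import changes

    # accept sendchange/PB submissions from hook scripts such as
    # svn_buildbot.py, git_buildbot.py or viewcvspoll.py
    c['change_source'] = changes.PBChangeSource(port=9989, user='change',
                                                passwd='changepw')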
CVS -* :bb:chsrc:`CVSMaildirSource` (watching mail sent by :src:`master/contrib/buildbot_cvs_mail.py` script) +* :bb:chsrc:`CVSMaildirSource` (watching mail sent by :contrib-src:`master/contrib/buildbot_cvs_mail.py` script) * :bb:chsrc:`PBChangeSource` (listening for connections from ``buildbot sendchange`` run in a loginfo script) -* :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :src:`master/contrib/viewcvspoll.py` polling process which examines the ViewCVS database directly) +* :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :contrib-src:`master/contrib/viewcvspoll.py` polling process which examines the ViewCVS database directly) * :bb:chsrc:`Change Hooks` in WebStatus SVN -* :bb:chsrc:`PBChangeSource` (listening for connections from :src:`master/contrib/svn_buildbot.py` run in a postcommit script) -* :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :src:`master/contrib/svn_watcher.py` or :src:`master/contrib/svnpoller.py` polling process +* :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/svn_buildbot.py` run in a postcommit script) +* :bb:chsrc:`PBChangeSource` (listening for connections from a long-running :contrib-src:`master/contrib/svn_watcher.py` or :contrib-src:`master/contrib/svnpoller.py` polling process * :bb:chsrc:`SVNCommitEmailMaildirSource` (watching for email sent by :file:`commit-email.pl`) * :bb:chsrc:`SVNPoller` (polling the SVN repository) * :bb:chsrc:`Change Hooks` in WebStatus Darcs -* :bb:chsrc:`PBChangeSource` (listening for connections from :src:`master/contrib/darcs_buildbot.py` in a commit script) +* :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/darcs_buildbot.py` in a commit script) * :bb:chsrc:`Change Hooks` in WebStatus Mercurial -* :bb:chsrc:`Change Hooks` in WebStatus (including :src:`master/contrib/hgbuildbot.py`, configurable in a ``changegroup`` hook) +* :bb:chsrc:`Change Hooks` in WebStatus (including :contrib-src:`master/contrib/hgbuildbot.py`, configurable in a ``changegroup`` hook) * `BitBucket change hook `_ (specifically designed for BitBucket notifications, but requiring a publicly-accessible WebStatus) * :bb:chsrc:`HgPoller` (polling a remote Mercurial repository) * :bb:chsrc:`BitbucketPullrequestPoller` (polling Bitbucket for pull requests) @@ -56,14 +56,14 @@ Mercurial Bzr (the newer Bazaar) -* :bb:chsrc:`PBChangeSource` (listening for connections from :src:`master/contrib/bzr_buildbot.py` run in a post-change-branch-tip or commit hook) +* :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/bzr_buildbot.py` run in a post-change-branch-tip or commit hook) * :bb:chsrc:`BzrPoller` (polling the Bzr repository) * :bb:chsrc:`Change Hooks` in WebStatus Git -* :bb:chsrc:`PBChangeSource` (listening for connections from :src:`master/contrib/git_buildbot.py` run in the post-receive hook) -* :bb:chsrc:`PBChangeSource` (listening for connections from :src:`master/contrib/github_buildbot.py`, which listens for notifications from GitHub) +* :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/git_buildbot.py` run in the post-receive hook) +* :bb:chsrc:`PBChangeSource` (listening for connections from :contrib-src:`master/contrib/github_buildbot.py`, which listens for notifications from GitHub) * :bb:chsrc:`Change Hooks` in WebStatus * :bb:chsrc:`GitHub` change hook (specifically designed for GitHub notifications, but requiring a 
publicly-accessible WebStatus) * :bb:chsrc:`BitBucket` change hook (specifically designed for BitBucket notifications, but requiring a publicly-accessible WebStatus) @@ -260,7 +260,7 @@ CVSMaildirSource .. py:class:: buildbot.changes.mail.CVSMaildirSource -This parser works with the :src:`master/contrib/buildbot_cvs_mail.py` script. +This parser works with the :contrib-src:`master/contrib/buildbot_cvs_mail.py` script. The script sends an email containing all the files submitted in one directory. It is invoked by using the :file:`CVSROOT/loginfo` facility. @@ -275,10 +275,10 @@ For example: c['change_source'] = changes.CVSMaildirSource("/home/buildbot/Mail") -Configuration of CVS and :src:`buildbot_cvs_mail.py ` +Configuration of CVS and :contrib-src:`buildbot_cvs_mail.py ` ########################################################################################## -CVS must be configured to invoke the :src:`buildbot_cvs_mail.py ` script when files are checked in. +CVS must be configured to invoke the :contrib-src:`buildbot_cvs_mail.py ` script when files are checked in. This is done via the CVS loginfo configuration file. To update this, first do: @@ -298,7 +298,7 @@ cd to the CVSROOT directory and edit the file loginfo, adding a line like: For cvs version 1.12.x, the ``--path %p`` option is required. Version 1.11.x and 1.12.x report the directory path differently. -The above example you put the :src:`buildbot_cvs_mail.py ` script under /cvsroot/CVSROOT. +The above example you put the :contrib-src:`buildbot_cvs_mail.py ` script under /cvsroot/CVSROOT. It can be anywhere. Run the script with --help to see all the options. At the very least, the options ``-e`` (email) and ``-P`` (project) should be specified. @@ -425,7 +425,7 @@ Bzr Hook Bzr is also written in Python, and the Bzr hook depends on Twisted to send the changes. -To install, put :src:`master/contrib/bzr_buildbot.py` in one of your plugins locations a bzr plugins directory (e.g., :file:`~/.bazaar/plugins`). +To install, put :contrib-src:`master/contrib/bzr_buildbot.py` in one of your plugins locations a bzr plugins directory (e.g., :file:`~/.bazaar/plugins`). Then, in one of your bazaar conf files (e.g., :file:`~/.bazaar/locations.conf`), set the location you want to connect with Buildbot with these keys: * ``buildbot_on`` @@ -670,7 +670,7 @@ Bzr Poller ~~~~~~~~~~ If you cannot insert a Bzr hook in the server, you can use the :bb:chsrc:`BzrPoller`. -To use it, put :src:`master/contrib/bzr_buildbot.py` somewhere that your Buildbot configuration can import it. +To use it, put :contrib-src:`master/contrib/bzr_buildbot.py` somewhere that your Buildbot configuration can import it. Even putting it in the same directory as the :file:`master.cfg` should work. Install the poller in the Buildbot configuration as with any other change source. Minimally, provide a URL that you want to poll (``bzr://``, ``bzr+ssh://``, or ``lp:``), making sure the Buildbot user has necessary privileges. @@ -695,7 +695,7 @@ The ``BzrPoller`` parameters are: ``branch_name`` Any value to be used as the branch name. - Defaults to None, or specify a string, or specify the constants from :src:`bzr_buildbot.py ` ``SHORT`` or ``FULL`` to get the short branch name or full branch address. + Defaults to None, or specify a string, or specify the constants from :contrib-src:`bzr_buildbot.py ` ``SHORT`` or ``FULL`` to get the short branch name or full branch address. 
``blame_merge_author`` normally, the user that commits the revision is the user that is responsible for the change. @@ -709,7 +709,7 @@ The ``BzrPoller`` parameters are: GitPoller ~~~~~~~~~ -If you cannot take advantage of post-receive hooks as provided by :src:`master/contrib/git_buildbot.py` for example, then you can use the :bb:chsrc:`GitPoller`. +If you cannot take advantage of post-receive hooks as provided by :contrib-src:`master/contrib/git_buildbot.py` for example, then you can use the :bb:chsrc:`GitPoller`. The :bb:chsrc:`GitPoller` periodically fetches from a remote Git repository and processes any changes. It requires its own working directory for operation. @@ -1206,4 +1206,3 @@ Change Hooks (HTTP Notifications) Buildbot already provides a web frontend, and that frontend can easily be used to receive HTTP push notifications of commits from services like GitHub. See :ref:`Change-Hooks` for more information. - diff --git a/master/docs/manual/cfg-reporters.rst b/master/docs/manual/cfg-reporters.rst index 254ffcc5739..35032f89f7d 100644 --- a/master/docs/manual/cfg-reporters.rst +++ b/master/docs/manual/cfg-reporters.rst @@ -93,7 +93,7 @@ If your SMTP host requires authentication before it allows you to send emails, t .. note:: - If for some reasons you are not able to send a notification with TLS enabled and specified user name and password, you might want to use :src:`master/contrib/check_smtp.py` to see if it works at all. + If for some reasons you are not able to send a notification with TLS enabled and specified user name and password, you might want to use :contrib-src:`master/contrib/check_smtp.py` to see if it works at all. If you want to require Transport Layer Security (TLS), then you can also set ``useTls``:: diff --git a/master/docs/manual/cfg-workers-docker.rst b/master/docs/manual/cfg-workers-docker.rst index 6049cdfd684..c9612c31efb 100644 --- a/master/docs/manual/cfg-workers-docker.rst +++ b/master/docs/manual/cfg-workers-docker.rst @@ -120,9 +120,9 @@ Reuse same image for different workers Previous simple example hardcodes the worker name into the dockerfile, which will not work if you want to share your docker image between workers. -You can find in buildbot source code in :src:`master/contrib/docker` one example configurations: +You can find in buildbot source code in :contrib-src:`master/contrib/docker` one example configurations: -:src:`pythonnode_worker ` +:contrib-src:`pythonnode_worker ` a worker with Python and node installed, which demonstrate how to reuse the base worker to create variations of build environments. It is based on the official ``buildbot/buildbot-worker`` image. diff --git a/master/docs/manual/cfg-workers-libvirt.rst b/master/docs/manual/cfg-workers-libvirt.rst index 0bbadb2c20a..16f7bc4aff4 100644 --- a/master/docs/manual/cfg-workers-libvirt.rst +++ b/master/docs/manual/cfg-workers-libvirt.rst @@ -51,7 +51,7 @@ Because this image may need updating a lot, we strongly suggest scripting its cr If you want to have multiple workers using the same base image it can be annoying to duplicate the image just to change the buildbot credentials. One option is to use libvirt's DHCP server to allocate an identity to the worker: DHCP sets a hostname, and the worker takes its identity from that. 
-Doing all this is really beyond the scope of the manual, but there is a :src:`vmbuilder ` script and a :src:`network.xml ` file to create such a DHCP server in :src:`master/contrib/` (:ref:`Contrib-Scripts`) that should get you started: +Doing all this is really beyond the scope of the manual, but there is a :contrib-src:`vmbuilder ` script and a :contrib-src:`network.xml ` file to create such a DHCP server in :contrib-src:`master/contrib/` (:ref:`Contrib-Scripts`) that should get you started: .. code-block:: bash diff --git a/master/docs/manual/cfg-wwwhooks.rst b/master/docs/manual/cfg-wwwhooks.rst index ab4b21f5198..e43b396c3e8 100644 --- a/master/docs/manual/cfg-wwwhooks.rst +++ b/master/docs/manual/cfg-wwwhooks.rst @@ -21,7 +21,7 @@ An example www configuration line which enables change_hook and two DIALECTS: Within the www config dictionary arguments, the ``change_hook`` key enables/disables the module and ``change_hook_dialects`` whitelists DIALECTs where the keys are the module names and the values are optional arguments which will be passed to the hooks. -The :src:`master/contrib/post_build_request.py` script allows for the submission of an arbitrary change request. +The :contrib-src:`master/contrib/post_build_request.py` script allows for the submission of an arbitrary change request. Run :command:`post_build_request.py --help` for more information. The ``base`` dialect must be enabled for this to work. @@ -86,7 +86,7 @@ GitHub hook .. note:: - There is a standalone HTTP server available for receiving GitHub notifications as well: :src:`master/contrib/github_buildbot.py`. + There is a standalone HTTP server available for receiving GitHub notifications as well: :contrib-src:`master/contrib/github_buildbot.py`. This script may be useful in cases where you cannot expose the WebStatus for public consumption. The GitHub hook has the following parameters: @@ -184,7 +184,7 @@ When this is setup you should add a `POST` service pointing to ``/change_hook/bi For example, it the grid URL is ``http://builds.example.com/bbot/grid``, then point BitBucket to ``http://builds.example.com/change_hook/bitbucket``. To specify a project associated to the repository, append ``?project=name`` to the URL. -Note that there is a standalone HTTP server available for receiving BitBucket notifications, as well: :src:`master/contrib/bitbucket_buildbot.py`. +Note that there is a standalone HTTP server available for receiving BitBucket notifications, as well: :contrib-src:`master/contrib/bitbucket_buildbot.py`. This script may be useful in cases where you cannot expose the WebStatus for public consumption. .. warning:: diff --git a/master/docs/manual/deploy.rst b/master/docs/manual/deploy.rst index 2697ed0de7a..a2e51927c14 100644 --- a/master/docs/manual/deploy.rst +++ b/master/docs/manual/deploy.rst @@ -104,5 +104,5 @@ Again, :samp:`buildbot-worker restart {BASEDIR}` will speed up the process. Contrib Scripts ~~~~~~~~~~~~~~~ -While some features of Buildbot are included in the distribution, others are only available in :src:`master/contrib/` in the source directory. -The latest versions of such scripts are available at :src:`master/contrib`. +While some features of Buildbot are included in the distribution, others are only available in :contrib-src:`master/contrib/` in the ``buildbot-contrib`` source directory. +The latest versions of such scripts are available at :contrib-src:`master/contrib`. 
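As noted above, :contrib-src:`master/contrib/post_build_request.py` simply URL-encodes the change attributes and POSTs them to the ``base`` change hook. Stripped of its option parsing, the submission it performs amounts to roughly the following sketch (the host, author, revision and repository values are placeholders; the hook answers ``202 Accepted`` on success, which is what the script checks for):

.. code-block:: python

    from urllib.parse import urlencode
    from urllib.request import urlopen

    # field names mirror the Change attributes accepted by /change_hook/base
    fields = {
        'author': 'jdoe',
        'comments': 'post_build_request submission',
        'revision': '1234',
        'branch': 'trunk',
        'project': 'myproject',
        'repository': ':ext:myhost:/cvsroot',
    }
    resp = urlopen('http://localhost:8010/change_hook/base',
                   data=urlencode(fields).encode('utf-8'))
    print(resp.status, resp.read())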
diff --git a/master/docs/relnotes/0.8.9.rst b/master/docs/relnotes/0.8.9.rst index 246bc3b948b..dd541b7a242 100644 --- a/master/docs/relnotes/0.8.9.rst +++ b/master/docs/relnotes/0.8.9.rst @@ -180,7 +180,7 @@ Features * An example of a declarative configuration is included in ``master/contrib/SimpleConfig.py``, with copious comments. -* Systemd unit files for Buildbot are available in the :src:`master/contrib/` directory. +* Systemd unit files for Buildbot are available in the :contrib-src:`master/contrib/` directory. * We've added some extra checking to make sure that you have a valid locale before starting buildbot (#2608). diff --git a/master/docs/relnotes/0.9.0.rst b/master/docs/relnotes/0.9.0.rst index 5edbee10b1f..2ec35fa0140 100644 --- a/master/docs/relnotes/0.9.0.rst +++ b/master/docs/relnotes/0.9.0.rst @@ -278,7 +278,7 @@ Fixes * :bb:chsrc:`P4Source`'s ``server_tz`` parameter now works correctly. * The ``revlink`` in changes produced by the Bitbucket hook now correctly includes the ``changes/`` portion of the URL. -* :bb:chsrc:`PBChangeSource`'s git hook :src:`master/contrib/git_buildbot.py` now supports git tags +* :bb:chsrc:`PBChangeSource`'s git hook :contrib-src:`master/contrib/git_buildbot.py` now supports git tags A pushed git tag generates a change event with the ``branch`` property equal to the tag name. To schedule builds based on buildbot tags, one could use something like this: @@ -592,7 +592,7 @@ Features * The :class:`DockerLatentWorker` image attribute is now renderable (can take properties in account). * The :class:`DockerLatentWorker` sets environment variables describing how to connect to the master. - Example dockerfiles can be found in :src:`master/contrib/docker`. + Example dockerfiles can be found in :contrib-src:`master/contrib/docker`. * :class:`DockerLatentWorker` now has a ``hostconfig`` parameter that can be used to setup host configuration when creating a new container. diff --git a/master/docs/relnotes/0.9.0b1.rst b/master/docs/relnotes/0.9.0b1.rst index 2d694e86760..24a0bc33bb4 100644 --- a/master/docs/relnotes/0.9.0b1.rst +++ b/master/docs/relnotes/0.9.0b1.rst @@ -199,7 +199,7 @@ Fixes * :bb:chsrc:`P4Source`'s ``server_tz`` parameter now works correctly. * The ``revlink`` in changes produced by the Bitbucket hook now correctly includes the ``changes/`` portion of the URL. -* :bb:chsrc:`PBChangeSource`'s git hook :src:`master/contrib/git_buildbot.py` now supports git tags +* :bb:chsrc:`PBChangeSource`'s git hook :contrib-src:`master/contrib/git_buildbot.py` now supports git tags A pushed git tag generates a change event with the ``branch`` property equal to the tag name. To schedule builds based on buildbot tags, one could use something like this: diff --git a/master/docs/relnotes/0.9.0b6.rst b/master/docs/relnotes/0.9.0b6.rst index 7e183330d82..9dba12da483 100644 --- a/master/docs/relnotes/0.9.0b6.rst +++ b/master/docs/relnotes/0.9.0b6.rst @@ -42,7 +42,7 @@ Features * The :class:`DockerLatentBuildSlave` image attribute is now renderable (can take properties in account). * The :class:`DockerLatentBuildSlave` sets environment variables describing how to connect to the master. - Example dockerfiles can be found in :src:`master/contrib/docker`. + Example dockerfiles can be found in :contrib-src:`master/contrib/docker`. 
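Hooking such an image up to a master takes only a few lines of configuration. A minimal sketch using the current :class:`DockerLatentWorker` class (the worker name, password, ``docker_host`` and image name are illustrative):

.. code-block:: python

    from buildbot.plugins import worker

    c['workers'] = [
        worker.DockerLatentWorker(
            'docker-worker-1', 'workerpass',
            docker_host='unix://var/run/docker.sock',
            # e.g. an image derived from the contrib dockerfiles
            image='buildbot/buildbot-worker',
            followStartupLogs=True,
        ),
    ]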
Details diff --git a/worker/contrib/README.txt b/worker/contrib/README.txt deleted file mode 100644 index 9c12d8fe184..00000000000 --- a/worker/contrib/README.txt +++ /dev/null @@ -1,11 +0,0 @@ -Utility scripts, things contributed by users but not strictly a part of -buildbot: - -zsh/_buildbot-worker: zsh tab-completion file for 'buildbot-worker' command. - Put it in one of the directories appearing in $fpath - to enable tab-completion in zsh. - -bash/buildbot-worker: bash tab-completion file for 'buildbot-worker' command. - Source this file to enable completions in your bash - session. This is typically accomplished by placing the - file into the appropriate 'bash_completion.d' directory. diff --git a/worker/contrib/bash/buildbot-worker b/worker/contrib/bash/buildbot-worker deleted file mode 100644 index 0b15c11c9cd..00000000000 --- a/worker/contrib/bash/buildbot-worker +++ /dev/null @@ -1,50 +0,0 @@ -# -# This file installs BASH completions for 'buildbot-worker' command. -# - -_buildbot_worker() -{ - local buildbot_worker_subcommands=" - create-worker start stop restart" - - local cur=${COMP_WORDS[COMP_CWORD]} - local subcommand= - local subcommand_args= - local i=1 - - # - # 'parse' the command line so far - # figure out if we have subcommand specified and any arguments to it - # - - # skip global options - while [[ "${COMP_WORDS[$i]}" == -* ]]; - do - i=$(($i+1)) - done - - # save subcommand - subcommand=${COMP_WORDS[$i]} - i=$(($i+1)) - - # skip subcommand options - while [[ "${COMP_WORDS[$i]}" == -* ]]; - do - i=$(($i+1)) - done - - # save subcommand arguments - subcommand_args=${COMP_WORDS[@]:$i:${#COMP_WORDS[@]}} - - if [ "$cur" == "$subcommand" ]; then - # suggest buildbot subcommands - COMPREPLY=( $(compgen -W "$buildbot_worker_subcommands" $cur) ) - elif [ "$cur" == "$subcommand_args" ]; then - # we are at first subcommand argument - # all subcommands can have worker base directory as first argument - # suggest directories - COMPREPLY=( $(compgen -A directory $cur) ) - fi -} - -complete -F _buildbot_worker buildbot-worker diff --git a/worker/contrib/init-scripts/buildbot-worker.default b/worker/contrib/init-scripts/buildbot-worker.default deleted file mode 100644 index 9d14c7536a6..00000000000 --- a/worker/contrib/init-scripts/buildbot-worker.default +++ /dev/null @@ -1,11 +0,0 @@ -WORKER_RUNNER=/usr/bin/buildbot-worker - -# 'true|yes|1' values in WORKER_ENABLED to enable instance and 'false|no|0' to -# disable. Other values will be considered as syntax error. - -WORKER_ENABLED[1]=0 # 1-enabled, 0-disabled -WORKER_NAME[1]="buildbot-worker #1" # short name printed on start/stop -WORKER_USER[1]="buildbot" # user to run worker as -WORKER_BASEDIR[1]="" # basedir to worker (absolute path) -WORKER_OPTIONS[1]="" # buildbot options -WORKER_PREFIXCMD[1]="" # prefix command, i.e. 
nice, linux32, dchroot diff --git a/worker/contrib/init-scripts/buildbot-worker.init.sh b/worker/contrib/init-scripts/buildbot-worker.init.sh deleted file mode 100755 index 808f2bd2998..00000000000 --- a/worker/contrib/init-scripts/buildbot-worker.init.sh +++ /dev/null @@ -1,210 +0,0 @@ -#!/bin/bash - -### Maintain compatibility with chkconfig -# chkconfig: 2345 83 17 -# description: buildbot-worker - -### BEGIN INIT INFO -# Provides: buildbot-worker -# Required-Start: $remote_fs -# Required-Stop: $remote_fs -# Default-Start: 2 3 4 5 -# Default-Stop: 0 1 6 -# Short-Description: Buildbot worker init script -# Description: This file allows running buildbot worker instances at -# startup -### END INIT INFO - -PATH=/sbin:/bin:/usr/sbin:/usr/bin -WORKER_RUNNER=/usr/bin/buildbot-worker - - -# Source buildbot-worker configuration -[[ -r /etc/default/buildbot-worker ]] && . /etc/default/buildbot-worker -#[[ -r /etc/sysconfig/buildbot-worker ]] && . /etc/sysconfig/buildbot-worker - -# Or define/override the configuration here -#WORKER_ENABLED[1]=0 # 0-enabled, other-disabled -#WORKER_NAME[1]="buildbot-worker #1" # short name printed on start/stop -#WORKER_USER[1]="buildbot" # user to run worker as -#WORKER_BASEDIR[1]="" # basedir to worker (absolute path) -#WORKER_OPTIONS[1]="" # buildbot options -#WORKER_PREFIXCMD[1]="" # prefix command, i.e. nice, linux32, dchroot - - -# Get some LSB-like functions -if [ -r /lib/lsb/init-functions ]; then - . /lib/lsb/init-functions -else - function log_success_msg() { - echo "$@" - } - function log_failure_msg() { - echo "$@" - } - function log_warning_msg() { - echo "$@" - } -fi - - -# Some systems don't have seq (e.g. Solaris) -if type seq >/dev/null 2>&1; then - : -else - function seq() { - for ((i=1; i<=$1; i+=1)); do - echo $i - done - } -fi - - -if [[ ! -x ${WORKER_RUNNER} ]]; then - log_failure_msg "does not exist or not an executable file: ${WORKER_RUNNER}" - exit 1 -fi - -function is_enabled() { - ANSWER=`echo $1|tr "[:upper:]" "[:lower:]"` - [[ "$ANSWER" == "yes" ]] || [[ "$ANSWER" == "true" ]] || [[ "$ANSWER" == "1" ]] - return $? -} - -function is_disabled() { - ANSWER=`echo $1|tr "[:upper:]" "[:lower:]"` - [[ "$ANSWER" == "no" ]] || [[ "$ANSWER" == "false" ]] || [[ "$ANSWER" == "0" ]] - return $? -} - - -function worker_config_valid() { - # Function validates buildbot worker instance startup variables based on - # array index - local errors=0 - local index=$1 - - if ! is_enabled "${WORKER_ENABLED[$index]}" && ! is_disabled "${WORKER_ENABLED[$index]}" ; then - log_warning_msg "buildbot-worker #${index}: invalid enabled status" - errors=$(($errors+1)) - fi - - if [[ -z ${WORKER_NAME[$index]} ]]; then - log_failure_msg "buildbot-worker #${index}: no name" - errors=$(($errors+1)) - fi - - if [[ -z ${WORKER_USER[$index]} ]]; then - log_failure_msg "buildbot-worker #${index}: no run user specified" - errors=$( ($errors+1) ) - elif ! getent passwd ${WORKER_USER[$index]} >/dev/null; then - log_failure_msg "buildbot-worker #${index}: unknown user ${WORKER_USER[$index]}" - errors=$(($errors+1)) - fi - - if [[ ! 
-d "${WORKER_BASEDIR[$index]}" ]]; then - log_failure_msg "buildbot-worker ${index}: basedir does not exist ${WORKER_BASEDIR[$index]}" - errors=$(($errors+1)) - fi - - return $errors -} - -function check_config() { - itemcount="${#WORKER_ENABLED[@]} - ${#WORKER_NAME[@]} - ${#WORKER_USER[@]} - ${#WORKER_BASEDIR[@]} - ${#WORKER_OPTIONS[@]} - ${#WORKER_PREFIXCMD[@]}" - - if [[ $(echo "$itemcount" | tr -d ' ' | sort -u | wc -l) -ne 1 ]]; then - log_failure_msg "WORKER_* arrays must have an equal number of elements!" - return 1 - fi - - errors=0 - for i in $( seq ${#WORKER_ENABLED[@]} ); do - if is_disabled "${WORKER_ENABLED[$i]}" ; then - log_warning_msg "buildbot-worker #${i}: disabled" - continue - fi - worker_config_valid $i - errors=$(($errors+$?)) - done - - [[ $errors == 0 ]]; return $? -} - -check_config || exit $? - -function iscallable () { type $1 2>/dev/null | grep -q 'shell function'; } - -function worker_op () { - op=$1 ; mi=$2 - - if [ `uname` = SunOS ]; then - suopt="" - else - suopt="-s /bin/sh" - fi - ${WORKER_PREFIXCMD[$mi]} \ - su $suopt - ${WORKER_USER[$mi]} \ - -c "$WORKER_RUNNER $op ${WORKER_OPTIONS[$mi]} ${WORKER_BASEDIR[$mi]} > /dev/null" - return $? -} - -function do_op () { - errors=0 - for i in $( seq ${#WORKER_ENABLED[@]} ); do - if [ -n "$4" ] && [ "$4" != "${WORKER_NAME[$i]}" ] ; then - continue - elif is_disabled "${WORKER_ENABLED[$i]}" && [ -z "$4" ] ; then - continue - fi - - # Some rhels don't come with all the lsb goodies - if iscallable log_daemon_msg; then - log_daemon_msg "$3 \"${WORKER_NAME[$i]}\"" - if eval $1 $2 $i; then - log_end_msg 0 - else - log_end_msg 1 - errors=$(($errors+1)) - fi - else - if eval $1 $2 $i; then - log_success_msg "$3 \"${WORKER_NAME[$i]}\"" - else - log_failure_msg "$3 \"${WORKER_NAME[$i]}\"" - errors=$(($errors+1)) - fi - fi - done - return $errors -} - -case "$1" in - start) - do_op "worker_op" "start" "Starting buildbot-worker" "$2" - exit $? - ;; - stop) - do_op "worker_op" "stop" "Stopping buildbot-worker" "$2" - exit $? - ;; - reload) - do_op "worker_op" "reload" "Reloading buildbot-worker" "$2" - exit $? - ;; - restart|force-reload) - do_op "worker_op" "restart" "Restarting buildbot-worker" "$2" - exit $? - ;; - *) - echo "Usage: $0 {start|stop|restart|reload|force-reload}" - exit 1 - ;; -esac - -exit 0 diff --git a/worker/contrib/os-x/README b/worker/contrib/os-x/README deleted file mode 100644 index 1bfa9663cf8..00000000000 --- a/worker/contrib/os-x/README +++ /dev/null @@ -1,23 +0,0 @@ -Mark Pauley contributed the two launchd plist files for OS-X (10.4+) to start -a Buildbot master or worker automatically at startup: - - contrib/OS-X/net.sourceforge.buildbot.master.plist - contrib/OS-X/net.sourceforge.buildbot.worker.plist - -His email message is as follows: - - Message-Id: - From: Mark Pauley - To: buildbot-devel - Date: Wed, 24 Jan 2007 11:05:44 -0800 - Subject: [Buildbot-devel] Sample buildbot launchd plists for MacOS 10.4+ - - Hi guys, - I've had these kicking around for a while and thought that maybe - someone would like to see them. Installing either of these two to / - Library/LaunchDaemons will cause the Buildbot worker or master to auto- - start as whatever user you like on launch. This is the "right way to - do this" going forward, startupitems are deprecated. Please note that - this means any tests that require a windowserver connection on os x - won't work. 
- diff --git a/worker/contrib/os-x/net.sourceforge.buildbot.worker.plist b/worker/contrib/os-x/net.sourceforge.buildbot.worker.plist deleted file mode 100644 index bccb6b63272..00000000000 --- a/worker/contrib/os-x/net.sourceforge.buildbot.worker.plist +++ /dev/null @@ -1,36 +0,0 @@ - - - - - Label - net.sourceforge.buildbot.worker - - - UserName - buildbot - - - WorkingDirectory - /Users/buildbot/Buildbot_Worker - - ProgramArguments - - /usr/bin/twistd - --nodaemon - --python=buildbot.tac - --logfile=buildbot.log - --prefix=worker - - - KeepAlive - - SuccessfulExit - - - - RunAtLoad - - - - diff --git a/worker/contrib/systemd/buildbot-worker.service b/worker/contrib/systemd/buildbot-worker.service deleted file mode 100644 index f49fd3c71c2..00000000000 --- a/worker/contrib/systemd/buildbot-worker.service +++ /dev/null @@ -1,17 +0,0 @@ -[Unit] -Description=Buildbot Worker -Wants=network.target -After=network.target - -[Service] -Type=forking -PIDFile=/srv/buildbot-worker/linux-worker/twistd.pid -WorkingDirectory=/srv/buildbot-worker -ExecStart=/usr/bin/buildbot-worker start linux-worker -ExecReload=/usr/bin/buildbot-worker restart linux-worker -ExecStop=/usr/bin/buildbot-worker stop linux-worker -Restart=always -User=buildbot-worker - -[Install] -WantedBy=multi-user.target diff --git a/worker/contrib/zsh/_buildbot-worker b/worker/contrib/zsh/_buildbot-worker deleted file mode 100644 index ddf621e506a..00000000000 --- a/worker/contrib/zsh/_buildbot-worker +++ /dev/null @@ -1,30 +0,0 @@ -#compdef buildbot-worker -# -# This is the ZSH completion file for 'buildbot-worker' command. It calls -# 'buildbot-worker' command with the special "--_shell-completion" option -# which is handled by twisted.python.usage. t.p.usage then generates zsh code -# on stdout to handle the completions. -# -# This file is derived from twisted/python/twisted-completion.zsh from twisted -# distribution. -# - -# redirect stderr to /dev/null otherwise deprecation warnings may get puked all -# over the user's terminal if completing options for a deprecated command. -# Redirect stderr to a file to debug errors. -local cmd output -cmd=("$words[@]" --_shell-completion zsh:$CURRENT) -output=$("$cmd[@]" 2>/dev/null) - -if [[ $output == "#compdef "* ]]; then - # Looks like we got a valid completion function - so eval it to produce - # the completion matches. - eval $output -else - echo "\nCompletion error running command:" ${(qqq)cmd} - echo -n "If output below is unhelpful you may need to edit this file and " - echo "redirect stderr to a file." - echo "Expected completion function, but instead got:" - echo $output - return 1 -fi