Permalink
Browse files

Bug 957384: make buildapi's tests pass; r=bhearsum

There's still quite a lot of work required to get the tests to run, but much
of that is due to the use of Pylons and the way DB access is structured in
Buildapi, neither of which were in scope for this bug.

--HG--
rename : buildapi/lib/test_cacher.py => buildapi/tests/test_cacher.py
  • Loading branch information...
djmitche committed Jan 22, 2014
1 parent 5d9bfe3 commit da83a098c539951aaa3c461c01235bd4ee8bb547
View
@@ -32,6 +32,16 @@ or
buildapi.cache = memcached:HOSTNAME:PORT,HOSTNAME:PORT,..
+You'll need to set up some scheduler and status DBs. The schemas for these
+DBs are in the root directory, although you may want to fill them with test
+data, which is not included.
+
+ sqlite3 statusdb.sqlite3 < statusdb_schema.sql
+ sqlite3 schedulerdb.sqlite3 < schedulerdb_schema.sql
+ sqlite3 buildapi.sqlite3 < buildapi_schema.sql
+
+Then point the DB connection strings in your config file at these databases.
+
Now setup the application::
paster setup-app config.ini
View
@@ -0,0 +1,16 @@
+{
+ "branch1": {
+ "repo": "http://hg.mozilla.org/projects/branch1",
+ "graph_branches": [
+ "branch1"
+ ],
+ "repo_type": "hg"
+ },
+ "branch2": {
+ "repo": "http://hg.mozilla.org/projects/branch2",
+ "graph_branches": [
+ "branch2"
+ ],
+ "repo_type": "git"
+ }
+}
View
@@ -165,7 +165,10 @@ def get_branches():
branches_url = app_globals.branches_url
log.info("Fetching branches list from %s", branches_url)
try:
- branches = json.load(urllib.urlopen(branches_url))
+ if branches_url.startswith('TEST:'):
+ branches = json.load(open(branches_url.split(':')[1]))
+ else:
+ branches = json.load(urllib.urlopen(branches_url))
_branches = branches
_last_branches_check = now
except:
@@ -9,9 +9,6 @@
from buildapi.model.util import get_time_interval, get_platform, \
get_build_type, get_job_type
-br = meta.scheduler_db_meta.tables['buildrequests']
-c = meta.scheduler_db_meta.tables['changes']
-
def BuildersQuery(starttime, endtime, branch_name):
"""Constructs the sqlalchemy query for fetching all build requests in the
specified time interval for the specified branch.
@@ -33,6 +30,8 @@ def BuildersTypeQuery(starttime, endtime, buildername):
buildername - builder's name
Output: query
"""
+ br = meta.scheduler_db_meta.tables['buildrequests']
+
q = BuildRequestsQuery(starttime=starttime, endtime=endtime)
q = q.where(br.c.buildername.like(buildername))
return q
@@ -7,13 +7,6 @@
from buildapi.model.util import get_branch_name, get_platform, get_build_type, \
get_job_type, get_revision, results_to_str, status_to_str
-b = meta.scheduler_db_meta.tables['builds']
-br = meta.scheduler_db_meta.tables['buildrequests']
-bs = meta.scheduler_db_meta.tables['buildsets']
-s = meta.scheduler_db_meta.tables['sourcestamps']
-sch = meta.scheduler_db_meta.tables['sourcestamp_changes']
-c = meta.scheduler_db_meta.tables['changes']
-
def BuildRequestsQuery(revision=None, branch_name=None, starttime=None,
endtime=None, changeid_all=False):
"""Constructs the sqlalchemy query for fetching build requests.
@@ -39,6 +32,13 @@ def BuildRequestsQuery(revision=None, branch_name=None, starttime=None,
per build request, with only one of the changeids at random
Output: query
"""
+ b = meta.scheduler_db_meta.tables['builds']
+ br = meta.scheduler_db_meta.tables['buildrequests']
+ bs = meta.scheduler_db_meta.tables['buildsets']
+ s = meta.scheduler_db_meta.tables['sourcestamps']
+ sch = meta.scheduler_db_meta.tables['sourcestamp_changes']
+ c = meta.scheduler_db_meta.tables['changes']
+
q = outerjoin(br, b, b.c.brid == br.c.id).join(
bs, bs.c.id == br.c.buildsetid).join(
s, s.c.id == bs.c.sourcestampid).outerjoin(
@@ -117,6 +117,7 @@ def GetBuildRequests(revision=None, branch_name=None, starttime=None,
per build request, with only one of the changeids at random
Output: dictionary of BuildRequest objects, keyed by (br.brid, br.bid)
"""
+
q = BuildRequestsQuery(revision=revision, branch_name=branch_name,
starttime=starttime, endtime=endtime, changeid_all=changeid_all)
q_results = q.execute()
@@ -3,8 +3,6 @@
import buildapi.model.meta as meta
from buildapi.model.util import get_revision
-c = meta.scheduler_db_meta.tables['changes']
-
def ChangesQuery(revision=None, branch_name=None, starttime=None, endtime=None):
"""Constructs the sqlalchemy query for fetching changes.
@@ -16,6 +14,8 @@ def ChangesQuery(revision=None, branch_name=None, starttime=None, endtime=None):
endtime - end time (UNIX timestamp in seconds)
Output: query
"""
+ c = meta.scheduler_db_meta.tables['changes']
+
q = select([c.c.changeid, c.c.revision, c.c.branch, c.c.when_timestamp])
if revision:
@@ -8,10 +8,6 @@
SKIPPED, EXCEPTION, RETRY
from buildapi.model.util import get_time_interval, get_revision, results_to_str
-br = meta.scheduler_db_meta.tables['buildrequests']
-s = meta.scheduler_db_meta.tables['sourcestamps']
-c = meta.scheduler_db_meta.tables['changes']
-
def GetEndtoEndTimes(starttime=None, endtime=None,
branch_name='mozilla-central'):
"""Get end to end times report for the speficied time interval and branch.
@@ -1,5 +1,4 @@
#!/usr/bin/python
-import redis
import simplejson as json
import sqlalchemy as sa
@@ -315,9 +314,12 @@ def date_option(option, opt, value, parser):
session = session_maker()
if config.has_option('general', 'redis'):
+ import redis
R = redis.Redis(host=config.get('general', 'redis'))
else:
+ import redis
R = redis.Redis()
+ # TODO: support memcached
scheduler_db_engine = sa.create_engine(config.get('general', 'scheduler_dburl'), pool_recycle=60)
@@ -20,15 +20,15 @@
__all__ = ['environ', 'url', 'TestController']
# Invoke websetup with the current config file
-SetupCommand('setup-app').run([pylons.test.pylonsapp.config['__file__']])
+SetupCommand('setup-app').run(['test.ini'])
environ = {}
class TestController(TestCase):
def __init__(self, *args, **kwargs):
wsgiapp = pylons.test.pylonsapp
- config = wsgiapp.config
+ config = wsgiapp.application.config
self.config = config
self.app = TestApp(wsgiapp)
self.g = self.config['pylons.app_globals']
@@ -1,6 +1,7 @@
from buildapi.tests import *
from buildapi.model import init_scheduler_model, init_buildapi_model
from buildapi.lib import json
+from buildapi.lib import mq
import sqlalchemy
import os, time
@@ -17,19 +18,23 @@ def setUp(self):
init_scheduler_model(self.engine)
init_buildapi_model(self.engine)
- self.g.mq.engine.execute('delete from jobrequests')
+ self.engine.execute('delete from jobrequests')
- # disable actually sending messages!
+ # create a job request publisher that doesn't actually send anything
+ config = {
+ 'carrot.exchange': 'exch',
+ }
+ self.g.mq = mq.LoggingJobRequestPublisher(self.engine, config, 'carrot')
self.g.mq.send = mock.Mock()
def get_jobrequests(self):
- p = self.g.mq.engine.execute('select * from jobrequests')
+ p = self.engine.execute('select * from jobrequests')
requests = p.fetchall()
return requests
def test_branches(self):
response = self.app.get(url('branches', format='json')).json
- self.assertEquals(response, self.config['branches'])
+ self.assertEquals(response, json.load(open('branches-test.json')))
def test_builders(self):
# We have to fake out time.time here so it returns a time closely after
@@ -49,15 +54,16 @@ def test_builders_bad_branch(self):
self.assert_("Branch branch3 not found" in response.body)
def test_branch(self):
- response = self.app.get(url('branch', branch='branch1', format='json')).json
- self.assertEquals(len(response['builds']), 1)
- self.assertEquals(len(response['pending']), 2)
+ # fake the time to be when state.sql was created
+ with mock.patch.object(time, 'time', return_value=1285855044):
+ response = self.app.get(url('branch', branch='branch1', format='json')).json
+ self.assertEquals(len(response), 3)
def test_branch2(self):
- response = self.app.get(url('branch', branch='branch2', format='json')).json
- self.assertEquals(len(response['builds']), 0)
- self.assertEquals(len(response['running']), 1)
- self.assertEquals(len(response['pending']), 0)
+ # fake the time to be when state.sql was created
+ with mock.patch.object(time, 'time', return_value=1285855044):
+ response = self.app.get(url('branch', branch='branch2', format='json')).json
+ self.assertEquals(len(response), 1)
def test_build(self):
response = self.app.get(url('build', branch='branch1', build_id=1, format='json')).json
@@ -75,8 +81,7 @@ def test_request(self):
def test_revision(self):
response = self.app.get(url('revision', branch='branch1', revision='123456789', format='json')).json
- self.assertEquals(len(response['builds']), 1)
- self.assertEquals(len(response['pending']), 0)
+ self.assertEquals(len(response), 1)
def test_reprioritize(self):
self.g.mq._clock = mock.Mock(return_value=543221)
@@ -1,7 +1,7 @@
import threading
import mock
from buildapi.lib import cacher
-from unittest import TestCase
+from unittest import TestCase, SkipTest
class Cases(object):
@@ -19,7 +19,7 @@ def test_get(self):
m.assert_not_called()
def test_put(self):
- m = mock.Mock()
+ m = mock.Mock(return_value=9999)
self.c.put('not-there', 7)
self.assertEqual(self.c.get('not-there', m), 7)
m.assert_not_called()
@@ -56,11 +56,18 @@ def get(thd):
class TestRedisCacher(TestCase, Cases):
def newCache(self):
- return cacher.RedisCache()
+ return cacher.RedisCache(host='localhost')
def setUp(self):
+ try:
+ import redis
+ except ImportError:
+ raise SkipTest("redis not installed")
self.c = self.newCache()
- self.c.r.delete('not-there')
+ try:
+ self.c.r.delete('not-there')
+ except redis.ConnectionError:
+ raise SkipTest("no redis server on localhost")
def tearDown(self):
self.c.r.delete('there')
@@ -70,10 +77,19 @@ def tearDown(self):
class TestMemcacheCacher(TestCase, Cases):
def newCache(self):
- return cacher.MemcacheCache()
+ return cacher.MemcacheCache(hosts=['localhost'])
def setUp(self):
+ try:
+ import memcache
+ assert memcache
+ except ImportError:
+ raise SkipTest("memcache not installed")
self.c = self.newCache()
+ # memcached will just happily cache nothing if it can't connect, which is
+ # great in production but not in testing.
+ if not self.c.m.servers[0].connect():
+ raise SkipTest("no memcached server on localhost")
self.c.m.delete('not-there')
def tearDown(self):
Oops, something went wrong.

0 comments on commit da83a09

Please sign in to comment.