Skip to content
This repository has been archived by the owner on Oct 3, 2018. It is now read-only.

Commit

Permalink
Add repo tracking and prep to merge e10s branch to master
Browse files Browse the repository at this point in the history
  • Loading branch information
EricRahm committed Mar 18, 2016
1 parent 2138ce7 commit a6d184f
Show file tree
Hide file tree
Showing 9 changed files with 103 additions and 27 deletions.
36 changes: 24 additions & 12 deletions benchtester/BatchTester.py
Expand Up @@ -100,6 +100,20 @@ def __init__(self, build, revision):
# honor this in should_test as well
self.force = None

def build_type(self):
    """Return the short string tag for this job's build class.

    Maps the concrete BuildGetter class of ``self.build`` to one of
    'compile', 'try', 'ftp', 'tinderbox', or 'nightly'.

    Raises:
        Exception: if ``self.build`` is not one of the known build classes.
    """
    # Ordered (class, tag) pairs; checked in the same order as the
    # original if/elif chain so any subclass relationships resolve
    # identically.
    known_types = (
        (BuildGetter.CompileBuild, 'compile'),
        (BuildGetter.TryBuild, 'try'),
        (BuildGetter.FTPBuild, 'ftp'),
        (BuildGetter.TinderboxBuild, 'tinderbox'),
        (BuildGetter.NightlyBuild, 'nightly'),
    )
    for build_class, tag in known_types:
        if isinstance(self.build, build_class):
            return tag
    raise Exception("Unknown build type %s" % (self.build,))

@staticmethod
def deserialize(buildobj, args):
if buildobj['type'] == 'compile':
Expand Down Expand Up @@ -139,26 +153,22 @@ def serialize(self):
'series': self.series
}

if isinstance(self.build, BuildGetter.CompileBuild):
ret['type'] = 'compile'
elif isinstance(self.build, BuildGetter.TryBuild):
ret['type'] = 'try'
build_type = self.build_type()
ret['type'] = build_type

if build_type == 'try':
ret['changeset'] = self.build._changeset
elif isinstance(self.build, BuildGetter.FTPBuild):
ret['type'] = 'ftp'
elif build_type == 'ftp':
ret['path'] = self.build._path
elif isinstance(self.build, BuildGetter.TinderboxBuild):
elif build_type == 'tinderbox':
# When deserializing we need to look this up by its tinderbox timestamp,
# even if we use the push timestamp internally
ret['timestamp'] = self.build.get_tinderbox_timestamp()
ret['type'] = 'tinderbox'
ret['branch'] = self.build.get_branch()
elif isinstance(self.build, BuildGetter.NightlyBuild):
elif build_type == 'nightly':
# Date of nightly might not correspond to build timestamp
ret['for'] = '%u-%u-%u' % (self.build._date.year, self.build._date.month, self.build._date.day)
ret['type'] = 'nightly'
else:
raise Exception("Unknown build type %s" % (build,))

return ret

# Work around multiprocessing.Pool() quirkiness. We can't give it
Expand Down Expand Up @@ -486,6 +496,8 @@ def _process_batch(globalargs, batchargs, returnproxy, hook):
mod = None
ret = BatchTest._process_batch_inner(globalargs, batchargs, mod)
except Exception, e:
import traceback
traceback.print_exc()
ret = "An exception occured while processing batch -- %s: %s" % (type(e), e)

if type(ret) == str:
Expand Down
29 changes: 26 additions & 3 deletions benchtester/BenchTester.py
Expand Up @@ -30,7 +30,8 @@
'''CREATE TABLE IF NOT EXISTS
"benchtester_builds" ("id" INTEGER PRIMARY KEY NOT NULL,
"name" VARCHAR NOT NULL UNIQUE,
"time" DATETIME NOT NULL)''',
"time" DATETIME NOT NULL,
"repo_id" INTEGER NOT NULL)''',

# Tests - tests that have been run and against which build
'''CREATE TABLE IF NOT EXISTS
Expand All @@ -50,6 +51,11 @@
"benchtester_procs" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"name" VARCHAR NOT NULL UNIQUE)''',

# Repos - names of source repositories
'''CREATE TABLE IF NOT EXISTS
"benchtester_repos" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
"name" VARCHAR NOT NULL UNIQUE)''',

# Checkpoints - names of checkpoints
'''CREATE TABLE IF NOT EXISTS
"benchtester_checkpoints" ("id" INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
Expand Down Expand Up @@ -162,7 +168,7 @@ def map_process_names(process_names):

for full_process_name in process_names:
# Drop the pid portion of process name
process_re = r'(.*)\s+\(\d+\)'
process_re = r'(.*)\s+\(.+\)'
m = re.match(process_re, full_process_name)
if m:
proc_name = m.group(1)
Expand Down Expand Up @@ -377,6 +383,15 @@ def _open_db(self):
if not db_exists:
cur.execute("INSERT INTO `benchtester_version` (`version`) VALUES (?)", [ gVersion ])

# Create/update the repo
cur.execute("SELECT `id` FROM `benchtester_repos` WHERE `name` = ?", [ self.repo ])
row = cur.fetchone()
if row:
repo_id = int(row[0])
else:
cur.execute("INSERT INTO benchtester_repos(name) VALUES (?)", (self.repo, ))
repo_id = cur.lastrowid

# Create/update build ID
cur.execute("SELECT `time`, `id` FROM `benchtester_builds` WHERE `name` = ?", [ self.buildname ])
buildrow = cur.fetchone()
Expand All @@ -387,7 +402,9 @@ def _open_db(self):
self.build_id = buildrow[1]
elif not buildrow:
self.info("Creating new build record")
cur.execute("INSERT INTO `benchtester_builds` (`name`, `time`) VALUES (?, ?)", (self.buildname, int(self.buildtime)))
cur.execute("INSERT INTO `benchtester_builds` (`name`, `time`, `repo_id`) "
"VALUES (?, ?, ?)",
(self.buildname, int(self.buildtime), repo_id))
cur.execute("SELECT last_insert_rowid()")
self.build_id = cur.fetchone()[0]
else:
Expand Down Expand Up @@ -424,6 +441,12 @@ def setup(self, args):
if (self.args['buildtime']):
self.buildtime = str(self.args['buildtime']).strip()

if 'repo' in self.args and self.args['repo']:
self.repo = self.args['repo']
self.info('Using provided repo: %s' % self.repo)
else:
self.repo = 'mozilla-inbound'
self.info('Using default repo: mozilla-inbound')

# Try to autodetect commitname/time if given a binary in a repo
if not self.buildname or not self.buildtime:
Expand Down
9 changes: 8 additions & 1 deletion create_graph_json.py
Expand Up @@ -258,7 +258,10 @@ def error(msg):

# Fetch and sort the builds by timestamp. For builds with identical push dates,
# lookup the revision number from hg
cur.execute('''SELECT `id`, `name`, `time` FROM `benchtester_builds`''')
cur.execute('''SELECT build.id as `id`, build.name as `name`, build.time as `time`, repo.name as `repo_name`
FROM `benchtester_builds` as build, `benchtester_repos` as repo
WHERE build.repo_id = repo.id''')

builds = cur.fetchall()
hg_ui = None
hg_repo = None
Expand Down Expand Up @@ -528,6 +531,10 @@ def discard(node):
if not testname in gTests.keys() or \
not gTests[testname].get('dump'):
del testdata[testname]
else:
# Add test metadata.
testdata[testname]['repo'] = build['repo_name']
testdata[testname]['revision'] = build['name']

#
# Write out the test data for this build into <buildname>.json.gz
Expand Down
4 changes: 4 additions & 0 deletions html/slimyet.js
Expand Up @@ -548,6 +548,10 @@ var gSeries = {
'MaxJSV2': "JS: After TP5 [+30s]",
'MaxImagesV2': "Images: After TP5 [+30s]"
},
};

// Once we enable e10s we'll want to add this to gSeries.
var gSeriesE10S = {
"Web Content Resident Memory" : {
'Web Content StartMemoryResidentV2': "RSS: Fresh start",
'Web Content StartMemoryResidentSettledV2': "RSS: Fresh start [+30s]",
Expand Down
9 changes: 8 additions & 1 deletion slimtest_batchtester_hook.py
Expand Up @@ -96,14 +96,21 @@ def run_tests(build, args):
if not tester.load_module(test['type']):
raise Exception("Could not load module %s" % (test['type'],))

REPO_MAP = {
'nightly': 'mozilla-central',
'tinderbox': 'mozilla-inbound',
'try': 'try'
}

tester.setup({
'buildname': build.revision,
'binary': build.build.get_binary(),
'buildtime': build.build.get_buildtime(),
'sqlitedb': database_for_build(build),
'logfile': logfile,
'gecko_log': gecko_logfile,
'marionette_port': 24242 + build.num # Use different marionette ports so as not to collide
'marionette_port': 24242 + build.num, # Use different marionette ports so as not to collide
'repo': REPO_MAP.get(build.build_type(), None),
})

display = ":%u" % (build.num + 9,)
Expand Down
1 change: 1 addition & 0 deletions slimtest_config.py
Expand Up @@ -35,6 +35,7 @@
'test': [ 'benchtester', 'test_memory_usage.py' ],
'proxyPort': 3128,
'e10s': True,
'entities': 1, 'perTabPause': 0, 'settleWaitTime': 0, 'iterations': 1
}
},
};
19 changes: 19 additions & 0 deletions tests/benchtester/test_bench_tester.py
Expand Up @@ -46,6 +46,25 @@ def test_process_name_mapping(self):
proc_name_mappings = BenchTester.map_process_names(proc_names_list)
self.assertEqual(expected_mappings, proc_name_mappings)

# Test multiple of one type with pid prefix
proc_names_list = [
"Main",
"Web Content (pid 1234)",
"Web Content (pid 2345)",
"Web Content (pid 3456)"
]

expected_mappings = {
"Main": "Main",
"Web Content (pid 1234)": "Web Content",
"Web Content (pid 2345)": "Web Content 2",
"Web Content (pid 3456)": "Web Content 3"
}

proc_name_mappings = BenchTester.map_process_names(proc_names_list)
self.assertEqual(expected_mappings, proc_name_mappings)


# Test multiple of several types
proc_names_list = [
"Main",
Expand Down
16 changes: 8 additions & 8 deletions util/process_perf_data.py
Expand Up @@ -22,14 +22,14 @@

# A description of each checkpoint and the root path to it.
CHECKPOINTS = [
{ 'name': "Fresh start", 'path': "Iteration 1/Start" },
{ 'name': "Fresh start [+30s]", 'path': "Iteration 1/StartSettled" },
{ 'name': "After tabs open", 'path': "Iteration 5/TabsOpen" },
{ 'name': "After tabs open [+30s]", 'path': "Iteration 5/TabsOpenSettled" },
{ 'name': "After tabs open [+30s, forced GC]", 'path': "Iteration 5/TabsOpenForceGC" },
{ 'name': "Tabs closed", 'path': "Iteration 5/TabsClosed" },
{ 'name': "Tabs closed [+30s]", 'path': "Iteration 5/TabsClosedSettled" },
{ 'name': "Tabs closed [+30s, forced GC]", 'path': "Iteration 5/TabsClosedForceGC" }
{ 'name': "Fresh start", 'path': "Iteration 1/Start/Main" },
{ 'name': "Fresh start [+30s]", 'path': "Iteration 1/StartSettled/Main" },
{ 'name': "After tabs open", 'path': "Iteration 5/TabsOpen/Main" },
{ 'name': "After tabs open [+30s]", 'path': "Iteration 5/TabsOpenSettled/Main" },
{ 'name': "After tabs open [+30s, forced GC]", 'path': "Iteration 5/TabsOpenForceGC/Main" },
{ 'name': "Tabs closed", 'path': "Iteration 5/TabsClosed/Main" },
{ 'name': "Tabs closed [+30s]", 'path': "Iteration 5/TabsClosedSettled/Main" },
{ 'name': "Tabs closed [+30s, forced GC]", 'path': "Iteration 5/TabsClosedForceGC/Main" }
]

# A description of each perfherder suite and the path to its values.
Expand Down
7 changes: 5 additions & 2 deletions util/update_database_v0_v1.py
Expand Up @@ -110,6 +110,9 @@
# Add an entry for Main in benchtester_procs
cur.execute('INSERT INTO benchtester_procs(name) VALUES ( ? )', ('Main', ))

# Add an entry for mozilla-inbound in benchtester_repos
cur.execute('INSERT INTO benchtester_repos(name) VALUES ( ? )', ('mozilla-inbound', ))

# Fill in the datapoints table
cur.execute('SELECT DISTINCT name AS datapoint '
'FROM old.benchtester_datapoints d ')
Expand Down Expand Up @@ -137,8 +140,8 @@ def splitunits(dp):
print("[%.02fs] Inserted %d datapoints" % ((time.time() - starttime), len(datapoints)))

# Copy the builds table
cur.execute('INSERT INTO benchtester_builds(id, name, time) '
'SELECT id, name, time from old.benchtester_builds ')
cur.execute('INSERT INTO benchtester_builds(id, name, time, repo_id) '
'SELECT id, name, time, 1 from old.benchtester_builds')

print("[%.02fs] Copied benchtester_builds" % (time.time() - starttime))

Expand Down

0 comments on commit a6d184f

Please sign in to comment.