made changes

1 parent 62ae872 commit 6a7259146dd6ac07a40d5a9eb8bc0748ed059793 @jeads committed Nov 29, 2012
@@ -117,56 +117,54 @@ def get_metrics_pushlog(
    ptm = factory.get_ptm(project)
    mtm = factory.get_mtm(project)

-    pushlog = {}
-    if days_ago > 0:
-        pushlog = plm.get_branch_pushlog(None, days_ago, numdays, branch)
-    else:
-        pushlog = plm.get_branch_pushlog_by_revision(
-            revision, branch, pushes_before, pushes_after
+    aggregate_pushlog, changeset_lookup = plm.get_branch_pushlog_by_revision(
+        revision, branch, pushes_before, pushes_after
        )

-    aggregate_pushlog = []
    pushlog_id_index_map = {}
    all_revisions = []

-    for node in pushlog:
-        if node['pushlog_id'] not in pushlog_id_index_map:
-            node_struct = {
-                'revisions':[],
-                'dz_revision':"",
-                'branch_name':node['name'],
-                'date':node['date'],
-                'push_id':node['push_id'],
-                'pushlog_id':node['pushlog_id'],
-                'metrics_data':[],
-                }
-            aggregate_pushlog.append(node_struct)
-            index = len(aggregate_pushlog) - 1
-            pushlog_id_index_map[node['pushlog_id']] = index
-        pushlog_index = pushlog_id_index_map[ node['pushlog_id'] ]
-        revision = mtm.truncate_revision(node['node'])
-        aggregate_pushlog[pushlog_index]['revisions'].append(revision)
-        all_revisions.append(revision)
+    for index, node in enumerate(aggregate_pushlog):
+
+        pushlog_id_index_map[node['pushlog_id']] = index
+
+        aggregate_pushlog[index]['metrics_data'] = []
+        aggregate_pushlog[index]['dz_revision'] = ""
+        aggregate_pushlog[index]['branch_name'] = branch
+
+        changesets = changeset_lookup[ node['pushlog_id'] ]
+
+        #truncate the revision strings and collect them
+        for cset_index, revision_data in enumerate(changesets['revisions']):
+
+            full_revision = revision_data['revision']
+
+            revision = mtm.truncate_revision(full_revision)
+            changesets['revisions'][cset_index]['revision'] = revision
+
+            all_revisions.append(revision)
+
+        aggregate_pushlog[index]['revisions'] = changesets['revisions']

    pushlog_id_list = pushlog_id_index_map.keys()

    # get the testrun ids from perftest
    filtered_test_run_ids = ptm.get_test_run_ids(
-        branch, all_revisions, os_name, os_version, branch_version, processor,
-        build_type, test_name
+        branch, all_revisions, os_name, os_version, branch_version,
+        processor, build_type, test_name
        )

+    # get the test run ids associated with the pushlog ids
    pushlog_test_run_ids = mtm.get_test_run_ids_from_pushlog_ids(
        pushlog_ids=pushlog_id_list
        )

+    # get intersection
    test_run_ids = list( set(filtered_test_run_ids).intersection(
        set(pushlog_test_run_ids)) )

+    # get the metrics data for the intersection
    metrics_data = mtm.get_metrics_data_from_test_run_ids(
        test_run_ids, page_name
        )
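For orientation, the controller loop above now works against two structures handed back by the pushlog model: the ordered list of pushes and a changeset lookup keyed by pushlog id. A rough sketch of those shapes, with every value invented for illustration only:

# One dict per push, roughly as selected by the pushlog procs (values invented).
aggregate_pushlog = [
    {
        'pushlog_id': 101,
        'push_id': 7,
        'date': 1354147200,
        'user': 'dev@example.com',
    },
]

# Changesets grouped by pushlog_id, as built by get_branch_pushlog_by_revision.
changeset_lookup = {
    101: {
        'pushlog_id': 101,
        'revisions': [
            {
                'revision': 'f1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0',
                'desc': 'Bug NNN - example changeset description',
                'author': 'dev@example.com',
            },
        ],
    },
}

The loop then decorates each push dict with metrics_data, dz_revision, branch_name and the truncated revisions pulled from the lookup before the test run ids are filtered and intersected.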
@@ -246,6 +246,7 @@ def get_branch_pushlog_by_revision(
        node = push_data[0]['node']
        push_id = push_data[0]['push_id']
+        branch_id = push_data[0]['branch_id']

        pushes_before_proc = 'hgmozilla.selects.get_push_ids_before_node'
        pushes_after_proc = 'hgmozilla.selects.get_push_ids_after_node'
@@ -254,17 +255,60 @@ def get_branch_pushlog_by_revision(
            proc=pushes_before_proc,
            debug_show=self.DEBUG,
            return_type='tuple',
-            placeholders=[push_id, branch_name, pushes_before]
+            placeholders=[push_id, branch_id, pushes_before]
            )

        pushes_after_data = self.hg_ds.dhub.execute(
            proc=pushes_after_proc,
            debug_show=self.DEBUG,
            return_type='tuple',
-            placeholders=[push_id, branch_name, pushes_after]
+            placeholders=[push_id, branch_id, pushes_after]
            )

-        return pushes_before_data + push_data + pushes_after_data
+        #Combine all of the requested push data
+        pushlog = pushes_before_data + push_data + pushes_after_data
+
+        #Retrieve a complete list of all of the pushlog ids
+        pushlog_ids = []
+
+        map(
+            lambda n: pushlog_ids.append(n['pushlog_id']),
+            pushlog
+            )
+
+        #Use a separate query to retrieve associated revisions so
+        #we can control the number of pushes by using a LIMIT clause
+        changeset_data_proc = 'hgmozilla.selects.get_changeset_data_for_pushes'
+
+        #Build the sql WHERE IN clause
+        where_in_clause = ','.join( map( lambda v:'%s', pushlog_ids ) )
+
+        changeset_data = self.hg_ds.dhub.execute(
+            proc=changeset_data_proc,
+            debug_show=self.DEBUG,
+            return_type='tuple',
+            placeholders=pushlog_ids,
+            replace=[where_in_clause]
+            )
+
+        #Aggregate changesets
+        changeset_lookup = {}
+        for changeset in changeset_data:
+            if changeset['pushlog_id'] not in changeset_lookup:
+                changeset_struct = {
+                    'revisions':[],
+                    'pushlog_id':changeset['pushlog_id']
+                    }

+                changeset_lookup[ changeset['pushlog_id'] ] = changeset_struct
+
+            changeset_lookup[ changeset['pushlog_id'] ]['revisions'].append(
+                { 'revision':changeset['node'],
+                  'desc':changeset['desc'],
+                  'author':changeset['author'] }
+                )
+
+        return pushlog, changeset_lookup

    def get_params(self, numdays, enddate=None):
        """
@@ -288,6 +332,7 @@ def get_params(self, numdays, enddate=None):
"full": 1,
"startdate": _startdate.strftime("%m/%d/%Y"),
}
+
# enddate is optional. the endpoint will just presume today,
# if not given.
if enddate:
@@ -65,6 +65,7 @@
"sql":"SELECT p.id AS 'pushlog_id',
p.push_id,
p.date,
+ p.user,
c.node,
b.id AS 'branch_id',
b.name
@@ -81,13 +82,9 @@
"sql":"SELECT p.id AS 'pushlog_id',
p.push_id,
p.date,
- c.node,
- b.id AS 'branch_id',
- b.name
+ p.user
FROM pushlogs AS p
- LEFT JOIN changesets AS c ON p.id = c.pushlog_id
- LEFT JOIN branches AS b ON p.branch_id = b.id
- WHERE p.push_id < ? AND b.name = ?
+ WHERE p.push_id < ? AND p.branch_id = ?
LIMIT ?",
"host":"read_host"
@@ -98,18 +95,26 @@
"sql":"SELECT p.id AS 'pushlog_id',
p.push_id,
p.date,
- c.node,
- b.id AS 'branch_id',
- b.name
+ p.user
FROM pushlogs AS p
- LEFT JOIN changesets AS c ON p.id = c.pushlog_id
- LEFT JOIN branches AS b ON p.branch_id = b.id
- WHERE p.push_id > ? AND b.name = ?
+ WHERE p.push_id > ? AND p.branch_id = ?
LIMIT ?",
"host":"read_host"
},
+ "get_changeset_data_for_pushes":{
+
+ "sql":"SELECT c.id,
+ c.pushlog_id,
+ c.node,
+ c.author,
+ c.desc
+ FROM changesets AS c
+ WHERE c.pushlog_id IN (REP0)",
+ "host":"read_host"
+
+ },
"get_all_branch_pushlogs":{
"sql":"SELECT p.id AS 'pushlog_id',
p.push_id,
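The REP0 marker in get_changeset_data_for_pushes is a replacement token, not a bound parameter: the model code shown earlier joins one %s placeholder per pushlog id and substitutes that run for REP0 via the replace argument, while the ids travel separately as placeholders. A minimal sketch of the same pattern against a bare MySQLdb cursor; the driver choice and connection details are hypothetical, only the placeholder-expansion idea is taken from this commit:

import MySQLdb  # assumed driver; datazilla's datasource hub sits on MySQL

# Hypothetical connection details, not part of this commit.
conn = MySQLdb.connect(host="localhost", user="reader",
                       passwd="secret", db="hgmozilla")
cursor = conn.cursor()

pushlog_ids = [101, 102, 103]   # illustrative ids only

# One %s per id, e.g. "%s,%s,%s"; equivalent to the commit's
# ','.join( map( lambda v:'%s', pushlog_ids ) ).
where_in_clause = ','.join(['%s'] * len(pushlog_ids))

# The proc's REP0 token is swapped for the placeholder run, so the
# statement the driver prepares looks like the string built here.
sql = ("SELECT c.id, c.pushlog_id, c.node, c.author, c.desc "
       "FROM changesets AS c "
       "WHERE c.pushlog_id IN ({0})").format(where_in_clause)

cursor.execute(sql, pushlog_ids)   # the ids themselves stay parameterized
changeset_data = cursor.fetchall()

Keeping the ids as bound parameters while only the placeholder count is interpolated is what keeps the dynamic IN clause safe from injection.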
@@ -233,6 +233,13 @@ div.su-data-series-panel {
padding: 0.5em;
overflow: auto;
}
+div.su-datum-info-panel {
+ margin-top: 5px;
+ margin-left: 5px;
+ padding: 0.5em;
+ float:right;
+ width:180px;
+}
.ui-button-text{
font-size: 11px !important;
}
@@ -166,6 +166,11 @@ var TestPagesView = new Class({
if( $(event.target).is('input') ){
+ //Retrieve the associated mean
+ var row = $(event.target).closest('tr');
+ var cells = $(row).find('td');
+ var meanValue = $(cells[3]).text();
+
var checked = $(event.target).attr('checked');
var pagename = $(event.target).attr(this.pagenameDataAttr);
var testSuite = $(this.testSuiteSel).text();
@@ -176,7 +181,8 @@ var TestPagesView = new Class({
'pagename':pagename,
'testsuite':testSuite,
'platform':platform,
- 'platform_info':this.platformInfo
+ 'platform_info':this.platformInfo,
+ 'mean':meanValue
};
$(this.eventContainerSel).trigger(