Permalink
Browse files

merged with master

  • Loading branch information...
2 parents f0a0aa9 + 1fca824 commit 88298abf3fa52c78bee2caa2b57f752fda8ff751 @jeads jeads committed Oct 22, 2012
View
@@ -2,17 +2,20 @@
PYTHON_ROOT=/usr/bin/
DATAZILLA_HOME=/usr/local/datazilla
-*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --cron_batch small
-*/10 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --cron_batch medium
-0,30 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --cron_batch large
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --project stoneridge
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --project b2g
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --project jetperf
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_summary_cache --build --cache --project test
-*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --cron_batch small
-*/10 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --cron_batch medium
-0,30 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --cron_batch large
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --project stoneridge
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --project b2g
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --project jetperf
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py populate_test_collections --load --project test
-*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --cron_batch small
-*/10 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --cron_batch medium
-0,30 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --cron_batch large
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --project stoneridge
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --project b2g
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --project jetperf
+*/2 * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py set_default_product --project test
# run twice every minute
* * * * * $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py process_objects --cron_batch small --loadlimit 25 && $PYTHON_ROOT/python $DATAZILLA_HOME/manage.py process_objects --cron_batch small --loadlimit 25
@@ -24,13 +24,17 @@ def set_default_products(project):
current_version = versions[0]
- default_products = []
+ default_count = 0
for id in products:
default = 0
if current_version == products[id]['version']:
- default = 1
+ default_count += 1
+ #Don't load more than 10 datasets by default
+ if default_count <= 10:
+ default = 1
+
+ ptm.set_default_product(id, default)
- ptm.set_default_product(id, default)
ptm.cache_default_project()
@@ -62,7 +62,6 @@ def build_test_summaries(project):
[])
json_data = json.dumps( table )
-
ptm.set_summary_cache( products[ product_name ], tr, json_data )
ptm.disconnect()
View
@@ -996,13 +996,19 @@ def set_summary_cache(self, item_id, item_data, value):
now_datetime
]
+ # NOTE: Disabling warnings here. A warning is generated in the
+ # production environment that is specific to the master/slave
+ # configuration.
+ filterwarnings('ignore', category=MySQLdb.Warning)
+
self.sources["perftest"].dhub.execute(
proc='perftest.inserts.set_summary_cache',
debug_show=self.DEBUG,
placeholders=placeholders,
executemany=False,
)
+ resetwarnings()
def set_test_collection(self, name, description):
@@ -1135,8 +1141,10 @@ def claim_objects(self, limit):
# unsafe because the set of rows included cannot be predicted.
#
# I have been unable to generate the warning in the development
- # environment to date. In the production environment the generation
- # of this warning is causing the program to exit.
+ # environment because the warning is specific to the master/slave
+ # replication environment which only exists in production. In the
+ # production environment the generation of this warning is causing
+ # the program to exit.
#
# The mark_loading SQL statement does execute an UPDATE/LIMIT but now
# implements an "ORDER BY id" clause making the UPDATE
@@ -618,14 +618,14 @@
FROM `summary_cache`
WHERE `item_id` = ? AND `item_data` = ?",
- "host":"read_host"
+ "host":"master_host"
},
"get_all_summary_cache_data":{
"sql":"SELECT `item_id`, `item_data`, `value`, `date`
FROM `summary_cache`",
- "host":"read_host"
+ "host":"master_host"
},
"get_test_collections":{
@@ -134,3 +134,226 @@ def set_test_data(request, project=""):
status = 200
return HttpResponse(json.dumps(result), mimetype=APP_JS, status=status)
+
+
+def dataview(request, project="", method=""):
+
+ proc_path = "perftest.views."
+
+ ##Full proc name including base path in json file##
+ full_proc_path = "%s%s" % (proc_path, method)
+
+ if settings.DEBUG:
+ ###
+ #Write IP address and datetime to log
+ ###
+ print "Client IP:%s" % (request.META['REMOTE_ADDR'])
+ print "Request Datetime:%s" % (str(datetime.datetime.now()))
+
+ json = ""
+ if method in DATAVIEW_ADAPTERS:
+ dm = PerformanceTestModel(project)
+ pt_dhub = dm.sources["perftest"].dhub
+
+ if 'adapter' in DATAVIEW_ADAPTERS[method]:
+ json = DATAVIEW_ADAPTERS[method]['adapter'](project,
+ method,
+ request,
+ dm)
+ else:
+ if 'fields' in DATAVIEW_ADAPTERS[method]:
+ fields = []
+ for f in DATAVIEW_ADAPTERS[method]['fields']:
+ if f in request.GET:
+ fields.append( int( request.GET[f] ) )
+
+ if len(fields) == len(DATAVIEW_ADAPTERS[method]['fields']):
+ json = pt_dhub.execute(proc=full_proc_path,
+ debug_show=settings.DEBUG,
+ placeholders=fields,
+ return_type='table_json')
+
+ else:
+ json = '{ "error":"%s fields required, %s provided" }' % (str(len(DATAVIEW_ADAPTERS[method]['fields'])),
+ str(len(fields)))
+
+ else:
+
+ json = pt_dhub.execute(proc=full_proc_path,
+ debug_show=settings.DEBUG,
+ return_type='table_json')
+
+ dm.disconnect();
+
+ else:
+ json = '{ "error":"Data view name %s not recognized" }' % method
+
+ return HttpResponse(json, mimetype=APP_JS)
+
+
+def _get_test_reference_data(project, method, request, dm):
+
+ ref_data = dm.get_test_reference_data()
+
+ json_data = json.dumps( ref_data )
+
+ return json_data
+
+
+def _get_test_run_summary(project, method, request, dm):
+
+ product_ids = []
+ test_ids = []
+ platform_ids = []
+
+ #####
+ #Calling get_id_list() ensures that we have only numbers in the
+ #lists, this guards against SQL injection
+ #####
+ if 'product_ids' in request.GET:
+ product_ids = utils.get_id_list(request.GET['product_ids'])
+ if 'test_ids' in request.GET:
+ test_ids = utils.get_id_list(request.GET['test_ids'])
+ if 'platform_ids' in request.GET:
+ platform_ids = utils.get_id_list(request.GET['platform_ids'])
+
+ time_key = 'days_30'
+ time_ranges = utils.get_time_ranges()
+ if 'tkey' in request.GET:
+ time_key = request.GET['tkey']
+
+ if not product_ids:
+
+ ##Default to id 1
+ product_ids = [1]
+
+ ##Set default product_id
+ pck = dm.get_project_cache_key('default_product')
+ default_products = cache.get(pck)
+ default_products = dm.get_default_products()
+
+ ##If we have one use it
+ if default_products:
+ product_ids = map( int, default_products.split(',') )
+
+ json_data = '{}'
+
+ #Commenting the use of memcache out for now, the shared memcache
+ #in production is failing to return the expected data. This works
+ #in development so it's likely a configuration issue of some sort.
+ """
+ if product_ids and (not test_ids) and (not platform_ids):
+
+ if len(product_ids) > 1:
+ extend_list = { 'data':[], 'columns':[] }
+ for id in product_ids:
+ key = utils.get_summary_cache_key(project, str(id), time_key)
+
+ compressed_json_data = cache.get(key)
+
+ if compressed_json_data:
+ json_data = zlib.decompress( compressed_json_data )
+ data = json.loads( json_data )
+ extend_list['data'].extend( data['data'] )
+ extend_list['columns'] = data['columns']
+
+ json_data = json.dumps(extend_list)
+
+
+ else:
+ key = utils.get_summary_cache_key(
+ project,
+ str(product_ids[0]),
+ time_key,
+ )
+ compressed_json_data = cache.get(key)
+
+ if compressed_json_data:
+ json_data = zlib.decompress( compressed_json_data )
+
+ else:
+ """
+ table = dm.get_test_run_summary(time_ranges[time_key]['start'],
+ time_ranges[time_key]['stop'],
+ product_ids,
+ platform_ids,
+ test_ids)
+
+ json_data = json.dumps( table )
+
+ return json_data
+
+
+def _get_test_values(project, method, request, dm):
+
+ data = {};
+
+ if 'test_run_id' in request.GET:
+ data = dm.get_test_run_values( request.GET['test_run_id'] )
+
+ json_data = json.dumps( data )
+
+ return json_data
+
+
+def _get_page_values(project, method, request, dm):
+
+ data = {};
+
+ if ('test_run_id' in request.GET) and ('page_id' in request.GET):
+ data = dm.get_page_values( request.GET['test_run_id'], request.GET['page_id'] )
+
+ json_data = json.dumps( data )
+
+ return json_data
+
+
+def _get_test_value_summary(project, method, request, dm):
+
+ data = {};
+
+ if 'test_run_id' in request.GET:
+ data = dm.get_test_run_value_summary( request.GET['test_run_id'] )
+
+ json_data = json.dumps( data )
+
+ return json_data
+
+
+#####
+#UTILITY METHODS
+#####
+DATAVIEW_ADAPTERS = { ##Flat tables SQL##
+ 'test_run':{},
+ 'test_value':{ 'fields':[ 'test_run_id', ] },
+ 'test_option_values':{ 'fields':[ 'test_run_id', ] },
+ 'test_aux_data':{ 'fields':[ 'test_run_id', ] },
+
+ ##API only##
+ 'get_test_ref_data':{ 'adapter':_get_test_reference_data},
+
+ ##Visualization Tools##
+ 'test_runs':{ 'adapter':_get_test_run_summary,
+ 'fields':['test_run_id',
+ 'test_run_data']
+ },
+
+ 'test_chart':{ 'adapter':_get_test_run_summary,
+ 'fields':['test_run_id',
+ 'test_run_data'] },
+
+ 'test_values':{ 'adapter':_get_test_values,
+ 'fields':['test_run_id'] },
+
+ 'page_values':{ 'adapter':_get_page_values,
+ 'fields':['test_run_id',
+ 'page_id'] },
+
+ 'test_value_summary':{ 'adapter':_get_test_value_summary,
+ 'fields':['test_run_id'] } }
+
+SIGNALS = set()
+for dv in DATAVIEW_ADAPTERS:
+ if 'fields' in DATAVIEW_ADAPTERS[dv]:
+ for field in DATAVIEW_ADAPTERS[dv]['fields']:
+ SIGNALS.add(field)

0 comments on commit 88298ab

Please sign in to comment.