
removed legacy cache code
jeads committed Dec 6, 2012
1 parent 1411bfd commit beb04cc
Showing 2 changed files with 2 additions and 219 deletions.
34 changes: 2 additions & 32 deletions datazilla/webapp/apps/dataviews/views.py
@@ -162,43 +162,13 @@ def _get_test_run_summary(project, method, request, dm):

     json_data = '{}'

-    if product_ids and (not test_ids) and (not platform_ids):
-
-        if len(product_ids) > 1:
-            extend_list = { 'data':[], 'columns':[] }
-            for id in product_ids:
-                key = utils.get_summary_cache_key(project, str(id), time_key)
-
-                compressed_json_data = cache.get(key)
-
-                if compressed_json_data:
-                    json_data = zlib.decompress( compressed_json_data )
-                    data = json.loads( json_data )
-                    extend_list['data'].extend( data['data'] )
-                    extend_list['columns'] = data['columns']
-
-            json_data = json.dumps(extend_list)
-
-
-        else:
-            key = utils.get_summary_cache_key(
-                project,
-                str(product_ids[0]),
-                time_key,
-                )
-            compressed_json_data = cache.get(key)
-
-            if compressed_json_data:
-                json_data = zlib.decompress( compressed_json_data )
-
-    else:
-        table = dm.get_test_run_summary(time_ranges[time_key]['start'],
+    table = dm.get_test_run_summary(time_ranges[time_key]['start'],
                                     time_ranges[time_key]['stop'],
                                     product_ids,
                                     platform_ids,
                                     test_ids)

-        json_data = json.dumps( table )
+    json_data = json.dumps( table )

     return json_data
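The branch removed above was a read-through cache: per-product summary tables were stored as zlib-compressed JSON under keys built by utils.get_summary_cache_key(), and multi-product requests were stitched together from the individual cached entries. A minimal standalone sketch of that pattern follows; the key helper here is a stand-in, since the real key layout is not visible in this diff:

import json
import zlib

from django.core.cache import cache


def summary_cache_key(project, product_id, time_key):
    # Stand-in for utils.get_summary_cache_key(); the real key format is
    # not shown in the diff, so this layout is purely illustrative.
    return "summary_%s_%s_%s" % (project, product_id, time_key)


def get_summary_json(project, product_id, time_key, dm, time_ranges):
    # Read-through: try the compressed cache entry first.
    compressed = cache.get(summary_cache_key(project, product_id, time_key))
    if compressed:
        # Hit: entries were stored as zlib-compressed JSON strings.
        return zlib.decompress(compressed)

    # Miss: fall back to the live query, which is the only path the
    # code keeps after this commit.
    table = dm.get_test_run_summary(time_ranges[time_key]['start'],
                                    time_ranges[time_key]['stop'],
                                    [product_id], [], [])
    return json.dumps(table)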

187 changes: 0 additions & 187 deletions datazilla/webapp/apps/datazilla/views.py
@@ -135,190 +135,3 @@ def set_test_data(request, project=""):

    return HttpResponse(json.dumps(result), mimetype=APP_JS, status=status)


def dataview(request, project="", method=""):

    proc_path = "perftest.views."

    ##Full proc name including base path in json file##
    full_proc_path = "%s%s" % (proc_path, method)

    if settings.DEBUG:
        ###
        #Write IP address and datetime to log
        ###
        print "Client IP:%s" % (request.META['REMOTE_ADDR'])
        print "Request Datetime:%s" % (str(datetime.datetime.now()))

    json = ""
    if method in DATAVIEW_ADAPTERS:
        dm = PerformanceTestModel(project)
        pt_dhub = dm.sources["perftest"].dhub

        if 'adapter' in DATAVIEW_ADAPTERS[method]:
            json = DATAVIEW_ADAPTERS[method]['adapter'](project,
                                                        method,
                                                        request,
                                                        dm)
        else:
            if 'fields' in DATAVIEW_ADAPTERS[method]:
                fields = []
                for f in DATAVIEW_ADAPTERS[method]['fields']:
                    if f in request.GET:
                        fields.append( int( request.GET[f] ) )

                if len(fields) == len(DATAVIEW_ADAPTERS[method]['fields']):
                    json = pt_dhub.execute(proc=full_proc_path,
                                           debug_show=settings.DEBUG,
                                           placeholders=fields,
                                           return_type='table_json')

                else:
                    json = '{ "error":"%s fields required, %s provided" }' % (
                        str(len(DATAVIEW_ADAPTERS[method]['fields'])),
                        str(len(fields)))

            else:

                json = pt_dhub.execute(proc=full_proc_path,
                                       debug_show=settings.DEBUG,
                                       return_type='table_json')

        dm.disconnect()

    else:
        json = '{ "error":"Data view name %s not recognized" }' % method

    return HttpResponse(json, mimetype=APP_JS)
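The dispatch contract in dataview() above: a method with an 'adapter' entry is handed off to that function, while a fields-only method must supply every declared field as a numeric GET parameter or it receives the error JSON. A hypothetical exercise of that contract with Django's test client; the URL pattern is made up, since the routing is not part of this diff:

from django.test import Client

client = Client()

# 'test_value' declares fields=['test_run_id'], so the parameter is
# required and is coerced with int() before reaching the stored proc.
ok = client.get('/myproject/dataview/test_value', {'test_run_id': '42'})

# Omitting it triggers the error payload built above:
# { "error":"1 fields required, 0 provided" }
err = client.get('/myproject/dataview/test_value')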


def _get_test_reference_data(project, method, request, dm):

    ref_data = dm.get_test_reference_data()

    json_data = json.dumps( ref_data )

    return json_data


def _get_test_run_summary(project, method, request, dm):

    product_ids = []
    test_ids = []
    platform_ids = []

    #####
    #Calling get_id_list() ensures that we have only numbers in the
    #lists; this guards against SQL injection
    #####
    if 'product_ids' in request.GET:
        product_ids = utils.get_id_list(request.GET['product_ids'])
    if 'test_ids' in request.GET:
        test_ids = utils.get_id_list(request.GET['test_ids'])
    if 'platform_ids' in request.GET:
        platform_ids = utils.get_id_list(request.GET['platform_ids'])

    time_key = 'days_30'
    time_ranges = utils.get_time_ranges()
    if 'tkey' in request.GET:
        time_key = request.GET['tkey']

    if not product_ids:

        ##Default to id 1
        product_ids = [1]

        ##Set default product_id; note that the cached value is
        ##immediately overwritten by the live lookup below
        pck = dm.get_project_cache_key('default_product')
        default_products = cache.get(pck)
        default_products = dm.get_default_products()

        ##If we have one use it
        if default_products:
            product_ids = map( int, default_products.split(',') )

    json_data = '{}'

    table = dm.get_test_run_summary(time_ranges[time_key]['start'],
                                    time_ranges[time_key]['stop'],
                                    product_ids,
                                    platform_ids,
                                    test_ids)

    json_data = json.dumps( table )

    return json_data
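The sanitization comment above relies on utils.get_id_list() coercing every token to an integer, so nothing string-shaped can reach the SQL layer. That helper is not part of this diff; a minimal sketch of the guard it describes:

def get_id_list(id_string):
    # Coerce each comma-separated token to int; a non-numeric token
    # raises ValueError instead of flowing into a query as text.
    return [int(token) for token in id_string.split(',') if token.strip()]

# get_id_list('1,2,3')            -> [1, 2, 3]
# get_id_list('1; DROP TABLE x')  -> raises ValueError, never reaches SQL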


def _get_test_values(project, method, request, dm):

    data = {}

    if 'test_run_id' in request.GET:
        data = dm.get_test_run_values( request.GET['test_run_id'] )

    json_data = json.dumps( data )

    return json_data


def _get_page_values(project, method, request, dm):

    data = {}

    if ('test_run_id' in request.GET) and ('page_id' in request.GET):
        data = dm.get_page_values( request.GET['test_run_id'],
                                   request.GET['page_id'] )

    json_data = json.dumps( data )

    return json_data


def _get_test_value_summary(project, method, request, dm):

    data = {}

    if 'test_run_id' in request.GET:
        data = dm.get_test_run_value_summary( request.GET['test_run_id'] )

    json_data = json.dumps( data )

    return json_data


#####
#UTILITY METHODS
#####
DATAVIEW_ADAPTERS = { ##Flat tables SQL##
    'test_run':{},
    'test_value':{ 'fields':[ 'test_run_id', ] },
    'test_option_values':{ 'fields':[ 'test_run_id', ] },
    'test_aux_data':{ 'fields':[ 'test_run_id', ] },

    ##API only##
    'get_test_ref_data':{ 'adapter':_get_test_reference_data },

    ##Visualization Tools##
    'test_runs':{ 'adapter':_get_test_run_summary,
                  'fields':['test_run_id',
                            'test_run_data'] },

    'test_chart':{ 'adapter':_get_test_run_summary,
                   'fields':['test_run_id',
                             'test_run_data'] },

    'test_values':{ 'adapter':_get_test_values,
                    'fields':['test_run_id'] },

    'page_values':{ 'adapter':_get_page_values,
                    'fields':['test_run_id',
                              'page_id'] },

    'test_value_summary':{ 'adapter':_get_test_value_summary,
                           'fields':['test_run_id'] } }

SIGNALS = set()
for dv in DATAVIEW_ADAPTERS:
    if 'fields' in DATAVIEW_ADAPTERS[dv]:
        for field in DATAVIEW_ADAPTERS[dv]['fields']:
            SIGNALS.add(field)
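The SIGNALS loop reduces to the union of every declared 'fields' list; with the adapter table as written it contains exactly three names:

# Equivalent one-liner, and the value it produces for the table above:
SIGNALS = set(f for dv in DATAVIEW_ADAPTERS.values()
                for f in dv.get('fields', []))

assert SIGNALS == set(['test_run_id', 'test_run_data', 'page_id'])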
