diff --git a/webapp/graphite/readers/utils.py b/webapp/graphite/readers/utils.py
index c8498031b..0462dcc37 100644
--- a/webapp/graphite/readers/utils.py
+++ b/webapp/graphite/readers/utils.py
@@ -5,6 +5,8 @@
 
 from django.conf import settings
 
+from graphite.logger import log
+
 
 class BaseReader(object):
   __metaclass__ = abc.ABCMeta
diff --git a/webapp/graphite/render/datalib.py b/webapp/graphite/render/datalib.py
index d96c5c788..98a4bc14c 100755
--- a/webapp/graphite/render/datalib.py
+++ b/webapp/graphite/render/datalib.py
@@ -15,6 +15,7 @@
 
 import collections
 import re
+import time
 
 from traceback import format_exc
 
@@ -151,22 +152,19 @@ def copy(self, name=None, start=None, end=None, step=None, values=None, consolid
 # Data retrieval API
 @logtime(custom_msg=True)
 def fetchData(requestContext, pathExpr, msg_setter=None):
-  msg_setter("retrieval of \"%s\" took" % str(pathExpr))
+  msg_setter("lookup and merge of \"%s\" took" % str(pathExpr))
 
   seriesList = {}
   (startTime, endTime, now) = timebounds(requestContext)
 
-  result_queue = []
+  prefetched = requestContext.get('prefetched', {}).get((startTime, endTime, now), {}).get(pathExpr)
+  if not prefetched:
+    return []
 
-  prefetched = requestContext.get('prefetched', {}).get((startTime, endTime, now), None)
-  if prefetched is not None:
-    for result in prefetched[pathExpr]:
-      result_queue.append(result)
+  return _merge_results(pathExpr, startTime, endTime, prefetched, seriesList, requestContext)
 
-  return _merge_results(pathExpr, startTime, endTime, result_queue, seriesList, requestContext)
-
 
-def _merge_results(pathExpr, startTime, endTime, result_queue, seriesList, requestContext):
+def _merge_results(pathExpr, startTime, endTime, prefetched, seriesList, requestContext):
   log.debug("render.datalib.fetchData :: starting to merge")
 
   # Used as a cache to avoid recounting series None values below.
@@ -174,7 +172,7 @@ def _merge_results(pathExpr, startTime, endTime, result_queue, seriesList, reque
 
   errors = []
 
-  for path, results in result_queue:
+  for path, results in prefetched:
     if not results:
       log.debug("render.datalib.fetchData :: no results for %s.fetch(%s, %s)" % (path, startTime, endTime))
       continue
@@ -269,7 +267,8 @@ def prefetchData(requestContext, targets):
   if not pathExpressions:
     return
 
-  log.rendering("Fetching data for [%s]" % (', '.join(pathExpressions)))
+  start = time.time()
+  log.debug("Fetching data for [%s]" % (', '.join(pathExpressions)))
 
   (startTime, endTime, now) = timebounds(requestContext)
 
@@ -305,3 +304,5 @@ def prefetchData(requestContext, targets):
     requestContext['prefetched'] = {}
 
   requestContext['prefetched'][(startTime, endTime, now)] = prefetched
+
+  log.rendering("Fetched data for [%s] in %fs" % (', '.join(pathExpressions), time.time() - start))
diff --git a/webapp/graphite/util.py b/webapp/graphite/util.py
index 34ff8286c..b06e2def8 100644
--- a/webapp/graphite/util.py
+++ b/webapp/graphite/util.py
@@ -228,34 +228,33 @@ def write_index(whisper_dir=None, ceres_dir=None, index=None):
 def logtime(custom_msg=False, custom_name=False):
   def wrap(f):
     def wrapped_f(*args, **kwargs):
-      msg = 'completed in'
-      name = f.__module__ + '.' + f.__name__
+      logmsg = {
+        'msg': 'completed in',
+        'name': f.__module__ + '.' + f.__name__,
+      }
 
       t = time.time()
 
       if custom_msg:
         def set_msg(msg):
-          wrapped_f.msg = msg
+          logmsg['msg'] = msg
         kwargs['msg_setter'] = set_msg
 
       if custom_name:
         def set_name(name):
-          wrapped_f.name = name
+          logmsg['name'] = name
         kwargs['name_setter'] = set_name
 
       try:
         res = f(*args, **kwargs)
       except:
-        msg = 'failed in'
+        logmsg['msg'] = 'failed in'
         raise
       finally:
-        msg = getattr(wrapped_f, 'msg', msg)
-        name = getattr(wrapped_f, 'name', name)
-
         log.info(
           '{name} :: {msg} {sec:.6}s'.format(
-            name=logmsg['name'],
-            msg=logmsg['msg'],
+            name=logmsg['name'],
+            msg=logmsg['msg'],
             sec=time.time() - t,
           )
        )
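
For context, a minimal standalone sketch of the closure-over-dict pattern that the `webapp/graphite/util.py` hunk switches to, assuming a hypothetical decorated function `fetch` with a made-up `path_expr` argument; only `logtime`, `msg_setter`, and the log layout mirror the patch above, and this is not graphite's actual module:

```python
import logging
import time

log = logging.getLogger(__name__)


def logtime(custom_msg=False):
    # Per-call state lives in a dict closed over by both wrapped_f and
    # set_msg, so the callback can rewrite the message without rebinding
    # a local variable in the enclosing scope.
    def wrap(f):
        def wrapped_f(*args, **kwargs):
            logmsg = {
                'msg': 'completed in',
                'name': f.__module__ + '.' + f.__name__,
            }
            t = time.time()

            if custom_msg:
                def set_msg(msg):
                    logmsg['msg'] = msg
                kwargs['msg_setter'] = set_msg

            try:
                return f(*args, **kwargs)
            except Exception:
                logmsg['msg'] = 'failed in'
                raise
            finally:
                # Runs on both the success and the failure path.
                log.info('%s :: %s %.6fs', logmsg['name'], logmsg['msg'],
                         time.time() - t)
        return wrapped_f
    return wrap


@logtime(custom_msg=True)
def fetch(path_expr, msg_setter=None):
    # Hypothetical caller: override the default message, as fetchData does.
    msg_setter('lookup and merge of "%s" took' % path_expr)
    return []
```

Because every call to `wrapped_f` builds its own `logmsg` dict, a message set through `msg_setter` stays local to that call, whereas the old `wrapped_f.msg` / `wrapped_f.name` attributes were stored on the shared function object and could leak between invocations.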