Commit

fix issue in logtime, more streamlining

DanCech committed Nov 3, 2017
1 parent 36d196d commit f68247e
Showing 3 changed files with 23 additions and 21 deletions.
2 changes: 2 additions & 0 deletions webapp/graphite/readers/utils.py
@@ -5,6 +5,8 @@
 
 from django.conf import settings
 
+from graphite.logger import log
+
 
 class BaseReader(object):
   __metaclass__ = abc.ABCMeta
23 changes: 12 additions & 11 deletions webapp/graphite/render/datalib.py
@@ -15,6 +15,7 @@
 
 import collections
 import re
+import time
 
 from traceback import format_exc
 
@@ -151,30 +152,27 @@ def copy(self, name=None, start=None, end=None, step=None, values=None, consolid
 # Data retrieval API
 @logtime(custom_msg=True)
 def fetchData(requestContext, pathExpr, msg_setter=None):
-  msg_setter("retrieval of \"%s\" took" % str(pathExpr))
+  msg_setter("lookup and merge of \"%s\" took" % str(pathExpr))
 
   seriesList = {}
   (startTime, endTime, now) = timebounds(requestContext)
 
-  result_queue = []
-
-  prefetched = requestContext.get('prefetched', {}).get((startTime, endTime, now), None)
-  if prefetched is not None:
-    for result in prefetched[pathExpr]:
-      result_queue.append(result)
+  prefetched = requestContext.get('prefetched', {}).get((startTime, endTime, now), {}).get(pathExpr)
+  if not prefetched:
+    return []
 
-  return _merge_results(pathExpr, startTime, endTime, result_queue, seriesList, requestContext)
+  return _merge_results(pathExpr, startTime, endTime, prefetched, seriesList, requestContext)
 
 
-def _merge_results(pathExpr, startTime, endTime, result_queue, seriesList, requestContext):
+def _merge_results(pathExpr, startTime, endTime, prefetched, seriesList, requestContext):
   log.debug("render.datalib.fetchData :: starting to merge")
 
   # Used as a cache to avoid recounting series None values below.
   series_best_nones = {}
 
   errors = []
 
-  for path, results in result_queue:
+  for path, results in prefetched:
     if not results:
       log.debug("render.datalib.fetchData :: no results for %s.fetch(%s, %s)" % (path, startTime, endTime))
       continue
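
Note on the fetchData change above: the streamlined lookup assumes requestContext['prefetched'] is a dict keyed by the (startTime, endTime, now) time bounds, whose values map each path expression to an iterable of (path, results) pairs that _merge_results can iterate directly. A minimal standalone sketch of that shape, with made-up metric names and values:

  # Sketch of the cache layout implied by the diff above; the sample data is hypothetical.
  timebounds = (1509667200, 1509670800, 1509670800)  # (startTime, endTime, now)
  path_expr = 'collectd.*.load.load.shortterm'

  request_context = {
    'prefetched': {
      timebounds: {
        # each path expression maps to a list of (path, results) pairs
        path_expr: [
          ('collectd.host1.load.load.shortterm', [0.1, 0.2, None]),
          ('collectd.host2.load.load.shortterm', [0.3, None, 0.4]),
        ],
      },
    },
  }

  # Equivalent of the new lookup: missing bounds or an unknown expression
  # both fall through to the early "return []" path.
  prefetched = request_context.get('prefetched', {}).get(timebounds, {}).get(path_expr)
  if not prefetched:
    print('nothing prefetched')
  else:
    for path, results in prefetched:  # what _merge_results walks
      print(path, results)

Dropping the intermediate result_queue and returning [] as soon as nothing was prefetched is the "more streamlining" half of the commit message.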
@@ -269,7 +267,8 @@ def prefetchData(requestContext, targets):
   if not pathExpressions:
     return
 
-  log.rendering("Fetching data for [%s]" % (', '.join(pathExpressions)))
+  start = time.time()
+  log.debug("Fetching data for [%s]" % (', '.join(pathExpressions)))
 
   (startTime, endTime, now) = timebounds(requestContext)
 
@@ -305,3 +304,5 @@ def prefetchData(requestContext, targets):
     requestContext['prefetched'] = {}
 
   requestContext['prefetched'][(startTime, endTime, now)] = prefetched
+
+  log.rendering("Fetched data for [%s] in %fs" % (', '.join(pathExpressions), time.time() - start))
19 changes: 9 additions & 10 deletions webapp/graphite/util.py
@@ -228,34 +228,33 @@ def write_index(whisper_dir=None, ceres_dir=None, index=None):
 def logtime(custom_msg=False, custom_name=False):
   def wrap(f):
     def wrapped_f(*args, **kwargs):
-      msg = 'completed in'
-      name = f.__module__ + '.' + f.__name__
+      logmsg = {
+        'msg': 'completed in',
+        'name': f.__module__ + '.' + f.__name__,
+      }
 
       t = time.time()
       if custom_msg:
         def set_msg(msg):
-          wrapped_f.msg = msg
+          logmsg['msg'] = msg
 
         kwargs['msg_setter'] = set_msg
       if custom_name:
         def set_name(name):
-          wrapped_f.name = name
+          logmsg['name'] = name
 
         kwargs['name_setter'] = set_name
 
       try:
         res = f(*args, **kwargs)
       except:
-        msg = 'failed in'
+        logmsg['msg'] = 'failed in'
         raise
       finally:
-        msg = getattr(wrapped_f, 'msg', msg)
-        name = getattr(wrapped_f, 'name', name)
-
         log.info(
           '{name} :: {msg} {sec:.6}s'.format(
-            name=name,
-            msg=msg,
+            name=logmsg['name'],
+            msg=logmsg['msg'],
             sec=time.time() - t,
           )
         )
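
The util.py hunk above is the "fix issue in logtime" part of the commit: the old decorator stored the custom message and name as attributes on wrapped_f, a single function object shared by every call, so a value set during one request could persist into, or be clobbered by, another before the finally block read it back with getattr — which appears to be the issue the commit message refers to. Keeping the state in a per-call logmsg dict avoids that, and the nested setters can simply mutate the dict, which works without nonlocal even on Python 2. A standalone sketch of the fixed pattern, with print standing in for log.info:

  import time

  def logtime(custom_msg=False):
    # Per-call state lives in a dict that the nested setter mutates,
    # rather than in attributes on the shared wrapper function.
    def wrap(f):
      def wrapped_f(*args, **kwargs):
        logmsg = {'msg': 'completed in', 'name': f.__module__ + '.' + f.__name__}
        t = time.time()
        if custom_msg:
          def set_msg(msg):
            logmsg['msg'] = msg
          kwargs['msg_setter'] = set_msg
        try:
          return f(*args, **kwargs)
        except Exception:
          logmsg['msg'] = 'failed in'
          raise
        finally:
          print('%s :: %s %.6fs' % (logmsg['name'], logmsg['msg'], time.time() - t))
      return wrapped_f
    return wrap

  @logtime(custom_msg=True)
  def fetch(path, msg_setter=None):
    msg_setter('lookup and merge of "%s" took' % path)
    return []

  fetch('collectd.*.load.load.shortterm')

Each invocation gets its own logmsg, so the message set by one call can never leak into the log line of another.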
