import datetime
import time
import logging
import os
import pickle
import re
import simplejson
import StringIO
from types import GeneratorType
import zlib

from google.appengine.ext.webapp import template, RequestHandler
from google.appengine.api import memcache

import unformatter
from pprint import pformat
import cleanup
import cookies

try:
    import json
except ImportError:
    import simplejson as json
from gae_mini_profiler.config import _config

if os.environ["SERVER_SOFTWARE"].startswith("Devel"):
    config = _config.ConfigDevelopment
else:
    config = _config.ConfigProduction
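# Both config classes are expected to expose should_profile(environ) -> bool
# (see its use in ProfilerWSGIMiddleware.__call__ below). A minimal sketch of a
# compatible config class, assuming only that interface:
#
#   class ConfigDevelopment(object):
#       @staticmethod
#       def should_profile(environ):
#           return True  # profile every request on the dev server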
# request_id is a per-request identifier accessed by a couple other pieces of gae_mini_profiler
request_id = None
class SharedStatsHandler(RequestHandler):

    def get(self):
        path = os.path.join(os.path.dirname(__file__), "templates/shared.html")

        request_id = self.request.get("request_id")
        if not RequestStats.get(request_id):
            self.response.out.write("Profiler stats no longer exist for this request.")
            return

        self.response.out.write(
            template.render(path, {"request_id": request_id}))
class RequestStatsHandler(RequestHandler):

    def get(self):
        self.response.headers["Content-Type"] = "application/json"

        list_request_ids = []
        request_ids = self.request.get("request_ids")
        if request_ids:
            list_request_ids = request_ids.split(",")

        list_request_stats = []
        for request_id in list_request_ids:
            request_stats = RequestStats.get(request_id)

            if request_stats and not request_stats.disabled:
                dict_request_stats = {}
                for property in RequestStats.serialized_properties:
                    dict_request_stats[property] = request_stats.__getattribute__(property)

                list_request_stats.append(dict_request_stats)

                # Don't show temporary redirect profiles more than once automatically, as they are
                # tied to URL params and may be copied around easily.
                if request_stats.temporary_redirect:
                    request_stats.disabled = True

        self.response.out.write(json.dumps(list_request_stats))
class RequestStats(object):

    serialized_properties = ["request_id", "url", "url_short", "s_dt",
                             "profiler_results", "appstats_results", "simple_timing",
                             "temporary_redirect", "logs"]

    def __init__(self, request_id, environ, middleware):
        self.request_id = request_id

        self.url = environ.get("PATH_INFO")
        if environ.get("QUERY_STRING"):
            self.url += "?%s" % environ.get("QUERY_STRING")

        self.url_short = self.url
        if len(self.url_short) > 26:
            self.url_short = self.url_short[:26] + "..."

        self.simple_timing = middleware.simple_timing
        self.s_dt ="%Y-%m-%d %H:%M:%S")

        self.profiler_results = RequestStats.calc_profiler_results(middleware)
        self.appstats_results = RequestStats.calc_appstats_results(middleware)
        self.logs = middleware.logs

        self.temporary_redirect = middleware.temporary_redirect
        self.disabled = False
    def store(self):
        # Store compressed results so we stay under the memcache 1MB limit
        pickled = pickle.dumps(self)
        compressed_pickled = zlib.compress(pickled)
        return memcache.set(RequestStats.memcache_key(self.request_id), compressed_pickled)
    @staticmethod
    def get(request_id):
        if request_id:
            compressed_pickled = memcache.get(RequestStats.memcache_key(request_id))
            if compressed_pickled:
                pickled = zlib.decompress(compressed_pickled)
                return pickle.loads(pickled)
        return None

    @staticmethod
    def memcache_key(request_id):
        if not request_id:
            return None
        return "__gae_mini_profiler_request_%s" % request_id
    @staticmethod
    def seconds_fmt(f):
        return RequestStats.milliseconds_fmt(f * 1000)

    @staticmethod
    def milliseconds_fmt(f):
        return ("%.5f" % f).rstrip("0").rstrip(".")
    @staticmethod
    def short_method_fmt(s):
        return s[s.rfind("/") + 1:]

    @staticmethod
    def short_rpc_file_fmt(s):
        if not s:
            return ""
        return s[s.find("/"):]
    @staticmethod
    def calc_profiler_results(middleware):
        if middleware.simple_timing:
            return {
                "total_time": RequestStats.seconds_fmt(middleware.end - middleware.start),
            }

        import pstats

        # Make sure nothing is printed to stdout
        output = StringIO.StringIO()
        stats = pstats.Stats(, stream=output)

        results = {
            "total_call_count": stats.total_calls,
            "total_time": RequestStats.seconds_fmt(stats.total_tt),
            "calls": []
        }

        width, list_func_names = stats.get_print_list([80])
        for func_name in list_func_names:
            primitive_call_count, total_call_count, total_time, cumulative_time, callers = stats.stats[func_name]

            func_desc = pstats.func_std_string(func_name)

            callers_names = map(lambda func_name: pstats.func_std_string(func_name), callers.keys())
            callers_desc = map(
                lambda name: {"func_desc": name, "func_desc_short": RequestStats.short_method_fmt(name)},
                callers_names)

            results["calls"].append({
                "primitive_call_count": primitive_call_count,
                "total_call_count": total_call_count,
                "total_time": RequestStats.seconds_fmt(total_time),
                "per_call": RequestStats.seconds_fmt(total_time / total_call_count) if total_call_count else "",
                "cumulative_time": RequestStats.seconds_fmt(cumulative_time),
                "per_call_cumulative": RequestStats.seconds_fmt(cumulative_time / primitive_call_count) if primitive_call_count else "",
                "func_desc": func_desc,
                "func_desc_short": RequestStats.short_method_fmt(func_desc),
                "callers_desc": callers_desc,
            })

        return results
    @staticmethod
    def calc_appstats_results(middleware):
        if middleware.recorder:

            total_call_count = 0
            total_time = 0
            calls = []
            service_totals_dict = {}
            likely_dupes = False
            end_offset_last = 0

            requests_set = set()

            appstats_key = long(middleware.recorder.start_timestamp * 1000)

            for trace in middleware.recorder.traces:
                total_call_count += 1
                total_time += trace.duration_milliseconds()

                # Don't accumulate total RPC time for traces that overlap asynchronously
                if trace.start_offset_milliseconds() < end_offset_last:
                    total_time -= (end_offset_last - trace.start_offset_milliseconds())
                end_offset_last = trace.start_offset_milliseconds() + trace.duration_milliseconds()

                service_prefix = trace.service_call_name()
                if "." in service_prefix:
                    service_prefix = service_prefix[:service_prefix.find(".")]

                if service_prefix not in service_totals_dict:
                    service_totals_dict[service_prefix] = {
                        "total_call_count": 0,
                        "total_time": 0,
                        "total_misses": 0,
                    }

                service_totals_dict[service_prefix]["total_call_count"] += 1
                service_totals_dict[service_prefix]["total_time"] += trace.duration_milliseconds()

                stack_frames_desc = []
                for frame in trace.call_stack_:
                    stack_frames_desc.append("%s:%s %s" %
                            (RequestStats.short_rpc_file_fmt(frame.class_or_file_name()),
                             frame.line_number(),
                             frame.function_name()))

                request = trace.request_data_summary()
                response = trace.response_data_summary()

                likely_dupe = request in requests_set
                likely_dupes = likely_dupes or likely_dupe
                requests_set.add(request)

                request_short = request_pretty = None
                response_short = response_pretty = None
                miss = 0
                try:
                    request_object = unformatter.unformat(request)
                    response_object = unformatter.unformat(response)

                    request_short, response_short, miss = cleanup.cleanup(request_object, response_object)

                    request_pretty = pformat(request_object)
                    response_pretty = pformat(response_object)
                except Exception, e:
                    logging.warning("Prettifying RPC calls failed.\n%s", e)

                service_totals_dict[service_prefix]["total_misses"] += miss

                calls.append({
                    "service": trace.service_call_name(),
                    "start_offset": RequestStats.milliseconds_fmt(trace.start_offset_milliseconds()),
                    "total_time": RequestStats.milliseconds_fmt(trace.duration_milliseconds()),
                    "request": request_pretty or request,
                    "response": response_pretty or response,
                    "request_short": request_short or cleanup.truncate(request),
                    "response_short": response_short or cleanup.truncate(response),
                    "stack_frames_desc": stack_frames_desc,
                    "likely_dupe": likely_dupe,
                })

            service_totals = []
            for service_prefix in service_totals_dict:
                service_totals.append({
                    "service_prefix": service_prefix,
                    "total_call_count": service_totals_dict[service_prefix]["total_call_count"],
                    "total_misses": service_totals_dict[service_prefix]["total_misses"],
                    "total_time": RequestStats.milliseconds_fmt(service_totals_dict[service_prefix]["total_time"]),
                })
            service_totals = sorted(service_totals, reverse=True, key=lambda service_total: float(service_total["total_time"]))

            return {
                "total_call_count": total_call_count,
                "total_time": RequestStats.milliseconds_fmt(total_time),
                "calls": calls,
                "service_totals": service_totals,
                "likely_dupes": likely_dupes,
                "appstats_key": appstats_key,
            }

        return None
class ProfilerWSGIMiddleware(object):

    def __init__(self, app):
        template.register_template_library('gae_mini_profiler.templatetags') = app
        self.app_clean = app = None
        self.recorder = None
        self.temporary_redirect = False
        self.handler = None
        self.logs = None
        self.simple_timing = False
        self.start = None
        self.end = None
    def __call__(self, environ, start_response):

        global request_id
        request_id = None

        # Start w/ a non-profiled app at the beginning of each request = self.app_clean = None
        self.recorder = None
        self.temporary_redirect = False
        self.simple_timing = cookies.get_cookie_value("g-m-p-disabled") == "1"

        # Never profile calls to the profiler itself to avoid endless recursion.
        if config.should_profile(environ) and not environ.get("PATH_INFO", "").startswith("/gae_mini_profiler/"):

            # Set a random ID for this request so we can look up stats later
            import base64
            request_id = base64.urlsafe_b64encode(os.urandom(5))

            # Send request id in headers so jQuery ajax calls can pick
            # up profiles.
            def profiled_start_response(status, headers, exc_info=None):

                if status.startswith("302 "):
                    # Temporary redirect. Add request identifier to redirect location
                    # so next rendered page can show this request's profile.
                    headers = ProfilerWSGIMiddleware.headers_with_modified_redirect(environ, headers)
                    self.temporary_redirect = True

                # Append headers used when displaying profiler results from ajax requests
                headers.append(("X-MiniProfiler-Id", request_id))
                headers.append(("X-MiniProfiler-QS", environ.get("QUERY_STRING")))

                return start_response(status, headers, exc_info)

            if self.simple_timing:
                # Detailed recording is disabled. Just track simple start/stop time.
                self.start = time.clock()

                result =, profiled_start_response)
                for value in result:
                    yield value

                self.end = time.clock()

            else:
                # Add logging handler
                self.add_handler()

                # Monkey patch appstats.formatting to fix string quoting bug
                # See
                import unformatter.formatting
                import google.appengine.ext.appstats.formatting
                google.appengine.ext.appstats.formatting._format_value = unformatter.formatting._format_value

                # Configure AppStats output, keeping a high level of request
                # content so we can detect dupe RPCs more accurately
                from google.appengine.ext.appstats import recording
                recording.config.MAX_REPR = 750

                # Turn on AppStats monitoring for this request
                old_app =
                def wrapped_appstats_app(environ, start_response):
                    # Use this wrapper to grab the app stats recorder for
                    if hasattr(recording.recorder, "get_for_current_request"):
                        self.recorder = recording.recorder.get_for_current_request()
                    else:
                        self.recorder = recording.recorder
                    return old_app(environ, start_response)
       = recording.appstats_wsgi_middleware(wrapped_appstats_app)

                # Turn on cProfile profiling for this request
                import cProfile
       = cProfile.Profile()

                # Get profiled wsgi result
                result = *args, **kwargs:, profiled_start_response), None, None)

                self.recorder = recording.recorder

                # If we're dealing w/ a generator, profile all of the .next calls as well
                if type(result) == GeneratorType:
                    while True:
                        try:
                            yield *args, **kwargs:, None, None)
                        except StopIteration:
                            break
                else:
                    for value in result:
                        yield value

                self.logs = self.get_logs(self.handler)
                logging.getLogger().removeHandler(self.handler)
                self.handler = None

            # Store stats for later access
            RequestStats(request_id, environ, self).store()

            # Just in case we're using up memory in the recorder and profiler
            self.recorder = None
   = None
            request_id = None

        else:
            result =, start_response)
            for value in result:
                yield value
    def add_handler(self):
        if self.handler is None:
            self.handler = ProfilerWSGIMiddleware.create_handler()

    @staticmethod
    def create_handler():
        handler = logging.StreamHandler(StringIO.StringIO())
        handler.setLevel(logging.DEBUG)
        # Tab-separated fields with the message last; get_logs() relies on this.
        formatter = logging.Formatter("\t".join([
            '%(levelno)s', '%(asctime)s%(msecs)d', '%(funcName)s',
            '%(filename)s', '%(lineno)d', '%(message)s',
        ]), '%M:%S.')
        handler.setFormatter(formatter)
        logging.getLogger().addHandler(handler)
        return handler
    @staticmethod
    def get_logs(handler):
        raw_lines = [l for l in"\n") if l]

        lines = []
        for line in raw_lines:
            if "\t" in line:
                fields = line.split("\t")
                lines.append(fields)
            else:  # line is part of a multiline log message (prob a traceback)
                prevline = lines[-1][-1]
                if prevline:  # ignore leading blank lines in the message
                    prevline += "\n"
                prevline += line
                lines[-1][-1] = prevline

        return lines
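    # For example, assuming the tab-separated record layout sketched in
    # create_handler() above, the two raw lines
    #     "10\t00:01.500\tget\\t42\tTraceback (most recent call last):"
    #     "KeyError: 'x'"
    # parse into a single entry whose last field holds the multiline message:
    #     [['10', '00:01.500', 'get', '', '42',
    #       "Traceback (most recent call last):\nKeyError: 'x'"]]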
    @staticmethod
    def headers_with_modified_redirect(environ, headers):
        headers_modified = []

        for header in headers:
            if header[0] == "Location":
                reg = re.compile("mp-r-id=([^&]+)")

                # Keep any chain of redirects around
                request_id_chain = request_id
                match ="QUERY_STRING"))
                if match:
                    request_id_chain = ",".join([match.groups()[0], request_id])

                # Remove any pre-existing miniprofiler redirect id
                location = header[1]
                location = reg.sub("", location)

                # Add current request id as miniprofiler redirect id
                location += ("&" if "?" in location else "?")
                location = location.replace("&&", "&")
                location += "mp-r-id=%s" % request_id_chain

                headers_modified.append((header[0], location))
            else:
                headers_modified.append(header)

        return headers_modified
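# Usage sketch (not part of this module): the middleware is typically installed
# from appengine_config.py via App Engine's WSGI middleware hook. A minimal
# example, assuming this package is importable as gae_mini_profiler:
#
#   def webapp_add_wsgi_middleware(app):
#       from gae_mini_profiler import profiler
#       return profiler.ProfilerWSGIMiddleware(app)
#
# With the package's handlers routed under /gae_mini_profiler/ (consistent with
# the recursion guard in __call__), RequestStatsHandler serves the stored JSON
# stats and SharedStatsHandler renders a shareable results page.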