Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Merge remote-tracking branch 'upstream/master'

  • Loading branch information...
commit 68722ddf426a566c8451e7a65c6cd536fdaf26fa 2 parents c5333b9 + b9f8389
@captsens captsens authored
View
104 check-dependencies.py
@@ -10,31 +10,33 @@
print "You are using python %s, but version 2.4 or greater is required" % py_version
raise SystemExit(1)
-fatal = 0
-warning = 0
+required = 0
+optional = 0
# Test for whisper
try:
import whisper
except:
- print "[FATAL] Unable to import the 'whisper' module, please download this package from the Graphite project page and install it.\n"
- fatal += 1
+ # No? test for ceres
+ try:
+ import ceres
+
+ # We imported ceres, but not whisper so it's an optional dependency
+ print "[OPTIONAL] Unable to import the 'whisper' module. Without it the webapp will be unable to read .wsp files"
+ optional +=1
+ except:
+ print "[REQUIRED] Unable to import the 'whisper' or 'ceres' modules, please download this package from the Graphite project page and install it.\n"
+ required += 1
-# Test for ceres
-try:
- import ceres
-except:
- print "[FATAL] Unable to import the 'ceres' module, please download this package from the Graphite project page and install it.\n"
- fatal += 1
# Test for pycairo
try:
import cairo
except:
- print "[FATAL] Unable to import the 'cairo' module, do you have pycairo installed for python %s?\n" % py_version
+ print "[REQUIRED] Unable to import the 'cairo' module, do you have pycairo installed for python %s?\n" % py_version
cairo = None
- fatal += 1
+ required += 1
# Test that pycairo has the PNG backend
@@ -43,8 +45,8 @@
surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 10, 10)
del surface
except:
- print "[FATAL] Failed to create an ImageSurface with cairo, you probably need to recompile cairo with PNG support\n"
- fatal += 1
+ print "[REQUIRED] Failed to create an ImageSurface with cairo, you probably need to recompile cairo with PNG support\n"
+ required += 1
# Test that cairo can find fonts
try:
@@ -54,42 +56,46 @@
context.font_extents()
del surface, context
except:
- print "[FATAL] Failed to create text with cairo, this probably means cairo cant find any fonts. Install some system fonts and try again\n"
+ print "[REQUIRED] Failed to create text with cairo, this probably means cairo cant find any fonts. Install some system fonts and try again\n"
+
# Test for django
try:
import django
except:
- print "[FATAL] Unable to import the 'django' module, do you have Django installed for python %s?\n" % py_version
+ print "[REQUIRED] Unable to import the 'django' module, do you have Django installed for python %s?\n" % py_version
django = None
- fatal += 1
+ required += 1
+
# Test for pytz
try:
import pytz
except:
- print "[FATAL] Unable to import the 'pytz' module, do you have pytz module installed for python %s?\n" % py_version
- fatal += 1
+ print "[REQUIRED] Unable to import the 'pytz' module, do you have pytz module installed for python %s?\n" % py_version
+ required += 1
+
# Test for pyparsing
try:
import pyparsing
except:
- print "[FATAL] Unable to import the 'pyparsing' module, do you have pyparsing module installed for python %s?\n" % py_version
- fatal += 1
+ print "[REQUIRED] Unable to import the 'pyparsing' module, do you have pyparsing module installed for python %s?\n" % py_version
+ required += 1
+
# Test for django-tagging
try:
import tagging
except:
- print "[FATAL] Unable to import the 'tagging' module, do you have django-tagging installed for python %s?\n" % py_version
- fatal += 1
+ print "[REQUIRED] Unable to import the 'tagging' module, do you have django-tagging installed for python %s?\n" % py_version
+ required += 1
# Verify django version
if django and django.VERSION[:2] < (1,1):
- print "[FATAL] You have django version %s installed, but version 1.1 or greater is required\n" % django.get_version()
- fatal += 1
+ print "[REQUIRED] You have django version %s installed, but version 1.1 or greater is required\n" % django.get_version()
+ required += 1
# Test for a json module
@@ -99,24 +105,24 @@
try:
import simplejson
except ImportError:
- print "[FATAL] Unable to import either the 'json' or 'simplejson' module, at least one is required.\n"
- fatal += 1
+ print "[REQUIRED] Unable to import either the 'json' or 'simplejson' module, at least one is required.\n"
+ required += 1
# Test for zope.interface
try:
from zope.interface import Interface
except ImportError:
- print "[WARNING] Unable to import Interface from zope.interface. Without it, you will be unable to run carbon on this server.\n"
- warning +=1
+ print "[OPTIONAL] Unable to import Interface from zope.interface. Without it, you will be unable to run carbon on this server.\n"
+ optional +=1
# Test for python-memcached
try:
import memcache
except:
- print "[WARNING] Unable to import the 'memcache' module, do you have python-memcached installed for python %s? This feature is not required but greatly improves performance.\n" % py_version
- warning += 1
+ print "[OPTIONAL] Unable to import the 'memcache' module, do you have python-memcached installed for python %s? This feature is not required but greatly improves performance.\n" % py_version
+ optional += 1
# Test for sqlite
@@ -126,47 +132,55 @@
except:
from pysqlite2 import dbapi2 #python 2.4
except:
- print "[WARNING] Unable to import the sqlite module, do you have python-sqlite2 installed for python %s? If you plan on using another database backend that Django supports (such as mysql or postgres) then don't worry about this. However if you do not want to setup the database yourself, you will need to install sqlite2 and python-sqlite2.\n" % py_version
- warning += 1
+ print "[OPTIONAL] Unable to import the sqlite module, do you have python-sqlite2 installed for python %s? If you plan on using another database backend that Django supports (such as mysql or postgres) then don't worry about this. However if you do not want to setup the database yourself, you will need to install sqlite2 and python-sqlite2.\n" % py_version
+ optional += 1
# Test for python-ldap
try:
import ldap
except:
- print "[WARNING] Unable to import the 'ldap' module, do you have python-ldap installed for python %s? Without python-ldap, you will not be able to use LDAP authentication in the graphite webapp.\n" % py_version
- warning += 1
+ print "[OPTIONAL] Unable to import the 'ldap' module, do you have python-ldap installed for python %s? Without python-ldap, you will not be able to use LDAP authentication in the graphite webapp.\n" % py_version
+ optional += 1
# Test for Twisted python
try:
import twisted
except:
- print "[WARNING] Unable to import the 'twisted' package, do you have Twisted installed for python %s? Without Twisted, you cannot run carbon on this server.\n" % py_version
- warning += 1
+ print "[OPTIONAL] Unable to import the 'twisted' package, do you have Twisted installed for python %s? Without Twisted, you cannot run carbon on this server.\n" % py_version
+ optional += 1
else:
tv = []
tv = twisted.__version__.split('.')
if int(tv[0]) < 8 or (int(tv[0]) == 8 and int(tv[1]) < 2):
- print "[WARNING] Your version of Twisted is too old to run carbon. You will not be able to run carbon on this server until you upgrade Twisted >= 8.2.\n"
- warning += 1
+ print "[OPTIONAL] Your version of Twisted is too old to run carbon. You will not be able to run carbon on this server until you upgrade Twisted >= 8.2.\n"
+ optional += 1
+
# Test for txamqp
try:
import txamqp
except:
- print "[WARNING] Unable to import the 'txamqp' module, this is required if you want to use AMQP. Note that txamqp requires python 2.5 or greater.\n"
- warning += 1
+ print "[OPTIONAL] Unable to import the 'txamqp' module, this is required if you want to use AMQP as an input to Carbon. Note that txamqp requires python 2.5 or greater.\n"
+ optional += 1
+
+
+# Test for python-rrdtool
+try:
+ import rrdtool
+except:
+ print "[OPTIONAL] Unable to import the 'python-rrdtool' module, this is required for reading RRD.\n"
-if fatal:
- print "%d necessary dependencies not met. Graphite will not function until these dependencies are fulfilled." % fatal
+if required:
+ print "%d necessary dependencies not met. Graphite will not function until these dependencies are fulfilled." % required
else:
print "All necessary dependencies are met."
-if warning:
- print "%d optional dependencies not met. Please consider the warning messages before proceeding." % warning
+if optional:
+ print "%d optional dependencies not met. Please consider the optional items before proceeding." % optional
else:
print "All optional dependencies are met."
View
30 docs/tools.rst
@@ -68,6 +68,25 @@ GDash
`Gdash`_ is a simple Graphite dashboard built using Twitter's Bootstrap, driven by a small DSL.
+Giraffe
+--------
+`Giraffe`_ is a Graphite real-time dashboard based on `Rickshaw`_ and requires no server backend.
+Inspired by `Gdash`_, `Tasseo`_ and `Graphene`_ it mixes features from all three into a slightly
+different animal.
+
+Graphitus
+---------
+`graphitus`_ is a client side dashboard for graphite built using bootstrap and underscore.js.
+
+
+Graph-Explorer
+--------------
+`Graph-Explorer`_ is a graphite dashboard which uses plugins to add tags and metadata
+to metrics and a query language with lets you filter through them and
+compose/manipulate graphs on the fly. Also aims for high interactivity using
+`TimeseriesWidget`_ and minimal hassle to set up and get running.
+
+
Graphene
--------
`Graphene`_ is a Graphite dashboard toolkit based on `D3.js`_ and `Backbone.js`_ which was
@@ -201,10 +220,17 @@ Therry
------
`Therry`_ is a simple web service that caches Graphite metrics and exposes an endpoint for dumping or searching against them by substring.
+TimeseriesWidget
+----------------
+`TimeseriesWidget`_ adds timeseries graphs to your webpages/dashboards using a simple api,
+focuses on high interactivity and modern features (realtime zooming, datapoint inspection,
+annotated events, etc). Supports Graphite, flot, rickshaw and anthracite.
+
.. _Diamond: http://opensource.brightcove.com/project/Diamond/
.. _jmxtrans: http://code.google.com/p/jmxtrans/
.. _statsd: https://github.com/etsy/statsd
.. _Ganglia: http://ganglia.info/
+.. _graphitus: https://github.com/erezmazor/graphitus
.. _Backbone.js: http://documentcloud.github.com/backbone/
.. _collectd: http://collectd.org/
.. _collectd-to-graphite: https://github.com/loggly/collectd-to-graphite
@@ -225,6 +251,7 @@ Therry
.. _Graphiti: https://github.com/paperlesspost/graphiti
.. _Graphios: https://github.com/shawn-sterling/graphios
.. _Charcoal: https://github.com/cebailey59/charcoal
+.. _Graph-Explorer: https://github.com/Dieterbe/graph-explorer
.. _Graphitejs: https://github.com/prestontimmons/graphitejs
.. _Grockets: https://github.com/disqus/grockets
.. _Host sFlow: http://host-sflow.sourceforge.net/
@@ -239,4 +266,7 @@ Therry
.. _Seyren: https://github.com/scobal/seyren
.. _write-graphite: http://collectd.org/wiki/index.php/Plugin:Write_Graphite
.. _Therry: https://github.com/obfuscurity/therry
+.. _TimeseriesWidget: https://github.com/Dieterbe/timeserieswidget
.. _Graphsky: https://github.com/hyves-org/graphsky
+.. _Giraffe: http://kenhub.github.com/giraffe/
+.. _Rickshaw: http://code.shutterstock.com/rickshaw/
View
15 webapp/content/js/composer_widgets.js
@@ -941,8 +941,8 @@ function createFunctionsMenu() {
menu: [
{text: 'Remove Above Value', handler: applyFuncToEachWithInput('removeAboveValue', 'Set any values above ___ to None')},
{text: 'Remove Above Percentile', handler: applyFuncToEachWithInput('removeAbovePercentile', 'Set any values above the ___th percentile to None')},
- {text: 'Remove Below Value', handler: applyFuncToEachWithInput('removeAboveValue', 'Set any values above ___ to None')},
- {text: 'Remove Below Percentile', handler: applyFuncToEachWithInput('removeAbovePercentile', 'Set any values above the ___th percentile to None')}
+ {text: 'Remove Below Value', handler: applyFuncToEachWithInput('removeBelowValue', 'Set any values below ___ to None')},
+ {text: 'Remove Below Percentile', handler: applyFuncToEachWithInput('removeBelowPercentile', 'Set any values below the ___th percentile to None')}
]
},
{text: 'Most Deviant', handler: applyFuncToEachWithInput('mostDeviant', 'Draw the ___ metrics with the highest standard deviation')},
@@ -990,7 +990,7 @@ function createFunctionsMenu() {
{text: 'Draw non-zero As Infinite', handler: applyFuncToEach('drawAsInfinite')},
{text: 'Line Width', handler: applyFuncToEachWithInput('lineWidth', 'Please enter a line width for this graph target')},
{text: 'Dashed Line', handler: applyFuncToEach('dashed')},
- {text: 'Keep Last Value', handler: applyFuncToEach('keepLastValue')},
+ {text: 'Keep Last Value', handler: applyFuncToEachWithInput('keepLastValue', 'Please enter the maximum number of "None" datapoints to overwrite, or leave empty for no limit. (default: empty)', {allowBlank: true})},
{text: 'Transform Nulls', handler: applyFuncToEachWithInput('transformNull', 'Please enter the value to transform null values to')},
{text: 'Substring', handler: applyFuncToEachWithInput('substr', 'Enter a starting position')},
{text: 'Group', handler: applyFuncToAll('group')},
@@ -1049,7 +1049,7 @@ function createOptionsMenu() {
var yAxisLeftMenu = new Ext.menu.Menu({
items: [
- menuInputItem("Left Y Label", "vtitle"),
+ menuInputItem("Left Y Label", "vtitle", "Left Y Label", /^$/),
menuInputItem("Left Y Minimum", "yMinLeft"),
menuInputItem("Left Y Maximum", "yMaxLeft"),
menuInputItem("Left Y Limit", "yLimitLeft"),
@@ -1062,7 +1062,7 @@ function createOptionsMenu() {
});
var yAxisRightMenu = new Ext.menu.Menu({
items: [
- menuInputItem("Right Y Label", "vtitleRight"),
+ menuInputItem("Right Y Label", "vtitleRight", "Right Y Label", /^$/),
menuInputItem("Right Y Minimum", "yMinRight"),
menuInputItem("Right Y Maximum", "yMaxRight"),
menuInputItem("Right Y Limit", "yLimitRight"),
@@ -1083,7 +1083,7 @@ function createOptionsMenu() {
var yAxisMenu = new Ext.menu.Menu({
items: [
- menuInputItem("Label", "vtitle"),
+ menuInputItem("Label", "vtitle", "Y-Axis Label", /^$/),
menuInputItem("Minimum", "yMin"),
menuInputItem("Maximum", "yMax"),
menuInputItem("Minor Lines", "minorY", "Enter the number of minor lines to draw", /^[a-zA-Z]/),
@@ -1119,6 +1119,7 @@ function createOptionsMenu() {
menuRadioItem("line", "Slope Line (default)", "lineMode", ""),
menuRadioItem("line", "Staircase Line", "lineMode", "staircase"),
menuRadioItem("line", "Connected Line", "lineMode", "connected"),
+ menuInputItem("Connected Line Limit", "connectedLimit", "The number of consecutive None values to jump over when in connected line mode. (default: no limit, leave empty)"),
menuCheckItem("Draw Null as Zero", "drawNullAsZero")
]
});
@@ -1237,7 +1238,7 @@ function menuHelpItem(name, message) {
function paramPrompt(question, param, regexp) {
if(regexp == null) {
- regexp = /[^A-Za-z0-9_.]/;
+ regexp = /[^A-Za-z0-9_.\-]/;
}
return function (menuItem, e) {
View
12 webapp/graphite/events/views.py
@@ -60,8 +60,16 @@ def post_event(request):
return HttpResponse(status=405)
def get_data(request):
- return HttpResponse(json.dumps(fetch(request), cls=EventEncoder),
- mimetype="application/json")
+ if 'jsonp' in request.REQUEST:
+ response = HttpResponse(
+ "%s(%s)" % (request.REQUEST.get('jsonp'),
+ json.dumps(fetch(request), cls=EventEncoder)),
+ mimetype='text/javascript')
+ else:
+ response = HttpResponse(
+ json.dumps(fetch(request), cls=EventEncoder),
+ mimetype="application/json")
+ return response
def fetch(request):
if request.GET.get("from", None) is not None:
View
6 webapp/graphite/metrics/views.py
@@ -39,6 +39,12 @@ def index_json(request):
if fnmatch.fnmatch(basename, '*.wsp'):
matches.append(os.path.join(root, basename))
+ for root, dirs, files in os.walk(settings.CERES_DIR):
+ root = root.replace(settings.CERES_DIR, '')
+ for filename in files:
+ if filename == '.ceres-node':
+ matches.append(root)
+
matches = [ m.replace('.wsp','').replace('/', '.') for m in sorted(matches) ]
if jsonp:
return HttpResponse("%s(%s)" % (jsonp, json.dumps(matches)), mimetype='text/javascript')
View
4 webapp/graphite/render/attime.py
@@ -21,9 +21,7 @@
months = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
weekdays = ['sun','mon','tue','wed','thu','fri','sat']
-tzinfo = pytz.timezone(settings.TIME_ZONE)
-
-def parseATTime(s):
+def parseATTime(s, tzinfo=None):
s = s.strip().lower().replace('_','').replace(',','').replace(' ','')
if s.isdigit():
if len(s) == 8 and int(s[:4]) > 1900 and int(s[4:6]) < 13 and int(s[6:]) < 32:
View
3  webapp/graphite/render/evaluator.py
@@ -31,9 +31,10 @@ def evaluateTokens(requestContext, tokens):
elif tokens.number:
if tokens.number.integer:
return int(tokens.number.integer)
-
elif tokens.number.float:
return float(tokens.number.float)
+ elif tokens.number.scientific:
+ return float(tokens.number.scientific[0])
elif tokens.string:
return str(tokens.string)[1:-1]
View
215 webapp/graphite/render/functions.py
@@ -361,7 +361,7 @@ def percentileOfSeries(requestContext, seriesList, n, interpolate=False):
if n <= 0:
raise ValueError('The requested percent is required to be greater than 0')
- name = 'percentilesOfSeries(%s, %.1f)' % (seriesList[0].pathExpression, n)
+ name = 'percentilesOfSeries(%s,%g)' % (seriesList[0].pathExpression, n)
(start, end, step) = normalize([seriesList])[1:]
values = [ _getPercentile(row, n, interpolate) for row in izip(*seriesList) ]
resultSeries = TimeSeries(name, start, end, step, values)
@@ -369,9 +369,9 @@ def percentileOfSeries(requestContext, seriesList, n, interpolate=False):
return [resultSeries]
-def keepLastValue(requestContext, seriesList):
+def keepLastValue(requestContext, seriesList, limit = INF):
"""
- Takes one metric or a wildcard seriesList.
+ Takes one metric or a wildcard seriesList, and optionally a limit to the number of 'None' values to skip over.
Continues the line with the last received value when gaps ('None' values) appear in your data, rather than breaking your line.
Example:
@@ -379,15 +379,37 @@ def keepLastValue(requestContext, seriesList):
.. code-block:: none
&target=keepLastValue(Server01.connections.handled)
+ &target=keepLastValue(Server01.connections.handled, 10)
"""
for series in seriesList:
series.name = "keepLastValue(%s)" % (series.name)
series.pathExpression = series.name
+ consecutiveNones = 0
for i,value in enumerate(series):
- if value is None and i != 0:
- value = series[i-1]
series[i] = value
+
+ # No 'keeping' can be done on the first value because we have no idea
+ # what came before it.
+ if i == 0:
+ continue
+
+ if value is None:
+ consecutiveNones += 1
+ else:
+ if 0 < consecutiveNones <= limit:
+ # If a non-None value is seen before the limit of Nones is hit,
+ # backfill all the missing datapoints with the last known value.
+ for index in xrange(i - consecutiveNones, i):
+ series[index] = series[i - consecutiveNones - 1]
+
+ consecutiveNones = 0
+
+ # If the series ends with some None values, try to backfill a bit to cover it.
+ if 0 < consecutiveNones < limit:
+ for index in xrange(len(series) - consecutiveNones, len(series)):
+ series[index] = series[len(series) - consecutiveNones - 1]
+
return seriesList
def asPercent(requestContext, seriesList, total=None):
@@ -525,7 +547,15 @@ def movingMedian(requestContext, seriesList, windowSize):
delta = parseTimeOffset(windowSize)
windowInterval = abs(delta.seconds + (delta.days * 86400))
- for seriesIndex, series in enumerate(seriesList):
+ if windowInterval:
+ bootstrapSeconds = windowInterval
+ else:
+ bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)
+
+ bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
+ result = []
+
+ for bootstrap, series in zip(bootstrapList, seriesList):
if windowInterval:
windowPoints = windowInterval / series.step
else:
@@ -534,26 +564,22 @@ def movingMedian(requestContext, seriesList, windowSize):
if type(windowSize) is str:
newName = 'movingMedian(%s,"%s")' % (series.name, windowSize)
else:
- newName = "movingMedian(%s,%s)" % (series.name, windowSize)
+ newName = "movingMedian(%s,%d)" % (series.name, windowPoints)
newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
newSeries.pathExpression = newName
- bootstrap = series.step * windowPoints
- bootstrappedSeries = _fetchWithBootstrap(requestContext, series, seconds=bootstrap)
- windowIndex = windowPoints - 1
-
+ offset = len(bootstrap) - len(series)
for i in range(len(series)):
- window = bootstrappedSeries[i:i + windowIndex - 1]
+ window = bootstrap[i + offset - windowPoints:i + offset]
nonNull = [v for v in window if v is not None]
if nonNull:
m_index = len(nonNull) / 2
newSeries.append(sorted(nonNull)[m_index])
else:
newSeries.append(None)
+ result.append(newSeries)
- seriesList[seriesIndex] = newSeries
-
- return seriesList
+ return result
def scale(requestContext, seriesList, factor):
"""
@@ -569,7 +595,7 @@ def scale(requestContext, seriesList, factor):
"""
for series in seriesList:
- series.name = "scale(%s,%.1f)" % (series.name,float(factor))
+ series.name = "scale(%s,%g)" % (series.name,float(factor))
for i,value in enumerate(series):
series[i] = safeMul(value,factor)
return seriesList
@@ -638,7 +664,7 @@ def offset(requestContext, seriesList, factor):
"""
for series in seriesList:
- series.name = "offset(%s,%.1f)" % (series.name,float(factor))
+ series.name = "offset(%s,%g)" % (series.name,float(factor))
for i,value in enumerate(series):
if value is not None:
series[i] = value + factor
@@ -668,7 +694,15 @@ def movingAverage(requestContext, seriesList, windowSize):
delta = parseTimeOffset(windowSize)
windowInterval = abs(delta.seconds + (delta.days * 86400))
- for seriesIndex, series in enumerate(seriesList):
+ if windowInterval:
+ bootstrapSeconds = windowInterval
+ else:
+ bootstrapSeconds = max([s.step for s in seriesList]) * int(windowSize)
+
+ bootstrapList = _fetchWithBootstrap(requestContext, seriesList, seconds=bootstrapSeconds)
+ result = []
+
+ for bootstrap, series in zip(bootstrapList, seriesList):
if windowInterval:
windowPoints = windowInterval / series.step
else:
@@ -681,21 +715,14 @@ def movingAverage(requestContext, seriesList, windowSize):
newSeries = TimeSeries(newName, series.start, series.end, series.step, [])
newSeries.pathExpression = newName
- bootstrap = series.step * windowPoints
- bootstrappedSeries = _fetchWithBootstrap(requestContext, series, seconds=bootstrap)
- windowIndex = windowPoints - 1
-
+ offset = len(bootstrap) - len(series)
for i in range(len(series)):
- window = bootstrappedSeries[i:i + windowIndex]
- nonNull = [v for v in window if v is not None]
- if nonNull:
- newSeries.append(sum(nonNull) / len(nonNull))
- else:
- newSeries.append(None)
+ window = bootstrap[i + offset - windowPoints:i + offset]
+ newSeries.append(safeAvg(window))
- seriesList[ seriesIndex ] = newSeries
+ result.append(newSeries)
- return seriesList
+ return result
def cumulative(requestContext, seriesList, consolidationFunc='sum'):
"""
@@ -742,7 +769,10 @@ def consolidateBy(requestContext, seriesList, consolidationFunc):
def derivative(requestContext, seriesList):
"""
This is the opposite of the integral function. This is useful for taking a
- running total metric and showing how many requests per minute were handled.
+ running total metric and calculating the delta between subsequent data points.
+
+ This function does not normalize for periods of time, as a true derivative would.
+ Instead see the perSecond() function to calculate a rate of change over time.
Example:
@@ -859,7 +889,7 @@ def nonNegativeDerivative(requestContext, seriesList, maxValue=None):
.. code-block:: none
- &target=derivative(company.server.application01.ifconfig.TXPackets)
+ &target=nonNegativeDerivative(company.server.application01.ifconfig.TXPackets)
"""
results = []
@@ -1283,7 +1313,7 @@ def highestMax(requestContext, seriesList, n):
.. code-block:: none
- &target=highestCurrent(server*.instance*.threads.busy,5)
+ &target=highestMax(server*.instance*.threads.busy,5)
Draws the top 5 servers who have had the most busy threads during the time
period specified.
@@ -1321,7 +1351,7 @@ def currentAbove(requestContext, seriesList, n):
.. code-block:: none
- &target=highestAbove(server*.instance*.threads.busy,50)
+ &target=currentAbove(server*.instance*.threads.busy,50)
Draws the servers with more than 50 busy threads.
@@ -1457,8 +1487,8 @@ def nPercentile(requestContext, seriesList, n):
continue # Skip this series because it is empty.
perc_val = _getPercentile(s_copy, n)
- if perc_val:
- name = 'nPercentile(%.1f, %s)' % (n, s_copy.name)
+ if perc_val is not None:
+ name = 'nPercentile(%s, %g)' % (s_copy.name, n)
point_count = int((s.end - s.start)/s.step)
perc_series = TimeSeries(name, s_copy.start, s_copy.end, s_copy.step, [perc_val] * point_count )
perc_series.pathExpression = name
@@ -1468,7 +1498,7 @@ def nPercentile(requestContext, seriesList, n):
def removeAbovePercentile(requestContext, seriesList, n):
"""
Removes data above the nth percentile from the series or list of series provided.
- Values below this percentile are assigned a value of None.
+ Values above this percentile are assigned a value of None.
"""
for s in seriesList:
s.name = 'removeAbovePercentile(%s, %d)' % (s.name, n)
@@ -1482,7 +1512,7 @@ def removeAbovePercentile(requestContext, seriesList, n):
def removeAboveValue(requestContext, seriesList, n):
"""
Removes data above the given threshold from the series or list of series provided.
- Values below this threshole are assigned a value of None
+ Values above this threshold are assigned a value of None
"""
for s in seriesList:
s.name = 'removeAboveValue(%s, %d)' % (s.name, n)
@@ -1494,7 +1524,7 @@ def removeAboveValue(requestContext, seriesList, n):
def removeBelowPercentile(requestContext, seriesList, n):
"""
- Removes data above the nth percentile from the series or list of series provided.
+ Removes data below the nth percentile from the series or list of series provided.
Values below this percentile are assigned a value of None.
"""
for s in seriesList:
@@ -1508,7 +1538,7 @@ def removeBelowPercentile(requestContext, seriesList, n):
def removeBelowValue(requestContext, seriesList, n):
"""
- Removes data above the given threshold from the series or list of series provided.
+ Removes data below the given threshold from the series or list of series provided.
Values below this threshold are assigned a value of None
"""
for s in seriesList:
@@ -1651,8 +1681,8 @@ def stdev(requestContext, seriesList, points, windowTolerance=0.1):
# For this we take the standard deviation in terms of the moving average
# and the moving average of series squares.
for (seriesIndex,series) in enumerate(seriesList):
- stddevSeries = TimeSeries("stddev(%s,%.1f)" % (series.name, float(points)), series.start, series.end, series.step, [])
- stddevSeries.pathExpression = "stddev(%s,%.1f)" % (series.name, float(points))
+ stddevSeries = TimeSeries("stddev(%s,%d)" % (series.name, int(points)), series.start, series.end, series.step, [])
+ stddevSeries.pathExpression = "stddev(%s,%d)" % (series.name, int(points))
validPoints = 0
currentSum = 0
@@ -1706,25 +1736,39 @@ def secondYAxis(requestContext, seriesList):
series.name= 'secondYAxis(%s)' % series.name
return seriesList
-def _fetchWithBootstrap(requestContext, series, **delta_kwargs):
+def _fetchWithBootstrap(requestContext, seriesList, **delta_kwargs):
'Request the same data but with a bootstrap period at the beginning'
- previousContext = requestContext.copy()
- previousContext['startTime'] = requestContext['startTime'] - timedelta(**delta_kwargs)
- previousContext['endTime'] = requestContext['startTime']
- oldSeries = evaluateTarget(previousContext, series.pathExpression)[0]
-
- newValues = []
- if oldSeries.step != series.step:
- ratio = oldSeries.step / series.step
- for value in oldSeries:
- newValues.extend([ value ] * ratio)
- else:
- newValues.extend(oldSeries)
- newValues.extend(series)
+ bootstrapContext = requestContext.copy()
+ bootstrapContext['startTime'] = requestContext['startTime'] - timedelta(**delta_kwargs)
+ bootstrapContext['endTime'] = requestContext['startTime']
- newSeries = TimeSeries(series.name, oldSeries.start, series.end, series.step, newValues)
- newSeries.pathExpression = series.pathExpression
- return newSeries
+ bootstrapList = []
+ for series in seriesList:
+ if series.pathExpression in [ b.pathExpression for b in bootstrapList ]:
+ # This pathExpression returns multiple series and we already fetched it
+ continue
+ bootstraps = evaluateTarget(bootstrapContext, series.pathExpression)
+ bootstrapList.extend(bootstraps)
+
+ newSeriesList = []
+ for bootstrap, original in zip(bootstrapList, seriesList):
+ newValues = []
+ if bootstrap.step != original.step:
+ ratio = bootstrap.step / original.step
+ for value in bootstrap:
+ #XXX For series with aggregationMethod = sum this should also
+ # divide by the ratio to bring counts to the same time unit
+ # ...but we have no way of knowing whether that's the case
+ newValues.extend([ value ] * ratio)
+ else:
+ newValues.extend(bootstrap)
+ newValues.extend(original)
+
+ newSeries = TimeSeries(original.name, bootstrap.start, original.end, original.step, newValues)
+ newSeries.pathExpression = series.pathExpression
+ newSeriesList.append(newSeries)
+
+ return newSeriesList
def _trimBootstrap(bootstrap, original):
'Trim the bootstrap period off the front of this series so it matches the original'
@@ -1849,9 +1893,9 @@ def holtWintersForecast(requestContext, seriesList):
one week previous to the series is used to bootstrap the initial forecast.
"""
results = []
- for series in seriesList:
- withBootstrap = _fetchWithBootstrap(requestContext, series, days=7)
- analysis = holtWintersAnalysis(withBootstrap)
+ bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7)
+ for bootstrap, series in zip(bootstrapList, seriesList):
+ analysis = holtWintersAnalysis(bootstrap)
results.append(_trimBootstrap(analysis['predictions'], series))
return results
@@ -1861,8 +1905,8 @@ def holtWintersConfidenceBands(requestContext, seriesList, delta=3):
upper and lower bands with the predicted forecast deviations.
"""
results = []
- for series in seriesList:
- bootstrap = _fetchWithBootstrap(requestContext, series, days=7)
+ bootstrapList = _fetchWithBootstrap(requestContext, seriesList, days=7)
+ for bootstrap,series in zip(bootstrapList, seriesList):
analysis = holtWintersAnalysis(bootstrap)
forecast = _trimBootstrap(analysis['predictions'], series)
deviation = _trimBootstrap(analysis['deviations'], series)
@@ -1902,8 +1946,6 @@ def holtWintersAberration(requestContext, seriesList, delta=3):
results = []
for series in seriesList:
confidenceBands = holtWintersConfidenceBands(requestContext, [series], delta)
- bootstrapped = _fetchWithBootstrap(requestContext, series, days=7)
- series = _trimBootstrap(bootstrapped, series)
lowerBand = confidenceBands[0]
upperBand = confidenceBands[1]
aberration = list()
@@ -2016,7 +2058,7 @@ def timeStack(requestContext, seriesList, timeShiftUnit, timeShiftStart, timeShi
.. code-block:: none
- &target=timeShift(Sales.widgets.largeBlue,"1d",0,7) # create a series for today and each of the previous 7 days
+ &target=timeStack(Sales.widgets.largeBlue,"1d",0,7) # create a series for today and each of the previous 7 days
"""
# Default to negative. parseTimeOffset defaults to +
@@ -2154,13 +2196,41 @@ def transform(v):
else: return v
for series in seriesList:
- series.name = "transformNull(%s, %.2f)" % (series.name, default)
+ series.name = "transformNull(%s,%g)" % (series.name, default)
series.pathExpression = series.name
values = [transform(v) for v in series]
series.extend(values)
del series[:len(values)]
return seriesList
+
+def identity(requestContext, name):
+ """
+ Identity function:
+ Returns datapoints where the value equals the timestamp of the datapoint.
+ Useful when you have another series where the value is a timestamp, and
+ you want to compare it to the time of the datapoint, to render an age
+
+ Example:
+
+ .. code-block:: none
+
+ &target=identity("The.time.series")
+
+ This would create a series named "The.time.series" that contains points where
+ x(t) == t.
+ """
+ step = 60
+ delta = timedelta(seconds=step)
+ start = time.mktime(requestContext["startTime"].timetuple())
+ end = time.mktime(requestContext["endTime"].timetuple())
+ values = range(start, end, step)
+ series = TimeSeries(name, start, end, step, values)
+ series.pathExpression = 'identity("%s")' % name
+
+ return [series]
+
+
def countSeries(requestContext, *seriesLists):
"""
Draws a horizontal line representing the number of nodes found in the seriesList.
@@ -2516,11 +2586,13 @@ def timeFunction(requestContext, name):
values.append(time.mktime(when.timetuple()))
when += delta
- return [TimeSeries(name,
+ series = TimeSeries(name,
time.mktime(requestContext["startTime"].timetuple()),
time.mktime(requestContext["endTime"].timetuple()),
- step, values)]
+ step, values)
+ series.pathExpression = name
+ return [series]
def sinFunction(requestContext, name, amplitude=1):
"""
@@ -2716,7 +2788,7 @@ def pieMinimum(requestContext, series):
# Data Filter functions
'removeAbovePercentile' : removeAbovePercentile,
'removeAboveValue' : removeAboveValue,
- 'removeBelowPercentile' : removeAbovePercentile,
+ 'removeBelowPercentile' : removeBelowPercentile,
'removeBelowValue' : removeBelowValue,
# Special functions
@@ -2743,6 +2815,7 @@ def pieMinimum(requestContext, series):
'areaBetween' : areaBetween,
'threshold' : threshold,
'transformNull' : transformNull,
+ 'identity': identity,
# test functions
'time': timeFunction,
View
42 webapp/graphite/render/glyph.py
@@ -72,6 +72,16 @@
WEEK = DAY * 7
MONTH = DAY * 31
YEAR = DAY * 365
+
+# Set a flag to indicate whether the '%l' option can be used safely.
+# On Windows, in particular the %l option in strftime is not supported.
+# (It is not one of the documented Python formatters).
+try:
+ datetime.now().strftime("%a %l%p")
+ percent_l_supported = True
+except ValueError, e:
+ percent_l_supported = False
+
xAxisConfigs = (
dict(seconds=0.00, minorGridUnit=SEC, minorGridStep=5, majorGridUnit=MIN, majorGridStep=1, labelUnit=SEC, labelStep=5, format="%H:%M:%S", maxInterval=10*MIN),
dict(seconds=0.07, minorGridUnit=SEC, minorGridStep=10, majorGridUnit=MIN, majorGridStep=1, labelUnit=SEC, labelStep=10, format="%H:%M:%S", maxInterval=20*MIN),
@@ -84,8 +94,8 @@
dict(seconds=10, minorGridUnit=MIN, minorGridStep=5, majorGridUnit=MIN, majorGridStep=20, labelUnit=MIN, labelStep=20, format="%H:%M", maxInterval=1*DAY),
dict(seconds=30, minorGridUnit=MIN, minorGridStep=10, majorGridUnit=HOUR, majorGridStep=1, labelUnit=HOUR, labelStep=1, format="%H:%M", maxInterval=2*DAY),
dict(seconds=60, minorGridUnit=MIN, minorGridStep=30, majorGridUnit=HOUR, majorGridStep=2, labelUnit=HOUR, labelStep=2, format="%H:%M", maxInterval=2*DAY),
- dict(seconds=100, minorGridUnit=HOUR, minorGridStep=2, majorGridUnit=HOUR, majorGridStep=4, labelUnit=HOUR, labelStep=4, format="%a %l%p", maxInterval=6*DAY),
- dict(seconds=255, minorGridUnit=HOUR, minorGridStep=6, majorGridUnit=HOUR, majorGridStep=12, labelUnit=HOUR, labelStep=12, format="%m/%d %l%p"),
+ dict(seconds=100, minorGridUnit=HOUR, minorGridStep=2, majorGridUnit=HOUR, majorGridStep=4, labelUnit=HOUR, labelStep=4, format=percent_l_supported and "%a %l%p" or "%a %I%p", maxInterval=6*DAY),
+ dict(seconds=255, minorGridUnit=HOUR, minorGridStep=6, majorGridUnit=HOUR, majorGridStep=12, labelUnit=HOUR, labelStep=12, format=percent_l_supported and "%m/%d %l%p" or "%m/%d %I%p"),
dict(seconds=600, minorGridUnit=HOUR, minorGridStep=6, majorGridUnit=DAY, majorGridStep=1, labelUnit=DAY, labelStep=1, format="%m/%d", maxInterval=14*DAY),
dict(seconds=600, minorGridUnit=HOUR, minorGridStep=12, majorGridUnit=DAY, majorGridStep=1, labelUnit=DAY, labelStep=1, format="%m/%d", maxInterval=365*DAY),
dict(seconds=2000, minorGridUnit=DAY, minorGridStep=1, majorGridUnit=DAY, majorGridStep=2, labelUnit=DAY, labelStep=2, format="%m/%d", maxInterval=365*DAY),
@@ -170,7 +180,7 @@ def __init__(self,**params):
self.drawRectangle( 0, 0, self.width, self.height )
if 'colorList' in params:
- colorList = unquote_plus( params['colorList'] ).split(',')
+ colorList = unquote_plus( str(params['colorList']) ).split(',')
else:
colorList = self.defaultColorList
self.colors = itertools.cycle( colorList )
@@ -502,7 +512,8 @@ class LineGraph(Graph):
'yMaxRight', 'yLimitLeft', 'yLimitRight', 'yStepLeft', \
'yStepRight', 'rightWidth', 'rightColor', 'rightDashed', \
'leftWidth', 'leftColor', 'leftDashed', 'xFormat', 'minorY', \
- 'hideYAxis', 'uniqueLegend', 'vtitleRight', 'yDivisors')
+ 'hideYAxis', 'uniqueLegend', 'vtitleRight', 'yDivisors', \
+ 'connectedLimit')
validLineModes = ('staircase','slope','connected')
validAreaModes = ('none','first','all','stacked')
validPieModes = ('maximum', 'minimum', 'average')
@@ -577,6 +588,7 @@ def drawGraph(self,**params):
#Now to setup our LineGraph specific options
self.lineWidth = float( params.get('lineWidth', 1.2) )
self.lineMode = params.get('lineMode','slope').lower()
+ self.connectedLimit = params.get("connectedLimit", INFINITY)
assert self.lineMode in self.validLineModes, "Invalid line mode!"
self.areaMode = params.get('areaMode','none').lower()
assert self.areaMode in self.validAreaModes, "Invalid area mode!"
@@ -844,9 +856,10 @@ def drawLines(self, width=None, dash=None, linecap='butt', linejoin='miter'):
else:
self.setColor( series.color, series.options.get('alpha') or 1.0 )
- fromNone = True
+ # The number of preceding datapoints that had a None value.
+ consecutiveNones = 0
- for value in series:
+ for index, value in enumerate(series):
if value != value: # convert NaN to None
value = None
@@ -854,13 +867,13 @@ def drawLines(self, width=None, dash=None, linecap='butt', linejoin='miter'):
value = 0.0
if value is None:
- if not fromNone:
+ if consecutiveNones == 0:
self.ctx.line_to(x, y)
if 'stacked' in series.options: #Close off and fill area before unknown interval
self.fillAreaAndClip(x, y, startX)
x += series.xStep
- fromNone = True
+ consecutiveNones += 1
else:
if self.secondYAxis:
@@ -883,11 +896,11 @@ def drawLines(self, width=None, dash=None, linecap='butt', linejoin='miter'):
x += series.xStep
continue
- if fromNone:
+ if consecutiveNones > 0:
startX = x
if self.lineMode == 'staircase':
- if fromNone:
+ if consecutiveNones > 0:
self.ctx.move_to(x, y)
else:
self.ctx.line_to(x, y)
@@ -896,17 +909,22 @@ def drawLines(self, width=None, dash=None, linecap='butt', linejoin='miter'):
self.ctx.line_to(x, y)
elif self.lineMode == 'slope':
- if fromNone:
+ if consecutiveNones > 0:
self.ctx.move_to(x, y)
self.ctx.line_to(x, y)
x += series.xStep
elif self.lineMode == 'connected':
+ # If the gap is larger than the connectedLimit or if this is the
+ # first non-None datapoint in the series, start drawing from that datapoint.
+ if consecutiveNones > self.connectedLimit or consecutiveNones == index:
+ self.ctx.move_to(x, y)
+
self.ctx.line_to(x, y)
x += series.xStep
- fromNone = False
+ consecutiveNones = 0
if 'stacked' in series.options:
self.fillAreaAndClip(x-series.xStep, y, startX)
View
5 webapp/graphite/render/grammar.py
@@ -14,11 +14,16 @@
Optional('-') + Word(nums) + Literal('.') + Word(nums)
)('float')
+sciNumber = Combine(
+ (floatNumber | intNumber) + CaselessLiteral('e') + intNumber
+)('scientific')
+
aString = quotedString('string')
# Use lookahead to match only numbers in a list (can't remember why this is necessary)
afterNumber = FollowedBy(",") ^ FollowedBy(")") ^ FollowedBy(LineEnd())
number = Group(
+ (sciNumber + afterNumber) |
(floatNumber + afterNumber) |
(intNumber + afterNumber)
)('number')
View
28 webapp/graphite/render/views.py
@@ -12,7 +12,8 @@
See the License for the specific language governing permissions and
limitations under the License."""
import csv
-from time import time, strftime, localtime
+from datetime import datetime
+from time import time
from random import shuffle
from httplib import CannotSendRequest
from urllib import urlencode
@@ -24,6 +25,11 @@
except ImportError:
import pickle
+try: # See if there is a system installation of pytz first
+ import pytz
+except ImportError: # Otherwise we fall back to Graphite's bundled version
+ from graphite.thirdparty import pytz
+
from graphite.util import getProfileByUsername, json
from graphite.remote_storage import HTTPConnectionWithTimeout
from graphite.logger import log
@@ -118,8 +124,8 @@ def renderView(request):
for series in data:
for i, value in enumerate(series):
- timestamp = localtime( series.start + (i * series.step) )
- writer.writerow( (series.name, strftime("%Y-%m-%d %H:%M:%S", timestamp), value) )
+ timestamp = datetime.fromtimestamp(series.start + (i * series.step), requestOptions['tzinfo'])
+ writer.writerow((series.name, timestamp.strftime("%Y-%m-%d %H:%M:%S"), value))
return response
@@ -244,16 +250,24 @@ def parseOptions(request):
continue
graphOptions[opt] = val
+ tzinfo = pytz.timezone(settings.TIME_ZONE)
+ if 'tz' in queryParams:
+ try:
+ tzinfo = pytz.timezone(queryParams['tz'])
+ except pytz.UnknownTimeZoneError:
+ pass
+ requestOptions['tzinfo'] = tzinfo
+
# Get the time interval for time-oriented graph types
if graphType == 'line' or graphType == 'pie':
if 'until' in queryParams:
- untilTime = parseATTime( queryParams['until'] )
+ untilTime = parseATTime(queryParams['until'], tzinfo)
else:
- untilTime = parseATTime('now')
+ untilTime = parseATTime('now', tzinfo)
if 'from' in queryParams:
- fromTime = parseATTime( queryParams['from'] )
+ fromTime = parseATTime(queryParams['from'], tzinfo)
else:
- fromTime = parseATTime('-1d')
+ fromTime = parseATTime('-1d', tzinfo)
startTime = min(fromTime, untilTime)
endTime = max(fromTime, untilTime)
Please sign in to comment.
Something went wrong with that request. Please try again.