Skip to content

Commit

Permalink
man I am finding a lot of tools that work with graphite today!
Browse files Browse the repository at this point in the history
  • Loading branch information
cdavis committed Apr 1, 2011
1 parent 9d11f12 commit e3a19d0
Show file tree
Hide file tree
Showing 5 changed files with 15 additions and 7 deletions.
2 changes: 1 addition & 1 deletion carbon/bin/carbon-cache.py
Expand Up @@ -62,7 +62,6 @@
from carbon.log import logToStdout, logToDir
from carbon.listeners import MetricLineReceiver, MetricPickleReceiver, CacheQueryHandler, startListener
from carbon.cache import MetricCache
from carbon.writer import startWriter
from carbon.instrumentation import startRecordingCacheMetrics
from carbon.events import metricReceived

Expand Down Expand Up @@ -196,6 +195,7 @@ def shutdown():
exchange_name=amqp_exchange_name,
verbose=amqp_verbose)

from carbon.writer import startWriter # have to import this *after* settings are defined
startWriter()
startRecordingCacheMetrics()

Expand Down
4 changes: 2 additions & 2 deletions carbon/lib/carbon/aggregator/client.py
Expand Up @@ -43,7 +43,7 @@ def flushQueue(self):
while (not self.paused) and self.queue:
datapoints = self.queue[:MAX_DATAPOINTS_PER_MESSAGE]
self.queue = self.factory.queue = self.queue[MAX_DATAPOINTS_PER_MESSAGE:]
self.sendString( pickle.dumps(datapoints) )
self.sendString( pickle.dumps(datapoints, protocol=-1) )

def send(self, metric, datapoint):
if self.paused:
Expand All @@ -55,7 +55,7 @@ def send(self, metric, datapoint):

else:
datapoints = [ (metric, datapoint) ]
self.sendString( pickle.dumps(datapoints) )
self.sendString( pickle.dumps(datapoints, protocol=-1) )


class MetricSenderFactory(ReconnectingClientFactory):
Expand Down
4 changes: 2 additions & 2 deletions carbon/lib/carbon/relay.py
Expand Up @@ -52,7 +52,7 @@ def flushQueue(self):
while (not self.paused) and self.queue:
datapoints = self.queue[:MAX_DATAPOINTS_PER_MESSAGE]
self.queue = self.factory.queue = self.queue[MAX_DATAPOINTS_PER_MESSAGE:]
self.sendString( pickle.dumps(datapoints) )
self.sendString( pickle.dumps(datapoints, protocol=-1) )
increment(self.sent, len(datapoints))

def send(self, metric, datapoint):
Expand All @@ -66,7 +66,7 @@ def send(self, metric, datapoint):

else:
datapoints = [ (metric, datapoint) ]
self.sendString( pickle.dumps(datapoints) )
self.sendString( pickle.dumps(datapoints, protocol=-1) )
increment(self.sent)


Expand Down
2 changes: 1 addition & 1 deletion carbon/lib/carbon/writer.py
Expand Up @@ -175,7 +175,7 @@ def reloadStorageSchemas():


schemaReloadTask = LoopingCall(reloadStorageSchemas)
schemas = []
schemas = loadStorageSchemas()


def startWriter():
Expand Down
10 changes: 9 additions & 1 deletion docs/tools.rst
Expand Up @@ -26,11 +26,19 @@ collectd
--------
`collectd`_ is a daemon which collects system performance statistics periodically and provides
mechanisms to store the values in a variety of ways, including RRD. Jordan Sissel of Loggly wrote
a neat tool (`https://github.com/loggly/collectd-to-graphite`_) that allows collectd to
a neat tool (https://github.com/loggly/collectd-to-graphite) that allows collectd to
send metrics to Graphite.


Logster
-------
`Logster`_ is a utility for reading log files and generating metrics in Graphite or Ganglia.
It is ideal for visualizing trends of events that are occurring in your application/system/error
logs. For example, you might use logster to graph the number of occurrences of each HTTP response
code that appears in your web server logs.

.. _jmxtrans: http://code.google.com/p/jmxtrans/
.. _statsd: https://github.com/etsy/statsd
.. _Ganglia: http://ganglia.info/
.. _collectd: http://collectd.org/
.. _Logster: https://github.com/etsy/logster

0 comments on commit e3a19d0

Please sign in to comment.