Try out utilization grapher.

commit 571196d44435dde45c896e641a41bfaa97f3a0b1 1 parent b57b4d8
@spladug authored
Showing with 104 additions and 0 deletions.
  1. +34 −0 alerts.py
  2. +70 −0 utilization.py
34 alerts.py
@@ -6,12 +6,14 @@
import os
import sys
import time
+import socket
import wessex
__all__ = ["harold", "config"]
harold = None
+graphite = None
config = None
def init(config_path='production.ini'):
@@ -19,6 +21,8 @@ def init(config_path='production.ini'):
    config = load_config(path=config_path)
    if config.has_section('logging'):
        configure_logging(config)
+    if config.has_section('graphite'):
+        configure_graphite(config)
    harold = get_harold(config)
def load_config(path='production.ini'):
@@ -74,3 +78,33 @@ def configure_logging(config):
    logger.setLevel(_get_logging_level(config))
    logger.addHandler(ch)
    return logger
+
+def _parse_addr(addr):
+    # split "host:port" into the (host, int(port)) pair socket.connect wants
+    host, port_str = addr.split(':', 1)
+    return host, int(port_str)
+
+
+def configure_graphite(config):
+    global graphite
+
+    class Graphite(object):
+        def __init__(self, address):
+            self.address = address
+
+        def _send_message(self, msg):
+            # one short-lived TCP connection per batch of metrics
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.connect(self.address)
+            # sendall, not send: send() may write only part of the buffer
+            sock.sendall(msg + '\n')
+            sock.close()
+
+        def send_values(self, items):
+            # carbon's plaintext protocol: one "key value timestamp" per line
+            messages = []
+            timestamp = str(time.time())
+            for key, value in items.iteritems():
+                messages.append(" ".join((key, str(value), timestamp)))
+            if messages:
+                self._send_message("\n".join(messages))
+
+    address_text = config.get('graphite', 'address')
+    address = _parse_addr(address_text)
+    graphite = Graphite(address)
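The Graphite helper above speaks carbon's plaintext protocol: one "metric.path value unix-timestamp" line per metric, newline-terminated, over a short-lived TCP connection. A minimal sketch of calling it, assuming a production.ini that carries a [graphite] section such as address = graphite.local:2003 (host and port are placeholders) alongside the sections init() already expects:

import alerts

alerts.init()  # loads production.ini; runs configure_graphite() if [graphite] exists
if alerts.graphite is not None:
    # metric names and values here are invented, purely for illustration
    alerts.graphite.send_values({
        "stats.example.current": 12,
        "stats.example.capacity": 100,
    })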
70 utilization.py
@@ -0,0 +1,70 @@
+import csv
+import time
+import urllib2
+import collections
+
+import alerts
+
+
+def fetch_session_counts(haproxy_stats_urls):
+    current = collections.Counter()
+    limit = collections.Counter()
+
+    for url in haproxy_stats_urls:
+        csv_data = urllib2.urlopen(url, timeout=3)
+        reader = csv.reader(csv_data)
+
+        for i, row in enumerate(reader):
+            # the first row of haproxy's stats export is the column header
+            if i == 0:
+                continue
+
+            # columns: 0=pxname, 1=svname, 4=scur, 6=slim, 17=status
+            proxy_name, service_name = row[0], row[1]
+            s_cur, s_lim, status = row[4], row[6], row[17]
+
+            # skip the per-proxy aggregate rows; sum individual servers
+            if service_name in ("FRONTEND", "BACKEND"):
+                continue
+
+            if status != "UP":
+                continue
+
+            current[proxy_name] += int(s_cur)
+            limit[proxy_name] += int(s_lim)
+
+    # report pools ordered by total capacity, largest first
+    ret = []
+    for pool, pool_limit in limit.most_common():
+        ret.append((pool, current[pool], pool_limit))
+    return ret
+
+
+def notify_graphite(usage):
+    values = {}
+    for pool, cur, limit in usage:
+        values["stats.utilization.%s.current" % pool] = cur
+        values["stats.utilization.%s.capacity" % pool] = limit
+    alerts.graphite.send_values(values)
+
+
+def pretty_print(usage):
+    print "%20s%20s%10s" % ("", "sessions", "")
+    print "%20s%10s%10s%10s" % ("pool", "cur", "max", "% util")
+    print "-" * 50
+    for pool, cur, limit in usage:
+        print "%20s%10d%10d%10.2f" % (pool, cur, limit,
+                                      float(cur) / limit * 100.0)
+
+
+def main():
+    alerts.init()
+
+    # every option named url, url2, url3, ... in the [haproxy] section
+    haproxy_urls = [value for key, value in
+                    alerts.config.items("haproxy")
+                    if key.startswith("url")]
+
+    while True:
+        try:
+            usage_by_pool = fetch_session_counts(haproxy_urls)
+        except urllib2.URLError:
+            # a stats endpoint was unreachable; try again next round
+            pass
+        else:
+            notify_graphite(usage_by_pool)
+            pretty_print(usage_by_pool)
+        # pause between polls so the load balancers aren't hammered
+        time.sleep(10)
+
+
+if __name__ == "__main__":
+    main()
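For reference, fetch_session_counts() leans on haproxy's fixed CSV column order (pxname, svname, qcur, qmax, scur, smax, slim, stot, then error/retry counters, with status at index 17). A quick self-contained check against an invented row, values purely illustrative:

# an invented haproxy stats row; only the marked columns matter here
row = "pool_www,server01,0,0,19,30,64,41152,0,0,0,0,0,0,0,0,0,UP".split(",")
# indices: 0=pxname, 1=svname, 4=scur, 6=slim, 17=status
assert row[0] == "pool_www" and row[1] == "server01"
assert row[4] == "19" and row[6] == "64" and row[17] == "UP"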