Merge pull request marineam#2 from mythmon/intern1
Major Changes:
-   Changed the configurator to use autocomplete text boxes.
-   Allowed the configurator to add multiple hosts/services/groups at a time.
-   Gave the configurator sorting and filtering capabilities.
-   Used local storage to store user preferences.
-   Switched to the configurator for everything.
-   Made graphs collapsible.
-   Added a choice of local time or UTC for graphs.
-   Upgraded to jQuery 1.6.
marineam committed Jun 29, 2011
2 parents f2fce57 + d1fd82c commit 77dd6c3
Showing 18 changed files with 2,060 additions and 1,190 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,3 +1,6 @@
*.swp
*.pyc
_trial_temp
railroad/django.wsgi
railroad/railroad/settings.py
dropin.cache
63 changes: 48 additions & 15 deletions railroad/railroad/ajax/views.py
@@ -1,39 +1,72 @@
from django.http import HttpResponse
from railroad.viewhosts import views
import itertools
import json

# This function is kind of crazy. It is meant to give different results if
# called twice with the same inputs. Its purpose is to suggest the right set
# of auto complete possibilities when a user enters a common prefix of two
# possible auto complete suggestions.
def transpose_combo(li, n, memo):
key = repr(li)
if key in memo:
return memo[key].next()

n = min(n,len(li))
combinations = itertools.combinations(li,n)
transpose = zip(*combinations)
memo[key] = iter(transpose)
return memo[key].next()
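
As an editorial aside (not part of this commit), here is a minimal sketch of the behaviour described above, using invented completions 'ab' and 'ac' and the Python 2 semantics the surrounding code assumes:

# Hypothetical illustration only; the completions are made up.
memo = {}
matches = ['ab', 'ac']                      # both completions match the prefix 'a'
first = transpose_combo(matches, 2, memo)   # -> ('ab',)
second = transpose_combo(matches, 2, memo)  # -> ('ac',)
# itertools.product(first, second) then yields only ('ab', 'ac'),
# never the reordered duplicate ('ac', 'ab').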

def autocomplete(request, context):
query = request.GET.get('q', '')
query = request.GET.get('term', '')
limit = int(request.GET.get('limit', 10))


stat, obj = views.parse()

choices = []
queries = []
q_results = []
result = ""

if len(query.split(',')) > 1:
queries = query.split(',')
else:
queries = [query]

queries = [q.strip() for q in queries if q.strip()]
queries = [q.strip() for q in query.split(',') if q.strip()]

if context == 'host':
choices = views.hostnames(stat)
elif context == 'group':
choices = [x['alias'] for x in views.grouplist(obj)]
elif context == 'service':
# servicenames will return services with the same name for different hosts, so
# we make it a set to get rid of duplicate names
# servicenames will return services with the same name for different
# hosts, so we make it a set to get rid of duplicate names
choices = set(views.servicenames(stat))

for q in queries:
matching_names = [x for x in choices if x.lower().startswith(q)]
matching_names = [x for x in choices if x.lower().startswith(q.lower())]
q_results.append(matching_names)
results = itertools.product(*q_results)

# This craziness deals with the following situation: let the valid completions
# be 'ab' and 'ac', and the user enters "a, a". Auto complete should give back
# ['ab, ac'], not ['ab, ac', 'ac, ab'].
memo = {}
counts = {}
for q in q_results:
key = repr(q) # lists aren't hashable :( but strings are :)
if key in counts:
counts[key] += 1
else:
counts[key] = 1

product_foder = []
for q in q_results:
try:
product_foder.append(transpose_combo(q, counts[repr(q)], memo))
except StopIteration:
# This means the user entered too many copies of the substring; ignore
# any excess entries.
pass
# end craziness

results = itertools.product(*product_foder)
results = [','.join(result) for result in results]
result = '\n'.join(results)
return HttpResponse(result)
result = [ { "value" : r } for r in results ]

return HttpResponse(json.dumps(result))
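
For context (editorial, not from the commit), a sketch of the request/response shape this view now produces, with invented host names; the [{"value": ...}] format matches what a jQuery-UI-style autocomplete widget expects:

# Hypothetical example only -- the host names are made up.
# GET /ajax/autocomplete/host?term=web,db
# With known hosts ['web01', 'web02', 'db01'], the view responds with:
#   [{"value": "web01,db01"}, {"value": "web02,db01"}]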
61 changes: 47 additions & 14 deletions railroad/railroad/parserrd/views.py
@@ -16,6 +16,8 @@
import os
import types
import time
import random
from math import floor

import rrdtool
import coil
@@ -44,11 +46,41 @@ def labelize(data, index, base, unit):
statistics = data[index]['statistics']
cur = str(sigfigs(statistics['cur'] / base)) \
if statistics['cur'] != None else 'Null'
return ' (cur: %s%s, min: %s%s, max: %s%s, avg: %s%s)' % \
(cur, unit, str(sigfigs(statistics['min'] / base)), unit, \
str(sigfigs(statistics['max'] / base)), unit, \
str(sigfigs(statistics['avg'] / base)), unit)

return (' (cur: %s%s, min: %s%s, max: %s%s, avg: %s%s)' %
(cur, unit, str(sigfigs(statistics['min'] / base)), unit,
str(sigfigs(statistics['max'] / base)), unit,
str(sigfigs(statistics['avg'] / base)), unit))
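
For reference (editorial, not part of the commit), the label suffix built here looks roughly like the following; the exact rounding depends on sigfigs, which is defined elsewhere in this module:

# Hypothetical output only -- the numbers are invented.
# labelize(data, 0, 1024, 'K') might return something like:
#   ' (cur: 1.2K, min: 0.5K, max: 3.4K, avg: 1.8K)'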

def getColors(names):
rng_state = random.getstate()

n = len(names)
colors = []
offset = 0
for i in range(n):
random.seed(names[i])
# Make a random color
h = 360 * ((float(i) / n + offset) % 1)
s = 0.6 + 0.4 * random.random()
l = 0.375 + 0.25 * random.random()

# convert it from HSL to RGB
c = (1 - abs(2*l-1)) * s;
hp = floor(h / 60)
x = c * (1 - abs(hp % 2 - 1))
(rp,gp,bp) = {0: (c,x,0), 1: (x,c,0), 2: (0,c,x),
3: (0,x,c), 4: (x,0,c), 5: (c,0,x)}[hp]

m = l - 0.5 * c
r,g,b = [x*256 for x in (rp + m, gp + m, bp + m)]

# Convert it to a hex color
colors.append('#%02x%02x%02x' % (r,g,b))

random.setstate(rng_state)

return colors
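
An editorial note (not part of the commit), sketched with invented labels: because each name seeds the RNG and the saved state is restored afterwards, the same label list always maps to the same colors and the caller's random state is left untouched:

# Hypothetical illustration only -- 'load' and 'swap' are invented labels.
# state_before = random.getstate()
# getColors(['load', 'swap']) == getColors(['load', 'swap'])  # same labels -> same colors
# random.getstate() == state_before                           # caller's RNG state untouched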

def index(request, host, service, start, end, resolution='150'):
"""Reads the rrd and returns the data in flot-friendly format"""
rra_path = settings.RRA_PATH
@@ -133,13 +165,12 @@ def index(request, host, service, start, end, resolution='150'):
if not(query_label):
query_label = root_label
labels.append(('query', query_label if query_label else 'Result'))


if '_result' in all_labels:
labels.append(('_result', root_label if root_label else 'Result'))

length = len(labels)

indices = range(length)
dataset = {}

@@ -153,7 +184,9 @@ def index(request, host, service, start, end, resolution='150'):

labels = map(lambda x: x[0], labels)
state_data = []


graph_options['colors'] = getColors(labels);

# Reading graph options
for index in indices:
key = labels[index]
@@ -271,7 +304,7 @@ def index(request, host, service, start, end, resolution='150'):
else:
flot_data[index]['label'] += \
' (cur: N/A, min: N/A, max: N/A, avg: N/A)'


if max != None:
graph_options['yaxis']['max'] = max * 1.1 + 1
@@ -280,12 +313,12 @@ def index(request, host, service, start, end, resolution='150'):
axis_max = root_trend.get('axis_max', '')
if axis_max and graph_options['yaxis']['max'] < axis_max:
graph_options['yaxis']['max'] = axis_max * 1.1 + 1

if root_trend:
axis_label = root_trend.get('axis_label', '')
if axis_label:
graph_options['yaxis']['label'] = axis_label

for index in indices:
del(flot_data[index][railroad_conf])

@@ -317,9 +350,9 @@ def index(request, host, service, start, end, resolution='150'):
# still draw (otherwise they don't get axes, ticks, etc)
flot_data.append({'data': state_data, 'lines': {'show': False}})

result = {'options': graph_options, 'data': flot_data, 'base': base, \
'empty': empty_graph, 'current_time': current_time, \
'start': start, 'end': end, \
result = {'options': graph_options, 'data': flot_data, 'base': base,
'empty': empty_graph, 'current_time': current_time,
'start': start, 'end': end,
}

return HttpResponse(json.dumps(result))
2 changes: 2 additions & 0 deletions railroad/railroad/urls.py
@@ -44,6 +44,8 @@
(r'^configurator/graph$', 'railroad.viewhosts.views.customgraph'),
(r'^configurator/formstate$', 'railroad.viewhosts.views.formstate'),
(r'^configurator/generatelink$', 'railroad.viewhosts.views.generatelink'),
(r'^configurator/host/(?P<hosts>\w+)$', 'railroad.viewhosts.views.hostconfigurator'),
(r'^configurator/service/(?P<service>(\w+\s*)+)$', 'railroad.viewhosts.views.serviceconfigurator'),

# Stuff for AJAX
(r'^ajax/autocomplete/(?P<context>\w+)$', 'railroad.ajax.views.autocomplete'),
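
For illustration (editorial, not part of the commit), the kinds of URLs the two new configurator patterns accept, with invented host and service names:

# Hypothetical examples only -- 'web01' and 'CPU Load' are made up.
#   /configurator/host/web01        -> hostconfigurator(request, hosts='web01')
#   /configurator/service/CPU Load  -> serviceconfigurator(request, service='CPU Load')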
91 changes: 52 additions & 39 deletions railroad/railroad/viewhosts/views.py
@@ -19,7 +19,6 @@
import re
import time
import pickle
import itertools

import coil
import rrdtool
@@ -209,6 +208,43 @@ def servicenames_by_host(stat, host):
return [service['service_description'] for service in all_services \
if service['host_name'] == host]

def get_graphs(stat, obj, hosts='', groups='', services=''):
"""Returns a list of services objects, marked graphable or not"""
groups = set([group.strip() for group in groups.split(',') if group.strip()])
hosts = set([host.strip() for host in hosts.split(',') if host.strip()])
services = set([service.strip() for service in services.split(',') if service.strip()])
group_hosts = set() # Hosts under the given groups
all_hosts = set() # all_hosts will contain all host names from both hosts and groups
end = int(time.time()) # For graphing
start = end - DAY # For graphing

if groups:
for group in groups:
group_hosts.update(set(hostnames_by_group(stat,obj,group)))
all_hosts.update(hosts | group_hosts) if hosts | group_hosts else None
service_list = [] # Will contain the service objects
# Given hosts and no services, we want to get all services for those hosts.
if all_hosts and not services:
for host in all_hosts:
for service in servicelist_by_host(stat,host):
service_list.append(service)
# Given no hosts and services, we want to get all hosts for those services.
# Given hosts and services, we start by getting all of the hosts that run the listed services, and later filter out the hosts that weren't listed.
if (not all_hosts and services) or (all_hosts and services):
for service in services:
for host in hostlist_by_service(stat,service):
service_list.append(servicedetail(stat,host['host_name'],service))
# Given hosts and services, we already have a list of all hosts for the listed services, so filter out the hosts that weren't listed.
if all_hosts and services:
service_list = [service for service in service_list if (lambda x: x in all_hosts) (service['host_name'])]
# Find out whether each service object is graphable or not
for service in service_list:
service['is_graphable'] = is_graphable(service['host_name'], service['service_description'])
service['start'] = start
service['end'] = end
service['period'] = 'ajax'
return service_list
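
A brief usage sketch (editorial, not from the commit; all names are invented) of the three cases the comments above describe:

# Hypothetical calls only -- group/host/service names are made up.
# stat, obj = parse()
# get_graphs(stat, obj, hosts='web01')                    # every service on web01
# get_graphs(stat, obj, groups='webservers')              # every service on hosts in that group
# get_graphs(stat, obj, hosts='web01', services='Load')   # only 'Load', restricted to web01
# Each returned service dict gains 'is_graphable', 'start', 'end' and 'period'.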

def get_time_intervals():
"""Returns a list of (start,end) intervals for day, week, month, year"""
# day , week , month , year
@@ -581,45 +617,9 @@ def customgraph(request):
hosts = request.GET.get("host")
services = request.GET.get("service")

# Remove empty entries, i.e null strings in the list
# Define as sets to remove duplicates easily, allow for some set notation later
groups = set([x.strip() for x in groups.split(',') if x.strip()])
hosts = set([x.strip() for x in hosts.split(',') if x.strip()])
services = set([x.strip() for x in services.split(',') if x.strip()])
group_hosts = set() # Hosts under the given groups
all_hosts = set() # All hosts will contain all host names from host and group

# Populate group_hosts with the hosts in the groups
if groups:
for group in groups:
group_hosts.update(set(hostnames_by_group(stat,obj,group)))

all_hosts.update(hosts | group_hosts) if hosts | group_hosts else None
service_list = [] #Will contain the service objects
end = int(time.time()) # For graphing
start = end - DAY #For graphing

# Given hosts and no services, we want to get all services for those hosts.
if all_hosts and not services:
for host in all_hosts:
for service in servicelist_by_host(stat,host):
service_list.append(service)
# Given no hosts and services, we want to get all hosts for those services.
# Given hosts and services, we want to start by getting all of the hosts with the services listed, and then will later filter out the hosts we don't want
if (not all_hosts and services) or (all_hosts and services):
for service in services:
for host in hostlist_by_service(stat,service):
service_list.append(servicedetail(stat,host['host_name'],service))
# Given hosts and services, we already have a list of all hosts for the listed services, we want to filter out hosts that weren't listed.
if all_hosts and services:
service_list = [service for service in service_list if (lambda x: x in all_hosts) (service['host_name'])]

# Find out whether each service object is graphable or not
for service in service_list:
service['is_graphable'] = is_graphable(service['host_name'], service['service_description'])
service['start'] = start
service['end'] = end
service['period'] = 'ajax'
service_list = get_graphs(stat, obj, hosts, groups, services)


context_data = {
'loaded_graphs': service_list,
@@ -668,6 +668,19 @@ def directconfigurator(request):
stat, obj = parse()
return configurator(stat, obj)

def hostconfigurator(request, hosts):
"""Returns a configurator page with graphs on it"""
stat, obj = parse()
service_list = get_graphs(stat, obj, hosts)
return configurator(stat, obj, 'Configurator', 'Configurator', service_list)

def serviceconfigurator(request, service):
"""Returns a configurator page with graphs on it"""
stat, obj = parse()
service_list = get_graphs(stat, obj, "", "", service)
return configurator(stat, obj, 'Configurator', 'Configurator', service_list)


def configurator(stat, obj, htmltitle='Configurator', \
pagetitle='Configurator', loaded_graphs=[], \
page_state=''):
Binary file added railroad/static/img/remove.png
