adv_graphgen: finish packaging
makefu committed Sep 10, 2015
1 parent 7ba002c commit 219fab9
Showing 9 changed files with 130 additions and 93 deletions.
66 changes: 38 additions & 28 deletions retiolum/scripts/adv_graphgen/README.md
@@ -1,28 +1,38 @@
This folder contains a number of scripts that provide a convenient way to
generate advanced graphs from the SIGUSR2 output of tinc.

It currently contains the following files:

sanitize.sh:
wrapper around parse.py which filters the syslog file for all tinc-related
lines and removes the status information:
this means that
<code>
May 19 20:40:44 servarch dnsmasq[5382]: reading /etc/resolv.conf
May 19 20:41:38 servarch tinc.retiolum[4780]: Error looking up pa-sharepoint.informatik.ba-stuttgart.de port 655: Name or service not known
</code>
becomes
<code>
Error looking up pa-sharepoint.informatik.ba-stuttgart.de port 655: Name or service not known
</code>
and so on.
It also provides a wrapper around graphviz which automagically
generates graphs from the produced graph file (a minimal sketch of the
whole pipeline follows these descriptions).

parse.py:
reads the sanitized syslog from stdin and prints a valid dot file for
the given input.
The parser module may also produce any other output (e.g. for DNS
entries and so on); you will need to read and modify the source in
order to do this. ~May the source be with you~
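For illustration, a minimal sketch of that pipeline (the exact grep/sed
invocation inside sanitize.sh may differ; the syslog path is an assumption):
<code>
grep 'tinc\.retiolum\[' /var/log/syslog \
  | sed 's/^.*tinc\.retiolum\[[0-9]*\]: //' \
  | python parse.py > retiolum.dot
# then render it, e.g.: dot -Tpng retiolum.dot -o retiolum.png
</code>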

# Generate Graphs from tinc

## Install
### Nix

# tinc_pre is required:
nix-env -i -f tinc_graphs.nix

## e.g. in Retiolum:
## krebs.retiolum.tinc = pkgs.tinc_pre
### Local

python setup.py install
# also install graphviz and imagemagick for building graphs


### Usage:

see the source of the two builder scripts (and the usage sketch below):

#all-around-builder
# env: EXTERNAL_FOLDER, INTERNAL_FOLDER, GEODB, TINC_HOSTPATH
all-the-graphs

# build actual graphs
build-graphs

# exported python scripts
tinc-stats2json           # parses the current tinc state into json
tinc-build-graph          # turns the json into a graph
copy-map                  # copies map.html into $1
add-geodata               # adds geodata to the json
tinc-availability-stats   # adds availability data to the json
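A hedged end-to-end sketch of how these pieces fit together (the folder
paths below are examples, not defaults; all-the-graphs wires up the same
steps using its own environment defaults):

    export TINC_HOSTPATH=~/painload/retiolum/hosts   # where the tinc host files live
    export GEODB=./GeoLiteCity.dat                   # needed by add-geodata (see GeoDB info below)
    copy-map /var/www/graphs/map.html                # install the map page
    tinc-stats2json | add-geodata > /var/www/graphs/marker.json   # json markers for the map
    build-graphs anonymous /var/www/graphs           # render the anonymized graphs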

## GeoDB info

- http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
- nix-env -iA geolite-legacy
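A sketch of fetching the legacy city database by hand and pointing the
scripts at it (add-geodata reads the path from GEODB and falls back to
./GeoLiteCity.dat):

    curl -O http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
    gunzip GeoLiteCity.dat.gz
    export GEODB=$PWD/GeoLiteCity.dat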
11 changes: 8 additions & 3 deletions retiolum/scripts/adv_graphgen/scripts/all-the-graphs
@@ -2,11 +2,16 @@

EXTERNAL_FOLDER=${EXTERNAL_FOLDER:-/var/www/euer.krebsco.de/graphs/retiolum}
INTERNAL_FOLDER=${INTERNAL_FOLDER:-/var/www/euer/graphs/retiolum}
GEOCTIYDB="${GEOCITYDB:-}"
export GEODB="${GEODB:-}"
export TINC_HOSTPATH=${TINC_HOSTPATH:-~/painload/retiolum/hosts}
mapfile="$INTERNAL_FOLDER/map.html"

if test -n "$GEOCITYDB";then
# creates a marker file
# TODO: copy map.html from
if test ! -e "$mapfile";then
echo "copying map to $map.html"
copy-map "$mapfile"
fi
echo "creating geodata database"
tinc-stats2json | add-geodata > "$INTERNAL_FOLDER/marker.json"
fi
build-graphs anonymous "$EXTERNAL_FOLDER"
3 changes: 2 additions & 1 deletion retiolum/scripts/adv_graphgen/setup.py
@@ -15,12 +15,13 @@
author_email='spam@krebsco.de',
# you will also need graphviz and imagemagick
install_requires = [ 'pygeoip' ],

scripts = ['scripts/all-the-graphs', 'scripts/build-graphs'],
packages=['tinc_graphs'],
entry_points={
'console_scripts' : [
'tinc-stats2json = tinc_graphs.Log2JSON:main',
'tinc-build-graph = tinc_graphs.Graph:main',
'copy-map = tinc_graphs.Geo:copy_map',
'add-geodata = tinc_graphs.Geo:main',
'tinc-availability-stats = tinc_graphs.Availability:generate_stats',
]
18 changes: 11 additions & 7 deletions retiolum/scripts/adv_graphgen/tinc_graphs.nix
@@ -4,19 +4,23 @@ with import <nixpkgs> {};
## or in your env
# nix-env -i -f tinc_graphs.nix

buildPythonPackage rec {
python3Packages.buildPythonPackage rec {
name = "tinc_graphs-${version}";
version = "0.2.6";
propagatedBuildInputs = with pkgs;[
pythonPackages.docopt
graphviz
imagemagick
pythonPackages.pygeoip

# optional if you want geolocation:
python3Packages.pygeoip
# geolite-legacy for the db:
## ${geolite-legacy}/share/GeoIP/GeoIPCity.dat
];
src = fetchurl {
url = "";
sha256 = "1dksw1s1n2hxvnga6pygkr174dywncr0wiggkrkn1srbn2amh1c2";
};
#src = fetchurl {
#url = "";
#sha256 = "1dksw1s1n2hxvnga6pygkr174dywncr0wiggkrkn1srbn2amh1c2";
#};
src = ./.;
meta = {
homepage = http://krebsco.de/;
description = "Create Graphs from Tinc Stats";
9 changes: 5 additions & 4 deletions retiolum/scripts/adv_graphgen/tinc_graphs/Availability.py
@@ -1,11 +1,12 @@
#!/usr/bin/python
# TODO: Rewrite this shitty piece of software ...
# -*- coding: utf8 -*-

import sys,json,os
""" TODO: Refactoring needed to pull the edges out of the node structures again,
it should be easier to handle both structures"""
DUMP_FILE = os.environment.get("AVAILABILITY_FILE","tinc-availability.json")
hostpath=os.environment.get("TINC_HOSTPATH", "/etc/tinc/retiolum/hosts")
DUMP_FILE = os.environ.get("AVAILABILITY_FILE","tinc-availability.json")
hostpath=os.environ.get("TINC_HOSTPATH", "/etc/tinc/retiolum/hosts")

def get_all_nodes():
return os.listdir(hostpath)
@@ -16,12 +17,12 @@ def generate_stats():
import json
jlines = []
try:
f = open(DUMP_FILE,'r')
f = open(DUMP_FILE,'r+')
for line in f:
jlines.append(json.loads(line))
f.close()
except Exception as e:
print("Unable to open and parse Availability DB: {} (override with AVAILABILITY_FILE)".format(DUMP_FILE)
print("Unable to open and parse Availability DB: {} (override with AVAILABILITY_FILE)".format(DUMP_FILE))
sys.exit(1)

all_nodes = {}
25 changes: 13 additions & 12 deletions retiolum/scripts/adv_graphgen/tinc_graphs/Geo.py
@@ -2,24 +2,24 @@
# -*- coding: utf8 -*-
import sys,json,os
from .Graph import delete_unused_nodes,resolve_myself
GEODB=os.environ.get("GEOCITYDB","GeoLiteCity.dat")
GEODB=os.environ.get("GEODB","GeoLiteCity.dat")

def copy_map():
from shutil import copytree
from shutil import copy
from os.path import dirname,join,realpath
if len(sys.argv) != 2 or sys.argv[1] == "--help" :
print("usage: {} <destination>".format(sys.argv[0])
print(" copies the map.html file to the <destination>")
print("usage: {} <destination>".format(sys.argv[0]))
print(" copies the map.html file to the <destination>")
sys.exit(1)
dstdir=sys.argv[1]
copytree(realpath(join(dirname(__file__),'static/map.html')),dstdir)
copy(realpath(join(dirname(__file__),'static/map.html')),dstdir)


def add_geo(nodes):
from pygeoip import GeoIP
gi = GeoIP(GEODB)

for k,v in nodes.iteritems():
for k,v in nodes.items():
try:
nodes[k].update(gi.record_by_addr(v["external-ip"]))
except Exception as e:
@@ -31,7 +31,7 @@ def add_coords_to_edges(nodes):
from pygeoip import GeoIP
gi = GeoIP(GEODB)

for k,v in nodes.iteritems():
for k,v in nodes.items():
for i,j in enumerate(v.get("to",[])):
data=gi.record_by_addr(j["addr"])
try:
@@ -45,13 +45,13 @@ def add_jitter(nodes):
from random import random
#add a bit of jitter to all of the coordinates
max_jitter=0.005
for k,v in nodes.iteritems():
for k,v in nodes.items():
jitter_lat= max_jitter -random()*max_jitter*2
jitter_long= max_jitter -random()*max_jitter*2
try:
v["latitude"]= v["latitude"] + jitter_lat
v["longitude"]= v["longitude"] + jitter_long
for nodek,node in nodes.iteritems():
for nodek,node in nodes.items():
for to in node['to']:
if to['name'] == k:
to['latitude'] = v["latitude"]
@@ -61,10 +61,11 @@ def add_jitter(nodes):

def main():
import json
try:
with open(GEODB) as f: f.read()
except:
try:
with open(GEODB,'rb') as f: f.read()
except Exception as e:
print("cannot open {} (GEODB in env)".format(GEODB))
print(e)
sys.exit(1)
try:
nodes = add_jitter(add_coords_to_edges(add_geo(resolve_myself(delete_unused_nodes(json.load(sys.stdin))))))
56 changes: 33 additions & 23 deletions retiolum/scripts/adv_graphgen/tinc_graphs/Graph.py
@@ -6,8 +6,11 @@
from .Availability import get_node_availability
import sys,json
from time import time
DUMP_FILE = os.environment.get("AVAILABILITY_FILE", "tinc-availability.json")
DUMP_FILE = os.environ.get("AVAILABILITY_FILE", "tinc-availability.json")
hostpath=os.environ.get("TINC_HOSTPATH", "/etc/tinc/retiolum/hosts")

# will be filled later
supernodes= []

def resolve_myself(nodes):
#resolve MYSELF to the real ip
@@ -34,23 +37,24 @@ def generate_availability_stats(nodes):
""" generates stats of from availability
"""
jlines = []
try:
f = BackwardsReader(DUMP_FILE)
lines_to_use = 1000
while True:
if lines_to_use == 0: break
line = f.readline()
if not line: break
jline = json.loads(line)
if not jline['nodes']: continue

jlines.append(jline)
lines_to_use -=1
except Exception as e: sys.stderr.write(str(e))
# try:
# f = BackwardsReader(DUMP_FILE)
# lines_to_use = 1000
# while True:
# if lines_to_use == 0: break
# line = f.readline()
# if not line: break
# jline = json.loads(line)
# if not jline['nodes']: continue

# jlines.append(jline)
# lines_to_use -=1
# except Exception as e: sys.stderr.write(str(e))

for k,v in nodes.items():
v['availability'] = get_node_availability(k,jlines)
sys.stderr.write( "%s -> %f\n" %(k ,v['availability']))
# TODO: get this information in a different way
v['availability'] = get_node_availability(k,[])


def generate_stats(nodes):
""" Generates some statistics of the network and nodes
@@ -211,7 +215,6 @@ def anonymize_nodes(nodes):
return newnodes

def main():
supernodes= []
if len(sys.argv) != 2 or sys.argv[1] not in ["anonymous","complete"]:
print("usage: %s (anonymous|complete)")
sys.exit(1)
@@ -231,18 +234,25 @@ def main():
print_edge(k,v)

elif sys.argv[1] == "complete":
for supernode,addr in check_all_the_super():
supernodes.append(supernode)
try:
for supernode,addr in check_all_the_super(hostpath):
supernodes.append(supernode)
except FileNotFoundError as e:
print("!! cannot load list of supernodes ({})".format(hostpath))
print("!! Use TINC_HOSTPATH env to override")
sys.exit(1)

generate_availability_stats(nodes)
add_services(nodes)
for k,v in nodes.items():
print_node(k,v)
print_edge(k,v)
try:
dump_graph(nodes)
except Exception as e:
sys.stderr.write("Cannot dump graph: %s" % str(e))

#TODO: get availability somehow else
# try:
# dump_graph(nodes)
# except Exception as e:
# sys.stderr.write("Cannot dump graph: %s" % str(e))
else:
pass

2 changes: 1 addition & 1 deletion retiolum/scripts/adv_graphgen/tinc_graphs/Services.py
@@ -1,5 +1,5 @@
import os,sys
services_dir=os.environment.get("SERIVCES_DIR","/home/reaktor/nag.services")
services_dir=os.environ.get("SERIVCES_DIR","/home/reaktor/nag.services")
def add_services(nodes):
for k,v in nodes.items():
n = nodes[k]
33 changes: 19 additions & 14 deletions retiolum/scripts/adv_graphgen/tinc_graphs/Supernodes.py
@@ -7,21 +7,26 @@ def find_potential_super(path="/etc/tinc/retiolum/hosts"):
needle_addr = re.compile("Address\s*=\s*(.*)")
needle_port = re.compile("Port\s*=\s*(.*)")
for f in os.listdir(path):
with open(path+"/"+f) as of:
addrs = []
port = "655"
try:
with open(path+"/"+f) as of:
addrs = []
port = "655"

for line in of.readlines():
for line in of.readlines():

addr_found = needle_addr.match(line)
if addr_found:
addrs.append(addr_found.group(1))
addr_found = needle_addr.match(line)
if addr_found:
addrs.append(addr_found.group(1))

port_found = needle_port.match(line)
if port_found:
port = port_found.group(1)

if addrs : yield (f ,[(addr ,int(port)) for addr in addrs])
port_found = needle_port.match(line)
if port_found:
port = port_found.group(1)

if addrs : yield (f ,[(addr ,int(port)) for addr in addrs])
except FileNotFoundError as e:
print("Cannot open hosts directory to be used to find potential supernodes")
print("Directory used: {}".format(path))
raise


def try_connect(addr):
@@ -54,11 +59,11 @@ def check_all_the_super(path):

def main():
import os
hostpath=os.environment.get("TINC_HOSTPATH", "/etc/tinc/retiolum/hosts")
hostpath=os.environ.get("TINC_HOSTPATH", "/etc/tinc/retiolum/hosts")

for host,addrs in check_all_the_super(hostpath):
print("%s %s" %(host,str(addrs)))

if __name__ == "__main__":
main()

