update package structure, move non-executables to lib

Author: Martin Weinelt
Date:   2015-03-24 22:10:54 +01:00
Commit: c74b7b95fb

10 changed files with 21 additions and 16 deletions

lib/GlobalRRD.py (new file, +40)

@@ -0,0 +1,40 @@
import os
import subprocess
from lib.RRD import DS, RRA, RRD
class GlobalRRD(RRD):
ds_list = [
# Number of nodes available
DS('nodes', 'GAUGE', 120, 0, float('NaN')),
        # Number of clients available
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
# 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 1, 120),
# 31 days of 1 hour samples
RRA('AVERAGE', 0.5, 60, 744),
# ~5 years of 1 day samples
RRA('AVERAGE', 0.5, 1440, 1780),
]
def __init__(self, directory):
super().__init__(os.path.join(directory, "nodes.rrd"))
self.ensure_sanity(self.ds_list, self.rra_list, step=60)
    # TODO: find a better solution; this overrides RRD.update()
    # with a different signature (Python has no method overloading)
def update(self, node_count, client_count):
super().update({'nodes': node_count, 'clients': client_count})
def graph(self, filename, timeframe):
args = ["rrdtool", 'graph', filename,
'-s', '-' + timeframe,
'-w', '800',
                '-h', '400',
'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
'LINE1:nodes#F00:nodes\\l',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'LINE2:clients#00F:clients']
subprocess.check_output(args)
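
A minimal usage sketch for GlobalRRD, assuming rrdtool is in PATH and the
database directory exists (paths and counts are invented):

    from lib.GlobalRRD import GlobalRRD

    rrd = GlobalRRD('/var/lib/ffmap/db')    # ensures nodes.rrd matches the schema
    rrd.update(250, 1337)                   # node_count, client_count; step is 60s
    rrd.graph('/var/www/global.png', '7d')  # render the last 7 days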

lib/NodeRRD.py (new file, +59)

@@ -0,0 +1,59 @@
import os
import subprocess
from lib.RRD import DS, RRA, RRD
class NodeRRD(RRD):
ds_list = [
DS('upstate', 'GAUGE', 120, 0, 1),
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
# 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 1, 120),
# 5 days of 5 minute samples
RRA('AVERAGE', 0.5, 5, 1440),
# 30 days of 1 hour samples
RRA('AVERAGE', 0.5, 60, 720),
# 1 year of 12 hour samples
RRA('AVERAGE', 0.5, 720, 730),
]
def __init__(self, filename, node=None):
"""
Create a new RRD for a given node.
If the RRD isn't supposed to be updated, the node can be omitted.
"""
self.node = node
super().__init__(filename)
self.ensure_sanity(self.ds_list, self.rra_list, step=60)
@property
def imagename(self):
return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0])
    # TODO: find a better solution; this overrides RRD.update()
    # with a different signature (Python has no method overloading)
def update(self):
        super().update({'upstate': int(self.node['flags']['online']),
                        'clients': self.node['statistics']['clients']})
def graph(self, directory, timeframe):
"""
Create a graph in the given directory. The file will be named
basename.png if the RRD file is named basename.rrd
"""
args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
'-s', '-' + timeframe,
'-w', '800',
'-h', '400',
'-l', '0',
'-y', '1:1',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'VDEF:maxc=clients,MAXIMUM',
'CDEF:c=0,clients,ADDNAN',
'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
'AREA:c#0F0:up\\l',
'AREA:d#F00:down\\l',
'LINE1:c#00F:clients connected\\l']
subprocess.check_output(args)
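
A sketch of NodeRRD in its two roles, updating with a node dict shaped like
the ones lib/nodes.py builds, and graphing; rrdtool assumed in PATH, ids and
paths invented:

    from lib.NodeRRD import NodeRRD

    node = {'flags': {'online': True}, 'statistics': {'clients': 5}}
    rrd = NodeRRD('/var/lib/ffmap/db/deadbeefcafe.rrd', node)
    rrd.update()                       # writes upstate=1, clients=5
    rrd.graph('/var/www/nodes', '1d')  # -> /var/www/nodes/deadbeefcafe.png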

lib/RRD.py (new file, +339)

@@ -0,0 +1,339 @@
import subprocess
import re
import os
from operator import xor, eq
from functools import reduce
from itertools import starmap
import math
class RRDIncompatibleException(Exception):
"""
Is raised when an RRD doesn't have the desired definition and cannot be
upgraded to it.
"""
pass
class RRDOutdatedException(Exception):
"""
Is raised when an RRD doesn't have the desired definition, but can be
upgraded to it.
"""
pass
# Python 2 compatibility: FileNotFoundError only exists since Python 3.3
try:
    FileNotFoundError
except NameError:
    class FileNotFoundError(Exception):
        pass
class RRD(object):
"""
An RRD is a Round Robin Database, a database which forgets old data and
aggregates multiple records into new ones.
It contains multiple Data Sources (DS) which can be thought of as columns,
and Round Robin Archives (RRA) which can be thought of as tables with the
    DS as columns and time-dependent rows.
"""
# rra[2].cdp_prep[0].value = 1,8583033333e+03
    _info_regex = re.compile(r"""
(?P<section>[a-z_]+)
\[ (?P<key>[a-zA-Z0-9_]+) \]
\.
|
(?P<name>[a-z_]+)
\s*=\s*
"? (?P<value>.*?) "?
$""", re.X)
_cached_info = None
def _exec_rrdtool(self, cmd, *args, **kwargs):
pargs = ["rrdtool", cmd, self.filename]
for k, v in kwargs.items():
pargs.extend(["--" + k, str(v)])
pargs.extend(args)
subprocess.check_output(pargs)
def __init__(self, filename):
self.filename = filename
def ensure_sanity(self, ds_list, rra_list, **kwargs):
"""
Create or upgrade the RRD file if necessary to contain all DS in
ds_list. If it needs to be created, the RRAs in rra_list and any kwargs
will be used for creation. Note that RRAs and options of an existing
database are NOT modified!
"""
try:
self.check_sanity(ds_list)
except FileNotFoundError:
self.create(ds_list, rra_list, **kwargs)
except RRDOutdatedException:
self.upgrade(ds_list)
def check_sanity(self, ds_list=()):
"""
Check if the RRD file exists and contains (at least) the DS listed in
ds_list.
"""
if not os.path.exists(self.filename):
raise FileNotFoundError(self.filename)
info = self.info()
if set(ds_list) - set(info['ds'].values()) != set():
for ds in ds_list:
if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type:
                    raise RRDIncompatibleException("%s is %s but should be %s" %
                                                   (ds.name, info['ds'][ds.name].type, ds.type))
else:
raise RRDOutdatedException()
def upgrade(self, dss):
"""
Upgrade the DS definitions (!) of this RRD.
(To update its values, use update())
        The list dss contains DS objects to be updated or added. The
parameters of a DS can be changed, but not its type. New DS are always
added at the end in the order of their appearance in the list.
This is done internally via an rrdtool dump -> rrdtool restore and
modifying the dump on the fly.
"""
info = self.info()
new_ds = list(info['ds'].values())
new_ds.sort(key=lambda ds: ds.index)
for ds in dss:
if ds.name in info['ds']:
old_ds = info['ds'][ds.name]
if info['ds'][ds.name].type != ds.type:
raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' %
(ds.name, old_ds.type, ds.type))
ds.index = old_ds.index
new_ds[ds.index] = ds
else:
ds.index = len(new_ds)
new_ds.append(ds)
added_ds_num = len(new_ds) - len(info['ds'])
dump = subprocess.Popen(
["rrdtool", "dump", self.filename],
stdout=subprocess.PIPE)
restore = subprocess.Popen(
["rrdtool", "restore", "-", self.filename + ".new"],
stdin=subprocess.PIPE)
echo = True
ds_definitions = True
for line in dump.stdout:
if ds_definitions and b'<ds>' in line:
echo = False
if b'<!-- Round Robin Archives -->' in line:
ds_definitions = False
for ds in new_ds:
restore.stdin.write(bytes("""
<ds>
<name> %s </name>
<type> %s </type>
<minimal_heartbeat>%i</minimal_heartbeat>
<min>%s</min>
<max>%s</max>
<!-- PDP Status -->
<last_ds>%s</last_ds>
<value>%s</value>
<unknown_sec> %i </unknown_sec>
</ds>
""" % (ds.name,
ds.type,
ds.args[0],
ds.args[1],
ds.args[2],
ds.last_ds,
ds.value,
ds.unknown_sec), "utf-8"))
if b'</cdp_prep>' in line:
restore.stdin.write(added_ds_num*b"""
<ds>
<primary_value> NaN </primary_value>
<secondary_value> NaN </secondary_value>
<value> NaN </value>
<unknown_datapoints> 0 </unknown_datapoints>
</ds>
""")
# echoing of input line
if echo:
restore.stdin.write(
line.replace(
b'</row>',
(added_ds_num*b'<v>NaN</v>')+b'</row>'
)
)
if ds_definitions and b'</ds>' in line:
echo = True
dump.stdout.close()
restore.stdin.close()
dump.wait()
restore.wait()
os.rename(self.filename + ".new", self.filename)
self._cached_info = None
def create(self, ds_list, rra_list, **kwargs):
"""
Create a new RRD file with the specified list of RRAs and DSs.
Any kwargs are passed as --key=value to rrdtool create.
"""
self._exec_rrdtool(
"create",
*map(str, rra_list + ds_list),
**kwargs
)
self._cached_info = None
def update(self, V):
"""
Update the RRD with new values V.
        V can be either a list or a dict:
* If it is a dict, its keys must be DS names in the RRD and it is
ensured that the correct DS are updated with the correct values, by
passing a "template" to rrdtool update (see man rrdupdate).
* If it is a list, no template is generated and the order of the
values in V must be the same as that of the DS in the RRD.
"""
try:
args = ['N:' + ':'.join(map(str, V.values()))]
kwargs = {'template': ':'.join(V.keys())}
except AttributeError:
args = ['N:' + ':'.join(map(str, V))]
kwargs = {}
self._exec_rrdtool("update", *args, **kwargs)
self._cached_info = None
def info(self):
"""
Return a dictionary with information about the RRD.
See `man rrdinfo` for more details.
"""
if self._cached_info:
return self._cached_info
env = os.environ.copy()
env["LC_ALL"] = "C"
proc = subprocess.Popen(
["rrdtool", "info", self.filename],
stdout=subprocess.PIPE,
env=env
)
out, err = proc.communicate()
out = out.decode()
info = {}
for line in out.splitlines():
base = info
for match in self._info_regex.finditer(line):
section, key, name, value = match.group("section", "key", "name", "value")
if section and key:
try:
key = int(key)
except ValueError:
pass
if section not in base:
base[section] = {}
if key not in base[section]:
base[section][key] = {}
base = base[section][key]
if name and value:
try:
base[name] = int(value)
except ValueError:
try:
base[name] = float(value)
                        except ValueError:
base[name] = value
dss = {}
for name, ds in info['ds'].items():
ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'], ds['min'], ds['max'])
ds_obj.index = ds['index']
ds_obj.last_ds = ds['last_ds']
ds_obj.value = ds['value']
ds_obj.unknown_sec = ds['unknown_sec']
dss[name] = ds_obj
info['ds'] = dss
rras = []
for rra in info['rra'].values():
rras.append(RRA(rra['cf'], rra['xff'], rra['pdp_per_row'], rra['rows']))
info['rra'] = rras
self._cached_info = info
return info
class DS(object):
"""
DS stands for Data Source and represents one line of data points in a Round
Robin Database (RRD).
"""
name = None
type = None
args = []
index = -1
last_ds = 'U'
value = 0
unknown_sec = 0
def __init__(self, name, dst, *args):
self.name = name
self.type = dst
self.args = args
def __str__(self):
return "DS:%s:%s:%s" % (
self.name,
self.type,
":".join(map(str, self._nan_to_u_args()))
)
def __repr__(self):
return "%s(%r, %r, %s)" % (
self.__class__.__name__,
self.name,
self.type,
", ".join(map(repr, self.args))
)
def __eq__(self, other):
return all(starmap(eq, zip(self.compare_keys(), other.compare_keys())))
def __hash__(self):
return reduce(xor, map(hash, self.compare_keys()))
def _nan_to_u_args(self):
return tuple(
'U' if type(arg) is float and math.isnan(arg)
else arg
for arg in self.args
)
def compare_keys(self):
return self.name, self.type, self._nan_to_u_args()
class RRA(object):
def __init__(self, cf, *args):
self.cf = cf
self.args = args
def __str__(self):
return "RRA:%s:%s" % (self.cf, ":".join(map(str, self.args)))
def __repr__(self):
return "%s(%r, %s)" % (
self.__class__.__name__,
self.cf,
", ".join(map(repr, self.args))
)
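
A sketch of the base class on its own, showing both update() call styles
described in the docstring; rrdtool assumed in PATH, file name and values
invented:

    from lib.RRD import DS, RRA, RRD

    rrd = RRD('demo.rrd')
    rrd.ensure_sanity([DS('temp', 'GAUGE', 120, 0, 100)],
                      [RRA('AVERAGE', 0.5, 1, 120)],
                      step=60)
    rrd.update({'temp': 21.5})  # dict: passes --template temp, order-independent
    rrd.update([21.5])          # list: values must be in DS order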

lib/__init__.py (new file, +1)

@@ -0,0 +1 @@
__author__ = 'hexa'

lib/alfred.py (new file, +20)

@@ -0,0 +1,20 @@
import subprocess
import json
def _fetch(data_type):
output = subprocess.check_output(
["alfred-json", "-z", "-f", "json", "-r", str(data_type)])
return json.loads(output.decode("utf-8")).values()
def nodeinfo():
return _fetch(158)
def statistics():
return _fetch(159)
def vis():
return _fetch(160)
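
The numeric arguments are the alfred data-type IDs under which Gluon nodes
announce nodeinfo (158), statistics (159) and vis (160). A sketch of
consuming the output, assuming a running alfred with alfred-json and the
usual Gluon nodeinfo layout with a 'hostname' key:

    from lib import alfred

    for info in alfred.nodeinfo():
        print(info.get('node_id'), info.get('hostname'))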

lib/batman.py (new file, +98)

@@ -0,0 +1,98 @@
import subprocess
import json
import re
class Batman(object):
"""
Bindings for B.A.T.M.A.N. Advanced
    command-line interface "batctl"
"""
def __init__(self, mesh_interface='bat0'):
self.mesh_interface = mesh_interface
def vis_data(self, batadv_vis=False):
vds = self.vis_data_batctl_legacy()
if batadv_vis:
vds += self.vis_data_batadv_vis()
return vds
@staticmethod
def vis_data_helper(lines):
vd_tmp = []
for line in lines:
try:
utf8_line = line.decode('utf-8')
vd_tmp.append(json.loads(utf8_line))
except UnicodeDecodeError:
pass
return vd_tmp
def vis_data_batctl_legacy(self):
"""
Parse "batctl -m <mesh_interface> vd json -n"
into an array of dictionaries.
"""
output = subprocess.check_output(
['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n'])
lines = output.splitlines()
vds = self.vis_data_helper(lines)
return vds
def vis_data_batadv_vis(self):
"""
Parse "batadv-vis -i <mesh_interface> -f json"
into an array of dictionaries.
"""
output = subprocess.check_output(
['batadv-vis', '-i', self.mesh_interface, '-f', 'json'])
lines = output.splitlines()
return self.vis_data_helper(lines)
def gateway_list(self):
"""
Parse "batctl -m <mesh_interface> gwl -n"
into an array of dictionaries.
"""
output = subprocess.check_output(
['batctl', '-m', self.mesh_interface, 'gwl', '-n'])
output_utf8 = output.decode('utf-8')
lines = output_utf8.splitlines()
own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",
lines[0]).group(1)
gateways = []
gw_mode = self.gateway_mode()
if gw_mode['mode'] == 'server':
gateways.append(own_mac)
for line in lines:
gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line)
if gw_line:
gateways.append(gw_line.group(1))
return gateways
def gateway_mode(self):
"""
Parse "batctl -m <mesh_interface> gw"
"""
output = subprocess.check_output(
['batctl', '-m', self.mesh_interface, 'gw'])
elements = output.decode("utf-8").split()
mode = elements[0]
if mode == 'server':
return {'mode': 'server',
'bandwidth': elements[3]}
else:
return {'mode': mode}
if __name__ == "__main__":
bc = Batman()
vd = bc.vis_data()
gw = bc.gateway_list()
for x in vd:
print(x)
print(gw)
print(bc.gateway_mode())

lib/graph.py (new file, +76)

@@ -0,0 +1,76 @@
from functools import reduce
from itertools import chain
import networkx as nx
from lib.nodes import build_mac_table
def import_vis_data(graph, nodes, vis_data):
macs = build_mac_table(nodes)
    nodes_a = map(lambda d: 2 * [d['primary']],
                  filter(lambda d: 'primary' in d, vis_data))
    nodes_b = map(lambda d: [d['secondary'], d['of']],
                  filter(lambda d: 'secondary' in d, vis_data))
    graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))),
                             *zip(*chain(nodes_a, nodes_b))))
edges = filter(lambda d: 'neighbor' in d, vis_data)
graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges))
def mark_vpn(graph, vpn_macs):
components = map(frozenset, nx.weakly_connected_components(graph))
components = filter(vpn_macs.intersection, components)
nodes = reduce(lambda a, b: a | b, components, set())
for node in nodes:
for k, v in graph[node].items():
v['vpn'] = True
def to_multigraph(graph):
def f(a):
node = graph.node[a]
return node['primary'] if node else a
def map_node(node, data):
return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())
digraph = nx.MultiDiGraph()
digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))
return digraph
def merge_nodes(graph):
def merge_edges(data):
tq = min(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn)
multigraph = to_multigraph(graph)
digraph = nx.DiGraph()
digraph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
digraph.add_edges_from(edges)
return digraph
def to_undirected(graph):
multigraph = nx.MultiGraph()
multigraph.add_nodes_from(graph.nodes_iter(data=True))
multigraph.add_edges_from(graph.edges_iter(data=True))
def merge_edges(data):
tq = max(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)
graph = nx.Graph()
graph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
graph.add_edges_from(edges)
return graph
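
A sketch of the intended pipeline from raw vis data to the undirected
export graph; note the module uses the networkx 1.x API (node, nodes_iter,
edges_iter). The sample vis entries are invented:

    import networkx as nx
    from lib import graph as graphlib

    nodes = {}  # node_id -> node dict, as built by lib/nodes.py
    vis_data = [
        {'primary': 'aa:bb:cc:00:00:01'},
        {'router': 'aa:bb:cc:00:00:01',
         'neighbor': 'aa:bb:cc:00:00:02', 'label': '1.000'},
    ]
    vpn_macs = set()  # MACs of VPN interfaces, e.g. from Batman.gateway_list()

    g = nx.DiGraph()
    graphlib.import_vis_data(g, nodes, vis_data)
    graphlib.mark_vpn(g, vpn_macs)
    g = graphlib.merge_nodes(g)    # collapse secondary MACs onto primaries
    g = graphlib.to_undirected(g)  # keep max tq, flag bidirectional links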

lib/nodes.py (new file, +138)

@@ -0,0 +1,138 @@
from collections import Counter, defaultdict
from datetime import datetime
from functools import reduce
def build_mac_table(nodes):
macs = dict()
for node_id, node in nodes.items():
try:
for mac in node['nodeinfo']['network']['mesh_interfaces']:
macs[mac] = node_id
except KeyError:
pass
return macs
def prune_nodes(nodes, now, days):
prune = []
for node_id, node in nodes.items():
if 'lastseen' not in node:
prune.append(node_id)
continue
lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S')
        delta = (now - lastseen).total_seconds()
if delta >= days * 86400:
prune.append(node_id)
for node_id in prune:
del nodes[node_id]
def mark_online(node, now):
node['lastseen'] = now.isoformat()
node.setdefault('firstseen', now.isoformat())
node['flags']['online'] = True
def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos):
node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()})
node['nodeinfo'] = nodeinfo
node['flags']['online'] = False
node['flags']['gateway'] = False
if assume_online:
mark_online(node, now)
def reset_statistics(nodes):
for node in nodes.values():
node['statistics'] = {'clients': 0}
def import_statistics(nodes, statistics):
def add(node, statistics, target, source, f=lambda d: d):
try:
node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics))
except (KeyError, TypeError):
pass
macs = build_mac_table(nodes)
statistics = filter(lambda d: 'node_id' in d, statistics)
statistics = filter(lambda d: d['node_id'] in nodes, statistics)
for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics):
add(node, statistics, 'clients', ['clients', 'total'])
add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d))
add(node, statistics, 'uptime', ['uptime'])
add(node, statistics, 'loadavg', ['loadavg'])
add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total'])
add(node, statistics, 'rootfs_usage', ['rootfs_usage'])
def import_mesh_ifs_vis_data(nodes, vis_data):
macs = build_mac_table(nodes)
mesh_ifs = defaultdict(lambda: set())
for line in filter(lambda d: 'secondary' in d, vis_data):
primary = line['of']
mesh_ifs[primary].add(primary)
mesh_ifs[primary].add(line['secondary'])
def if_to_node(ifs):
a = filter(lambda d: d in macs, ifs)
a = map(lambda d: nodes[macs[d]], a)
try:
return next(a), ifs
except StopIteration:
return None
mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values()))
for v in mesh_nodes:
node = v[0]
try:
mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces'])
except KeyError:
mesh_ifs = set()
node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1])
def import_vis_clientcount(nodes, vis_data):
macs = build_mac_table(nodes)
data = filter(lambda d: d.get('label', None) == 'TT', vis_data)
data = filter(lambda d: d['router'] in macs, data)
data = map(lambda d: macs[d['router']], data)
for node_id, clientcount in Counter(data).items():
nodes[node_id]['statistics'].setdefault('clients', clientcount)
def mark_gateways(nodes, gateways):
macs = build_mac_table(nodes)
gateways = filter(lambda d: d in macs, gateways)
for node in map(lambda d: nodes[macs[d]], gateways):
node['flags']['gateway'] = True
def mark_vis_data_online(nodes, vis_data, now):
macs = build_mac_table(nodes)
online = set()
for line in vis_data:
if 'primary' in line:
online.add(line['primary'])
elif 'secondary' in line:
online.add(line['secondary'])
elif 'gateway' in line:
            # This matches clients' MACs.
            # On pre-Gluon nodes the primary MAC will be one of them.
online.add(line['gateway'])
for mac in filter(lambda d: d in macs, online):
mark_online(nodes[macs[mac]], now)
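
A sketch of one backend run over these helpers, in a plausible order; the
inputs are empty here, in production they come from lib.alfred and
lib.batman:

    from datetime import datetime
    import lib.nodes as nodes_lib

    nodes = {}  # persisted between runs, keyed by node_id
    now = datetime.utcnow()
    nodeinfos, statistics, vis_data = [], [], []

    nodes_lib.import_nodeinfo(nodes, nodeinfos, now)
    nodes_lib.reset_statistics(nodes)
    nodes_lib.import_statistics(nodes, statistics)
    nodes_lib.import_mesh_ifs_vis_data(nodes, vis_data)
    nodes_lib.import_vis_clientcount(nodes, vis_data)
    nodes_lib.mark_vis_data_online(nodes, vis_data, now)
    nodes_lib.prune_nodes(nodes, now, days=30)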

lib/rrddb.py (new file, +51)

@@ -0,0 +1,51 @@
#!/usr/bin/env python3
import time
import os
from lib.GlobalRRD import GlobalRRD
from lib.NodeRRD import NodeRRD
class RRD(object):
def __init__(self,
database_directory,
image_path,
display_time_global="7d",
display_time_node="1d"):
self.dbPath = database_directory
self.globalDb = GlobalRRD(self.dbPath)
self.imagePath = image_path
self.displayTimeGlobal = display_time_global
self.displayTimeNode = display_time_node
        self.currentTimeInt = (int(time.time()) // 60) * 60  # round down to a full minute
self.currentTime = str(self.currentTimeInt)
try:
os.stat(self.imagePath)
except OSError:
os.mkdir(self.imagePath)
def update_database(self, nodes):
        online_nodes = dict(filter(lambda d: d[1]['flags']['online'],
                                   nodes.items()))
        client_count = sum(map(lambda d: d['statistics']['clients'],
                               online_nodes.values()))
self.globalDb.update(len(online_nodes), client_count)
for node_id, node in online_nodes.items():
rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
rrd.update()
def update_images(self):
self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)
nodedb_files = os.listdir(self.dbPath)
for file_name in nodedb_files:
if not os.path.isfile(os.path.join(self.dbPath, file_name)):
continue
            node_name = os.path.basename(file_name).split('.')
            if len(node_name) == 2 and node_name[1] == 'rrd' and node_name[0] != 'nodes':
rrd = NodeRRD(os.path.join(self.dbPath, file_name))
rrd.graph(self.imagePath, self.displayTimeNode)
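
A sketch of driving the whole RRD layer, assuming rrdtool in PATH and a
nodes dict as produced by lib/nodes.py (paths invented):

    from lib.rrddb import RRD

    nodes = {}  # node_id -> node dict with flags/statistics filled in
    rrd = RRD('/var/lib/ffmap/db', '/var/www/rrd')
    rrd.update_database(nodes)  # nodes.rrd plus one <node_id>.rrd per online node
    rrd.update_images()         # globalGraph.png plus one PNG per node RRD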