fix a lot of pep8

Martin Weinelt 2015-03-24 16:49:37 +01:00 committed by Nils Schneider
parent 9a8d40ea9a
commit 10e10944a5
9 changed files with 397 additions and 357 deletions

GlobalRRD.py

@@ -2,6 +2,7 @@ import os
 import subprocess
 from RRD import RRD, DS, RRA
+
 class GlobalRRD(RRD):
     ds_list = [
         # Number of nodes available
@@ -10,14 +11,17 @@ class GlobalRRD(RRD):
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 31 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 744),
+        # ~5 years of 1 day samples
+        RRA('AVERAGE', 0.5, 1440, 1780),
     ]

     def __init__(self, directory):
         super().__init__(os.path.join(directory, "nodes.rrd"))
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)

     def update(self, nodeCount, clientCount):
         super().update({'nodes': nodeCount, 'clients': clientCount})
@@ -30,6 +34,5 @@ class GlobalRRD(RRD):
                 'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
                 'LINE1:nodes#F00:nodes\\l',
                 'DEF:clients=' + self.filename + ':clients:AVERAGE',
-                'LINE2:clients#00F:clients',
-        ]
+                'LINE2:clients#00F:clients']
         subprocess.check_output(args)
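
Side note for reviewers: the comments that moved above the RRA() calls can be cross-checked against the 60-second step that __init__ passes to ensure_sanity(); each archive covers step x PDPs-per-row x rows seconds. A minimal sketch in plain Python, no rrdtool needed (86400 is just seconds per day):

    step = 60  # base step passed to ensure_sanity()
    for pdp_per_row, rows, label in [(1, 120, '2 hours'),
                                     (60, 744, '31 days'),
                                     (1440, 1780, '~5 years')]:
        seconds = step * pdp_per_row * rows
        print('%4d rows x %6d s/row = %8.2f days (%s)'
              % (rows, step * pdp_per_row, seconds / 86400, label))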

NodeRRD.py

@@ -2,19 +2,24 @@ import os
 import subprocess
 from RRD import RRD, DS, RRA
+
 class NodeRRD(RRD):
     ds_list = [
         DS('upstate', 'GAUGE', 120, 0, 1),
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 5, 1440), # 5 days of 5 minute samples
-        RRA('AVERAGE', 0.5, 60, 720), # 30 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 5 days of 5 minute samples
+        RRA('AVERAGE', 0.5, 5, 1440),
+        # 30 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 720),
+        # 1 year of 12 hour samples
+        RRA('AVERAGE', 0.5, 720, 730),
     ]

-    def __init__(self, filename, node = None):
+    def __init__(self, filename, node=None):
         """
         Create a new RRD for a given node.
@@ -22,12 +27,13 @@ class NodeRRD(RRD):
         """
         self.node = node
         super().__init__(filename)
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)

     @property
     def imagename(self):
-        return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"
+        return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0])

+    # TODO: fix this, python does not support function overloading
     def update(self):
         super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']})
@@ -36,8 +42,8 @@ class NodeRRD(RRD):
         Create a graph in the given directory. The file will be named
         basename.png if the RRD file is named basename.rrd
         """
-        args = ['rrdtool','graph', os.path.join(directory, self.imagename),
-                '-s', '-' + timeframe ,
+        args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
+                '-s', '-' + timeframe,
                 '-w', '800',
                 '-h', '400',
                 '-l', '0',
@@ -48,6 +54,5 @@ class NodeRRD(RRD):
                 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                 'AREA:c#0F0:up\\l',
                 'AREA:d#F00:down\\l',
-                'LINE1:c#00F:clients connected\\l',
-                ]
+                'LINE1:c#00F:clients connected\\l']
         subprocess.check_output(args)
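
For context, this is how the renamed ensure_sanity() and the new imagename format string get exercised; a hypothetical sketch assuming rrdtool is on PATH, with an invented node dict that carries exactly the keys update() reads:

    from NodeRRD import NodeRRD

    node = {'flags': {'online': True}, 'statistics': {'clients': 5}}
    rrd = NodeRRD('deadbeef0001.rrd', node)  # ensure_sanity() creates the file
    rrd.update()                             # writes upstate=1, clients=5
    print(rrd.imagename)                     # -> deadbeef0001.png
    rrd.graph('.', '1d')                     # renders ./deadbeef0001.png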

RRD.py

@@ -1,19 +1,20 @@
 import subprocess
 import re
-import io
 import os
-from tempfile import TemporaryFile
 from operator import xor, eq
 from functools import reduce
 from itertools import starmap
 import math

+
 class RRDIncompatibleException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition and cannot be
     upgraded to it.
     """
     pass

+
 class RRDOutdatedException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition, but can be
@@ -25,7 +26,8 @@ if not hasattr(__builtins__, "FileNotFoundError"):
     class FileNotFoundError(Exception):
         pass

-class RRD:
+
+class RRD(object):
     """
     An RRD is a Round Robin Database, a database which forgets old data and
     aggregates multiple records into new ones.
@@ -49,7 +51,7 @@ class RRD:
     def _exec_rrdtool(self, cmd, *args, **kwargs):
         pargs = ["rrdtool", cmd, self.filename]
-        for k,v in kwargs.items():
+        for k, v in kwargs.items():
             pargs.extend(["--" + k, str(v)])
         pargs.extend(args)
         subprocess.check_output(pargs)
@@ -57,7 +59,7 @@ class RRD:
     def __init__(self, filename):
         self.filename = filename

-    def ensureSanity(self, ds_list, rra_list, **kwargs):
+    def ensure_sanity(self, ds_list, rra_list, **kwargs):
         """
         Create or upgrade the RRD file if necessary to contain all DS in
         ds_list. If it needs to be created, the RRAs in rra_list and any kwargs
@@ -65,13 +67,13 @@ class RRD:
         database are NOT modified!
         """
         try:
-            self.checkSanity(ds_list)
+            self.check_sanity(ds_list)
         except FileNotFoundError:
             self.create(ds_list, rra_list, **kwargs)
         except RRDOutdatedException:
             self.upgrade(ds_list)

-    def checkSanity(self, ds_list=()):
+    def check_sanity(self, ds_list=()):
         """
         Check if the RRD file exists and contains (at least) the DS listed in
         ds_list.
@@ -82,7 +84,8 @@ class RRD:
         if set(ds_list) - set(info['ds'].values()) != set():
             for ds in ds_list:
                 if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type:
-                    raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type))
+                    raise RRDIncompatibleException("%s is %s but should be %s" %
+                                                   (ds.name, ds.type, info['ds'][ds.name].type))
             else:
                 raise RRDOutdatedException()
@@ -106,7 +109,7 @@ class RRD:
                 old_ds = info['ds'][ds.name]
                 if info['ds'][ds.name].type != ds.type:
                     raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' %
-                        (ds.name, old_ds.type, ds.type))
+                                       (ds.name, old_ds.type, ds.type))
                 ds.index = old_ds.index
                 new_ds[ds.index] = ds
             else:
@@ -116,12 +119,11 @@ class RRD:
         dump = subprocess.Popen(
             ["rrdtool", "dump", self.filename],
-            stdout=subprocess.PIPE
-        )
+            stdout=subprocess.PIPE)

         restore = subprocess.Popen(
             ["rrdtool", "restore", "-", self.filename + ".new"],
-            stdin=subprocess.PIPE
-        )
+            stdin=subprocess.PIPE)
         echo = True
         ds_definitions = True
         for line in dump.stdout:
@@ -143,16 +145,14 @@ class RRD:
                         <value>%s</value>
                         <unknown_sec> %i </unknown_sec>
                     </ds>
-                """ % (
-                    ds.name,
-                    ds.type,
-                    ds.args[0],
-                    ds.args[1],
-                    ds.args[2],
-                    ds.last_ds,
-                    ds.value,
-                    ds.unknown_sec)
-                , "utf-8"))
+                """ % (ds.name,
+                       ds.type,
+                       ds.args[0],
+                       ds.args[1],
+                       ds.args[2],
+                       ds.last_ds,
+                       ds.value,
+                       ds.unknown_sec), "utf-8"))

             if b'</cdp_prep>' in line:
                 restore.stdin.write(added_ds_num*b"""
@@ -272,7 +272,8 @@ class RRD:
         self._cached_info = info
         return info

-class DS:
+
+class DS(object):
     """
     DS stands for Data Source and represents one line of data points in a Round
     Robin Database (RRD).
@@ -284,6 +285,7 @@ class DS:
     last_ds = 'U'
     value = 0
     unknown_sec = 0
+
     def __init__(self, name, dst, *args):
         self.name = name
         self.type = dst
@@ -293,7 +295,7 @@ class DS:
         return "DS:%s:%s:%s" % (
             self.name,
             self.type,
-            ":".join(map(str, self._nan_to_U_args()))
+            ":".join(map(str, self._nan_to_u_args()))
         )

     def __repr__(self):
@@ -305,22 +307,23 @@ class DS:
         )

     def __eq__(self, other):
-        return all(starmap(eq, zip(self._compare_keys(), other._compare_keys())))
+        return all(starmap(eq, zip(self.compare_keys(), other.compare_keys())))

     def __hash__(self):
-        return reduce(xor, map(hash, self._compare_keys()))
+        return reduce(xor, map(hash, self.compare_keys()))

-    def _nan_to_U_args(self):
+    def _nan_to_u_args(self):
         return tuple(
             'U' if type(arg) is float and math.isnan(arg)
             else arg
             for arg in self.args
         )

-    def _compare_keys(self):
-        return (self.name, self.type, self._nan_to_U_args())
+    def compare_keys(self):
+        return self.name, self.type, self._nan_to_u_args()

-class RRA:
+
+class RRA(object):
     def __init__(self, cf, *args):
         self.cf = cf
         self.args = args
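
The _nan_to_u_args()/compare_keys() renames keep an important subtlety intact: NaN never compares equal to itself, so DS definitions built with float('NaN') (as in NodeRRD.ds_list) would never match the on-disk definition unless NaN is mapped to rrdtool's 'U' ("unknown") first. A self-contained illustration of why the normalization matters for __eq__ and __hash__:

    import math

    def nan_to_u(args):
        # mirror of DS._nan_to_u_args(): NaN -> 'U'
        return tuple('U' if type(arg) is float and math.isnan(arg) else arg
                     for arg in args)

    print(float('NaN') == float('NaN'))              # False
    print(nan_to_u((120, 0, float('NaN'))) ==
          nan_to_u((120, 0, float('NaN'))))          # True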

alfred.py

@@ -1,15 +1,19 @@
 import subprocess
 import json
+
 def _fetch(data_type):
     output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)])
     return json.loads(output.decode("utf-8")).values()
+
 def nodeinfo():
     return _fetch(158)
+
 def statistics():
     return _fetch(159)
+
 def vis():
     return _fetch(160)
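
Usage is unchanged by the whitespace fixes; a sketch, assuming a running alfred daemon plus the alfred-json binary, and hedging on the record layout (gluon nodeinfo records usually carry a hostname field, but that is an assumption here):

    import alfred

    for info in alfred.nodeinfo():             # data type 158
        print(info.get('hostname', '<unknown>'))
    print(len(list(alfred.statistics())))      # data type 159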

backend.py

@@ -1,9 +1,11 @@
 #!/usr/bin/env python3
+"""
+backend.py - ffmap-backend runner
+https://github.com/ffnord/ffmap-backend
+"""
 import argparse
 import json
 import os
+import sys
 import networkx as nx
 from datetime import datetime
 from networkx.readwrite import json_graph
@@ -11,91 +13,89 @@ from networkx.readwrite import json_graph
 import alfred
 import nodes
 import graph
-from batman import batman
-from rrddb import rrd
+from batman import Batman
+from rrddb import RRD

-parser = argparse.ArgumentParser()
-parser.add_argument('-a', '--aliases',
-                    help='read aliases from FILE',
-                    default=[],
-                    action='append',
-                    metavar='FILE')
-parser.add_argument('-m', '--mesh', action='append',
-                    help='batman mesh interface')
-parser.add_argument('-d', '--destination-directory', action='store',
-                    help='destination directory for generated files',required=True)
-parser.add_argument('--vpn', action='append', metavar='MAC',
-                    help='assume MAC to be part of the VPN')
-parser.add_argument('--prune', metavar='DAYS',
-                    help='forget nodes offline for at least DAYS')
-
-args = parser.parse_args()
-options = vars(args)
-
-if not options['mesh']:
-    options['mesh'] = ['bat0']
-
-nodes_fn = os.path.join(options['destination_directory'], 'nodes.json')
-graph_fn = os.path.join(options['destination_directory'], 'graph.json')
-
-now = datetime.utcnow().replace(microsecond=0)
-
-try:
-    nodedb = json.load(open(nodes_fn))
-
-    # ignore if old format
-    if 'links' in nodedb:
-        raise
-except:
-    nodedb = {'nodes': dict()}
-
-nodedb['timestamp'] = now.isoformat()
-
-for node_id, node in nodedb['nodes'].items():
-    node['flags']['online'] = False
-
-nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True)
-
-for aliases in options['aliases']:
-    with open(aliases, 'r') as f:
-        nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False)
-
-nodes.reset_statistics(nodedb['nodes'])
-nodes.import_statistics(nodedb['nodes'], alfred.statistics())
-
-bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh'])))
-for vis_data, gateway_list in bm:
-    nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data)
-    nodes.import_vis_clientcount(nodedb['nodes'], vis_data)
-    nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now)
-    nodes.mark_gateways(nodedb['nodes'], gateway_list)
-
-if options['prune']:
-    nodes.prune_nodes(nodedb['nodes'], now, int(options['prune']))
-
-batadv_graph = nx.DiGraph()
-for vis_data, gateway_list in bm:
-    graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data)
-
-if options['vpn']:
-    graph.mark_vpn(batadv_graph, frozenset(options['vpn']))
-
-batadv_graph = graph.merge_nodes(batadv_graph)
-batadv_graph = graph.to_undirected(batadv_graph)
-
-with open(nodes_fn, 'w') as f:
-    json.dump(nodedb, f)
-
-with open(graph_fn, 'w') as f:
-    json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f)
-
-scriptdir = os.path.dirname(os.path.realpath(__file__))
-
-rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes')
-rrd.update_database(nodedb['nodes'])
-rrd.update_images()
+
+def main(params):
+    if not params['mesh']:
+        params['mesh'] = ['bat0']
+
+    nodes_fn = os.path.join(params['destination_directory'], 'nodes.json')
+    graph_fn = os.path.join(params['destination_directory'], 'graph.json')
+
+    now = datetime.utcnow().replace(microsecond=0)
+
+    with open(nodes_fn, 'r') as nodedb_handle:
+        nodedb = json.load(nodedb_handle)
+
+    # flush nodedb if it uses the old format
+    if 'links' in nodedb:
+        nodedb = {'nodes': dict()}
+
+    nodedb['timestamp'] = now.isoformat()
+
+    for node_id, node in nodedb['nodes'].items():
+        node['flags']['online'] = False
+
+    nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True)
+
+    for aliases in params['aliases']:
+        with open(aliases, 'r') as f:
+            nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False)
+
+    nodes.reset_statistics(nodedb['nodes'])
+    nodes.import_statistics(nodedb['nodes'], alfred.statistics())
+
+    bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(Batman, params['mesh'])))
+    for vis_data, gateway_list in bm:
+        nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data)
+        nodes.import_vis_clientcount(nodedb['nodes'], vis_data)
+        nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now)
+        nodes.mark_gateways(nodedb['nodes'], gateway_list)
+
+    if params['prune']:
+        nodes.prune_nodes(nodedb['nodes'], now, int(params['prune']))
+
+    batadv_graph = nx.DiGraph()
+    for vis_data, gateway_list in bm:
+        graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data)
+
+    if params['vpn']:
+        graph.mark_vpn(batadv_graph, frozenset(params['vpn']))
+
+    batadv_graph = graph.merge_nodes(batadv_graph)
+    batadv_graph = graph.to_undirected(batadv_graph)
+
+    with open(nodes_fn, 'w') as f:
+        json.dump(nodedb, f)
+
+    with open(graph_fn, 'w') as f:
+        json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f)
+
+    scriptdir = os.path.dirname(os.path.realpath(__file__))
+
+    rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes')
+    rrd.update_database(nodedb['nodes'])
+    rrd.update_images()
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+
+    parser.add_argument('-a', '--aliases',
+                        help='read aliases from FILE',
+                        default=[], action='append',
+                        metavar='FILE')
+    parser.add_argument('-m', '--mesh', action='append',
+                        help='batman mesh interface')
+    parser.add_argument('-d', '--destination-directory', action='store',
+                        help='destination directory for generated files',
+                        required=True)
+    parser.add_argument('--vpn', action='append', metavar='MAC',
+                        help='assume MAC to be part of the VPN')
+    parser.add_argument('--prune', metavar='DAYS',
+                        help='forget nodes offline for at least DAYS')
+
+    options = vars(parser.parse_args())
+
+    main(options)
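
Because the module-level script body moved into main(params), a run can now be driven without argparse, e.g. from a scheduler or test. A sketch with illustrative values; the dict keys mirror the argparse destinations above:

    from backend import main

    main({
        'aliases': ['aliases.json'],               # optional nodeinfo overlays
        'mesh': ['bat0'],                          # falls back to ['bat0'] if empty
        'destination_directory': '/var/www/data',  # nodes.json/graph.json land here
        'vpn': ['02:ca:fe:00:00:01'],              # MACs treated as VPN links
        'prune': '14',                             # drop nodes offline >= 14 days
    })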

batman.py

@@ -1,82 +1,90 @@
 #!/usr/bin/env python3
 import subprocess
 import json
 import re

-class batman:
-    """ Bindings for B.A.T.M.A.N. advanced batctl tool
+
+class Batman(object):
+    """
+    Bindings for B.A.T.M.A.N. Advanced
+    commandline interface "batctl"
     """
-    def __init__(self, mesh_interface = "bat0"):
+    def __init__(self, mesh_interface='bat0'):
         self.mesh_interface = mesh_interface

-    def vis_data(self,batadv_vis=False):
+    def vis_data(self, batadv_vis=False):
         vds = self.vis_data_batctl_legacy()
         if batadv_vis:
             vds += self.vis_data_batadv_vis()
         return vds

-    def vis_data_helper(self,lines):
-        vd = []
+    @staticmethod
+    def vis_data_helper(lines):
+        vd_tmp = []
         for line in lines:
             try:
-                utf8_line = line.decode("utf-8")
-                vd.append(json.loads(utf8_line))
-            except e:
+                utf8_line = line.decode('utf-8')
+                vd_tmp.append(json.loads(utf8_line))
+            except UnicodeDecodeError:
                 pass
-        return vd
+        return vd_tmp

     def vis_data_batctl_legacy(self):
-        """ Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
+        """
+        Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n'])
         lines = output.splitlines()
         vds = self.vis_data_helper(lines)
         return vds

     def vis_data_batadv_vis(self):
-        """ Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
+        """
+        Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
+        output = subprocess.check_output(['batadv-vis', '-i', self.mesh_interface, '-f', 'json'])
         lines = output.splitlines()
         return self.vis_data_helper(lines)

     def gateway_list(self):
-        """ Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
+        """
+        Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
-        output_utf8 = output.decode("utf-8")
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gwl', '-n'])
+        output_utf8 = output.decode('utf-8')
         lines = output_utf8.splitlines()

         own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1)

-        gw = []
+        gateways = []
         gw_mode = self.gateway_mode()
         if gw_mode['mode'] == 'server':
-            gw.append(own_mac)
+            gateways.append(own_mac)

         for line in lines:
             gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line)
             if gw_line:
-                gw.append(gw_line.group(1))
+                gateways.append(gw_line.group(1))

-        return gw
+        return gateways

     def gateway_mode(self):
-        """ Parse "batctl -m <mesh_interface> gw"
+        """
+        Parse "batctl -m <mesh_interface> gw"
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gw'])
         elements = output.decode("utf-8").split()
         mode = elements[0]
-        if mode == "server":
-            return {'mode': 'server', 'bandwidth': elements[3]}
+        if mode == 'server':
+            return {'mode': 'server',
+                    'bandwidth': elements[3]}
         else:
             return {'mode': mode}

+
 if __name__ == "__main__":
-    bc = batman()
+    bc = Batman()
     vd = bc.vis_data()
     gw = bc.gateway_list()
     for x in vd:
         print(x)
     print(gw)
     print(bc.gateway_mode())
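
The gwl parsing itself is unchanged apart from quoting, and the regex can be checked offline against a canned line. The sample format below is an assumption modeled on batctl gwl output of that era, not captured output:

    import re

    sample = "=> 02:ca:fe:00:00:01 (255) 66:77:88:99:aa:bb [      bat0]"
    match = re.match(r"^(?:=>)? +([0-9a-f:]+) ", sample)
    print(match.group(1))  # -> 02:ca:fe:00:00:01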

graph.py

@@ -1,66 +1,74 @@
 import networkx as nx
-from copy import deepcopy
 from functools import reduce
 from itertools import chain
 from nodes import build_mac_table

+
 def import_vis_data(graph, nodes, vis_data):
     macs = build_mac_table(nodes)
     nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data))
     nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data))
     graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b))))

     edges = filter(lambda d: 'neighbor' in d, vis_data)
     graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges))

+
 def mark_vpn(graph, vpn_macs):
     components = map(frozenset, nx.weakly_connected_components(graph))
     components = filter(vpn_macs.intersection, components)
     nodes = reduce(lambda a, b: a | b, components, set())

     for node in nodes:
         for k, v in graph[node].items():
             v['vpn'] = True

+
 def to_multigraph(graph):
     def f(a):
         node = graph.node[a]
         return node['primary'] if node else a

-    G = nx.MultiDiGraph()
-    map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())
-    G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
-    G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))
+    def map_node(node, data):
+        return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())

-    return G
+    digraph = nx.MultiDiGraph()
+    digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
+    digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))
+
+    return digraph

+
 def merge_nodes(graph):
     def merge_edges(data):
         tq = min(map(lambda d: d['tq'], data))
         vpn = all(map(lambda d: d.get('vpn', False), data))
         return dict(tq=tq, vpn=vpn)

-    G = to_multigraph(graph)
-    H = nx.DiGraph()
-    H.add_nodes_from(G.nodes_iter(data=True))
-    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
-    H.add_edges_from(edges)
+    multigraph = to_multigraph(graph)
+    digraph = nx.DiGraph()
+    digraph.add_nodes_from(multigraph.nodes_iter(data=True))
+    edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
+                                  for d in multigraph[e]] for e in multigraph])
+    digraph.add_edges_from(edges)

-    return H
+    return digraph

+
 def to_undirected(graph):
-    G = nx.MultiGraph()
-    G.add_nodes_from(graph.nodes_iter(data=True))
-    G.add_edges_from(graph.edges_iter(data=True))
+    multigraph = nx.MultiGraph()
+    multigraph.add_nodes_from(graph.nodes_iter(data=True))
+    multigraph.add_edges_from(graph.edges_iter(data=True))

     def merge_edges(data):
         tq = max(map(lambda d: d['tq'], data))
         vpn = all(map(lambda d: d.get('vpn', False), data))
         return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)

-    H = nx.Graph()
-    H.add_nodes_from(G.nodes_iter(data=True))
-    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
-    H.add_edges_from(edges)
+    graph = nx.Graph()
+    graph.add_nodes_from(multigraph.nodes_iter(data=True))
+    edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
+                                  for d in multigraph[e]] for e in multigraph])
+    graph.add_edges_from(edges)

-    return H
+    return graph
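
The renames (G/H to multigraph/digraph/graph) do not touch the merge semantics: to_undirected() keeps the better (max) tq of the two directions and flags a link bidirect only when both directions were seen. The rule in isolation, minus the networkx plumbing:

    def merge_edges(data):
        tq = max(d['tq'] for d in data)
        vpn = all(d.get('vpn', False) for d in data)
        return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)

    print(merge_edges([{'tq': 1.08, 'vpn': True}, {'tq': 1.02, 'vpn': True}]))
    # {'tq': 1.08, 'vpn': True, 'bidirect': True}
    print(merge_edges([{'tq': 1.08}]))
    # {'tq': 1.08, 'vpn': False, 'bidirect': False}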

nodes.py

@@ -2,128 +2,137 @@ from collections import Counter, defaultdict
 from datetime import datetime
 from functools import reduce

+
 def build_mac_table(nodes):
     macs = dict()
     for node_id, node in nodes.items():
         try:
             for mac in node['nodeinfo']['network']['mesh_interfaces']:
                 macs[mac] = node_id
         except KeyError:
             pass
     return macs

+
 def prune_nodes(nodes, now, days):
     prune = []
     for node_id, node in nodes.items():
-        if not 'lastseen' in node:
+        if 'lastseen' not in node:
             prune.append(node_id)
             continue

         lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S')
         delta = (now - lastseen).seconds

         if delta >= days * 86400:
             prune.append(node_id)

     for node_id in prune:
         del nodes[node_id]

+
 def mark_online(node, now):
     node['lastseen'] = now.isoformat()
     node.setdefault('firstseen', now.isoformat())
     node['flags']['online'] = True

+
 def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
     for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos):
         node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()})
         node['nodeinfo'] = nodeinfo
         node['flags']['online'] = False
         node['flags']['gateway'] = False

         if assume_online:
             mark_online(node, now)

+
 def reset_statistics(nodes):
     for node in nodes.values():
-        node['statistics'] = { 'clients': 0 }
+        node['statistics'] = {'clients': 0}

+
 def import_statistics(nodes, statistics):
     def add(node, statistics, target, source, f=lambda d: d):
         try:
             node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics))
-        except (KeyError,TypeError):
+        except (KeyError, TypeError):
             pass

     macs = build_mac_table(nodes)
     statistics = filter(lambda d: 'node_id' in d, statistics)
     statistics = filter(lambda d: d['node_id'] in nodes, statistics)
     for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics):
         add(node, statistics, 'clients', ['clients', 'total'])
         add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d))
         add(node, statistics, 'uptime', ['uptime'])
         add(node, statistics, 'loadavg', ['loadavg'])
         add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total'])
         add(node, statistics, 'rootfs_usage', ['rootfs_usage'])

+
 def import_mesh_ifs_vis_data(nodes, vis_data):
     macs = build_mac_table(nodes)

     mesh_ifs = defaultdict(lambda: set())
     for line in filter(lambda d: 'secondary' in d, vis_data):
         primary = line['of']
         mesh_ifs[primary].add(primary)
         mesh_ifs[primary].add(line['secondary'])

     def if_to_node(ifs):
         a = filter(lambda d: d in macs, ifs)
         a = map(lambda d: nodes[macs[d]], a)
         try:
-            return (next(a), ifs)
+            return next(a), ifs
         except StopIteration:
             return None

     mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values()))

     for v in mesh_nodes:
         node = v[0]

         try:
             mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces'])
         except KeyError:
             mesh_ifs = set()

         node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1])

+
 def import_vis_clientcount(nodes, vis_data):
     macs = build_mac_table(nodes)
     data = filter(lambda d: d.get('label', None) == 'TT', vis_data)
     data = filter(lambda d: d['router'] in macs, data)
     data = map(lambda d: macs[d['router']], data)

     for node_id, clientcount in Counter(data).items():
         nodes[node_id]['statistics'].setdefault('clients', clientcount)

+
 def mark_gateways(nodes, gateways):
     macs = build_mac_table(nodes)
     gateways = filter(lambda d: d in macs, gateways)

     for node in map(lambda d: nodes[macs[d]], gateways):
         node['flags']['gateway'] = True

+
 def mark_vis_data_online(nodes, vis_data, now):
     macs = build_mac_table(nodes)

     online = set()
     for line in vis_data:
         if 'primary' in line:
             online.add(line['primary'])
         elif 'secondary' in line:
             online.add(line['secondary'])
         elif 'gateway' in line:
             # This matches clients' MACs.
             # On pre-Gluon nodes the primary MAC will be one of it.
             online.add(line['gateway'])

     for mac in filter(lambda d: d in macs, online):
         mark_online(nodes[macs[mac]], now)
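
Most of these importers key off build_mac_table(), which inverts the mesh_interfaces MACs to node ids and silently skips nodes without that key; the same logic in miniature, with a hypothetical two-node nodedb:

    nodes = {
        'deadbeef0001': {'nodeinfo': {'network':
                         {'mesh_interfaces': ['02:ca:fe:00:00:01']}}},
        'deadbeef0002': {'nodeinfo': {}},  # no network key -> skipped (KeyError)
    }

    macs = dict()
    for node_id, node in nodes.items():
        try:
            for mac in node['nodeinfo']['network']['mesh_interfaces']:
                macs[mac] = node_id
        except KeyError:
            pass

    print(macs)  # {'02:ca:fe:00:00:01': 'deadbeef0001'}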

rrddb.py

@@ -1,50 +1,50 @@
 #!/usr/bin/env python3
-import subprocess
 import time
 import os
 from GlobalRRD import GlobalRRD
 from NodeRRD import NodeRRD

-class rrd:
-    def __init__( self
-                , databaseDirectory
-                , imagePath
-                , displayTimeGlobal = "7d"
-                , displayTimeNode = "1d"
-                ):
-        self.dbPath = databaseDirectory
+
+class RRD(object):
+    def __init__(self,
+                 database_directory,
+                 image_path,
+                 display_time_global="7d",
+                 display_time_node="1d"):
+
+        self.dbPath = database_directory
         self.globalDb = GlobalRRD(self.dbPath)
-        self.imagePath = imagePath
-        self.displayTimeGlobal = displayTimeGlobal
-        self.displayTimeNode = displayTimeNode
+        self.imagePath = image_path
+        self.displayTimeGlobal = display_time_global
+        self.displayTimeNode = display_time_node

         self.currentTimeInt = (int(time.time())/60)*60
         self.currentTime = str(self.currentTimeInt)

         try:
             os.stat(self.imagePath)
-        except:
+        except OSError:
             os.mkdir(self.imagePath)

     def update_database(self, nodes):
         online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items()))
         client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values()))

         self.globalDb.update(len(online_nodes), client_count)
         for node_id, node in online_nodes.items():
             rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
             rrd.update()

     def update_images(self):
         self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)

-        nodeDbFiles = os.listdir(self.dbPath)
+        nodedb_files = os.listdir(self.dbPath)

-        for fileName in nodeDbFiles:
-            if not os.path.isfile(os.path.join(self.dbPath, fileName)):
+        for file_name in nodedb_files:
+            if not os.path.isfile(os.path.join(self.dbPath, file_name)):
                 continue

-            nodeName = os.path.basename(fileName).split('.')
-            if nodeName[1] == 'rrd' and not nodeName[0] == "nodes":
-                rrd = NodeRRD(os.path.join(self.dbPath, fileName))
-                rrd.graph(self.imagePath, self.displayTimeNode)
+            node_name = os.path.basename(file_name).split('.')
+            if node_name[1] == 'rrd' and not node_name[0] == "nodes":
+                rrd = NodeRRD(os.path.join(self.dbPath, file_name))
+                rrd.graph(self.imagePath, self.displayTimeNode)
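
End to end, the renamed class is driven exactly as backend.py does above; a sketch with made-up paths and a one-node nodedb (rrdtool must be installed for the subprocess calls underneath):

    from rrddb import RRD

    nodedb = {'nodes': {'deadbeef0001': {
        'flags': {'online': True},
        'statistics': {'clients': 3},
    }}}

    rrd = RRD('./nodedb/', './images')    # image dir is created if missing
    rrd.update_database(nodedb['nodes'])  # global + per-node RRD updates
    rrd.update_images()                   # renders PNGs into ./images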