fix a lot of pep8

Martin Weinelt 2015-03-24 16:49:37 +01:00
parent 9195ea9650
commit 3291b2b6ba
9 changed files with 421 additions and 380 deletions

GlobalRRD.py

@@ -2,6 +2,7 @@ import os
 import subprocess
 from RRD import RRD, DS, RRA

+
 class GlobalRRD(RRD):
     ds_list = [
         # Number of nodes available
@@ -10,14 +11,17 @@ class GlobalRRD(RRD):
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 60, 744),   # 31 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 31 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 744),
+        # ~5 years of 1 day samples
+        RRA('AVERAGE', 0.5, 1440, 1780),
     ]

     def __init__(self, directory):
         super().__init__(os.path.join(directory, "nodes.rrd"))
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)

     def update(self, nodeCount, clientCount):
         super().update({'nodes': nodeCount, 'clients': clientCount})
@@ -30,6 +34,5 @@ class GlobalRRD(RRD):
                 'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
                 'LINE1:nodes#F00:nodes\\l',
                 'DEF:clients=' + self.filename + ':clients:AVERAGE',
-                'LINE2:clients#00F:clients',
-                ]
+                'LINE2:clients#00F:clients']
         subprocess.check_output(args)
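Moving the retention comments onto their own lines is what clears the line-length and inline-comment spacing complaints in this hunk. The durations in those comments follow from step * steps_per_row * rows; a minimal check, assuming the 60-second step that ensure_sanity() is called with:

step = 60  # seconds, matching ensure_sanity(..., step=60)
for steps_per_row, rows, label in [(1, 120, '2 hours'),
                                   (60, 744, '31 days'),
                                   (1440, 1780, '~5 years')]:
    covered = step * steps_per_row * rows  # seconds covered by this RRA
    print('%5d rows -> %8.1f days (%s)' % (rows, covered / 86400, label))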

NodeRRD.py

@@ -2,19 +2,24 @@ import os
 import subprocess
 from RRD import RRD, DS, RRA

+
 class NodeRRD(RRD):
     ds_list = [
         DS('upstate', 'GAUGE', 120, 0, 1),
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 5, 1440),   # 5 days of 5 minute samples
-        RRA('AVERAGE', 0.5, 60, 720),   # 30 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 720, 730),  # 1 year of 12 hour samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 5 days of 5 minute samples
+        RRA('AVERAGE', 0.5, 5, 1440),
+        # 30 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 720),
+        # 1 year of 12 hour samples
+        RRA('AVERAGE', 0.5, 720, 730),
     ]

-    def __init__(self, filename, node = None):
+    def __init__(self, filename, node=None):
         """
         Create a new RRD for a given node.
@@ -22,12 +27,13 @@ class NodeRRD(RRD):
         """
         self.node = node
         super().__init__(filename)
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)

     @property
     def imagename(self):
-        return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"
+        return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0])

+    # TODO: fix this, python does not support function overloading
     def update(self):
         super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']})
@@ -36,8 +42,8 @@ class NodeRRD(RRD):
         Create a graph in the given directory. The file will be named
         basename.png if the RRD file is named basename.rrd
         """
-        args = ['rrdtool','graph', os.path.join(directory, self.imagename),
-                '-s', '-' + timeframe ,
+        args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
+                '-s', '-' + timeframe,
                 '-w', '800',
                 '-h', '400',
                 '-l', '0',
@@ -48,6 +54,5 @@ class NodeRRD(RRD):
                 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                 'AREA:c#0F0:up\\l',
                 'AREA:d#F00:down\\l',
-                'LINE1:c#00F:clients connected\\l',
-                ]
+                'LINE1:c#00F:clients connected\\l']
         subprocess.check_output(args)
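The imagename change swaps string concatenation for str.format; both strip the extension and append ".png". A quick equivalence check (file names made up):

import os

for filename in ['/db/abcdef123456.rrd', 'nodes.rrd']:
    basename = os.path.basename(filename).rsplit('.', 2)[0]
    assert basename + ".png" == "{basename}.png".format(basename=basename)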

RRD.py

@@ -1,19 +1,20 @@
 import subprocess
 import re
-import io
 import os
-from tempfile import TemporaryFile
 from operator import xor, eq
 from functools import reduce
 from itertools import starmap
 import math

+
 class RRDIncompatibleException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition and cannot be
     upgraded to it.
     """
     pass

+
 class RRDOutdatedException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition, but can be
@@ -25,7 +26,8 @@ if not hasattr(__builtins__, "FileNotFoundError"):
     class FileNotFoundError(Exception):
         pass

-class RRD:
+
+class RRD(object):
     """
     An RRD is a Round Robin Database, a database which forgets old data and
     aggregates multiple records into new ones.
@@ -49,7 +51,7 @@ class RRD(object):
     def _exec_rrdtool(self, cmd, *args, **kwargs):
         pargs = ["rrdtool", cmd, self.filename]
-        for k,v in kwargs.items():
+        for k, v in kwargs.items():
             pargs.extend(["--" + k, str(v)])
         pargs.extend(args)
         subprocess.check_output(pargs)
@@ -57,7 +59,7 @@ class RRD(object):
     def __init__(self, filename):
         self.filename = filename

-    def ensureSanity(self, ds_list, rra_list, **kwargs):
+    def ensure_sanity(self, ds_list, rra_list, **kwargs):
         """
         Create or upgrade the RRD file if necessary to contain all DS in
         ds_list. If it needs to be created, the RRAs in rra_list and any kwargs
@@ -65,13 +67,13 @@ class RRD(object):
         database are NOT modified!
         """
         try:
-            self.checkSanity(ds_list)
+            self.check_sanity(ds_list)
         except FileNotFoundError:
             self.create(ds_list, rra_list, **kwargs)
         except RRDOutdatedException:
             self.upgrade(ds_list)

-    def checkSanity(self, ds_list=()):
+    def check_sanity(self, ds_list=()):
         """
         Check if the RRD file exists and contains (at least) the DS listed in
         ds_list.
@@ -82,7 +84,8 @@ class RRD(object):
         if set(ds_list) - set(info['ds'].values()) != set():
             for ds in ds_list:
                 if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type:
-                    raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type))
+                    raise RRDIncompatibleException("%s is %s but should be %s" %
+                                                   (ds.name, ds.type, info['ds'][ds.name].type))
             else:
                 raise RRDOutdatedException()
@@ -116,12 +119,11 @@ class RRD(object):
         dump = subprocess.Popen(
             ["rrdtool", "dump", self.filename],
-            stdout=subprocess.PIPE
-        )
+            stdout=subprocess.PIPE)

         restore = subprocess.Popen(
             ["rrdtool", "restore", "-", self.filename + ".new"],
-            stdin=subprocess.PIPE
-        )
+            stdin=subprocess.PIPE)
+
         echo = True
         ds_definitions = True
         for line in dump.stdout:
@@ -143,16 +145,14 @@ class RRD(object):
                     <value>%s</value>
                     <unknown_sec> %i </unknown_sec>
                     </ds>
-                    """ % (
-                    ds.name,
+                    """ % (ds.name,
                            ds.type,
                            ds.args[0],
                            ds.args[1],
                            ds.args[2],
                            ds.last_ds,
                            ds.value,
-                           ds.unknown_sec)
-                    , "utf-8"))
+                           ds.unknown_sec), "utf-8"))

                 if b'</cdp_prep>' in line:
                     restore.stdin.write(added_ds_num*b"""
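This hunk only reflows the % arguments, but the surrounding upgrade() deserves a gloss: it streams "rrdtool dump" straight into "rrdtool restore", injecting extra <ds> blocks on the way. A stripped-down sketch of that pipe pattern (file names made up; requires rrdtool on PATH):

import subprocess

dump = subprocess.Popen(['rrdtool', 'dump', 'old.rrd'],
                        stdout=subprocess.PIPE)
restore = subprocess.Popen(['rrdtool', 'restore', '-', 'new.rrd'],
                           stdin=subprocess.PIPE)
for line in dump.stdout:
    # a real upgrade rewrites or inserts XML here before forwarding
    restore.stdin.write(line)
restore.stdin.close()
restore.wait()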
@@ -272,7 +272,8 @@ class RRD(object):
         self._cached_info = info
         return info

-class DS:
+
+class DS(object):
     """
     DS stands for Data Source and represents one line of data points in a Round
     Robin Database (RRD).
@@ -284,6 +285,7 @@ class DS(object):
     last_ds = 'U'
     value = 0
     unknown_sec = 0

+
     def __init__(self, name, dst, *args):
         self.name = name
         self.type = dst
@@ -293,7 +295,7 @@ class DS(object):
         return "DS:%s:%s:%s" % (
             self.name,
             self.type,
-            ":".join(map(str, self._nan_to_U_args()))
+            ":".join(map(str, self._nan_to_u_args()))
         )

     def __repr__(self):
@@ -305,22 +307,23 @@ class DS(object):
         )

     def __eq__(self, other):
-        return all(starmap(eq, zip(self._compare_keys(), other._compare_keys())))
+        return all(starmap(eq, zip(self.compare_keys(), other.compare_keys())))

     def __hash__(self):
-        return reduce(xor, map(hash, self._compare_keys()))
+        return reduce(xor, map(hash, self.compare_keys()))

-    def _nan_to_U_args(self):
+    def _nan_to_u_args(self):
         return tuple(
             'U' if type(arg) is float and math.isnan(arg)
             else arg
             for arg in self.args
         )

-    def _compare_keys(self):
-        return (self.name, self.type, self._nan_to_U_args())
+    def compare_keys(self):
+        return self.name, self.type, self._nan_to_u_args()

-class RRA:
+
+class RRA(object):
     def __init__(self, cf, *args):
         self.cf = cf
         self.args = args
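compare_keys() losing its underscore widens the API, but the invariant that matters survives the rename: __eq__ and __hash__ both derive from the same key tuple, with NaN arguments normalised to 'U', so equal DS objects hash equally and the set arithmetic in check_sanity() still works. A condensed illustration (argument values made up):

from RRD import DS

nan = float('NaN')
a = DS('clients', 'GAUGE', 120, 0, nan)
b = DS('clients', 'GAUGE', 120, 0, nan)
assert a == b and hash(a) == hash(b)  # NaN maps to 'U' in compare_keys()
assert set([a]) - set([b]) == set()   # the containment test check_sanity() uses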

alfred.py

@@ -2,19 +2,24 @@
 import subprocess
 import json

+
 def _fetch(data_type):
     output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)])
     return json.loads(output.decode("utf-8")).values()

+
 def nodeinfo():
     return _fetch(158)

+
 def statistics():
     return _fetch(159)

+
 def vis():
     return _fetch(160)

+
 def aliases():
     alias = {}
     for node in nodeinfo():
@@ -22,7 +27,7 @@ def aliases():
         if 'location' in node:
             try:
                 node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
-            except:
+            except KeyError:
                 pass
         try:
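Narrowing the bare except to except KeyError is more than style: a bare except also swallows NameError, KeyboardInterrupt and friends, hiding real bugs. A sketch of the intended behaviour (node data made up):

node = {'location': {'latitude': 52.02}}  # longitude missing
try:
    gps = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
except KeyError:
    gps = None  # only the expected failure is silenced; typos still raise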

backend.py

@@ -1,9 +1,11 @@
 #!/usr/bin/env python3
+"""
+backend.py - ffmap-backend runner
+https://github.com/ffnord/ffmap-backend
+"""
 import argparse
 import json
 import os
-import sys

 import networkx as nx
 from datetime import datetime
 from networkx.readwrite import json_graph
@@ -11,91 +13,89 @@ from networkx.readwrite import json_graph
 import alfred
 import nodes
 import graph
-from batman import batman
-from rrddb import rrd
+from batman import Batman
+from rrddb import RRD

-parser = argparse.ArgumentParser()
-parser.add_argument('-a', '--aliases',
-                    help='read aliases from FILE',
-                    default=[],
-                    action='append',
-                    metavar='FILE')
-parser.add_argument('-m', '--mesh', action='append',
-                    help='batman mesh interface')
-parser.add_argument('-d', '--destination-directory', action='store',
-                    help='destination directory for generated files',required=True)
-parser.add_argument('--vpn', action='append', metavar='MAC',
-                    help='assume MAC to be part of the VPN')
-parser.add_argument('--prune', metavar='DAYS',
-                    help='forget nodes offline for at least DAYS')
-args = parser.parse_args()
-options = vars(args)
-if not options['mesh']:
-    options['mesh'] = ['bat0']
-nodes_fn = os.path.join(options['destination_directory'], 'nodes.json')
-graph_fn = os.path.join(options['destination_directory'], 'graph.json')
-now = datetime.utcnow().replace(microsecond=0)
-try:
-    nodedb = json.load(open(nodes_fn))
-    # ignore if old format
-    if 'links' in nodedb:
-        raise
-except:
-    nodedb = {'nodes': dict()}
-nodedb['timestamp'] = now.isoformat()
-for node_id, node in nodedb['nodes'].items():
-    node['flags']['online'] = False
-nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True)
-for aliases in options['aliases']:
-    with open(aliases, 'r') as f:
-        nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False)
-nodes.reset_statistics(nodedb['nodes'])
-nodes.import_statistics(nodedb['nodes'], alfred.statistics())
-bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh'])))
-for vis_data, gateway_list in bm:
-    nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data)
-    nodes.import_vis_clientcount(nodedb['nodes'], vis_data)
-    nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now)
-    nodes.mark_gateways(nodedb['nodes'], gateway_list)
-if options['prune']:
-    nodes.prune_nodes(nodedb['nodes'], now, int(options['prune']))
-batadv_graph = nx.DiGraph()
-for vis_data, gateway_list in bm:
-    graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data)
-if options['vpn']:
-    graph.mark_vpn(batadv_graph, frozenset(options['vpn']))
-batadv_graph = graph.merge_nodes(batadv_graph)
-batadv_graph = graph.to_undirected(batadv_graph)
-with open(nodes_fn, 'w') as f:
-    json.dump(nodedb, f)
-with open(graph_fn, 'w') as f:
-    json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f)
-scriptdir = os.path.dirname(os.path.realpath(__file__))
-rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes')
-rrd.update_database(nodedb['nodes'])
-rrd.update_images()
+
+def main(params):
+    if not params['mesh']:
+        params['mesh'] = ['bat0']
+
+    nodes_fn = os.path.join(params['destination_directory'], 'nodes.json')
+    graph_fn = os.path.join(params['destination_directory'], 'graph.json')
+
+    now = datetime.utcnow().replace(microsecond=0)
+
+    with open(nodes_fn, 'r') as nodedb_handle:
+        nodedb = json.load(nodedb_handle)
+
+    # flush nodedb if it uses the old format
+    if 'links' in nodedb:
+        nodedb = {'nodes': dict()}
+
+    nodedb['timestamp'] = now.isoformat()
+
+    for node_id, node in nodedb['nodes'].items():
+        node['flags']['online'] = False
+
+    nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True)
+
+    for aliases in params['aliases']:
+        with open(aliases, 'r') as f:
+            nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False)
+
+    nodes.reset_statistics(nodedb['nodes'])
+    nodes.import_statistics(nodedb['nodes'], alfred.statistics())
+
+    bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(Batman, params['mesh'])))
+
+    for vis_data, gateway_list in bm:
+        nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data)
+        nodes.import_vis_clientcount(nodedb['nodes'], vis_data)
+        nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now)
+        nodes.mark_gateways(nodedb['nodes'], gateway_list)

+    if params['prune']:
+        nodes.prune_nodes(nodedb['nodes'], now, int(params['prune']))
+
+    batadv_graph = nx.DiGraph()
+    for vis_data, gateway_list in bm:
+        graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data)
+
+    if params['vpn']:
+        graph.mark_vpn(batadv_graph, frozenset(params['vpn']))
+
+    batadv_graph = graph.merge_nodes(batadv_graph)
+    batadv_graph = graph.to_undirected(batadv_graph)
+
+    with open(nodes_fn, 'w') as f:
+        json.dump(nodedb, f)
+
+    with open(graph_fn, 'w') as f:
+        json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f)
+
+    scriptdir = os.path.dirname(os.path.realpath(__file__))
+    rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes')
+    rrd.update_database(nodedb['nodes'])
+    rrd.update_images()
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-a', '--aliases',
+                        help='read aliases from FILE',
+                        default=[], action='append',
+                        metavar='FILE')
+    parser.add_argument('-m', '--mesh', action='append',
+                        help='batman mesh interface')
+    parser.add_argument('-d', '--destination-directory', action='store',
+                        help='destination directory for generated files',
+                        required=True)
+    parser.add_argument('--vpn', action='append', metavar='MAC',
+                        help='assume MAC to be part of the VPN')
+    parser.add_argument('--prune', metavar='DAYS',
+                        help='forget nodes offline for at least DAYS')
+
+    options = vars(parser.parse_args())
+    main(options)
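Moving the argparse setup under if __name__ == '__main__': means backend can now be imported without side effects and driven programmatically. A hedged usage sketch (the dict keys mirror the argparse destinations above; the path is made up, and a working alfred/batctl environment is still required at run time):

import backend

backend.main({
    'mesh': ['bat0'],
    'aliases': [],
    'destination_directory': '/var/www/map/data',  # made-up path
    'vpn': None,
    'prune': None,
})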

batman.py

@@ -1,79 +1,87 @@
-#!/usr/bin/env python3
 import subprocess
 import json
 import re

-class batman:
-    """ Bindings for B.A.T.M.A.N. advanced batctl tool
+
+class Batman(object):
     """
-    def __init__(self, mesh_interface = "bat0"):
+    Bindings for B.A.T.M.A.N. Advanced
+    commandline interface "batctl"
+    """
+    def __init__(self, mesh_interface='bat0'):
         self.mesh_interface = mesh_interface

-    def vis_data(self,batadv_vis=False):
+    def vis_data(self, batadv_vis=False):
         vds = self.vis_data_batctl_legacy()
         if batadv_vis:
             vds += self.vis_data_batadv_vis()
         return vds

-    def vis_data_helper(self,lines):
-        vd = []
+    @staticmethod
+    def vis_data_helper(lines):
+        vd_tmp = []
         for line in lines:
             try:
-                utf8_line = line.decode("utf-8")
-                vd.append(json.loads(utf8_line))
-            except e:
+                utf8_line = line.decode('utf-8')
+                vd_tmp.append(json.loads(utf8_line))
+            except UnicodeDecodeError:
                 pass
-        return vd
+        return vd_tmp

     def vis_data_batctl_legacy(self):
-        """ Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
+        Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
+        """
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n'])
         lines = output.splitlines()
         vds = self.vis_data_helper(lines)
         return vds

     def vis_data_batadv_vis(self):
-        """ Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
+        Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
+        """
+        output = subprocess.check_output(['batadv-vis', '-i', self.mesh_interface, '-f', 'json'])
         lines = output.splitlines()
         return self.vis_data_helper(lines)

     def gateway_list(self):
-        """ Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
-        output_utf8 = output.decode("utf-8")
+        Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
+        """
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gwl', '-n'])
+        output_utf8 = output.decode('utf-8')
         lines = output_utf8.splitlines()

         own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1)

-        gw = []
+        gateways = []
         gw_mode = self.gateway_mode()
         if gw_mode['mode'] == 'server':
-            gw.append(own_mac)
+            gateways.append(own_mac)

         for line in lines:
             gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line)
             if gw_line:
-                gw.append(gw_line.group(1))
+                gateways.append(gw_line.group(1))

-        return gw
+        return gateways

     def gateway_mode(self):
-        """ Parse "batctl -m <mesh_interface> gw"
         """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
+        Parse "batctl -m <mesh_interface> gw"
+        """
+        output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gw'])
         elements = output.decode("utf-8").split()
         mode = elements[0]
-        if mode == "server":
-            return {'mode': 'server', 'bandwidth': elements[3]}
+        if mode == 'server':
+            return {'mode': 'server',
+                    'bandwidth': elements[3]}
         else:
             return {'mode': mode}

+
 if __name__ == "__main__":
-    bc = batman()
+    bc = Batman()
     vd = bc.vis_data()
     gw = bc.gateway_list()
     for x in vd:
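vis_data_helper touches no instance state, so the @staticmethod decorator is accurate and the helper can be exercised without constructing a Batman (which would shell out to batctl). A sketch with made-up input:

import json
from batman import Batman

lines = [json.dumps({'router': 'de:ad:be:ef:00:01'}).encode('utf-8'),
         b'\xff\xfe not valid utf-8']  # decode fails, line is skipped
assert Batman.vis_data_helper(lines) == [{'router': 'de:ad:be:ef:00:01'}]

Note that only UnicodeDecodeError is caught now; a decodable line that is not JSON will still raise, whereas the old "except e:" was itself a NameError waiting to happen.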

graph.py

@@ -1,9 +1,9 @@
 import networkx as nx
-from copy import deepcopy
 from functools import reduce
 from itertools import chain

 from nodes import build_mac_table

+
 def import_vis_data(graph, nodes, vis_data):
     macs = build_mac_table(nodes)
     nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data))
@@ -13,6 +13,7 @@ def import_vis_data(graph, nodes, vis_data):
     edges = filter(lambda d: 'neighbor' in d, vis_data)
     graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges))

+
 def mark_vpn(graph, vpn_macs):
     components = map(frozenset, nx.weakly_connected_components(graph))
     components = filter(vpn_macs.intersection, components)
@@ -22,17 +23,21 @@ def mark_vpn(graph, vpn_macs):
         for k, v in graph[node].items():
             v['vpn'] = True

+
 def to_multigraph(graph):
     def f(a):
         node = graph.node[a]
         return node['primary'] if node else a

-    G = nx.MultiDiGraph()
-    map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())
-    G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
-    G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))
+    def map_node(node, data):
+        return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())

-    return G
+    digraph = nx.MultiDiGraph()
+    digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
+    digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True))))
+
+    return digraph

+
 def merge_nodes(graph):
     def merge_edges(data):
@@ -40,27 +45,30 @@ def merge_nodes(graph):
         vpn = all(map(lambda d: d.get('vpn', False), data))
         return dict(tq=tq, vpn=vpn)

-    G = to_multigraph(graph)
-    H = nx.DiGraph()
-    H.add_nodes_from(G.nodes_iter(data=True))
-    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
-    H.add_edges_from(edges)
+    multigraph = to_multigraph(graph)
+    digraph = nx.DiGraph()
+    digraph.add_nodes_from(multigraph.nodes_iter(data=True))
+    edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
+                                  for d in multigraph[e]] for e in multigraph])
+    digraph.add_edges_from(edges)

-    return H
+    return digraph

+
 def to_undirected(graph):
-    G = nx.MultiGraph()
-    G.add_nodes_from(graph.nodes_iter(data=True))
-    G.add_edges_from(graph.edges_iter(data=True))
+    multigraph = nx.MultiGraph()
+    multigraph.add_nodes_from(graph.nodes_iter(data=True))
+    multigraph.add_edges_from(graph.edges_iter(data=True))

     def merge_edges(data):
         tq = max(map(lambda d: d['tq'], data))
         vpn = all(map(lambda d: d.get('vpn', False), data))
         return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)

-    H = nx.Graph()
-    H.add_nodes_from(G.nodes_iter(data=True))
-    edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G])
-    H.add_edges_from(edges)
+    graph = nx.Graph()
+    graph.add_nodes_from(multigraph.nodes_iter(data=True))
+    edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values()))
+                                  for d in multigraph[e]] for e in multigraph])
+    graph.add_edges_from(edges)

-    return H
+    return graph
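Turning the map_node lambda into a def is the E731 fix; behaviour is unchanged and the function gains a proper __name__ for tracebacks. An equivalence check with made-up data:

map_node_old = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())

def map_node(node, data):
    return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict())

data = {'primary': 'aa:bb:cc', 'node_id': 'n1'}
assert map_node('x', data) == map_node_old('x', data) == ('aa:bb:cc', {'node_id': 'n1'})
assert map_node('x', {}) == ('x', {})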

nodes.py

@@ -2,6 +2,7 @@ from collections import Counter, defaultdict
 from datetime import datetime
 from functools import reduce

+
 def build_mac_table(nodes):
     macs = dict()
     for node_id, node in nodes.items():
@@ -10,13 +11,13 @@ def build_mac_table(nodes):
                 macs[mac] = node_id
             except KeyError:
                 pass

     return macs

 def prune_nodes(nodes, now, days):
     prune = []
     for node_id, node in nodes.items():
-        if not 'lastseen' in node:
+        if 'lastseen' not in node:
             prune.append(node_id)
             continue
@@ -26,14 +27,16 @@ def prune_nodes(nodes, now, days):
         if delta >= days * 86400:
             prune.append(node_id)

-    for node_id in prune:
-        del nodes[node_id]
+    for prune_key in prune:
+        del nodes[prune_key]

+
 def mark_online(node, now):
     node['lastseen'] = now.isoformat()
     node.setdefault('firstseen', now.isoformat())
     node['flags']['online'] = True

+
 def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
     for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos):
         node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()})
@@ -44,15 +47,17 @@ def import_nodeinfo(nodes, nodeinfos, now, assume_online=False):
         if assume_online:
             mark_online(node, now)

+
 def reset_statistics(nodes):
     for node in nodes.values():
-        node['statistics'] = { 'clients': 0 }
+        node['statistics'] = {'clients': 0}

+
 def import_statistics(nodes, statistics):
     def add(node, statistics, target, source, f=lambda d: d):
         try:
             node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics))
-        except (KeyError,TypeError):
+        except (KeyError, TypeError):
             pass

     macs = build_mac_table(nodes)
@@ -66,6 +71,7 @@ def import_statistics(nodes, statistics):
         add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total'])
         add(node, statistics, 'rootfs_usage', ['rootfs_usage'])

+
 def import_mesh_ifs_vis_data(nodes, vis_data):
     macs = build_mac_table(nodes)
@@ -79,7 +85,7 @@ def import_mesh_ifs_vis_data(nodes, vis_data):
         a = filter(lambda d: d in macs, ifs)
         a = map(lambda d: nodes[macs[d]], a)
         try:
-            return (next(a), ifs)
+            return next(a), ifs
         except StopIteration:
             return None
@@ -95,6 +101,7 @@ def import_mesh_ifs_vis_data(nodes, vis_data):
         node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1])

+
 def import_vis_clientcount(nodes, vis_data):
     macs = build_mac_table(nodes)
     data = filter(lambda d: d.get('label', None) == 'TT', vis_data)
@@ -104,6 +111,7 @@ def import_vis_clientcount(nodes, vis_data):
     for node_id, clientcount in Counter(data).items():
         nodes[node_id]['statistics'].setdefault('clients', clientcount)

+
 def mark_gateways(nodes, gateways):
     macs = build_mac_table(nodes)
     gateways = filter(lambda d: d in macs, gateways)
@@ -111,6 +119,7 @@ def mark_gateways(nodes, gateways):
     for node in map(lambda d: nodes[macs[d]], gateways):
         node['flags']['gateway'] = True

+
 def mark_vis_data_online(nodes, vis_data, now):
     macs = build_mac_table(nodes)
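The prune rename keeps the two-phase shape (collect keys, then delete), which is load-bearing: deleting from a dict while iterating it raises RuntimeError in Python 3. A minimal demonstration:

nodes = {'a': 1, 'b': 2}
# for key in nodes:       # RuntimeError: dictionary changed size during iteration
#     del nodes[key]
prune = [key for key in nodes]  # phase 1: collect
for prune_key in prune:         # phase 2: delete
    del nodes[prune_key]
assert nodes == {}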

rrddb.py

@@ -1,29 +1,29 @@
 #!/usr/bin/env python3
-import subprocess
 import time
 import os

 from GlobalRRD import GlobalRRD
 from NodeRRD import NodeRRD

-class rrd:
-    def __init__( self
-                , databaseDirectory
-                , imagePath
-                , displayTimeGlobal = "7d"
-                , displayTimeNode = "1d"
-                ):
-        self.dbPath = databaseDirectory
+
+class RRD(object):
+    def __init__(self,
+                 database_directory,
+                 image_path,
+                 display_time_global="7d",
+                 display_time_node="1d"):
+        self.dbPath = database_directory
         self.globalDb = GlobalRRD(self.dbPath)
-        self.imagePath = imagePath
-        self.displayTimeGlobal = displayTimeGlobal
-        self.displayTimeNode = displayTimeNode
+        self.imagePath = image_path
+        self.displayTimeGlobal = display_time_global
+        self.displayTimeNode = display_time_node
         self.currentTimeInt = (int(time.time())/60)*60
         self.currentTime = str(self.currentTimeInt)

         try:
             os.stat(self.imagePath)
-        except:
+        except OSError:
             os.mkdir(self.imagePath)

     def update_database(self, nodes):
@@ -38,13 +38,13 @@ class rrd:
     def update_images(self):
         self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)

-        nodeDbFiles = os.listdir(self.dbPath)
+        nodedb_files = os.listdir(self.dbPath)

-        for fileName in nodeDbFiles:
-            if not os.path.isfile(os.path.join(self.dbPath, fileName)):
+        for file_name in nodedb_files:
+            if not os.path.isfile(os.path.join(self.dbPath, file_name)):
                 continue

-            nodeName = os.path.basename(fileName).split('.')
-            if nodeName[1] == 'rrd' and not nodeName[0] == "nodes":
-                rrd = NodeRRD(os.path.join(self.dbPath, fileName))
+            node_name = os.path.basename(file_name).split('.')
+            if node_name[1] == 'rrd' and not node_name[0] == "nodes":
+                rrd = NodeRRD(os.path.join(self.dbPath, file_name))
                 rrd.graph(self.imagePath, self.displayTimeNode)
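The snake_case rename leaves one sharp edge in place: split('.') assumes exactly one dot, so node_name[1] raises IndexError for a stray dot-less file in dbPath. A more defensive alternative (not what this commit does) would be os.path.splitext:

import os

def is_node_rrd(file_name):
    base, ext = os.path.splitext(os.path.basename(file_name))
    return ext == '.rrd' and base != 'nodes'

assert is_node_rrd('deadbeef.rrd')
assert not is_node_rrd('nodes.rrd')
assert not is_node_rrd('README')  # plain split('.')[1] would raise IndexError here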