From 56b884b810f11d61d9e6305548d68dd311b2823e Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 29 Jun 2014 11:20:56 +0200 Subject: [PATCH 01/97] remove dead code --- hostid.py | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 hostid.py diff --git a/hostid.py b/hostid.py deleted file mode 100644 index 2b4038e..0000000 --- a/hostid.py +++ /dev/null @@ -1,13 +0,0 @@ -import re -from functools import reduce - -def mac_to_hostid(mac): - int_mac = list(map(lambda x: int(x, 16), mac.split(":"))) - int_mac[0] ^= 2 - bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:]) - return reduce(lambda a, i: - [a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:], - range(0, 4), - [""] + list(bytes) - ) - From 9f546be0c74ae069cc152aac3fc7c41ced68170b Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 29 Jun 2014 22:47:29 +0200 Subject: [PATCH 02/97] persistent state (mostly for gluon nodes), prune after 30d --- bat2nodes.py | 11 ++++++++++- node.py | 1 + nodedb.py | 46 +++++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 56 insertions(+), 2 deletions(-) diff --git a/bat2nodes.py b/bat2nodes.py index 921b548..ce6e596 100755 --- a/bat2nodes.py +++ b/bat2nodes.py @@ -4,6 +4,7 @@ import json import fileinput import argparse import os +import time from batman import batman from alfred import alfred @@ -43,7 +44,8 @@ args = parser.parse_args() options = vars(args) -db = NodeDB() +db = NodeDB(int(time.time())) + if options['mesh']: for mesh_interface in options['mesh']: bm = batman(mesh_interface) @@ -69,6 +71,13 @@ db.count_clients() if options['obscure']: db.obscure_clients() +db.load_state("state.json") + +# remove nodes that have been offline for more than 30 days +db.prune_offline(time.time() - 30*86400) + +db.dump_state("state.json") + scriptdir = os.path.dirname(os.path.realpath(__file__)) m = D3MapBuilder(db) diff --git a/node.py b/node.py index 0fe35fb..a75f982 100644 --- a/node.py +++ b/node.py @@ -12,6 +12,7 @@ 
class Node(): self.gps = None self.firmware = None self.clientcount = 0 + self.lastseen = 0 def add_mac(self, mac): mac = mac.lower() diff --git a/nodedb.py b/nodedb.py index fa9caed..973861d 100644 --- a/nodedb.py +++ b/nodedb.py @@ -5,7 +5,8 @@ from node import Node, Interface from link import Link, LinkConnector class NodeDB: - def __init__(self): + def __init__(self, time=0): + self.time = time self._nodes = [] self._links = [] @@ -18,6 +19,46 @@ class NodeDB: def get_nodes(self): return self._nodes + # remove all offlines nodes with lastseen < timestamp + def prune_offline(self, timestamp): + self._nodes = list(filter(lambda x: x.lastseen >= timestamp, self._nodes)) + + # write persistent state to file + def dump_state(self, filename): + obj = [] + + for node in self._nodes: + if node.flags['client']: + continue + + obj.append({ 'id': node.id + , 'name': node.name + , 'lastseen': node.lastseen + , 'geo': node.gps + }) + + with open(filename, "w") as f: + json.dump(obj, f) + + # load persistent state from file + def load_state(self, filename): + try: + with open(filename, "r") as f: + obj = json.load(f) + for n in obj: + try: + node = self.maybe_node_by_id(n['id']) + except: + node = Node() + node.id = n['id'] + node.name = n['name'] + node.lastseen = n['lastseen'] + node.gps = n['geo'] + self._nodes.append(node) + + except: + pass + def maybe_node_by_fuzzy_mac(self, mac): mac_a = mac.lower() @@ -51,6 +92,7 @@ class NodeDB: node = self.maybe_node_by_mac((x['of'], x['secondary'])) except: node = Node() + node.lastseen = self.time node.flags['online'] = True if 'legacy' in x: node.flags['legacy'] = True @@ -66,6 +108,7 @@ class NodeDB: node = self.maybe_node_by_mac((x['router'], )) except: node = Node() + node.lastseen = self.time node.flags['online'] = True if 'legacy' in x: node.flags['legacy'] = True @@ -95,6 +138,7 @@ class NodeDB: node = self.maybe_node_by_mac((x['neighbor'], )) except: node = Node() + node.lastseen = self.time node.flags['online'] = True if 
x['label'] == 'TT': node.flags['client'] = True From 263dd4ceff65c40d90b7f40937986bf3612d8f45 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 5 Jul 2014 20:23:06 +0200 Subject: [PATCH 03/97] alfred.py: use gzip (requires alfred-json v0.2) --- alfred.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alfred.py b/alfred.py index 6d926bb..b956026 100755 --- a/alfred.py +++ b/alfred.py @@ -7,7 +7,7 @@ class alfred: self.request_data_type = request_data_type def aliases(self): - output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"]) + output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json","-z"]) alfred_data = json.loads(output.decode("utf-8")) alias = {} for mac,node in alfred_data.items(): From 6fc1423124def6343a0aeb456c2af269e744b18d Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Fri, 21 Feb 2014 15:27:19 +0100 Subject: [PATCH 04/97] Make handling of node attributes more flexible. This commit makes Nodes special dicts that return None-like objects for inexistent keys, making it a dynamic attribute store. Also, it removes the D3MapBuilder and moves its logic to the Node and Link classes' newly introduced export() method. Only they need to be changed to populate the final nodes.json with more attributes. 
--- alfred.py | 15 ++++----------- bat2nodes.py | 8 +++++--- d3mapbuilder.py | 36 ------------------------------------ ffhlwiki.py | 2 +- json_encoder.py | 13 +++++++++++++ link.py | 13 +++++++++++-- node.py | 48 +++++++++++++++++++++++++++++++++++++++++++----- nodedb.py | 20 ++++++++------------ 8 files changed, 85 insertions(+), 70 deletions(-) delete mode 100644 d3mapbuilder.py create mode 100644 json_encoder.py diff --git a/alfred.py b/alfred.py index 6d926bb..b8aa1e2 100755 --- a/alfred.py +++ b/alfred.py @@ -12,16 +12,11 @@ class alfred: alias = {} for mac,node in alfred_data.items(): node_alias = {} - if 'location' in node: - try: - node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) - except: - pass + for key in node: + node_alias[key] = node[key] - try: - node_alias['firmware'] = node['software']['firmware']['release'] - except KeyError: - pass + if 'location' in node: + node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']] try: node_alias['id'] = node['network']['mac'] @@ -30,8 +25,6 @@ class alfred: if 'hostname' in node: node_alias['name'] = node['hostname'] - elif 'name' in node: - node_alias['name'] = node['name'] if len(node_alias): alias[mac] = node_alias return alias diff --git a/bat2nodes.py b/bat2nodes.py index 921b548..e1fde6e 100755 --- a/bat2nodes.py +++ b/bat2nodes.py @@ -4,12 +4,13 @@ import json import fileinput import argparse import os +import datetime from batman import batman from alfred import alfred from rrd import rrd from nodedb import NodeDB -from d3mapbuilder import D3MapBuilder +from json_encoder import CustomJSONEncoder # Force encoding to UTF-8 import locale # Ensures that subsequent open()s @@ -71,11 +72,12 @@ if options['obscure']: scriptdir = os.path.dirname(os.path.realpath(__file__)) -m = D3MapBuilder(db) +exported = db.export() +exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()} #Write nodes json 
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w') -nodes_json.write(m.build()) +json.dump(exported, nodes_json, cls=CustomJSONEncoder) nodes_json.close() #Move to destination diff --git a/d3mapbuilder.py b/d3mapbuilder.py deleted file mode 100644 index ff7589f..0000000 --- a/d3mapbuilder.py +++ /dev/null @@ -1,36 +0,0 @@ -import json -import datetime - -class D3MapBuilder: - def __init__(self, db): - self._db = db - - def build(self): - output = dict() - - now = datetime.datetime.utcnow().replace(microsecond=0) - - nodes = self._db.get_nodes() - - output['nodes'] = [{'name': x.name, 'id': x.id, - 'macs': ', '.join(x.macs), - 'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None, - 'firmware': x.firmware, - 'flags': x.flags, - 'clientcount': x.clientcount - } for x in nodes] - - links = self._db.get_links() - - output['links'] = [{'source': x.source.id, 'target': x.target.id, - 'quality': x.quality, - 'type': x.type, - 'id': x.id - } for x in links] - - output['meta'] = { - 'timestamp': now.isoformat() - } - - return json.dumps(output) - diff --git a/ffhlwiki.py b/ffhlwiki.py index c1ba01e..588ae72 100755 --- a/ffhlwiki.py +++ b/ffhlwiki.py @@ -71,7 +71,7 @@ def import_wikigps(url): mac = data[0].strip() if data[1]: - alias['gps'] = data[1].strip() + alias['geo'] = [float(x) for x in data[1].strip().split(' ')] if data[2]: alias['name'] = data[2].strip() diff --git a/json_encoder.py b/json_encoder.py new file mode 100644 index 0000000..8d62771 --- /dev/null +++ b/json_encoder.py @@ -0,0 +1,13 @@ +from json import JSONEncoder + +class CustomJSONEncoder(JSONEncoder): + """ + JSON encoder that uses an object's __json__() method to convert it to + something JSON-compatible. 
+ """ + def default(self, obj): + try: + return obj.__json__() + except AttributeError: + pass + return super().default(obj) diff --git a/link.py b/link.py index 896079b..b161608 100644 --- a/link.py +++ b/link.py @@ -1,11 +1,20 @@ class Link(): def __init__(self): self.id = None - self.source = None - self.target = None + self.source = LinkConnector() + self.target = LinkConnector() self.quality = None self.type = None + def export(self): + return { + 'source': self.source.id, + 'target': self.target.id, + 'quality': self.quality, + 'type': self.type, + 'id': self.id + } + class LinkConnector(): def __init__(self): self.id = None diff --git a/node.py b/node.py index 0fe35fb..504768a 100644 --- a/node.py +++ b/node.py @@ -1,4 +1,31 @@ -class Node(): +from collections import defaultdict + +class NoneDict: + """ + A NoneDict acts like None but returns a NoneDict for every item in it. + + This is similar to the behaviour of collections.defaultdict in that even + previously inexistent keys can be accessed, but there is nothing stored + permanently. + """ + __repr__ = lambda self: 'NoneDict()' + __bool__ = lambda self: False + __getitem__ = lambda self, k: NoneDict() + __json__ = lambda self: None + def __setitem__(self, key, value): + raise RuntimeError("NoneDict is readonly") + +class casualdict(defaultdict): + """ + This special defaultdict returns a NoneDict for inexistent items. Also, its + items can be accessed as attributed as well. 
+ """ + def __init__(self): + super().__init__(NoneDict) + __getattr__ = defaultdict.__getitem__ + __setattr__ = defaultdict.__setitem__ + +class Node(casualdict): def __init__(self): self.name = "" self.id = "" @@ -9,9 +36,7 @@ class Node(): "gateway": False, "client": False }) - self.gps = None - self.firmware = None - self.clientcount = 0 + super().__init__() def add_mac(self, mac): mac = mac.lower() @@ -25,7 +50,20 @@ class Node(): def __repr__(self): return self.macs.__repr__() + def export(self): + """ + Return a dict that contains all attributes of the Node that are supposed to + be exported to other applications. + """ + return { + "name": self.name, + "id": self.id, + "macs": list(self.macs), + "geo": self.geo, + "firmware": self.software['firmware']['release'], + "flags": self.flags + } + class Interface(): def __init__(self): self.vpn = False - diff --git a/nodedb.py b/nodedb.py index fa9caed..e5ff30e 100644 --- a/nodedb.py +++ b/nodedb.py @@ -1,4 +1,3 @@ -import json from functools import reduce from collections import defaultdict from node import Node, Interface @@ -18,6 +17,12 @@ class NodeDB: def get_nodes(self): return self._nodes + def export(self): + return { + 'nodes': [node.export() for node in self.get_nodes()], + 'links': [link.export() for link in self.get_links()], + } + def maybe_node_by_fuzzy_mac(self, mac): mac_a = mac.lower() @@ -179,21 +184,12 @@ class NodeDB: node.add_mac(mac) self._nodes.append(node) - if 'name' in alias: - node.name = alias['name'] + for key in alias: + node[key] = alias[key] if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces: node.interfaces[mac].vpn = True - if 'gps' in alias: - node.gps = alias['gps'] - - if 'firmware' in alias: - node.firmware = alias['firmware'] - - if 'id' in alias: - node.id = alias['id'] - # list of macs # if options['gateway']: # mark_gateways(options['gateway']) From 43e70191f19f89b65b37c42141aa2c038f122940 Mon Sep 17 00:00:00 2001 From: Jan-Philipp 
Litza Date: Sat, 22 Feb 2014 13:34:14 +0100 Subject: [PATCH 05/97] RRD: Fix updating of DS --- RRD.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/RRD.py b/RRD.py index d1ae870..9bb87a0 100644 --- a/RRD.py +++ b/RRD.py @@ -80,9 +80,9 @@ class RRD: raise FileNotFoundError(self.filename) info = self.info() if set(ds_list) - set(info['ds'].values()) != set(): - if set((ds.name, ds.type) for ds in ds_list) \ - - set((ds.name, ds.type) for ds in info['ds'].values()) != set(): - raise RRDIncompatibleException() + for ds in ds_list: + if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type: + raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type)) else: raise RRDOutdatedException() @@ -177,15 +177,8 @@ class RRD: echo = True dump.stdout.close() restore.stdin.close() - try: - dump.wait(1) - except subprocess.TimeoutExpired: - dump.kill() - try: - restore.wait(2) - except subprocess.TimeoutExpired: - dump.kill() - raise RuntimeError("rrdtool restore process killed") + dump.wait() + restore.wait() os.rename(self.filename + ".new", self.filename) self._cached_info = None From 7075d8481c641725786ecde30b66146115e15225 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 22 Feb 2014 13:35:34 +0100 Subject: [PATCH 06/97] NodeRRD: add many more DS, rrd.py: generate neighbor counts --- NodeRRD.py | 31 ++++++++++++++++++++++++++++++- node.py | 1 + rrd.py | 10 +++++++++- 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/NodeRRD.py b/NodeRRD.py index f53cad6..0118234 100644 --- a/NodeRRD.py +++ b/NodeRRD.py @@ -7,6 +7,19 @@ class NodeRRD(RRD): ds_list = [ DS('upstate', 'GAUGE', 120, 0, 1), DS('clients', 'GAUGE', 120, 0, float('NaN')), + DS('neighbors', 'GAUGE', 120, 0, float('NaN')), + DS('vpn_neighbors', 'GAUGE', 120, 0, float('NaN')), + DS('loadavg', 'GAUGE', 120, 0, float('NaN')), + DS('rx_bytes', 'DERIVE', 120, 0, float('NaN')), + DS('rx_packets', 'DERIVE', 120, 
0, float('NaN')), + DS('tx_bytes', 'DERIVE', 120, 0, float('NaN')), + DS('tx_packets', 'DERIVE', 120, 0, float('NaN')), + DS('mgmt_rx_bytes', 'DERIVE', 120, 0, float('NaN')), + DS('mgmt_rx_packets', 'DERIVE', 120, 0, float('NaN')), + DS('mgmt_tx_bytes', 'DERIVE', 120, 0, float('NaN')), + DS('mgmt_tx_packets', 'DERIVE', 120, 0, float('NaN')), + DS('forward_bytes', 'DERIVE', 120, 0, float('NaN')), + DS('forward_packets', 'DERIVE', 120, 0, float('NaN')), ] rra_list = [ RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples @@ -30,7 +43,23 @@ class NodeRRD(RRD): return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png" def update(self): - super().update({'upstate': 1, 'clients': self.node.clients}) + values = { + 'upstate': 1, + 'clients': float(self.node.clients), + 'neighbors': float(self.node.neighbors), + 'vpn_neighbors': float(self.node.vpn_neighbors), + 'loadavg': float(self.node.statistics['loadavg']), + } + for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'): + try: + values['%s_bytes' % item] = int(self.node.statistics['traffic'][item]['bytes']) + except TypeError: + pass + try: + values['%s_packets' % item] = int(self.node.statistics['traffic'][item]['packets']) + except TypeError: + pass + super().update(values) def graph(self, directory, timeframe): """ diff --git a/node.py b/node.py index 504768a..83531b2 100644 --- a/node.py +++ b/node.py @@ -12,6 +12,7 @@ class NoneDict: __bool__ = lambda self: False __getitem__ = lambda self, k: NoneDict() __json__ = lambda self: None + __float__ = lambda self: float('NaN') def __setitem__(self, key, value): raise RuntimeError("NoneDict is readonly") diff --git a/rrd.py b/rrd.py index 5c3330d..dad78c5 100755 --- a/rrd.py +++ b/rrd.py @@ -33,7 +33,9 @@ class rrd: if node.flags['online']: if not node.flags['client']: nodes[node.id] = node - node.clients = 0; + node.clients = 0 + node.neighbors = 0 + node.vpn_neighbors = 0 if 'legacy' in node.flags and node.flags['legacy']: clientCount -= 1 else: @@ -45,6 
+47,12 @@ class rrd: nodes[source].clients += 1 elif target in nodes and not source in nodes: nodes[target].clients += 1 + elif source in nodes and target in nodes: + nodes[source].neighbors += 1 + nodes[target].neighbors += 1 + if link.type == 'vpn': + nodes[target].vpn_neighbors += 1 + nodes[source].vpn_neighbors += 1 self.globalDb.update(len(nodes), clientCount) for node in nodes.values(): From 89e4c6370050a012bc5ce09a9a1c798f2656e527 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Wed, 19 Mar 2014 23:26:28 +0100 Subject: [PATCH 07/97] alfred.py: Make geo attribute setting more robust --- alfred.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/alfred.py b/alfred.py index b8aa1e2..06ee1f7 100755 --- a/alfred.py +++ b/alfred.py @@ -15,8 +15,10 @@ class alfred: for key in node: node_alias[key] = node[key] - if 'location' in node: + try: node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']] + except (TypeError, KeyError): + pass try: node_alias['id'] = node['network']['mac'] From ee515476645dd48812f0b4fee1acf7a3ee8b21f3 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sun, 6 Jul 2014 20:04:24 +0200 Subject: [PATCH 08/97] =?UTF-8?q?mkmap.sh:=20Remove=20L=C3=BCbeck-specific?= =?UTF-8?q?=20stuff?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- mkmap.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mkmap.sh b/mkmap.sh index ce3b162..1c6453f 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -9,7 +9,4 @@ DEST=$1 cd "$(dirname "$0")"/ -./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json -./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json - -./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST +./bat2nodes.py -A -a aliases.json -d $DEST From 54402ce08906df0e7675766a84b788986f2bfd92 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sun, 6 Jul 2014 20:07:49 +0200 
Subject: [PATCH 09/97] mkmap.sh: Add locking around script call --- mkmap.sh | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/mkmap.sh b/mkmap.sh index 1c6453f..28195be 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -3,10 +3,30 @@ set -e DEST=$1 - +LOCKFILE="/run/lock/ffmap" [ "$DEST" ] || exit 1 cd "$(dirname "$0")"/ +if lockfile-check "$LOCKFILE"; then + exit +fi +lockfile-create "$LOCKFILE" +lockfile-touch "$LOCKFILE" & +LOCKPID="$!" + ./bat2nodes.py -A -a aliases.json -d $DEST + +kill "$LOCKPID" +lockfile-remove "$LOCKFILE" + +if lockfile-check "$LOCKFILE-sync"; then + exit +fi +lockfile-create "$LOCKFILE-sync" +lockfile-touch "$LOCKFILE-sync" & +LOCKPID="$!" + +kill "$LOCKPID" +lockfile-remove "$LOCKFILE-sync" From f5e3705eec4888ec0d40f98333df3d447f1f842f Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Mon, 7 Jul 2014 23:27:21 +0200 Subject: [PATCH 10/97] Began rewrite with more modular design --- alfred.py | 37 -- batman.py | 86 ----- hostid.py | 13 - inputs/alfred/__init__.py | 18 + inputs/batadv/__init__.py | 100 +++++ json_encoder.py | 13 - link.py | 24 -- node.py | 128 +++---- nodedb.py | 441 +++-------------------- outputs/json/__init__.py | 71 ++++ GlobalRRD.py => outputs/rrd/GlobalRRD.py | 6 +- NodeRRD.py => outputs/rrd/NodeRRD.py | 14 +- RRD.py => outputs/rrd/RRD.py | 0 outputs/rrd/__init__.py | 31 ++ rrd.py | 80 ---- 15 files changed, 354 insertions(+), 708 deletions(-) delete mode 100755 alfred.py delete mode 100755 batman.py delete mode 100644 hostid.py create mode 100644 inputs/alfred/__init__.py create mode 100644 inputs/batadv/__init__.py delete mode 100644 json_encoder.py delete mode 100644 link.py create mode 100644 outputs/json/__init__.py rename GlobalRRD.py => outputs/rrd/GlobalRRD.py (89%) rename NodeRRD.py => outputs/rrd/NodeRRD.py (85%) rename RRD.py => outputs/rrd/RRD.py (100%) create mode 100644 outputs/rrd/__init__.py delete mode 100755 rrd.py diff --git a/alfred.py b/alfred.py deleted file mode 
100755 index 06ee1f7..0000000 --- a/alfred.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python3 -import subprocess -import json - -class alfred: - def __init__(self,request_data_type = 158): - self.request_data_type = request_data_type - - def aliases(self): - output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"]) - alfred_data = json.loads(output.decode("utf-8")) - alias = {} - for mac,node in alfred_data.items(): - node_alias = {} - for key in node: - node_alias[key] = node[key] - - try: - node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']] - except (TypeError, KeyError): - pass - - try: - node_alias['id'] = node['network']['mac'] - except KeyError: - pass - - if 'hostname' in node: - node_alias['name'] = node['hostname'] - if len(node_alias): - alias[mac] = node_alias - return alias - -if __name__ == "__main__": - ad = alfred() - al = ad.aliases() - print(al) diff --git a/batman.py b/batman.py deleted file mode 100755 index c9b3db6..0000000 --- a/batman.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python3 -import subprocess -import json -import re - -class batman: - """ Bindings for B.A.T.M.A.N. advanced batctl tool - """ - def __init__(self, mesh_interface = "bat0"): - self.mesh_interface = mesh_interface - - def vis_data(self,batadv_vis=False): - vds = self.vis_data_batctl_legacy() - if batadv_vis: - vds += self.vis_data_batadv_vis() - return vds - - def vis_data_helper(self,lines): - vd = [] - for line in lines: - try: - utf8_line = line.decode("utf-8") - vd.append(json.loads(utf8_line)) - except e: - pass - return vd - - def vis_data_batctl_legacy(self): - """ Parse "batctl -m vd json -n" into an array of dictionaries. 
- """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"]) - lines = output.splitlines() - vds = self.vis_data_helper(lines) - for vd in vds: - vd['legacy'] = True - return vds - - def vis_data_batadv_vis(self): - """ Parse "batadv-vis -i -f json" into an array of dictionaries. - """ - output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"]) - lines = output.splitlines() - return self.vis_data_helper(lines) - - def gateway_list(self): - """ Parse "batctl -m gwl -n" into an array of dictionaries. - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"]) - output_utf8 = output.decode("utf-8") - # TODO Parse information - lines = output_utf8.splitlines() - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1) - # Remove header line - del lines[0] - # Fill gateway list - gw = [] - gw_mode = self.gateway_mode() - if gw_mode['mode'] == 'server': - gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']}) - for line in lines: - gw_line = line.split() - if (gw_line[0] == 'No'): - continue - # When in client gateway mode maybe gw_line[0] is not the right. 
- gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]}) - return gw - - def gateway_mode(self): - """ Parse "batctl -m gw" - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"]) - elements = output.decode("utf-8").split() - mode = elements[0] - if mode == "server": - return {'mode': 'server', 'bandwidth': elements[3]} - else: - return {'mode': mode} - -if __name__ == "__main__": - bc = batman() - vd = bc.vis_data() - gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) diff --git a/hostid.py b/hostid.py deleted file mode 100644 index 2b4038e..0000000 --- a/hostid.py +++ /dev/null @@ -1,13 +0,0 @@ -import re -from functools import reduce - -def mac_to_hostid(mac): - int_mac = list(map(lambda x: int(x, 16), mac.split(":"))) - int_mac[0] ^= 2 - bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:]) - return reduce(lambda a, i: - [a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:], - range(0, 4), - [""] + list(bytes) - ) - diff --git a/inputs/alfred/__init__.py b/inputs/alfred/__init__.py new file mode 100644 index 0000000..6c0f66e --- /dev/null +++ b/inputs/alfred/__init__.py @@ -0,0 +1,18 @@ +import subprocess +import json + +class Input: + def __init__(self,request_data_type = 158): + self.request_data_type = request_data_type + + def get_data(self, nodedb): + """Add data from alfred to the supplied nodedb""" + output = subprocess.check_output([ + "alfred-json", + "-r", str(self.request_data_type), + "-f", "json", + ]) + alfred_data = json.loads(output.decode("utf-8")) + + for mac, node in alfred_data.items(): + nodedb.add_or_update([mac], node) diff --git a/inputs/batadv/__init__.py b/inputs/batadv/__init__.py new file mode 100644 index 0000000..576b09a --- /dev/null +++ b/inputs/batadv/__init__.py @@ -0,0 +1,100 @@ +import subprocess +import json + +class Input: + """Fill the NodeDB with links from batadv-vis. 
+ + The links are added as lists containing the neighboring nodes, not + only their identifiers! Mind this when exporting the database, as + it probably leads to recursion. + """ + def __init__(self, mesh_interface="bat0"): + self.mesh_interface = mesh_interface + + @staticmethod + def _is_similar_mac(a, b): + """Determine if two MAC addresses are similar.""" + if a == b: + return True + + # Split the address into bytes + try: + mac_a = list(int(i, 16) for i in a.split(":")) + mac_b = list(int(i, 16) for i in b.split(":")) + except ValueError: + return False + + # Second and third byte musn't differ + if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]: + return False + + # First byte must only differ in bit 2 + if mac_a[0] | 2 != mac_b[0] | 2: + return False + + # Count differing bytes after the third + c = [x for x in zip(mac_a[3:], mac_b[3:]) if x[0] != x[1]] + + # No more than two additional bytes must differ + if len(c) > 2: + return False + + # If no more bytes differ, they are very similar + if len(c) == 0: + return True + + # If the sum of absolute differences isn't greater than 2, they + # are pretty similar + delta = sum(abs(i[0] - i[1]) for i in c) + return delta < 2 + + def get_data(self, nodedb): + """Add data from batadv-vis to the supplied nodedb""" + output = subprocess.check_output([ + "batadv-vis", + "-i", str(self.mesh_interface), + "-f", "jsondoc", + ]) + data = json.loads(output.decode("utf-8")) + + # First pass + for node in data["vis"]: + # Determine possible other MAC addresses of this node by + # comparing all its client's MAC addresses to its primary + # MAC address. If they are similar, it probably is another + # address of the node itself! If it isn't, it is a real + # client. 
+ node['aliases'] = [node["primary"]] + if 'secondary' in node: + node['aliases'].extend(node['secondary']) + real_clients = [] + for mac in node["clients"]: + if self._is_similar_mac(mac, node["primary"]): + node['aliases'].append(mac) + else: + real_clients.append(mac) + node['clients'] = real_clients + + # Add nodes and aliases without any information at first. + # This way, we can later link the objects themselves. + nodedb.add_or_update(node['aliases']) + + # Second pass + for node in data["vis"]: + # We only need the primary address now, all aliases are + # already present in the database. Furthermore, we can be + # sure that all neighbors are in the database as well. If + # a neighbor isn't added already, we simply ignore it. + nodedb.add_or_update( + [node["primary"]], + { + "clients": node["clients"], + "neighbors": [ + { + "metric": neighbor['metric'], + "neighbor": nodedb[neighbor['neighbor']], + } for neighbor in node["neighbors"] + if neighbor['neighbor'] in nodedb + ] + } + ) diff --git a/json_encoder.py b/json_encoder.py deleted file mode 100644 index 8d62771..0000000 --- a/json_encoder.py +++ /dev/null @@ -1,13 +0,0 @@ -from json import JSONEncoder - -class CustomJSONEncoder(JSONEncoder): - """ - JSON encoder that uses an object's __json__() method to convert it to - something JSON-compatible. 
- """ - def default(self, obj): - try: - return obj.__json__() - except AttributeError: - pass - return super().default(obj) diff --git a/link.py b/link.py deleted file mode 100644 index b161608..0000000 --- a/link.py +++ /dev/null @@ -1,24 +0,0 @@ -class Link(): - def __init__(self): - self.id = None - self.source = LinkConnector() - self.target = LinkConnector() - self.quality = None - self.type = None - - def export(self): - return { - 'source': self.source.id, - 'target': self.target.id, - 'quality': self.quality, - 'type': self.type, - 'id': self.id - } - -class LinkConnector(): - def __init__(self): - self.id = None - self.interface = None - - def __repr__(self): - return "LinkConnector(%d, %s)" % (self.id, self.interface) diff --git a/node.py b/node.py index 83531b2..5fa58f6 100644 --- a/node.py +++ b/node.py @@ -1,70 +1,70 @@ from collections import defaultdict class NoneDict: - """ - A NoneDict acts like None but returns a NoneDict for every item in it. + """Act like None but return a NoneDict for every item request. - This is similar to the behaviour of collections.defaultdict in that even - previously inexistent keys can be accessed, but there is nothing stored - permanently. - """ - __repr__ = lambda self: 'NoneDict()' - __bool__ = lambda self: False - __getitem__ = lambda self, k: NoneDict() - __json__ = lambda self: None - __float__ = lambda self: float('NaN') - def __setitem__(self, key, value): - raise RuntimeError("NoneDict is readonly") - -class casualdict(defaultdict): - """ - This special defaultdict returns a NoneDict for inexistent items. Also, its - items can be accessed as attributed as well. 
- """ - def __init__(self): - super().__init__(NoneDict) - __getattr__ = defaultdict.__getitem__ - __setattr__ = defaultdict.__setitem__ - -class Node(casualdict): - def __init__(self): - self.name = "" - self.id = "" - self.macs = set() - self.interfaces = dict() - self.flags = dict({ - "online": False, - "gateway": False, - "client": False - }) - super().__init__() - - def add_mac(self, mac): - mac = mac.lower() - if len(self.macs) == 0: - self.id = mac - - self.macs.add(mac) - - self.interfaces[mac] = Interface() - - def __repr__(self): - return self.macs.__repr__() - - def export(self): + This is similar to the behaviour of collections.defaultdict in that + even previously inexistent keys can be accessed, but nothing is + stored permanently in this class. """ - Return a dict that contains all attributes of the Node that are supposed to - be exported to other applications. - """ - return { - "name": self.name, - "id": self.id, - "macs": list(self.macs), - "geo": self.geo, - "firmware": self.software['firmware']['release'], - "flags": self.flags - } + __repr__ = lambda self: 'NoneDict()' + __bool__ = lambda self: False + __getitem__ = lambda self, k: NoneDict() + __json__ = lambda self: None + __float__ = lambda self: float('NaN') + def __setitem__(self, key, value): + raise RuntimeError("NoneDict is readonly") -class Interface(): - def __init__(self): - self.vpn = False +class Node(defaultdict): + _id = None + def __init__(self, id_=None): + self._id = id_ + super().__init__(NoneDict) + + def __repr__(self): + return "Node(%s)" % self.id + + @property + def id(self): + return self._id + + def __hash__(self): + """Generate hash from the node's id. + + WARNING: Obviously this hash doesn't cover all of the node's + data, but we need nodes to be hashable in order to eliminate + duplicates in the NodeDB. + + At least the id cannot change after initialization... 
+ """ + return hash(self.id) + + @property + def vpn_neighbors(self): + try: + vpn_neighbors = [] + for neighbor in self['neighbors']: + if neighbor['neighbor']['vpn']: + vpn_neighbors.append(neighbor) + return vpn_neighbors + except TypeError: + return [] + + def export(self): + """Generate a serializable dict of the node. + + In particular, this replaces any references to other nodes by + their id to prevent circular references. + """ + ret = dict(self) + if "neighbors" in self: + ret["neighbors"] = [] + for neighbor in self["neighbors"]: + new_neighbor = {} + for key, val in neighbor.items(): + if isinstance(val, Node): + new_neighbor[key] = val.id + else: + new_neighbor[key] = val + ret["neighbors"].append(new_neighbor) + return ret diff --git a/nodedb.py b/nodedb.py index e5ff30e..a056184 100644 --- a/nodedb.py +++ b/nodedb.py @@ -1,381 +1,60 @@ -from functools import reduce -from collections import defaultdict -from node import Node, Interface -from link import Link, LinkConnector - -class NodeDB: - def __init__(self): - self._nodes = [] - self._links = [] - - # fetch list of links - def get_links(self): - self.update_vpn_links() - return self.reduce_links() - - # fetch list of nodes - def get_nodes(self): - return self._nodes - - def export(self): - return { - 'nodes': [node.export() for node in self.get_nodes()], - 'links': [link.export() for link in self.get_links()], - } - - def maybe_node_by_fuzzy_mac(self, mac): - mac_a = mac.lower() - - for node in self._nodes: - for mac_b in node.macs: - if is_derived_mac(mac_a, mac_b): - return node - - raise KeyError - - def maybe_node_by_mac(self, macs): - for node in self._nodes: - for mac in macs: - if mac.lower() in node.macs: - return node - - raise KeyError - - def maybe_node_by_id(self, mac): - for node in self._nodes: - if mac.lower() == node.id: - return node - - raise KeyError - - def parse_vis_data(self,vis_data): - for x in vis_data: - - if 'of' in x: - try: - node = self.maybe_node_by_mac((x['of'], 
x['secondary'])) - except: - node = Node() - node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True - self._nodes.append(node) - - node.add_mac(x['of']) - node.add_mac(x['secondary']) - - for x in vis_data: - - if 'router' in x: - try: - node = self.maybe_node_by_mac((x['router'], )) - except: - node = Node() - node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True - node.add_mac(x['router']) - self._nodes.append(node) - - # If it's a TT link and the MAC is very similar - # consider this MAC as one of the routers - # MACs - if 'gateway' in x and x['label'] == "TT": - if is_similar(x['router'], x['gateway']): - node.add_mac(x['gateway']) - - # skip processing as regular link - continue - - try: - if 'neighbor' in x: - try: - node = self.maybe_node_by_mac((x['neighbor'])) - except: - continue - - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - node = self.maybe_node_by_mac((x['neighbor'], )) - except: - node = Node() - node.flags['online'] = True - if x['label'] == 'TT': - node.flags['client'] = True - - node.add_mac(x['neighbor']) - self._nodes.append(node) - - for x in vis_data: - - if 'router' in x: - try: - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - router = self.maybe_node_by_mac((x['router'], )) - neighbor = self.maybe_node_by_mac((x['neighbor'], )) - except: - continue - - # filter TT links merged in previous step - if router == neighbor: - continue - - link = Link() - link.source = LinkConnector() - link.source.interface = x['router'] - link.source.id = self._nodes.index(router) - link.target = LinkConnector() - link.target.interface = x['neighbor'] - link.target.id = self._nodes.index(neighbor) - link.quality = x['label'] - link.id = "-".join(sorted((link.source.interface, link.target.interface))) - - if x['label'] == "TT": - link.type = "client" - - self._links.append(link) - - for x in vis_data: - - if 'primary' in x: - try: - node = self.maybe_node_by_mac((x['primary'], )) - except: - continue 
- - node.id = x['primary'] - - def reduce_links(self): - tmp_links = defaultdict(list) - - for link in self._links: - tmp_links[link.id].append(link) - - links = [] - - def reduce_link(a, b): - a.id = b.id - a.source = b.source - a.target = b.target - a.type = b.type - a.quality = ", ".join([x for x in (a.quality, b.quality) if x]) - - return a - - for k, v in tmp_links.items(): - new_link = reduce(reduce_link, v, Link()) - links.append(new_link) - - return links - - def import_aliases(self, aliases): - for mac, alias in aliases.items(): - try: - node = self.maybe_node_by_mac([mac]) - except: - try: - node = self.maybe_node_by_fuzzy_mac(mac) - except: - # create an offline node - node = Node() - node.add_mac(mac) - self._nodes.append(node) - - for key in alias: - node[key] = alias[key] - - if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces: - node.interfaces[mac].vpn = True - - # list of macs - # if options['gateway']: - # mark_gateways(options['gateway']) - def mark_gateways(self, gateways): - for gateway in gateways: - try: - node = self.maybe_node_by_mac((gateway, )) - except: - print("WARNING: did not find gateway '",gateway,"' in node list") - continue - - node.flags['gateway'] = True - - def update_vpn_links(self): - changes = 1 - while changes > 0: - changes = 0 - for link in self._links: - if link.type == "client": - continue - - source_interface = self._nodes[link.source.id].interfaces[link.source.interface] - target_interface = self._nodes[link.target.id].interfaces[link.target.interface] - if source_interface.vpn or target_interface.vpn: - source_interface.vpn = True - target_interface.vpn = True - if link.type != "vpn": - changes += 1 - - link.type = "vpn" - - def count_clients(self): - for link in self._links: - try: - a = self.maybe_node_by_id(link.source.interface) - b = self.maybe_node_by_id(link.target.interface) - - if a.flags['client']: - client = a - node = b - elif b.flags['client']: - client = b - node = 
a - else: - continue - - node.clientcount += 1 - except: - pass - - def obscure_clients(self): - - globalIdCounter = 0 - nodeCounters = {} - clientIds = {} - - for node in self._nodes: - if node.flags['client']: - node.macs = set() - clientIds[node.id] = None - - for link in self._links: - ids = link.source.interface - idt = link.target.interface - - try: - node_source = self.maybe_node_by_fuzzy_mac(ids) - node_target = self.maybe_node_by_id(idt) - - if not node_source.flags['client'] and not node_target.flags['client']: - # if none of the nodes associated with this link are clients, - # we do not want to obscure - continue - - if ids in clientIds and idt in clientIds: - # This is for corner cases, when a client - # is linked to another client. - clientIds[ids] = str(globalIdCounter) - ids = str(globalIdCounter) - globalIdCounter += 1 - - clientIds[idt] = str(globalIdCounter) - idt = str(globalIdCounter) - globalIdCounter += 1 - - elif ids in clientIds: - newId = generateId(idt) - clientIds[ids] = newId - ids = newId - - link.source.interface = ids; - node_source.id = ids; - - elif idt in clientIds: - newId = generateId(ids,nodeCounters) - clientIds[idt] = newId - idt = newId - - link.target.interface = idt; - node_target.id = idt; - - link.id = ids + "-" + idt - - except KeyError: - pass - -# extends node id by incremented node counter -def generateId(nodeId,nodeCounters): - if nodeId in nodeCounters: - n = nodeCounters[nodeId] - nodeCounters[nodeId] = n + 1 - else: - nodeCounters[nodeId] = 1 - n = 0 - - return nodeId + "_" + str(n) - -# compares two MACs and decides whether they are -# similar and could be from the same node -def is_similar(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - # first byte must only differ in bit 2 - if mac_a[0] | 2 == mac_b[0] | 2: - # count different bytes - c = [x for x in zip(mac_a[1:], mac_b[1:]) if 
x[0] != x[1]] - else: - return False - - # no more than two additional bytes must differ - if len(c) <= 2: - delta = 0 - - if len(c) > 0: - delta = sum(abs(i[0] -i[1]) for i in c) - - # These addresses look pretty similar! - return delta < 8 - -def is_derived_mac(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]: - return False - - x = list(mac_a) - x[5] += 1 - x[5] %= 255 - if mac_b == x: - return True - - x[0] |= 2 - if mac_b == x: - return True - - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[5] += 2 - x[5] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - return False +from node import Node + +class AmbiguityException(Exception): + """Indicate the ambiguity of identifiers. + + This exception is raised if there is more than one match for a set + of identifiers. + + Attributes: + identifiers -- set of ambiguous identifiers + """ + + identifiers = [] + + def __init__(self, identifiers): + self.identifiers = identifiers + + def __str__(self): + return "Ambiguous identifiers: %s" % ", ".join(self.identifiers) + +class NodeDB(dict): + def add_or_update(self, ids, other=None): + """Add or update a node in the database. + + Searches for an already existing node and updates it, or adds a new + one if no existing one is found. Raises an AmbiguityException if + more than one different nodes are found matching the criteria. + + Arguments: + ids -- list of possible identifiers (probably MAC addresses) of the + node + other -- dict of values to update in an existing node or add to + the new one. Defaults to None, in which case no values + are added or updated, only the aliases of the + (possibly freshly created) node are updated. 
+ """ + + # Find existing node, if any + node = None + node_id = None + for id_ in ids: + if id_ == node_id: + continue + if id_ in self: + if node is not None: + raise AmbiguityException([node_id, id_]) + node = self[id_] + node_id = id_ + + # If no node was found, create a new one + if node is None: + node = Node(ids[0]) + + # Update the node with the given properties using its own update method. + if other is not None: + node.update(other) + + # Add new aliases if any + for id_ in ids: + self[id_] = node diff --git a/outputs/json/__init__.py b/outputs/json/__init__.py new file mode 100644 index 0000000..f005c38 --- /dev/null +++ b/outputs/json/__init__.py @@ -0,0 +1,71 @@ +import json + +__all__ = ["Exporter"] + +class CustomJSONEncoder(json.JSONEncoder): + """ + JSON encoder that uses an object's __json__() method to convert it to + something JSON-compatible. + """ + def default(self, obj): + try: + return obj.__json__() + except AttributeError: + pass + return super().default(obj) + +class Exporter: + def __init__(self, filepath="nodes.json"): + self.filepath = filepath + + @staticmethod + def generate(nodedb): + indexes = {} + nodes = [] + count = 0 + for node in set(nodedb.values()): + nodes.append(node.export()) + indexes[node.id] = count + count += 1 + + links = [] + for node in set(nodedb.values()): + if "neighbors" in node: + links.extend( + { + "source": indexes[node.id], + "target": indexes[neighbor["neighbor"].id], + "quality": neighbor["metric"], + "type": "vpn" if neighbor["neighbor"]["vpn"] else None, + "id": "-".join((node.id, neighbor["neighbor"].id)), + } for neighbor in node["neighbors"] + ) + if "clients" in node: + for client in node["clients"]: + if not client in indexes: + nodes.append({ + "id": client, + }) + indexes[client] = count + count += 1 + + links.append({ + "source": indexes[node.id], + "target": indexes[client], + "quality": "TT", + "type": "client", + "id": "-".join((node.id, client)), + }) + + return { + "nodes": nodes, + 
"links": links, + } + + def export(self, nodedb): + with open(self.filepath, "w") as nodes_json: + json.dump( + self.generate(nodedb), + nodes_json, + cls=CustomJSONEncoder + ) diff --git a/GlobalRRD.py b/outputs/rrd/GlobalRRD.py similarity index 89% rename from GlobalRRD.py rename to outputs/rrd/GlobalRRD.py index f3f3960..b114418 100644 --- a/GlobalRRD.py +++ b/outputs/rrd/GlobalRRD.py @@ -1,6 +1,6 @@ import os import subprocess -from RRD import RRD, DS, RRA +from .RRD import RRD, DS, RRA class GlobalRRD(RRD): ds_list = [ @@ -15,8 +15,8 @@ class GlobalRRD(RRD): RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples ] - def __init__(self, directory): - super().__init__(os.path.join(directory, "nodes.rrd")) + def __init__(self, filepath): + super().__init__(filepath) self.ensureSanity(self.ds_list, self.rra_list, step=60) def update(self, nodeCount, clientCount): diff --git a/NodeRRD.py b/outputs/rrd/NodeRRD.py similarity index 85% rename from NodeRRD.py rename to outputs/rrd/NodeRRD.py index 0118234..fc8aef1 100644 --- a/NodeRRD.py +++ b/outputs/rrd/NodeRRD.py @@ -1,7 +1,7 @@ import os import subprocess from node import Node -from RRD import RRD, DS, RRA +from .RRD import RRD, DS, RRA class NodeRRD(RRD): ds_list = [ @@ -45,18 +45,18 @@ class NodeRRD(RRD): def update(self): values = { 'upstate': 1, - 'clients': float(self.node.clients), - 'neighbors': float(self.node.neighbors), - 'vpn_neighbors': float(self.node.vpn_neighbors), - 'loadavg': float(self.node.statistics['loadavg']), + 'clients': float(len(self.node.get('clients', []))), + 'neighbors': float(len(self.node.get('neighbors', []))), + 'vpn_neighbors': float(len(self.node.vpn_neighbors)), + 'loadavg': float(self.node['statistics']['loadavg']), } for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'): try: - values['%s_bytes' % item] = int(self.node.statistics['traffic'][item]['bytes']) + values[item + '_bytes'] = int(self.node['statistics']['traffic'][item]['bytes']) except TypeError: pass try: - 
values['%s_packets' % item] = int(self.node.statistics['traffic'][item]['packets']) + values[item + '_packets'] = int(self.node['statistics']['traffic'][item]['packets']) except TypeError: pass super().update(values) diff --git a/RRD.py b/outputs/rrd/RRD.py similarity index 100% rename from RRD.py rename to outputs/rrd/RRD.py diff --git a/outputs/rrd/__init__.py b/outputs/rrd/__init__.py new file mode 100644 index 0000000..5e9fbc1 --- /dev/null +++ b/outputs/rrd/__init__.py @@ -0,0 +1,31 @@ +import os +from .NodeRRD import NodeRRD +from .GlobalRRD import GlobalRRD + +class Exporter: + def __init__(self, directory="nodedb"): + self.directory = directory + try: + os.mkdir(self.directory) + except OSError: + pass + + def export(self, nodedb): + nodes = set(nodedb.values()) + clients = 0 + nodecount = 0 + for node in nodes: + clients += len(node.get("clients", [])) + nodecount += 1 + NodeRRD( + os.path.join( + self.directory, + str(node.id).replace(':', '') + '.rrd' + ), + node + ).update() + + GlobalRRD(os.path.join(self.directory, "nodes.rrd")).update( + nodecount, + clients + ) diff --git a/rrd.py b/rrd.py deleted file mode 100755 index dad78c5..0000000 --- a/rrd.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 -import subprocess -import time -import os -from GlobalRRD import GlobalRRD -from NodeRRD import NodeRRD - -class rrd: - def __init__( self - , databaseDirectory - , imagePath - , displayTimeGlobal = "7d" - , displayTimeNode = "1d" - ): - self.dbPath = databaseDirectory - self.globalDb = GlobalRRD(self.dbPath) - self.imagePath = imagePath - self.displayTimeGlobal = displayTimeGlobal - self.displayTimeNode = displayTimeNode - - self.currentTimeInt = (int(time.time())/60)*60 - self.currentTime = str(self.currentTimeInt) - - try: - os.stat(self.imagePath) - except: - os.mkdir(self.imagePath) - - def update_database(self,db): - nodes = {} - clientCount = 0 - for node in db.get_nodes(): - if node.flags['online']: - if not node.flags['client']: - 
nodes[node.id] = node - node.clients = 0 - node.neighbors = 0 - node.vpn_neighbors = 0 - if 'legacy' in node.flags and node.flags['legacy']: - clientCount -= 1 - else: - clientCount += 1 - for link in db.get_links(): - source = link.source.interface - target = link.target.interface - if source in nodes and not target in nodes: - nodes[source].clients += 1 - elif target in nodes and not source in nodes: - nodes[target].clients += 1 - elif source in nodes and target in nodes: - nodes[source].neighbors += 1 - nodes[target].neighbors += 1 - if link.type == 'vpn': - nodes[target].vpn_neighbors += 1 - nodes[source].vpn_neighbors += 1 - - self.globalDb.update(len(nodes), clientCount) - for node in nodes.values(): - rrd = NodeRRD( - os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'), - node - ) - rrd.update() - - def update_images(self): - """ Creates an image for every rrd file in the database directory. - """ - - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) - - nodeDbFiles = os.listdir(self.dbPath) - - for fileName in nodeDbFiles: - if not os.path.isfile(os.path.join(self.dbPath, fileName)): - continue - - nodeName = os.path.basename(fileName).split('.') - if nodeName[1] == 'rrd' and not nodeName[0] == "nodes": - rrd = NodeRRD(os.path.join(self.dbPath, fileName)) - rrd.graph(self.imagePath, self.displayTimeNode) From e54e7467fc3c7cab4189498450f8c672348c130f Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Tue, 8 Jul 2014 14:20:47 +0200 Subject: [PATCH 11/97] Create package ffmap, add wiki input, remove old code --- .gitignore | 2 - README.md | 8 +- aliases.json_sample | 9 -- bat2nodes.py | 88 ------------------ ffhlwiki.py | 93 ------------------- ffmap/__init__.py | 42 +++++++++ nodedb/.gitkeep => ffmap/inputs/__init__.py | 0 .../__init__.py => ffmap/inputs/alfred.py | 0 .../__init__.py => ffmap/inputs/batadv.py | 0 ffmap/inputs/wiki.py | 71 ++++++++++++++ node.py => ffmap/node.py | 29 +++++- nodedb.py 
=> ffmap/nodedb.py | 8 +- ffmap/outputs/__init__.py | 1 + .../__init__.py => ffmap/outputs/d3json.py | 14 ++- .../rrd/__init__.py => ffmap/outputs/rrd.py | 7 +- outputs/rrd/RRD.py => ffmap/rrd/__init__.py | 0 outputs/rrd/NodeRRD.py => ffmap/rrd/rrds.py | 36 ++++++- ffmap/run.py | 69 ++++++++++++++ mkmap.sh | 32 ------- outputs/rrd/GlobalRRD.py | 35 ------- setup.py | 10 ++ 21 files changed, 272 insertions(+), 282 deletions(-) delete mode 100644 aliases.json_sample delete mode 100755 bat2nodes.py delete mode 100755 ffhlwiki.py create mode 100644 ffmap/__init__.py rename nodedb/.gitkeep => ffmap/inputs/__init__.py (100%) rename inputs/alfred/__init__.py => ffmap/inputs/alfred.py (100%) rename inputs/batadv/__init__.py => ffmap/inputs/batadv.py (100%) create mode 100755 ffmap/inputs/wiki.py rename node.py => ffmap/node.py (74%) rename nodedb.py => ffmap/nodedb.py (92%) create mode 100644 ffmap/outputs/__init__.py rename outputs/json/__init__.py => ffmap/outputs/d3json.py (87%) rename outputs/rrd/__init__.py => ffmap/outputs/rrd.py (86%) rename outputs/rrd/RRD.py => ffmap/rrd/__init__.py (100%) rename outputs/rrd/NodeRRD.py => ffmap/rrd/rrds.py (72%) create mode 100644 ffmap/run.py delete mode 100755 mkmap.sh delete mode 100644 outputs/rrd/GlobalRRD.py create mode 100644 setup.py diff --git a/.gitignore b/.gitignore index 0f42dec..0d20b64 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1 @@ *.pyc -aliases.json -nodedb/ diff --git a/README.md b/README.md index fc718fe..8e4abd8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Data for Freifunk Map, Graph and Node List -ffmap-backend gathers information on the batman network by invoking +ffmap-backend gathers information on the batman network by invoking batctl and batadv-vis @@ -41,13 +41,13 @@ Alias /map /home/ffmap/www/ Alias /firmware /home/freifunk/autoupdates/ -To execute, run - ./mkmap.sh ../www +To execute, run + python3 -mffmap.run --input-alfred --input-badadv --output-d3json ../www/nodes.json The 
script expects above described sudo-wrappers in the $HOME directory of the user executing the script. If those are not available, an error will occurr if not executed as root. Also, the tool realpath optionally allows to execute the script from anywhere in the directory tree. For the script's regular execution add the following to the crontab:
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+*/5 * * * * python3 -mffmap.run --input-alfred --input-batadv --output-d3json /home/ffmap/www/nodes.json
 
diff --git a/aliases.json_sample b/aliases.json_sample deleted file mode 100644 index 1f3ca33..0000000 --- a/aliases.json_sample +++ /dev/null @@ -1,9 +0,0 @@ -{ - "b0:48:7a:e7:d3:64" : { - "name" : "Meute-AP" - }, - "8e:3d:c2:10:10:28" : { - "name" : "holstentor", - "vpn" : true - } -} diff --git a/bat2nodes.py b/bat2nodes.py deleted file mode 100755 index e1fde6e..0000000 --- a/bat2nodes.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python3 - -import json -import fileinput -import argparse -import os -import datetime - -from batman import batman -from alfred import alfred -from rrd import rrd -from nodedb import NodeDB -from json_encoder import CustomJSONEncoder - -# Force encoding to UTF-8 -import locale # Ensures that subsequent open()s -locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded. - -import sys -#sys.stdin = open('/dev/stdin', 'r') -#sys.stdout = open('/dev/stdout', 'w') -#sys.stderr = open('/dev/stderr', 'w') - -parser = argparse.ArgumentParser() - -parser.add_argument('-a', '--aliases', - help='read aliases from FILE', - action='append', - metavar='FILE') - -parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') - -parser.add_argument('-o', '--obscure', action='store_true', - help='obscure client macs') - -parser.add_argument('-A', '--alfred', action='store_true', - help='retrieve aliases from alfred') - -parser.add_argument('-d', '--destination-directory', action='store', - help='destination directory for generated files',required=True) - -args = parser.parse_args() - -options = vars(args) - -db = NodeDB() -if options['mesh']: - for mesh_interface in options['mesh']: - bm = batman(mesh_interface) - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateways(gw['mac']) -else: - bm = batman() - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateways([gw['mac']]) - -if options['aliases']: - for aliases in 
options['aliases']: - db.import_aliases(json.load(open(aliases))) - -if options['alfred']: - af = alfred() - db.import_aliases(af.aliases()) - -db.count_clients() - -if options['obscure']: - db.obscure_clients() - -scriptdir = os.path.dirname(os.path.realpath(__file__)) - -exported = db.export() -exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()} - -#Write nodes json -nodes_json = open(options['destination_directory'] + '/nodes.json.new','w') -json.dump(exported, nodes_json, cls=CustomJSONEncoder) -nodes_json.close() - -#Move to destination -os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json') - -rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes") -rrd.update_database(db) -rrd.update_images() diff --git a/ffhlwiki.py b/ffhlwiki.py deleted file mode 100755 index 588ae72..0000000 --- a/ffhlwiki.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python3 - -import json -import argparse -from itertools import zip_longest -from urllib.request import urlopen -from bs4 import BeautifulSoup - -def import_wikigps(url): - def fetch_wikitable(url): - f = urlopen(url) - - soup = BeautifulSoup(f) - - table = soup.find_all("table")[0] - - rows = table.find_all("tr") - - headers = [] - - data = [] - - def maybe_strip(x): - if isinstance(x.string, str): - return x.string.strip() - else: - return "" - - for row in rows: - tds = list([maybe_strip(x) for x in row.find_all("td")]) - ths = list([maybe_strip(x) for x in row.find_all("th")]) - - if any(tds): - data.append(tds) - - if any(ths): - headers = ths - - nodes = [] - - for d in data: - nodes.append(dict(zip(headers, d))) - - return nodes - - nodes = fetch_wikitable(url) - - aliases = {} - - for node in nodes: - try: - node['MAC'] = node['MAC'].split(',') - except KeyError: - pass - - try: - node['GPS'] = node['GPS'].split(',') - except KeyError: - pass - - try: - node['Knotenname'] = 
node['Knotenname'].split(',') - except KeyError: - pass - - nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname']) - - for data in nodes: - alias = {} - - mac = data[0].strip() - - if data[1]: - alias['geo'] = [float(x) for x in data[1].strip().split(' ')] - - if data[2]: - alias['name'] = data[2].strip() - - aliases[mac] = alias - - return aliases - -parser = argparse.ArgumentParser() - -parser.add_argument('url', help='wiki URL') - -args = parser.parse_args() - -options = vars(args) - -aliases = import_wikigps(options['url']) - -print(json.dumps(aliases)) diff --git a/ffmap/__init__.py b/ffmap/__init__.py new file mode 100644 index 0000000..9542acc --- /dev/null +++ b/ffmap/__init__.py @@ -0,0 +1,42 @@ +import importlib + +from ffmap.nodedb import NodeDB + +def run(inputs, outputs): + """Fill the database with given inputs and give it to given outputs. + + Arguments: + inputs -- list of Input instances (with a compatible get_data(nodedb) method) + outputs -- list of Output instances (with a compatible output(nodedb) method) + """ + db = NodeDB() + for input_ in inputs: + input_.get_data(db) + + for output in outputs: + output.output(db) + +def run_names(inputs, outputs): + """Fill the database with inputs and give it to outputs, each given + by names. + + In contrast to run(inputs, outputs), this method expects only the + names of the modules to use, not instances thereof. + Arguments: + inputs -- list of dicts, each dict having the keys "name" with the + name of the input to use (directory name in inputs/), and + the key "options" with a dict of input-dependent options. + outputs -- list of dicts, see inputs. + """ + input_instances = [] + output_instances = [] + + for input_ in inputs: + module = importlib.import_module(".inputs." + input_["name"], "ffmap") + input_instances.append(module.Input(**input_["options"])) + + for output in outputs: + module = importlib.import_module(".outputs." 
+ output["name"], "ffmap") + output_instances.append(module.Output(**output["options"])) + + run(input_instances, output_instances) diff --git a/nodedb/.gitkeep b/ffmap/inputs/__init__.py similarity index 100% rename from nodedb/.gitkeep rename to ffmap/inputs/__init__.py diff --git a/inputs/alfred/__init__.py b/ffmap/inputs/alfred.py similarity index 100% rename from inputs/alfred/__init__.py rename to ffmap/inputs/alfred.py diff --git a/inputs/batadv/__init__.py b/ffmap/inputs/batadv.py similarity index 100% rename from inputs/batadv/__init__.py rename to ffmap/inputs/batadv.py diff --git a/ffmap/inputs/wiki.py b/ffmap/inputs/wiki.py new file mode 100755 index 0000000..ab36ad5 --- /dev/null +++ b/ffmap/inputs/wiki.py @@ -0,0 +1,71 @@ +import json +import argparse +from itertools import zip_longest +from urllib.request import urlopen +from bs4 import BeautifulSoup + +class Input: + def __init__(self, url="http://luebeck.freifunk.net/wiki/Knoten"): + self.url = url + + def fetch_wikitable(self): + f = urlopen(self.url) + soup = BeautifulSoup(f) + table = soup.find("table") + rows = table.find_all("tr") + headers = [] + data = [] + + def maybe_strip(x): + if isinstance(x.string, str): + return x.string.strip() + else: + return "" + + for row in rows: + tds = list([maybe_strip(x) for x in row.find_all("td")]) + ths = list([maybe_strip(x) for x in row.find_all("th")]) + + if any(tds): + data.append(tds) + + if any(ths): + headers = ths + + return [dict(zip(headers, d)) for d in data] + + def get_data(self, nodedb): + nodes = self.fetch_wikitable() + + for node in nodes: + if "MAC" not in node or not node["MAC"]: + # without MAC, we cannot merge this data with others, so + # we might as well ignore it + continue + + newnode = { + "network": { + "mac": node.get("MAC").lower(), + }, + "location": { + "latitude": float(node.get("GPS", " ").split(" ")[0]), + "longitude": float(node.get("GPS", " ").split(" ")[1]), + "description": node.get("Ort"), + } if " " in 
node.get("GPS", "") else None, + "hostname": node.get("Knotenname"), + "hardware": { + "model": node["Router"], + } if node.get("Router") else None, + "software": { + "firmware": { + "base": "LFF", + "release": node.get("LFF Version"), + }, + }, + "owner": { + "contact": node["Betreiber"], + } if node.get("Betreiber") else None, + } + # remove keys with None as value + newnode = {k: v for k,v in newnode.items() if v is not None} + nodedb.add_or_update([newnode["network"]["mac"]], newnode) diff --git a/node.py b/ffmap/node.py similarity index 74% rename from node.py rename to ffmap/node.py index 5fa58f6..e2169f2 100644 --- a/node.py +++ b/ffmap/node.py @@ -7,11 +7,20 @@ class NoneDict: even previously inexistent keys can be accessed, but nothing is stored permanently in this class. """ - __repr__ = lambda self: 'NoneDict()' - __bool__ = lambda self: False - __getitem__ = lambda self, k: NoneDict() - __json__ = lambda self: None - __float__ = lambda self: float('NaN') + def __repr__(self): + return 'NoneDict()' + def __bool__(self): + return False + def __getitem__(self, k): + return NoneDict() + def __json__(self): + return None + def __float__(self): + return float('NaN') + def __iter__(self): + # empty generator + return + yield def __setitem__(self, key, value): raise RuntimeError("NoneDict is readonly") @@ -39,6 +48,16 @@ class Node(defaultdict): """ return hash(self.id) + def deep_update(self, other): + """Update the dictionary like dict.update() but recursively.""" + def dmerge(a, b): + for k, v in b.items(): + if isinstance(v, dict) and isinstance(a.get(k), dict): + dmerge(a[k], v) + else: + a[k] = v + dmerge(self, other) + @property def vpn_neighbors(self): try: diff --git a/nodedb.py b/ffmap/nodedb.py similarity index 92% rename from nodedb.py rename to ffmap/nodedb.py index a056184..0be76b0 100644 --- a/nodedb.py +++ b/ffmap/nodedb.py @@ -1,6 +1,6 @@ -from node import Node +from .node import Node -class AmbiguityException(Exception): +class 
AmbiguityError(Exception): """Indicate the ambiguity of identifiers. This exception is raised if there is more than one match for a set @@ -43,7 +43,7 @@ class NodeDB(dict): continue if id_ in self: if node is not None: - raise AmbiguityException([node_id, id_]) + raise AmbiguityError([node_id, id_]) node = self[id_] node_id = id_ @@ -53,7 +53,7 @@ class NodeDB(dict): # Update the node with the given properties using its own update method. if other is not None: - node.update(other) + node.deep_update(other) # Add new aliases if any for id_ in ids: diff --git a/ffmap/outputs/__init__.py b/ffmap/outputs/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/ffmap/outputs/__init__.py @@ -0,0 +1 @@ + diff --git a/outputs/json/__init__.py b/ffmap/outputs/d3json.py similarity index 87% rename from outputs/json/__init__.py rename to ffmap/outputs/d3json.py index f005c38..fd5b267 100644 --- a/outputs/json/__init__.py +++ b/ffmap/outputs/d3json.py @@ -1,11 +1,12 @@ import json +from datetime import datetime __all__ = ["Exporter"] class CustomJSONEncoder(json.JSONEncoder): """ - JSON encoder that uses an object's __json__() method to convert it to - something JSON-compatible. + JSON encoder that uses an object's __json__() method to convert it + to something JSON-compatible. 
""" def default(self, obj): try: @@ -14,7 +15,7 @@ class CustomJSONEncoder(json.JSONEncoder): pass return super().default(obj) -class Exporter: +class Output: def __init__(self, filepath="nodes.json"): self.filepath = filepath @@ -60,9 +61,14 @@ class Exporter: return { "nodes": nodes, "links": links, + "meta": { + "timestamp": datetime.utcnow() + .replace(microsecond=0) + .isoformat() + } } - def export(self, nodedb): + def output(self, nodedb): with open(self.filepath, "w") as nodes_json: json.dump( self.generate(nodedb), diff --git a/outputs/rrd/__init__.py b/ffmap/outputs/rrd.py similarity index 86% rename from outputs/rrd/__init__.py rename to ffmap/outputs/rrd.py index 5e9fbc1..ce450c3 100644 --- a/outputs/rrd/__init__.py +++ b/ffmap/outputs/rrd.py @@ -1,8 +1,7 @@ import os -from .NodeRRD import NodeRRD -from .GlobalRRD import GlobalRRD +from ffmap.rrd.rrds import NodeRRD, GlobalRRD -class Exporter: +class Output: def __init__(self, directory="nodedb"): self.directory = directory try: @@ -10,7 +9,7 @@ class Exporter: except OSError: pass - def export(self, nodedb): + def output(self, nodedb): nodes = set(nodedb.values()) clients = 0 nodecount = 0 diff --git a/outputs/rrd/RRD.py b/ffmap/rrd/__init__.py similarity index 100% rename from outputs/rrd/RRD.py rename to ffmap/rrd/__init__.py diff --git a/outputs/rrd/NodeRRD.py b/ffmap/rrd/rrds.py similarity index 72% rename from outputs/rrd/NodeRRD.py rename to ffmap/rrd/rrds.py index fc8aef1..2155d0c 100644 --- a/outputs/rrd/NodeRRD.py +++ b/ffmap/rrd/rrds.py @@ -1,7 +1,7 @@ import os import subprocess -from node import Node -from .RRD import RRD, DS, RRA +from ffmap.node import Node +from . 
import RRD, DS, RRA class NodeRRD(RRD): ds_list = [ @@ -81,3 +81,35 @@ class NodeRRD(RRD): 'LINE1:c#00F:clients connected\\l', ] subprocess.check_output(args) + +class GlobalRRD(RRD): + ds_list = [ + # Number of nodes available + DS('nodes', 'GAUGE', 120, 0, float('NaN')), + # Number of client available + DS('clients', 'GAUGE', 120, 0, float('NaN')), + ] + rra_list = [ + RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples + RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples + ] + + def __init__(self, filepath): + super().__init__(filepath) + self.ensureSanity(self.ds_list, self.rra_list, step=60) + + def update(self, nodeCount, clientCount): + super().update({'nodes': nodeCount, 'clients': clientCount}) + + def graph(self, filename, timeframe): + args = ["rrdtool", 'graph', filename, + '-s', '-' + timeframe, + '-w', '800', + '-h' '400', + 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', + 'LINE1:nodes#F00:nodes\\l', + 'DEF:clients=' + self.filename + ':clients:AVERAGE', + 'LINE2:clients#00F:clients', + ] + subprocess.check_output(args) diff --git a/ffmap/run.py b/ffmap/run.py new file mode 100644 index 0000000..a9e004f --- /dev/null +++ b/ffmap/run.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +import argparse +import sys + +from ffmap import run_names + +class MyAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + if self.dest.startswith(("input_", "output_")): + collection_name = self.dest.split("_")[0] + "s" + name = self.dest.split("_", 1)[1] + if not hasattr(namespace, collection_name): + setattr(namespace, collection_name, []) + collection = getattr(namespace, collection_name) + collection.append({ + "name": name, + "options": {self.metavar.lower(): values} + if values is not None else {} + }) + else: + raise Exception("Unexpected dest=" + self.dest) + +def parser_add_myarg(parser, name, metavar="OPT", help=None): + parser.add_argument("--" + 
name, + metavar=metavar, + type=str, + nargs='?', + const=None, + action=MyAction, + help=help) + +parser = argparse.ArgumentParser( + description="""Merge node data from multiple sources and generate + various output formats from this data""", +) +input_group = parser.add_argument_group("Inputs", description=""" + Inputs are used in the order given on the command line, where later + inputs can overwrite attributes of earlier inputs if named equally, + but the first input encountering a node sets its id, which is + immutable afterwards. + + The same input can be given multiple times, probably with different + options. +""") +output_group = parser.add_argument_group("Outputs") +parser_add_myarg(input_group, 'input-alfred', metavar="REQUEST_DATA_TYPE", + help="read node details from A.L.F.R.E.D.") +parser_add_myarg(input_group, 'input-wiki', metavar="URL", + help="read node details from a Wiki page") +parser_add_myarg(input_group, 'input-batadv', metavar="MESH_INTERFACE", + help="add node's neighbors and clients from batadv-vis") +parser_add_myarg(output_group, 'output-d3json', metavar="FILEPATH", + help="generate JSON file compatible with ffmap-d3") +parser_add_myarg(output_group, 'output-rrd', metavar="DIRECTORY", + help="update RRDs with statistics, one global and one per node") + +args = parser.parse_args() + +if "inputs" not in args or not args.inputs: + parser.print_help(sys.stderr) + sys.stderr.write("\nERROR: No input has been defined!\n") + sys.exit(1) + +if "outputs" not in args or not args.outputs: + parser.print_help(sys.stderr) + sys.stderr.write("\nERROR: No output has been defined!\n") + sys.exit(1) + +run_names(inputs=args.inputs, outputs=args.outputs) diff --git a/mkmap.sh b/mkmap.sh deleted file mode 100755 index 28195be..0000000 --- a/mkmap.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e - -DEST=$1 -LOCKFILE="/run/lock/ffmap" - -[ "$DEST" ] || exit 1 - -cd "$(dirname "$0")"/ - -if lockfile-check "$LOCKFILE"; then - exit -fi 
-lockfile-create "$LOCKFILE" -lockfile-touch "$LOCKFILE" & -LOCKPID="$!" - -./bat2nodes.py -A -a aliases.json -d $DEST - -kill "$LOCKPID" -lockfile-remove "$LOCKFILE" - -if lockfile-check "$LOCKFILE-sync"; then - exit -fi -lockfile-create "$LOCKFILE-sync" -lockfile-touch "$LOCKFILE-sync" & -LOCKPID="$!" - -kill "$LOCKPID" -lockfile-remove "$LOCKFILE-sync" diff --git a/outputs/rrd/GlobalRRD.py b/outputs/rrd/GlobalRRD.py deleted file mode 100644 index b114418..0000000 --- a/outputs/rrd/GlobalRRD.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import subprocess -from .RRD import RRD, DS, RRA - -class GlobalRRD(RRD): - ds_list = [ - # Number of nodes available - DS('nodes', 'GAUGE', 120, 0, float('NaN')), - # Number of client available - DS('clients', 'GAUGE', 120, 0, float('NaN')), - ] - rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples - RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples - ] - - def __init__(self, filepath): - super().__init__(filepath) - self.ensureSanity(self.ds_list, self.rra_list, step=60) - - def update(self, nodeCount, clientCount): - super().update({'nodes': nodeCount, 'clients': clientCount}) - - def graph(self, filename, timeframe): - args = ["rrdtool", 'graph', filename, - '-s', '-' + timeframe, - '-w', '800', - '-h' '400', - 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', - 'LINE1:nodes#F00:nodes\\l', - 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'LINE2:clients#00F:clients', - ] - subprocess.check_output(args) diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..4ee3d1f --- /dev/null +++ b/setup.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +from distutils.core import setup + +setup(name='FFmap', + version='0.1', + description='Freifunk map backend', + url='https://github.com/ffnord/ffmap-backend', + packages=['ffmap', 'ffmap.inputs', 'ffmap.outputs', 'ffmap.rrd'], + ) From 446bc984039816bc6f6e6a5a202c9306e68c954c Mon Sep 
17 00:00:00 2001 From: Jan-Philipp Litza Date: Thu, 31 Jul 2014 11:41:26 +0200 Subject: [PATCH 12/97] input alfred: unify nodeinfo and stats datatypes --- ffmap/inputs/alfred.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py index 6c0f66e..340c0d9 100644 --- a/ffmap/inputs/alfred.py +++ b/ffmap/inputs/alfred.py @@ -7,12 +7,26 @@ class Input: def get_data(self, nodedb): """Add data from alfred to the supplied nodedb""" + # get nodeinfo output = subprocess.check_output([ "alfred-json", "-r", str(self.request_data_type), "-f", "json", ]) - alfred_data = json.loads(output.decode("utf-8")) + nodeinfo = json.loads(output.decode("utf-8")) - for mac, node in alfred_data.items(): + # get statistics + output = subprocess.check_output([ + "alfred-json", + "-r", str(self.request_data_type+1), + "-f", "json", + ]) + statistics = json.loads(output.decode("utf-8")) + + # merge statistics into nodeinfo to be compatible with earlier versions + for mac, node in statistics.items(): + if mac in nodeinfo: + nodeinfo[mac]['statistics'] = statistics[mac] + + for mac, node in nodeinfo.items(): nodedb.add_or_update([mac], node) From 5fba69de7adf5e5d9fda211f0dd972e9153da899 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Thu, 31 Jul 2014 16:31:14 +0200 Subject: [PATCH 13/97] d3json: make output more similar to pre-rewrite version --- ffmap/node.py | 2 ++ ffmap/outputs/d3json.py | 61 +++++++++++++++++++++++++---------------- 2 files changed, 39 insertions(+), 24 deletions(-) diff --git a/ffmap/node.py b/ffmap/node.py index e2169f2..b89dd19 100644 --- a/ffmap/node.py +++ b/ffmap/node.py @@ -86,4 +86,6 @@ class Node(defaultdict): else: new_neighbor[key] = val ret["neighbors"].append(new_neighbor) + if "id" not in ret: + ret["id"] = self.id return ret diff --git a/ffmap/outputs/d3json.py b/ffmap/outputs/d3json.py index fd5b267..06e82fb 100644 --- a/ffmap/outputs/d3json.py +++ b/ffmap/outputs/d3json.py 
@@ -25,42 +25,55 @@ class Output: nodes = [] count = 0 for node in set(nodedb.values()): - nodes.append(node.export()) + node_export = node.export() + node_export["flags"] = { + "gateway": "vpn" in node and node["vpn"], + "client": False, + "online": True + } + nodes.append(node_export) indexes[node.id] = count count += 1 - links = [] + links = {} for node in set(nodedb.values()): - if "neighbors" in node: - links.extend( - { + for neighbor in node.get("neighbors", []): + key = (neighbor["neighbor"].id, node.id) + rkey = tuple(reversed(key)) + if rkey in links: + links[rkey]["quality"] += ","+neighbor["metric"] + else: + links[key] = { "source": indexes[node.id], "target": indexes[neighbor["neighbor"].id], "quality": neighbor["metric"], - "type": "vpn" if neighbor["neighbor"]["vpn"] else None, + "type": "vpn" if neighbor["neighbor"]["vpn"] or node["vpn"] else None, "id": "-".join((node.id, neighbor["neighbor"].id)), - } for neighbor in node["neighbors"] - ) - if "clients" in node: - for client in node["clients"]: - if not client in indexes: - nodes.append({ - "id": client, - }) - indexes[client] = count - count += 1 - - links.append({ - "source": indexes[node.id], - "target": indexes[client], - "quality": "TT", - "type": "client", - "id": "-".join((node.id, client)), + } + for client in node.get("clients", []): + if not client in indexes: + nodes.append({ + "id": client, + "flags": { + "client": True, + "online": True, + "gateway": False + } }) + indexes[client] = count + count += 1 + + links[(node.id, client)] = { + "source": indexes[node.id], + "target": indexes[client], + "quality": "TT", + "type": "client", + "id": "-".join((node.id, client)), + } return { "nodes": nodes, - "links": links, + "links": list(links.values()), "meta": { "timestamp": datetime.utcnow() .replace(microsecond=0) From a88b207cf12f6fd12c5701aa7b54a79eb590c76f Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 10 Aug 2014 09:36:29 +0200 Subject: [PATCH 14/97] remove fuzzy matching --- 
nodedb.py | 94 +++---------------------------------------------------- 1 file changed, 4 insertions(+), 90 deletions(-) diff --git a/nodedb.py b/nodedb.py index 973861d..58595a5 100644 --- a/nodedb.py +++ b/nodedb.py @@ -59,16 +59,6 @@ class NodeDB: except: pass - def maybe_node_by_fuzzy_mac(self, mac): - mac_a = mac.lower() - - for node in self._nodes: - for mac_b in node.macs: - if is_derived_mac(mac_a, mac_b): - return node - - raise KeyError - def maybe_node_by_mac(self, macs): for node in self._nodes: for mac in macs: @@ -115,11 +105,8 @@ class NodeDB: node.add_mac(x['router']) self._nodes.append(node) - # If it's a TT link and the MAC is very similar - # consider this MAC as one of the routers - # MACs if 'gateway' in x and x['label'] == "TT": - if is_similar(x['router'], x['gateway']): + if x['router'] in node.macs: node.add_mac(x['gateway']) # skip processing as regular link @@ -216,7 +203,7 @@ class NodeDB: node = self.maybe_node_by_mac([mac]) except: try: - node = self.maybe_node_by_fuzzy_mac(mac) + node = self.maybe_node_mac(mac) except: # create an offline node node = Node() @@ -304,8 +291,8 @@ class NodeDB: idt = link.target.interface try: - node_source = self.maybe_node_by_fuzzy_mac(ids) - node_target = self.maybe_node_by_id(idt) + node_source = self.maybe_node_by_mac(ids) + node_target = self.maybe_node_by_mac(idt) if not node_source.flags['client'] and not node_target.flags['client']: # if none of the nodes associated with this link are clients, @@ -354,76 +341,3 @@ def generateId(nodeId,nodeCounters): n = 0 return nodeId + "_" + str(n) - -# compares two MACs and decides whether they are -# similar and could be from the same node -def is_similar(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - # first byte must only differ in bit 2 - if mac_a[0] | 2 == mac_b[0] | 2: - # count different bytes - c = [x for x in 
zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]] - else: - return False - - # no more than two additional bytes must differ - if len(c) <= 2: - delta = 0 - - if len(c) > 0: - delta = sum(abs(i[0] -i[1]) for i in c) - - # These addresses look pretty similar! - return delta < 8 - -def is_derived_mac(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]: - return False - - x = list(mac_a) - x[5] += 1 - x[5] %= 255 - if mac_b == x: - return True - - x[0] |= 2 - if mac_b == x: - return True - - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[5] += 2 - x[5] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - return False From 663539c2064e9c75aa8e87a35a3700d9b8f79f69 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Tue, 12 Aug 2014 20:51:36 +0200 Subject: [PATCH 15/97] Revert "remove fuzzy matching" This reverts commit a88b207cf12f6fd12c5701aa7b54a79eb590c76f. 
--- nodedb.py | 94 ++++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 90 insertions(+), 4 deletions(-) diff --git a/nodedb.py b/nodedb.py index 58595a5..973861d 100644 --- a/nodedb.py +++ b/nodedb.py @@ -59,6 +59,16 @@ class NodeDB: except: pass + def maybe_node_by_fuzzy_mac(self, mac): + mac_a = mac.lower() + + for node in self._nodes: + for mac_b in node.macs: + if is_derived_mac(mac_a, mac_b): + return node + + raise KeyError + def maybe_node_by_mac(self, macs): for node in self._nodes: for mac in macs: @@ -105,8 +115,11 @@ class NodeDB: node.add_mac(x['router']) self._nodes.append(node) + # If it's a TT link and the MAC is very similar + # consider this MAC as one of the routers + # MACs if 'gateway' in x and x['label'] == "TT": - if x['router'] in node.macs: + if is_similar(x['router'], x['gateway']): node.add_mac(x['gateway']) # skip processing as regular link @@ -203,7 +216,7 @@ class NodeDB: node = self.maybe_node_by_mac([mac]) except: try: - node = self.maybe_node_mac(mac) + node = self.maybe_node_by_fuzzy_mac(mac) except: # create an offline node node = Node() @@ -291,8 +304,8 @@ class NodeDB: idt = link.target.interface try: - node_source = self.maybe_node_by_mac(ids) - node_target = self.maybe_node_by_mac(idt) + node_source = self.maybe_node_by_fuzzy_mac(ids) + node_target = self.maybe_node_by_id(idt) if not node_source.flags['client'] and not node_target.flags['client']: # if none of the nodes associated with this link are clients, @@ -341,3 +354,76 @@ def generateId(nodeId,nodeCounters): n = 0 return nodeId + "_" + str(n) + +# compares two MACs and decides whether they are +# similar and could be from the same node +def is_similar(a, b): + if a == b: + return True + + try: + mac_a = list(int(i, 16) for i in a.split(":")) + mac_b = list(int(i, 16) for i in b.split(":")) + except ValueError: + return False + + # first byte must only differ in bit 2 + if mac_a[0] | 2 == mac_b[0] | 2: + # count different bytes + c = [x for x in 
zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]] + else: + return False + + # no more than two additional bytes must differ + if len(c) <= 2: + delta = 0 + + if len(c) > 0: + delta = sum(abs(i[0] -i[1]) for i in c) + + # These addresses look pretty similar! + return delta < 8 + +def is_derived_mac(a, b): + if a == b: + return True + + try: + mac_a = list(int(i, 16) for i in a.split(":")) + mac_b = list(int(i, 16) for i in b.split(":")) + except ValueError: + return False + + if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]: + return False + + x = list(mac_a) + x[5] += 1 + x[5] %= 255 + if mac_b == x: + return True + + x[0] |= 2 + if mac_b == x: + return True + + x[3] += 1 + x[3] %= 255 + if mac_b == x: + return True + + x = list(mac_a) + x[0] |= 2 + x[5] += 2 + x[5] %= 255 + if mac_b == x: + return True + + x = list(mac_a) + x[0] |= 2 + x[3] += 1 + x[3] %= 255 + if mac_b == x: + return True + + return False From 2dfd11189dd419ab09cb0630accab78b37edefde Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 17 Aug 2014 18:40:15 +0200 Subject: [PATCH 16/97] count clients, instead of nodes --- bat2nodes.py | 8 --- d3mapbuilder.py | 1 - node.py | 1 - nodedb.py | 129 +++++++----------------------------------------- 4 files changed, 18 insertions(+), 121 deletions(-) diff --git a/bat2nodes.py b/bat2nodes.py index ce6e596..78c2174 100755 --- a/bat2nodes.py +++ b/bat2nodes.py @@ -31,9 +31,6 @@ parser.add_argument('-a', '--aliases', parser.add_argument('-m', '--mesh', action='append', help='batman mesh interface') -parser.add_argument('-o', '--obscure', action='store_true', - help='obscure client macs') - parser.add_argument('-A', '--alfred', action='store_true', help='retrieve aliases from alfred') @@ -66,11 +63,6 @@ if options['alfred']: af = alfred() db.import_aliases(af.aliases()) -db.count_clients() - -if options['obscure']: - db.obscure_clients() - db.load_state("state.json") # remove nodes that have been offline for more than 30 days diff --git 
a/d3mapbuilder.py b/d3mapbuilder.py index ff7589f..8fb1961 100644 --- a/d3mapbuilder.py +++ b/d3mapbuilder.py @@ -13,7 +13,6 @@ class D3MapBuilder: nodes = self._db.get_nodes() output['nodes'] = [{'name': x.name, 'id': x.id, - 'macs': ', '.join(x.macs), 'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None, 'firmware': x.firmware, 'flags': x.flags, diff --git a/node.py b/node.py index a75f982..fce1866 100644 --- a/node.py +++ b/node.py @@ -7,7 +7,6 @@ class Node(): self.flags = dict({ "online": False, "gateway": False, - "client": False }) self.gps = None self.firmware = None diff --git a/nodedb.py b/nodedb.py index 973861d..6b3d503 100644 --- a/nodedb.py +++ b/nodedb.py @@ -28,9 +28,6 @@ class NodeDB: obj = [] for node in self._nodes: - if node.flags['client']: - continue - obj.append({ 'id': node.id , 'name': node.name , 'lastseen': node.lastseen @@ -102,8 +99,11 @@ class NodeDB: node.add_mac(x['secondary']) for x in vis_data: - if 'router' in x: + # TTs will be processed later + if x['label'] == "TT": + continue + try: node = self.maybe_node_by_mac((x['router'], )) except: @@ -115,16 +115,6 @@ class NodeDB: node.add_mac(x['router']) self._nodes.append(node) - # If it's a TT link and the MAC is very similar - # consider this MAC as one of the routers - # MACs - if 'gateway' in x and x['label'] == "TT": - if is_similar(x['router'], x['gateway']): - node.add_mac(x['gateway']) - - # skip processing as regular link - continue - try: if 'neighbor' in x: try: @@ -140,15 +130,15 @@ class NodeDB: node = Node() node.lastseen = self.time node.flags['online'] = True - if x['label'] == 'TT': - node.flags['client'] = True - node.add_mac(x['neighbor']) self._nodes.append(node) for x in vis_data: - if 'router' in x: + # TTs will be processed later + if x['label'] == "TT": + continue + try: if 'gateway' in x: x['neighbor'] = x['gateway'] @@ -172,13 +162,9 @@ class NodeDB: link.quality = x['label'] link.id = "-".join(sorted((link.source.interface, 
link.target.interface))) - if x['label'] == "TT": - link.type = "client" - self._links.append(link) for x in vis_data: - if 'primary' in x: try: node = self.maybe_node_by_mac((x['primary'], )) @@ -187,6 +173,16 @@ class NodeDB: node.id = x['primary'] + for x in vis_data: + if 'router' in x and x['label'] == 'TT': + try: + node = self.maybe_node_by_mac((x['router'], )) + node.add_mac(x['gateway']) + if not is_similar(x['router'], x['gateway']): + node.clientcount += 1 + except: + pass + def reduce_links(self): tmp_links = defaultdict(list) @@ -256,9 +252,6 @@ class NodeDB: while changes > 0: changes = 0 for link in self._links: - if link.type == "client": - continue - source_interface = self._nodes[link.source.id].interfaces[link.source.interface] target_interface = self._nodes[link.target.id].interfaces[link.target.interface] if source_interface.vpn or target_interface.vpn: @@ -269,92 +262,6 @@ class NodeDB: link.type = "vpn" - def count_clients(self): - for link in self._links: - try: - a = self.maybe_node_by_id(link.source.interface) - b = self.maybe_node_by_id(link.target.interface) - - if a.flags['client']: - client = a - node = b - elif b.flags['client']: - client = b - node = a - else: - continue - - node.clientcount += 1 - except: - pass - - def obscure_clients(self): - - globalIdCounter = 0 - nodeCounters = {} - clientIds = {} - - for node in self._nodes: - if node.flags['client']: - node.macs = set() - clientIds[node.id] = None - - for link in self._links: - ids = link.source.interface - idt = link.target.interface - - try: - node_source = self.maybe_node_by_fuzzy_mac(ids) - node_target = self.maybe_node_by_id(idt) - - if not node_source.flags['client'] and not node_target.flags['client']: - # if none of the nodes associated with this link are clients, - # we do not want to obscure - continue - - if ids in clientIds and idt in clientIds: - # This is for corner cases, when a client - # is linked to another client. 
- clientIds[ids] = str(globalIdCounter) - ids = str(globalIdCounter) - globalIdCounter += 1 - - clientIds[idt] = str(globalIdCounter) - idt = str(globalIdCounter) - globalIdCounter += 1 - - elif ids in clientIds: - newId = generateId(idt) - clientIds[ids] = newId - ids = newId - - link.source.interface = ids; - node_source.id = ids; - - elif idt in clientIds: - newId = generateId(ids,nodeCounters) - clientIds[idt] = newId - idt = newId - - link.target.interface = idt; - node_target.id = idt; - - link.id = ids + "-" + idt - - except KeyError: - pass - -# extends node id by incremented node counter -def generateId(nodeId,nodeCounters): - if nodeId in nodeCounters: - n = nodeCounters[nodeId] - nodeCounters[nodeId] = n + 1 - else: - nodeCounters[nodeId] = 1 - n = 0 - - return nodeId + "_" + str(n) - # compares two MACs and decides whether they are # similar and could be from the same node def is_similar(a, b): From 0d71de7091711ec4838c797c17c65b23454078c3 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 17 Aug 2014 19:10:19 +0200 Subject: [PATCH 17/97] fix rrd --- NodeRRD.py | 2 +- batman.py | 2 -- nodedb.py | 4 ---- rrd.py | 18 +----------------- 4 files changed, 2 insertions(+), 24 deletions(-) diff --git a/NodeRRD.py b/NodeRRD.py index f53cad6..6fa32f3 100644 --- a/NodeRRD.py +++ b/NodeRRD.py @@ -30,7 +30,7 @@ class NodeRRD(RRD): return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png" def update(self): - super().update({'upstate': 1, 'clients': self.node.clients}) + super().update({'upstate': 1, 'clients': self.node.clientcount}) def graph(self, directory, timeframe): """ diff --git a/batman.py b/batman.py index c9b3db6..583d962 100755 --- a/batman.py +++ b/batman.py @@ -31,8 +31,6 @@ class batman: output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"]) lines = output.splitlines() vds = self.vis_data_helper(lines) - for vd in vds: - vd['legacy'] = True return vds def vis_data_batadv_vis(self): diff --git a/nodedb.py 
b/nodedb.py index 6b3d503..b41f917 100644 --- a/nodedb.py +++ b/nodedb.py @@ -91,8 +91,6 @@ class NodeDB: node = Node() node.lastseen = self.time node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True self._nodes.append(node) node.add_mac(x['of']) @@ -110,8 +108,6 @@ class NodeDB: node = Node() node.lastseen = self.time node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True node.add_mac(x['router']) self._nodes.append(node) diff --git a/rrd.py b/rrd.py index 5c3330d..2a03026 100755 --- a/rrd.py +++ b/rrd.py @@ -28,23 +28,7 @@ class rrd: def update_database(self,db): nodes = {} - clientCount = 0 - for node in db.get_nodes(): - if node.flags['online']: - if not node.flags['client']: - nodes[node.id] = node - node.clients = 0; - if 'legacy' in node.flags and node.flags['legacy']: - clientCount -= 1 - else: - clientCount += 1 - for link in db.get_links(): - source = link.source.interface - target = link.target.interface - if source in nodes and not target in nodes: - nodes[source].clients += 1 - elif target in nodes and not source in nodes: - nodes[target].clients += 1 + clientCount = sum(map(lambda d: d.clientcount, db.get_nodes())) self.globalDb.update(len(nodes), clientCount) for node in nodes.values(): From a5cb5f0fdb0c53d7ef6d84ee2b2154e09ab27fe7 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 17 Aug 2014 19:10:32 +0200 Subject: [PATCH 18/97] remove fuzzy matching --- nodedb.py | 21 ++++----------------- 1 file changed, 4 insertions(+), 17 deletions(-) diff --git a/nodedb.py b/nodedb.py index b41f917..a45c7a1 100644 --- a/nodedb.py +++ b/nodedb.py @@ -56,16 +56,6 @@ class NodeDB: except: pass - def maybe_node_by_fuzzy_mac(self, mac): - mac_a = mac.lower() - - for node in self._nodes: - for mac_b in node.macs: - if is_derived_mac(mac_a, mac_b): - return node - - raise KeyError - def maybe_node_by_mac(self, macs): for node in self._nodes: for mac in macs: @@ -207,13 +197,10 @@ class NodeDB: try: node = 
self.maybe_node_by_mac([mac]) except: - try: - node = self.maybe_node_by_fuzzy_mac(mac) - except: - # create an offline node - node = Node() - node.add_mac(mac) - self._nodes.append(node) + # create an offline node + node = Node() + node.add_mac(mac) + self._nodes.append(node) if 'name' in alias: node.name = alias['name'] From 3780fb6cb1b28fc7233e0bb9f2121a3f91a0bfaa Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 17 Aug 2014 19:31:14 +0200 Subject: [PATCH 19/97] fix rrd.py for real --- rrd.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/rrd.py b/rrd.py index 2a03026..6f48913 100755 --- a/rrd.py +++ b/rrd.py @@ -27,11 +27,11 @@ class rrd: os.mkdir(self.imagePath) def update_database(self,db): - nodes = {} - clientCount = sum(map(lambda d: d.clientcount, db.get_nodes())) + nodes = db.get_nodes() + clientCount = sum(map(lambda d: d.clientcount, nodes)) self.globalDb.update(len(nodes), clientCount) - for node in nodes.values(): + for node in nodes: rrd = NodeRRD( os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'), node From 26e57117ffce5ccf9932cb5d57f7e57747a1b901 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 17 Aug 2014 19:32:13 +0200 Subject: [PATCH 20/97] rename rrd.py to rrddb.py --- bat2nodes.py | 2 +- rrd.py => rrddb.py | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename rrd.py => rrddb.py (100%) diff --git a/bat2nodes.py b/bat2nodes.py index 78c2174..3d71d83 100755 --- a/bat2nodes.py +++ b/bat2nodes.py @@ -8,7 +8,7 @@ import time from batman import batman from alfred import alfred -from rrd import rrd +from rrddb import rrd from nodedb import NodeDB from d3mapbuilder import D3MapBuilder diff --git a/rrd.py b/rrddb.py similarity index 100% rename from rrd.py rename to rrddb.py From 65655a38bbd0773841d60285dc1c8758aaa17e61 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 22 Feb 2014 13:34:14 +0100 Subject: [PATCH 21/97] RRD: Fix updating of DS --- RRD.py | 17 +++++------------ 1 file 
changed, 5 insertions(+), 12 deletions(-) diff --git a/RRD.py b/RRD.py index d1ae870..9bb87a0 100644 --- a/RRD.py +++ b/RRD.py @@ -80,9 +80,9 @@ class RRD: raise FileNotFoundError(self.filename) info = self.info() if set(ds_list) - set(info['ds'].values()) != set(): - if set((ds.name, ds.type) for ds in ds_list) \ - - set((ds.name, ds.type) for ds in info['ds'].values()) != set(): - raise RRDIncompatibleException() + for ds in ds_list: + if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type: + raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type)) else: raise RRDOutdatedException() @@ -177,15 +177,8 @@ class RRD: echo = True dump.stdout.close() restore.stdin.close() - try: - dump.wait(1) - except subprocess.TimeoutExpired: - dump.kill() - try: - restore.wait(2) - except subprocess.TimeoutExpired: - dump.kill() - raise RuntimeError("rrdtool restore process killed") + dump.wait() + restore.wait() os.rename(self.filename + ".new", self.filename) self._cached_info = None From 6d452fc1495120e7767572535c387fc6223b4e53 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 6 Sep 2014 13:48:03 +0200 Subject: [PATCH 22/97] d3json: obscure client MACs --- ffmap/outputs/d3json.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/ffmap/outputs/d3json.py b/ffmap/outputs/d3json.py index 06e82fb..31f03a6 100644 --- a/ffmap/outputs/d3json.py +++ b/ffmap/outputs/d3json.py @@ -50,26 +50,27 @@ class Output: "type": "vpn" if neighbor["neighbor"]["vpn"] or node["vpn"] else None, "id": "-".join((node.id, neighbor["neighbor"].id)), } + clientcount = 0 for client in node.get("clients", []): - if not client in indexes: - nodes.append({ - "id": client, - "flags": { - "client": True, - "online": True, - "gateway": False - } - }) - indexes[client] = count - count += 1 + nodes.append({ + "id": "%s-%s" % (node.id, clientcount), + "flags": { + "client": True, + "online": True, + 
"gateway": False + } + }) + indexes[client] = count links[(node.id, client)] = { "source": indexes[node.id], "target": indexes[client], "quality": "TT", "type": "client", - "id": "-".join((node.id, client)), + "id": "%s-%i" % (node.id, clientcount), } + count += 1 + clientcount += 1 return { "nodes": nodes, From f08aaaff4e1a62b9b1dbfb0f699ee2e1e8edc1a8 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sun, 7 Sep 2014 12:17:36 +0200 Subject: [PATCH 23/97] Fix fuzzy MAC matching --- ffmap/inputs/batadv.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ffmap/inputs/batadv.py b/ffmap/inputs/batadv.py index 576b09a..3a5abc9 100644 --- a/ffmap/inputs/batadv.py +++ b/ffmap/inputs/batadv.py @@ -28,8 +28,8 @@ class Input: if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]: return False - # First byte must only differ in bit 2 - if mac_a[0] | 2 != mac_b[0] | 2: + # First byte must only differ in bits 2 and 3 + if mac_a[0] | 6 != mac_b[0] | 6: return False # Count differing bytes after the third From 66112061d6820265f1d9157f5a62132ae76c6528 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 20 Sep 2014 12:42:40 +0200 Subject: [PATCH 24/97] Fix adding of nodes with multiple matching alises --- ffmap/nodedb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ffmap/nodedb.py b/ffmap/nodedb.py index 0be76b0..344ed29 100644 --- a/ffmap/nodedb.py +++ b/ffmap/nodedb.py @@ -42,7 +42,7 @@ class NodeDB(dict): if id_ == node_id: continue if id_ in self: - if node is not None: + if node is not None and node is not self[id_]: raise AmbiguityError([node_id, id_]) node = self[id_] node_id = id_ From 322860be7e31e1dae98a525d9dd2963872b181b5 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 20 Sep 2014 12:42:53 +0200 Subject: [PATCH 25/97] Add MACs from mesh_interfaces as alises --- ffmap/inputs/alfred.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py index 
340c0d9..13730af 100644 --- a/ffmap/inputs/alfred.py +++ b/ffmap/inputs/alfred.py @@ -29,4 +29,5 @@ class Input: nodeinfo[mac]['statistics'] = statistics[mac] for mac, node in nodeinfo.items(): - nodedb.add_or_update([mac], node) + aliases = [mac] + node.get('network', {}).get('mesh_interfaces', []) + nodedb.add_or_update(aliases, node) From 48a17446391aa70d0713a76fda3b914f30ea28ff Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 20 Sep 2014 21:16:37 +0200 Subject: [PATCH 26/97] firstseen --- node.py | 1 + nodedb.py | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/node.py b/node.py index fce1866..bece59c 100644 --- a/node.py +++ b/node.py @@ -12,6 +12,7 @@ class Node(): self.firmware = None self.clientcount = 0 self.lastseen = 0 + self.firstseen = 0 def add_mac(self, mac): mac = mac.lower() diff --git a/nodedb.py b/nodedb.py index a45c7a1..d0d081d 100644 --- a/nodedb.py +++ b/nodedb.py @@ -31,6 +31,7 @@ class NodeDB: obj.append({ 'id': node.id , 'name': node.name , 'lastseen': node.lastseen + , 'firstseen': node.firstseen , 'geo': node.gps }) @@ -53,6 +54,9 @@ class NodeDB: node.gps = n['geo'] self._nodes.append(node) + if 'firstseen' in n: + node.firstseen = n['firstseen'] + except: pass @@ -80,6 +84,7 @@ class NodeDB: except: node = Node() node.lastseen = self.time + node.firstseen = self.time node.flags['online'] = True self._nodes.append(node) @@ -97,6 +102,7 @@ class NodeDB: except: node = Node() node.lastseen = self.time + node.firstseen = self.time node.flags['online'] = True node.add_mac(x['router']) self._nodes.append(node) @@ -115,6 +121,7 @@ class NodeDB: except: node = Node() node.lastseen = self.time + node.firstseen = self.time node.flags['online'] = True node.add_mac(x['neighbor']) self._nodes.append(node) From b7a079d418dada79b34303fac5834b033dff1aa9 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 20 Sep 2014 21:37:00 +0200 Subject: [PATCH 27/97] remove dead code --- nodedb.py | 44 
-------------------------------------------- 1 file changed, 44 deletions(-) diff --git a/nodedb.py b/nodedb.py index d0d081d..5a92231 100644 --- a/nodedb.py +++ b/nodedb.py @@ -280,47 +280,3 @@ def is_similar(a, b): # These addresses look pretty similar! return delta < 8 - -def is_derived_mac(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]: - return False - - x = list(mac_a) - x[5] += 1 - x[5] %= 255 - if mac_b == x: - return True - - x[0] |= 2 - if mac_b == x: - return True - - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[5] += 2 - x[5] %= 255 - if mac_b == x: - return True - - x = list(mac_a) - x[0] |= 2 - x[3] += 1 - x[3] %= 255 - if mac_b == x: - return True - - return False From b570d8956f65e5e824e1065aebb8f1a499ee870d Mon Sep 17 00:00:00 2001 From: Daniel Ehlers Date: Mon, 22 Sep 2014 23:34:21 +0200 Subject: [PATCH 28/97] global rrd: Count online nodes instead of nodes in state --- rrddb.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rrddb.py b/rrddb.py index 6f48913..36e4682 100755 --- a/rrddb.py +++ b/rrddb.py @@ -30,7 +30,8 @@ class rrd: nodes = db.get_nodes() clientCount = sum(map(lambda d: d.clientcount, nodes)) - self.globalDb.update(len(nodes), clientCount) + curtime = time.time() - 60 + self.globalDb.update(len(list(filter(lambda x: x.lastseen >= curtime, nodes))), clientCount) for node in nodes: rrd = NodeRRD( os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'), From 6e101bc6de59c7064b0a0ffe09d6e7230501e45b Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 21 Sep 2014 22:18:31 +0200 Subject: [PATCH 29/97] simplify mark_gateway --- bat2nodes.py | 13 ++++--------- batman.py | 20 +++++++++----------- nodedb.py | 16 +++++----------- 3 files changed, 18 
insertions(+), 31 deletions(-) diff --git a/bat2nodes.py b/bat2nodes.py index 3d71d83..cc987ee 100755 --- a/bat2nodes.py +++ b/bat2nodes.py @@ -29,6 +29,7 @@ parser.add_argument('-a', '--aliases', metavar='FILE') parser.add_argument('-m', '--mesh', action='append', + default=["bat0"], help='batman mesh interface') parser.add_argument('-A', '--alfred', action='store_true', @@ -43,17 +44,11 @@ options = vars(args) db = NodeDB(int(time.time())) -if options['mesh']: - for mesh_interface in options['mesh']: - bm = batman(mesh_interface) - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateways(gw['mac']) -else: - bm = batman() +for mesh_interface in options['mesh']: + bm = batman(mesh_interface) db.parse_vis_data(bm.vis_data(options['alfred'])) for gw in bm.gateway_list(): - db.mark_gateways([gw['mac']]) + db.mark_gateway(gw) if options['aliases']: for aliases in options['aliases']: diff --git a/batman.py b/batman.py index 583d962..94229ad 100755 --- a/batman.py +++ b/batman.py @@ -45,22 +45,20 @@ class batman: """ output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"]) output_utf8 = output.decode("utf-8") - # TODO Parse information lines = output_utf8.splitlines() - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1) - # Remove header line - del lines[0] - # Fill gateway list + + own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + gw = [] gw_mode = self.gateway_mode() if gw_mode['mode'] == 'server': - gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']}) + gw.append(own_mac) + for line in lines: - gw_line = line.split() - if (gw_line[0] == 'No'): - continue - # When in client gateway mode maybe gw_line[0] is not the right. - gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]}) + gw_line = re.match(r"^(?:=>)? 
+([0-9a-f:]+) ", line) + if gw_line: + gw.append(gw_line.group(1)) + return gw def gateway_mode(self): diff --git a/nodedb.py b/nodedb.py index 5a92231..1a719a3 100644 --- a/nodedb.py +++ b/nodedb.py @@ -224,18 +224,12 @@ class NodeDB: if 'id' in alias: node.id = alias['id'] - # list of macs - # if options['gateway']: - # mark_gateways(options['gateway']) - def mark_gateways(self, gateways): - for gateway in gateways: - try: - node = self.maybe_node_by_mac((gateway, )) - except: - print("WARNING: did not find gateway '",gateway,"' in node list") - continue - + def mark_gateway(self, gateway): + try: + node = self.maybe_node_by_mac((gateway, )) node.flags['gateway'] = True + except KeyError: + print("WARNING: did not find gateway ", gateway, " in node list") def update_vpn_links(self): changes = 1 From ee8bbd8b3e4732d8fbcb841f35325cd737c084cf Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 21 Sep 2014 22:18:41 +0200 Subject: [PATCH 30/97] simplify clientcount --- nodedb.py | 37 ++++++------------------------------- 1 file changed, 6 insertions(+), 31 deletions(-) diff --git a/nodedb.py b/nodedb.py index 1a719a3..f0f7240 100644 --- a/nodedb.py +++ b/nodedb.py @@ -171,11 +171,15 @@ class NodeDB: try: node = self.maybe_node_by_mac((x['router'], )) node.add_mac(x['gateway']) - if not is_similar(x['router'], x['gateway']): - node.clientcount += 1 + node.clientcount += 1 except: pass + # don't count node as its own client + for node in self._nodes: + if node.clientcount > 0: + node.clientcount -= 1 + def reduce_links(self): tmp_links = defaultdict(list) @@ -245,32 +249,3 @@ class NodeDB: changes += 1 link.type = "vpn" - -# compares two MACs and decides whether they are -# similar and could be from the same node -def is_similar(a, b): - if a == b: - return True - - try: - mac_a = list(int(i, 16) for i in a.split(":")) - mac_b = list(int(i, 16) for i in b.split(":")) - except ValueError: - return False - - # first byte must only differ in bit 2 - if mac_a[0] | 2 == 
mac_b[0] | 2: - # count different bytes - c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]] - else: - return False - - # no more than two additional bytes must differ - if len(c) <= 2: - delta = 0 - - if len(c) > 0: - delta = sum(abs(i[0] -i[1]) for i in c) - - # These addresses look pretty similar! - return delta < 8 From cd1329963acc721088843f65c4060fd7ac64897a Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Tue, 23 Sep 2014 22:31:51 +0200 Subject: [PATCH 31/97] Alfred input: Pass -z switch to alfred-json --- ffmap/inputs/alfred.py | 26 +++++++++++--------------- 1 file changed, 11 insertions(+), 15 deletions(-) diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py index 13730af..9df5f5e 100644 --- a/ffmap/inputs/alfred.py +++ b/ffmap/inputs/alfred.py @@ -5,23 +5,19 @@ class Input: def __init__(self,request_data_type = 158): self.request_data_type = request_data_type + @staticmethod + def _call_alfred(request_data_type): + return json.loads(subprocess.check_output([ + "alfred-json", + "-z", + "-r", str(request_data_type), + "-f", "json", + ]).decode("utf-8")) + def get_data(self, nodedb): """Add data from alfred to the supplied nodedb""" - # get nodeinfo - output = subprocess.check_output([ - "alfred-json", - "-r", str(self.request_data_type), - "-f", "json", - ]) - nodeinfo = json.loads(output.decode("utf-8")) - - # get statistics - output = subprocess.check_output([ - "alfred-json", - "-r", str(self.request_data_type+1), - "-f", "json", - ]) - statistics = json.loads(output.decode("utf-8")) + nodeinfo = self._call_alfred(self.request_data_type) + statistics = self._call_alfred(self.request_data_type+1) # merge statistics into nodeinfo to be compatible with earlier versions for mac, node in statistics.items(): From 94f725656474a019376898b489d49fd3eb91caf6 Mon Sep 17 00:00:00 2001 From: Stefan Laudemann Date: Sun, 1 Feb 2015 03:07:35 +0100 Subject: [PATCH 32/97] Adds missing comma to pass (x['neighbor'], ) as tuple (not as str). 
As Python interprets "(elem)" as string and not as tuple, maybe_node_by_mac() iterates over the single characters in the MAC- address passed as parameter when called parse_vis_data(). Most of the calls already use the "(elem, )" syntax to indicate that a tuple is passed. However, there is still one call for which this is not the case causing a noticeable longer runtime due to calls to maybe_node_by_mac() that cannot yield any useful result. --- nodedb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nodedb.py b/nodedb.py index f0f7240..63c461e 100644 --- a/nodedb.py +++ b/nodedb.py @@ -110,7 +110,7 @@ class NodeDB: try: if 'neighbor' in x: try: - node = self.maybe_node_by_mac((x['neighbor'])) + node = self.maybe_node_by_mac((x['neighbor'], )) except: continue From b3c629264af75a2c0acebf9137497392ff1add03 Mon Sep 17 00:00:00 2001 From: Stefan Laudemann Date: Sun, 1 Feb 2015 13:50:01 +0100 Subject: [PATCH 33/97] Changes try-except-blocks around maybe_node_by_*() calls to only catch KeyError exceptions. Semantically, all the implemented error handling for the try-except- blocks around calls to "maybe_node_by_mac()" or "maybe_node_by_id()" in nodedb.py only handle the case that a particular MAC address cannot be found in the list of known nodes. If such a MAC address cannot be found in this list, the methods properly indicate this by raising a KeyError. However, all the try-except-block generically catch all exceptions and thus may cover other problems. But not only that problems might be covered by this, generic try-except-blocks make finding errors and de- bugging quite painful. Hence, these try-except-blocks should only catch KeyErrors or at least have an error handling that differs from other exceptions.
--- nodedb.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/nodedb.py b/nodedb.py index 63c461e..0bec73e 100644 --- a/nodedb.py +++ b/nodedb.py @@ -46,7 +46,7 @@ class NodeDB: for n in obj: try: node = self.maybe_node_by_id(n['id']) - except: + except KeyError: node = Node() node.id = n['id'] node.name = n['name'] @@ -81,7 +81,7 @@ class NodeDB: if 'of' in x: try: node = self.maybe_node_by_mac((x['of'], x['secondary'])) - except: + except KeyError: node = Node() node.lastseen = self.time node.firstseen = self.time @@ -99,7 +99,7 @@ class NodeDB: try: node = self.maybe_node_by_mac((x['router'], )) - except: + except KeyError: node = Node() node.lastseen = self.time node.firstseen = self.time @@ -111,14 +111,14 @@ class NodeDB: if 'neighbor' in x: try: node = self.maybe_node_by_mac((x['neighbor'], )) - except: + except KeyError: continue if 'gateway' in x: x['neighbor'] = x['gateway'] node = self.maybe_node_by_mac((x['neighbor'], )) - except: + except KeyError: node = Node() node.lastseen = self.time node.firstseen = self.time @@ -138,7 +138,7 @@ class NodeDB: router = self.maybe_node_by_mac((x['router'], )) neighbor = self.maybe_node_by_mac((x['neighbor'], )) - except: + except KeyError: continue # filter TT links merged in previous step @@ -161,7 +161,7 @@ class NodeDB: if 'primary' in x: try: node = self.maybe_node_by_mac((x['primary'], )) - except: + except KeyError: continue node.id = x['primary'] @@ -172,9 +172,9 @@ class NodeDB: node = self.maybe_node_by_mac((x['router'], )) node.add_mac(x['gateway']) node.clientcount += 1 - except: + except KeyError: pass - + # don't count node as its own client for node in self._nodes: if node.clientcount > 0: @@ -207,7 +207,7 @@ class NodeDB: for mac, alias in aliases.items(): try: node = self.maybe_node_by_mac([mac]) - except: + except KeyError: # create an offline node node = Node() node.add_mac(mac) From 71c2417b9da5685aee8eb3e63926370d1514a5c7 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Nils=20Martin=20Kl=C3=BCnder?= Date: Sat, 21 Feb 2015 19:17:49 +0100 Subject: [PATCH 34/97] How about a less confusing repr? MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit „WTF, why is this a string?” --- node.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/node.py b/node.py index bece59c..78e7765 100644 --- a/node.py +++ b/node.py @@ -24,7 +24,7 @@ class Node(): self.interfaces[mac] = Interface() def __repr__(self): - return self.macs.__repr__() + return '' % self.macs.__repr__() class Interface(): def __init__(self): From 9257aa01a6c70f06966dbaf1fb5398c7caab16b9 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Tue, 23 Sep 2014 12:00:13 +0200 Subject: [PATCH 35/97] drop ffhlwiki and mkmap --- ffhlwiki.py | 93 ----------------------------------------------------- mkmap.sh | 15 --------- 2 files changed, 108 deletions(-) delete mode 100755 ffhlwiki.py delete mode 100755 mkmap.sh diff --git a/ffhlwiki.py b/ffhlwiki.py deleted file mode 100755 index c1ba01e..0000000 --- a/ffhlwiki.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python3 - -import json -import argparse -from itertools import zip_longest -from urllib.request import urlopen -from bs4 import BeautifulSoup - -def import_wikigps(url): - def fetch_wikitable(url): - f = urlopen(url) - - soup = BeautifulSoup(f) - - table = soup.find_all("table")[0] - - rows = table.find_all("tr") - - headers = [] - - data = [] - - def maybe_strip(x): - if isinstance(x.string, str): - return x.string.strip() - else: - return "" - - for row in rows: - tds = list([maybe_strip(x) for x in row.find_all("td")]) - ths = list([maybe_strip(x) for x in row.find_all("th")]) - - if any(tds): - data.append(tds) - - if any(ths): - headers = ths - - nodes = [] - - for d in data: - nodes.append(dict(zip(headers, d))) - - return nodes - - nodes = fetch_wikitable(url) - - aliases = {} - - for node in nodes: - try: - node['MAC'] = node['MAC'].split(',') - except 
KeyError: - pass - - try: - node['GPS'] = node['GPS'].split(',') - except KeyError: - pass - - try: - node['Knotenname'] = node['Knotenname'].split(',') - except KeyError: - pass - - nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname']) - - for data in nodes: - alias = {} - - mac = data[0].strip() - - if data[1]: - alias['gps'] = data[1].strip() - - if data[2]: - alias['name'] = data[2].strip() - - aliases[mac] = alias - - return aliases - -parser = argparse.ArgumentParser() - -parser.add_argument('url', help='wiki URL') - -args = parser.parse_args() - -options = vars(args) - -aliases = import_wikigps(options['url']) - -print(json.dumps(aliases)) diff --git a/mkmap.sh b/mkmap.sh deleted file mode 100755 index ce3b162..0000000 --- a/mkmap.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -set -e - -DEST=$1 - - -[ "$DEST" ] || exit 1 - -cd "$(dirname "$0")"/ - -./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json -./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json - -./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST From 41ee81d92ce7067a22dc8b24b6c72b9a6e09473b Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Tue, 23 Sep 2014 11:57:45 +0200 Subject: [PATCH 36/97] alfred: restructure code, add nodeinfo, statistics, vis --- .gitignore | 3 +- NodeRRD.py | 3 +- alfred.py | 65 ++++++------ aliases.json_sample | 33 ++++-- backend.py | 101 ++++++++++++++++++ bat2nodes.py | 82 --------------- batman.py | 0 d3mapbuilder.py | 35 ------ graph.py | 66 ++++++++++++ link.py | 15 --- node.py | 32 ------ nodedb.py | 251 -------------------------------------------- nodes.py | 124 ++++++++++++++++++++++ rrddb.py | 19 ++-- 14 files changed, 358 insertions(+), 471 deletions(-) mode change 100755 => 100644 alfred.py create mode 100755 backend.py delete mode 100755 bat2nodes.py mode change 100755 => 100644 batman.py delete mode 100644 d3mapbuilder.py create mode 100644 graph.py delete 
mode 100644 link.py delete mode 100644 node.py delete mode 100644 nodedb.py create mode 100644 nodes.py mode change 100755 => 100644 rrddb.py diff --git a/.gitignore b/.gitignore index 0f42dec..c161fb3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *.pyc -aliases.json +aliases*.json nodedb/ +pycache/ diff --git a/NodeRRD.py b/NodeRRD.py index 6fa32f3..ca24c0d 100644 --- a/NodeRRD.py +++ b/NodeRRD.py @@ -1,6 +1,5 @@ import os import subprocess -from node import Node from RRD import RRD, DS, RRA class NodeRRD(RRD): @@ -30,7 +29,7 @@ class NodeRRD(RRD): return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png" def update(self): - super().update({'upstate': 1, 'clients': self.node.clientcount}) + super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']}) def graph(self, directory, timeframe): """ diff --git a/alfred.py b/alfred.py old mode 100755 new mode 100644 index b956026..f71b8da --- a/alfred.py +++ b/alfred.py @@ -2,41 +2,44 @@ import subprocess import json -class alfred: - def __init__(self,request_data_type = 158): - self.request_data_type = request_data_type +def _fetch(data_type): + output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) + return json.loads(output.decode("utf-8")).values() - def aliases(self): - output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json","-z"]) - alfred_data = json.loads(output.decode("utf-8")) - alias = {} - for mac,node in alfred_data.items(): - node_alias = {} - if 'location' in node: - try: - node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) - except: - pass +def nodeinfo(): + return _fetch(158) +def statistics(): + return _fetch(159) + +def vis(): + return _fetch(160) + +def aliases(): + alias = {} + for node in nodeinfo(): + node_alias = {} + if 'location' in node: try: - node_alias['firmware'] = node['software']['firmware']['release'] - 
except KeyError: + node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) + except: pass - try: - node_alias['id'] = node['network']['mac'] - except KeyError: - pass + try: + node_alias['firmware'] = node['software']['firmware']['release'] + except KeyError: + pass - if 'hostname' in node: - node_alias['name'] = node['hostname'] - elif 'name' in node: - node_alias['name'] = node['name'] - if len(node_alias): - alias[mac] = node_alias - return alias + try: + node_alias['id'] = node['network']['mac'] + except KeyError: + pass -if __name__ == "__main__": - ad = alfred() - al = ad.aliases() - print(al) + if 'hostname' in node: + node_alias['name'] = node['hostname'] + elif 'name' in node: + node_alias['name'] = node['name'] + if len(node_alias): + alias[node['network']['mac']] = node_alias + + return alias diff --git a/aliases.json_sample b/aliases.json_sample index 1f3ca33..ca1eb6b 100644 --- a/aliases.json_sample +++ b/aliases.json_sample @@ -1,9 +1,24 @@ -{ - "b0:48:7a:e7:d3:64" : { - "name" : "Meute-AP" - }, - "8e:3d:c2:10:10:28" : { - "name" : "holstentor", - "vpn" : true - } -} +[ + { + "node_id": "krtek", + "hostname": "krtek", + "location": { + "longitude": 10.74, + "latitude": 53.86 + }, + "network": { + "mesh_interfaces": [ + "00:25:86:e6:f1:bf" + ] + } + }, + { + "node_id": "gw1", + "hostname": "burgtor", + "network": { + "mesh_interfaces": [ + "52:54:00:f3:62:d9" + ] + } + } +] diff --git a/backend.py b/backend.py new file mode 100755 index 0000000..7544709 --- /dev/null +++ b/backend.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +import argparse +import json +import os +import sys +import networkx as nx +from datetime import datetime +from networkx.readwrite import json_graph + +import alfred +import nodes +import graph +from batman import batman +from rrddb import rrd + +parser = argparse.ArgumentParser() + +parser.add_argument('-a', '--aliases', + help='read aliases from FILE', + default=[], + action='append', + 
metavar='FILE') + +parser.add_argument('-m', '--mesh', action='append', + help='batman mesh interface') + +parser.add_argument('-d', '--destination-directory', action='store', + help='destination directory for generated files',required=True) + +parser.add_argument('--vpn', action='append', metavar='MAC', + help='assume MAC to be part of the VPN') + +parser.add_argument('--prune', metavar='DAYS', + help='forget nodes offline for at least DAYS') + +args = parser.parse_args() + +options = vars(args) + +if not options['mesh']: + options['mesh'] = ['bat0'] + +nodes_fn = os.path.join(options['destination_directory'], 'nodes.json') +graph_fn = os.path.join(options['destination_directory'], 'graph.json') + +now = datetime.utcnow().replace(microsecond=0) + +try: + nodedb = json.load(open(nodes_fn)) + + # ignore if old format + if 'links' in nodedb: + raise +except: + nodedb = {'nodes': dict()} + +nodedb['timestamp'] = now.isoformat() + +for node_id, node in nodedb['nodes'].items(): + node['flags']['online'] = False + +nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) + +for aliases in options['aliases']: + with open(aliases, 'r') as f: + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) + +nodes.reset_statistics(nodedb['nodes']) +nodes.import_statistics(nodedb['nodes'], alfred.statistics()) + +bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh']))) +for vis_data, gateway_list in bm: + nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) + nodes.import_vis_clientcount(nodedb['nodes'], vis_data) + nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) + nodes.mark_gateways(nodedb['nodes'], gateway_list) + +if options['prune']: + nodes.prune_nodes(nodedb['nodes'], now, int(options['prune'])) + +batadv_graph = nx.DiGraph() +for vis_data, gateway_list in bm: + graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) + +if options['vpn']: + 
graph.mark_vpn(batadv_graph, frozenset(options['vpn'])) + +batadv_graph = graph.merge_nodes(batadv_graph) +batadv_graph = graph.to_undirected(batadv_graph) + +with open(nodes_fn, 'w') as f: + json.dump(nodedb, f) + +with open(graph_fn, 'w') as f: + json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) + +scriptdir = os.path.dirname(os.path.realpath(__file__)) +rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes') +rrd.update_database(nodedb['nodes']) +rrd.update_images() diff --git a/bat2nodes.py b/bat2nodes.py deleted file mode 100755 index cc987ee..0000000 --- a/bat2nodes.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 - -import json -import fileinput -import argparse -import os -import time - -from batman import batman -from alfred import alfred -from rrddb import rrd -from nodedb import NodeDB -from d3mapbuilder import D3MapBuilder - -# Force encoding to UTF-8 -import locale # Ensures that subsequent open()s -locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded. 
- -import sys -#sys.stdin = open('/dev/stdin', 'r') -#sys.stdout = open('/dev/stdout', 'w') -#sys.stderr = open('/dev/stderr', 'w') - -parser = argparse.ArgumentParser() - -parser.add_argument('-a', '--aliases', - help='read aliases from FILE', - action='append', - metavar='FILE') - -parser.add_argument('-m', '--mesh', action='append', - default=["bat0"], - help='batman mesh interface') - -parser.add_argument('-A', '--alfred', action='store_true', - help='retrieve aliases from alfred') - -parser.add_argument('-d', '--destination-directory', action='store', - help='destination directory for generated files',required=True) - -args = parser.parse_args() - -options = vars(args) - -db = NodeDB(int(time.time())) - -for mesh_interface in options['mesh']: - bm = batman(mesh_interface) - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateway(gw) - -if options['aliases']: - for aliases in options['aliases']: - db.import_aliases(json.load(open(aliases))) - -if options['alfred']: - af = alfred() - db.import_aliases(af.aliases()) - -db.load_state("state.json") - -# remove nodes that have been offline for more than 30 days -db.prune_offline(time.time() - 30*86400) - -db.dump_state("state.json") - -scriptdir = os.path.dirname(os.path.realpath(__file__)) - -m = D3MapBuilder(db) - -#Write nodes json -nodes_json = open(options['destination_directory'] + '/nodes.json.new','w') -nodes_json.write(m.build()) -nodes_json.close() - -#Move to destination -os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json') - -rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes") -rrd.update_database(db) -rrd.update_images() diff --git a/batman.py b/batman.py old mode 100755 new mode 100644 diff --git a/d3mapbuilder.py b/d3mapbuilder.py deleted file mode 100644 index 8fb1961..0000000 --- a/d3mapbuilder.py +++ /dev/null @@ -1,35 +0,0 @@ -import json -import datetime - -class 
D3MapBuilder: - def __init__(self, db): - self._db = db - - def build(self): - output = dict() - - now = datetime.datetime.utcnow().replace(microsecond=0) - - nodes = self._db.get_nodes() - - output['nodes'] = [{'name': x.name, 'id': x.id, - 'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None, - 'firmware': x.firmware, - 'flags': x.flags, - 'clientcount': x.clientcount - } for x in nodes] - - links = self._db.get_links() - - output['links'] = [{'source': x.source.id, 'target': x.target.id, - 'quality': x.quality, - 'type': x.type, - 'id': x.id - } for x in links] - - output['meta'] = { - 'timestamp': now.isoformat() - } - - return json.dumps(output) - diff --git a/graph.py b/graph.py new file mode 100644 index 0000000..b6e86aa --- /dev/null +++ b/graph.py @@ -0,0 +1,66 @@ +import networkx as nx +from copy import deepcopy +from functools import reduce +from itertools import chain +from nodes import build_mac_table + +def import_vis_data(graph, nodes, vis_data): + macs = build_mac_table(nodes) + nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) + nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) + graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) + + edges = filter(lambda d: 'neighbor' in d, vis_data) + graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) + +def mark_vpn(graph, vpn_macs): + components = map(frozenset, nx.weakly_connected_components(graph)) + components = filter(vpn_macs.intersection, components) + nodes = reduce(lambda a, b: a | b, components, set()) + + for node in nodes: + for k, v in graph[node].items(): + v['vpn'] = True + +def to_multigraph(graph): + def f(a): + node = graph.node[a] + return node['primary'] if node else a + + G = nx.MultiDiGraph() + map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else 
(node, dict()) + G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) + G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + + return G + +def merge_nodes(graph): + def merge_edges(data): + tq = min(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn) + + G = to_multigraph(graph) + H = nx.DiGraph() + H.add_nodes_from(G.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) + H.add_edges_from(edges) + + return H + +def to_undirected(graph): + G = nx.MultiGraph() + G.add_nodes_from(graph.nodes_iter(data=True)) + G.add_edges_from(graph.edges_iter(data=True)) + + def merge_edges(data): + tq = max(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2) + + H = nx.Graph() + H.add_nodes_from(G.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) + H.add_edges_from(edges) + + return H diff --git a/link.py b/link.py deleted file mode 100644 index 896079b..0000000 --- a/link.py +++ /dev/null @@ -1,15 +0,0 @@ -class Link(): - def __init__(self): - self.id = None - self.source = None - self.target = None - self.quality = None - self.type = None - -class LinkConnector(): - def __init__(self): - self.id = None - self.interface = None - - def __repr__(self): - return "LinkConnector(%d, %s)" % (self.id, self.interface) diff --git a/node.py b/node.py deleted file mode 100644 index 78e7765..0000000 --- a/node.py +++ /dev/null @@ -1,32 +0,0 @@ -class Node(): - def __init__(self): - self.name = "" - self.id = "" - self.macs = set() - self.interfaces = dict() - self.flags = dict({ - "online": False, - "gateway": False, - }) - self.gps = None - self.firmware = None - self.clientcount = 0 - self.lastseen = 0 - self.firstseen = 0 - - def add_mac(self, 
mac): - mac = mac.lower() - if len(self.macs) == 0: - self.id = mac - - self.macs.add(mac) - - self.interfaces[mac] = Interface() - - def __repr__(self): - return '' % self.macs.__repr__() - -class Interface(): - def __init__(self): - self.vpn = False - diff --git a/nodedb.py b/nodedb.py deleted file mode 100644 index 0bec73e..0000000 --- a/nodedb.py +++ /dev/null @@ -1,251 +0,0 @@ -import json -from functools import reduce -from collections import defaultdict -from node import Node, Interface -from link import Link, LinkConnector - -class NodeDB: - def __init__(self, time=0): - self.time = time - self._nodes = [] - self._links = [] - - # fetch list of links - def get_links(self): - self.update_vpn_links() - return self.reduce_links() - - # fetch list of nodes - def get_nodes(self): - return self._nodes - - # remove all offlines nodes with lastseen < timestamp - def prune_offline(self, timestamp): - self._nodes = list(filter(lambda x: x.lastseen >= timestamp, self._nodes)) - - # write persistent state to file - def dump_state(self, filename): - obj = [] - - for node in self._nodes: - obj.append({ 'id': node.id - , 'name': node.name - , 'lastseen': node.lastseen - , 'firstseen': node.firstseen - , 'geo': node.gps - }) - - with open(filename, "w") as f: - json.dump(obj, f) - - # load persistent state from file - def load_state(self, filename): - try: - with open(filename, "r") as f: - obj = json.load(f) - for n in obj: - try: - node = self.maybe_node_by_id(n['id']) - except KeyError: - node = Node() - node.id = n['id'] - node.name = n['name'] - node.lastseen = n['lastseen'] - node.gps = n['geo'] - self._nodes.append(node) - - if 'firstseen' in n: - node.firstseen = n['firstseen'] - - except: - pass - - def maybe_node_by_mac(self, macs): - for node in self._nodes: - for mac in macs: - if mac.lower() in node.macs: - return node - - raise KeyError - - def maybe_node_by_id(self, mac): - for node in self._nodes: - if mac.lower() == node.id: - return node - - raise 
KeyError - - def parse_vis_data(self,vis_data): - for x in vis_data: - - if 'of' in x: - try: - node = self.maybe_node_by_mac((x['of'], x['secondary'])) - except KeyError: - node = Node() - node.lastseen = self.time - node.firstseen = self.time - node.flags['online'] = True - self._nodes.append(node) - - node.add_mac(x['of']) - node.add_mac(x['secondary']) - - for x in vis_data: - if 'router' in x: - # TTs will be processed later - if x['label'] == "TT": - continue - - try: - node = self.maybe_node_by_mac((x['router'], )) - except KeyError: - node = Node() - node.lastseen = self.time - node.firstseen = self.time - node.flags['online'] = True - node.add_mac(x['router']) - self._nodes.append(node) - - try: - if 'neighbor' in x: - try: - node = self.maybe_node_by_mac((x['neighbor'], )) - except KeyError: - continue - - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - node = self.maybe_node_by_mac((x['neighbor'], )) - except KeyError: - node = Node() - node.lastseen = self.time - node.firstseen = self.time - node.flags['online'] = True - node.add_mac(x['neighbor']) - self._nodes.append(node) - - for x in vis_data: - if 'router' in x: - # TTs will be processed later - if x['label'] == "TT": - continue - - try: - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - router = self.maybe_node_by_mac((x['router'], )) - neighbor = self.maybe_node_by_mac((x['neighbor'], )) - except KeyError: - continue - - # filter TT links merged in previous step - if router == neighbor: - continue - - link = Link() - link.source = LinkConnector() - link.source.interface = x['router'] - link.source.id = self._nodes.index(router) - link.target = LinkConnector() - link.target.interface = x['neighbor'] - link.target.id = self._nodes.index(neighbor) - link.quality = x['label'] - link.id = "-".join(sorted((link.source.interface, link.target.interface))) - - self._links.append(link) - - for x in vis_data: - if 'primary' in x: - try: - node = self.maybe_node_by_mac((x['primary'], )) - except 
KeyError: - continue - - node.id = x['primary'] - - for x in vis_data: - if 'router' in x and x['label'] == 'TT': - try: - node = self.maybe_node_by_mac((x['router'], )) - node.add_mac(x['gateway']) - node.clientcount += 1 - except KeyError: - pass - - # don't count node as its own client - for node in self._nodes: - if node.clientcount > 0: - node.clientcount -= 1 - - def reduce_links(self): - tmp_links = defaultdict(list) - - for link in self._links: - tmp_links[link.id].append(link) - - links = [] - - def reduce_link(a, b): - a.id = b.id - a.source = b.source - a.target = b.target - a.type = b.type - a.quality = ", ".join([x for x in (a.quality, b.quality) if x]) - - return a - - for k, v in tmp_links.items(): - new_link = reduce(reduce_link, v, Link()) - links.append(new_link) - - return links - - def import_aliases(self, aliases): - for mac, alias in aliases.items(): - try: - node = self.maybe_node_by_mac([mac]) - except KeyError: - # create an offline node - node = Node() - node.add_mac(mac) - self._nodes.append(node) - - if 'name' in alias: - node.name = alias['name'] - - if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces: - node.interfaces[mac].vpn = True - - if 'gps' in alias: - node.gps = alias['gps'] - - if 'firmware' in alias: - node.firmware = alias['firmware'] - - if 'id' in alias: - node.id = alias['id'] - - def mark_gateway(self, gateway): - try: - node = self.maybe_node_by_mac((gateway, )) - node.flags['gateway'] = True - except KeyError: - print("WARNING: did not find gateway ", gateway, " in node list") - - def update_vpn_links(self): - changes = 1 - while changes > 0: - changes = 0 - for link in self._links: - source_interface = self._nodes[link.source.id].interfaces[link.source.interface] - target_interface = self._nodes[link.target.id].interfaces[link.target.interface] - if source_interface.vpn or target_interface.vpn: - source_interface.vpn = True - target_interface.vpn = True - if link.type != "vpn": - 
changes += 1 - - link.type = "vpn" diff --git a/nodes.py b/nodes.py new file mode 100644 index 0000000..49d9fc4 --- /dev/null +++ b/nodes.py @@ -0,0 +1,124 @@ +from collections import Counter, defaultdict +from datetime import datetime +from functools import reduce + +def build_mac_table(nodes): + macs = dict() + for node_id, node in nodes.items(): + try: + for mac in node['nodeinfo']['network']['mesh_interfaces']: + macs[mac] = node_id + except KeyError: + pass + + return macs + +def prune_nodes(nodes, now, days): + prune = [] + for node_id, node in nodes.items(): + if not 'lastseen' in node: + prune.append(node_id) + continue + + lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') + delta = (now - lastseen).seconds + + if delta >= days * 86400: + prune.append(node_id) + + for node_id in prune: + del nodes[node_id] + +def mark_online(node, now): + node.setdefault('firstseen', now.isoformat()) + node['flags']['online'] = True + +def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): + for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos): + node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) + node['nodeinfo'] = nodeinfo + node['flags']['online'] = False + node['flags']['gateway'] = False + + if assume_online: + node['lastseen'] = now.isoformat() + mark_online(node, now) + +def reset_statistics(nodes): + for node in nodes.values(): + node['statistics'] = { 'clients': 0 } + +def import_statistics(nodes, statistics): + def add(node, statistics, target, source, f=lambda d: d): + try: + node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) + except (KeyError,TypeError): + pass + + macs = build_mac_table(nodes) + statistics = filter(lambda d: 'node_id' in d, statistics) + statistics = filter(lambda d: d['node_id'] in nodes, statistics) + for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): + add(node, statistics, 'clients', ['clients', 'total']) + add(node, statistics, 'gateway', 
['gateway'], lambda d: macs.get(d, d)) + add(node, statistics, 'uptime', ['uptime']) + add(node, statistics, 'loadavg', ['loadavg']) + add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) + add(node, statistics, 'rootfs_usage', ['rootfs_usage']) + +def import_mesh_ifs_vis_data(nodes, vis_data): + macs = build_mac_table(nodes) + + mesh_ifs = defaultdict(lambda: set()) + for line in filter(lambda d: 'secondary' in d, vis_data): + primary = line['of'] + mesh_ifs[primary].add(primary) + mesh_ifs[primary].add(line['secondary']) + + def if_to_node(ifs): + a = filter(lambda d: d in macs, ifs) + a = map(lambda d: nodes[macs[d]], a) + try: + return (next(a), ifs) + except StopIteration: + return None + + mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values())) + + for v in mesh_nodes: + node = v[0] + mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) + node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) + +def import_vis_clientcount(nodes, vis_data): + macs = build_mac_table(nodes) + data = filter(lambda d: d.get('label', None) == 'TT', vis_data) + data = filter(lambda d: d['router'] in macs, data) + data = map(lambda d: macs[d['router']], data) + + for node_id, clientcount in Counter(data).items(): + nodes[node_id]['statistics'].setdefault('clients', clientcount) + +def mark_gateways(nodes, gateways): + macs = build_mac_table(nodes) + gateways = filter(lambda d: d in macs, gateways) + + for node in map(lambda d: nodes[macs[d]], gateways): + node['flags']['gateway'] = True + +def mark_vis_data_online(nodes, vis_data, now): + macs = build_mac_table(nodes) + + online = set() + for line in vis_data: + if 'primary' in line: + online.add(line['primary']) + elif 'secondary' in line: + online.add(line['secondary']) + elif 'gateway' in line: + # This matches clients' MACs. + # On pre-Gluon nodes the primary MAC will be one of it. 
+ online.add(line['gateway']) + + for mac in filter(lambda d: d in macs, online): + mark_online(nodes[macs[mac]], now) diff --git a/rrddb.py b/rrddb.py old mode 100755 new mode 100644 index 36e4682..2fccff4 --- a/rrddb.py +++ b/rrddb.py @@ -26,23 +26,16 @@ class rrd: except: os.mkdir(self.imagePath) - def update_database(self,db): - nodes = db.get_nodes() - clientCount = sum(map(lambda d: d.clientcount, nodes)) + def update_database(self, nodes): + online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) + client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) - curtime = time.time() - 60 - self.globalDb.update(len(list(filter(lambda x: x.lastseen >= curtime, nodes))), clientCount) - for node in nodes: - rrd = NodeRRD( - os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'), - node - ) + self.globalDb.update(len(online_nodes), client_count) + for node_id, node in online_nodes.items(): + rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) rrd.update() def update_images(self): - """ Creates an image for every rrd file in the database directory. 
- """ - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) nodeDbFiles = os.listdir(self.dbPath) From 2a2db65bc6cdae5f1d7d2eb8c9e1d15a3076a1e7 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 21 Mar 2015 01:26:56 +0100 Subject: [PATCH 37/97] update README --- README.md | 65 ++++++++++++++++++++++++++++++------------------------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index fc718fe..d70aaac 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,17 @@ # Data for Freifunk Map, Graph and Node List ffmap-backend gathers information on the batman network by invoking - batctl -and - batadv-vis + + * batctl, + * alfred-json and + * batadv-vis + as root (via sudo) and has this information placed into a target directory as the file "nodes.json" and also updates the directory "nodes" with graphical representations of uptimes and the number of clients connecting. -The target directory is suggested to host all information for interpreting those -node descriptions, e.g. as provided by https://github.com/ffnord/ffmap-d3.git . +Run `backend.py --help` for a quick overview of all available options. + When executed without root privileges, we suggest to grant sudo permissions within wrappers of those binaries, so no further changes are required in other scripts: @@ -24,30 +26,35 @@ EOCAT and analogously for batadv-vis. The entry for /etc/sudoers could be whateveruser ALL=(ALL:ALL) NOPASSWD: /usr/sbin/batctl,/usr/sbin/batadv-vis,/usr/sbin/alfred-json -The destination directory can be made directly available through apache: -
-$ cat /etc/apache2/site-enabled/000-default
-...
-        
-                Options Indexes FollowSymLinks MultiViews
-                AllowOverride None
-                Order allow,deny
-                allow from all
-        
-...
-
-$ cat /etc/apache2/conf.d/freifunk
-Alias /map /home/ffmap/www/
-Alias /firmware /home/freifunk/autoupdates/
-
- -To execute, run - ./mkmap.sh ../www -The script expects above described sudo-wrappers in the $HOME directory of the user executing -the script. If those are not available, an error will occurr if not executed as root. Also, -the tool realpath optionally allows to execute the script from anywhere in the directory tree. - For the script's regular execution add the following to the crontab:
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+* * * * * /path/to/ffmap-backend/backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a --vpn d2:d0:93:63:f7:da
 
+ +# Data format + +## nodes.json + + { 'nodes': { + node_id: { 'flags': { flags }, + 'firstseen': isoformat, + 'lastseen': isoformat, + 'nodeinfo': {...}, # copied from alfred type 158 + 'statistics': { + 'uptime': double, # seconds + 'memory_usage': double, # 0..1 + 'clients': double, + 'rootfs_usage': double, # 0..1 + 'loadavg': double, + 'gateway': mac + } + }, + ... + } + 'timestamp': isoformat + } + +### flags (bool) + +- online +- gateway From 3ddecd26bf689dea8802c4c3884a19d6dfe62e4c Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 21 Mar 2015 10:37:09 +0100 Subject: [PATCH 38/97] fix lastseen handling of non-alfred nodes --- nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nodes.py b/nodes.py index 49d9fc4..3a52753 100644 --- a/nodes.py +++ b/nodes.py @@ -30,6 +30,7 @@ def prune_nodes(nodes, now, days): del nodes[node_id] def mark_online(node, now): + node['lastseen'] = now.isoformat() node.setdefault('firstseen', now.isoformat()) node['flags']['online'] = True @@ -41,7 +42,6 @@ def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): node['flags']['gateway'] = False if assume_online: - node['lastseen'] = now.isoformat() mark_online(node, now) def reset_statistics(nodes): From 9195ea96501dc3f33ced25d39b1746cbdbb37c87 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 21 Mar 2015 15:17:50 +0100 Subject: [PATCH 39/97] make mesh_interfaces optional --- nodes.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/nodes.py b/nodes.py index 3a52753..61949e1 100644 --- a/nodes.py +++ b/nodes.py @@ -87,7 +87,12 @@ def import_mesh_ifs_vis_data(nodes, vis_data): for v in mesh_nodes: node = v[0] - mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) + + try: + mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) + except KeyError: + mesh_ifs = set() + node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) def import_vis_clientcount(nodes, vis_data): From 
3291b2b6bacf582baeebaa187356c73809e3796b Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 16:49:37 +0100 Subject: [PATCH 40/97] fix alot of pep8 --- GlobalRRD.py | 15 ++-- NodeRRD.py | 27 +++++--- RRD.py | 65 +++++++++--------- alfred.py | 61 ++++++++-------- backend.py | 132 +++++++++++++++++------------------ batman.py | 142 ++++++++++++++++++++------------------ graph.py | 96 ++++++++++++++------------ nodes.py | 191 +++++++++++++++++++++++++++------------------------ rrddb.py | 72 +++++++++---------- 9 files changed, 421 insertions(+), 380 deletions(-) diff --git a/GlobalRRD.py b/GlobalRRD.py index f3f3960..b3cf31a 100644 --- a/GlobalRRD.py +++ b/GlobalRRD.py @@ -2,6 +2,7 @@ import os import subprocess from RRD import RRD, DS, RRA + class GlobalRRD(RRD): ds_list = [ # Number of nodes available @@ -10,14 +11,17 @@ class GlobalRRD(RRD): DS('clients', 'GAUGE', 120, 0, float('NaN')), ] rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples - RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples + # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 1, 120), + # 31 days of 1 hour samples + RRA('AVERAGE', 0.5, 60, 744), + # ~5 years of 1 day samples + RRA('AVERAGE', 0.5, 1440, 1780), ] def __init__(self, directory): super().__init__(os.path.join(directory, "nodes.rrd")) - self.ensureSanity(self.ds_list, self.rra_list, step=60) + self.ensure_sanity(self.ds_list, self.rra_list, step=60) def update(self, nodeCount, clientCount): super().update({'nodes': nodeCount, 'clients': clientCount}) @@ -30,6 +34,5 @@ class GlobalRRD(RRD): 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', 'LINE1:nodes#F00:nodes\\l', 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'LINE2:clients#00F:clients', - ] + 'LINE2:clients#00F:clients'] subprocess.check_output(args) diff --git a/NodeRRD.py b/NodeRRD.py index ca24c0d..a4ec092 100644 --- a/NodeRRD.py +++ b/NodeRRD.py @@ -2,19 +2,24 @@ 
import os import subprocess from RRD import RRD, DS, RRA + class NodeRRD(RRD): ds_list = [ DS('upstate', 'GAUGE', 120, 0, 1), DS('clients', 'GAUGE', 120, 0, float('NaN')), ] rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 5, 1440), # 5 days of 5 minute samples - RRA('AVERAGE', 0.5, 60, 720), # 30 days of 1 hour samples - RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples + # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 1, 120), + # 5 days of 5 minute samples + RRA('AVERAGE', 0.5, 5, 1440), + # 30 days of 1 hour samples + RRA('AVERAGE', 0.5, 60, 720), + # 1 year of 12 hour samples + RRA('AVERAGE', 0.5, 720, 730), ] - def __init__(self, filename, node = None): + def __init__(self, filename, node=None): """ Create a new RRD for a given node. @@ -22,12 +27,13 @@ class NodeRRD(RRD): """ self.node = node super().__init__(filename) - self.ensureSanity(self.ds_list, self.rra_list, step=60) + self.ensure_sanity(self.ds_list, self.rra_list, step=60) @property def imagename(self): - return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png" + return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0]) + # TODO: fix this, python does not support function overloading def update(self): super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']}) @@ -36,8 +42,8 @@ class NodeRRD(RRD): Create a graph in the given directory. 
The file will be named basename.png if the RRD file is named basename.rrd """ - args = ['rrdtool','graph', os.path.join(directory, self.imagename), - '-s', '-' + timeframe , + args = ['rrdtool', 'graph', os.path.join(directory, self.imagename), + '-s', '-' + timeframe, '-w', '800', '-h', '400', '-l', '0', @@ -48,6 +54,5 @@ class NodeRRD(RRD): 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*', 'AREA:c#0F0:up\\l', 'AREA:d#F00:down\\l', - 'LINE1:c#00F:clients connected\\l', - ] + 'LINE1:c#00F:clients connected\\l'] subprocess.check_output(args) diff --git a/RRD.py b/RRD.py index 9bb87a0..799338c 100644 --- a/RRD.py +++ b/RRD.py @@ -1,19 +1,20 @@ import subprocess import re -import io import os -from tempfile import TemporaryFile from operator import xor, eq from functools import reduce from itertools import starmap import math + class RRDIncompatibleException(Exception): """ Is raised when an RRD doesn't have the desired definition and cannot be upgraded to it. """ pass + + class RRDOutdatedException(Exception): """ Is raised when an RRD doesn't have the desired definition, but can be @@ -25,7 +26,8 @@ if not hasattr(__builtins__, "FileNotFoundError"): class FileNotFoundError(Exception): pass -class RRD: + +class RRD(object): """ An RRD is a Round Robin Database, a database which forgets old data and aggregates multiple records into new ones. @@ -49,7 +51,7 @@ class RRD: def _exec_rrdtool(self, cmd, *args, **kwargs): pargs = ["rrdtool", cmd, self.filename] - for k,v in kwargs.items(): + for k, v in kwargs.items(): pargs.extend(["--" + k, str(v)]) pargs.extend(args) subprocess.check_output(pargs) @@ -57,7 +59,7 @@ class RRD: def __init__(self, filename): self.filename = filename - def ensureSanity(self, ds_list, rra_list, **kwargs): + def ensure_sanity(self, ds_list, rra_list, **kwargs): """ Create or upgrade the RRD file if necessary to contain all DS in ds_list. 
If it needs to be created, the RRAs in rra_list and any kwargs @@ -65,13 +67,13 @@ class RRD: database are NOT modified! """ try: - self.checkSanity(ds_list) + self.check_sanity(ds_list) except FileNotFoundError: self.create(ds_list, rra_list, **kwargs) except RRDOutdatedException: self.upgrade(ds_list) - def checkSanity(self, ds_list=()): + def check_sanity(self, ds_list=()): """ Check if the RRD file exists and contains (at least) the DS listed in ds_list. @@ -82,7 +84,8 @@ class RRD: if set(ds_list) - set(info['ds'].values()) != set(): for ds in ds_list: if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type: - raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type)) + raise RRDIncompatibleException("%s is %s but should be %s" % + (ds.name, ds.type, info['ds'][ds.name].type)) else: raise RRDOutdatedException() @@ -106,7 +109,7 @@ class RRD: old_ds = info['ds'][ds.name] if info['ds'][ds.name].type != ds.type: raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' % - (ds.name, old_ds.type, ds.type)) + (ds.name, old_ds.type, ds.type)) ds.index = old_ds.index new_ds[ds.index] = ds else: @@ -116,12 +119,11 @@ class RRD: dump = subprocess.Popen( ["rrdtool", "dump", self.filename], - stdout=subprocess.PIPE - ) + stdout=subprocess.PIPE) + restore = subprocess.Popen( ["rrdtool", "restore", "-", self.filename + ".new"], - stdin=subprocess.PIPE - ) + stdin=subprocess.PIPE) echo = True ds_definitions = True for line in dump.stdout: @@ -143,16 +145,14 @@ class RRD: %s %i - """ % ( - ds.name, - ds.type, - ds.args[0], - ds.args[1], - ds.args[2], - ds.last_ds, - ds.value, - ds.unknown_sec) - , "utf-8")) + """ % (ds.name, + ds.type, + ds.args[0], + ds.args[1], + ds.args[2], + ds.last_ds, + ds.value, + ds.unknown_sec), "utf-8")) if b'' in line: restore.stdin.write(added_ds_num*b""" @@ -272,7 +272,8 @@ class RRD: self._cached_info = info return info -class DS: + +class DS(object): """ DS stands for 
Data Source and represents one line of data points in a Round Robin Database (RRD). @@ -284,6 +285,7 @@ class DS: last_ds = 'U' value = 0 unknown_sec = 0 + def __init__(self, name, dst, *args): self.name = name self.type = dst @@ -293,7 +295,7 @@ class DS: return "DS:%s:%s:%s" % ( self.name, self.type, - ":".join(map(str, self._nan_to_U_args())) + ":".join(map(str, self._nan_to_u_args())) ) def __repr__(self): @@ -305,22 +307,23 @@ class DS: ) def __eq__(self, other): - return all(starmap(eq, zip(self._compare_keys(), other._compare_keys()))) + return all(starmap(eq, zip(self.compare_keys(), other.compare_keys()))) def __hash__(self): - return reduce(xor, map(hash, self._compare_keys())) + return reduce(xor, map(hash, self.compare_keys())) - def _nan_to_U_args(self): + def _nan_to_u_args(self): return tuple( 'U' if type(arg) is float and math.isnan(arg) else arg for arg in self.args ) - def _compare_keys(self): - return (self.name, self.type, self._nan_to_U_args()) + def compare_keys(self): + return self.name, self.type, self._nan_to_u_args() -class RRA: + +class RRA(object): def __init__(self, cf, *args): self.cf = cf self.args = args diff --git a/alfred.py b/alfred.py index f71b8da..3c08b78 100644 --- a/alfred.py +++ b/alfred.py @@ -2,44 +2,49 @@ import subprocess import json + def _fetch(data_type): - output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) - return json.loads(output.decode("utf-8")).values() + output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) + return json.loads(output.decode("utf-8")).values() + def nodeinfo(): - return _fetch(158) + return _fetch(158) + def statistics(): - return _fetch(159) + return _fetch(159) + def vis(): - return _fetch(160) + return _fetch(160) + def aliases(): - alias = {} - for node in nodeinfo(): - node_alias = {} - if 'location' in node: - try: - node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) 
- except: - pass + alias = {} + for node in nodeinfo(): + node_alias = {} + if 'location' in node: + try: + node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) + except KeyError: + pass - try: - node_alias['firmware'] = node['software']['firmware']['release'] - except KeyError: - pass + try: + node_alias['firmware'] = node['software']['firmware']['release'] + except KeyError: + pass - try: - node_alias['id'] = node['network']['mac'] - except KeyError: - pass + try: + node_alias['id'] = node['network']['mac'] + except KeyError: + pass - if 'hostname' in node: - node_alias['name'] = node['hostname'] - elif 'name' in node: - node_alias['name'] = node['name'] - if len(node_alias): - alias[node['network']['mac']] = node_alias + if 'hostname' in node: + node_alias['name'] = node['hostname'] + elif 'name' in node: + node_alias['name'] = node['name'] + if len(node_alias): + alias[node['network']['mac']] = node_alias - return alias + return alias diff --git a/backend.py b/backend.py index 7544709..ff943ec 100755 --- a/backend.py +++ b/backend.py @@ -1,9 +1,11 @@ #!/usr/bin/env python3 - +""" +backend.py - ffmap-backend runner +https://github.com/ffnord/ffmap-backend +""" import argparse import json import os -import sys import networkx as nx from datetime import datetime from networkx.readwrite import json_graph @@ -11,91 +13,89 @@ from networkx.readwrite import json_graph import alfred import nodes import graph -from batman import batman -from rrddb import rrd +from batman import Batman +from rrddb import RRD -parser = argparse.ArgumentParser() -parser.add_argument('-a', '--aliases', - help='read aliases from FILE', - default=[], - action='append', - metavar='FILE') +def main(params): + if not params['mesh']: + params['mesh'] = ['bat0'] -parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') + nodes_fn = os.path.join(params['destination_directory'], 'nodes.json') + graph_fn = 
os.path.join(params['destination_directory'], 'graph.json') -parser.add_argument('-d', '--destination-directory', action='store', - help='destination directory for generated files',required=True) + now = datetime.utcnow().replace(microsecond=0) -parser.add_argument('--vpn', action='append', metavar='MAC', - help='assume MAC to be part of the VPN') + with open(nodes_fn, 'r') as nodedb_handle: + nodedb = json.load(nodedb_handle) -parser.add_argument('--prune', metavar='DAYS', - help='forget nodes offline for at least DAYS') + # flush nodedb if it uses the old format + if 'links' in nodedb: + nodedb = {'nodes': dict()} -args = parser.parse_args() + nodedb['timestamp'] = now.isoformat() -options = vars(args) + for node_id, node in nodedb['nodes'].items(): + node['flags']['online'] = False -if not options['mesh']: - options['mesh'] = ['bat0'] + nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) -nodes_fn = os.path.join(options['destination_directory'], 'nodes.json') -graph_fn = os.path.join(options['destination_directory'], 'graph.json') + for aliases in params['aliases']: + with open(aliases, 'r') as f: + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) -now = datetime.utcnow().replace(microsecond=0) + nodes.reset_statistics(nodedb['nodes']) + nodes.import_statistics(nodedb['nodes'], alfred.statistics()) -try: - nodedb = json.load(open(nodes_fn)) + bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(Batman, params['mesh']))) + for vis_data, gateway_list in bm: + nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) + nodes.import_vis_clientcount(nodedb['nodes'], vis_data) + nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) + nodes.mark_gateways(nodedb['nodes'], gateway_list) - # ignore if old format - if 'links' in nodedb: - raise -except: - nodedb = {'nodes': dict()} + if params['prune']: + nodes.prune_nodes(nodedb['nodes'], now, int(params['prune'])) -nodedb['timestamp'] = 
now.isoformat() + batadv_graph = nx.DiGraph() + for vis_data, gateway_list in bm: + graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) -for node_id, node in nodedb['nodes'].items(): - node['flags']['online'] = False + if params['vpn']: + graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) -nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) + batadv_graph = graph.merge_nodes(batadv_graph) + batadv_graph = graph.to_undirected(batadv_graph) -for aliases in options['aliases']: - with open(aliases, 'r') as f: - nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) + with open(nodes_fn, 'w') as f: + json.dump(nodedb, f) -nodes.reset_statistics(nodedb['nodes']) -nodes.import_statistics(nodedb['nodes'], alfred.statistics()) + with open(graph_fn, 'w') as f: + json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) -bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh']))) -for vis_data, gateway_list in bm: - nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) - nodes.import_vis_clientcount(nodedb['nodes'], vis_data) - nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) - nodes.mark_gateways(nodedb['nodes'], gateway_list) + scriptdir = os.path.dirname(os.path.realpath(__file__)) + rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes') + rrd.update_database(nodedb['nodes']) + rrd.update_images() -if options['prune']: - nodes.prune_nodes(nodedb['nodes'], now, int(options['prune'])) -batadv_graph = nx.DiGraph() -for vis_data, gateway_list in bm: - graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) +if __name__ == '__main__': + parser = argparse.ArgumentParser() -if options['vpn']: - graph.mark_vpn(batadv_graph, frozenset(options['vpn'])) + parser.add_argument('-a', '--aliases', + help='read aliases from FILE', + default=[], action='append', + metavar='FILE') + parser.add_argument('-m', '--mesh', action='append', + 
help='batman mesh interface') + parser.add_argument('-d', '--destination-directory', action='store', + help='destination directory for generated files', + required=True) + parser.add_argument('--vpn', action='append', metavar='MAC', + help='assume MAC to be part of the VPN') + parser.add_argument('--prune', metavar='DAYS', + help='forget nodes offline for at least DAYS') -batadv_graph = graph.merge_nodes(batadv_graph) -batadv_graph = graph.to_undirected(batadv_graph) + options = vars(parser.parse_args()) -with open(nodes_fn, 'w') as f: - json.dump(nodedb, f) - -with open(graph_fn, 'w') as f: - json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) - -scriptdir = os.path.dirname(os.path.realpath(__file__)) -rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes') -rrd.update_database(nodedb['nodes']) -rrd.update_images() + main(options) diff --git a/batman.py b/batman.py index 94229ad..86ad4fe 100644 --- a/batman.py +++ b/batman.py @@ -1,82 +1,90 @@ -#!/usr/bin/env python3 import subprocess import json import re -class batman: - """ Bindings for B.A.T.M.A.N. advanced batctl tool - """ - def __init__(self, mesh_interface = "bat0"): - self.mesh_interface = mesh_interface - def vis_data(self,batadv_vis=False): - vds = self.vis_data_batctl_legacy() - if batadv_vis: - vds += self.vis_data_batadv_vis() - return vds - - def vis_data_helper(self,lines): - vd = [] - for line in lines: - try: - utf8_line = line.decode("utf-8") - vd.append(json.loads(utf8_line)) - except e: - pass - return vd - - def vis_data_batctl_legacy(self): - """ Parse "batctl -m vd json -n" into an array of dictionaries. +class Batman(object): """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"]) - lines = output.splitlines() - vds = self.vis_data_helper(lines) - return vds - - def vis_data_batadv_vis(self): - """ Parse "batadv-vis -i -f json" into an array of dictionaries. + Bindings for B.A.T.M.A.N. 
Advanced + commandline interface "batctl" """ - output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"]) - lines = output.splitlines() - return self.vis_data_helper(lines) + def __init__(self, mesh_interface='bat0'): + self.mesh_interface = mesh_interface - def gateway_list(self): - """ Parse "batctl -m gwl -n" into an array of dictionaries. - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"]) - output_utf8 = output.decode("utf-8") - lines = output_utf8.splitlines() + def vis_data(self, batadv_vis=False): + vds = self.vis_data_batctl_legacy() + if batadv_vis: + vds += self.vis_data_batadv_vis() + return vds - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + @staticmethod + def vis_data_helper(lines): + vd_tmp = [] + for line in lines: + try: + utf8_line = line.decode('utf-8') + vd_tmp.append(json.loads(utf8_line)) + except UnicodeDecodeError: + pass + return vd_tmp - gw = [] - gw_mode = self.gateway_mode() - if gw_mode['mode'] == 'server': - gw.append(own_mac) + def vis_data_batctl_legacy(self): + """ + Parse "batctl -m vd json -n" into an array of dictionaries. + """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n']) + lines = output.splitlines() + vds = self.vis_data_helper(lines) + return vds - for line in lines: - gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line) - if gw_line: - gw.append(gw_line.group(1)) + def vis_data_batadv_vis(self): + """ + Parse "batadv-vis -i -f json" into an array of dictionaries. + """ + output = subprocess.check_output(['batadv-vis', '-i', self.mesh_interface, '-f', 'json']) + lines = output.splitlines() + return self.vis_data_helper(lines) - return gw + def gateway_list(self): + """ + Parse "batctl -m gwl -n" into an array of dictionaries. 
+ """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gwl', '-n']) + output_utf8 = output.decode('utf-8') + lines = output_utf8.splitlines() - def gateway_mode(self): - """ Parse "batctl -m gw" - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"]) - elements = output.decode("utf-8").split() - mode = elements[0] - if mode == "server": - return {'mode': 'server', 'bandwidth': elements[3]} - else: - return {'mode': mode} + own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + + gateways = [] + gw_mode = self.gateway_mode() + if gw_mode['mode'] == 'server': + gateways.append(own_mac) + + for line in lines: + gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line) + if gw_line: + gateways.append(gw_line.group(1)) + + return gateways + + def gateway_mode(self): + """ + Parse "batctl -m gw" + """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gw']) + elements = output.decode("utf-8").split() + mode = elements[0] + if mode == 'server': + return {'mode': 'server', + 'bandwidth': elements[3]} + else: + return {'mode': mode} if __name__ == "__main__": - bc = batman() - vd = bc.vis_data() - gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) + bc = Batman() + vd = bc.vis_data() + gw = bc.gateway_list() + for x in vd: + print(x) + print(gw) + print(bc.gateway_mode()) diff --git a/graph.py b/graph.py index b6e86aa..460e327 100644 --- a/graph.py +++ b/graph.py @@ -1,66 +1,74 @@ import networkx as nx -from copy import deepcopy from functools import reduce from itertools import chain from nodes import build_mac_table -def import_vis_data(graph, nodes, vis_data): - macs = build_mac_table(nodes) - nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) - nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) - graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, 
node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) - edges = filter(lambda d: 'neighbor' in d, vis_data) - graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) +def import_vis_data(graph, nodes, vis_data): + macs = build_mac_table(nodes) + nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) + nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) + graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) + + edges = filter(lambda d: 'neighbor' in d, vis_data) + graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) + def mark_vpn(graph, vpn_macs): - components = map(frozenset, nx.weakly_connected_components(graph)) - components = filter(vpn_macs.intersection, components) - nodes = reduce(lambda a, b: a | b, components, set()) + components = map(frozenset, nx.weakly_connected_components(graph)) + components = filter(vpn_macs.intersection, components) + nodes = reduce(lambda a, b: a | b, components, set()) + + for node in nodes: + for k, v in graph[node].items(): + v['vpn'] = True - for node in nodes: - for k, v in graph[node].items(): - v['vpn'] = True def to_multigraph(graph): - def f(a): - node = graph.node[a] - return node['primary'] if node else a + def f(a): + node = graph.node[a] + return node['primary'] if node else a - G = nx.MultiDiGraph() - map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict()) - G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) - G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + def map_node(node, data): + return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict()) + + digraph = nx.MultiDiGraph() + digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) 
+ digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + + return digraph - return G def merge_nodes(graph): - def merge_edges(data): - tq = min(map(lambda d: d['tq'], data)) - vpn = all(map(lambda d: d.get('vpn', False), data)) - return dict(tq=tq, vpn=vpn) + def merge_edges(data): + tq = min(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn) - G = to_multigraph(graph) - H = nx.DiGraph() - H.add_nodes_from(G.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) - H.add_edges_from(edges) + multigraph = to_multigraph(graph) + digraph = nx.DiGraph() + digraph.add_nodes_from(multigraph.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) + for d in multigraph[e]] for e in multigraph]) + digraph.add_edges_from(edges) + + return digraph - return H def to_undirected(graph): - G = nx.MultiGraph() - G.add_nodes_from(graph.nodes_iter(data=True)) - G.add_edges_from(graph.edges_iter(data=True)) + multigraph = nx.MultiGraph() + multigraph.add_nodes_from(graph.nodes_iter(data=True)) + multigraph.add_edges_from(graph.edges_iter(data=True)) - def merge_edges(data): - tq = max(map(lambda d: d['tq'], data)) - vpn = all(map(lambda d: d.get('vpn', False), data)) - return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2) + def merge_edges(data): + tq = max(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2) - H = nx.Graph() - H.add_nodes_from(G.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) - H.add_edges_from(edges) + graph = nx.Graph() + graph.add_nodes_from(multigraph.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) + for d in multigraph[e]] for e in 
multigraph]) + graph.add_edges_from(edges) - return H + return graph diff --git a/nodes.py b/nodes.py index 61949e1..d2b9df8 100644 --- a/nodes.py +++ b/nodes.py @@ -2,128 +2,137 @@ from collections import Counter, defaultdict from datetime import datetime from functools import reduce -def build_mac_table(nodes): - macs = dict() - for node_id, node in nodes.items(): - try: - for mac in node['nodeinfo']['network']['mesh_interfaces']: - macs[mac] = node_id - except KeyError: - pass - return macs +def build_mac_table(nodes): + macs = dict() + for node_id, node in nodes.items(): + try: + for mac in node['nodeinfo']['network']['mesh_interfaces']: + macs[mac] = node_id + except KeyError: + pass + return macs + def prune_nodes(nodes, now, days): - prune = [] - for node_id, node in nodes.items(): - if not 'lastseen' in node: - prune.append(node_id) - continue + prune = [] + for node_id, node in nodes.items(): + if 'lastseen' not in node: + prune.append(node_id) + continue - lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') - delta = (now - lastseen).seconds + lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') + delta = (now - lastseen).seconds - if delta >= days * 86400: - prune.append(node_id) + if delta >= days * 86400: + prune.append(node_id) + + for prune_key in prune: + del nodes[prune_key] - for node_id in prune: - del nodes[node_id] def mark_online(node, now): - node['lastseen'] = now.isoformat() - node.setdefault('firstseen', now.isoformat()) - node['flags']['online'] = True + node['lastseen'] = now.isoformat() + node.setdefault('firstseen', now.isoformat()) + node['flags']['online'] = True + def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): - for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos): - node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) - node['nodeinfo'] = nodeinfo - node['flags']['online'] = False - node['flags']['gateway'] = False + for nodeinfo in filter(lambda d: 'node_id' in d, 
nodeinfos): + node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) + node['nodeinfo'] = nodeinfo + node['flags']['online'] = False + node['flags']['gateway'] = False + + if assume_online: + mark_online(node, now) - if assume_online: - mark_online(node, now) def reset_statistics(nodes): - for node in nodes.values(): - node['statistics'] = { 'clients': 0 } + for node in nodes.values(): + node['statistics'] = {'clients': 0} + def import_statistics(nodes, statistics): - def add(node, statistics, target, source, f=lambda d: d): - try: - node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) - except (KeyError,TypeError): - pass + def add(node, statistics, target, source, f=lambda d: d): + try: + node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) + except (KeyError, TypeError): + pass + + macs = build_mac_table(nodes) + statistics = filter(lambda d: 'node_id' in d, statistics) + statistics = filter(lambda d: d['node_id'] in nodes, statistics) + for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): + add(node, statistics, 'clients', ['clients', 'total']) + add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d)) + add(node, statistics, 'uptime', ['uptime']) + add(node, statistics, 'loadavg', ['loadavg']) + add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) + add(node, statistics, 'rootfs_usage', ['rootfs_usage']) - macs = build_mac_table(nodes) - statistics = filter(lambda d: 'node_id' in d, statistics) - statistics = filter(lambda d: d['node_id'] in nodes, statistics) - for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): - add(node, statistics, 'clients', ['clients', 'total']) - add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d)) - add(node, statistics, 'uptime', ['uptime']) - add(node, statistics, 'loadavg', ['loadavg']) - add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - 
d['free'] / d['total']) - add(node, statistics, 'rootfs_usage', ['rootfs_usage']) def import_mesh_ifs_vis_data(nodes, vis_data): - macs = build_mac_table(nodes) + macs = build_mac_table(nodes) - mesh_ifs = defaultdict(lambda: set()) - for line in filter(lambda d: 'secondary' in d, vis_data): - primary = line['of'] - mesh_ifs[primary].add(primary) - mesh_ifs[primary].add(line['secondary']) + mesh_ifs = defaultdict(lambda: set()) + for line in filter(lambda d: 'secondary' in d, vis_data): + primary = line['of'] + mesh_ifs[primary].add(primary) + mesh_ifs[primary].add(line['secondary']) - def if_to_node(ifs): - a = filter(lambda d: d in macs, ifs) - a = map(lambda d: nodes[macs[d]], a) - try: - return (next(a), ifs) - except StopIteration: - return None + def if_to_node(ifs): + a = filter(lambda d: d in macs, ifs) + a = map(lambda d: nodes[macs[d]], a) + try: + return next(a), ifs + except StopIteration: + return None - mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values())) + mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values())) - for v in mesh_nodes: - node = v[0] + for v in mesh_nodes: + node = v[0] - try: - mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) - except KeyError: - mesh_ifs = set() + try: + mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) + except KeyError: + mesh_ifs = set() + + node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) - node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) def import_vis_clientcount(nodes, vis_data): - macs = build_mac_table(nodes) - data = filter(lambda d: d.get('label', None) == 'TT', vis_data) - data = filter(lambda d: d['router'] in macs, data) - data = map(lambda d: macs[d['router']], data) + macs = build_mac_table(nodes) + data = filter(lambda d: d.get('label', None) == 'TT', vis_data) + data = filter(lambda d: d['router'] in macs, data) + data = map(lambda d: macs[d['router']], data) + + for node_id, clientcount in 
Counter(data).items(): + nodes[node_id]['statistics'].setdefault('clients', clientcount) - for node_id, clientcount in Counter(data).items(): - nodes[node_id]['statistics'].setdefault('clients', clientcount) def mark_gateways(nodes, gateways): - macs = build_mac_table(nodes) - gateways = filter(lambda d: d in macs, gateways) + macs = build_mac_table(nodes) + gateways = filter(lambda d: d in macs, gateways) + + for node in map(lambda d: nodes[macs[d]], gateways): + node['flags']['gateway'] = True - for node in map(lambda d: nodes[macs[d]], gateways): - node['flags']['gateway'] = True def mark_vis_data_online(nodes, vis_data, now): - macs = build_mac_table(nodes) + macs = build_mac_table(nodes) - online = set() - for line in vis_data: - if 'primary' in line: - online.add(line['primary']) - elif 'secondary' in line: - online.add(line['secondary']) - elif 'gateway' in line: - # This matches clients' MACs. - # On pre-Gluon nodes the primary MAC will be one of it. - online.add(line['gateway']) + online = set() + for line in vis_data: + if 'primary' in line: + online.add(line['primary']) + elif 'secondary' in line: + online.add(line['secondary']) + elif 'gateway' in line: + # This matches clients' MACs. + # On pre-Gluon nodes the primary MAC will be one of it. 
+ online.add(line['gateway']) - for mac in filter(lambda d: d in macs, online): - mark_online(nodes[macs[mac]], now) + for mac in filter(lambda d: d in macs, online): + mark_online(nodes[macs[mac]], now) diff --git a/rrddb.py b/rrddb.py index 2fccff4..b023e6b 100644 --- a/rrddb.py +++ b/rrddb.py @@ -1,50 +1,50 @@ #!/usr/bin/env python3 -import subprocess import time import os from GlobalRRD import GlobalRRD from NodeRRD import NodeRRD -class rrd: - def __init__( self - , databaseDirectory - , imagePath - , displayTimeGlobal = "7d" - , displayTimeNode = "1d" - ): - self.dbPath = databaseDirectory - self.globalDb = GlobalRRD(self.dbPath) - self.imagePath = imagePath - self.displayTimeGlobal = displayTimeGlobal - self.displayTimeNode = displayTimeNode - self.currentTimeInt = (int(time.time())/60)*60 - self.currentTime = str(self.currentTimeInt) +class RRD(object): + def __init__(self, + database_directory, + image_path, + display_time_global="7d", + display_time_node="1d"): - try: - os.stat(self.imagePath) - except: - os.mkdir(self.imagePath) + self.dbPath = database_directory + self.globalDb = GlobalRRD(self.dbPath) + self.imagePath = image_path + self.displayTimeGlobal = display_time_global + self.displayTimeNode = display_time_node - def update_database(self, nodes): - online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) - client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) + self.currentTimeInt = (int(time.time())/60)*60 + self.currentTime = str(self.currentTimeInt) - self.globalDb.update(len(online_nodes), client_count) - for node_id, node in online_nodes.items(): - rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) - rrd.update() + try: + os.stat(self.imagePath) + except OSError: + os.mkdir(self.imagePath) - def update_images(self): - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) + def update_database(self, nodes): + online_nodes = 
dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) + client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) - nodeDbFiles = os.listdir(self.dbPath) + self.globalDb.update(len(online_nodes), client_count) + for node_id, node in online_nodes.items(): + rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) + rrd.update() - for fileName in nodeDbFiles: - if not os.path.isfile(os.path.join(self.dbPath, fileName)): - continue + def update_images(self): + self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) - nodeName = os.path.basename(fileName).split('.') - if nodeName[1] == 'rrd' and not nodeName[0] == "nodes": - rrd = NodeRRD(os.path.join(self.dbPath, fileName)) - rrd.graph(self.imagePath, self.displayTimeNode) + nodedb_files = os.listdir(self.dbPath) + + for file_name in nodedb_files: + if not os.path.isfile(os.path.join(self.dbPath, file_name)): + continue + + node_name = os.path.basename(file_name).split('.') + if node_name[1] == 'rrd' and not node_name[0] == "nodes": + rrd = NodeRRD(os.path.join(self.dbPath, file_name)) + rrd.graph(self.imagePath, self.displayTimeNode) From e098cd8d7720c18c384e5b17cbbf780745fba2b1 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:00:35 +0100 Subject: [PATCH 41/97] alfred.py: better ask for forgiveness, than permission --- alfred.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/alfred.py b/alfred.py index 3c08b78..ec6a4ca 100644 --- a/alfred.py +++ b/alfred.py @@ -24,11 +24,13 @@ def aliases(): alias = {} for node in nodeinfo(): node_alias = {} - if 'location' in node: - try: - node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) - except KeyError: - pass + + try: + # TODO: better pass lat, lng as a tuple? 
+ node_alias['gps'] = "{lat}\x20{lng}".format(lat=node['location']['latitude'], + lng=node['location']['longitude']) + except KeyError: + pass try: node_alias['firmware'] = node['software']['firmware']['release'] From 5b14ed5ad9b45830951b2bf495bf1d46feeed7cf Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:31:44 +0100 Subject: [PATCH 42/97] alfred.py: remove superfluous shebang --- alfred.py | 1 - 1 file changed, 1 deletion(-) diff --git a/alfred.py b/alfred.py index ec6a4ca..720b946 100644 --- a/alfred.py +++ b/alfred.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 import subprocess import json From e66731154b841dc84b14e1a391820c5956eb70d6 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:41:02 +0100 Subject: [PATCH 43/97] pep8: some line length fixes --- GlobalRRD.py | 5 +++-- alfred.py | 8 +++++--- backend.py | 13 +++++++++---- batman.py | 24 ++++++++++++++++-------- 4 files changed, 33 insertions(+), 17 deletions(-) diff --git a/GlobalRRD.py b/GlobalRRD.py index b3cf31a..9c09549 100644 --- a/GlobalRRD.py +++ b/GlobalRRD.py @@ -23,8 +23,9 @@ class GlobalRRD(RRD): super().__init__(os.path.join(directory, "nodes.rrd")) self.ensure_sanity(self.ds_list, self.rra_list, step=60) - def update(self, nodeCount, clientCount): - super().update({'nodes': nodeCount, 'clients': clientCount}) + # TODO: fix this, python does not support function overloading + def update(self, node_count, client_count): + super().update({'nodes': node_count, 'clients': client_count}) def graph(self, filename, timeframe): args = ["rrdtool", 'graph', filename, diff --git a/alfred.py b/alfred.py index 720b946..2ab0117 100644 --- a/alfred.py +++ b/alfred.py @@ -3,7 +3,8 @@ import json def _fetch(data_type): - output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) + output = subprocess.check_output( + ["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) return json.loads(output.decode("utf-8")).values() @@ -26,8 +27,9 
@@ def aliases(): try: # TODO: better pass lat, lng as a tuple? - node_alias['gps'] = "{lat}\x20{lng}".format(lat=node['location']['latitude'], - lng=node['location']['longitude']) + node_alias['gps'] = "{lat}\x20{lng}".\ + format(lat=node['location']['latitude'], + lng=node['location']['longitude']) except KeyError: pass diff --git a/backend.py b/backend.py index ff943ec..7b7d0e5 100755 --- a/backend.py +++ b/backend.py @@ -38,16 +38,20 @@ def main(params): for node_id, node in nodedb['nodes'].items(): node['flags']['online'] = False - nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) + nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), + now, assume_online=True) for aliases in params['aliases']: with open(aliases, 'r') as f: - nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), + now, assume_online=False) nodes.reset_statistics(nodedb['nodes']) nodes.import_statistics(nodedb['nodes'], alfred.statistics()) - bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(Batman, params['mesh']))) + bm = list(map(lambda d: + (d.vis_data(True), d.gateway_list()), + map(Batman, params['mesh']))) for vis_data, gateway_list in bm: nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) nodes.import_vis_clientcount(nodedb['nodes'], vis_data) @@ -74,7 +78,8 @@ def main(params): json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) scriptdir = os.path.dirname(os.path.realpath(__file__)) - rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes') + rrd = RRD("{}/nodedb/".format(scriptdir), + "{}/nodes".format(params['destination_directory'])) rrd.update_database(nodedb['nodes']) rrd.update_images() diff --git a/batman.py b/batman.py index 86ad4fe..ddd0bd5 100644 --- a/batman.py +++ b/batman.py @@ -30,30 +30,37 @@ class Batman(object): def vis_data_batctl_legacy(self): """ - Parse "batctl -m vd json -n" into an array 
of dictionaries. + Parse "batctl -m vd json -n" + into an array of dictionaries. """ - output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n']) + output = subprocess.check_output( + ['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n']) lines = output.splitlines() vds = self.vis_data_helper(lines) return vds def vis_data_batadv_vis(self): """ - Parse "batadv-vis -i -f json" into an array of dictionaries. + Parse "batadv-vis -i -f json" + into an array of dictionaries. """ - output = subprocess.check_output(['batadv-vis', '-i', self.mesh_interface, '-f', 'json']) + output = subprocess.check_output( + ['batadv-vis', '-i', self.mesh_interface, '-f', 'json']) lines = output.splitlines() return self.vis_data_helper(lines) def gateway_list(self): """ - Parse "batctl -m gwl -n" into an array of dictionaries. + Parse "batctl -m gwl -n" + into an array of dictionaries. """ - output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gwl', '-n']) + output = subprocess.check_output( + ['batctl', '-m', self.mesh_interface, 'gwl', '-n']) output_utf8 = output.decode('utf-8') lines = output_utf8.splitlines() - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", + lines[0]).group(1) gateways = [] gw_mode = self.gateway_mode() @@ -71,7 +78,8 @@ class Batman(object): """ Parse "batctl -m gw" """ - output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gw']) + output = subprocess.check_output( + ['batctl', '-m', self.mesh_interface, 'gw']) elements = output.decode("utf-8").split() mode = elements[0] if mode == 'server': From d4a7c835539e0c8b04329b121e386d6a7980448c Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:41:12 +0100 Subject: [PATCH 44/97] update .gitignore --- .gitignore | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index c161fb3..e7425cf 100644 --- 
a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ -*.pyc +# script-generated aliases*.json nodedb/ + +# python bytecode / cache +*.pyc pycache/ +__pycache__/ From 1fb61db96339a8babd030112e0acfe6a81342427 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:50:36 +0100 Subject: [PATCH 45/97] backend.py: use argparser to set mesh default, join paths with os.path.join --- backend.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/backend.py b/backend.py index 7b7d0e5..0a91153 100755 --- a/backend.py +++ b/backend.py @@ -18,9 +18,6 @@ from rrddb import RRD def main(params): - if not params['mesh']: - params['mesh'] = ['bat0'] - nodes_fn = os.path.join(params['destination_directory'], 'nodes.json') graph_fn = os.path.join(params['destination_directory'], 'graph.json') @@ -77,9 +74,9 @@ def main(params): with open(graph_fn, 'w') as f: json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) - scriptdir = os.path.dirname(os.path.realpath(__file__)) - rrd = RRD("{}/nodedb/".format(scriptdir), - "{}/nodes".format(params['destination_directory'])) + script_directory = os.path.dirname(os.path.realpath(__file__)) + rrd = RRD(os.path.join(script_directory, 'nodedb'), + os.path.join(params['destination_directory'], 'nodes')) rrd.update_database(nodedb['nodes']) rrd.update_images() @@ -92,7 +89,8 @@ if __name__ == '__main__': default=[], action='append', metavar='FILE') parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') + default=['bat0'], + help='batman mesh interface (defaults to bat0)') parser.add_argument('-d', '--destination-directory', action='store', help='destination directory for generated files', required=True) From 309971f1b0ec5011b53f407485bc6c0bc4cae74c Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:58:06 +0100 Subject: [PATCH 46/97] batman.py: fix broken identation caused by previous commit --- nodes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/nodes.py b/nodes.py index d2b9df8..23a2b0e 100644 --- a/nodes.py +++ b/nodes.py @@ -27,8 +27,8 @@ def prune_nodes(nodes, now, days): if delta >= days * 86400: prune.append(node_id) - for prune_key in prune: - del nodes[prune_key] + for node_id in prune: + del nodes[node_id] def mark_online(node, now): From 3dd2a9e32554c465c0e01c24b2444d9f16d60893 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 18:06:54 +0100 Subject: [PATCH 47/97] travis-ci: add pep8 check --- .travis.yml | 6 ++++++ README.md | 2 ++ 2 files changed, 8 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..cb081fa --- /dev/null +++ b/.travis.yml @@ -0,0 +1,6 @@ +sudo: false +language: python +python: + - "3.4" +install: "pip install pep8" +script: "pep8 *.py" diff --git a/README.md b/README.md index d70aaac..9f5af40 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Data for Freifunk Map, Graph and Node List +[![Build Status](https://travis-ci.org/ffnord/ffmap-backend.svg?branch=master)](https://travis-ci.org/ffnord/ffmap-backend) + ffmap-backend gathers information on the batman network by invoking * batctl, From 15a0f71847f307a651c9d0e991e44a402e07e16d Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Tue, 24 Mar 2015 18:18:40 +0100 Subject: [PATCH 48/97] alfred.py: remove unused function aliases --- alfred.py | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/alfred.py b/alfred.py index f71b8da..8850328 100644 --- a/alfred.py +++ b/alfred.py @@ -14,32 +14,3 @@ def statistics(): def vis(): return _fetch(160) - -def aliases(): - alias = {} - for node in nodeinfo(): - node_alias = {} - if 'location' in node: - try: - node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude']) - except: - pass - - try: - node_alias['firmware'] = node['software']['firmware']['release'] - except KeyError: - pass - - try: - 
node_alias['id'] = node['network']['mac'] - except KeyError: - pass - - if 'hostname' in node: - node_alias['name'] = node['hostname'] - elif 'name' in node: - node_alias['name'] = node['name'] - if len(node_alias): - alias[node['network']['mac']] = node_alias - - return alias From efcefd8928da4083fd3135571e38180646e4fadd Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:31:44 +0100 Subject: [PATCH 49/97] alfred.py: remove superfluous shebang --- alfred.py | 1 - 1 file changed, 1 deletion(-) diff --git a/alfred.py b/alfred.py index 8850328..878ac0f 100644 --- a/alfred.py +++ b/alfred.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 import subprocess import json From 9a8d40ea9a91fa0b4d80284d6db424595787043d Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:41:12 +0100 Subject: [PATCH 50/97] update .gitignore --- .gitignore | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index c161fb3..e7425cf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ -*.pyc +# script-generated aliases*.json nodedb/ + +# python bytecode / cache +*.pyc pycache/ +__pycache__/ From 10e10944a521c97d867736ee02dac2e5c8d78ea4 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 16:49:37 +0100 Subject: [PATCH 51/97] fix alot of pep8 --- GlobalRRD.py | 15 ++-- NodeRRD.py | 27 +++++--- RRD.py | 65 +++++++++--------- alfred.py | 14 ++-- backend.py | 132 +++++++++++++++++------------------ batman.py | 142 ++++++++++++++++++++------------------ graph.py | 96 ++++++++++++++------------ nodes.py | 191 +++++++++++++++++++++++++++------------------------ rrddb.py | 72 +++++++++---------- 9 files changed, 397 insertions(+), 357 deletions(-) diff --git a/GlobalRRD.py b/GlobalRRD.py index f3f3960..b3cf31a 100644 --- a/GlobalRRD.py +++ b/GlobalRRD.py @@ -2,6 +2,7 @@ import os import subprocess from RRD import RRD, DS, RRA + class GlobalRRD(RRD): ds_list = [ # Number of nodes available @@ -10,14 +11,17 @@ 
class GlobalRRD(RRD): DS('clients', 'GAUGE', 120, 0, float('NaN')), ] rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples - RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples + # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 1, 120), + # 31 days of 1 hour samples + RRA('AVERAGE', 0.5, 60, 744), + # ~5 years of 1 day samples + RRA('AVERAGE', 0.5, 1440, 1780), ] def __init__(self, directory): super().__init__(os.path.join(directory, "nodes.rrd")) - self.ensureSanity(self.ds_list, self.rra_list, step=60) + self.ensure_sanity(self.ds_list, self.rra_list, step=60) def update(self, nodeCount, clientCount): super().update({'nodes': nodeCount, 'clients': clientCount}) @@ -30,6 +34,5 @@ class GlobalRRD(RRD): 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', 'LINE1:nodes#F00:nodes\\l', 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'LINE2:clients#00F:clients', - ] + 'LINE2:clients#00F:clients'] subprocess.check_output(args) diff --git a/NodeRRD.py b/NodeRRD.py index ca24c0d..a4ec092 100644 --- a/NodeRRD.py +++ b/NodeRRD.py @@ -2,19 +2,24 @@ import os import subprocess from RRD import RRD, DS, RRA + class NodeRRD(RRD): ds_list = [ DS('upstate', 'GAUGE', 120, 0, 1), DS('clients', 'GAUGE', 120, 0, float('NaN')), ] rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 5, 1440), # 5 days of 5 minute samples - RRA('AVERAGE', 0.5, 60, 720), # 30 days of 1 hour samples - RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples + # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 1, 120), + # 5 days of 5 minute samples + RRA('AVERAGE', 0.5, 5, 1440), + # 30 days of 1 hour samples + RRA('AVERAGE', 0.5, 60, 720), + # 1 year of 12 hour samples + RRA('AVERAGE', 0.5, 720, 730), ] - def __init__(self, filename, node = None): + def __init__(self, filename, node=None): """ Create a new RRD for a given node. 
@@ -22,12 +27,13 @@ class NodeRRD(RRD): """ self.node = node super().__init__(filename) - self.ensureSanity(self.ds_list, self.rra_list, step=60) + self.ensure_sanity(self.ds_list, self.rra_list, step=60) @property def imagename(self): - return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png" + return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0]) + # TODO: fix this, python does not support function overloading def update(self): super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']}) @@ -36,8 +42,8 @@ class NodeRRD(RRD): Create a graph in the given directory. The file will be named basename.png if the RRD file is named basename.rrd """ - args = ['rrdtool','graph', os.path.join(directory, self.imagename), - '-s', '-' + timeframe , + args = ['rrdtool', 'graph', os.path.join(directory, self.imagename), + '-s', '-' + timeframe, '-w', '800', '-h', '400', '-l', '0', @@ -48,6 +54,5 @@ class NodeRRD(RRD): 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*', 'AREA:c#0F0:up\\l', 'AREA:d#F00:down\\l', - 'LINE1:c#00F:clients connected\\l', - ] + 'LINE1:c#00F:clients connected\\l'] subprocess.check_output(args) diff --git a/RRD.py b/RRD.py index 9bb87a0..799338c 100644 --- a/RRD.py +++ b/RRD.py @@ -1,19 +1,20 @@ import subprocess import re -import io import os -from tempfile import TemporaryFile from operator import xor, eq from functools import reduce from itertools import starmap import math + class RRDIncompatibleException(Exception): """ Is raised when an RRD doesn't have the desired definition and cannot be upgraded to it. 
""" pass + + class RRDOutdatedException(Exception): """ Is raised when an RRD doesn't have the desired definition, but can be @@ -25,7 +26,8 @@ if not hasattr(__builtins__, "FileNotFoundError"): class FileNotFoundError(Exception): pass -class RRD: + +class RRD(object): """ An RRD is a Round Robin Database, a database which forgets old data and aggregates multiple records into new ones. @@ -49,7 +51,7 @@ class RRD: def _exec_rrdtool(self, cmd, *args, **kwargs): pargs = ["rrdtool", cmd, self.filename] - for k,v in kwargs.items(): + for k, v in kwargs.items(): pargs.extend(["--" + k, str(v)]) pargs.extend(args) subprocess.check_output(pargs) @@ -57,7 +59,7 @@ class RRD: def __init__(self, filename): self.filename = filename - def ensureSanity(self, ds_list, rra_list, **kwargs): + def ensure_sanity(self, ds_list, rra_list, **kwargs): """ Create or upgrade the RRD file if necessary to contain all DS in ds_list. If it needs to be created, the RRAs in rra_list and any kwargs @@ -65,13 +67,13 @@ class RRD: database are NOT modified! """ try: - self.checkSanity(ds_list) + self.check_sanity(ds_list) except FileNotFoundError: self.create(ds_list, rra_list, **kwargs) except RRDOutdatedException: self.upgrade(ds_list) - def checkSanity(self, ds_list=()): + def check_sanity(self, ds_list=()): """ Check if the RRD file exists and contains (at least) the DS listed in ds_list. 
@@ -82,7 +84,8 @@ class RRD: if set(ds_list) - set(info['ds'].values()) != set(): for ds in ds_list: if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type: - raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type)) + raise RRDIncompatibleException("%s is %s but should be %s" % + (ds.name, ds.type, info['ds'][ds.name].type)) else: raise RRDOutdatedException() @@ -106,7 +109,7 @@ class RRD: old_ds = info['ds'][ds.name] if info['ds'][ds.name].type != ds.type: raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' % - (ds.name, old_ds.type, ds.type)) + (ds.name, old_ds.type, ds.type)) ds.index = old_ds.index new_ds[ds.index] = ds else: @@ -116,12 +119,11 @@ class RRD: dump = subprocess.Popen( ["rrdtool", "dump", self.filename], - stdout=subprocess.PIPE - ) + stdout=subprocess.PIPE) + restore = subprocess.Popen( ["rrdtool", "restore", "-", self.filename + ".new"], - stdin=subprocess.PIPE - ) + stdin=subprocess.PIPE) echo = True ds_definitions = True for line in dump.stdout: @@ -143,16 +145,14 @@ class RRD: %s %i - """ % ( - ds.name, - ds.type, - ds.args[0], - ds.args[1], - ds.args[2], - ds.last_ds, - ds.value, - ds.unknown_sec) - , "utf-8")) + """ % (ds.name, + ds.type, + ds.args[0], + ds.args[1], + ds.args[2], + ds.last_ds, + ds.value, + ds.unknown_sec), "utf-8")) if b'' in line: restore.stdin.write(added_ds_num*b""" @@ -272,7 +272,8 @@ class RRD: self._cached_info = info return info -class DS: + +class DS(object): """ DS stands for Data Source and represents one line of data points in a Round Robin Database (RRD). 
@@ -284,6 +285,7 @@ class DS: last_ds = 'U' value = 0 unknown_sec = 0 + def __init__(self, name, dst, *args): self.name = name self.type = dst @@ -293,7 +295,7 @@ class DS: return "DS:%s:%s:%s" % ( self.name, self.type, - ":".join(map(str, self._nan_to_U_args())) + ":".join(map(str, self._nan_to_u_args())) ) def __repr__(self): @@ -305,22 +307,23 @@ class DS: ) def __eq__(self, other): - return all(starmap(eq, zip(self._compare_keys(), other._compare_keys()))) + return all(starmap(eq, zip(self.compare_keys(), other.compare_keys()))) def __hash__(self): - return reduce(xor, map(hash, self._compare_keys())) + return reduce(xor, map(hash, self.compare_keys())) - def _nan_to_U_args(self): + def _nan_to_u_args(self): return tuple( 'U' if type(arg) is float and math.isnan(arg) else arg for arg in self.args ) - def _compare_keys(self): - return (self.name, self.type, self._nan_to_U_args()) + def compare_keys(self): + return self.name, self.type, self._nan_to_u_args() -class RRA: + +class RRA(object): def __init__(self, cf, *args): self.cf = cf self.args = args diff --git a/alfred.py b/alfred.py index 878ac0f..d334656 100644 --- a/alfred.py +++ b/alfred.py @@ -1,15 +1,19 @@ import subprocess import json + def _fetch(data_type): - output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) - return json.loads(output.decode("utf-8")).values() + output = subprocess.check_output(["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) + return json.loads(output.decode("utf-8")).values() + def nodeinfo(): - return _fetch(158) + return _fetch(158) + def statistics(): - return _fetch(159) + return _fetch(159) + def vis(): - return _fetch(160) + return _fetch(160) diff --git a/backend.py b/backend.py index 7544709..ff943ec 100755 --- a/backend.py +++ b/backend.py @@ -1,9 +1,11 @@ #!/usr/bin/env python3 - +""" +backend.py - ffmap-backend runner +https://github.com/ffnord/ffmap-backend +""" import argparse import json import os -import sys import 
networkx as nx from datetime import datetime from networkx.readwrite import json_graph @@ -11,91 +13,89 @@ from networkx.readwrite import json_graph import alfred import nodes import graph -from batman import batman -from rrddb import rrd +from batman import Batman +from rrddb import RRD -parser = argparse.ArgumentParser() -parser.add_argument('-a', '--aliases', - help='read aliases from FILE', - default=[], - action='append', - metavar='FILE') +def main(params): + if not params['mesh']: + params['mesh'] = ['bat0'] -parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') + nodes_fn = os.path.join(params['destination_directory'], 'nodes.json') + graph_fn = os.path.join(params['destination_directory'], 'graph.json') -parser.add_argument('-d', '--destination-directory', action='store', - help='destination directory for generated files',required=True) + now = datetime.utcnow().replace(microsecond=0) -parser.add_argument('--vpn', action='append', metavar='MAC', - help='assume MAC to be part of the VPN') + with open(nodes_fn, 'r') as nodedb_handle: + nodedb = json.load(nodedb_handle) -parser.add_argument('--prune', metavar='DAYS', - help='forget nodes offline for at least DAYS') + # flush nodedb if it uses the old format + if 'links' in nodedb: + nodedb = {'nodes': dict()} -args = parser.parse_args() + nodedb['timestamp'] = now.isoformat() -options = vars(args) + for node_id, node in nodedb['nodes'].items(): + node['flags']['online'] = False -if not options['mesh']: - options['mesh'] = ['bat0'] + nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) -nodes_fn = os.path.join(options['destination_directory'], 'nodes.json') -graph_fn = os.path.join(options['destination_directory'], 'graph.json') + for aliases in params['aliases']: + with open(aliases, 'r') as f: + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) -now = datetime.utcnow().replace(microsecond=0) + 
nodes.reset_statistics(nodedb['nodes']) + nodes.import_statistics(nodedb['nodes'], alfred.statistics()) -try: - nodedb = json.load(open(nodes_fn)) + bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(Batman, params['mesh']))) + for vis_data, gateway_list in bm: + nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) + nodes.import_vis_clientcount(nodedb['nodes'], vis_data) + nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) + nodes.mark_gateways(nodedb['nodes'], gateway_list) - # ignore if old format - if 'links' in nodedb: - raise -except: - nodedb = {'nodes': dict()} + if params['prune']: + nodes.prune_nodes(nodedb['nodes'], now, int(params['prune'])) -nodedb['timestamp'] = now.isoformat() + batadv_graph = nx.DiGraph() + for vis_data, gateway_list in bm: + graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) -for node_id, node in nodedb['nodes'].items(): - node['flags']['online'] = False + if params['vpn']: + graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) -nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) + batadv_graph = graph.merge_nodes(batadv_graph) + batadv_graph = graph.to_undirected(batadv_graph) -for aliases in options['aliases']: - with open(aliases, 'r') as f: - nodes.import_nodeinfo(nodedb['nodes'], json.load(f), now, assume_online=False) + with open(nodes_fn, 'w') as f: + json.dump(nodedb, f) -nodes.reset_statistics(nodedb['nodes']) -nodes.import_statistics(nodedb['nodes'], alfred.statistics()) + with open(graph_fn, 'w') as f: + json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) -bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), map(batman, options['mesh']))) -for vis_data, gateway_list in bm: - nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) - nodes.import_vis_clientcount(nodedb['nodes'], vis_data) - nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) - nodes.mark_gateways(nodedb['nodes'], gateway_list) + scriptdir = 
os.path.dirname(os.path.realpath(__file__)) + rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes') + rrd.update_database(nodedb['nodes']) + rrd.update_images() -if options['prune']: - nodes.prune_nodes(nodedb['nodes'], now, int(options['prune'])) -batadv_graph = nx.DiGraph() -for vis_data, gateway_list in bm: - graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) +if __name__ == '__main__': + parser = argparse.ArgumentParser() -if options['vpn']: - graph.mark_vpn(batadv_graph, frozenset(options['vpn'])) + parser.add_argument('-a', '--aliases', + help='read aliases from FILE', + default=[], action='append', + metavar='FILE') + parser.add_argument('-m', '--mesh', action='append', + help='batman mesh interface') + parser.add_argument('-d', '--destination-directory', action='store', + help='destination directory for generated files', + required=True) + parser.add_argument('--vpn', action='append', metavar='MAC', + help='assume MAC to be part of the VPN') + parser.add_argument('--prune', metavar='DAYS', + help='forget nodes offline for at least DAYS') -batadv_graph = graph.merge_nodes(batadv_graph) -batadv_graph = graph.to_undirected(batadv_graph) + options = vars(parser.parse_args()) -with open(nodes_fn, 'w') as f: - json.dump(nodedb, f) - -with open(graph_fn, 'w') as f: - json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) - -scriptdir = os.path.dirname(os.path.realpath(__file__)) -rrd = rrd(scriptdir + '/nodedb/', options['destination_directory'] + '/nodes') -rrd.update_database(nodedb['nodes']) -rrd.update_images() + main(options) diff --git a/batman.py b/batman.py index 94229ad..86ad4fe 100644 --- a/batman.py +++ b/batman.py @@ -1,82 +1,90 @@ -#!/usr/bin/env python3 import subprocess import json import re -class batman: - """ Bindings for B.A.T.M.A.N. 
advanced batctl tool - """ - def __init__(self, mesh_interface = "bat0"): - self.mesh_interface = mesh_interface - def vis_data(self,batadv_vis=False): - vds = self.vis_data_batctl_legacy() - if batadv_vis: - vds += self.vis_data_batadv_vis() - return vds - - def vis_data_helper(self,lines): - vd = [] - for line in lines: - try: - utf8_line = line.decode("utf-8") - vd.append(json.loads(utf8_line)) - except e: - pass - return vd - - def vis_data_batctl_legacy(self): - """ Parse "batctl -m vd json -n" into an array of dictionaries. +class Batman(object): """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"]) - lines = output.splitlines() - vds = self.vis_data_helper(lines) - return vds - - def vis_data_batadv_vis(self): - """ Parse "batadv-vis -i -f json" into an array of dictionaries. + Bindings for B.A.T.M.A.N. Advanced + commandline interface "batctl" """ - output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"]) - lines = output.splitlines() - return self.vis_data_helper(lines) + def __init__(self, mesh_interface='bat0'): + self.mesh_interface = mesh_interface - def gateway_list(self): - """ Parse "batctl -m gwl -n" into an array of dictionaries. 
- """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"]) - output_utf8 = output.decode("utf-8") - lines = output_utf8.splitlines() + def vis_data(self, batadv_vis=False): + vds = self.vis_data_batctl_legacy() + if batadv_vis: + vds += self.vis_data_batadv_vis() + return vds - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + @staticmethod + def vis_data_helper(lines): + vd_tmp = [] + for line in lines: + try: + utf8_line = line.decode('utf-8') + vd_tmp.append(json.loads(utf8_line)) + except UnicodeDecodeError: + pass + return vd_tmp - gw = [] - gw_mode = self.gateway_mode() - if gw_mode['mode'] == 'server': - gw.append(own_mac) + def vis_data_batctl_legacy(self): + """ + Parse "batctl -m vd json -n" into an array of dictionaries. + """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n']) + lines = output.splitlines() + vds = self.vis_data_helper(lines) + return vds - for line in lines: - gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line) - if gw_line: - gw.append(gw_line.group(1)) + def vis_data_batadv_vis(self): + """ + Parse "batadv-vis -i -f json" into an array of dictionaries. + """ + output = subprocess.check_output(['batadv-vis', '-i', self.mesh_interface, '-f', 'json']) + lines = output.splitlines() + return self.vis_data_helper(lines) - return gw + def gateway_list(self): + """ + Parse "batctl -m gwl -n" into an array of dictionaries. 
+ """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gwl', '-n']) + output_utf8 = output.decode('utf-8') + lines = output_utf8.splitlines() - def gateway_mode(self): - """ Parse "batctl -m gw" - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"]) - elements = output.decode("utf-8").split() - mode = elements[0] - if mode == "server": - return {'mode': 'server', 'bandwidth': elements[3]} - else: - return {'mode': mode} + own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", lines[0]).group(1) + + gateways = [] + gw_mode = self.gateway_mode() + if gw_mode['mode'] == 'server': + gateways.append(own_mac) + + for line in lines: + gw_line = re.match(r"^(?:=>)? +([0-9a-f:]+) ", line) + if gw_line: + gateways.append(gw_line.group(1)) + + return gateways + + def gateway_mode(self): + """ + Parse "batctl -m gw" + """ + output = subprocess.check_output(['batctl', '-m', self.mesh_interface, 'gw']) + elements = output.decode("utf-8").split() + mode = elements[0] + if mode == 'server': + return {'mode': 'server', + 'bandwidth': elements[3]} + else: + return {'mode': mode} if __name__ == "__main__": - bc = batman() - vd = bc.vis_data() - gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) + bc = Batman() + vd = bc.vis_data() + gw = bc.gateway_list() + for x in vd: + print(x) + print(gw) + print(bc.gateway_mode()) diff --git a/graph.py b/graph.py index b6e86aa..460e327 100644 --- a/graph.py +++ b/graph.py @@ -1,66 +1,74 @@ import networkx as nx -from copy import deepcopy from functools import reduce from itertools import chain from nodes import build_mac_table -def import_vis_data(graph, nodes, vis_data): - macs = build_mac_table(nodes) - nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) - nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) - graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, 
node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) - edges = filter(lambda d: 'neighbor' in d, vis_data) - graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) +def import_vis_data(graph, nodes, vis_data): + macs = build_mac_table(nodes) + nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) + nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) + graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) + + edges = filter(lambda d: 'neighbor' in d, vis_data) + graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) + def mark_vpn(graph, vpn_macs): - components = map(frozenset, nx.weakly_connected_components(graph)) - components = filter(vpn_macs.intersection, components) - nodes = reduce(lambda a, b: a | b, components, set()) + components = map(frozenset, nx.weakly_connected_components(graph)) + components = filter(vpn_macs.intersection, components) + nodes = reduce(lambda a, b: a | b, components, set()) + + for node in nodes: + for k, v in graph[node].items(): + v['vpn'] = True - for node in nodes: - for k, v in graph[node].items(): - v['vpn'] = True def to_multigraph(graph): - def f(a): - node = graph.node[a] - return node['primary'] if node else a + def f(a): + node = graph.node[a] + return node['primary'] if node else a - G = nx.MultiDiGraph() - map_node = lambda node, data: (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict()) - G.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) - G.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + def map_node(node, data): + return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict()) + + digraph = nx.MultiDiGraph() + digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) 
+ digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + + return digraph - return G def merge_nodes(graph): - def merge_edges(data): - tq = min(map(lambda d: d['tq'], data)) - vpn = all(map(lambda d: d.get('vpn', False), data)) - return dict(tq=tq, vpn=vpn) + def merge_edges(data): + tq = min(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn) - G = to_multigraph(graph) - H = nx.DiGraph() - H.add_nodes_from(G.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) - H.add_edges_from(edges) + multigraph = to_multigraph(graph) + digraph = nx.DiGraph() + digraph.add_nodes_from(multigraph.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) + for d in multigraph[e]] for e in multigraph]) + digraph.add_edges_from(edges) + + return digraph - return H def to_undirected(graph): - G = nx.MultiGraph() - G.add_nodes_from(graph.nodes_iter(data=True)) - G.add_edges_from(graph.edges_iter(data=True)) + multigraph = nx.MultiGraph() + multigraph.add_nodes_from(graph.nodes_iter(data=True)) + multigraph.add_edges_from(graph.edges_iter(data=True)) - def merge_edges(data): - tq = max(map(lambda d: d['tq'], data)) - vpn = all(map(lambda d: d.get('vpn', False), data)) - return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2) + def merge_edges(data): + tq = max(map(lambda d: d['tq'], data)) + vpn = all(map(lambda d: d.get('vpn', False), data)) + return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2) - H = nx.Graph() - H.add_nodes_from(G.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(G[e][d].values())) for d in G[e]] for e in G]) - H.add_edges_from(edges) + graph = nx.Graph() + graph.add_nodes_from(multigraph.nodes_iter(data=True)) + edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) + for d in multigraph[e]] for e in 
multigraph]) + graph.add_edges_from(edges) - return H + return graph diff --git a/nodes.py b/nodes.py index 61949e1..23a2b0e 100644 --- a/nodes.py +++ b/nodes.py @@ -2,128 +2,137 @@ from collections import Counter, defaultdict from datetime import datetime from functools import reduce -def build_mac_table(nodes): - macs = dict() - for node_id, node in nodes.items(): - try: - for mac in node['nodeinfo']['network']['mesh_interfaces']: - macs[mac] = node_id - except KeyError: - pass - return macs +def build_mac_table(nodes): + macs = dict() + for node_id, node in nodes.items(): + try: + for mac in node['nodeinfo']['network']['mesh_interfaces']: + macs[mac] = node_id + except KeyError: + pass + return macs + def prune_nodes(nodes, now, days): - prune = [] - for node_id, node in nodes.items(): - if not 'lastseen' in node: - prune.append(node_id) - continue + prune = [] + for node_id, node in nodes.items(): + if 'lastseen' not in node: + prune.append(node_id) + continue - lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') - delta = (now - lastseen).seconds + lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') + delta = (now - lastseen).seconds - if delta >= days * 86400: - prune.append(node_id) + if delta >= days * 86400: + prune.append(node_id) + + for node_id in prune: + del nodes[node_id] - for node_id in prune: - del nodes[node_id] def mark_online(node, now): - node['lastseen'] = now.isoformat() - node.setdefault('firstseen', now.isoformat()) - node['flags']['online'] = True + node['lastseen'] = now.isoformat() + node.setdefault('firstseen', now.isoformat()) + node['flags']['online'] = True + def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): - for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos): - node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) - node['nodeinfo'] = nodeinfo - node['flags']['online'] = False - node['flags']['gateway'] = False + for nodeinfo in filter(lambda d: 'node_id' in d, 
nodeinfos): + node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) + node['nodeinfo'] = nodeinfo + node['flags']['online'] = False + node['flags']['gateway'] = False + + if assume_online: + mark_online(node, now) - if assume_online: - mark_online(node, now) def reset_statistics(nodes): - for node in nodes.values(): - node['statistics'] = { 'clients': 0 } + for node in nodes.values(): + node['statistics'] = {'clients': 0} + def import_statistics(nodes, statistics): - def add(node, statistics, target, source, f=lambda d: d): - try: - node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) - except (KeyError,TypeError): - pass + def add(node, statistics, target, source, f=lambda d: d): + try: + node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) + except (KeyError, TypeError): + pass + + macs = build_mac_table(nodes) + statistics = filter(lambda d: 'node_id' in d, statistics) + statistics = filter(lambda d: d['node_id'] in nodes, statistics) + for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): + add(node, statistics, 'clients', ['clients', 'total']) + add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d)) + add(node, statistics, 'uptime', ['uptime']) + add(node, statistics, 'loadavg', ['loadavg']) + add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) + add(node, statistics, 'rootfs_usage', ['rootfs_usage']) - macs = build_mac_table(nodes) - statistics = filter(lambda d: 'node_id' in d, statistics) - statistics = filter(lambda d: d['node_id'] in nodes, statistics) - for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): - add(node, statistics, 'clients', ['clients', 'total']) - add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, d)) - add(node, statistics, 'uptime', ['uptime']) - add(node, statistics, 'loadavg', ['loadavg']) - add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - 
d['free'] / d['total']) - add(node, statistics, 'rootfs_usage', ['rootfs_usage']) def import_mesh_ifs_vis_data(nodes, vis_data): - macs = build_mac_table(nodes) + macs = build_mac_table(nodes) - mesh_ifs = defaultdict(lambda: set()) - for line in filter(lambda d: 'secondary' in d, vis_data): - primary = line['of'] - mesh_ifs[primary].add(primary) - mesh_ifs[primary].add(line['secondary']) + mesh_ifs = defaultdict(lambda: set()) + for line in filter(lambda d: 'secondary' in d, vis_data): + primary = line['of'] + mesh_ifs[primary].add(primary) + mesh_ifs[primary].add(line['secondary']) - def if_to_node(ifs): - a = filter(lambda d: d in macs, ifs) - a = map(lambda d: nodes[macs[d]], a) - try: - return (next(a), ifs) - except StopIteration: - return None + def if_to_node(ifs): + a = filter(lambda d: d in macs, ifs) + a = map(lambda d: nodes[macs[d]], a) + try: + return next(a), ifs + except StopIteration: + return None - mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values())) + mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values())) - for v in mesh_nodes: - node = v[0] + for v in mesh_nodes: + node = v[0] - try: - mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) - except KeyError: - mesh_ifs = set() + try: + mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) + except KeyError: + mesh_ifs = set() + + node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) - node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) def import_vis_clientcount(nodes, vis_data): - macs = build_mac_table(nodes) - data = filter(lambda d: d.get('label', None) == 'TT', vis_data) - data = filter(lambda d: d['router'] in macs, data) - data = map(lambda d: macs[d['router']], data) + macs = build_mac_table(nodes) + data = filter(lambda d: d.get('label', None) == 'TT', vis_data) + data = filter(lambda d: d['router'] in macs, data) + data = map(lambda d: macs[d['router']], data) + + for node_id, clientcount in 
Counter(data).items(): + nodes[node_id]['statistics'].setdefault('clients', clientcount) - for node_id, clientcount in Counter(data).items(): - nodes[node_id]['statistics'].setdefault('clients', clientcount) def mark_gateways(nodes, gateways): - macs = build_mac_table(nodes) - gateways = filter(lambda d: d in macs, gateways) + macs = build_mac_table(nodes) + gateways = filter(lambda d: d in macs, gateways) + + for node in map(lambda d: nodes[macs[d]], gateways): + node['flags']['gateway'] = True - for node in map(lambda d: nodes[macs[d]], gateways): - node['flags']['gateway'] = True def mark_vis_data_online(nodes, vis_data, now): - macs = build_mac_table(nodes) + macs = build_mac_table(nodes) - online = set() - for line in vis_data: - if 'primary' in line: - online.add(line['primary']) - elif 'secondary' in line: - online.add(line['secondary']) - elif 'gateway' in line: - # This matches clients' MACs. - # On pre-Gluon nodes the primary MAC will be one of it. - online.add(line['gateway']) + online = set() + for line in vis_data: + if 'primary' in line: + online.add(line['primary']) + elif 'secondary' in line: + online.add(line['secondary']) + elif 'gateway' in line: + # This matches clients' MACs. + # On pre-Gluon nodes the primary MAC will be one of it. 
+ online.add(line['gateway']) - for mac in filter(lambda d: d in macs, online): - mark_online(nodes[macs[mac]], now) + for mac in filter(lambda d: d in macs, online): + mark_online(nodes[macs[mac]], now) diff --git a/rrddb.py b/rrddb.py index 2fccff4..b023e6b 100644 --- a/rrddb.py +++ b/rrddb.py @@ -1,50 +1,50 @@ #!/usr/bin/env python3 -import subprocess import time import os from GlobalRRD import GlobalRRD from NodeRRD import NodeRRD -class rrd: - def __init__( self - , databaseDirectory - , imagePath - , displayTimeGlobal = "7d" - , displayTimeNode = "1d" - ): - self.dbPath = databaseDirectory - self.globalDb = GlobalRRD(self.dbPath) - self.imagePath = imagePath - self.displayTimeGlobal = displayTimeGlobal - self.displayTimeNode = displayTimeNode - self.currentTimeInt = (int(time.time())/60)*60 - self.currentTime = str(self.currentTimeInt) +class RRD(object): + def __init__(self, + database_directory, + image_path, + display_time_global="7d", + display_time_node="1d"): - try: - os.stat(self.imagePath) - except: - os.mkdir(self.imagePath) + self.dbPath = database_directory + self.globalDb = GlobalRRD(self.dbPath) + self.imagePath = image_path + self.displayTimeGlobal = display_time_global + self.displayTimeNode = display_time_node - def update_database(self, nodes): - online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) - client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) + self.currentTimeInt = (int(time.time())/60)*60 + self.currentTime = str(self.currentTimeInt) - self.globalDb.update(len(online_nodes), client_count) - for node_id, node in online_nodes.items(): - rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) - rrd.update() + try: + os.stat(self.imagePath) + except OSError: + os.mkdir(self.imagePath) - def update_images(self): - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) + def update_database(self, nodes): + online_nodes = 
dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) + client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) - nodeDbFiles = os.listdir(self.dbPath) + self.globalDb.update(len(online_nodes), client_count) + for node_id, node in online_nodes.items(): + rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) + rrd.update() - for fileName in nodeDbFiles: - if not os.path.isfile(os.path.join(self.dbPath, fileName)): - continue + def update_images(self): + self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) - nodeName = os.path.basename(fileName).split('.') - if nodeName[1] == 'rrd' and not nodeName[0] == "nodes": - rrd = NodeRRD(os.path.join(self.dbPath, fileName)) - rrd.graph(self.imagePath, self.displayTimeNode) + nodedb_files = os.listdir(self.dbPath) + + for file_name in nodedb_files: + if not os.path.isfile(os.path.join(self.dbPath, file_name)): + continue + + node_name = os.path.basename(file_name).split('.') + if node_name[1] == 'rrd' and not node_name[0] == "nodes": + rrd = NodeRRD(os.path.join(self.dbPath, file_name)) + rrd.graph(self.imagePath, self.displayTimeNode) From 84746de0482a522ed59310f10ad8437cdbe0b62c Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 17:50:36 +0100 Subject: [PATCH 52/97] backend.py: use argparser to set mesh default, join paths with os.path.join --- backend.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/backend.py b/backend.py index ff943ec..55dd001 100755 --- a/backend.py +++ b/backend.py @@ -18,9 +18,6 @@ from rrddb import RRD def main(params): - if not params['mesh']: - params['mesh'] = ['bat0'] - nodes_fn = os.path.join(params['destination_directory'], 'nodes.json') graph_fn = os.path.join(params['destination_directory'], 'graph.json') @@ -73,8 +70,9 @@ def main(params): with open(graph_fn, 'w') as f: json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) - scriptdir = 
os.path.dirname(os.path.realpath(__file__)) - rrd = RRD(scriptdir + '/nodedb/', params['destination_directory'] + '/nodes') + script_directory = os.path.dirname(os.path.realpath(__file__)) + rrd = RRD(os.path.join(script_directory, 'nodedb'), + os.path.join(params['destination_directory'], 'nodes')) rrd.update_database(nodedb['nodes']) rrd.update_images() @@ -87,7 +85,8 @@ if __name__ == '__main__': default=[], action='append', metavar='FILE') parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') + default=['bat0'], + help='batman mesh interface (defaults to bat0)') parser.add_argument('-d', '--destination-directory', action='store', help='destination directory for generated files', required=True) From 3c1140ebdfb52a5cda64b66cb9b37c81ce6e96d5 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 18:06:54 +0100 Subject: [PATCH 53/97] travis-ci: add pep8 check --- .travis.yml | 6 ++++++ README.md | 2 ++ 2 files changed, 8 insertions(+) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..cb081fa --- /dev/null +++ b/.travis.yml @@ -0,0 +1,6 @@ +sudo: false +language: python +python: + - "3.4" +install: "pip install pep8" +script: "pep8 *.py" diff --git a/README.md b/README.md index d70aaac..9f5af40 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Data for Freifunk Map, Graph and Node List +[![Build Status](https://travis-ci.org/ffnord/ffmap-backend.svg?branch=master)](https://travis-ci.org/ffnord/ffmap-backend) + ffmap-backend gathers information on the batman network by invoking * batctl, From 629adc13cb0d46158f8393e49141d02b99a8e6b9 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Tue, 24 Mar 2015 18:48:05 +0100 Subject: [PATCH 54/97] create RRDs only when --rrd --- backend.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/backend.py b/backend.py index 0a91153..2d8cc9a 100755 --- a/backend.py +++ b/backend.py @@ -74,11 
+74,12 @@ def main(params): with open(graph_fn, 'w') as f: json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) - script_directory = os.path.dirname(os.path.realpath(__file__)) - rrd = RRD(os.path.join(script_directory, 'nodedb'), - os.path.join(params['destination_directory'], 'nodes')) - rrd.update_database(nodedb['nodes']) - rrd.update_images() + if params['rrd']: + script_directory = os.path.dirname(os.path.realpath(__file__)) + rrd = RRD(os.path.join(script_directory, 'nodedb'), + os.path.join(params['destination_directory'], 'nodes')) + rrd.update_database(nodedb['nodes']) + rrd.update_images() if __name__ == '__main__': @@ -98,6 +99,9 @@ if __name__ == '__main__': help='assume MAC to be part of the VPN') parser.add_argument('--prune', metavar='DAYS', help='forget nodes offline for at least DAYS') + parser.add_argument('--rrd', dest='rrd', action='store_true', + default=False, + help='create RRD graphs') options = vars(parser.parse_args()) From c74b7b95fb5d65dcf0fee7eb5a5525ae884ab209 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 22:10:54 +0100 Subject: [PATCH 55/97] update package structure, move non-executables to lib --- backend.py | 19 +++++++++---------- GlobalRRD.py => lib/GlobalRRD.py | 3 ++- NodeRRD.py => lib/NodeRRD.py | 3 ++- RRD.py => lib/RRD.py | 0 lib/__init__.py | 1 + alfred.py => lib/alfred.py | 0 batman.py => lib/batman.py | 0 graph.py => lib/graph.py | 6 ++++-- nodes.py => lib/nodes.py | 0 rrddb.py => lib/rrddb.py | 5 +++-- 10 files changed, 21 insertions(+), 16 deletions(-) rename GlobalRRD.py => lib/GlobalRRD.py (97%) rename NodeRRD.py => lib/NodeRRD.py (98%) rename RRD.py => lib/RRD.py (100%) create mode 100644 lib/__init__.py rename alfred.py => lib/alfred.py (100%) rename batman.py => lib/batman.py (100%) rename graph.py => lib/graph.py (98%) rename nodes.py => lib/nodes.py (100%) rename rrddb.py => lib/rrddb.py (96%) diff --git a/backend.py b/backend.py index 2d8cc9a..5b60d3e 100755 --- a/backend.py 
+++ b/backend.py @@ -6,20 +6,19 @@ https://github.com/ffnord/ffmap-backend import argparse import json import os -import networkx as nx from datetime import datetime + +import networkx as nx from networkx.readwrite import json_graph -import alfred -import nodes -import graph -from batman import Batman -from rrddb import RRD +from lib import alfred, graph, nodes +from lib.batman import Batman +from lib.rrddb import RRD def main(params): - nodes_fn = os.path.join(params['destination_directory'], 'nodes.json') - graph_fn = os.path.join(params['destination_directory'], 'graph.json') + nodes_fn = os.path.join(params['dest_dir'], 'nodes.json') + graph_fn = os.path.join(params['dest_dir'], 'graph.json') now = datetime.utcnow().replace(microsecond=0) @@ -77,7 +76,7 @@ def main(params): if params['rrd']: script_directory = os.path.dirname(os.path.realpath(__file__)) rrd = RRD(os.path.join(script_directory, 'nodedb'), - os.path.join(params['destination_directory'], 'nodes')) + os.path.join(params['dest_dir'], 'nodes')) rrd.update_database(nodedb['nodes']) rrd.update_images() @@ -92,7 +91,7 @@ if __name__ == '__main__': parser.add_argument('-m', '--mesh', action='append', default=['bat0'], help='batman mesh interface (defaults to bat0)') - parser.add_argument('-d', '--destination-directory', action='store', + parser.add_argument('-d', '--dest-dir', action='store', help='destination directory for generated files', required=True) parser.add_argument('--vpn', action='append', metavar='MAC', diff --git a/GlobalRRD.py b/lib/GlobalRRD.py similarity index 97% rename from GlobalRRD.py rename to lib/GlobalRRD.py index 9c09549..47235f2 100644 --- a/GlobalRRD.py +++ b/lib/GlobalRRD.py @@ -1,6 +1,7 @@ import os import subprocess -from RRD import RRD, DS, RRA + +from lib.RRD import DS, RRA, RRD class GlobalRRD(RRD): diff --git a/NodeRRD.py b/lib/NodeRRD.py similarity index 98% rename from NodeRRD.py rename to lib/NodeRRD.py index a4ec092..37bc6f9 100644 --- a/NodeRRD.py +++ 
b/lib/NodeRRD.py @@ -1,6 +1,7 @@ import os import subprocess -from RRD import RRD, DS, RRA + +from lib.RRD import DS, RRA, RRD class NodeRRD(RRD): diff --git a/RRD.py b/lib/RRD.py similarity index 100% rename from RRD.py rename to lib/RRD.py diff --git a/lib/__init__.py b/lib/__init__.py new file mode 100644 index 0000000..64bd3f3 --- /dev/null +++ b/lib/__init__.py @@ -0,0 +1 @@ +__author__ = 'hexa' diff --git a/alfred.py b/lib/alfred.py similarity index 100% rename from alfred.py rename to lib/alfred.py diff --git a/batman.py b/lib/batman.py similarity index 100% rename from batman.py rename to lib/batman.py diff --git a/graph.py b/lib/graph.py similarity index 98% rename from graph.py rename to lib/graph.py index 460e327..d5163ff 100644 --- a/graph.py +++ b/lib/graph.py @@ -1,7 +1,9 @@ -import networkx as nx from functools import reduce from itertools import chain -from nodes import build_mac_table + +import networkx as nx + +from lib.nodes import build_mac_table def import_vis_data(graph, nodes, vis_data): diff --git a/nodes.py b/lib/nodes.py similarity index 100% rename from nodes.py rename to lib/nodes.py diff --git a/rrddb.py b/lib/rrddb.py similarity index 96% rename from rrddb.py rename to lib/rrddb.py index b023e6b..bcd33b3 100644 --- a/rrddb.py +++ b/lib/rrddb.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 import time import os -from GlobalRRD import GlobalRRD -from NodeRRD import NodeRRD + +from lib.GlobalRRD import GlobalRRD +from lib.NodeRRD import NodeRRD class RRD(object): From 6fba8ad21b50cacbb77e27ad6e981135db9e576d Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 22:48:00 +0100 Subject: [PATCH 56/97] add alfred socket support (--alfred-sock) --- backend.py | 8 +++++++- lib/alfred.py | 37 ++++++++++++++++++++++++++----------- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/backend.py b/backend.py index 5b60d3e..3eaeaf1 100755 --- a/backend.py +++ b/backend.py @@ -11,7 +11,8 @@ from datetime import datetime import 
networkx as nx from networkx.readwrite import json_graph -from lib import alfred, graph, nodes +from lib import graph, nodes +from lib.alfred import Alfred from lib.batman import Batman from lib.rrddb import RRD @@ -34,6 +35,8 @@ def main(params): for node_id, node in nodedb['nodes'].items(): node['flags']['online'] = False + alfred = Alfred(unix_sockpath=params['alfred_sock']) + nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) @@ -91,6 +94,9 @@ if __name__ == '__main__': parser.add_argument('-m', '--mesh', action='append', default=['bat0'], help='batman mesh interface (defaults to bat0)') + parser.add_argument('-s', '--alfred-sock', + default=None, + help='alfred unix socket path') parser.add_argument('-d', '--dest-dir', action='store', help='destination directory for generated files', required=True) diff --git a/lib/alfred.py b/lib/alfred.py index 0467316..1b9b220 100644 --- a/lib/alfred.py +++ b/lib/alfred.py @@ -1,20 +1,35 @@ import subprocess import json +import os -def _fetch(data_type): - output = subprocess.check_output( - ["alfred-json", "-z", "-f", "json", "-r", str(data_type)]) - return json.loads(output.decode("utf-8")).values() +class Alfred(object): + """ + Bindings for the alfred-json utility + """ + def __init__(self, unix_sockpath=None): + if unix_sockpath: + if os.path.exists(unix_sockpath): + self.unix_sock = unix_sockpath + else: + raise RuntimeError('alfred: invalid unix socket path given') + def _fetch(self, data_type): + cmd = ['alfred-json', + '-z', + '-f', 'json', + '-r', data_type] + if self.unix_sock: + cmd.extend(['-s', self.unix_sock]) -def nodeinfo(): - return _fetch(158) + output = subprocess.check_output(cmd) + return json.loads(output.decode("utf-8")).values() + def nodeinfo(self): + return self._fetch(158) -def statistics(): - return _fetch(159) + def statistics(self): + return self._fetch(159) - -def vis(): - return _fetch(160) + def vis(self): + return self._fetch(160) From 
90ab26d50b8db176dcf1d8b09cfe9a16b376f033 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Tue, 24 Mar 2015 23:17:24 +0100 Subject: [PATCH 57/97] fix regressions in alfred.py --- lib/alfred.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/lib/alfred.py b/lib/alfred.py index 1b9b220..4353874 100644 --- a/lib/alfred.py +++ b/lib/alfred.py @@ -8,17 +8,15 @@ class Alfred(object): Bindings for the alfred-json utility """ def __init__(self, unix_sockpath=None): - if unix_sockpath: - if os.path.exists(unix_sockpath): - self.unix_sock = unix_sockpath - else: - raise RuntimeError('alfred: invalid unix socket path given') + self.unix_sock = unix_sockpath + if unix_sockpath is not None and not os.path.exists(unix_sockpath): + raise RuntimeError('alfred: invalid unix socket path given') def _fetch(self, data_type): cmd = ['alfred-json', '-z', '-f', 'json', - '-r', data_type] + '-r', str(data_type)] if self.unix_sock: cmd.extend(['-s', self.unix_sock]) From b143e3f2e5b062072d611ef99199c6e627163cd5 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Wed, 25 Mar 2015 13:27:54 +0100 Subject: [PATCH 58/97] batman: add batadv-vis socket support, needs at least alfred 2014.4.0 --- backend.py | 2 +- lib/batman.py | 9 ++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/backend.py b/backend.py index 3eaeaf1..173b127 100755 --- a/backend.py +++ b/backend.py @@ -50,7 +50,7 @@ def main(params): bm = list(map(lambda d: (d.vis_data(True), d.gateway_list()), - map(Batman, params['mesh']))) + map(Batman, params['mesh'], params['alfred_sock']))) for vis_data, gateway_list in bm: nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) nodes.import_vis_clientcount(nodedb['nodes'], vis_data) diff --git a/lib/batman.py b/lib/batman.py index ddd0bd5..a4aeca2 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -8,8 +8,9 @@ class Batman(object): Bindings for B.A.T.M.A.N. 
Advanced commandline interface "batctl" """ - def __init__(self, mesh_interface='bat0'): + def __init__(self, mesh_interface='bat0', alfred_sockpath=None): self.mesh_interface = mesh_interface + self.alfred_sock = alfred_sockpath def vis_data(self, batadv_vis=False): vds = self.vis_data_batctl_legacy() @@ -44,8 +45,10 @@ class Batman(object): Parse "batadv-vis -i -f json" into an array of dictionaries. """ - output = subprocess.check_output( - ['batadv-vis', '-i', self.mesh_interface, '-f', 'json']) + cmd = ['batadv-vis', '-i', self.mesh_interface, '-f', 'json'] + if self.alfred_sock: + cmd.extend(['-u', self.alfred_sock]) + output = subprocess.check_output(cmd) lines = output.splitlines() return self.vis_data_helper(lines) From 8d4856db56a6d7b1cc13e865c489f745579628b7 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Wed, 25 Mar 2015 14:11:00 +0100 Subject: [PATCH 59/97] lib/batman: refactor gateway handling --- lib/batman.py | 38 ++++++++++++++++++++------------------ 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/lib/batman.py b/lib/batman.py index a4aeca2..4f6b943 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -12,6 +12,9 @@ class Batman(object): self.mesh_interface = mesh_interface self.alfred_sock = alfred_sockpath + # compile regular expressions only once on startup + self.mac_addr_pattern = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})') + def vis_data(self, batadv_vis=False): vds = self.vis_data_batctl_legacy() if batadv_vis: @@ -60,36 +63,35 @@ class Batman(object): output = subprocess.check_output( ['batctl', '-m', self.mesh_interface, 'gwl', '-n']) output_utf8 = output.decode('utf-8') - lines = output_utf8.splitlines() - - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$", - lines[0]).group(1) + rows = output_utf8.splitlines() gateways = [] - gw_mode = self.gateway_mode() - if gw_mode['mode'] == 'server': - gateways.append(own_mac) - for line in lines: - gw_line = re.match(r"^(?:=>)? 
+([0-9a-f:]+) ", line) - if gw_line: - gateways.append(gw_line.group(1)) + # local gateway + header = rows.pop(0) + mode, bandwidth = self.gateway_mode() + if mode == 'server': + local_gw_mac = self.mac_addr_pattern.search(header).group(0) + gateways.append(local_gw_mac) + + # remote gateway(s) + for row in rows: + match = self.mac_addr_pattern.search(row) + if match: + gateways.append(match.group(1)) return gateways def gateway_mode(self): """ Parse "batctl -m gw" + return: tuple mode, bandwidth, if mode != server then bandwidth is None """ output = subprocess.check_output( ['batctl', '-m', self.mesh_interface, 'gw']) - elements = output.decode("utf-8").split() - mode = elements[0] - if mode == 'server': - return {'mode': 'server', - 'bandwidth': elements[3]} - else: - return {'mode': mode} + chunks = output.decode("utf-8").split() + + return chunks[0], chunks[3] if 3 in chunks else None if __name__ == "__main__": bc = Batman() From eb26ea9a5f7920b8ea132ebec6bdf26765b2bf1b Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Wed, 25 Mar 2015 14:33:54 +0100 Subject: [PATCH 60/97] pep8: fix remaining line length issues and update travis to include lib/*.py --- .travis.yml | 2 +- lib/NodeRRD.py | 6 ++++-- lib/RRD.py | 23 +++++++++++++++-------- lib/graph.py | 29 +++++++++++++++++++---------- lib/nodes.py | 25 ++++++++++++++----------- lib/rrddb.py | 9 ++++++--- 6 files changed, 59 insertions(+), 35 deletions(-) diff --git a/.travis.yml b/.travis.yml index cb081fa..dcaceb4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,4 +3,4 @@ language: python python: - "3.4" install: "pip install pep8" -script: "pep8 *.py" +script: "pep8 *.py lib/*.py" diff --git a/lib/NodeRRD.py b/lib/NodeRRD.py index 37bc6f9..afabe6f 100644 --- a/lib/NodeRRD.py +++ b/lib/NodeRRD.py @@ -32,11 +32,13 @@ class NodeRRD(RRD): @property def imagename(self): - return "{basename}.png".format(basename=os.path.basename(self.filename).rsplit('.', 2)[0]) + return "{basename}.png".format( + 
basename=os.path.basename(self.filename).rsplit('.', 2)[0]) # TODO: fix this, python does not support function overloading def update(self): - super().update({'upstate': int(self.node['flags']['online']), 'clients': self.node['statistics']['clients']}) + super().update({'upstate': int(self.node['flags']['online']), + 'clients': self.node['statistics']['clients']}) def graph(self, directory, timeframe): """ diff --git a/lib/RRD.py b/lib/RRD.py index 799338c..3c406ac 100644 --- a/lib/RRD.py +++ b/lib/RRD.py @@ -83,9 +83,11 @@ class RRD(object): info = self.info() if set(ds_list) - set(info['ds'].values()) != set(): for ds in ds_list: - if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type: - raise RRDIncompatibleException("%s is %s but should be %s" % - (ds.name, ds.type, info['ds'][ds.name].type)) + if ds.name in info['ds'] and\ + ds.type != info['ds'][ds.name].type: + raise RRDIncompatibleException( + "{} is {} but should be {}".format( + ds.name, ds.type, info['ds'][ds.name].type)) else: raise RRDOutdatedException() @@ -108,8 +110,10 @@ class RRD(object): if ds.name in info['ds']: old_ds = info['ds'][ds.name] if info['ds'][ds.name].type != ds.type: - raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' % - (ds.name, old_ds.type, ds.type)) + raise RuntimeError( + "Cannot convert existing DS '{}'" + "from type '{}' to '{}'".format( + ds.name, old_ds.type, ds.type)) ds.index = old_ds.index new_ds[ds.index] = ds else: @@ -237,7 +241,8 @@ class RRD(object): for line in out.splitlines(): base = info for match in self._info_regex.finditer(line): - section, key, name, value = match.group("section", "key", "name", "value") + section, key, name, value = match.group( + "section", "key", "name", "value") if section and key: try: key = int(key) @@ -258,7 +263,8 @@ class RRD(object): base[name] = value dss = {} for name, ds in info['ds'].items(): - ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'], ds['min'], ds['max']) + ds_obj = DS(name, 
ds['type'], ds['minimal_heartbeat'], + ds['min'], ds['max']) ds_obj.index = ds['index'] ds_obj.last_ds = ds['last_ds'] ds_obj.value = ds['value'] @@ -267,7 +273,8 @@ class RRD(object): info['ds'] = dss rras = [] for rra in info['rra'].values(): - rras.append(RRA(rra['cf'], rra['xff'], rra['pdp_per_row'], rra['rows'])) + rras.append(RRA(rra['cf'], rra['xff'], + rra['pdp_per_row'], rra['rows'])) info['rra'] = rras self._cached_info = info return info diff --git a/lib/graph.py b/lib/graph.py index d5163ff..7b45828 100644 --- a/lib/graph.py +++ b/lib/graph.py @@ -8,12 +8,17 @@ from lib.nodes import build_mac_table def import_vis_data(graph, nodes, vis_data): macs = build_mac_table(nodes) - nodes_a = map(lambda d: 2*[d['primary']], filter(lambda d: 'primary' in d, vis_data)) - nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) - graph.add_nodes_from(map(lambda a, b: (a, dict(primary=b, node_id=macs.get(b))), *zip(*chain(nodes_a, nodes_b)))) + nodes_a = map(lambda d: 2*[d['primary']], + filter(lambda d: 'primary' in d, vis_data)) + nodes_b = map(lambda d: [d['secondary'], d['of']], + filter(lambda d: 'secondary' in d, vis_data)) + graph.add_nodes_from(map(lambda a, b: + (a, dict(primary=b, node_id=macs.get(b))), + *zip(*chain(nodes_a, nodes_b)))) edges = filter(lambda d: 'neighbor' in d, vis_data) - graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], dict(tq=float(d['label']))), edges)) + graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'], + dict(tq=float(d['label']))), edges)) def mark_vpn(graph, vpn_macs): @@ -32,11 +37,13 @@ def to_multigraph(graph): return node['primary'] if node else a def map_node(node, data): - return (data['primary'], dict(node_id=data['node_id'])) if data else (node, dict()) + return (data['primary'], + dict(node_id=data['node_id'])) if data else (node, dict()) digraph = nx.MultiDiGraph() digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True)))) - 
digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), *zip(*graph.edges_iter(data=True)))) + digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data), + *zip(*graph.edges_iter(data=True)))) return digraph @@ -50,8 +57,9 @@ def merge_nodes(graph): multigraph = to_multigraph(graph) digraph = nx.DiGraph() digraph.add_nodes_from(multigraph.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) - for d in multigraph[e]] for e in multigraph]) + edges = chain.from_iterable([[(e, d, merge_edges( + multigraph[e][d].values())) + for d in multigraph[e]] for e in multigraph]) digraph.add_edges_from(edges) return digraph @@ -69,8 +77,9 @@ def to_undirected(graph): graph = nx.Graph() graph.add_nodes_from(multigraph.nodes_iter(data=True)) - edges = chain.from_iterable([[(e, d, merge_edges(multigraph[e][d].values())) - for d in multigraph[e]] for e in multigraph]) + edges = chain.from_iterable([[(e, d, merge_edges( + multigraph[e][d].values())) + for d in multigraph[e]] for e in multigraph]) graph.add_edges_from(edges) return graph diff --git a/lib/nodes.py b/lib/nodes.py index 23a2b0e..bb1e129 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -53,23 +53,26 @@ def reset_statistics(nodes): node['statistics'] = {'clients': 0} -def import_statistics(nodes, statistics): +def import_statistics(nodes, stats): def add(node, statistics, target, source, f=lambda d: d): try: - node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) + node['statistics'][target] = f(reduce(dict.__getitem__, + source, + statistics)) except (KeyError, TypeError): pass macs = build_mac_table(nodes) - statistics = filter(lambda d: 'node_id' in d, statistics) - statistics = filter(lambda d: d['node_id'] in nodes, statistics) - for node, statistics in map(lambda d: (nodes[d['node_id']], d), statistics): - add(node, statistics, 'clients', ['clients', 'total']) - add(node, statistics, 'gateway', ['gateway'], lambda d: macs.get(d, 
d)) - add(node, statistics, 'uptime', ['uptime']) - add(node, statistics, 'loadavg', ['loadavg']) - add(node, statistics, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) - add(node, statistics, 'rootfs_usage', ['rootfs_usage']) + stats = filter(lambda d: 'node_id' in d, stats) + stats = filter(lambda d: d['node_id'] in nodes, stats) + for node, stats in map(lambda d: (nodes[d['node_id']], d), stats): + add(node, stats, 'clients', ['clients', 'total']) + add(node, stats, 'gateway', ['gateway'], lambda d: macs.get(d, d)) + add(node, stats, 'uptime', ['uptime']) + add(node, stats, 'loadavg', ['loadavg']) + add(node, stats, 'memory_usage', ['memory'], + lambda d: 1 - d['free'] / d['total']) + add(node, stats, 'rootfs_usage', ['rootfs_usage']) def import_mesh_ifs_vis_data(nodes, vis_data): diff --git a/lib/rrddb.py b/lib/rrddb.py index bcd33b3..57437a7 100644 --- a/lib/rrddb.py +++ b/lib/rrddb.py @@ -28,8 +28,10 @@ class RRD(object): os.mkdir(self.imagePath) def update_database(self, nodes): - online_nodes = dict(filter(lambda d: d[1]['flags']['online'], nodes.items())) - client_count = sum(map(lambda d: d['statistics']['clients'], online_nodes.values())) + online_nodes = dict(filter( + lambda d: d[1]['flags']['online'], nodes.items())) + client_count = sum(map( + lambda d: d['statistics']['clients'], online_nodes.values())) self.globalDb.update(len(online_nodes), client_count) for node_id, node in online_nodes.items(): @@ -37,7 +39,8 @@ class RRD(object): rrd.update() def update_images(self): - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) + self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), + self.displayTimeGlobal) nodedb_files = os.listdir(self.dbPath) From 9df369e88a3e7823d9812285d2cb8a335266f62e Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Wed, 25 Mar 2015 15:14:58 +0100 Subject: [PATCH 61/97] update batman interface handling in backend.py, add commenting --- backend.py | 42 
+++++++++++++++++++++++++++++------------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/backend.py b/backend.py index 173b127..e2febf9 100755 --- a/backend.py +++ b/backend.py @@ -23,23 +23,24 @@ def main(params): now = datetime.utcnow().replace(microsecond=0) + # read nodedb state from node.json with open(nodes_fn, 'r') as nodedb_handle: nodedb = json.load(nodedb_handle) - # flush nodedb if it uses the old format if 'links' in nodedb: nodedb = {'nodes': dict()} + # update timestamp and assume all nodes are offline nodedb['timestamp'] = now.isoformat() - for node_id, node in nodedb['nodes'].items(): node['flags']['online'] = False + # integrate alfred nodeinfo alfred = Alfred(unix_sockpath=params['alfred_sock']) - nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), now, assume_online=True) + # integrate static aliases data for aliases in params['aliases']: with open(aliases, 'r') as f: nodes.import_nodeinfo(nodedb['nodes'], json.load(f), @@ -48,34 +49,49 @@ def main(params): nodes.reset_statistics(nodedb['nodes']) nodes.import_statistics(nodedb['nodes'], alfred.statistics()) - bm = list(map(lambda d: - (d.vis_data(True), d.gateway_list()), - map(Batman, params['mesh'], params['alfred_sock']))) - for vis_data, gateway_list in bm: - nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vis_data) - nodes.import_vis_clientcount(nodedb['nodes'], vis_data) - nodes.mark_vis_data_online(nodedb['nodes'], vis_data, now) - nodes.mark_gateways(nodedb['nodes'], gateway_list) + # initialize batman bindings for each mesh interface + # and acquire gwl and visdata + mesh_interfaces = frozenset(params['mesh']) + mesh_info = {} + for interface in mesh_interfaces: + bm = Batman(mesh_interface=interface, + alfred_sockpath=params['alfred_sock']) + vd = bm.vis_data(True) + gwl = bm.gateway_list() + mesh_info[interface] = (vd, gwl) + + # update nodedb from batman-adv data + for vd, gwl in mesh_info.values(): + nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vd) + 
nodes.import_vis_clientcount(nodedb['nodes'], vd) + nodes.mark_vis_data_online(nodedb['nodes'], vd, now) + nodes.mark_gateways(nodedb['nodes'], gwl) + + # clear the nodedb from nodes that have not been online in $prune days if params['prune']: nodes.prune_nodes(nodedb['nodes'], now, int(params['prune'])) + # build nxnetworks graph from nodedb and visdata batadv_graph = nx.DiGraph() - for vis_data, gateway_list in bm: - graph.import_vis_data(batadv_graph, nodedb['nodes'], vis_data) + for vd, gwl in mesh_info.values(): + graph.import_vis_data(batadv_graph, nodedb['nodes'], vd) + # force mac addresses to be vpn-link only (like gateways for example) if params['vpn']: graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) batadv_graph = graph.merge_nodes(batadv_graph) batadv_graph = graph.to_undirected(batadv_graph) + # write processed data to dest dir with open(nodes_fn, 'w') as f: json.dump(nodedb, f) with open(graph_fn, 'w') as f: json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) + # optional rrd graphs (trigger with --rrd) if params['rrd']: script_directory = os.path.dirname(os.path.realpath(__file__)) rrd = RRD(os.path.join(script_directory, 'nodedb'), From a1fe27fc5199a91920fd2432a45abbf77fbaab1d Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Thu, 26 Mar 2015 01:53:44 +0100 Subject: [PATCH 62/97] Update argument parser * --mesh (-m) now accepts the interface:alfred_sock syntax to add multiple batman/alfred instances. Also multiple instances can be added at once now. Only one interface can be added without alfred socket support (available since 2014.4.0) though. 
* --alfred-sock (-s) was dropped in favor of the new --mesh syntax, which adds the interface to socket relationship * --vpn (-V) now accepts multiple mac addresses, ATTENTION: update your calls accordingly * --prune defaults to int now * --with-rrd was renamed from --rrd, to better reflect its boolean/toggle like state --- backend.py | 89 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 56 insertions(+), 33 deletions(-) diff --git a/backend.py b/backend.py index e2febf9..82817bd 100755 --- a/backend.py +++ b/backend.py @@ -6,6 +6,7 @@ https://github.com/ffnord/ffmap-backend import argparse import json import os +import sys from datetime import datetime import networkx as nx @@ -23,9 +24,37 @@ def main(params): now = datetime.utcnow().replace(microsecond=0) + # parse mesh param and instantiate Alfred/Batman instances + alfred_instances = [] + batman_instances = [] + for value in params['mesh']: + # (1) only batman-adv if, no alfred sock + if ':' not in value: + if len(params['mesh']) > 1: + raise ValueError( + 'Multiple mesh interfaces require the use of ' + 'alfred socket paths.') + alfred_instances.append(Alfred(unix_sockpath=None)) + batman_instances.append(Batman(mesh_interface=value)) + else: + # (2) batman-adv if + alfred socket + try: + batif, alfredsock = value.split(':') + alfred_instances.append(Alfred(unix_sockpath=alfredsock)) + batman_instances.append(Batman(mesh_interface=batif, + alfred_sockpath=alfredsock)) + except ValueError: + raise ValueError( + 'Unparseable value "{0}" in --mesh parameter.'. 
+ format(value)) + # read nodedb state from node.json - with open(nodes_fn, 'r') as nodedb_handle: - nodedb = json.load(nodedb_handle) + try: + with open(nodes_fn, 'r') as nodedb_handle: + nodedb = json.load(nodedb_handle) + except FileNotFoundError: + nodedb = {'nodes': dict()} + # flush nodedb if it uses the old format if 'links' in nodedb: nodedb = {'nodes': dict()} @@ -36,9 +65,9 @@ def main(params): node['flags']['online'] = False # integrate alfred nodeinfo - alfred = Alfred(unix_sockpath=params['alfred_sock']) - nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), - now, assume_online=True) + for alfred in alfred_instances: + nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), + now, assume_online=True) # integrate static aliases data for aliases in params['aliases']: @@ -47,22 +76,19 @@ def main(params): now, assume_online=False) nodes.reset_statistics(nodedb['nodes']) - nodes.import_statistics(nodedb['nodes'], alfred.statistics()) + for alfred in alfred_instances: + nodes.import_statistics(nodedb['nodes'], alfred.statistics()) - # initialize batman bindings for each mesh interface - # and acquire gwl and visdata - mesh_interfaces = frozenset(params['mesh']) - mesh_info = {} - for interface in mesh_interfaces: - bm = Batman(mesh_interface=interface, - alfred_sockpath=params['alfred_sock']) - vd = bm.vis_data(True) - gwl = bm.gateway_list() + # acquire gwl and visdata for each batman instance + mesh_info = [] + for batman in batman_instances: + vd = batman.vis_data(True) + gwl = batman.gateway_list() - mesh_info[interface] = (vd, gwl) + mesh_info.append((vd, gwl)) # update nodedb from batman-adv data - for vd, gwl in mesh_info.values(): + for vd, gwl in mesh_info: nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vd) nodes.import_vis_clientcount(nodedb['nodes'], vd) nodes.mark_vis_data_online(nodedb['nodes'], vd, now) @@ -70,11 +96,11 @@ def main(params): # clear the nodedb from nodes that have not been online in $prune days if params['prune']: - 
nodes.prune_nodes(nodedb['nodes'], now, int(params['prune'])) + nodes.prune_nodes(nodedb['nodes'], now, params['prune']) # build nxnetworks graph from nodedb and visdata batadv_graph = nx.DiGraph() - for vd, gwl in mesh_info.values(): + for vd, gwl in mesh_info: graph.import_vis_data(batadv_graph, nodedb['nodes'], vd) # force mac addresses to be vpn-link only (like gateways for example) @@ -104,26 +130,23 @@ if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-a', '--aliases', - help='read aliases from FILE', + help='Read aliases from FILE', default=[], action='append', metavar='FILE') - parser.add_argument('-m', '--mesh', action='append', - default=['bat0'], - help='batman mesh interface (defaults to bat0)') - parser.add_argument('-s', '--alfred-sock', - default=None, - help='alfred unix socket path') + parser.add_argument('-m', '--mesh', + default=['bat0'], nargs='+', + help='Use given batman-adv mesh interface(s) (defaults to bat0); ' + 'specify alfred unix socket like bat0:/run/alfred0.sock.') parser.add_argument('-d', '--dest-dir', action='store', - help='destination directory for generated files', + help='Write output to destination directory', required=True) - parser.add_argument('--vpn', action='append', metavar='MAC', - help='assume MAC to be part of the VPN') - parser.add_argument('--prune', metavar='DAYS', + parser.add_argument('-V', '--vpn', nargs='+', metavar='MAC', + help='Assume MAC addresses are part of vpn') + parser.add_argument('-p', '--prune', metavar='DAYS', type=int, help='forget nodes offline for at least DAYS') - parser.add_argument('--rrd', dest='rrd', action='store_true', + parser.add_argument('--with-rrd', dest='rrd', action='store_true', default=False, - help='create RRD graphs') + help='enable the rendering of RRD graphs (cpu intensive)') options = vars(parser.parse_args()) - main(options) From 5b5f4a5d745839558268753a4f8ebc8b8e43f3a0 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Thu, 26 Mar 2015 
02:13:14 +0100 Subject: [PATCH 63/97] fix pep8 line-length in argparser help --- backend.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/backend.py b/backend.py index 82817bd..f13dff4 100755 --- a/backend.py +++ b/backend.py @@ -135,8 +135,9 @@ if __name__ == '__main__': metavar='FILE') parser.add_argument('-m', '--mesh', default=['bat0'], nargs='+', - help='Use given batman-adv mesh interface(s) (defaults to bat0); ' - 'specify alfred unix socket like bat0:/run/alfred0.sock.') + help='Use given batman-adv mesh interface(s) (defaults' + 'to bat0); specify alfred unix socket like ' + 'bat0:/run/alfred0.sock.') parser.add_argument('-d', '--dest-dir', action='store', help='Write output to destination directory', required=True) @@ -146,7 +147,8 @@ if __name__ == '__main__': help='forget nodes offline for at least DAYS') parser.add_argument('--with-rrd', dest='rrd', action='store_true', default=False, - help='enable the rendering of RRD graphs (cpu intensive)') + help='enable the rendering of RRD graphs (cpu ' + 'intensive)') options = vars(parser.parse_args()) main(options) From 98d461815624e2df82f91bd3647050bf81e8ca1a Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Thu, 26 Mar 2015 14:21:11 +0100 Subject: [PATCH 64/97] fix pruning --- lib/nodes.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/nodes.py b/lib/nodes.py index bb1e129..93d3835 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -22,9 +22,9 @@ def prune_nodes(nodes, now, days): continue lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S') - delta = (now - lastseen).seconds + delta = (now - lastseen).days - if delta >= days * 86400: + if delta >= days: prune.append(node_id) for node_id in prune: From dd8f6b92af7abb0df2e4f7cd0da031135f5ed754 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 01:36:29 +0200 Subject: [PATCH 65/97] drop batctl vd json legacy support fixes #47 --- backend.py | 2 +- lib/batman.py | 18 
++---------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/backend.py b/backend.py index f13dff4..e5c5206 100755 --- a/backend.py +++ b/backend.py @@ -82,7 +82,7 @@ def main(params): # acquire gwl and visdata for each batman instance mesh_info = [] for batman in batman_instances: - vd = batman.vis_data(True) + vd = batman.vis_data() gwl = batman.gateway_list() mesh_info.append((vd, gwl)) diff --git a/lib/batman.py b/lib/batman.py index 4f6b943..5c48740 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -15,11 +15,8 @@ class Batman(object): # compile regular expressions only once on startup self.mac_addr_pattern = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})') - def vis_data(self, batadv_vis=False): - vds = self.vis_data_batctl_legacy() - if batadv_vis: - vds += self.vis_data_batadv_vis() - return vds + def vis_data(self): + return self.vis_data_batadv_vis() @staticmethod def vis_data_helper(lines): @@ -32,17 +29,6 @@ class Batman(object): pass return vd_tmp - def vis_data_batctl_legacy(self): - """ - Parse "batctl -m vd json -n" - into an array of dictionaries. 
- """ - output = subprocess.check_output( - ['batctl', '-m', self.mesh_interface, 'vd', 'json', '-n']) - lines = output.splitlines() - vds = self.vis_data_helper(lines) - return vds - def vis_data_batadv_vis(self): """ Parse "batadv-vis -i -f json" From 206ea3d6efc9e0d737b5956034d738f2f04e5225 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 01:50:35 +0200 Subject: [PATCH 66/97] update travis.yml to ignore PEP8 E113 Line-Length --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index dcaceb4..48ef08c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,4 +3,4 @@ language: python python: - "3.4" install: "pip install pep8" -script: "pep8 *.py lib/*.py" +script: "pep8 --ignore=E113 *.py lib/*.py" From 29e2647ad5d4f81b6ca35e271c6fc412a40e6fff Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 17:16:32 +0200 Subject: [PATCH 67/97] ignored the wrong pep8 error code --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 48ef08c..ee88058 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,4 +3,4 @@ language: python python: - "3.4" install: "pip install pep8" -script: "pep8 --ignore=E113 *.py lib/*.py" +script: "pep8 --ignore=E501 *.py lib/*.py" From bb2aa112c3c6b51d16e488cfbbef8298c8594645 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 17:34:35 +0200 Subject: [PATCH 68/97] make pep8 happy --- lib/RRD.py | 4 ++-- lib/graph.py | 2 +- lib/rrddb.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/RRD.py b/lib/RRD.py index 3c406ac..4e925a7 100644 --- a/lib/RRD.py +++ b/lib/RRD.py @@ -159,7 +159,7 @@ class RRD(object): ds.unknown_sec), "utf-8")) if b'' in line: - restore.stdin.write(added_ds_num*b""" + restore.stdin.write(added_ds_num * b""" NaN NaN @@ -173,7 +173,7 @@ class RRD(object): restore.stdin.write( line.replace( b'', - (added_ds_num*b'NaN')+b'' + (added_ds_num * b'NaN') + b'' ) ) 
diff --git a/lib/graph.py b/lib/graph.py index 7b45828..db1259e 100644 --- a/lib/graph.py +++ b/lib/graph.py @@ -8,7 +8,7 @@ from lib.nodes import build_mac_table def import_vis_data(graph, nodes, vis_data): macs = build_mac_table(nodes) - nodes_a = map(lambda d: 2*[d['primary']], + nodes_a = map(lambda d: 2 * [d['primary']], filter(lambda d: 'primary' in d, vis_data)) nodes_b = map(lambda d: [d['secondary'], d['of']], filter(lambda d: 'secondary' in d, vis_data)) diff --git a/lib/rrddb.py b/lib/rrddb.py index 57437a7..f1678f5 100644 --- a/lib/rrddb.py +++ b/lib/rrddb.py @@ -19,7 +19,7 @@ class RRD(object): self.displayTimeGlobal = display_time_global self.displayTimeNode = display_time_node - self.currentTimeInt = (int(time.time())/60)*60 + self.currentTimeInt = (int(time.time()) / 60) * 60 self.currentTime = str(self.currentTimeInt) try: From ebde2fcba2ec3654aeb774eddef569d7e2293814 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 17:41:40 +0200 Subject: [PATCH 69/97] create dest_dir if needed fixes #48 --- backend.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend.py b/backend.py index e5c5206..57707bf 100755 --- a/backend.py +++ b/backend.py @@ -19,6 +19,8 @@ from lib.rrddb import RRD def main(params): + os.makedirs(params['dest_dir'], exist_ok=True) + nodes_fn = os.path.join(params['dest_dir'], 'nodes.json') graph_fn = os.path.join(params['dest_dir'], 'graph.json') @@ -52,7 +54,7 @@ def main(params): try: with open(nodes_fn, 'r') as nodedb_handle: nodedb = json.load(nodedb_handle) - except FileNotFoundError: + except IOError: nodedb = {'nodes': dict()} # flush nodedb if it uses the old format From c9098b179379920cb7e06383290542c8b1851364 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 17:55:27 +0200 Subject: [PATCH 70/97] set version of nodes.json to 1 --- backend.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/backend.py b/backend.py index 57707bf..98f0124 100755 --- a/backend.py +++ 
b/backend.py @@ -61,6 +61,9 @@ def main(params): if 'links' in nodedb: nodedb = {'nodes': dict()} + # set version we're going to output + nodedb['version'] = 1 + # update timestamp and assume all nodes are offline nodedb['timestamp'] = now.isoformat() for node_id, node in nodedb['nodes'].items(): From 7f198980b6ea57f1e83155a3cff9c73ab1486710 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 17:58:25 +0200 Subject: [PATCH 71/97] introduce GRAPH_VERSION and NODES_VERSION --- backend.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/backend.py b/backend.py index 98f0124..4bf9058 100755 --- a/backend.py +++ b/backend.py @@ -17,6 +17,9 @@ from lib.alfred import Alfred from lib.batman import Batman from lib.rrddb import RRD +NODES_VERSION = 1 +GRAPH_VERSION = 1 + def main(params): os.makedirs(params['dest_dir'], exist_ok=True) @@ -62,7 +65,7 @@ def main(params): nodedb = {'nodes': dict()} # set version we're going to output - nodedb['version'] = 1 + nodedb['version'] = NODES_VERSION # update timestamp and assume all nodes are offline nodedb['timestamp'] = now.isoformat() @@ -119,8 +122,11 @@ def main(params): with open(nodes_fn, 'w') as f: json.dump(nodedb, f) + graph_out = {'batadv': json_graph.node_link_data(batadv_graph), + 'version': GRAPH_VERSION} + with open(graph_fn, 'w') as f: - json.dump({'batadv': json_graph.node_link_data(batadv_graph)}, f) + json.dump(graph_out, f) # optional rrd graphs (trigger with --rrd) if params['rrd']: From 428a9731e0fff2fb0b778ab1a834dfa5923e24b6 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 1 Apr 2015 23:31:34 +0200 Subject: [PATCH 72/97] README: removing owner info using jq --- README.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/README.md b/README.md index 9f5af40..af25e8f 100644 --- a/README.md +++ b/README.md @@ -60,3 +60,25 @@ For the script's regular execution add the following to the crontab: - online - gateway + +# Removing owner 
information + +If you'd like to redact information about the node owner from `nodes.json`, +you may use a filter like [jq]. In this case, specify an output directory +different from your webserver directory, e.g.: + + ./backend.py -d /ffmap-data + +Don't write to files generated in there. ffmap-backend uses them as its +database. + +After running ffmap-backend, copy `graph.json` to your webserver. Then, +filter `nodes.json` using `jq` like this: + + jq '.nodes = (.nodes | with_entries(del(.value.nodeinfo.owner)))' \ + < /ffmap-data/nodes.json > /var/www/data/nodes.json + +This will remove owner information from nodes.json before copying the data +to your webserver. + +[jq]: https://stedolan.github.io/jq/ From 4b88a196ac9b957395aba99d39bdedd9544a7421 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Thu, 2 Apr 2015 18:52:00 +0200 Subject: [PATCH 73/97] README: drop sudo explanation --- README.md | 26 +++++++------------------- 1 file changed, 7 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index af25e8f..6cbd238 100644 --- a/README.md +++ b/README.md @@ -2,33 +2,21 @@ [![Build Status](https://travis-ci.org/ffnord/ffmap-backend.svg?branch=master)](https://travis-ci.org/ffnord/ffmap-backend) -ffmap-backend gathers information on the batman network by invoking +ffmap-backend gathers information on the batman network by invoking : - * batctl, + * batctl (might require root), * alfred-json and * batadv-vis -as root (via sudo) and has this information placed into a target directory -as the file "nodes.json" and also updates the directory "nodes" with graphical -representations of uptimes and the number of clients connecting. +In order to use alfred-json and batadv-vis make sure the user running this +backend is allowed to access alfred's socket. + +The output will be written to a directory (`-d output`). Run `backend.py --help` for a quick overview of all available options. 
-When executed without root privileges, we suggest to grant sudo permissions -within wrappers of those binaries, so no further changes are required in other -scripts: - -
-$ cat < $HOME/batctl
-#!/bin/sh
-exec sudo /usr/sbin/batctl $*
-EOCAT
-
- -and analogously for batadv-vis. The entry for /etc/sudoers could be -whateveruser ALL=(ALL:ALL) NOPASSWD: /usr/sbin/batctl,/usr/sbin/batadv-vis,/usr/sbin/alfred-json - For the script's regular execution add the following to the crontab: +
 * * * * * /path/to/ffmap-backend/backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a --vpn d2:d0:93:63:f7:da
 
From fa740273bb706c20baf4e33bcfe5a7700c663299 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Wed, 8 Apr 2015 12:54:46 +0200 Subject: [PATCH 74/97] output nodelist.json --- backend.py | 5 +++++ lib/nodelist.py | 24 ++++++++++++++++++++++++ 2 files changed, 29 insertions(+) create mode 100644 lib/nodelist.py diff --git a/backend.py b/backend.py index 4bf9058..392d931 100755 --- a/backend.py +++ b/backend.py @@ -16,6 +16,7 @@ from lib import graph, nodes from lib.alfred import Alfred from lib.batman import Batman from lib.rrddb import RRD +from lib.nodelist import export_nodelist NODES_VERSION = 1 GRAPH_VERSION = 1 @@ -26,6 +27,7 @@ def main(params): nodes_fn = os.path.join(params['dest_dir'], 'nodes.json') graph_fn = os.path.join(params['dest_dir'], 'graph.json') + nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json') now = datetime.utcnow().replace(microsecond=0) @@ -128,6 +130,9 @@ def main(params): with open(graph_fn, 'w') as f: json.dump(graph_out, f) + with open(nodelist_fn, 'w') as f: + json.dump(export_nodelist(now, nodedb), f) + # optional rrd graphs (trigger with --rrd) if params['rrd']: script_directory = os.path.dirname(os.path.realpath(__file__)) diff --git a/lib/nodelist.py b/lib/nodelist.py new file mode 100644 index 0000000..15aea63 --- /dev/null +++ b/lib/nodelist.py @@ -0,0 +1,24 @@ +def export_nodelist(now, nodedb): + nodelist = list() + + for node_id, node in nodedb["nodes"].items(): + node_out = dict() + node_out["id"] = node_id + node_out["name"] = node["nodeinfo"]["hostname"] + + if "location" in node["nodeinfo"]: + node_out["position"] = {"lat": node["nodeinfo"]["location"]["latitude"], + "long": node["nodeinfo"]["location"]["longitude"]} + + node_out["status"] = dict() + node_out["status"]["online"] = node["flags"]["online"] + + if "lastseen" in node: + node_out["status"]["lastcontact"] = node["lastseen"] + + if "clients" in node["statistics"]: + node_out["status"]["clients"] = node["statistics"]["clients"] + + 
nodelist.append(node_out) + + return {"version": "1.0.1", "nodes": nodelist, "updated_at": now.isoformat()} From 9a652c429c465337dfaef3242d28061bc0efbeaa Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 12 Apr 2015 12:07:48 +0200 Subject: [PATCH 75/97] README: use new --vpn syntax --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6cbd238..b0137fc 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ Run `backend.py --help` for a quick overview of all available options. For the script's regular execution add the following to the crontab:
-* * * * * /path/to/ffmap-backend/backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a --vpn d2:d0:93:63:f7:da
+* * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da
 
# Data format From 7322a14274984cde34252fd62431f45f664a8352 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 12 Apr 2015 18:57:44 +0200 Subject: [PATCH 76/97] batman: prefix sudo for batctl if not executed as root depends on proper sudo rule, like: mapuser ALL = NOPASSWD: /usr/sbin/batctl --- lib/batman.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/batman.py b/lib/batman.py index 5c48740..17f4db3 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -1,5 +1,6 @@ import subprocess import json +import os import re @@ -46,8 +47,10 @@ class Batman(object): Parse "batctl -m gwl -n" into an array of dictionaries. """ - output = subprocess.check_output( - ['batctl', '-m', self.mesh_interface, 'gwl', '-n']) + cmd = ['batctl', '-m', self.mesh_interface, 'gwl', '-n'] + if os.geteuid() > 0: + cmd.insert(0, 'sudo') + output = subprocess.check_output(cmd) output_utf8 = output.decode('utf-8') rows = output_utf8.splitlines() @@ -73,8 +76,10 @@ class Batman(object): Parse "batctl -m gw" return: tuple mode, bandwidth, if mode != server then bandwidth is None """ - output = subprocess.check_output( - ['batctl', '-m', self.mesh_interface, 'gw']) + cmd = ['batctl', '-m', self.mesh_interface, 'gw'] + if os.geteuid() > 0: + cmd.insert(0, 'sudo') + output = subprocess.check_output(cmd) chunks = output.decode("utf-8").split() return chunks[0], chunks[3] if 3 in chunks else None From 6f97932ea29c06e03cdda72f8df626005f1f1daa Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 12 Apr 2015 19:13:22 +0200 Subject: [PATCH 77/97] README.md: add instructions to run under unprivileged user --- README.md | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b0137fc..a3781ea 100644 --- a/README.md +++ b/README.md @@ -8,9 +8,6 @@ ffmap-backend gathers information on the batman network by invoking : * alfred-json and * batadv-vis -In order to use alfred-json and batadv-vis make sure 
the user running this -backend is allowed to access alfred's socket. - The output will be written to a directory (`-d output`). Run `backend.py --help` for a quick overview of all available options. @@ -21,6 +18,32 @@ For the script's regular execution add the following to the crontab: * * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da +# Running as unprivileged user + +Some information collected by ffmap-backend requires access to specific system resources. + +Make sure the user you are running this under is part of the group that owns the alfred socket, so +alfred-json can access the alfred daemon. + + # ls -al /var/run/alfred.sock + srw-rw---- 1 root alfred 0 Mar 19 22:00 /var/run/alfred.sock= + # adduser map alfred + Adding user `map' to group `alfred' ... + Adding user map to group alfred + Done. + $ groups + map alfred + +Running batctl requires passwordless sudo access, because it needs to access the debugfs to retrive +the gateway list. + + # echo 'map ALL = NOPASSWD: /usr/sbin/batctl' | tee /etc/sudoers.d/map + map ALL = NOPASSWD: /usr/sbin/batctl + # chmod 0440 /etc/sudoers.d/map + +That should be everything. The script automatically detects if it is run in unprivileged mode and +will prefix `sudo` where necessary. + # Data format ## nodes.json From 3ec0874b774b29e3bdbc10102b8650f6c97a7d91 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 12 Apr 2015 19:26:45 +0200 Subject: [PATCH 78/97] Update --aliases (-a) switch to use nargs=+ This breaks calls with multiple --aliases params specified and introduces --aliases FILE1 FILE2 FILE3 [...] 
instead --- backend.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/backend.py b/backend.py index 392d931..7cd6630 100755 --- a/backend.py +++ b/backend.py @@ -147,8 +147,7 @@ if __name__ == '__main__': parser.add_argument('-a', '--aliases', help='Read aliases from FILE', - default=[], action='append', - metavar='FILE') + nargs='+', metavar='FILE') parser.add_argument('-m', '--mesh', default=['bat0'], nargs='+', help='Use given batman-adv mesh interface(s) (defaults' From 1ee17c04404e0bbf42536a1f3906b5576edb0545 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 12 Apr 2015 19:38:21 +0200 Subject: [PATCH 79/97] partially revert 3ec0874b774b29e3bdbc10102b8650f6c97a7d91 --- backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend.py b/backend.py index 7cd6630..685c606 100755 --- a/backend.py +++ b/backend.py @@ -147,7 +147,7 @@ if __name__ == '__main__': parser.add_argument('-a', '--aliases', help='Read aliases from FILE', - nargs='+', metavar='FILE') + nargs='+', default=[], metavar='FILE') parser.add_argument('-m', '--mesh', default=['bat0'], nargs='+', help='Use given batman-adv mesh interface(s) (defaults' From dfcb9a39408620ce8cc7648a0b4e359277de2b00 Mon Sep 17 00:00:00 2001 From: Martin Weinelt Date: Sun, 12 Apr 2015 19:50:21 +0200 Subject: [PATCH 80/97] batman: ensure /usr/sbin and /usr/local/sbin are in PATH --- lib/batman.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/lib/batman.py b/lib/batman.py index 17f4db3..ee0a0fe 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -13,6 +13,14 @@ class Batman(object): self.mesh_interface = mesh_interface self.alfred_sock = alfred_sockpath + # ensure /usr/sbin and /usr/local/sbin are in PATH + env = os.environ + path = set(env['PATH'].split(':')) + path.add('/usr/sbin/') + path.add('/usr/local/sbin') + env['PATH'] = ':'.join(path) + self.environ = env + # compile regular expressions only once on startup self.mac_addr_pattern = 
re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})') @@ -38,7 +46,7 @@ class Batman(object): cmd = ['batadv-vis', '-i', self.mesh_interface, '-f', 'json'] if self.alfred_sock: cmd.extend(['-u', self.alfred_sock]) - output = subprocess.check_output(cmd) + output = subprocess.check_output(cmd, env=self.environ) lines = output.splitlines() return self.vis_data_helper(lines) @@ -50,7 +58,7 @@ class Batman(object): cmd = ['batctl', '-m', self.mesh_interface, 'gwl', '-n'] if os.geteuid() > 0: cmd.insert(0, 'sudo') - output = subprocess.check_output(cmd) + output = subprocess.check_output(cmd, env=self.environ) output_utf8 = output.decode('utf-8') rows = output_utf8.splitlines() @@ -79,7 +87,7 @@ class Batman(object): cmd = ['batctl', '-m', self.mesh_interface, 'gw'] if os.geteuid() > 0: cmd.insert(0, 'sudo') - output = subprocess.check_output(cmd) + output = subprocess.check_output(cmd, env=self.environ) chunks = output.decode("utf-8").split() return chunks[0], chunks[3] if 3 in chunks else None From dccfb8c27a042320c35888441bf8f5cddc0cf0b6 Mon Sep 17 00:00:00 2001 From: Felix Oertel Date: Thu, 30 Apr 2015 17:37:19 +0200 Subject: [PATCH 81/97] [DOC] include dependencies --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index a3781ea..c1ac7dd 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,11 @@ For the script's regular execution add the following to the crontab: * * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da +# Dependencies + +- Python Package [Networkx](https://networkx.github.io/) +- [alfred-json](https://github.com/tcatm/alfred-json) + # Running as unprivileged user Some information collected by ffmap-backend requires access to specific system resources. 
From 1835abac7f78f490b5c65fba48e1beaf16e7c5a1 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 3 May 2015 13:11:22 +0200 Subject: [PATCH 82/97] basic nodeinfo validation (location) --- backend.py | 7 +++++-- lib/validate.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 lib/validate.py diff --git a/backend.py b/backend.py index 685c606..f638049 100755 --- a/backend.py +++ b/backend.py @@ -17,6 +17,7 @@ from lib.alfred import Alfred from lib.batman import Batman from lib.rrddb import RRD from lib.nodelist import export_nodelist +from lib.validate import validate_nodeinfos NODES_VERSION = 1 GRAPH_VERSION = 1 @@ -76,13 +77,15 @@ def main(params): # integrate alfred nodeinfo for alfred in alfred_instances: - nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), + nodeinfo = validate_nodeinfos(alfred.nodeinfo()) + nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, now, assume_online=True) # integrate static aliases data for aliases in params['aliases']: with open(aliases, 'r') as f: - nodes.import_nodeinfo(nodedb['nodes'], json.load(f), + nodeinfo = validate_nodeinfos(json.load(f)) + nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, now, assume_online=False) nodes.reset_statistics(nodedb['nodes']) diff --git a/lib/validate.py b/lib/validate.py new file mode 100644 index 0000000..eb6c11f --- /dev/null +++ b/lib/validate.py @@ -0,0 +1,19 @@ +import json + + +def validate_nodeinfos(nodeinfos): + result = [] + + for nodeinfo in nodeinfos: + if validate_nodeinfo(nodeinfo): + result.append(nodeinfo) + + return result + + +def validate_nodeinfo(nodeinfo): + if 'location' in nodeinfo: + if 'latitude' not in nodeinfo['location'] or 'longitude' not in nodeinfo['location']: + return False + + return True From 1141aa766f245ac70e59ad0c33bef51be93c3125 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 3 May 2015 13:16:26 +0200 Subject: [PATCH 83/97] nodes.py: catch ZeroDivisionError in statistics --- lib/nodes.py | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/nodes.py b/lib/nodes.py index 93d3835..4ff1fdc 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -59,7 +59,7 @@ def import_statistics(nodes, stats): node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) - except (KeyError, TypeError): + except (KeyError, TypeError, ZeroDivisionError): pass macs = build_mac_table(nodes) From 3caf00be0765a875af0c78c483a2a07db026655f Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 21:54:54 +0200 Subject: [PATCH 84/97] extract VPN interfaces from nodeinfo --- backend.py | 13 +++++++++++++ lib/nodes.py | 27 ++++++++++++++++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/backend.py b/backend.py index f638049..3ea2f5d 100755 --- a/backend.py +++ b/backend.py @@ -120,6 +120,19 @@ def main(params): if params['vpn']: graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) + def extract_tunnel(nodes): + macs = set() + for id, node in nodes.items(): + try: + for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]: + macs.add(mac) + except KeyError: + pass + + return macs + + graph.mark_vpn(batadv_graph, extract_tunnel(nodedb['nodes'])) + batadv_graph = graph.merge_nodes(batadv_graph) batadv_graph = graph.to_undirected(batadv_graph) diff --git a/lib/nodes.py b/lib/nodes.py index 4ff1fdc..d9543a8 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -97,12 +97,29 @@ def import_mesh_ifs_vis_data(nodes, vis_data): for v in mesh_nodes: node = v[0] - try: - mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) - except KeyError: - mesh_ifs = set() + ifs = set() - node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh_interfaces'])) + except KeyError: + pass + + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless'])) + except KeyError: + pass + + try: + ifs = 
ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel'])) + except KeyError: + pass + + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other'])) + except KeyError: + pass + + node['nodeinfo']['network']['mesh_interfaces'] = list(ifs | v[1]) def import_vis_clientcount(nodes, vis_data): From 8fd0b73418b954d0ccbe44f8ad0d02103464efb1 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 22:04:45 +0200 Subject: [PATCH 85/97] remove dependency on mesh_interfaces --- lib/nodes.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/lib/nodes.py b/lib/nodes.py index d9543a8..2d0769d 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -11,6 +11,25 @@ def build_mac_table(nodes): macs[mac] = node_id except KeyError: pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']: + macs[mac] = node_id + except KeyError: + pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']: + macs[mac] = node_id + except KeyError: + pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']: + macs[mac] = node_id + except KeyError: + pass + return macs From dafad3df4ca9224e46dac429dc797db14cdc38ac Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 22:16:44 +0200 Subject: [PATCH 86/97] update aliases.json_sample --- aliases.json_sample | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/aliases.json_sample b/aliases.json_sample index ca1eb6b..db35900 100644 --- a/aliases.json_sample +++ b/aliases.json_sample @@ -7,18 +7,30 @@ "latitude": 53.86 }, "network": { - "mesh_interfaces": [ - "00:25:86:e6:f1:bf" - ] + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "00:25:86:e6:f1:bf" + ] + } + } + } } }, { "node_id": "gw1", "hostname": "burgtor", "network": { - "mesh_interfaces": [ - "52:54:00:f3:62:d9" - ] + "mesh": { + "bat0": { + 
"interfaces": { + "tunnel": [ + "52:54:00:f3:62:d9" + ] + } + } + } } } ] From 71ced22b0f02c9be53a22647302a0205205beba1 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Fri, 15 May 2015 18:20:49 +0200 Subject: [PATCH 87/97] README.md: Extend dependencies --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c1ac7dd..809f393 100644 --- a/README.md +++ b/README.md @@ -20,8 +20,10 @@ For the script's regular execution add the following to the crontab: # Dependencies -- Python Package [Networkx](https://networkx.github.io/) +- Python 3 +- Python 3 Package [Networkx](https://networkx.github.io/) - [alfred-json](https://github.com/tcatm/alfred-json) +- rrdtool (if run with `--with-rrd`) # Running as unprivileged user From 11ef32178d0b3d4f6cd9ac28123278efedc41c79 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 16 May 2015 09:38:23 +0200 Subject: [PATCH 88/97] Added jq filter to convert new format to old format This makes it easily possible to continue using the legacy ffmap-d3 front end with the new backend while migrating. --- README.md | 15 +++++++++++++++ ffmap-d3.jq | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 ffmap-d3.jq diff --git a/README.md b/README.md index 809f393..c345bb3 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,21 @@ will prefix `sudo` where necessary. - online - gateway +## Old data format + +If you want to still use the old [ffmap-d3](https://github.com/ffnord/ffmap-d3) +front end, you can use the file `ffmap-d3.jq` to convert the new output to the +old one: + +``` +jq -n -f ffmap-d3.jq \ + --argfile nodes nodedb/nodes.json \ + --argfile graph nodedb/graph.json \ + > nodedb/ffmap-d3.json +``` + +Then point your ffmap-d3 instance to the `ffmap-d3.json` file. 
+ # Removing owner information If you'd like to redact information about the node owner from `nodes.json`, diff --git a/ffmap-d3.jq b/ffmap-d3.jq new file mode 100644 index 0000000..ebeece1 --- /dev/null +++ b/ffmap-d3.jq @@ -0,0 +1,52 @@ +{ + "meta": { + "timestamp": $nodes.timestamp + }, + "nodes": ( + $graph.batadv.nodes + | map( + if has("node_id") and .node_id + then ( + $nodes.nodes[.node_id] as $node + | { + "id": .id, + "uptime": $node.statistics.uptime, + "flags": ($node.flags + {"client": false}), + "name": $node.nodeinfo.hostname, + "clientcount": (if $node.statistics.clients >= 0 then $node.statistics.clients else 0 end), + "hardware": $node.nodeinfo.hardware.model, + "firmware": $node.nodeinfo.software.firmware.release, + "geo": (if $node.nodeinfo.location then [$node.nodeinfo.location.latitude, $node.nodeinfo.location.longitude] else null end), + #"lastseen": $node.lastseen, + "network": $node.nodeinfo.network + } + ) + else + { + "flags": {}, + "id": .id, + "geo": null, + "clientcount": 0 + } + end + ) + ), + "links": ( + $graph.batadv.links + | map( + $graph.batadv.nodes[.source].node_id as $source_id + | $graph.batadv.nodes[.target].node_id as $target_id + | select( + $source_id and $target_id and + ($nodes.nodes | (has($source_id) and has($target_id))) + ) + | { + "target": .target, + "source": .source, + "quality": "\(.tq), \(.tq)", + "id": ($source_id + "-" + $target_id), + "type": (if .vpn then "vpn" else null end) + } + ) + ) +} From 64dee31ebb030583b4dd374d39d62e2a53937458 Mon Sep 17 00:00:00 2001 From: stebifan Date: Sun, 7 Jun 2015 23:52:32 +0200 Subject: [PATCH 89/97] Added traffic to Statistics --- lib/nodes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/nodes.py b/lib/nodes.py index 2d0769d..3c0fa2f 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -92,6 +92,7 @@ def import_statistics(nodes, stats): add(node, stats, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) add(node, stats, 'rootfs_usage', 
['rootfs_usage']) + add(node, stats, 'traffic', ['traffic']) def import_mesh_ifs_vis_data(nodes, vis_data): From b0b6f8e0cd47e57bf220dca09991b049bfb3ff6e Mon Sep 17 00:00:00 2001 From: kantorkel Date: Mon, 30 Nov 2015 19:45:14 +0100 Subject: [PATCH 90/97] status srv01 --- alfred_merge.py | 42 ++++++++++++++++++++++++++++++++++++++++++ backend.py | 7 +++---- lib/nodelist.py | 3 +++ lib/nodes.py | 21 ++++++++++++++++----- 4 files changed, 64 insertions(+), 9 deletions(-) create mode 100755 alfred_merge.py diff --git a/alfred_merge.py b/alfred_merge.py new file mode 100755 index 0000000..aa623b2 --- /dev/null +++ b/alfred_merge.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +import subprocess +import json + +from collections import MutableMapping + +def rec_merge(d1, d2): + ''' + Update two dicts of dicts recursively, + if either mapping has leaves that are non-dicts, + the second's leaf overwrites the first's. + ''' + for k, v in d1.items(): # in Python 2, use .iteritems()! + if k in d2: + # this next check is the only difference! + if all(isinstance(e, MutableMapping) for e in (v, d2[k])): + d2[k] = rec_merge(v, d2[k]) + # we could further check types and merge as appropriate here. 
+ d3 = d1.copy() + d3.update(d2) + return d3 + + +class alfred_merge: + def __init__(self,request_data_type_1 = 158, request_data_type_2 = 159): + self.request_data_type_1 = request_data_type_1 + self.request_data_type_2 = request_data_type_2 + + def aliases(self): + output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"]) + alfred_data_1 = json.loads(output.decode("utf-8")) + output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"]) + alfred_data_2 = json.loads(output.decode("utf-8")) + + return json.dumps(rec_merge(alfred_data_1, alfred_data_2)) + + +if __name__ == "__main__": + ad = alfred_merge() + al = ad.aliases() + print(al) + diff --git a/backend.py b/backend.py index 3ea2f5d..b2f38b7 100755 --- a/backend.py +++ b/backend.py @@ -84,9 +84,9 @@ def main(params): # integrate static aliases data for aliases in params['aliases']: with open(aliases, 'r') as f: - nodeinfo = validate_nodeinfos(json.load(f)) - nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, - now, assume_online=False) +# nodeinfo = validate_nodeinfos(json.load(f)) + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), + now, assume_online=False, statics=True) nodes.reset_statistics(nodedb['nodes']) for alfred in alfred_instances: @@ -157,7 +157,6 @@ def main(params): rrd.update_database(nodedb['nodes']) rrd.update_images() - if __name__ == '__main__': parser = argparse.ArgumentParser() diff --git a/lib/nodelist.py b/lib/nodelist.py index 15aea63..a931dcf 100644 --- a/lib/nodelist.py +++ b/lib/nodelist.py @@ -13,6 +13,9 @@ def export_nodelist(now, nodedb): node_out["status"] = dict() node_out["status"]["online"] = node["flags"]["online"] + if "firstseen" in node: + node_out["status"]["firstcontact"] = node["firstseen"] + if "lastseen" in node: node_out["status"]["lastcontact"] = node["lastseen"] diff --git a/lib/nodes.py b/lib/nodes.py index 3c0fa2f..f517438 100644 --- 
a/lib/nodes.py +++ b/lib/nodes.py @@ -56,12 +56,23 @@ def mark_online(node, now): node['flags']['online'] = True -def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): +def overrideFields(dest, src, fields): + for field in fields: + if field in src: + dest[field] = src[field] + else: + dest.pop(field, None) + + +def import_nodeinfo(nodes, nodeinfos, now, assume_online=False, statics=False): for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos): - node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) - node['nodeinfo'] = nodeinfo - node['flags']['online'] = False - node['flags']['gateway'] = False + node = nodes.setdefault(nodeinfo['node_id'], {'flags': {'online': False, 'gateway': False}}) + + if statics: + node['nodeinfo'] = node.setdefault('nodeinfo', {}) + overrideFields(node['nodeinfo'], nodeinfo, ['hostname', 'location', 'node_id']) + else: + node['nodeinfo'] = nodeinfo if assume_online: mark_online(node, now) From 2043c88c03787cf7e17f7536981beafc2a30d735 Mon Sep 17 00:00:00 2001 From: kantorkel Date: Mon, 30 Nov 2015 20:00:15 +0100 Subject: [PATCH 91/97] fastd2aliases --- generate_aliases.py | 110 ++++++++++++++++++++++++++++++++++++++++++++ mkmap.sh | 6 +++ 2 files changed, 116 insertions(+) create mode 100755 generate_aliases.py create mode 100755 mkmap.sh diff --git a/generate_aliases.py b/generate_aliases.py new file mode 100755 index 0000000..98fe407 --- /dev/null +++ b/generate_aliases.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python2 + +from __future__ import print_function + +import json +import os +import sys + +if len(sys.argv) != 2: + print('usage: ' + sys.argv[0] + ' /path/to/peers') + sys.exit(1) + +peersDir = sys.argv[1] + +def normalizeMac(mac): + mac = mac.lower() + normalized = '' + + n = 0 + + for c in mac: + if c != ':': + if n > 0 and n % 2 == 0: + normalized = normalized + ':' + normalized = normalized + c + n += 1 + + return normalized + +def toAlias(peer): + alias = {} + + if not (peer.has_key('name') and 
peer.has_key('mac')): + return None + + name = peer['name'] + mac = peer['mac'] + + alias['node_id'] = mac.replace(':', '') + alias['hostname'] = name + + if peer.has_key('geo'): + geo = peer['geo'] + + location = {} + + if geo.has_key('lon'): location['longitude'] = geo['lon'] + if geo.has_key('lat'): location['latitude'] = geo['lat'] + + alias['location'] = location + + #alias['network'] = {} + #alias['network']['mesh_interfaces'] = [mac] + + return alias + +aliases = [] + +for filename in os.listdir(peersDir): + if len(filename) == 0 or filename[0] == '.': + continue + + isGateway = False + + absFilename = peersDir + '/' + filename + if os.path.isfile(absFilename): + peerFile = open(absFilename, 'r') + try: + peerLines = peerFile.readlines() + peer = {} + + for line in peerLines: + parts = line.split() + + if len(parts) > 2: + if parts[1] == 'Knotenname:': + peer['name'] = parts[2] + + elif parts[0] == 'remote': + isGateway = True + + elif parts[1] == 'MAC:': + peer['mac'] = normalizeMac(parts[2]) + + elif parts[1] == 'Koordinaten:' and len(parts) > 3: + try: + peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])} + + except ValueError: + print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr) + + elif len(parts) == 2 and parts[0] == 'key': + keyParts = parts[1].split('"') + if len(keyParts) > 1: + peer['vpn'] = keyParts[1].lower() + + if isGateway: + continue + + alias = toAlias(peer) + if alias: + aliases.append(alias) + + except Exception as e: + print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr) + + finally: + peerFile.close() + +print(json.dumps(aliases)) diff --git a/mkmap.sh b/mkmap.sh new file mode 100755 index 0000000..cf85166 --- /dev/null +++ b/mkmap.sh @@ -0,0 +1,6 @@ +#!/bin/bash +PATH='/opt/ffmap-backend/' +PEERS="/etc/fastd/ffhh-mesh-vpn/peers" + +python2 $PATH/generate_aliases.py $PEERS > $PATH/aliases.json +python3 $PATH/backend.py -d /var/www/meshviewer/data/ -a 
$PATH/aliases.json --vpn de:ad:be:ff:01:01 From 793486ff65fcdd7ab6b17dc3d5ee36778740126c Mon Sep 17 00:00:00 2001 From: kantorkel Date: Tue, 1 Dec 2015 19:29:57 +0100 Subject: [PATCH 92/97] funktionierendes setup --- alfred_merge.py | 4 +-- gateway.json | 86 +++++++++++++++++++++++++++++++++++++++++++++++++ lib/alfred.py | 2 +- mkmap.sh | 6 ++-- 4 files changed, 92 insertions(+), 6 deletions(-) create mode 100644 gateway.json diff --git a/alfred_merge.py b/alfred_merge.py index aa623b2..ee1143f 100755 --- a/alfred_merge.py +++ b/alfred_merge.py @@ -27,9 +27,9 @@ class alfred_merge: self.request_data_type_2 = request_data_type_2 def aliases(self): - output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"]) + output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"]) alfred_data_1 = json.loads(output.decode("utf-8")) - output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"]) + output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"]) alfred_data_2 = json.loads(output.decode("utf-8")) return json.dumps(rec_merge(alfred_data_1, alfred_data_2)) diff --git a/gateway.json b/gateway.json new file mode 100644 index 0000000..c7b0966 --- /dev/null +++ b/gateway.json @@ -0,0 +1,86 @@ +[ + { + "node_id": "deadbeef0101", + "hostname": "gw01", + "network": { + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:01:01", + "de:ad:bf:ff:01:01" + ] + } + } + } + } + }, + { + "node_id": "deadbeef0505", + "hostname": "gw02", + "network": { + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:05:05", + "de:ad:be:ff:05:06", + "de:ad:bf:ff:05:05" + ] + } + } + } + } + }, + { + "node_id": "deadbeef0303", + "hostname": "gw03", + "network": { + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:03:03", 
+ "de:ad:bf:ff:03:03" + ] + } + } + } + } + }, + { + "node_id": "deadbfff2222", + "hostname": "gw05", + "network": { + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:22:22", + "de:ad:be:ff:22:23", + "de:ad:bf:ff:22:22" + ] + } + } + } + } + }, + { + "node_id": "deadbeef8888", + "hostname": "gw08", + "network": { + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:88:88", + "de:ad:be:ff:88:89", + "de:ad:bf:ff:88:88" + ] + } + } + } + } + } +] + diff --git a/lib/alfred.py b/lib/alfred.py index 4353874..531eaea 100644 --- a/lib/alfred.py +++ b/lib/alfred.py @@ -13,7 +13,7 @@ class Alfred(object): raise RuntimeError('alfred: invalid unix socket path given') def _fetch(self, data_type): - cmd = ['alfred-json', + cmd = ['/usr/local/bin/alfred-json', '-z', '-f', 'json', '-r', str(data_type)] diff --git a/mkmap.sh b/mkmap.sh index cf85166..b49f9a5 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -1,6 +1,6 @@ #!/bin/bash -PATH='/opt/ffmap-backend/' +FFMAPPATH='/opt/ffmap-backend/' PEERS="/etc/fastd/ffhh-mesh-vpn/peers" -python2 $PATH/generate_aliases.py $PEERS > $PATH/aliases.json -python3 $PATH/backend.py -d /var/www/meshviewer/data/ -a $PATH/aliases.json --vpn de:ad:be:ff:01:01 +python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json +python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 From 4b5bad262c11b1410a88f82f65dd68f19a7f2e46 Mon Sep 17 00:00:00 2001 From: kantorkel Date: Tue, 1 Dec 2015 21:44:45 +0100 Subject: [PATCH 93/97] =?UTF-8?q?node=5Fnumber.py=20fuer=20meta.hamburg.fr?= =?UTF-8?q?eifunk.net=20hinzugef=C3=BCgt?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- node_number.py | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 node_number.py diff --git a/node_number.py b/node_number.py new file mode 100644 index 0000000..b965a01 --- /dev/null +++ b/node_number.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +#Bibliotheken importieren +import time +import datetime +import json + +#Datei oeffnen +f = open('/var/www/meshviewer/nodelist.json') + +#JSON einlesen +data = json.load(f) + +#Nodes attribut aussortieren +nodes = data['nodes'] + +#Zaehler mit Wert 0 anlegen +num_nodes = 0 + +#Fuer jeden Knoten in nodes +for node in nodes: + #Status Attribut aussortieren + status = node['status'] + + #Wenn der Status online entaehlt, hochzaehlen + if status['online']: + num_nodes += 1 + +#Zeit holen +thetime = datetime.datetime.now().isoformat() + +ffhh = None + +#Freifunk API-Datei einladen und JSON lesen +with open('/var/www/meta/ffhh.json', 'r') as fp: + ffhh = json.load(fp) + +#Attribute Zeitstempel und Knotenanzahl setzen +ffhh['state']['lastchange'] = thetime +ffhh['state']['nodes'] = num_nodes + +#Freifunk API-Datein mit geaenderten werten schreiben +with open('/var/www/meta/ffhh.json', 'w') as fp: + json.dump(ffhh, fp, indent=2, separators=(',', ': ')) From 0c0fa78200e76d009980a03fd3237ecb4611ece9 Mon Sep 17 00:00:00 2001 From: 4ndr3 <4ndr3@users.noreply.github.com> Date: Fri, 16 Jun 2017 23:35:42 +0200 Subject: [PATCH 94/97] =?UTF-8?q?S=C3=BCd-Dom=C3=A4ne=20hinzugef=C3=BCgt?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Von nodelist.json auf nodes.json gewechselt - nodes.json für Süd Domäne hinzugefügt - Liest nodes.json's nun aus URLs, da sie auf verschiedenen servern liegen - Durchsucht die nodes.json's nur noch als Strings, statt JSON auszuwerten --- node_number.py | 23 ++++++++--------------- 1 file changed, 8 insertions(+), 15 deletions(-) 
diff --git a/node_number.py b/node_number.py index b965a01..765dc0f 100644 --- a/node_number.py +++ b/node_number.py @@ -3,27 +3,20 @@ import time import datetime import json +import urllib2 #Datei oeffnen -f = open('/var/www/meshviewer/nodelist.json') - -#JSON einlesen -data = json.load(f) - -#Nodes attribut aussortieren -nodes = data['nodes'] +Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json') +Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json') #Zaehler mit Wert 0 anlegen num_nodes = 0 -#Fuer jeden Knoten in nodes -for node in nodes: - #Status Attribut aussortieren - status = node['status'] - - #Wenn der Status online entaehlt, hochzaehlen - if status['online']: - num_nodes += 1 +Text = Datei.read() +n = Text.count('"online": true') +Text = Datei_Sued.read() +n_Sued = Text.count('"online":true') +num_nodes = n + n_Sued #Zeit holen thetime = datetime.datetime.now().isoformat() From b343748fe8bc02632caded4039b79030f5f7909a Mon Sep 17 00:00:00 2001 From: 4ndr3 <4ndr3@users.noreply.github.com> Date: Fri, 16 Jun 2017 23:41:11 +0200 Subject: [PATCH 95/97] code vereinfacht --- node_number.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/node_number.py b/node_number.py index 765dc0f..20cd00e 100644 --- a/node_number.py +++ b/node_number.py @@ -9,14 +9,10 @@ import urllib2 Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json') Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json') -#Zaehler mit Wert 0 anlegen -num_nodes = 0 - Text = Datei.read() -n = Text.count('"online": true') +Knotenzahl = Text.count('"online": true') Text = Datei_Sued.read() -n_Sued = Text.count('"online":true') -num_nodes = n + n_Sued +Knotenzahl = Knotenzahl + Text.count('"online":true') #Zeit holen thetime = datetime.datetime.now().isoformat() @@ -29,7 +25,7 @@ with open('/var/www/meta/ffhh.json', 'r') as fp: #Attribute Zeitstempel und Knotenanzahl setzen 
ffhh['state']['lastchange'] = thetime -ffhh['state']['nodes'] = num_nodes +ffhh['state']['nodes'] = Knotenzahl #Freifunk API-Datein mit geaenderten werten schreiben with open('/var/www/meta/ffhh.json', 'w') as fp: From 209271cbf77e1127056abda96379088db339f3e6 Mon Sep 17 00:00:00 2001 From: Alexander Dietrich Date: Mon, 10 Jul 2017 21:48:25 +0200 Subject: [PATCH 96/97] Use tempfiles when updating JSON --- backend.py | 15 ++++++++++++--- mkmap.sh | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/backend.py b/backend.py index b2f38b7..74d3e52 100755 --- a/backend.py +++ b/backend.py @@ -27,8 +27,13 @@ def main(params): os.makedirs(params['dest_dir'], exist_ok=True) nodes_fn = os.path.join(params['dest_dir'], 'nodes.json') + tmp_nodes_fn = os.path.join(params['dest_dir'], 'nodes.json.tmp') + graph_fn = os.path.join(params['dest_dir'], 'graph.json') + tmp_graph_fn = os.path.join(params['dest_dir'], 'graph.json.tmp') + nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json') + tmp_nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json.tmp') now = datetime.utcnow().replace(microsecond=0) @@ -137,18 +142,22 @@ def main(params): batadv_graph = graph.to_undirected(batadv_graph) # write processed data to dest dir - with open(nodes_fn, 'w') as f: + with open(tmp_nodes_fn, 'w') as f: json.dump(nodedb, f) graph_out = {'batadv': json_graph.node_link_data(batadv_graph), 'version': GRAPH_VERSION} - with open(graph_fn, 'w') as f: + with open(tmp_graph_fn, 'w') as f: json.dump(graph_out, f) - with open(nodelist_fn, 'w') as f: + with open(tmp_nodelist_fn, 'w') as f: json.dump(export_nodelist(now, nodedb), f) + os.rename(tmp_nodes_fn, nodes_fn) + os.rename(tmp_graph_fn, graph_fn) + os.rename(tmp_nodelist_fn, nodelist_fn) + # optional rrd graphs (trigger with --rrd) if params['rrd']: script_directory = os.path.dirname(os.path.realpath(__file__)) diff --git a/mkmap.sh b/mkmap.sh index b49f9a5..2d9b0f6 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -3,4 +3,4 @@ 
FFMAPPATH='/opt/ffmap-backend/' PEERS="/etc/fastd/ffhh-mesh-vpn/peers" python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json -python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 +python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 From 325f6cd1f4950174a73a83bdca571cff3d47c687 Mon Sep 17 00:00:00 2001 From: Alexander Dietrich Date: Mon, 10 Jul 2017 21:50:14 +0200 Subject: [PATCH 97/97] Support for "bat-ffhh" interface, add generate_aliases_v2.py --- backend.py | 3 +- gateway.json | 89 ++++++-------------------------- generate_aliases_v2.py | 112 +++++++++++++++++++++++++++++++++++++++++ lib/batman.py | 4 -- lib/graph.py | 1 - lib/nodes.py | 15 +++++- mkmap.sh | 3 +- node_number.py | 0 8 files changed, 146 insertions(+), 81 deletions(-) create mode 100755 generate_aliases_v2.py mode change 100644 => 100755 node_number.py diff --git a/backend.py b/backend.py index 74d3e52..8b9d9a6 100755 --- a/backend.py +++ b/backend.py @@ -102,7 +102,6 @@ def main(params): for batman in 
batman_instances: vd = batman.vis_data() gwl = batman.gateway_list() - mesh_info.append((vd, gwl)) # update nodedb from batman-adv data @@ -131,6 +130,8 @@ def main(params): try: for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]: macs.add(mac) + for mac in node["nodeinfo"]["network"]["mesh"]["bat-ffhh"]["interfaces"]["tunnel"]: + macs.add(mac) except KeyError: pass diff --git a/gateway.json b/gateway.json index c7b0966..4a72859 100644 --- a/gateway.json +++ b/gateway.json @@ -1,86 +1,29 @@ [ { - "node_id": "deadbeef0101", - "hostname": "gw01", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:01:01", - "de:ad:bf:ff:01:01" - ] - } - } - } - } + "node_id": "deadbfff0101", + "hostname": "gw01" }, { "node_id": "deadbeef0505", - "hostname": "gw02", + "hostname": "gw02.hamburg.freifunk.net", "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:05:05", - "de:ad:be:ff:05:06", - "de:ad:bf:ff:05:05" - ] - } + "mac": "de:ad:be:ef:05:05", + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:05:05", + "de:ad:be:fc:05:05", + "de:ad:bf:ff:05:05" + ] + } + } } - } } }, { - "node_id": "deadbeef0303", - "hostname": "gw03", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:03:03", - "de:ad:bf:ff:03:03" - ] - } - } - } - } - }, - { - "node_id": "deadbfff2222", - "hostname": "gw05", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:22:22", - "de:ad:be:ff:22:23", - "de:ad:bf:ff:22:22" - ] - } - } - } - } - }, - { - "node_id": "deadbeef8888", - "hostname": "gw08", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:88:88", - "de:ad:be:ff:88:89", - "de:ad:bf:ff:88:88" - ] - } - } - } - } + "node_id": "00163efb9d8d", + "hostname": "gw03" } ] diff --git a/generate_aliases_v2.py b/generate_aliases_v2.py new file mode 100755 index 0000000..7a04c7c --- /dev/null +++ 
b/generate_aliases_v2.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python2 + +from __future__ import print_function + +import json +import os +import sys + +if len(sys.argv) != 2: + print('usage: ' + sys.argv[0] + ' /path/to/peers') + sys.exit(1) + +peersDir = sys.argv[1] + +def normalizeMac(mac): + mac = mac.lower() + normalized = '' + + n = 0 + + for c in mac: + if c != ':': + if n > 0 and n % 2 == 0: + normalized = normalized + ':' + normalized = normalized + c + n += 1 + + return normalized + +def toAlias(peer): + alias = {} + + if not (peer.has_key('name') and peer.has_key('mac')): + return None + + name = peer['name'] + mac = peer['mac'] + + alias['node_id'] = mac.replace(':', '') + alias['hostname'] = name + + if peer.has_key('geo'): + geo = peer['geo'] + + location = {} + + if geo.has_key('lon'): location['longitude'] = geo['lon'] + if geo.has_key('lat'): location['latitude'] = geo['lat'] + + alias['location'] = location + + #alias['network'] = {} + #alias['network']['mesh_interfaces'] = [mac] + + return {'nodeinfo':alias} + +aliases = {} + +for filename in os.listdir(peersDir): + if len(filename) == 0 or filename[0] == '.': + continue + + isGateway = False + + absFilename = peersDir + '/' + filename + if os.path.isfile(absFilename): + peerFile = open(absFilename, 'r') + try: + peerLines = peerFile.readlines() + peer = {} + + for line in peerLines: + parts = line.split() + + if len(parts) > 2: + if parts[1] == 'Knotenname:': + peer['name'] = parts[2] + + elif parts[0] == 'remote': + isGateway = True + + elif parts[1] == 'MAC:': + peer['mac'] = normalizeMac(parts[2]) + + elif parts[1] == 'Koordinaten:' and len(parts) > 3: + try: + peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])} + + except ValueError: + print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr) + + elif len(parts) == 2 and parts[0] == 'key': + keyParts = parts[1].split('"') + if len(keyParts) > 1: + peer['vpn'] = keyParts[1].lower() + + if isGateway: + 
continue + + alias = toAlias(peer) + if alias: + tmpid = alias['nodeinfo']['node_id'] +# alias['nodeinfo'].pop('node_id') + aliases[tmpid] = alias + + except Exception as e: + print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr) + + finally: + peerFile.close() + +print(json.dumps(aliases)) diff --git a/lib/batman.py b/lib/batman.py index ee0a0fe..ee0198b 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -96,7 +96,3 @@ if __name__ == "__main__": bc = Batman() vd = bc.vis_data() gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) diff --git a/lib/graph.py b/lib/graph.py index db1259e..2833e6e 100644 --- a/lib/graph.py +++ b/lib/graph.py @@ -25,7 +25,6 @@ def mark_vpn(graph, vpn_macs): components = map(frozenset, nx.weakly_connected_components(graph)) components = filter(vpn_macs.intersection, components) nodes = reduce(lambda a, b: a | b, components, set()) - for node in nodes: for k, v in graph[node].items(): v['vpn'] = True diff --git a/lib/nodes.py b/lib/nodes.py index f517438..86ce5b9 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -6,6 +6,10 @@ from functools import reduce def build_mac_table(nodes): macs = dict() for node_id, node in nodes.items(): + try: + macs[node['network']['mac']] = node_id + except KeyError: + pass try: for mac in node['nodeinfo']['network']['mesh_interfaces']: macs[mac] = node_id @@ -23,6 +27,11 @@ def build_mac_table(nodes): macs[mac] = node_id except KeyError: pass + try: + for mac in node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']: + macs[mac] = node_id + except KeyError: + pass try: for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']: @@ -145,6 +154,11 @@ def import_mesh_ifs_vis_data(nodes, vis_data): except KeyError: pass + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel'])) + except KeyError: + pass + try: ifs = 
ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other'])) except KeyError: @@ -166,7 +180,6 @@ def import_vis_clientcount(nodes, vis_data): def mark_gateways(nodes, gateways): macs = build_mac_table(nodes) gateways = filter(lambda d: d in macs, gateways) - for node in map(lambda d: nodes[macs[d]], gateways): node['flags']['gateway'] = True diff --git a/mkmap.sh b/mkmap.sh index 2d9b0f6..9423943 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -3,4 +3,5 @@ FFMAPPATH='/opt/ffmap-backend/' PEERS="/etc/fastd/ffhh-mesh-vpn/peers" python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json -python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 +#python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 --vpn 00:16:3e:fb:9d:8d --vpn 00:16:3e:fb:9d:9d +python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases 
$FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 de:ad:be:ff:05:05 de:ad:be:ff:05:06 de:ad:be:ff:03:03 de:ad:be:ff:22:22 de:ad:be:ff:22:23 de:ad:be:ff:88:88 de:ad:be:ff:88:89 de:ad:bf:ff:88:88 de:ad:bf:ff:22:22 de:ad:bf:ff:03:03 de:ad:bf:ff:05:05 de:ad:bf:ff:01:01 de:ad:be:fc:03:03 00:16:3e:53:75:0d de:ad:be:fc:05:05 de:ad:be:fc:01:01 de:ad:be:ef:03:03 de:ad:be:ef:01:01 de:ad:be:ef:05:05 00:16:3e:fb:9d:8d 00:16:3e:fb:9d:9d diff --git a/node_number.py b/node_number.py old mode 100644 new mode 100755