From 820da07451b29880e02243d22d12dd1b129db9ed Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Fri, 17 Apr 2015 19:15:29 +0200 Subject: [PATCH 01/18] remove RRD support --- backend.py | 12 -- lib/GlobalRRD.py | 40 ------ lib/NodeRRD.py | 61 --------- lib/RRD.py | 346 ----------------------------------------------- lib/rrddb.py | 54 -------- 5 files changed, 513 deletions(-) delete mode 100644 lib/GlobalRRD.py delete mode 100644 lib/NodeRRD.py delete mode 100644 lib/RRD.py delete mode 100644 lib/rrddb.py diff --git a/backend.py b/backend.py index 685c606..b570ffa 100755 --- a/backend.py +++ b/backend.py @@ -15,7 +15,6 @@ from networkx.readwrite import json_graph from lib import graph, nodes from lib.alfred import Alfred from lib.batman import Batman -from lib.rrddb import RRD from lib.nodelist import export_nodelist NODES_VERSION = 1 @@ -133,13 +132,6 @@ def main(params): with open(nodelist_fn, 'w') as f: json.dump(export_nodelist(now, nodedb), f) - # optional rrd graphs (trigger with --rrd) - if params['rrd']: - script_directory = os.path.dirname(os.path.realpath(__file__)) - rrd = RRD(os.path.join(script_directory, 'nodedb'), - os.path.join(params['dest_dir'], 'nodes')) - rrd.update_database(nodedb['nodes']) - rrd.update_images() if __name__ == '__main__': @@ -160,10 +152,6 @@ if __name__ == '__main__': help='Assume MAC addresses are part of vpn') parser.add_argument('-p', '--prune', metavar='DAYS', type=int, help='forget nodes offline for at least DAYS') - parser.add_argument('--with-rrd', dest='rrd', action='store_true', - default=False, - help='enable the rendering of RRD graphs (cpu ' - 'intensive)') options = vars(parser.parse_args()) main(options) diff --git a/lib/GlobalRRD.py b/lib/GlobalRRD.py deleted file mode 100644 index 47235f2..0000000 --- a/lib/GlobalRRD.py +++ /dev/null @@ -1,40 +0,0 @@ -import os -import subprocess - -from lib.RRD import DS, RRA, RRD - - -class GlobalRRD(RRD): - ds_list = [ - # Number of nodes available - DS('nodes', 'GAUGE', 120, 0, float('NaN')), - # Number of client available - DS('clients', 'GAUGE', 120, 0, float('NaN')), - ] - rra_list = [ - # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 1, 120), - # 31 days of 1 hour samples - RRA('AVERAGE', 0.5, 60, 744), - # ~5 years of 1 day samples - RRA('AVERAGE', 0.5, 1440, 1780), - ] - - def __init__(self, directory): - super().__init__(os.path.join(directory, "nodes.rrd")) - self.ensure_sanity(self.ds_list, self.rra_list, step=60) - - # TODO: fix this, python does not support function overloading - def update(self, node_count, client_count): - super().update({'nodes': node_count, 'clients': client_count}) - - def graph(self, filename, timeframe): - args = ["rrdtool", 'graph', filename, - '-s', '-' + timeframe, - '-w', '800', - '-h' '400', - 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', - 'LINE1:nodes#F00:nodes\\l', - 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'LINE2:clients#00F:clients'] - subprocess.check_output(args) diff --git a/lib/NodeRRD.py b/lib/NodeRRD.py deleted file mode 100644 index afabe6f..0000000 --- a/lib/NodeRRD.py +++ /dev/null @@ -1,61 +0,0 @@ -import os -import subprocess - -from lib.RRD import DS, RRA, RRD - - -class NodeRRD(RRD): - ds_list = [ - DS('upstate', 'GAUGE', 120, 0, 1), - DS('clients', 'GAUGE', 120, 0, float('NaN')), - ] - rra_list = [ - # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 1, 120), - # 5 days of 5 minute samples - RRA('AVERAGE', 0.5, 5, 1440), - # 30 days of 1 hour samples - RRA('AVERAGE', 0.5, 60, 720), - # 1 year of 12 hour samples - 
RRA('AVERAGE', 0.5, 720, 730), - ] - - def __init__(self, filename, node=None): - """ - Create a new RRD for a given node. - - If the RRD isn't supposed to be updated, the node can be omitted. - """ - self.node = node - super().__init__(filename) - self.ensure_sanity(self.ds_list, self.rra_list, step=60) - - @property - def imagename(self): - return "{basename}.png".format( - basename=os.path.basename(self.filename).rsplit('.', 2)[0]) - - # TODO: fix this, python does not support function overloading - def update(self): - super().update({'upstate': int(self.node['flags']['online']), - 'clients': self.node['statistics']['clients']}) - - def graph(self, directory, timeframe): - """ - Create a graph in the given directory. The file will be named - basename.png if the RRD file is named basename.rrd - """ - args = ['rrdtool', 'graph', os.path.join(directory, self.imagename), - '-s', '-' + timeframe, - '-w', '800', - '-h', '400', - '-l', '0', - '-y', '1:1', - 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'VDEF:maxc=clients,MAXIMUM', - 'CDEF:c=0,clients,ADDNAN', - 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*', - 'AREA:c#0F0:up\\l', - 'AREA:d#F00:down\\l', - 'LINE1:c#00F:clients connected\\l'] - subprocess.check_output(args) diff --git a/lib/RRD.py b/lib/RRD.py deleted file mode 100644 index 4e925a7..0000000 --- a/lib/RRD.py +++ /dev/null @@ -1,346 +0,0 @@ -import subprocess -import re -import os -from operator import xor, eq -from functools import reduce -from itertools import starmap -import math - - -class RRDIncompatibleException(Exception): - """ - Is raised when an RRD doesn't have the desired definition and cannot be - upgraded to it. - """ - pass - - -class RRDOutdatedException(Exception): - """ - Is raised when an RRD doesn't have the desired definition, but can be - upgraded to it. - """ - pass - -if not hasattr(__builtins__, "FileNotFoundError"): - class FileNotFoundError(Exception): - pass - - -class RRD(object): - """ - An RRD is a Round Robin Database, a database which forgets old data and - aggregates multiple records into new ones. - - It contains multiple Data Sources (DS) which can be thought of as columns, - and Round Robin Archives (RRA) which can be thought of as tables with the - DS as columns and time-dependant rows. - """ - - # rra[2].cdp_prep[0].value = 1,8583033333e+03 - _info_regex = re.compile(""" - (?P
[a-z_]+) - \[ (?P[a-zA-Z0-9_]+) \] - \. - | - (?P[a-z_]+) - \s*=\s* - "? (?P.*?) "? - $""", re.X) - _cached_info = None - - def _exec_rrdtool(self, cmd, *args, **kwargs): - pargs = ["rrdtool", cmd, self.filename] - for k, v in kwargs.items(): - pargs.extend(["--" + k, str(v)]) - pargs.extend(args) - subprocess.check_output(pargs) - - def __init__(self, filename): - self.filename = filename - - def ensure_sanity(self, ds_list, rra_list, **kwargs): - """ - Create or upgrade the RRD file if necessary to contain all DS in - ds_list. If it needs to be created, the RRAs in rra_list and any kwargs - will be used for creation. Note that RRAs and options of an existing - database are NOT modified! - """ - try: - self.check_sanity(ds_list) - except FileNotFoundError: - self.create(ds_list, rra_list, **kwargs) - except RRDOutdatedException: - self.upgrade(ds_list) - - def check_sanity(self, ds_list=()): - """ - Check if the RRD file exists and contains (at least) the DS listed in - ds_list. - """ - if not os.path.exists(self.filename): - raise FileNotFoundError(self.filename) - info = self.info() - if set(ds_list) - set(info['ds'].values()) != set(): - for ds in ds_list: - if ds.name in info['ds'] and\ - ds.type != info['ds'][ds.name].type: - raise RRDIncompatibleException( - "{} is {} but should be {}".format( - ds.name, ds.type, info['ds'][ds.name].type)) - else: - raise RRDOutdatedException() - - def upgrade(self, dss): - """ - Upgrade the DS definitions (!) of this RRD. - (To update its values, use update()) - - The list dss contains DSS objects to be updated or added. The - parameters of a DS can be changed, but not its type. New DS are always - added at the end in the order of their appearance in the list. - - This is done internally via an rrdtool dump -> rrdtool restore and - modifying the dump on the fly. - """ - info = self.info() - new_ds = list(info['ds'].values()) - new_ds.sort(key=lambda ds: ds.index) - for ds in dss: - if ds.name in info['ds']: - old_ds = info['ds'][ds.name] - if info['ds'][ds.name].type != ds.type: - raise RuntimeError( - "Cannot convert existing DS '{}'" - "from type '{}' to '{}'".format( - ds.name, old_ds.type, ds.type)) - ds.index = old_ds.index - new_ds[ds.index] = ds - else: - ds.index = len(new_ds) - new_ds.append(ds) - added_ds_num = len(new_ds) - len(info['ds']) - - dump = subprocess.Popen( - ["rrdtool", "dump", self.filename], - stdout=subprocess.PIPE) - - restore = subprocess.Popen( - ["rrdtool", "restore", "-", self.filename + ".new"], - stdin=subprocess.PIPE) - echo = True - ds_definitions = True - for line in dump.stdout: - if ds_definitions and b'' in line: - echo = False - if b'' in line: - ds_definitions = False - for ds in new_ds: - restore.stdin.write(bytes(""" - - %s - %s - %i - %s - %s - - - %s - %s - %i - - """ % (ds.name, - ds.type, - ds.args[0], - ds.args[1], - ds.args[2], - ds.last_ds, - ds.value, - ds.unknown_sec), "utf-8")) - - if b'' in line: - restore.stdin.write(added_ds_num * b""" - - NaN - NaN - NaN - 0 - - """) - - # echoing of input line - if echo: - restore.stdin.write( - line.replace( - b'', - (added_ds_num * b'NaN') + b'' - ) - ) - - if ds_definitions and b'' in line: - echo = True - dump.stdout.close() - restore.stdin.close() - dump.wait() - restore.wait() - - os.rename(self.filename + ".new", self.filename) - self._cached_info = None - - def create(self, ds_list, rra_list, **kwargs): - """ - Create a new RRD file with the specified list of RRAs and DSs. - - Any kwargs are passed as --key=value to rrdtool create. 
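For reference, the following sketch (not part of the patch) shows the command line that `create()` assembles for `GlobalRRD` via `_exec_rrdtool()`. Note that `_exec_rrdtool()` actually passes each kwarg as a separate `--key value` pair rather than `--key=value`, and that `create()` emits the RRA definitions before the DS definitions; the file path is illustrative.

```
# Hedged sketch: reconstructs the argv RRD.create() would hand to
# subprocess.check_output() for GlobalRRD with step=60. The strings follow
# DS.__str__/RRA.__str__, where NaN bounds are rendered as 'U'.
rra_list = [
    "RRA:AVERAGE:0.5:1:120",      # 2 hours of 1 minute samples
    "RRA:AVERAGE:0.5:60:744",     # 31 days of 1 hour samples
    "RRA:AVERAGE:0.5:1440:1780",  # ~5 years of 1 day samples
]
ds_list = [
    "DS:nodes:GAUGE:120:0:U",     # DS('nodes', 'GAUGE', 120, 0, NaN)
    "DS:clients:GAUGE:120:0:U",   # DS('clients', 'GAUGE', 120, 0, NaN)
]
# _exec_rrdtool() builds ["rrdtool", cmd, filename], appends the kwargs,
# then the remaining args
argv = ["rrdtool", "create", "nodedb/nodes.rrd", "--step", "60"]
argv += rra_list + ds_list
print(" ".join(argv))
```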
- """ - self._exec_rrdtool( - "create", - *map(str, rra_list + ds_list), - **kwargs - ) - self._cached_info = None - - def update(self, V): - """ - Update the RRD with new values V. - - V can be either list or dict: - * If it is a dict, its keys must be DS names in the RRD and it is - ensured that the correct DS are updated with the correct values, by - passing a "template" to rrdtool update (see man rrdupdate). - * If it is a list, no template is generated and the order of the - values in V must be the same as that of the DS in the RRD. - """ - try: - args = ['N:' + ':'.join(map(str, V.values()))] - kwargs = {'template': ':'.join(V.keys())} - except AttributeError: - args = ['N:' + ':'.join(map(str, V))] - kwargs = {} - self._exec_rrdtool("update", *args, **kwargs) - self._cached_info = None - - def info(self): - """ - Return a dictionary with information about the RRD. - - See `man rrdinfo` for more details. - """ - if self._cached_info: - return self._cached_info - env = os.environ.copy() - env["LC_ALL"] = "C" - proc = subprocess.Popen( - ["rrdtool", "info", self.filename], - stdout=subprocess.PIPE, - env=env - ) - out, err = proc.communicate() - out = out.decode() - info = {} - for line in out.splitlines(): - base = info - for match in self._info_regex.finditer(line): - section, key, name, value = match.group( - "section", "key", "name", "value") - if section and key: - try: - key = int(key) - except ValueError: - pass - if section not in base: - base[section] = {} - if key not in base[section]: - base[section][key] = {} - base = base[section][key] - if name and value: - try: - base[name] = int(value) - except ValueError: - try: - base[name] = float(value) - except: - base[name] = value - dss = {} - for name, ds in info['ds'].items(): - ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'], - ds['min'], ds['max']) - ds_obj.index = ds['index'] - ds_obj.last_ds = ds['last_ds'] - ds_obj.value = ds['value'] - ds_obj.unknown_sec = ds['unknown_sec'] - dss[name] = ds_obj - info['ds'] = dss - rras = [] - for rra in info['rra'].values(): - rras.append(RRA(rra['cf'], rra['xff'], - rra['pdp_per_row'], rra['rows'])) - info['rra'] = rras - self._cached_info = info - return info - - -class DS(object): - """ - DS stands for Data Source and represents one line of data points in a Round - Robin Database (RRD). 
- """ - name = None - type = None - args = [] - index = -1 - last_ds = 'U' - value = 0 - unknown_sec = 0 - - def __init__(self, name, dst, *args): - self.name = name - self.type = dst - self.args = args - - def __str__(self): - return "DS:%s:%s:%s" % ( - self.name, - self.type, - ":".join(map(str, self._nan_to_u_args())) - ) - - def __repr__(self): - return "%s(%r, %r, %s)" % ( - self.__class__.__name__, - self.name, - self.type, - ", ".join(map(repr, self.args)) - ) - - def __eq__(self, other): - return all(starmap(eq, zip(self.compare_keys(), other.compare_keys()))) - - def __hash__(self): - return reduce(xor, map(hash, self.compare_keys())) - - def _nan_to_u_args(self): - return tuple( - 'U' if type(arg) is float and math.isnan(arg) - else arg - for arg in self.args - ) - - def compare_keys(self): - return self.name, self.type, self._nan_to_u_args() - - -class RRA(object): - def __init__(self, cf, *args): - self.cf = cf - self.args = args - - def __str__(self): - return "RRA:%s:%s" % (self.cf, ":".join(map(str, self.args))) - - def __repr__(self): - return "%s(%r, %s)" % ( - self.__class__.__name__, - self.cf, - ", ".join(map(repr, self.args)) - ) diff --git a/lib/rrddb.py b/lib/rrddb.py deleted file mode 100644 index f1678f5..0000000 --- a/lib/rrddb.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 -import time -import os - -from lib.GlobalRRD import GlobalRRD -from lib.NodeRRD import NodeRRD - - -class RRD(object): - def __init__(self, - database_directory, - image_path, - display_time_global="7d", - display_time_node="1d"): - - self.dbPath = database_directory - self.globalDb = GlobalRRD(self.dbPath) - self.imagePath = image_path - self.displayTimeGlobal = display_time_global - self.displayTimeNode = display_time_node - - self.currentTimeInt = (int(time.time()) / 60) * 60 - self.currentTime = str(self.currentTimeInt) - - try: - os.stat(self.imagePath) - except OSError: - os.mkdir(self.imagePath) - - def update_database(self, nodes): - online_nodes = dict(filter( - lambda d: d[1]['flags']['online'], nodes.items())) - client_count = sum(map( - lambda d: d['statistics']['clients'], online_nodes.values())) - - self.globalDb.update(len(online_nodes), client_count) - for node_id, node in online_nodes.items(): - rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node) - rrd.update() - - def update_images(self): - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), - self.displayTimeGlobal) - - nodedb_files = os.listdir(self.dbPath) - - for file_name in nodedb_files: - if not os.path.isfile(os.path.join(self.dbPath, file_name)): - continue - - node_name = os.path.basename(file_name).split('.') - if node_name[1] == 'rrd' and not node_name[0] == "nodes": - rrd = NodeRRD(os.path.join(self.dbPath, file_name)) - rrd.graph(self.imagePath, self.displayTimeNode) From dccfb8c27a042320c35888441bf8f5cddc0cf0b6 Mon Sep 17 00:00:00 2001 From: Felix Oertel Date: Thu, 30 Apr 2015 17:37:19 +0200 Subject: [PATCH 02/18] [DOC] include dependencies --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index a3781ea..c1ac7dd 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,11 @@ For the script's regular execution add the following to the crontab: * * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da +# Dependencies + +- Python Package [Networkx](https://networkx.github.io/) +- [alfred-json](https://github.com/tcatm/alfred-json) + # Running as unprivileged user Some information collected 
by ffmap-backend requires access to specific system resources. From 1835abac7f78f490b5c65fba48e1beaf16e7c5a1 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 3 May 2015 13:11:22 +0200 Subject: [PATCH 03/18] basic nodeinfo validation (location) --- backend.py | 7 +++++-- lib/validate.py | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 lib/validate.py diff --git a/backend.py b/backend.py index 685c606..f638049 100755 --- a/backend.py +++ b/backend.py @@ -17,6 +17,7 @@ from lib.alfred import Alfred from lib.batman import Batman from lib.rrddb import RRD from lib.nodelist import export_nodelist +from lib.validate import validate_nodeinfos NODES_VERSION = 1 GRAPH_VERSION = 1 @@ -76,13 +77,15 @@ def main(params): # integrate alfred nodeinfo for alfred in alfred_instances: - nodes.import_nodeinfo(nodedb['nodes'], alfred.nodeinfo(), + nodeinfo = validate_nodeinfos(alfred.nodeinfo()) + nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, now, assume_online=True) # integrate static aliases data for aliases in params['aliases']: with open(aliases, 'r') as f: - nodes.import_nodeinfo(nodedb['nodes'], json.load(f), + nodeinfo = validate_nodeinfos(json.load(f)) + nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, now, assume_online=False) nodes.reset_statistics(nodedb['nodes']) diff --git a/lib/validate.py b/lib/validate.py new file mode 100644 index 0000000..eb6c11f --- /dev/null +++ b/lib/validate.py @@ -0,0 +1,19 @@ +import json + + +def validate_nodeinfos(nodeinfos): + result = [] + + for nodeinfo in nodeinfos: + if validate_nodeinfo(nodeinfo): + result.append(nodeinfo) + + return result + + +def validate_nodeinfo(nodeinfo): + if 'location' in nodeinfo: + if 'latitude' not in nodeinfo['location'] or 'longitude' not in nodeinfo['location']: + return False + + return True From 1141aa766f245ac70e59ad0c33bef51be93c3125 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sun, 3 May 2015 13:16:26 +0200 Subject: [PATCH 04/18] nodes.py: catch ZeroDivisionError in statistics --- lib/nodes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/nodes.py b/lib/nodes.py index 93d3835..4ff1fdc 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -59,7 +59,7 @@ def import_statistics(nodes, stats): node['statistics'][target] = f(reduce(dict.__getitem__, source, statistics)) - except (KeyError, TypeError): + except (KeyError, TypeError, ZeroDivisionError): pass macs = build_mac_table(nodes) From 3caf00be0765a875af0c78c483a2a07db026655f Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 21:54:54 +0200 Subject: [PATCH 05/18] extract VPN interfaces from nodeinfo --- backend.py | 13 +++++++++++++ lib/nodes.py | 27 ++++++++++++++++++++++----- 2 files changed, 35 insertions(+), 5 deletions(-) diff --git a/backend.py b/backend.py index f638049..3ea2f5d 100755 --- a/backend.py +++ b/backend.py @@ -120,6 +120,19 @@ def main(params): if params['vpn']: graph.mark_vpn(batadv_graph, frozenset(params['vpn'])) + def extract_tunnel(nodes): + macs = set() + for id, node in nodes.items(): + try: + for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]: + macs.add(mac) + except KeyError: + pass + + return macs + + graph.mark_vpn(batadv_graph, extract_tunnel(nodedb['nodes'])) + batadv_graph = graph.merge_nodes(batadv_graph) batadv_graph = graph.to_undirected(batadv_graph) diff --git a/lib/nodes.py b/lib/nodes.py index 4ff1fdc..d9543a8 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -97,12 +97,29 @@ def import_mesh_ifs_vis_data(nodes, 
vis_data): for v in mesh_nodes: node = v[0] - try: - mesh_ifs = set(node['nodeinfo']['network']['mesh_interfaces']) - except KeyError: - mesh_ifs = set() + ifs = set() - node['nodeinfo']['network']['mesh_interfaces'] = list(mesh_ifs | v[1]) + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh_interfaces'])) + except KeyError: + pass + + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless'])) + except KeyError: + pass + + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel'])) + except KeyError: + pass + + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other'])) + except KeyError: + pass + + node['nodeinfo']['network']['mesh_interfaces'] = list(ifs | v[1]) def import_vis_clientcount(nodes, vis_data): From 8fd0b73418b954d0ccbe44f8ad0d02103464efb1 Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 22:04:45 +0200 Subject: [PATCH 06/18] remove dependency on mesh_interfaces --- lib/nodes.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/lib/nodes.py b/lib/nodes.py index d9543a8..2d0769d 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -11,6 +11,25 @@ def build_mac_table(nodes): macs[mac] = node_id except KeyError: pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']: + macs[mac] = node_id + except KeyError: + pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']: + macs[mac] = node_id + except KeyError: + pass + + try: + for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']: + macs[mac] = node_id + except KeyError: + pass + return macs From dafad3df4ca9224e46dac429dc797db14cdc38ac Mon Sep 17 00:00:00 2001 From: Nils Schneider Date: Sat, 9 May 2015 22:16:44 +0200 Subject: [PATCH 07/18] update aliases.json_sample --- aliases.json_sample | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/aliases.json_sample b/aliases.json_sample index ca1eb6b..db35900 100644 --- a/aliases.json_sample +++ b/aliases.json_sample @@ -7,18 +7,30 @@ "latitude": 53.86 }, "network": { - "mesh_interfaces": [ - "00:25:86:e6:f1:bf" - ] + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "00:25:86:e6:f1:bf" + ] + } + } + } } }, { "node_id": "gw1", "hostname": "burgtor", "network": { - "mesh_interfaces": [ - "52:54:00:f3:62:d9" - ] + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "52:54:00:f3:62:d9" + ] + } + } + } } } ] From 71ced22b0f02c9be53a22647302a0205205beba1 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Fri, 15 May 2015 18:20:49 +0200 Subject: [PATCH 08/18] README.md: Extend dependencies --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c1ac7dd..809f393 100644 --- a/README.md +++ b/README.md @@ -20,8 +20,10 @@ For the script's regular execution add the following to the crontab: # Dependencies -- Python Package [Networkx](https://networkx.github.io/) +- Python 3 +- Python 3 Package [Networkx](https://networkx.github.io/) - [alfred-json](https://github.com/tcatm/alfred-json) +- rrdtool (if run with `--with-rrd`) # Running as unprivileged user From 11ef32178d0b3d4f6cd9ac28123278efedc41c79 Mon Sep 17 00:00:00 2001 From: Jan-Philipp Litza Date: Sat, 16 May 2015 09:38:23 +0200 Subject: [PATCH 09/18] Added jq filter to convert new format to old format This makes it easily possible to continue using the legacy ffmap-d3 front 
end with the new backend while migrating. --- README.md | 15 +++++++++++++++ ffmap-d3.jq | 52 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 67 insertions(+) create mode 100644 ffmap-d3.jq diff --git a/README.md b/README.md index 809f393..c345bb3 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,21 @@ will prefix `sudo` where necessary. - online - gateway +## Old data format + +If you want to still use the old [ffmap-d3](https://github.com/ffnord/ffmap-d3) +front end, you can use the file `ffmap-d3.jq` to convert the new output to the +old one: + +``` +jq -n -f ffmap-d3.jq \ + --argfile nodes nodedb/nodes.json \ + --argfile graph nodedb/graph.json \ + > nodedb/ffmap-d3.json +``` + +Then point your ffmap-d3 instance to the `ffmap-d3.json` file. + # Removing owner information If you'd like to redact information about the node owner from `nodes.json`, diff --git a/ffmap-d3.jq b/ffmap-d3.jq new file mode 100644 index 0000000..ebeece1 --- /dev/null +++ b/ffmap-d3.jq @@ -0,0 +1,52 @@ +{ + "meta": { + "timestamp": $nodes.timestamp + }, + "nodes": ( + $graph.batadv.nodes + | map( + if has("node_id") and .node_id + then ( + $nodes.nodes[.node_id] as $node + | { + "id": .id, + "uptime": $node.statistics.uptime, + "flags": ($node.flags + {"client": false}), + "name": $node.nodeinfo.hostname, + "clientcount": (if $node.statistics.clients >= 0 then $node.statistics.clients else 0 end), + "hardware": $node.nodeinfo.hardware.model, + "firmware": $node.nodeinfo.software.firmware.release, + "geo": (if $node.nodeinfo.location then [$node.nodeinfo.location.latitude, $node.nodeinfo.location.longitude] else null end), + #"lastseen": $node.lastseen, + "network": $node.nodeinfo.network + } + ) + else + { + "flags": {}, + "id": .id, + "geo": null, + "clientcount": 0 + } + end + ) + ), + "links": ( + $graph.batadv.links + | map( + $graph.batadv.nodes[.source].node_id as $source_id + | $graph.batadv.nodes[.target].node_id as $target_id + | select( + $source_id and $target_id and + ($nodes.nodes | (has($source_id) and has($target_id))) + ) + | { + "target": .target, + "source": .source, + "quality": "\(.tq), \(.tq)", + "id": ($source_id + "-" + $target_id), + "type": (if .vpn then "vpn" else null end) + } + ) + ) +} From 64dee31ebb030583b4dd374d39d62e2a53937458 Mon Sep 17 00:00:00 2001 From: stebifan Date: Sun, 7 Jun 2015 23:52:32 +0200 Subject: [PATCH 10/18] Added traffic to Statistics --- lib/nodes.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/nodes.py b/lib/nodes.py index 2d0769d..3c0fa2f 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -92,6 +92,7 @@ def import_statistics(nodes, stats): add(node, stats, 'memory_usage', ['memory'], lambda d: 1 - d['free'] / d['total']) add(node, stats, 'rootfs_usage', ['rootfs_usage']) + add(node, stats, 'traffic', ['traffic']) def import_mesh_ifs_vis_data(nodes, vis_data): From b0b6f8e0cd47e57bf220dca09991b049bfb3ff6e Mon Sep 17 00:00:00 2001 From: kantorkel Date: Mon, 30 Nov 2015 19:45:14 +0100 Subject: [PATCH 11/18] status srv01 --- alfred_merge.py | 42 ++++++++++++++++++++++++++++++++++++++++++ backend.py | 7 +++---- lib/nodelist.py | 3 +++ lib/nodes.py | 21 ++++++++++++++++----- 4 files changed, 64 insertions(+), 9 deletions(-) create mode 100755 alfred_merge.py diff --git a/alfred_merge.py b/alfred_merge.py new file mode 100755 index 0000000..aa623b2 --- /dev/null +++ b/alfred_merge.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python3 +import subprocess +import json + +from collections import MutableMapping + +def rec_merge(d1, d2): + ''' + Update 
two dicts of dicts recursively, + if either mapping has leaves that are non-dicts, + the second's leaf overwrites the first's. + ''' + for k, v in d1.items(): # in Python 2, use .iteritems()! + if k in d2: + # this next check is the only difference! + if all(isinstance(e, MutableMapping) for e in (v, d2[k])): + d2[k] = rec_merge(v, d2[k]) + # we could further check types and merge as appropriate here. + d3 = d1.copy() + d3.update(d2) + return d3 + + +class alfred_merge: + def __init__(self,request_data_type_1 = 158, request_data_type_2 = 159): + self.request_data_type_1 = request_data_type_1 + self.request_data_type_2 = request_data_type_2 + + def aliases(self): + output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"]) + alfred_data_1 = json.loads(output.decode("utf-8")) + output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"]) + alfred_data_2 = json.loads(output.decode("utf-8")) + + return json.dumps(rec_merge(alfred_data_1, alfred_data_2)) + + +if __name__ == "__main__": + ad = alfred_merge() + al = ad.aliases() + print(al) + diff --git a/backend.py b/backend.py index 3ea2f5d..b2f38b7 100755 --- a/backend.py +++ b/backend.py @@ -84,9 +84,9 @@ def main(params): # integrate static aliases data for aliases in params['aliases']: with open(aliases, 'r') as f: - nodeinfo = validate_nodeinfos(json.load(f)) - nodes.import_nodeinfo(nodedb['nodes'], nodeinfo, - now, assume_online=False) +# nodeinfo = validate_nodeinfos(json.load(f)) + nodes.import_nodeinfo(nodedb['nodes'], json.load(f), + now, assume_online=False, statics=True) nodes.reset_statistics(nodedb['nodes']) for alfred in alfred_instances: @@ -157,7 +157,6 @@ def main(params): rrd.update_database(nodedb['nodes']) rrd.update_images() - if __name__ == '__main__': parser = argparse.ArgumentParser() diff --git a/lib/nodelist.py b/lib/nodelist.py index 15aea63..a931dcf 100644 --- a/lib/nodelist.py +++ b/lib/nodelist.py @@ -13,6 +13,9 @@ def export_nodelist(now, nodedb): node_out["status"] = dict() node_out["status"]["online"] = node["flags"]["online"] + if "firstseen" in node: + node_out["status"]["firstcontact"] = node["firstseen"] + if "lastseen" in node: node_out["status"]["lastcontact"] = node["lastseen"] diff --git a/lib/nodes.py b/lib/nodes.py index 3c0fa2f..f517438 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -56,12 +56,23 @@ def mark_online(node, now): node['flags']['online'] = True -def import_nodeinfo(nodes, nodeinfos, now, assume_online=False): +def overrideFields(dest, src, fields): + for field in fields: + if field in src: + dest[field] = src[field] + else: + dest.pop(field, None) + + +def import_nodeinfo(nodes, nodeinfos, now, assume_online=False, statics=False): for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos): - node = nodes.setdefault(nodeinfo['node_id'], {'flags': dict()}) - node['nodeinfo'] = nodeinfo - node['flags']['online'] = False - node['flags']['gateway'] = False + node = nodes.setdefault(nodeinfo['node_id'], {'flags': {'online': False, 'gateway': False}}) + + if statics: + node['nodeinfo'] = node.setdefault('nodeinfo', {}) + overrideFields(node['nodeinfo'], nodeinfo, ['hostname', 'location', 'node_id']) + else: + node['nodeinfo'] = nodeinfo if assume_online: mark_online(node, now) From 2043c88c03787cf7e17f7536981beafc2a30d735 Mon Sep 17 00:00:00 2001 From: kantorkel Date: Mon, 30 Nov 2015 20:00:15 +0100 Subject: [PATCH 12/18] fastd2aliases --- generate_aliases.py | 110 
++++++++++++++++++++++++++++++++++++++++
 mkmap.sh | 6 +++
 2 files changed, 116 insertions(+)
 create mode 100755 generate_aliases.py
 create mode 100755 mkmap.sh

diff --git a/generate_aliases.py b/generate_aliases.py
new file mode 100755
index 0000000..98fe407
--- /dev/null
+++ b/generate_aliases.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python2
+
+from __future__ import print_function
+
+import json
+import os
+import sys
+
+if len(sys.argv) != 2:
+    print('usage: ' + sys.argv[0] + ' /path/to/peers')
+    sys.exit(1)
+
+peersDir = sys.argv[1]
+
+def normalizeMac(mac):
+    mac = mac.lower()
+    normalized = ''
+
+    n = 0
+
+    for c in mac:
+        if c != ':':
+            if n > 0 and n % 2 == 0:
+                normalized = normalized + ':'
+            normalized = normalized + c
+            n += 1
+
+    return normalized
+
+def toAlias(peer):
+    alias = {}
+
+    if not (peer.has_key('name') and peer.has_key('mac')):
+        return None
+
+    name = peer['name']
+    mac = peer['mac']
+
+    alias['node_id'] = mac.replace(':', '')
+    alias['hostname'] = name
+
+    if peer.has_key('geo'):
+        geo = peer['geo']
+
+        location = {}
+
+        if geo.has_key('lon'): location['longitude'] = geo['lon']
+        if geo.has_key('lat'): location['latitude'] = geo['lat']
+
+        alias['location'] = location
+
+    #alias['network'] = {}
+    #alias['network']['mesh_interfaces'] = [mac]
+
+    return alias
+
+aliases = []
+
+for filename in os.listdir(peersDir):
+    if len(filename) == 0 or filename[0] == '.':
+        continue
+
+    isGateway = False
+
+    absFilename = peersDir + '/' + filename
+    if os.path.isfile(absFilename):
+        peerFile = open(absFilename, 'r')
+        try:
+            peerLines = peerFile.readlines()
+            peer = {}
+
+            for line in peerLines:
+                parts = line.split()
+
+                if len(parts) > 2:
+                    if parts[1] == 'Knotenname:':
+                        peer['name'] = parts[2]
+
+                    elif parts[0] == 'remote':
+                        isGateway = True
+
+                    elif parts[1] == 'MAC:':
+                        peer['mac'] = normalizeMac(parts[2])
+
+                    elif parts[1] == 'Koordinaten:' and len(parts) > 3:
+                        try:
+                            peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])}
+
+                        except ValueError:
+                            print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr)
+
+                elif len(parts) == 2 and parts[0] == 'key':
+                    keyParts = parts[1].split('"')
+                    if len(keyParts) > 1:
+                        peer['vpn'] = keyParts[1].lower()
+
+            if isGateway:
+                continue
+
+            alias = toAlias(peer)
+            if alias:
+                aliases.append(alias)
+
+        except Exception as e:
+            print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr)
+
+        finally:
+            peerFile.close()
+
+print(json.dumps(aliases))
diff --git a/mkmap.sh b/mkmap.sh
new file mode 100755
index 0000000..cf85166
--- /dev/null
+++ b/mkmap.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+PATH='/opt/ffmap-backend/'
+PEERS="/etc/fastd/ffhh-mesh-vpn/peers"
+
+python2 $PATH/generate_aliases.py $PEERS > $PATH/aliases.json
+python3 $PATH/backend.py -d /var/www/meshviewer/data/ -a $PATH/aliases.json --vpn de:ad:be:ff:01:01

From 793486ff65fcdd7ab6b17dc3d5ee36778740126c Mon Sep 17 00:00:00 2001
From: kantorkel
Date: Tue, 1 Dec 2015 19:29:57 +0100
Subject: [PATCH 13/18] Working setup

---
 alfred_merge.py | 4 +--
 gateway.json | 86 +++++++++++++++++++++++++++++++++++++++++++++++++
 lib/alfred.py | 2 +-
 mkmap.sh | 6 ++--
 4 files changed, 92 insertions(+), 6 deletions(-)
 create mode 100644 gateway.json

diff --git a/alfred_merge.py b/alfred_merge.py
index aa623b2..ee1143f 100755
--- a/alfred_merge.py
+++ b/alfred_merge.py
@@ -27,9 +27,9 @@ class alfred_merge:
         self.request_data_type_2 = request_data_type_2
 
     def aliases(self):
-        output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"])
+        output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"])
         alfred_data_1 = json.loads(output.decode("utf-8"))
-        output = subprocess.check_output(["/usr/local/sbin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"])
+        output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"])
         alfred_data_2 = json.loads(output.decode("utf-8"))
 
         return json.dumps(rec_merge(alfred_data_1, alfred_data_2))
diff --git a/gateway.json b/gateway.json
new file mode 100644
index 0000000..c7b0966
--- /dev/null
+++ b/gateway.json
@@ -0,0 +1,86 @@
+[
+  {
+    "node_id": "deadbeef0101",
+    "hostname": "gw01",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "de:ad:be:ff:01:01",
+              "de:ad:bf:ff:01:01"
+            ]
+          }
+        }
+      }
+    }
+  },
+  {
+    "node_id": "deadbeef0505",
+    "hostname": "gw02",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "de:ad:be:ff:05:05",
+              "de:ad:be:ff:05:06",
+              "de:ad:bf:ff:05:05"
+            ]
+          }
+        }
+      }
+    }
+  },
+  {
+    "node_id": "deadbeef0303",
+    "hostname": "gw03",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "de:ad:be:ff:03:03",
+              "de:ad:bf:ff:03:03"
+            ]
+          }
+        }
+      }
+    }
+  },
+  {
+    "node_id": "deadbfff2222",
+    "hostname": "gw05",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "de:ad:be:ff:22:22",
+              "de:ad:be:ff:22:23",
+              "de:ad:bf:ff:22:22"
+            ]
+          }
+        }
+      }
+    }
+  },
+  {
+    "node_id": "deadbeef8888",
+    "hostname": "gw08",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "de:ad:be:ff:88:88",
+              "de:ad:be:ff:88:89",
+              "de:ad:bf:ff:88:88"
+            ]
+          }
+        }
+      }
+    }
+  }
+]
+
diff --git a/lib/alfred.py b/lib/alfred.py
index 4353874..531eaea 100644
--- a/lib/alfred.py
+++ b/lib/alfred.py
@@ -13,7 +13,7 @@ class Alfred(object):
             raise RuntimeError('alfred: invalid unix socket path given')
 
     def _fetch(self, data_type):
-        cmd = ['alfred-json',
+        cmd = ['/usr/local/bin/alfred-json',
                '-z',
                '-f', 'json',
                '-r', str(data_type)]
diff --git a/mkmap.sh b/mkmap.sh
index cf85166..b49f9a5 100755
--- a/mkmap.sh
+++ b/mkmap.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
-PATH='/opt/ffmap-backend/'
+FFMAPPATH='/opt/ffmap-backend/'
 PEERS="/etc/fastd/ffhh-mesh-vpn/peers"
 
-python2 $PATH/generate_aliases.py $PEERS > $PATH/aliases.json
-python3 $PATH/backend.py -d /var/www/meshviewer/data/ -a $PATH/aliases.json --vpn de:ad:be:ff:01:01
+python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json
+python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01

From 4b5bad262c11b1410a88f82f65dd68f19a7f2e46 Mon Sep 17 00:00:00 2001
From: kantorkel
Date: Tue, 1 Dec 2015 21:44:45 +0100
Subject: [PATCH 14/18] Add node_number.py for meta.hamburg.freifunk.net
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 node_number.py | 43 +++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 node_number.py

diff --git a/node_number.py b/node_number.py
new file mode 100644
index 0000000..b965a01
--- /dev/null
+++ b/node_number.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+#Import libraries
+import time
+import datetime
+import json
+
+#Open the file
+f = open('/var/www/meshviewer/nodelist.json')
+
+#Read the JSON
+data = json.load(f)
+
+#Pick out the nodes attribute
+nodes = data['nodes']
+
+#Create a counter with value 0
+num_nodes = 0
+
+#For every node in nodes
+for node in nodes:
+    #Pick out the status attribute
+    status = node['status']
+
+    #If the status contains online, count up
+    if status['online']:
+        num_nodes += 1
+
+#Get the time
+thetime = datetime.datetime.now().isoformat()
+
+ffhh = None
+
+#Load the Freifunk API file and read the JSON
+with open('/var/www/meta/ffhh.json', 'r') as fp:
+    ffhh = json.load(fp)
+
+#Set the timestamp and node count attributes
+ffhh['state']['lastchange'] = thetime
+ffhh['state']['nodes'] = num_nodes
+
+#Write the Freifunk API file with the changed values
+with open('/var/www/meta/ffhh.json', 'w') as fp:
+    json.dump(ffhh, fp, indent=2, separators=(',', ': '))

From 0c0fa78200e76d009980a03fd3237ecb4611ece9 Mon Sep 17 00:00:00 2001
From: 4ndr3 <4ndr3@users.noreply.github.com>
Date: Fri, 16 Jun 2017 23:35:42 +0200
Subject: [PATCH 15/18] Add Süd domain
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Switched from nodelist.json to nodes.json
- Added a nodes.json for the Süd domain
- The nodes.json files are now read from URLs, since they live on different servers
- The nodes.json files are now searched as plain strings instead of being parsed as JSON
---
 node_number.py | 23 ++++++++---------------
 1 file changed, 8 insertions(+), 15 deletions(-)

diff --git a/node_number.py b/node_number.py
index b965a01..765dc0f 100644
--- a/node_number.py
+++ b/node_number.py
@@ -3,27 +3,20 @@
 import time
 import datetime
 import json
+import urllib2
 
 #Open the file
-f = open('/var/www/meshviewer/nodelist.json')
-
-#Read the JSON
-data = json.load(f)
-
-#Pick out the nodes attribute
-nodes = data['nodes']
+Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json')
+Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json')
 
 #Create a counter with value 0
 num_nodes = 0
 
-#For every node in nodes
-for node in nodes:
-    #Pick out the status attribute
-    status = node['status']
-
-    #If the status contains online, count up
-    if status['online']:
-        num_nodes += 1
+Text = Datei.read()
+n = Text.count('"online": true')
+Text = Datei_Sued.read()
+n_Sued = Text.count('"online":true')
+num_nodes = n + n_Sued
 
 #Get the time
 thetime = datetime.datetime.now().isoformat()

From b343748fe8bc02632caded4039b79030f5f7909a Mon Sep 17 00:00:00 2001
From: 4ndr3 <4ndr3@users.noreply.github.com>
Date: Fri, 16 Jun 2017 23:41:11 +0200
Subject: [PATCH 16/18] Simplify code

---
 node_number.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/node_number.py b/node_number.py
index 765dc0f..20cd00e 100644
--- a/node_number.py
+++ b/node_number.py
@@ -9,14 +9,10 @@ import urllib2
 Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json')
 Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json')
 
-#Create a counter with value 0
-num_nodes = 0
-
 Text = Datei.read()
-n = Text.count('"online": true')
+Knotenzahl = Text.count('"online": true')
 Text = Datei_Sued.read()
-n_Sued = Text.count('"online":true')
-num_nodes = n + n_Sued
+Knotenzahl = Knotenzahl + Text.count('"online":true')
 
 #Get the time
 thetime = datetime.datetime.now().isoformat()
@@ -29,7 +25,7 @@ with open('/var/www/meta/ffhh.json', 'r') as fp:
 
 #Set the timestamp and node count attributes
 ffhh['state']['lastchange'] = thetime
-ffhh['state']['nodes'] = num_nodes
+ffhh['state']['nodes'] = Knotenzahl
 
 #Write the Freifunk API file with the changed values
 with open('/var/www/meta/ffhh.json', 'w') as fp:

From 209271cbf77e1127056abda96379088db339f3e6 Mon Sep 17 00:00:00 2001
From: Alexander Dietrich
Date: Mon, 10 Jul 2017 21:48:25 +0200
Subject: [PATCH 17/18] Use tempfiles when updating JSON

---
 backend.py | 15 ++++++++++++---
 mkmap.sh | 2 +-
 2 files changed, 13 insertions(+), 4 deletions(-)

diff --git a/backend.py b/backend.py
index b2f38b7..74d3e52 100755
--- a/backend.py
+++ b/backend.py
@@ -27,8 +27,13 @@ def main(params):
     os.makedirs(params['dest_dir'], exist_ok=True)
 
     nodes_fn = os.path.join(params['dest_dir'], 'nodes.json')
+    tmp_nodes_fn = os.path.join(params['dest_dir'], 'nodes.json.tmp')
+
     graph_fn = os.path.join(params['dest_dir'], 'graph.json')
+    tmp_graph_fn = os.path.join(params['dest_dir'], 'graph.json.tmp')
+
     nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json')
+    tmp_nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json.tmp')
 
     now = datetime.utcnow().replace(microsecond=0)
 
@@ -137,18 +142,22 @@ def main(params):
     batadv_graph = graph.to_undirected(batadv_graph)
 
     # write processed data to dest dir
-    with open(nodes_fn, 'w') as f:
+    with open(tmp_nodes_fn, 'w') as f:
         json.dump(nodedb, f)
 
     graph_out = {'batadv': json_graph.node_link_data(batadv_graph),
                  'version': GRAPH_VERSION}
 
-    with open(graph_fn, 'w') as f:
+    with open(tmp_graph_fn, 'w') as f:
         json.dump(graph_out, f)
 
-    with open(nodelist_fn, 'w') as f:
+    with open(tmp_nodelist_fn, 'w') as f:
         json.dump(export_nodelist(now, nodedb), f)
 
+    os.rename(tmp_nodes_fn, nodes_fn)
+    os.rename(tmp_graph_fn, graph_fn)
+    os.rename(tmp_nodelist_fn, nodelist_fn)
+
     # optional rrd graphs (trigger with --rrd)
     if params['rrd']:
         script_directory = os.path.dirname(os.path.realpath(__file__))
diff --git a/mkmap.sh b/mkmap.sh
index b49f9a5..2d9b0f6 100755
--- a/mkmap.sh
+++ b/mkmap.sh
@@ -3,4 +3,4 @@ FFMAPPATH='/opt/ffmap-backend/'
 PEERS="/etc/fastd/ffhh-mesh-vpn/peers"
 
 python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json
-python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01
+python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05

From 325f6cd1f4950174a73a83bdca571cff3d47c687 Mon Sep 17 00:00:00 2001
From: Alexander Dietrich
Date: Mon, 10 Jul 2017 21:50:14 +0200 Subject: [PATCH 18/18] Support for "bat-ffhh" interface, add generate_aliases_v2.py --- backend.py | 3 +- gateway.json | 89 ++++++-------------------------- generate_aliases_v2.py | 112 +++++++++++++++++++++++++++++++++++++++++ lib/batman.py | 4 -- lib/graph.py | 1 - lib/nodes.py | 15 +++++- mkmap.sh | 3 +- node_number.py | 0 8 files changed, 146 insertions(+), 81 deletions(-) create mode 100755 generate_aliases_v2.py mode change 100644 => 100755 node_number.py diff --git a/backend.py b/backend.py index 74d3e52..8b9d9a6 100755 --- a/backend.py +++ b/backend.py @@ -102,7 +102,6 @@ def main(params): for batman in batman_instances: vd = batman.vis_data() gwl = batman.gateway_list() - mesh_info.append((vd, gwl)) # update nodedb from batman-adv data @@ -131,6 +130,8 @@ def main(params): try: for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]: macs.add(mac) + for mac in node["nodeinfo"]["network"]["mesh"]["bat-ffhh"]["interfaces"]["tunnel"]: + macs.add(mac) except KeyError: pass diff --git a/gateway.json b/gateway.json index c7b0966..4a72859 100644 --- a/gateway.json +++ b/gateway.json @@ -1,86 +1,29 @@ [ { - "node_id": "deadbeef0101", - "hostname": "gw01", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:01:01", - "de:ad:bf:ff:01:01" - ] - } - } - } - } + "node_id": "deadbfff0101", + "hostname": "gw01" }, { "node_id": "deadbeef0505", - "hostname": "gw02", + "hostname": "gw02.hamburg.freifunk.net", "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:05:05", - "de:ad:be:ff:05:06", - "de:ad:bf:ff:05:05" - ] - } + "mac": "de:ad:be:ef:05:05", + "mesh": { + "bat0": { + "interfaces": { + "tunnel": [ + "de:ad:be:ff:05:05", + "de:ad:be:fc:05:05", + "de:ad:bf:ff:05:05" + ] + } + } } - } } }, { - "node_id": "deadbeef0303", - "hostname": "gw03", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:03:03", - "de:ad:bf:ff:03:03" - ] - } - } - } - } - }, - { - "node_id": "deadbfff2222", - "hostname": "gw05", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:22:22", - "de:ad:be:ff:22:23", - "de:ad:bf:ff:22:22" - ] - } - } - } - } - }, - { - "node_id": "deadbeef8888", - "hostname": "gw08", - "network": { - "mesh": { - "bat0": { - "interfaces": { - "tunnel": [ - "de:ad:be:ff:88:88", - "de:ad:be:ff:88:89", - "de:ad:bf:ff:88:88" - ] - } - } - } - } + "node_id": "00163efb9d8d", + "hostname": "gw03" } ] diff --git a/generate_aliases_v2.py b/generate_aliases_v2.py new file mode 100755 index 0000000..7a04c7c --- /dev/null +++ b/generate_aliases_v2.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python2 + +from __future__ import print_function + +import json +import os +import sys + +if len(sys.argv) != 2: + print('usage: ' + sys.argv[0] + ' /path/to/peers') + sys.exit(1) + +peersDir = sys.argv[1] + +def normalizeMac(mac): + mac = mac.lower() + normalized = '' + + n = 0 + + for c in mac: + if c != ':': + if n > 0 and n % 2 == 0: + normalized = normalized + ':' + normalized = normalized + c + n += 1 + + return normalized + +def toAlias(peer): + alias = {} + + if not (peer.has_key('name') and peer.has_key('mac')): + return None + + name = peer['name'] + mac = peer['mac'] + + alias['node_id'] = mac.replace(':', '') + alias['hostname'] = name + + if peer.has_key('geo'): + geo = peer['geo'] + + location = {} + + if geo.has_key('lon'): location['longitude'] = geo['lon'] + if geo.has_key('lat'): location['latitude'] = geo['lat'] + + 
alias['location'] = location + + #alias['network'] = {} + #alias['network']['mesh_interfaces'] = [mac] + + return {'nodeinfo':alias} + +aliases = {} + +for filename in os.listdir(peersDir): + if len(filename) == 0 or filename[0] == '.': + continue + + isGateway = False + + absFilename = peersDir + '/' + filename + if os.path.isfile(absFilename): + peerFile = open(absFilename, 'r') + try: + peerLines = peerFile.readlines() + peer = {} + + for line in peerLines: + parts = line.split() + + if len(parts) > 2: + if parts[1] == 'Knotenname:': + peer['name'] = parts[2] + + elif parts[0] == 'remote': + isGateway = True + + elif parts[1] == 'MAC:': + peer['mac'] = normalizeMac(parts[2]) + + elif parts[1] == 'Koordinaten:' and len(parts) > 3: + try: + peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])} + + except ValueError: + print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr) + + elif len(parts) == 2 and parts[0] == 'key': + keyParts = parts[1].split('"') + if len(keyParts) > 1: + peer['vpn'] = keyParts[1].lower() + + if isGateway: + continue + + alias = toAlias(peer) + if alias: + tmpid = alias['nodeinfo']['node_id'] +# alias['nodeinfo'].pop('node_id') + aliases[tmpid] = alias + + except Exception as e: + print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr) + + finally: + peerFile.close() + +print(json.dumps(aliases)) diff --git a/lib/batman.py b/lib/batman.py index ee0a0fe..ee0198b 100644 --- a/lib/batman.py +++ b/lib/batman.py @@ -96,7 +96,3 @@ if __name__ == "__main__": bc = Batman() vd = bc.vis_data() gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) diff --git a/lib/graph.py b/lib/graph.py index db1259e..2833e6e 100644 --- a/lib/graph.py +++ b/lib/graph.py @@ -25,7 +25,6 @@ def mark_vpn(graph, vpn_macs): components = map(frozenset, nx.weakly_connected_components(graph)) components = filter(vpn_macs.intersection, components) nodes = reduce(lambda a, b: a | b, components, set()) - for node in nodes: for k, v in graph[node].items(): v['vpn'] = True diff --git a/lib/nodes.py b/lib/nodes.py index f517438..86ce5b9 100644 --- a/lib/nodes.py +++ b/lib/nodes.py @@ -6,6 +6,10 @@ from functools import reduce def build_mac_table(nodes): macs = dict() for node_id, node in nodes.items(): + try: + macs[node['network']['mac']] = node_id + except KeyError: + pass try: for mac in node['nodeinfo']['network']['mesh_interfaces']: macs[mac] = node_id @@ -23,6 +27,11 @@ def build_mac_table(nodes): macs[mac] = node_id except KeyError: pass + try: + for mac in node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']: + macs[mac] = node_id + except KeyError: + pass try: for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']: @@ -145,6 +154,11 @@ def import_mesh_ifs_vis_data(nodes, vis_data): except KeyError: pass + try: + ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel'])) + except KeyError: + pass + try: ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other'])) except KeyError: @@ -166,7 +180,6 @@ def import_vis_clientcount(nodes, vis_data): def mark_gateways(nodes, gateways): macs = build_mac_table(nodes) gateways = filter(lambda d: d in macs, gateways) - for node in map(lambda d: nodes[macs[d]], gateways): node['flags']['gateway'] = True diff --git a/mkmap.sh b/mkmap.sh index 2d9b0f6..9423943 100755 --- a/mkmap.sh +++ b/mkmap.sh @@ -3,4 +3,5 @@ FFMAPPATH='/opt/ffmap-backend/' 
PEERS="/etc/fastd/ffhh-mesh-vpn/peers" python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json -python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 +#python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 --vpn 00:16:3e:fb:9d:8d --vpn 00:16:3e:fb:9d:9d +python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 de:ad:be:ff:05:05 de:ad:be:ff:05:06 de:ad:be:ff:03:03 de:ad:be:ff:22:22 de:ad:be:ff:22:23 de:ad:be:ff:88:88 de:ad:be:ff:88:89 de:ad:bf:ff:88:88 de:ad:bf:ff:22:22 de:ad:bf:ff:03:03 de:ad:bf:ff:05:05 de:ad:bf:ff:01:01 de:ad:be:fc:03:03 00:16:3e:53:75:0d de:ad:be:fc:05:05 de:ad:be:fc:01:01 de:ad:be:ef:03:03 de:ad:be:ef:01:01 de:ad:be:ef:05:05 00:16:3e:fb:9d:8d 00:16:3e:fb:9d:9d diff --git a/node_number.py b/node_number.py old mode 100644 new mode 100755