diff --git a/.gitignore b/.gitignore index 0f42dec..0d20b64 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1 @@ *.pyc -aliases.json -nodedb/ diff --git a/README.md b/README.md index fc718fe..8e4abd8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Data for Freifunk Map, Graph and Node List -ffmap-backend gathers information on the batman network by invoking +ffmap-backend gathers information on the batman network by invoking batctl and batadv-vis @@ -41,13 +41,13 @@ Alias /map /home/ffmap/www/ Alias /firmware /home/freifunk/autoupdates/ -To execute, run - ./mkmap.sh ../www +To execute, run + python3 -mffmap.run --input-alfred --input-batadv --output-d3json ../www/nodes.json The script expects above described sudo-wrappers in the $HOME directory of the user executing the script. If those are not available, an error will occurr if not executed as root. Also, the tool realpath optionally allows to execute the script from anywhere in the directory tree. For the script's regular execution add the following to the crontab:
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+*/5 * * * * python3 -mffmap.run --input-alfred --input-batadv --output-d3json /home/ffmap/www/nodes.json
 
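For reference, the new entry point can chain several inputs and outputs in one call. A sketch using the flags defined in ffmap/run.py, where the wiki URL is the default from ffmap/inputs/wiki.py and the RRD directory is only an example path:

    python3 -mffmap.run --input-batadv --input-alfred \
        --input-wiki http://luebeck.freifunk.net/wiki/Knoten \
        --output-d3json /home/ffmap/www/nodes.json \
        --output-rrd /home/ffmap/nodedb

Inputs are applied in the order given on the command line, so later inputs may overwrite attributes set by earlier ones.
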
diff --git a/aliases.json_sample b/aliases.json_sample deleted file mode 100644 index 1f3ca33..0000000 --- a/aliases.json_sample +++ /dev/null @@ -1,9 +0,0 @@ -{ - "b0:48:7a:e7:d3:64" : { - "name" : "Meute-AP" - }, - "8e:3d:c2:10:10:28" : { - "name" : "holstentor", - "vpn" : true - } -} diff --git a/bat2nodes.py b/bat2nodes.py deleted file mode 100755 index e1fde6e..0000000 --- a/bat2nodes.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python3 - -import json -import fileinput -import argparse -import os -import datetime - -from batman import batman -from alfred import alfred -from rrd import rrd -from nodedb import NodeDB -from json_encoder import CustomJSONEncoder - -# Force encoding to UTF-8 -import locale # Ensures that subsequent open()s -locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded. - -import sys -#sys.stdin = open('/dev/stdin', 'r') -#sys.stdout = open('/dev/stdout', 'w') -#sys.stderr = open('/dev/stderr', 'w') - -parser = argparse.ArgumentParser() - -parser.add_argument('-a', '--aliases', - help='read aliases from FILE', - action='append', - metavar='FILE') - -parser.add_argument('-m', '--mesh', action='append', - help='batman mesh interface') - -parser.add_argument('-o', '--obscure', action='store_true', - help='obscure client macs') - -parser.add_argument('-A', '--alfred', action='store_true', - help='retrieve aliases from alfred') - -parser.add_argument('-d', '--destination-directory', action='store', - help='destination directory for generated files',required=True) - -args = parser.parse_args() - -options = vars(args) - -db = NodeDB() -if options['mesh']: - for mesh_interface in options['mesh']: - bm = batman(mesh_interface) - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateways(gw['mac']) -else: - bm = batman() - db.parse_vis_data(bm.vis_data(options['alfred'])) - for gw in bm.gateway_list(): - db.mark_gateways([gw['mac']]) - -if options['aliases']: - for aliases in options['aliases']: - db.import_aliases(json.load(open(aliases))) - -if options['alfred']: - af = alfred() - db.import_aliases(af.aliases()) - -db.count_clients() - -if options['obscure']: - db.obscure_clients() - -scriptdir = os.path.dirname(os.path.realpath(__file__)) - -exported = db.export() -exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()} - -#Write nodes json -nodes_json = open(options['destination_directory'] + '/nodes.json.new','w') -json.dump(exported, nodes_json, cls=CustomJSONEncoder) -nodes_json.close() - -#Move to destination -os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json') - -rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes") -rrd.update_database(db) -rrd.update_images() diff --git a/ffhlwiki.py b/ffhlwiki.py deleted file mode 100755 index 588ae72..0000000 --- a/ffhlwiki.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python3 - -import json -import argparse -from itertools import zip_longest -from urllib.request import urlopen -from bs4 import BeautifulSoup - -def import_wikigps(url): - def fetch_wikitable(url): - f = urlopen(url) - - soup = BeautifulSoup(f) - - table = soup.find_all("table")[0] - - rows = table.find_all("tr") - - headers = [] - - data = [] - - def maybe_strip(x): - if isinstance(x.string, str): - return x.string.strip() - else: - return "" - - for row in rows: - tds = list([maybe_strip(x) for x in row.find_all("td")]) - ths = list([maybe_strip(x) for x in 
row.find_all("th")]) - - if any(tds): - data.append(tds) - - if any(ths): - headers = ths - - nodes = [] - - for d in data: - nodes.append(dict(zip(headers, d))) - - return nodes - - nodes = fetch_wikitable(url) - - aliases = {} - - for node in nodes: - try: - node['MAC'] = node['MAC'].split(',') - except KeyError: - pass - - try: - node['GPS'] = node['GPS'].split(',') - except KeyError: - pass - - try: - node['Knotenname'] = node['Knotenname'].split(',') - except KeyError: - pass - - nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname']) - - for data in nodes: - alias = {} - - mac = data[0].strip() - - if data[1]: - alias['geo'] = [float(x) for x in data[1].strip().split(' ')] - - if data[2]: - alias['name'] = data[2].strip() - - aliases[mac] = alias - - return aliases - -parser = argparse.ArgumentParser() - -parser.add_argument('url', help='wiki URL') - -args = parser.parse_args() - -options = vars(args) - -aliases = import_wikigps(options['url']) - -print(json.dumps(aliases)) diff --git a/ffmap/__init__.py b/ffmap/__init__.py new file mode 100644 index 0000000..9542acc --- /dev/null +++ b/ffmap/__init__.py @@ -0,0 +1,42 @@ +import importlib + +from ffmap.nodedb import NodeDB + +def run(inputs, outputs): + """Fill the database with given inputs and give it to given outputs. + + Arguments: + inputs -- list of Input instances (with a compatible get_data(nodedb) method) + outputs -- list of Output instances (with a compatible output(nodedb) method) + """ + db = NodeDB() + for input_ in inputs: + input_.get_data(db) + + for output in outputs: + output.output(db) + +def run_names(inputs, outputs): + """Fill the database with inputs and give it to outputs, each given + by names. + + In contrast to run(inputs, outputs), this method expects only the + names of the modules to use, not instances thereof. + Arguments: + inputs -- list of dicts, each dict having the keys "name" with the + name of the input to use (directory name in inputs/), and + the key "options" with a dict of input-dependent options. + outputs -- list of dicts, see inputs. + """ + input_instances = [] + output_instances = [] + + for input_ in inputs: + module = importlib.import_module(".inputs." + input_["name"], "ffmap") + input_instances.append(module.Input(**input_["options"])) + + for output in outputs: + module = importlib.import_module(".outputs." 
+ output["name"], "ffmap") + output_instances.append(module.Output(**output["options"])) + + run(input_instances, output_instances) diff --git a/nodedb/.gitkeep b/ffmap/inputs/__init__.py similarity index 100% rename from nodedb/.gitkeep rename to ffmap/inputs/__init__.py diff --git a/inputs/alfred/__init__.py b/ffmap/inputs/alfred.py similarity index 100% rename from inputs/alfred/__init__.py rename to ffmap/inputs/alfred.py diff --git a/inputs/batadv/__init__.py b/ffmap/inputs/batadv.py similarity index 100% rename from inputs/batadv/__init__.py rename to ffmap/inputs/batadv.py diff --git a/ffmap/inputs/wiki.py b/ffmap/inputs/wiki.py new file mode 100755 index 0000000..ab36ad5 --- /dev/null +++ b/ffmap/inputs/wiki.py @@ -0,0 +1,71 @@ +import json +import argparse +from itertools import zip_longest +from urllib.request import urlopen +from bs4 import BeautifulSoup + +class Input: + def __init__(self, url="http://luebeck.freifunk.net/wiki/Knoten"): + self.url = url + + def fetch_wikitable(self): + f = urlopen(self.url) + soup = BeautifulSoup(f) + table = soup.find("table") + rows = table.find_all("tr") + headers = [] + data = [] + + def maybe_strip(x): + if isinstance(x.string, str): + return x.string.strip() + else: + return "" + + for row in rows: + tds = list([maybe_strip(x) for x in row.find_all("td")]) + ths = list([maybe_strip(x) for x in row.find_all("th")]) + + if any(tds): + data.append(tds) + + if any(ths): + headers = ths + + return [dict(zip(headers, d)) for d in data] + + def get_data(self, nodedb): + nodes = self.fetch_wikitable() + + for node in nodes: + if "MAC" not in node or not node["MAC"]: + # without MAC, we cannot merge this data with others, so + # we might as well ignore it + continue + + newnode = { + "network": { + "mac": node.get("MAC").lower(), + }, + "location": { + "latitude": float(node.get("GPS", " ").split(" ")[0]), + "longitude": float(node.get("GPS", " ").split(" ")[1]), + "description": node.get("Ort"), + } if " " in node.get("GPS", "") else None, + "hostname": node.get("Knotenname"), + "hardware": { + "model": node["Router"], + } if node.get("Router") else None, + "software": { + "firmware": { + "base": "LFF", + "release": node.get("LFF Version"), + }, + }, + "owner": { + "contact": node["Betreiber"], + } if node.get("Betreiber") else None, + } + # remove keys with None as value + newnode = {k: v for k,v in newnode.items() if v is not None} + nodedb.add_or_update([newnode["network"]["mac"]], newnode) diff --git a/node.py b/ffmap/node.py similarity index 74% rename from node.py rename to ffmap/node.py index 5fa58f6..e2169f2 100644 --- a/node.py +++ b/ffmap/node.py @@ -7,11 +7,20 @@ class NoneDict: even previously inexistent keys can be accessed, but nothing is stored permanently in this class. 
""" - __repr__ = lambda self: 'NoneDict()' - __bool__ = lambda self: False - __getitem__ = lambda self, k: NoneDict() - __json__ = lambda self: None - __float__ = lambda self: float('NaN') + def __repr__(self): + return 'NoneDict()' + def __bool__(self): + return False + def __getitem__(self, k): + return NoneDict() + def __json__(self): + return None + def __float__(self): + return float('NaN') + def __iter__(self): + # empty generator + return + yield def __setitem__(self, key, value): raise RuntimeError("NoneDict is readonly") @@ -39,6 +48,16 @@ class Node(defaultdict): """ return hash(self.id) + def deep_update(self, other): + """Update the dictionary like dict.update() but recursively.""" + def dmerge(a, b): + for k, v in b.items(): + if isinstance(v, dict) and isinstance(a.get(k), dict): + dmerge(a[k], v) + else: + a[k] = v + dmerge(self, other) + @property def vpn_neighbors(self): try: diff --git a/nodedb.py b/ffmap/nodedb.py similarity index 92% rename from nodedb.py rename to ffmap/nodedb.py index a056184..0be76b0 100644 --- a/nodedb.py +++ b/ffmap/nodedb.py @@ -1,6 +1,6 @@ -from node import Node +from .node import Node -class AmbiguityException(Exception): +class AmbiguityError(Exception): """Indicate the ambiguity of identifiers. This exception is raised if there is more than one match for a set @@ -43,7 +43,7 @@ class NodeDB(dict): continue if id_ in self: if node is not None: - raise AmbiguityException([node_id, id_]) + raise AmbiguityError([node_id, id_]) node = self[id_] node_id = id_ @@ -53,7 +53,7 @@ class NodeDB(dict): # Update the node with the given properties using its own update method. if other is not None: - node.update(other) + node.deep_update(other) # Add new aliases if any for id_ in ids: diff --git a/ffmap/outputs/__init__.py b/ffmap/outputs/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/ffmap/outputs/__init__.py @@ -0,0 +1 @@ + diff --git a/outputs/json/__init__.py b/ffmap/outputs/d3json.py similarity index 87% rename from outputs/json/__init__.py rename to ffmap/outputs/d3json.py index f005c38..fd5b267 100644 --- a/outputs/json/__init__.py +++ b/ffmap/outputs/d3json.py @@ -1,11 +1,12 @@ import json +from datetime import datetime __all__ = ["Exporter"] class CustomJSONEncoder(json.JSONEncoder): """ - JSON encoder that uses an object's __json__() method to convert it to - something JSON-compatible. + JSON encoder that uses an object's __json__() method to convert it + to something JSON-compatible. 
""" def default(self, obj): try: @@ -14,7 +15,7 @@ class CustomJSONEncoder(json.JSONEncoder): pass return super().default(obj) -class Exporter: +class Output: def __init__(self, filepath="nodes.json"): self.filepath = filepath @@ -60,9 +61,14 @@ class Exporter: return { "nodes": nodes, "links": links, + "meta": { + "timestamp": datetime.utcnow() + .replace(microsecond=0) + .isoformat() + } } - def export(self, nodedb): + def output(self, nodedb): with open(self.filepath, "w") as nodes_json: json.dump( self.generate(nodedb), diff --git a/outputs/rrd/__init__.py b/ffmap/outputs/rrd.py similarity index 86% rename from outputs/rrd/__init__.py rename to ffmap/outputs/rrd.py index 5e9fbc1..ce450c3 100644 --- a/outputs/rrd/__init__.py +++ b/ffmap/outputs/rrd.py @@ -1,8 +1,7 @@ import os -from .NodeRRD import NodeRRD -from .GlobalRRD import GlobalRRD +from ffmap.rrd.rrds import NodeRRD, GlobalRRD -class Exporter: +class Output: def __init__(self, directory="nodedb"): self.directory = directory try: @@ -10,7 +9,7 @@ class Exporter: except OSError: pass - def export(self, nodedb): + def output(self, nodedb): nodes = set(nodedb.values()) clients = 0 nodecount = 0 diff --git a/outputs/rrd/RRD.py b/ffmap/rrd/__init__.py similarity index 100% rename from outputs/rrd/RRD.py rename to ffmap/rrd/__init__.py diff --git a/outputs/rrd/NodeRRD.py b/ffmap/rrd/rrds.py similarity index 72% rename from outputs/rrd/NodeRRD.py rename to ffmap/rrd/rrds.py index fc8aef1..2155d0c 100644 --- a/outputs/rrd/NodeRRD.py +++ b/ffmap/rrd/rrds.py @@ -1,7 +1,7 @@ import os import subprocess -from node import Node -from .RRD import RRD, DS, RRA +from ffmap.node import Node +from . import RRD, DS, RRA class NodeRRD(RRD): ds_list = [ @@ -81,3 +81,35 @@ class NodeRRD(RRD): 'LINE1:c#00F:clients connected\\l', ] subprocess.check_output(args) + +class GlobalRRD(RRD): + ds_list = [ + # Number of nodes available + DS('nodes', 'GAUGE', 120, 0, float('NaN')), + # Number of client available + DS('clients', 'GAUGE', 120, 0, float('NaN')), + ] + rra_list = [ + RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples + RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples + RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples + ] + + def __init__(self, filepath): + super().__init__(filepath) + self.ensureSanity(self.ds_list, self.rra_list, step=60) + + def update(self, nodeCount, clientCount): + super().update({'nodes': nodeCount, 'clients': clientCount}) + + def graph(self, filename, timeframe): + args = ["rrdtool", 'graph', filename, + '-s', '-' + timeframe, + '-w', '800', + '-h' '400', + 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', + 'LINE1:nodes#F00:nodes\\l', + 'DEF:clients=' + self.filename + ':clients:AVERAGE', + 'LINE2:clients#00F:clients', + ] + subprocess.check_output(args) diff --git a/ffmap/run.py b/ffmap/run.py new file mode 100644 index 0000000..a9e004f --- /dev/null +++ b/ffmap/run.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +import argparse +import sys + +from ffmap import run_names + +class MyAction(argparse.Action): + def __call__(self, parser, namespace, values, option_string=None): + if self.dest.startswith(("input_", "output_")): + collection_name = self.dest.split("_")[0] + "s" + name = self.dest.split("_", 1)[1] + if not hasattr(namespace, collection_name): + setattr(namespace, collection_name, []) + collection = getattr(namespace, collection_name) + collection.append({ + "name": name, + "options": {self.metavar.lower(): values} + if values is not None else {} + }) + else: + raise Exception("Unexpected 
dest=" + self.dest) + +def parser_add_myarg(parser, name, metavar="OPT", help=None): + parser.add_argument("--" + name, + metavar=metavar, + type=str, + nargs='?', + const=None, + action=MyAction, + help=help) + +parser = argparse.ArgumentParser( + description="""Merge node data from multiple sources and generate + various output formats from this data""", +) +input_group = parser.add_argument_group("Inputs", description=""" + Inputs are used in the order given on the command line, where later + inputs can overwrite attributes of earlier inputs if named equally, + but the first input encountering a node sets its id, which is + immutable afterwards. + + The same input can be given multiple times, probably with different + options. +""") +output_group = parser.add_argument_group("Outputs") +parser_add_myarg(input_group, 'input-alfred', metavar="REQUEST_DATA_TYPE", + help="read node details from A.L.F.R.E.D.") +parser_add_myarg(input_group, 'input-wiki', metavar="URL", + help="read node details from a Wiki page") +parser_add_myarg(input_group, 'input-batadv', metavar="MESH_INTERFACE", + help="add node's neighbors and clients from batadv-vis") +parser_add_myarg(output_group, 'output-d3json', metavar="FILEPATH", + help="generate JSON file compatible with ffmap-d3") +parser_add_myarg(output_group, 'output-rrd', metavar="DIRECTORY", + help="update RRDs with statistics, one global and one per node") + +args = parser.parse_args() + +if "inputs" not in args or not args.inputs: + parser.print_help(sys.stderr) + sys.stderr.write("\nERROR: No input has been defined!\n") + sys.exit(1) + +if "outputs" not in args or not args.outputs: + parser.print_help(sys.stderr) + sys.stderr.write("\nERROR: No output has been defined!\n") + sys.exit(1) + +run_names(inputs=args.inputs, outputs=args.outputs) diff --git a/mkmap.sh b/mkmap.sh deleted file mode 100755 index 28195be..0000000 --- a/mkmap.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -set -e - -DEST=$1 -LOCKFILE="/run/lock/ffmap" - -[ "$DEST" ] || exit 1 - -cd "$(dirname "$0")"/ - -if lockfile-check "$LOCKFILE"; then - exit -fi -lockfile-create "$LOCKFILE" -lockfile-touch "$LOCKFILE" & -LOCKPID="$!" - -./bat2nodes.py -A -a aliases.json -d $DEST - -kill "$LOCKPID" -lockfile-remove "$LOCKFILE" - -if lockfile-check "$LOCKFILE-sync"; then - exit -fi -lockfile-create "$LOCKFILE-sync" -lockfile-touch "$LOCKFILE-sync" & -LOCKPID="$!" 
- -kill "$LOCKPID" -lockfile-remove "$LOCKFILE-sync" diff --git a/outputs/rrd/GlobalRRD.py b/outputs/rrd/GlobalRRD.py deleted file mode 100644 index b114418..0000000 --- a/outputs/rrd/GlobalRRD.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import subprocess -from .RRD import RRD, DS, RRA - -class GlobalRRD(RRD): - ds_list = [ - # Number of nodes available - DS('nodes', 'GAUGE', 120, 0, float('NaN')), - # Number of client available - DS('clients', 'GAUGE', 120, 0, float('NaN')), - ] - rra_list = [ - RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples - RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples - RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples - ] - - def __init__(self, filepath): - super().__init__(filepath) - self.ensureSanity(self.ds_list, self.rra_list, step=60) - - def update(self, nodeCount, clientCount): - super().update({'nodes': nodeCount, 'clients': clientCount}) - - def graph(self, filename, timeframe): - args = ["rrdtool", 'graph', filename, - '-s', '-' + timeframe, - '-w', '800', - '-h' '400', - 'DEF:nodes=' + self.filename + ':nodes:AVERAGE', - 'LINE1:nodes#F00:nodes\\l', - 'DEF:clients=' + self.filename + ':clients:AVERAGE', - 'LINE2:clients#00F:clients', - ] - subprocess.check_output(args) diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..4ee3d1f --- /dev/null +++ b/setup.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 + +from distutils.core import setup + +setup(name='FFmap', + version='0.1', + description='Freifunk map backend', + url='https://github.com/ffnord/ffmap-backend', + packages=['ffmap', 'ffmap.inputs', 'ffmap.outputs', 'ffmap.rrd'], + )