From 6fc1423124def6343a0aeb456c2af269e744b18d Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Fri, 21 Feb 2014 15:27:19 +0100
Subject: [PATCH 01/15] Make handling of node attributes more flexible.

This commit turns Nodes into special dicts that return None-like objects
for nonexistent keys, making each Node a dynamic attribute store.

It also removes the D3MapBuilder and moves its logic into the newly
introduced export() methods of the Node and Link classes. Only these
classes now need to be changed to populate the final nodes.json with
more attributes.
---
 alfred.py       | 15 ++++-----------
 bat2nodes.py    |  8 +++++---
 d3mapbuilder.py | 36 ------------------------------------
 ffhlwiki.py     |  2 +-
 json_encoder.py | 13 +++++++++++++
 link.py         | 13 +++++++++++--
 node.py         | 48 +++++++++++++++++++++++++++++++++++++++++++-----
 nodedb.py       | 20 ++++++++------------
 8 files changed, 85 insertions(+), 70 deletions(-)
 delete mode 100644 d3mapbuilder.py
 create mode 100644 json_encoder.py
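
A rough usage sketch of the behaviour this introduces (not part of the
patch; assumes the patched node.py and json_encoder.py are importable):

    import json
    from node import Node
    from json_encoder import CustomJSONEncoder

    n = Node()
    n.name = "demo"               # attribute writes become dict items
    print(n.geo)                  # NoneDict() instead of an AttributeError
    print(n.software['firmware']['release'])  # nested lookups stay safe
    print(json.dumps(n.export(), cls=CustomJSONEncoder))
    # 'geo' and 'firmware' come out as null via NoneDict.__json__()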

diff --git a/alfred.py b/alfred.py
index 6d926bb..b8aa1e2 100755
--- a/alfred.py
+++ b/alfred.py
@@ -12,16 +12,11 @@ class alfred:
     alias = {}
     for mac,node in alfred_data.items():
       node_alias = {}
-      if 'location' in node:
-        try:
-          node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
-        except:
-          pass
+      for key in node:
+        node_alias[key] = node[key]
 
-      try:
-        node_alias['firmware'] = node['software']['firmware']['release']
-      except KeyError:
-        pass
+      if 'location' in node:
+        node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']]
 
       try:
         node_alias['id'] = node['network']['mac']
@@ -30,8 +25,6 @@ class alfred:
 
       if 'hostname' in node:
         node_alias['name'] = node['hostname']
-      elif 'name' in node:
-        node_alias['name'] = node['name']
       if len(node_alias):
         alias[mac] = node_alias
     return alias
diff --git a/bat2nodes.py b/bat2nodes.py
index 921b548..e1fde6e 100755
--- a/bat2nodes.py
+++ b/bat2nodes.py
@@ -4,12 +4,13 @@ import json
 import fileinput
 import argparse
 import os
+import datetime
 
 from batman import batman
 from alfred import alfred
 from rrd import rrd
 from nodedb import NodeDB
-from d3mapbuilder import D3MapBuilder
+from json_encoder import CustomJSONEncoder
 
 # Force encoding to UTF-8
 import locale                                  # Ensures that subsequent open()s
@@ -71,11 +72,12 @@ if options['obscure']:
 
 scriptdir = os.path.dirname(os.path.realpath(__file__))
 
-m = D3MapBuilder(db)
+exported = db.export()
+exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()}
 
 #Write nodes json
 nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
-nodes_json.write(m.build())
+json.dump(exported, nodes_json, cls=CustomJSONEncoder)
 nodes_json.close()
 
 #Move to destination
diff --git a/d3mapbuilder.py b/d3mapbuilder.py
deleted file mode 100644
index ff7589f..0000000
--- a/d3mapbuilder.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import json
-import datetime
-
-class D3MapBuilder:
-  def __init__(self, db):
-    self._db = db
-
-  def build(self):
-    output = dict()
-
-    now = datetime.datetime.utcnow().replace(microsecond=0)
-
-    nodes = self._db.get_nodes()
-
-    output['nodes'] = [{'name': x.name, 'id': x.id,
-                        'macs': ', '.join(x.macs),
-                        'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None,
-                        'firmware': x.firmware,
-                        'flags': x.flags,
-                        'clientcount': x.clientcount
-                       } for x in nodes]
-
-    links = self._db.get_links()
-
-    output['links'] = [{'source': x.source.id, 'target': x.target.id,
-                        'quality': x.quality,
-                        'type': x.type,
-                        'id': x.id
-                       } for x in links]
-
-    output['meta'] = {
-                      'timestamp': now.isoformat()
-                     }
-
-    return json.dumps(output)
-
diff --git a/ffhlwiki.py b/ffhlwiki.py
index c1ba01e..588ae72 100755
--- a/ffhlwiki.py
+++ b/ffhlwiki.py
@@ -71,7 +71,7 @@ def import_wikigps(url):
       mac = data[0].strip()
 
       if data[1]:
-        alias['gps'] = data[1].strip()
+        alias['geo'] = [float(x) for x in data[1].strip().split(' ')]
 
       if data[2]:
         alias['name'] = data[2].strip()
diff --git a/json_encoder.py b/json_encoder.py
new file mode 100644
index 0000000..8d62771
--- /dev/null
+++ b/json_encoder.py
@@ -0,0 +1,13 @@
+from json import JSONEncoder
+
+class CustomJSONEncoder(JSONEncoder):
+  """
+  JSON encoder that uses an object's __json__() method to convert it to
+  something JSON-compatible.
+  """
+  def default(self, obj):
+    try:
+      return obj.__json__()
+    except AttributeError:
+      pass
+    return super().default(obj)
diff --git a/link.py b/link.py
index 896079b..b161608 100644
--- a/link.py
+++ b/link.py
@@ -1,11 +1,20 @@
 class Link():
   def __init__(self):
     self.id = None
-    self.source = None
-    self.target = None
+    self.source = LinkConnector()
+    self.target = LinkConnector()
     self.quality = None
     self.type = None
 
+  def export(self):
+    return {
+      'source': self.source.id,
+      'target': self.target.id,
+      'quality': self.quality,
+      'type': self.type,
+      'id': self.id
+    }
+
 class LinkConnector():
   def __init__(self):
     self.id = None
diff --git a/node.py b/node.py
index 0fe35fb..504768a 100644
--- a/node.py
+++ b/node.py
@@ -1,4 +1,31 @@
-class Node():
+from collections import defaultdict
+
+class NoneDict:
+  """
+  A NoneDict acts like None but returns a NoneDict for every item in it.
+
+  This is similar to the behaviour of collections.defaultdict in that even
+  previously inexistent keys can be accessed, but there is nothing stored
+  permanently.
+  """
+  __repr__ = lambda self: 'NoneDict()'
+  __bool__ = lambda self: False
+  __getitem__ = lambda self, k: NoneDict()
+  __json__ = lambda self: None
+  def __setitem__(self, key, value):
+    raise RuntimeError("NoneDict is readonly")
+
+class casualdict(defaultdict):
+  """
+  This special defaultdict returns a NoneDict for inexistent items. Also, its
+  items can be accessed as attributes as well.
+  """
+  def __init__(self):
+    super().__init__(NoneDict)
+  __getattr__ = defaultdict.__getitem__
+  __setattr__ = defaultdict.__setitem__
+
+class Node(casualdict):
   def __init__(self):
     self.name = ""
     self.id = ""
@@ -9,9 +36,7 @@ class Node():
       "gateway": False,
       "client": False
     })
-    self.gps = None
-    self.firmware = None
-    self.clientcount = 0
+    super().__init__()
 
   def add_mac(self, mac):
     mac = mac.lower()
@@ -25,7 +50,20 @@ class Node():
   def __repr__(self):
     return self.macs.__repr__()
 
+  def export(self):
+    """
+    Return a dict that contains all attributes of the Node that are supposed to
+    be exported to other applications.
+    """
+    return {
+      "name": self.name,
+      "id": self.id,
+      "macs": list(self.macs),
+      "geo": self.geo,
+      "firmware": self.software['firmware']['release'],
+      "flags": self.flags
+    }
+
 class Interface():
   def __init__(self):
     self.vpn = False
-
diff --git a/nodedb.py b/nodedb.py
index fa9caed..e5ff30e 100644
--- a/nodedb.py
+++ b/nodedb.py
@@ -1,4 +1,3 @@
-import json
 from functools import reduce
 from collections import defaultdict
 from node import Node, Interface
@@ -18,6 +17,12 @@ class NodeDB:
   def get_nodes(self):
     return self._nodes
 
+  def export(self):
+    return {
+      'nodes': [node.export() for node in self.get_nodes()],
+      'links': [link.export() for link in self.get_links()],
+    }
+
   def maybe_node_by_fuzzy_mac(self, mac):
     mac_a = mac.lower()
 
@@ -179,21 +184,12 @@ class NodeDB:
           node.add_mac(mac)
           self._nodes.append(node)
 
-      if 'name' in alias:
-        node.name = alias['name']
+      for key in alias:
+        node[key] = alias[key]
 
       if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
         node.interfaces[mac].vpn = True
 
-      if 'gps' in alias:
-        node.gps = alias['gps']
-
-      if 'firmware' in alias:
-        node.firmware = alias['firmware']
-
-      if 'id' in alias:
-        node.id = alias['id']
-
   # list of macs
   # if options['gateway']:
   #   mark_gateways(options['gateway'])

From 43e70191f19f89b65b37c42141aa2c038f122940 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sat, 22 Feb 2014 13:34:14 +0100
Subject: [PATCH 02/15] RRD: Fix updating of DS

---
 RRD.py | 17 +++++------------
 1 file changed, 5 insertions(+), 12 deletions(-)

diff --git a/RRD.py b/RRD.py
index d1ae870..9bb87a0 100644
--- a/RRD.py
+++ b/RRD.py
@@ -80,9 +80,9 @@ class RRD:
             raise FileNotFoundError(self.filename)
         info = self.info()
         if set(ds_list) - set(info['ds'].values()) != set():
-            if set((ds.name, ds.type) for ds in ds_list) \
-             - set((ds.name, ds.type) for ds in info['ds'].values()) != set():
-                raise RRDIncompatibleException()
+            for ds in ds_list:
+                if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type:
+                    raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type))
             else:
                 raise RRDOutdatedException()
 
@@ -177,15 +177,8 @@ class RRD:
                 echo = True
         dump.stdout.close()
         restore.stdin.close()
-        try:
-            dump.wait(1)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-        try:
-            restore.wait(2)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-            raise RuntimeError("rrdtool restore process killed")
+        dump.wait()
+        restore.wait()
 
         os.rename(self.filename + ".new", self.filename)
         self._cached_info = None

From 7075d8481c641725786ecde30b66146115e15225 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sat, 22 Feb 2014 13:35:34 +0100
Subject: [PATCH 03/15] NodeRRD: add many more DS, rrd.py: generate neighbor
 counts

---
 NodeRRD.py | 31 ++++++++++++++++++++++++++++++-
 node.py    |  1 +
 rrd.py     | 10 +++++++++-
 3 files changed, 40 insertions(+), 2 deletions(-)
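
A small sketch of what the new NoneDict.__float__ buys (hypothetical
node, not part of the patch): nodes that have not reported statistics
yet degrade to NaN instead of crashing the RRD update.

    from node import Node

    n = Node()                              # no statistics reported yet
    print(float(n.statistics['loadavg']))   # nan rather than a TypeError
    # int() on a NoneDict still raises TypeError, which NodeRRD.update()
    # catches, so missing traffic counters are simply skipped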

diff --git a/NodeRRD.py b/NodeRRD.py
index f53cad6..0118234 100644
--- a/NodeRRD.py
+++ b/NodeRRD.py
@@ -7,6 +7,19 @@ class NodeRRD(RRD):
     ds_list = [
         DS('upstate', 'GAUGE', 120, 0, 1),
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
+        DS('neighbors', 'GAUGE', 120, 0, float('NaN')),
+        DS('vpn_neighbors', 'GAUGE', 120, 0, float('NaN')),
+        DS('loadavg', 'GAUGE', 120, 0, float('NaN')),
+        DS('rx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('rx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('tx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('tx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_rx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_rx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_tx_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('mgmt_tx_packets', 'DERIVE', 120, 0, float('NaN')),
+        DS('forward_bytes', 'DERIVE', 120, 0, float('NaN')),
+        DS('forward_packets', 'DERIVE', 120, 0, float('NaN')),
     ]
     rra_list = [
         RRA('AVERAGE', 0.5, 1, 120),    #  2 hours of  1 minute samples
@@ -30,7 +43,23 @@ class NodeRRD(RRD):
         return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"
 
     def update(self):
-        super().update({'upstate': 1, 'clients': self.node.clients})
+        values = {
+            'upstate': 1,
+            'clients': float(self.node.clients),
+            'neighbors': float(self.node.neighbors),
+            'vpn_neighbors': float(self.node.vpn_neighbors),
+            'loadavg': float(self.node.statistics['loadavg']),
+        }
+        for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'):
+            try:
+                values['%s_bytes' % item] = int(self.node.statistics['traffic'][item]['bytes'])
+            except TypeError:
+                pass
+            try:
+                values['%s_packets' % item] = int(self.node.statistics['traffic'][item]['packets'])
+            except TypeError:
+                pass
+        super().update(values)
 
     def graph(self, directory, timeframe):
         """
diff --git a/node.py b/node.py
index 504768a..83531b2 100644
--- a/node.py
+++ b/node.py
@@ -12,6 +12,7 @@ class NoneDict:
   __bool__ = lambda self: False
   __getitem__ = lambda self, k: NoneDict()
   __json__ = lambda self: None
+  __float__ = lambda self: float('NaN')
   def __setitem__(self, key, value):
     raise RuntimeError("NoneDict is readonly")
 
diff --git a/rrd.py b/rrd.py
index 5c3330d..dad78c5 100755
--- a/rrd.py
+++ b/rrd.py
@@ -33,7 +33,9 @@ class rrd:
       if node.flags['online']:
         if not node.flags['client']:
           nodes[node.id] = node
-          node.clients = 0;
+          node.clients = 0
+          node.neighbors = 0
+          node.vpn_neighbors = 0
           if 'legacy' in node.flags and node.flags['legacy']:
             clientCount -= 1
         else:
@@ -45,6 +47,12 @@ class rrd:
         nodes[source].clients += 1
       elif target in nodes and not source in nodes:
         nodes[target].clients += 1
+      elif source in nodes and target in nodes:
+        nodes[source].neighbors += 1
+        nodes[target].neighbors += 1
+        if link.type == 'vpn':
+          nodes[target].vpn_neighbors += 1
+          nodes[source].vpn_neighbors += 1
 
     self.globalDb.update(len(nodes), clientCount)
     for node in nodes.values():

From 89e4c6370050a012bc5ce09a9a1c798f2656e527 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Wed, 19 Mar 2014 23:26:28 +0100
Subject: [PATCH 04/15] alfred.py: Make geo attribute setting more robust

---
 alfred.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/alfred.py b/alfred.py
index b8aa1e2..06ee1f7 100755
--- a/alfred.py
+++ b/alfred.py
@@ -15,8 +15,10 @@ class alfred:
       for key in node:
         node_alias[key] = node[key]
 
-      if 'location' in node:
+      try:
         node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']]
+      except (TypeError, KeyError):
+        pass
 
       try:
         node_alias['id'] = node['network']['mac']

From ee515476645dd48812f0b4fee1acf7a3ee8b21f3 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sun, 6 Jul 2014 20:04:24 +0200
Subject: [PATCH 05/15] =?UTF-8?q?mkmap.sh:=20Remove=20L=C3=BCbeck-specific?=
 =?UTF-8?q?=20stuff?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 mkmap.sh | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/mkmap.sh b/mkmap.sh
index ce3b162..1c6453f 100755
--- a/mkmap.sh
+++ b/mkmap.sh
@@ -9,7 +9,4 @@ DEST=$1
 
 cd "$(dirname "$0")"/
 
-./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json
-./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json
-
-./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST
+./bat2nodes.py -A -a aliases.json -d $DEST

From 54402ce08906df0e7675766a84b788986f2bfd92 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sun, 6 Jul 2014 20:07:49 +0200
Subject: [PATCH 06/15] mkmap.sh: Add locking around script call

---
 mkmap.sh | 22 +++++++++++++++++++++-
 1 file changed, 21 insertions(+), 1 deletion(-)

diff --git a/mkmap.sh b/mkmap.sh
index 1c6453f..28195be 100755
--- a/mkmap.sh
+++ b/mkmap.sh
@@ -3,10 +3,30 @@
 set -e
 
 DEST=$1
-
+LOCKFILE="/run/lock/ffmap"
 
 [ "$DEST" ] || exit 1
 
 cd "$(dirname "$0")"/
 
+if lockfile-check "$LOCKFILE"; then
+    exit
+fi
+lockfile-create "$LOCKFILE"
+lockfile-touch "$LOCKFILE" &
+LOCKPID="$!"
+
 ./bat2nodes.py -A -a aliases.json -d $DEST
+
+kill "$LOCKPID"
+lockfile-remove "$LOCKFILE"
+
+if lockfile-check "$LOCKFILE-sync"; then
+    exit
+fi
+lockfile-create "$LOCKFILE-sync"
+lockfile-touch "$LOCKFILE-sync" &
+LOCKPID="$!"
+
+kill "$LOCKPID"
+lockfile-remove "$LOCKFILE-sync"

From f5e3705eec4888ec0d40f98333df3d447f1f842f Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Mon, 7 Jul 2014 23:27:21 +0200
Subject: [PATCH 07/15] Began rewrite with more modular design

---
 alfred.py                                |  37 --
 batman.py                                |  86 -----
 hostid.py                                |  13 -
 inputs/alfred/__init__.py                |  18 +
 inputs/batadv/__init__.py                | 100 +++++
 json_encoder.py                          |  13 -
 link.py                                  |  24 --
 node.py                                  | 128 +++----
 nodedb.py                                | 441 +++--------------------
 outputs/json/__init__.py                 |  71 ++++
 GlobalRRD.py => outputs/rrd/GlobalRRD.py |   6 +-
 NodeRRD.py => outputs/rrd/NodeRRD.py     |  14 +-
 RRD.py => outputs/rrd/RRD.py             |   0
 outputs/rrd/__init__.py                  |  31 ++
 rrd.py                                   |  80 ----
 15 files changed, 354 insertions(+), 708 deletions(-)
 delete mode 100755 alfred.py
 delete mode 100755 batman.py
 delete mode 100644 hostid.py
 create mode 100644 inputs/alfred/__init__.py
 create mode 100644 inputs/batadv/__init__.py
 delete mode 100644 json_encoder.py
 delete mode 100644 link.py
 create mode 100644 outputs/json/__init__.py
 rename GlobalRRD.py => outputs/rrd/GlobalRRD.py (89%)
 rename NodeRRD.py => outputs/rrd/NodeRRD.py (85%)
 rename RRD.py => outputs/rrd/RRD.py (100%)
 create mode 100644 outputs/rrd/__init__.py
 delete mode 100755 rrd.py
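
A rough sketch of how the new pieces are meant to plug together (a
run.py driver only arrives in a later patch; module paths as created
below):

    from nodedb import NodeDB
    import inputs.alfred
    import inputs.batadv
    import outputs.json
    import outputs.rrd

    db = NodeDB()
    for inp in (inputs.alfred.Input(), inputs.batadv.Input()):
        inp.get_data(db)                  # inputs only fill the NodeDB

    outputs.json.Exporter("nodes.json").export(db)
    outputs.rrd.Exporter("nodedb").export(db)   # optional RRD statistics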

diff --git a/alfred.py b/alfred.py
deleted file mode 100755
index 06ee1f7..0000000
--- a/alfred.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import json
-
-class alfred:
-  def __init__(self,request_data_type = 158):
-    self.request_data_type = request_data_type
-
-  def aliases(self):
-    output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"])
-    alfred_data = json.loads(output.decode("utf-8"))
-    alias = {}
-    for mac,node in alfred_data.items():
-      node_alias = {}
-      for key in node:
-        node_alias[key] = node[key]
-
-      try:
-        node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']]
-      except (TypeError, KeyError):
-        pass
-
-      try:
-        node_alias['id'] = node['network']['mac']
-      except KeyError:
-        pass
-
-      if 'hostname' in node:
-        node_alias['name'] = node['hostname']
-      if len(node_alias):
-        alias[mac] = node_alias
-    return alias
-
-if __name__ == "__main__":
-  ad = alfred()
-  al = ad.aliases()
-  print(al)
diff --git a/batman.py b/batman.py
deleted file mode 100755
index c9b3db6..0000000
--- a/batman.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import json
-import re
-
-class batman:
-  """ Bindings for B.A.T.M.A.N. advanced batctl tool
-  """
-  def __init__(self, mesh_interface = "bat0"):
-    self.mesh_interface = mesh_interface
-
-  def vis_data(self,batadv_vis=False):
-    vds = self.vis_data_batctl_legacy()
-    if batadv_vis:
-        vds += self.vis_data_batadv_vis()
-    return vds
-
-  def vis_data_helper(self,lines):
-    vd = []
-    for line in lines:
-      try:
-        utf8_line = line.decode("utf-8")
-        vd.append(json.loads(utf8_line))
-      except e:
-        pass
-    return vd
-
-  def vis_data_batctl_legacy(self):
-    """ Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
-    """
-    output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
-    lines = output.splitlines()
-    vds = self.vis_data_helper(lines)
-    for vd in vds:
-      vd['legacy'] = True
-    return vds
-
-  def vis_data_batadv_vis(self):
-    """ Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
-    """
-    output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
-    lines = output.splitlines()
-    return self.vis_data_helper(lines)
-
-  def gateway_list(self):
-    """ Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
-    """
-    output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
-    output_utf8 = output.decode("utf-8")
-    # TODO Parse information
-    lines = output_utf8.splitlines()
-    own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1)
-    # Remove header line
-    del lines[0]
-    # Fill gateway list
-    gw = []
-    gw_mode = self.gateway_mode()
-    if gw_mode['mode'] == 'server':
-      gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']})
-    for line in lines:
-      gw_line = line.split()
-      if (gw_line[0] == 'No'):
-        continue
-      # When in client gateway mode maybe gw_line[0] is not the right.
-      gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]})
-    return gw
-
-  def gateway_mode(self):
-    """ Parse "batctl -m <mesh_interface> gw"
-    """
-    output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
-    elements = output.decode("utf-8").split()
-    mode = elements[0]
-    if mode == "server":
-        return {'mode': 'server', 'bandwidth': elements[3]}
-    else:
-        return {'mode': mode}
-
-if __name__ == "__main__":
-  bc = batman()
-  vd = bc.vis_data()
-  gw = bc.gateway_list()
-  for x in vd:
-    print(x)
-  print(gw)
-  print(bc.gateway_mode())
diff --git a/hostid.py b/hostid.py
deleted file mode 100644
index 2b4038e..0000000
--- a/hostid.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import re
-from functools import reduce
-
-def mac_to_hostid(mac):
-  int_mac = list(map(lambda x: int(x, 16), mac.split(":")))
-  int_mac[0] ^= 2
-  bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:])
-  return reduce(lambda a, i:
-                  [a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:],
-                range(0, 4),
-                [""] + list(bytes)
-               )
-
diff --git a/inputs/alfred/__init__.py b/inputs/alfred/__init__.py
new file mode 100644
index 0000000..6c0f66e
--- /dev/null
+++ b/inputs/alfred/__init__.py
@@ -0,0 +1,18 @@
+import subprocess
+import json
+
+class Input:
+    def __init__(self,request_data_type = 158):
+        self.request_data_type = request_data_type
+
+    def get_data(self, nodedb):
+        """Add data from alfred to the supplied nodedb"""
+        output = subprocess.check_output([
+            "alfred-json",
+            "-r", str(self.request_data_type),
+            "-f", "json",
+        ])
+        alfred_data = json.loads(output.decode("utf-8"))
+
+        for mac, node in alfred_data.items():
+            nodedb.add_or_update([mac], node)
diff --git a/inputs/batadv/__init__.py b/inputs/batadv/__init__.py
new file mode 100644
index 0000000..576b09a
--- /dev/null
+++ b/inputs/batadv/__init__.py
@@ -0,0 +1,100 @@
+import subprocess
+import json
+
+class Input:
+    """Fill the NodeDB with links from batadv-vis.
+
+    The links are added as lists containing the neighboring nodes, not
+    only their identifiers!  Mind this when exporting the database, as
+    it probably leads to recursion.
+    """
+    def __init__(self, mesh_interface="bat0"):
+        self.mesh_interface = mesh_interface
+
+    @staticmethod
+    def _is_similar_mac(a, b):
+        """Determine if two MAC addresses are similar."""
+        if a == b:
+            return True
+
+        # Split the address into bytes
+        try:
+            mac_a = list(int(i, 16) for i in a.split(":"))
+            mac_b = list(int(i, 16) for i in b.split(":"))
+        except ValueError:
+            return False
+
+        # Second and third bytes mustn't differ
+        if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]:
+            return False
+
+        # First byte must only differ in bit 2
+        if mac_a[0] | 2 != mac_b[0] | 2:
+            return False
+
+        # Count differing bytes after the third
+        c = [x for x in zip(mac_a[3:], mac_b[3:]) if x[0] != x[1]]
+
+        # No more than two additional bytes must differ
+        if len(c) > 2:
+            return False
+
+        # If no more bytes differ, they are very similar
+        if len(c) == 0:
+            return True
+
+        # If the sum of absolute differences is less than 2, they
+        # are pretty similar
+        delta = sum(abs(i[0] - i[1]) for i in c)
+        return delta < 2
+
+    def get_data(self, nodedb):
+        """Add data from batadv-vis to the supplied nodedb"""
+        output = subprocess.check_output([
+            "batadv-vis",
+            "-i", str(self.mesh_interface),
+            "-f", "jsondoc",
+        ])
+        data = json.loads(output.decode("utf-8"))
+
+        # First pass
+        for node in data["vis"]:
+            # Determine possible other MAC addresses of this node by
+            # comparing all its client's MAC addresses to its primary
+            # MAC address.  If they are similar, it probably is another
+            # address of the node itself!  If it isn't, it is a real
+            # client.
+            node['aliases'] = [node["primary"]]
+            if 'secondary' in node:
+                node['aliases'].extend(node['secondary'])
+            real_clients = []
+            for mac in node["clients"]:
+                if self._is_similar_mac(mac, node["primary"]):
+                    node['aliases'].append(mac)
+                else:
+                    real_clients.append(mac)
+            node['clients'] = real_clients
+
+            # Add nodes and aliases without any information at first.
+            # This way, we can later link the objects themselves.
+            nodedb.add_or_update(node['aliases'])
+
+        # Second pass
+        for node in data["vis"]:
+            # We only need the primary address now, all aliases are
+            # already present in the database.  Furthermore, we can be
+            # sure that all neighbors are in the database as well.  If
+            # a neighbor isn't added already, we simply ignore it.
+            nodedb.add_or_update(
+                [node["primary"]],
+                {
+                    "clients": node["clients"],
+                    "neighbors": [
+                        {
+                            "metric": neighbor['metric'],
+                            "neighbor": nodedb[neighbor['neighbor']],
+                        } for neighbor in node["neighbors"]
+                          if neighbor['neighbor'] in nodedb
+                    ]
+                }
+            )
diff --git a/json_encoder.py b/json_encoder.py
deleted file mode 100644
index 8d62771..0000000
--- a/json_encoder.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from json import JSONEncoder
-
-class CustomJSONEncoder(JSONEncoder):
-  """
-  JSON encoder that uses an object's __json__() method to convert it to
-  something JSON-compatible.
-  """
-  def default(self, obj):
-    try:
-      return obj.__json__()
-    except AttributeError:
-      pass
-    return super().default(obj)
diff --git a/link.py b/link.py
deleted file mode 100644
index b161608..0000000
--- a/link.py
+++ /dev/null
@@ -1,24 +0,0 @@
-class Link():
-  def __init__(self):
-    self.id = None
-    self.source = LinkConnector()
-    self.target = LinkConnector()
-    self.quality = None
-    self.type = None
-
-  def export(self):
-    return {
-      'source': self.source.id,
-      'target': self.target.id,
-      'quality': self.quality,
-      'type': self.type,
-      'id': self.id
-    }
-
-class LinkConnector():
-  def __init__(self):
-    self.id = None
-    self.interface = None
-
-  def __repr__(self):
-    return "LinkConnector(%d, %s)" % (self.id, self.interface)
diff --git a/node.py b/node.py
index 83531b2..5fa58f6 100644
--- a/node.py
+++ b/node.py
@@ -1,70 +1,70 @@
 from collections import defaultdict
 
 class NoneDict:
-  """
-  A NoneDict acts like None but returns a NoneDict for every item in it.
+    """Act like None but return a NoneDict for every item request.
 
-  This is similar to the behaviour of collections.defaultdict in that even
-  previously inexistent keys can be accessed, but there is nothing stored
-  permanently.
-  """
-  __repr__ = lambda self: 'NoneDict()'
-  __bool__ = lambda self: False
-  __getitem__ = lambda self, k: NoneDict()
-  __json__ = lambda self: None
-  __float__ = lambda self: float('NaN')
-  def __setitem__(self, key, value):
-    raise RuntimeError("NoneDict is readonly")
-
-class casualdict(defaultdict):
-  """
-  This special defaultdict returns a NoneDict for inexistent items. Also, its
-  items can be accessed as attributes as well.
-  """
-  def __init__(self):
-    super().__init__(NoneDict)
-  __getattr__ = defaultdict.__getitem__
-  __setattr__ = defaultdict.__setitem__
-
-class Node(casualdict):
-  def __init__(self):
-    self.name = ""
-    self.id = ""
-    self.macs = set()
-    self.interfaces = dict()
-    self.flags = dict({
-      "online": False,
-      "gateway": False,
-      "client": False
-    })
-    super().__init__()
-
-  def add_mac(self, mac):
-    mac = mac.lower()
-    if len(self.macs) == 0:
-      self.id = mac
-
-    self.macs.add(mac)
-
-    self.interfaces[mac] = Interface()
-
-  def __repr__(self):
-    return self.macs.__repr__()
-
-  def export(self):
+    This is similar to the behaviour of collections.defaultdict in that
+    even previously inexistent keys can be accessed, but nothing is
+    stored permanently in this class.
     """
-    Return a dict that contains all attributes of the Node that are supposed to
-    be exported to other applications.
-    """
-    return {
-      "name": self.name,
-      "id": self.id,
-      "macs": list(self.macs),
-      "geo": self.geo,
-      "firmware": self.software['firmware']['release'],
-      "flags": self.flags
-    }
+    __repr__ = lambda self: 'NoneDict()'
+    __bool__ = lambda self: False
+    __getitem__ = lambda self, k: NoneDict()
+    __json__ = lambda self: None
+    __float__ = lambda self: float('NaN')
+    def __setitem__(self, key, value):
+        raise RuntimeError("NoneDict is readonly")
 
-class Interface():
-  def __init__(self):
-    self.vpn = False
+class Node(defaultdict):
+    _id = None
+    def __init__(self, id_=None):
+        self._id = id_
+        super().__init__(NoneDict)
+
+    def __repr__(self):
+        return "Node(%s)" % self.id
+
+    @property
+    def id(self):
+        return self._id
+
+    def __hash__(self):
+        """Generate hash from the node's id.
+
+        WARNING: Obviously this hash doesn't cover all of the node's
+        data, but we need nodes to be hashable in order to eliminate
+        duplicates in the NodeDB.
+
+        At least the id cannot change after initialization...
+        """
+        return hash(self.id)
+
+    @property
+    def vpn_neighbors(self):
+        try:
+            vpn_neighbors = []
+            for neighbor in self['neighbors']:
+                if neighbor['neighbor']['vpn']:
+                    vpn_neighbors.append(neighbor)
+            return vpn_neighbors
+        except TypeError:
+            return []
+
+    def export(self):
+        """Generate a serializable dict of the node.
+
+        In particular, this replaces any references to other nodes by
+        their id to prevent circular references.
+        """
+        ret = dict(self)
+        if "neighbors" in self:
+            ret["neighbors"] = []
+            for neighbor in self["neighbors"]:
+                new_neighbor = {}
+                for key, val in neighbor.items():
+                    if isinstance(val, Node):
+                        new_neighbor[key] = val.id
+                    else:
+                        new_neighbor[key] = val
+                ret["neighbors"].append(new_neighbor)
+        return ret
diff --git a/nodedb.py b/nodedb.py
index e5ff30e..a056184 100644
--- a/nodedb.py
+++ b/nodedb.py
@@ -1,381 +1,60 @@
-from functools import reduce
-from collections import defaultdict
-from node import Node, Interface
-from link import Link, LinkConnector
-
-class NodeDB:
-  def __init__(self):
-    self._nodes = []
-    self._links = []
-
-  # fetch list of links
-  def get_links(self):
-    self.update_vpn_links()
-    return self.reduce_links()
-
-  # fetch list of nodes
-  def get_nodes(self):
-    return self._nodes
-
-  def export(self):
-    return {
-      'nodes': [node.export() for node in self.get_nodes()],
-      'links': [link.export() for link in self.get_links()],
-    }
-
-  def maybe_node_by_fuzzy_mac(self, mac):
-    mac_a = mac.lower()
-
-    for node in self._nodes:
-      for mac_b in node.macs:
-        if is_derived_mac(mac_a, mac_b):
-          return node
-
-    raise KeyError
-
-  def maybe_node_by_mac(self, macs):
-    for node in self._nodes:
-      for mac in macs:
-        if mac.lower() in node.macs:
-          return node
-
-    raise KeyError
-
-  def maybe_node_by_id(self, mac):
-    for node in self._nodes:
-      if mac.lower() == node.id:
-        return node
-
-    raise KeyError
-
-  def parse_vis_data(self,vis_data):
-    for x in vis_data:
-
-      if 'of' in x:
-        try:
-          node = self.maybe_node_by_mac((x['of'], x['secondary']))
-        except:
-          node = Node()
-          node.flags['online'] = True
-          if 'legacy' in x:
-            node.flags['legacy'] = True
-          self._nodes.append(node)
-
-        node.add_mac(x['of'])
-        node.add_mac(x['secondary'])
-
-    for x in vis_data:
-
-      if 'router' in x:
-        try:
-          node = self.maybe_node_by_mac((x['router'], ))
-        except:
-          node = Node()
-          node.flags['online'] = True
-          if 'legacy' in x:
-            node.flags['legacy'] = True
-          node.add_mac(x['router'])
-          self._nodes.append(node)
-
-        # If it's a TT link and the MAC is very similar
-        # consider this MAC as one of the routers
-        # MACs
-        if 'gateway' in x and x['label'] == "TT":
-          if is_similar(x['router'], x['gateway']):
-            node.add_mac(x['gateway'])
-
-            # skip processing as regular link
-            continue
-
-        try:
-          if 'neighbor' in x:
-            try:
-              node = self.maybe_node_by_mac((x['neighbor']))
-            except:
-              continue
-
-          if 'gateway' in x:
-            x['neighbor'] = x['gateway']
-
-          node = self.maybe_node_by_mac((x['neighbor'], ))
-        except:
-          node = Node()
-          node.flags['online'] = True
-          if x['label'] == 'TT':
-            node.flags['client'] = True
-
-          node.add_mac(x['neighbor'])
-          self._nodes.append(node)
-
-    for x in vis_data:
-
-      if 'router' in x:
-        try:
-          if 'gateway' in x:
-            x['neighbor'] = x['gateway']
-
-          router = self.maybe_node_by_mac((x['router'], ))
-          neighbor = self.maybe_node_by_mac((x['neighbor'], ))
-        except:
-          continue
-
-        # filter TT links merged in previous step
-        if router == neighbor:
-          continue
-
-        link = Link()
-        link.source = LinkConnector()
-        link.source.interface = x['router']
-        link.source.id = self._nodes.index(router)
-        link.target = LinkConnector()
-        link.target.interface = x['neighbor']
-        link.target.id = self._nodes.index(neighbor)
-        link.quality = x['label']
-        link.id = "-".join(sorted((link.source.interface, link.target.interface)))
-
-        if x['label'] == "TT":
-          link.type = "client"
-
-        self._links.append(link)
-
-    for x in vis_data:
-
-      if 'primary' in x:
-        try:
-          node = self.maybe_node_by_mac((x['primary'], ))
-        except:
-          continue
-
-        node.id = x['primary']
-
-  def reduce_links(self):
-    tmp_links = defaultdict(list)
-
-    for link in self._links:
-      tmp_links[link.id].append(link)
-
-    links = []
-
-    def reduce_link(a, b):
-      a.id = b.id
-      a.source = b.source
-      a.target = b.target
-      a.type = b.type
-      a.quality = ", ".join([x for x in (a.quality, b.quality) if x])
-
-      return a
-
-    for k, v in tmp_links.items():
-      new_link = reduce(reduce_link, v, Link())
-      links.append(new_link)
-
-    return links
-
-  def import_aliases(self, aliases):
-    for mac, alias in aliases.items():
-      try:
-        node = self.maybe_node_by_mac([mac])
-      except:
-        try:
-          node = self.maybe_node_by_fuzzy_mac(mac)
-        except:
-          # create an offline node
-          node = Node()
-          node.add_mac(mac)
-          self._nodes.append(node)
-
-      for key in alias:
-        node[key] = alias[key]
-
-      if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
-        node.interfaces[mac].vpn = True
-
-  # list of macs
-  # if options['gateway']:
-  #   mark_gateways(options['gateway'])
-  def mark_gateways(self, gateways):
-    for gateway in gateways:
-      try:
-        node = self.maybe_node_by_mac((gateway, ))
-      except:
-        print("WARNING: did not find gateway '",gateway,"' in node list")
-        continue
-
-      node.flags['gateway'] = True
-
-  def update_vpn_links(self):
-    changes = 1
-    while changes > 0:
-      changes = 0
-      for link in self._links:
-        if link.type == "client":
-          continue
-
-        source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
-        target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
-        if source_interface.vpn or target_interface.vpn:
-          source_interface.vpn = True
-          target_interface.vpn = True
-          if link.type != "vpn":
-            changes += 1
-
-          link.type = "vpn"
-
-  def count_clients(self):
-    for link in self._links:
-      try:
-        a = self.maybe_node_by_id(link.source.interface)
-        b = self.maybe_node_by_id(link.target.interface)
-
-        if a.flags['client']:
-          client = a
-          node = b
-        elif b.flags['client']:
-          client = b
-          node = a
-        else:
-          continue
-
-        node.clientcount += 1
-      except:
-        pass
-
-  def obscure_clients(self):
-
-    globalIdCounter = 0
-    nodeCounters = {}
-    clientIds = {}
-
-    for node in self._nodes:
-      if node.flags['client']:
-        node.macs = set()
-        clientIds[node.id] = None
-
-    for link in self._links:
-      ids = link.source.interface
-      idt = link.target.interface
-
-      try:
-        node_source = self.maybe_node_by_fuzzy_mac(ids)
-        node_target = self.maybe_node_by_id(idt)
-
-        if not node_source.flags['client'] and not node_target.flags['client']:
-          # if none of the nodes associated with this link are clients,
-          # we do not want to obscure
-          continue
-
-        if ids in clientIds and idt in clientIds:
-          # This is for corner cases, when a client
-          # is linked to another client.
-          clientIds[ids] = str(globalIdCounter)
-          ids = str(globalIdCounter)
-          globalIdCounter += 1
-
-          clientIds[idt] = str(globalIdCounter)
-          idt = str(globalIdCounter)
-          globalIdCounter += 1
-
-        elif ids in clientIds:
-          newId = generateId(idt)
-          clientIds[ids] = newId
-          ids = newId
-
-          link.source.interface = ids;
-          node_source.id = ids;
-
-        elif idt in clientIds:
-          newId = generateId(ids,nodeCounters)
-          clientIds[idt] = newId
-          idt = newId
-
-          link.target.interface = idt;
-          node_target.id = idt;
-
-        link.id = ids + "-" + idt
-
-      except KeyError:
-        pass
-
-# extends node id by incremented node counter
-def generateId(nodeId,nodeCounters):
-  if nodeId in nodeCounters:
-    n = nodeCounters[nodeId]
-    nodeCounters[nodeId] = n + 1
-  else:
-    nodeCounters[nodeId] = 1
-    n = 0
-
-  return nodeId + "_" + str(n)
-
-# compares two MACs and decides whether they are
-# similar and could be from the same node
-def is_similar(a, b):
-  if a == b:
-    return True
-
-  try:
-    mac_a = list(int(i, 16) for i in a.split(":"))
-    mac_b = list(int(i, 16) for i in b.split(":"))
-  except ValueError:
-    return False
-
-  # first byte must only differ in bit 2
-  if mac_a[0] | 2 == mac_b[0] | 2:
-    # count different bytes
-    c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
-  else:
-    return False
-
-  # no more than two additional bytes must differ
-  if len(c) <= 2:
-    delta = 0
-
-  if len(c) > 0:
-    delta = sum(abs(i[0] -i[1]) for i in c)
-
-  # These addresses look pretty similar!
-  return delta < 8
-
-def is_derived_mac(a, b):
-  if a == b:
-    return True
-
-  try:
-    mac_a = list(int(i, 16) for i in a.split(":"))
-    mac_b = list(int(i, 16) for i in b.split(":"))
-  except ValueError:
-    return False
-
-  if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]:
-    return False
-
-  x = list(mac_a)
-  x[5] += 1
-  x[5] %= 255
-  if mac_b == x:
-    return True
-
-  x[0] |= 2
-  if mac_b == x:
-    return True
-
-  x[3] += 1
-  x[3] %= 255
-  if mac_b == x:
-    return True
-
-  x = list(mac_a)
-  x[0] |= 2
-  x[5] += 2
-  x[5] %= 255
-  if mac_b == x:
-    return True
-
-  x = list(mac_a)
-  x[0] |= 2
-  x[3] += 1
-  x[3] %= 255
-  if mac_b == x:
-    return True
-
-  return False
+from node import Node
+
+class AmbiguityException(Exception):
+    """Indicate the ambiguity of identifiers.
+
+    This exception is raised if there is more than one match for a set
+    of identifiers.
+
+    Attributes:
+    identifiers -- set of ambiguous identifiers
+    """
+
+    identifiers = []
+
+    def __init__(self, identifiers):
+        self.identifiers = identifiers
+
+    def __str__(self):
+        return "Ambiguous identifiers: %s" % ", ".join(self.identifiers)
+
+class NodeDB(dict):
+    def add_or_update(self, ids, other=None):
+        """Add or update a node in the database.
+
+        Searches for an already existing node and updates it, or adds a new
+        one if no existing one is found.  Raises an AmbiguityException if
+        more than one distinct node is found matching the criteria.
+
+        Arguments:
+        ids -- list of possible identifiers (probably MAC addresses) of the
+               node
+        other -- dict of values to update in an existing node or add to
+                 the new one.  Defaults to None, in which case no values
+                 are added or updated, only the aliases of the
+                 (possibly freshly created) node are updated.
+        """
+
+        # Find existing node, if any
+        node = None
+        node_id = None
+        for id_ in ids:
+            if id_ == node_id:
+                continue
+            if id_ in self:
+                if node is not None:
+                    raise AmbiguityException([node_id, id_])
+                node = self[id_]
+                node_id = id_
+
+        # If no node was found, create a new one
+        if node is None:
+            node = Node(ids[0])
+
+        # Update the node with the given properties using its own update method.
+        if other is not None:
+            node.update(other)
+
+        # Add new aliases if any
+        for id_ in ids:
+            self[id_] = node
diff --git a/outputs/json/__init__.py b/outputs/json/__init__.py
new file mode 100644
index 0000000..f005c38
--- /dev/null
+++ b/outputs/json/__init__.py
@@ -0,0 +1,71 @@
+import json
+
+__all__ = ["Exporter"]
+
+class CustomJSONEncoder(json.JSONEncoder):
+    """
+    JSON encoder that uses an object's __json__() method to convert it to
+    something JSON-compatible.
+    """
+    def default(self, obj):
+        try:
+            return obj.__json__()
+        except AttributeError:
+            pass
+        return super().default(obj)
+
+class Exporter:
+    def __init__(self, filepath="nodes.json"):
+        self.filepath = filepath
+
+    @staticmethod
+    def generate(nodedb):
+        indexes = {}
+        nodes = []
+        count = 0
+        for node in set(nodedb.values()):
+            nodes.append(node.export())
+            indexes[node.id] = count
+            count += 1
+
+        links = []
+        for node in set(nodedb.values()):
+            if "neighbors" in node:
+                links.extend(
+                    {
+                        "source": indexes[node.id],
+                        "target": indexes[neighbor["neighbor"].id],
+                        "quality": neighbor["metric"],
+                        "type": "vpn" if neighbor["neighbor"]["vpn"] else None,
+                        "id": "-".join((node.id, neighbor["neighbor"].id)),
+                    } for neighbor in node["neighbors"]
+                )
+            if "clients" in node:
+                for client in node["clients"]:
+                    if not client in indexes:
+                        nodes.append({
+                            "id": client,
+                        })
+                        indexes[client] = count
+                        count += 1
+
+                    links.append({
+                        "source": indexes[node.id],
+                        "target": indexes[client],
+                        "quality": "TT",
+                        "type": "client",
+                        "id": "-".join((node.id, client)),
+                    })
+
+        return {
+            "nodes": nodes,
+            "links": links,
+        }
+
+    def export(self, nodedb):
+        with open(self.filepath, "w") as nodes_json:
+            json.dump(
+                self.generate(nodedb),
+                nodes_json,
+                cls=CustomJSONEncoder
+            )
diff --git a/GlobalRRD.py b/outputs/rrd/GlobalRRD.py
similarity index 89%
rename from GlobalRRD.py
rename to outputs/rrd/GlobalRRD.py
index f3f3960..b114418 100644
--- a/GlobalRRD.py
+++ b/outputs/rrd/GlobalRRD.py
@@ -1,6 +1,6 @@
 import os
 import subprocess
-from RRD import RRD, DS, RRA
+from .RRD import RRD, DS, RRA
 
 class GlobalRRD(RRD):
     ds_list = [
@@ -15,8 +15,8 @@ class GlobalRRD(RRD):
         RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day    samples
     ]
 
-    def __init__(self, directory):
-        super().__init__(os.path.join(directory, "nodes.rrd"))
+    def __init__(self, filepath):
+        super().__init__(filepath)
         self.ensureSanity(self.ds_list, self.rra_list, step=60)
 
     def update(self, nodeCount, clientCount):
diff --git a/NodeRRD.py b/outputs/rrd/NodeRRD.py
similarity index 85%
rename from NodeRRD.py
rename to outputs/rrd/NodeRRD.py
index 0118234..fc8aef1 100644
--- a/NodeRRD.py
+++ b/outputs/rrd/NodeRRD.py
@@ -1,7 +1,7 @@
 import os
 import subprocess
 from node import Node
-from RRD import RRD, DS, RRA
+from .RRD import RRD, DS, RRA
 
 class NodeRRD(RRD):
     ds_list = [
@@ -45,18 +45,18 @@ class NodeRRD(RRD):
     def update(self):
         values = {
             'upstate': 1,
-            'clients': float(self.node.clients),
-            'neighbors': float(self.node.neighbors),
-            'vpn_neighbors': float(self.node.vpn_neighbors),
-            'loadavg': float(self.node.statistics['loadavg']),
+            'clients': float(len(self.node.get('clients', []))),
+            'neighbors': float(len(self.node.get('neighbors', []))),
+            'vpn_neighbors': float(len(self.node.vpn_neighbors)),
+            'loadavg': float(self.node['statistics']['loadavg']),
         }
         for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'):
             try:
-                values['%s_bytes' % item] = int(self.node.statistics['traffic'][item]['bytes'])
+                values[item + '_bytes'] = int(self.node['statistics']['traffic'][item]['bytes'])
             except TypeError:
                 pass
             try:
-                values['%s_packets' % item] = int(self.node.statistics['traffic'][item]['packets'])
+                values[item + '_packets'] = int(self.node['statistics']['traffic'][item]['packets'])
             except TypeError:
                 pass
         super().update(values)
diff --git a/RRD.py b/outputs/rrd/RRD.py
similarity index 100%
rename from RRD.py
rename to outputs/rrd/RRD.py
diff --git a/outputs/rrd/__init__.py b/outputs/rrd/__init__.py
new file mode 100644
index 0000000..5e9fbc1
--- /dev/null
+++ b/outputs/rrd/__init__.py
@@ -0,0 +1,31 @@
+import os
+from .NodeRRD import NodeRRD
+from .GlobalRRD import GlobalRRD
+
+class Exporter:
+    def __init__(self, directory="nodedb"):
+        self.directory = directory
+        try:
+            os.mkdir(self.directory)
+        except OSError:
+            pass
+
+    def export(self, nodedb):
+        nodes = set(nodedb.values())
+        clients = 0
+        nodecount = 0
+        for node in nodes:
+            clients += len(node.get("clients", []))
+            nodecount += 1
+            NodeRRD(
+                os.path.join(
+                    self.directory,
+                    str(node.id).replace(':', '') + '.rrd'
+                ),
+                node
+            ).update()
+
+        GlobalRRD(os.path.join(self.directory, "nodes.rrd")).update(
+            nodecount,
+            clients
+        )
diff --git a/rrd.py b/rrd.py
deleted file mode 100755
index dad78c5..0000000
--- a/rrd.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import time
-import os
-from GlobalRRD import GlobalRRD
-from NodeRRD import NodeRRD
-
-class rrd:
-  def __init__( self
-              , databaseDirectory
-              , imagePath
-              , displayTimeGlobal = "7d"
-              , displayTimeNode = "1d"
-              ):
-    self.dbPath = databaseDirectory
-    self.globalDb = GlobalRRD(self.dbPath)
-    self.imagePath = imagePath
-    self.displayTimeGlobal = displayTimeGlobal
-    self.displayTimeNode = displayTimeNode
-
-    self.currentTimeInt = (int(time.time())/60)*60
-    self.currentTime    = str(self.currentTimeInt)
-
-    try:
-      os.stat(self.imagePath)
-    except:
-      os.mkdir(self.imagePath)
-
-  def update_database(self,db):
-    nodes = {}
-    clientCount = 0
-    for node in db.get_nodes():
-      if node.flags['online']:
-        if not node.flags['client']:
-          nodes[node.id] = node
-          node.clients = 0
-          node.neighbors = 0
-          node.vpn_neighbors = 0
-          if 'legacy' in node.flags and node.flags['legacy']:
-            clientCount -= 1
-        else:
-          clientCount += 1
-    for link in db.get_links():
-      source = link.source.interface
-      target = link.target.interface
-      if source in nodes and not target in nodes:
-        nodes[source].clients += 1
-      elif target in nodes and not source in nodes:
-        nodes[target].clients += 1
-      elif source in nodes and target in nodes:
-        nodes[source].neighbors += 1
-        nodes[target].neighbors += 1
-        if link.type == 'vpn':
-          nodes[target].vpn_neighbors += 1
-          nodes[source].vpn_neighbors += 1
-
-    self.globalDb.update(len(nodes), clientCount)
-    for node in nodes.values():
-      rrd = NodeRRD(
-        os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'),
-        node
-      )
-      rrd.update()
-
-  def update_images(self):
-    """ Creates an image for every rrd file in the database directory.
-    """
-
-    self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)
-
-    nodeDbFiles = os.listdir(self.dbPath)
-
-    for fileName in nodeDbFiles:
-      if not os.path.isfile(os.path.join(self.dbPath, fileName)):
-        continue
-
-      nodeName = os.path.basename(fileName).split('.')
-      if nodeName[1] == 'rrd' and not nodeName[0] == "nodes":
-        rrd = NodeRRD(os.path.join(self.dbPath, fileName))
-        rrd.graph(self.imagePath, self.displayTimeNode)

From e54e7467fc3c7cab4189498450f8c672348c130f Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Tue, 8 Jul 2014 14:20:47 +0200
Subject: [PATCH 08/15] Create package ffmap, add wiki input, remove old code

---
 .gitignore                                    |  2 -
 README.md                                     |  8 +-
 aliases.json_sample                           |  9 --
 bat2nodes.py                                  | 88 ------------------
 ffhlwiki.py                                   | 93 -------------------
 ffmap/__init__.py                             | 42 +++++++++
 nodedb/.gitkeep => ffmap/inputs/__init__.py   |  0
 .../__init__.py => ffmap/inputs/alfred.py     |  0
 .../__init__.py => ffmap/inputs/batadv.py     |  0
 ffmap/inputs/wiki.py                          | 71 ++++++++++++++
 node.py => ffmap/node.py                      | 29 +++++-
 nodedb.py => ffmap/nodedb.py                  |  8 +-
 ffmap/outputs/__init__.py                     |  1 +
 .../__init__.py => ffmap/outputs/d3json.py    | 14 ++-
 .../rrd/__init__.py => ffmap/outputs/rrd.py   |  7 +-
 outputs/rrd/RRD.py => ffmap/rrd/__init__.py   |  0
 outputs/rrd/NodeRRD.py => ffmap/rrd/rrds.py   | 36 ++++++-
 ffmap/run.py                                  | 69 ++++++++++++++
 mkmap.sh                                      | 32 -------
 outputs/rrd/GlobalRRD.py                      | 35 -------
 setup.py                                      | 10 ++
 21 files changed, 272 insertions(+), 282 deletions(-)
 delete mode 100644 aliases.json_sample
 delete mode 100755 bat2nodes.py
 delete mode 100755 ffhlwiki.py
 create mode 100644 ffmap/__init__.py
 rename nodedb/.gitkeep => ffmap/inputs/__init__.py (100%)
 rename inputs/alfred/__init__.py => ffmap/inputs/alfred.py (100%)
 rename inputs/batadv/__init__.py => ffmap/inputs/batadv.py (100%)
 create mode 100755 ffmap/inputs/wiki.py
 rename node.py => ffmap/node.py (74%)
 rename nodedb.py => ffmap/nodedb.py (92%)
 create mode 100644 ffmap/outputs/__init__.py
 rename outputs/json/__init__.py => ffmap/outputs/d3json.py (87%)
 rename outputs/rrd/__init__.py => ffmap/outputs/rrd.py (86%)
 rename outputs/rrd/RRD.py => ffmap/rrd/__init__.py (100%)
 rename outputs/rrd/NodeRRD.py => ffmap/rrd/rrds.py (72%)
 create mode 100644 ffmap/run.py
 delete mode 100755 mkmap.sh
 delete mode 100644 outputs/rrd/GlobalRRD.py
 create mode 100644 setup.py
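
Programmatic use after this reshuffle should look roughly as follows,
assuming the Input/Exporter interfaces survive the renames unchanged
(ffmap.run presumably wraps the same steps behind the command line
options shown in the README below):

    from ffmap.nodedb import NodeDB
    from ffmap.inputs import alfred, batadv
    from ffmap.outputs import d3json

    db = NodeDB()
    for inp in (alfred.Input(), batadv.Input()):
        inp.get_data(db)

    d3json.Exporter("nodes.json").export(db)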

diff --git a/.gitignore b/.gitignore
index 0f42dec..0d20b64 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1 @@
 *.pyc
-aliases.json
-nodedb/
diff --git a/README.md b/README.md
index fc718fe..8e4abd8 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Data for Freifunk Map, Graph and Node List
 
-ffmap-backend gathers information on the batman network by invoking 
+ffmap-backend gathers information on the batman network by invoking
    batctl
 and
    batadv-vis
@@ -41,13 +41,13 @@ Alias /map /home/ffmap/www/
 Alias /firmware /home/freifunk/autoupdates/
 </pre>
 
-To execute, run 
- ./mkmap.sh ../www
+To execute, run
+ python3 -mffmap.run --input-alfred --input-batadv --output-d3json ../www/nodes.json
 The script expects above described sudo-wrappers in the $HOME directory of the user executing
 the script. If those are not available, an error will occurr if not executed as root. Also,
 the tool realpath optionally allows to execute the script from anywhere in the directory tree.
 
 For the script's regular execution add the following to the crontab:
 <pre>
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+*/5 * * * * python3 -mffmap.run --input-alfred --input-batadv --output-d3json /home/ffmap/www/nodes.json
 </pre>
diff --git a/aliases.json_sample b/aliases.json_sample
deleted file mode 100644
index 1f3ca33..0000000
--- a/aliases.json_sample
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-   "b0:48:7a:e7:d3:64" : {
-      "name" : "Meute-AP"
-   },
-   "8e:3d:c2:10:10:28" : {
-      "name" : "holstentor",
-      "vpn" : true
-   }
-}
diff --git a/bat2nodes.py b/bat2nodes.py
deleted file mode 100755
index e1fde6e..0000000
--- a/bat2nodes.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python3
-
-import json
-import fileinput
-import argparse
-import os
-import datetime
-
-from batman import batman
-from alfred import alfred
-from rrd import rrd
-from nodedb import NodeDB
-from json_encoder import CustomJSONEncoder
-
-# Force encoding to UTF-8
-import locale                                  # Ensures that subsequent open()s
-locale.getpreferredencoding = lambda _=None: 'UTF-8'  # are UTF-8 encoded.
-
-import sys
-#sys.stdin = open('/dev/stdin', 'r')
-#sys.stdout = open('/dev/stdout', 'w')
-#sys.stderr = open('/dev/stderr', 'w')
-
-parser = argparse.ArgumentParser()
-
-parser.add_argument('-a', '--aliases',
-                  help='read aliases from FILE',
-                  action='append',
-                  metavar='FILE')
-
-parser.add_argument('-m', '--mesh', action='append',
-                  help='batman mesh interface')
-
-parser.add_argument('-o', '--obscure', action='store_true',
-                  help='obscure client macs')
-
-parser.add_argument('-A', '--alfred', action='store_true',
-                  help='retrieve aliases from alfred')
-
-parser.add_argument('-d', '--destination-directory', action='store',
-                  help='destination directory for generated files',required=True)
-
-args = parser.parse_args()
-
-options = vars(args)
-
-db = NodeDB()
-if options['mesh']:
-  for mesh_interface in options['mesh']:
-    bm = batman(mesh_interface)
-    db.parse_vis_data(bm.vis_data(options['alfred']))
-    for gw in bm.gateway_list():
-      db.mark_gateways(gw['mac'])
-else:
-  bm = batman()
-  db.parse_vis_data(bm.vis_data(options['alfred']))
-  for gw in bm.gateway_list():
-    db.mark_gateways([gw['mac']])
-
-if options['aliases']:
-  for aliases in options['aliases']:
-    db.import_aliases(json.load(open(aliases)))
-
-if options['alfred']:
-  af = alfred()
-  db.import_aliases(af.aliases())
-
-db.count_clients()
-
-if options['obscure']:
-  db.obscure_clients()
-
-scriptdir = os.path.dirname(os.path.realpath(__file__))
-
-exported = db.export()
-exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()}
-
-#Write nodes json
-nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
-json.dump(exported, nodes_json, cls=CustomJSONEncoder)
-nodes_json.close()
-
-#Move to destination
-os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')
-
-rrd = rrd(scriptdir +  "/nodedb/", options['destination_directory'] + "/nodes")
-rrd.update_database(db)
-rrd.update_images()
diff --git a/ffhlwiki.py b/ffhlwiki.py
deleted file mode 100755
index 588ae72..0000000
--- a/ffhlwiki.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-
-import json
-import argparse
-from itertools import zip_longest
-from urllib.request import urlopen
-from bs4 import BeautifulSoup
-
-def import_wikigps(url):
-  def fetch_wikitable(url):
-    f = urlopen(url)
-
-    soup = BeautifulSoup(f)
-
-    table = soup.find_all("table")[0]
-
-    rows = table.find_all("tr")
-
-    headers = []
-
-    data = []
-
-    def maybe_strip(x):
-      if isinstance(x.string, str):
-        return x.string.strip()
-      else:
-        return ""
-
-    for row in rows:
-      tds = list([maybe_strip(x) for x in row.find_all("td")])
-      ths = list([maybe_strip(x) for x in row.find_all("th")])
-
-      if any(tds):
-        data.append(tds)
-
-      if any(ths):
-        headers = ths
-
-    nodes = []
-
-    for d in data:
-      nodes.append(dict(zip(headers, d)))
-
-    return nodes
-
-  nodes = fetch_wikitable(url)
-
-  aliases = {}
-
-  for node in nodes:
-    try:
-      node['MAC'] = node['MAC'].split(',')
-    except KeyError:
-      pass
-
-    try:
-      node['GPS'] = node['GPS'].split(',')
-    except KeyError:
-      pass
-
-    try:
-      node['Knotenname'] = node['Knotenname'].split(',')
-    except KeyError:
-      pass
-
-    nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
-
-    for data in nodes:
-      alias = {}
-
-      mac = data[0].strip()
-
-      if data[1]:
-        alias['geo'] = [float(x) for x in data[1].strip().split(' ')]
-
-      if data[2]:
-        alias['name'] = data[2].strip()
-
-      aliases[mac] = alias
-
-  return aliases
-
-parser = argparse.ArgumentParser()
-
-parser.add_argument('url', help='wiki URL')
-
-args = parser.parse_args()
-
-options = vars(args)
-
-aliases = import_wikigps(options['url'])
-
-print(json.dumps(aliases))
diff --git a/ffmap/__init__.py b/ffmap/__init__.py
new file mode 100644
index 0000000..9542acc
--- /dev/null
+++ b/ffmap/__init__.py
@@ -0,0 +1,42 @@
+import importlib
+
+from ffmap.nodedb import NodeDB
+
+def run(inputs, outputs):
+    """Fill the database with given inputs and give it to given outputs.
+
+    Arguments:
+    inputs -- list of Input instances (with a compatible get_data(nodedb) method)
+    outputs -- list of Output instances (with a compatible output(nodedb) method)
+    """
+    db = NodeDB()
+    for input_ in inputs:
+        input_.get_data(db)
+
+    for output in outputs:
+        output.output(db)
+
+def run_names(inputs, outputs):
+    """Fill the database with inputs and give it to outputs, each given
+    by name.
+
+    In contrast to run(inputs, outputs), this method expects only the
+    names of the modules to use, not instances thereof.
+    Arguments:
+    inputs -- list of dicts, each dict having the key "name" with the
+              name of the input module to use (in ffmap.inputs), and
+              the key "options" with a dict of input-dependent options.
+    outputs -- list of dicts, see inputs.
+    """
+    input_instances = []
+    output_instances = []
+
+    for input_ in inputs:
+        module = importlib.import_module(".inputs." + input_["name"], "ffmap")
+        input_instances.append(module.Input(**input_["options"]))
+
+    for output in outputs:
+        module = importlib.import_module(".outputs." + output["name"], "ffmap")
+        output_instances.append(module.Output(**output["options"]))
+
+    run(input_instances, output_instances)
diff --git a/nodedb/.gitkeep b/ffmap/inputs/__init__.py
similarity index 100%
rename from nodedb/.gitkeep
rename to ffmap/inputs/__init__.py
diff --git a/inputs/alfred/__init__.py b/ffmap/inputs/alfred.py
similarity index 100%
rename from inputs/alfred/__init__.py
rename to ffmap/inputs/alfred.py
diff --git a/inputs/batadv/__init__.py b/ffmap/inputs/batadv.py
similarity index 100%
rename from inputs/batadv/__init__.py
rename to ffmap/inputs/batadv.py
diff --git a/ffmap/inputs/wiki.py b/ffmap/inputs/wiki.py
new file mode 100755
index 0000000..ab36ad5
--- /dev/null
+++ b/ffmap/inputs/wiki.py
@@ -0,0 +1,71 @@
+import json
+import argparse
+from itertools import zip_longest
+from urllib.request import urlopen
+from bs4 import BeautifulSoup
+
+class Input:
+    def __init__(self, url="http://luebeck.freifunk.net/wiki/Knoten"):
+        self.url = url
+
+    def fetch_wikitable(self):
+        f = urlopen(self.url)
+        soup = BeautifulSoup(f)
+        table = soup.find("table")
+        rows = table.find_all("tr")
+        headers = []
+        data = []
+
+        def maybe_strip(x):
+            if isinstance(x.string, str):
+                return x.string.strip()
+            else:
+                return ""
+
+        for row in rows:
+            tds = list([maybe_strip(x) for x in row.find_all("td")])
+            ths = list([maybe_strip(x) for x in row.find_all("th")])
+
+            if any(tds):
+                data.append(tds)
+
+            if any(ths):
+                headers = ths
+
+        return [dict(zip(headers, d)) for d in data]
+
+    def get_data(self, nodedb):
+        nodes = self.fetch_wikitable()
+
+        for node in nodes:
+            if "MAC" not in node or not node["MAC"]:
+                # without MAC, we cannot merge this data with others, so
+                # we might as well ignore it
+                continue
+
+            newnode = {
+                "network": {
+                    "mac": node.get("MAC").lower(),
+                },
+                "location": {
+                    "latitude": float(node.get("GPS", " ").split(" ")[0]),
+                    "longitude": float(node.get("GPS", " ").split(" ")[1]),
+                    "description": node.get("Ort"),
+                } if " " in node.get("GPS", "") else None,
+                "hostname": node.get("Knotenname"),
+                "hardware": {
+                    "model": node["Router"],
+                } if node.get("Router") else None,
+                "software": {
+                    "firmware": {
+                        "base": "LFF",
+                        "release": node.get("LFF Version"),
+                    },
+                },
+                "owner": {
+                    "contact": node["Betreiber"],
+                } if node.get("Betreiber") else None,
+            }
+            # remove keys with None as value
+            newnode = {k: v for k,v in newnode.items() if v is not None}
+            nodedb.add_or_update([newnode["network"]["mac"]], newnode)
diff --git a/node.py b/ffmap/node.py
similarity index 74%
rename from node.py
rename to ffmap/node.py
index 5fa58f6..e2169f2 100644
--- a/node.py
+++ b/ffmap/node.py
@@ -7,11 +7,20 @@ class NoneDict:
     even previously inexistent keys can be accessed, but nothing is
     stored permanently in this class.
     """
-    __repr__ = lambda self: 'NoneDict()'
-    __bool__ = lambda self: False
-    __getitem__ = lambda self, k: NoneDict()
-    __json__ = lambda self: None
-    __float__ = lambda self: float('NaN')
+    def __repr__(self):
+        return 'NoneDict()'
+    def __bool__(self):
+        return False
+    def __getitem__(self, k):
+        return NoneDict()
+    def __json__(self):
+        return None
+    def __float__(self):
+        return float('NaN')
+    def __iter__(self):
+        # empty generator
+        return
+        yield
     def __setitem__(self, key, value):
         raise RuntimeError("NoneDict is readonly")
 
@@ -39,6 +48,16 @@ class Node(defaultdict):
         """
         return hash(self.id)
 
+    def deep_update(self, other):
+        """Update the dictionary like dict.update() but recursively."""
+        def dmerge(a, b):
+            for k, v in b.items():
+                if isinstance(v, dict) and isinstance(a.get(k), dict):
+                    dmerge(a[k], v)
+                else:
+                    a[k] = v
+        dmerge(self, other)
+
     @property
     def vpn_neighbors(self):
         try:
diff --git a/nodedb.py b/ffmap/nodedb.py
similarity index 92%
rename from nodedb.py
rename to ffmap/nodedb.py
index a056184..0be76b0 100644
--- a/nodedb.py
+++ b/ffmap/nodedb.py
@@ -1,6 +1,6 @@
-from node import Node
+from .node import Node
 
-class AmbiguityException(Exception):
+class AmbiguityError(Exception):
     """Indicate the ambiguity of identifiers.
 
     This exception is raised if there is more than one match for a set
@@ -43,7 +43,7 @@ class NodeDB(dict):
                 continue
             if id_ in self:
                 if node is not None:
-                    raise AmbiguityException([node_id, id_])
+                    raise AmbiguityError([node_id, id_])
                 node = self[id_]
                 node_id = id_
 
@@ -53,7 +53,7 @@ class NodeDB(dict):
 
         # Update the node with the given properties using its own update method.
         if other is not None:
-            node.update(other)
+            node.deep_update(other)
 
         # Add new aliases if any
         for id_ in ids:
diff --git a/ffmap/outputs/__init__.py b/ffmap/outputs/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/ffmap/outputs/__init__.py
@@ -0,0 +1 @@
+
diff --git a/outputs/json/__init__.py b/ffmap/outputs/d3json.py
similarity index 87%
rename from outputs/json/__init__.py
rename to ffmap/outputs/d3json.py
index f005c38..fd5b267 100644
--- a/outputs/json/__init__.py
+++ b/ffmap/outputs/d3json.py
@@ -1,11 +1,12 @@
 import json
+from datetime import datetime
 
 __all__ = ["Exporter"]
 
 class CustomJSONEncoder(json.JSONEncoder):
     """
-    JSON encoder that uses an object's __json__() method to convert it to
-    something JSON-compatible.
+    JSON encoder that uses an object's __json__() method to convert it
+    to something JSON-compatible.
     """
     def default(self, obj):
         try:
@@ -14,7 +15,7 @@ class CustomJSONEncoder(json.JSONEncoder):
             pass
         return super().default(obj)
 
-class Exporter:
+class Output:
     def __init__(self, filepath="nodes.json"):
         self.filepath = filepath
 
@@ -60,9 +61,14 @@ class Exporter:
         return {
             "nodes": nodes,
             "links": links,
+            "meta": {
+                "timestamp": datetime.utcnow()
+                                     .replace(microsecond=0)
+                                     .isoformat()
+            }
         }
 
-    def export(self, nodedb):
+    def output(self, nodedb):
         with open(self.filepath, "w") as nodes_json:
             json.dump(
                 self.generate(nodedb),
diff --git a/outputs/rrd/__init__.py b/ffmap/outputs/rrd.py
similarity index 86%
rename from outputs/rrd/__init__.py
rename to ffmap/outputs/rrd.py
index 5e9fbc1..ce450c3 100644
--- a/outputs/rrd/__init__.py
+++ b/ffmap/outputs/rrd.py
@@ -1,8 +1,7 @@
 import os
-from .NodeRRD import NodeRRD
-from .GlobalRRD import GlobalRRD
+from ffmap.rrd.rrds import NodeRRD, GlobalRRD
 
-class Exporter:
+class Output:
     def __init__(self, directory="nodedb"):
         self.directory = directory
         try:
@@ -10,7 +9,7 @@ class Exporter:
         except OSError:
             pass
 
-    def export(self, nodedb):
+    def output(self, nodedb):
         nodes = set(nodedb.values())
         clients = 0
         nodecount = 0
diff --git a/outputs/rrd/RRD.py b/ffmap/rrd/__init__.py
similarity index 100%
rename from outputs/rrd/RRD.py
rename to ffmap/rrd/__init__.py
diff --git a/outputs/rrd/NodeRRD.py b/ffmap/rrd/rrds.py
similarity index 72%
rename from outputs/rrd/NodeRRD.py
rename to ffmap/rrd/rrds.py
index fc8aef1..2155d0c 100644
--- a/outputs/rrd/NodeRRD.py
+++ b/ffmap/rrd/rrds.py
@@ -1,7 +1,7 @@
 import os
 import subprocess
-from node import Node
-from .RRD import RRD, DS, RRA
+from ffmap.node import Node
+from . import RRD, DS, RRA
 
 class NodeRRD(RRD):
     ds_list = [
@@ -81,3 +81,35 @@ class NodeRRD(RRD):
                 'LINE1:c#00F:clients connected\\l',
                 ]
         subprocess.check_output(args)
+
+class GlobalRRD(RRD):
+    ds_list = [
+        # Number of nodes available
+        DS('nodes', 'GAUGE', 120, 0, float('NaN')),
+        # Number of clients available
+        DS('clients', 'GAUGE', 120, 0, float('NaN')),
+    ]
+    rra_list = [
+        RRA('AVERAGE', 0.5, 1, 120),    #  2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 60, 744),   # 31 days  of 1 hour   samples
+        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day    samples
+    ]
+
+    def __init__(self, filepath):
+        super().__init__(filepath)
+        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+
+    def update(self, nodeCount, clientCount):
+        super().update({'nodes': nodeCount, 'clients': clientCount})
+
+    def graph(self, filename, timeframe):
+        args = ["rrdtool", 'graph', filename,
+                '-s', '-' + timeframe,
+                '-w', '800',
+                '-h', '400',
+                'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
+                'LINE1:nodes#F00:nodes\\l',
+                'DEF:clients=' + self.filename + ':clients:AVERAGE',
+                'LINE2:clients#00F:clients',
+        ]
+        subprocess.check_output(args)
diff --git a/ffmap/run.py b/ffmap/run.py
new file mode 100644
index 0000000..a9e004f
--- /dev/null
+++ b/ffmap/run.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+import argparse
+import sys
+
+from ffmap import run_names
+
+class MyAction(argparse.Action):
+    def __call__(self, parser, namespace, values, option_string=None):
+        if self.dest.startswith(("input_", "output_")):
+            collection_name = self.dest.split("_")[0] + "s"
+            name = self.dest.split("_", 1)[1]
+            if not hasattr(namespace, collection_name):
+                setattr(namespace, collection_name, [])
+            collection = getattr(namespace, collection_name)
+            collection.append({
+                "name": name,
+                "options": {self.metavar.lower(): values}
+                           if values is not None else {}
+            })
+        else:
+            raise Exception("Unexpected dest=" + self.dest)
+
+def parser_add_myarg(parser, name, metavar="OPT", help=None):
+    parser.add_argument("--" + name,
+                        metavar=metavar,
+                        type=str,
+                        nargs='?',
+                        const=None,
+                        action=MyAction,
+                        help=help)
+
+parser = argparse.ArgumentParser(
+    description="""Merge node data from multiple sources and generate
+                   various output formats from this data""",
+)
+input_group = parser.add_argument_group("Inputs", description="""
+    Inputs are used in the order given on the command line, where later
+    inputs can overwrite attributes of earlier inputs if named equally,
+    but the first input encountering a node sets its id, which is
+    immutable afterwards.
+
+    The same input can be given multiple times, for example with
+    different options.
+""")
+output_group = parser.add_argument_group("Outputs")
+parser_add_myarg(input_group, 'input-alfred', metavar="REQUEST_DATA_TYPE",
+                 help="read node details from A.L.F.R.E.D.")
+parser_add_myarg(input_group, 'input-wiki', metavar="URL",
+                 help="read node details from a Wiki page")
+parser_add_myarg(input_group, 'input-batadv', metavar="MESH_INTERFACE",
+                 help="add node's neighbors and clients from batadv-vis")
+parser_add_myarg(output_group, 'output-d3json', metavar="FILEPATH",
+                 help="generate JSON file compatible with ffmap-d3")
+parser_add_myarg(output_group, 'output-rrd', metavar="DIRECTORY",
+                 help="update RRDs with statistics, one global and one per node")
+
+args = parser.parse_args()
+
+if "inputs" not in args or not args.inputs:
+    parser.print_help(sys.stderr)
+    sys.stderr.write("\nERROR: No input has been defined!\n")
+    sys.exit(1)
+
+if "outputs" not in args or not args.outputs:
+    parser.print_help(sys.stderr)
+    sys.stderr.write("\nERROR: No output has been defined!\n")
+    sys.exit(1)
+
+run_names(inputs=args.inputs, outputs=args.outputs)
diff --git a/mkmap.sh b/mkmap.sh
deleted file mode 100755
index 28195be..0000000
--- a/mkmap.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-set -e
-
-DEST=$1
-LOCKFILE="/run/lock/ffmap"
-
-[ "$DEST" ] || exit 1
-
-cd "$(dirname "$0")"/
-
-if lockfile-check "$LOCKFILE"; then
-    exit
-fi
-lockfile-create "$LOCKFILE"
-lockfile-touch "$LOCKFILE" &
-LOCKPID="$!"
-
-./bat2nodes.py -A -a aliases.json -d $DEST
-
-kill "$LOCKPID"
-lockfile-remove "$LOCKFILE"
-
-if lockfile-check "$LOCKFILE-sync"; then
-    exit
-fi
-lockfile-create "$LOCKFILE-sync"
-lockfile-touch "$LOCKFILE-sync" &
-LOCKPID="$!"
-
-kill "$LOCKPID"
-lockfile-remove "$LOCKFILE-sync"
diff --git a/outputs/rrd/GlobalRRD.py b/outputs/rrd/GlobalRRD.py
deleted file mode 100644
index b114418..0000000
--- a/outputs/rrd/GlobalRRD.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import os
-import subprocess
-from .RRD import RRD, DS, RRA
-
-class GlobalRRD(RRD):
-    ds_list = [
-        # Number of nodes available
-        DS('nodes', 'GAUGE', 120, 0, float('NaN')),
-        # Number of client available
-        DS('clients', 'GAUGE', 120, 0, float('NaN')),
-    ]
-    rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),    #  2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 60, 744),   # 31 days  of 1 hour   samples
-        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day    samples
-    ]
-
-    def __init__(self, filepath):
-        super().__init__(filepath)
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
-
-    def update(self, nodeCount, clientCount):
-        super().update({'nodes': nodeCount, 'clients': clientCount})
-
-    def graph(self, filename, timeframe):
-        args = ["rrdtool", 'graph', filename,
-                '-s', '-' + timeframe,
-                '-w', '800',
-                '-h' '400',
-                'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
-                'LINE1:nodes#F00:nodes\\l',
-                'DEF:clients=' + self.filename + ':clients:AVERAGE',
-                'LINE2:clients#00F:clients',
-        ]
-        subprocess.check_output(args)
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..4ee3d1f
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python3
+
+from distutils.core import setup
+
+setup(name='FFmap',
+      version='0.1',
+      description='Freifunk map backend',
+      url='https://github.com/ffnord/ffmap-backend',
+      packages=['ffmap', 'ffmap.inputs', 'ffmap.outputs', 'ffmap.rrd'],
+     )

From 446bc984039816bc6f6e6a5a202c9306e68c954c Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Thu, 31 Jul 2014 11:41:26 +0200
Subject: [PATCH 09/15] input alfred: unify nodeinfo and stats datatypes

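alfred-json is now queried twice: once for the nodeinfo record
(request_data_type, 158 by default) and once for the statistics record
(request_data_type+1, i.e. 159 with the default). The statistics are
then merged into the nodeinfo dict under a 'statistics' key, so the
rest of the pipeline keeps seeing a single dict per MAC. Roughly (the
MAC and field names below are only illustrative):

    nodeinfo["00:11:22:33:44:55"] = {
        "hostname": "some-node",                  # from the nodeinfo record
        "network": {"mac": "00:11:22:33:44:55"},
        "statistics": {"uptime": 12345.6},        # merged from the statistics record
    }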
---
 ffmap/inputs/alfred.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py
index 6c0f66e..340c0d9 100644
--- a/ffmap/inputs/alfred.py
+++ b/ffmap/inputs/alfred.py
@@ -7,12 +7,26 @@ class Input:
 
     def get_data(self, nodedb):
         """Add data from alfred to the supplied nodedb"""
+        # get nodeinfo
         output = subprocess.check_output([
             "alfred-json",
             "-r", str(self.request_data_type),
             "-f", "json",
         ])
-        alfred_data = json.loads(output.decode("utf-8"))
+        nodeinfo = json.loads(output.decode("utf-8"))
 
-        for mac, node in alfred_data.items():
+        # get statistics
+        output = subprocess.check_output([
+            "alfred-json",
+            "-r", str(self.request_data_type+1),
+            "-f", "json",
+        ])
+        statistics = json.loads(output.decode("utf-8"))
+
+        # merge statistics into nodeinfo to be compatible with earlier versions
+        for mac, node in statistics.items():
+            if mac in nodeinfo:
+                nodeinfo[mac]['statistics'] = statistics[mac]
+
+        for mac, node in nodeinfo.items():
             nodedb.add_or_update([mac], node)

From 5fba69de7adf5e5d9fda211f0dd972e9153da899 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Thu, 31 Jul 2014 16:31:14 +0200
Subject: [PATCH 10/15] d3json: make output more similar to pre-rewrite version

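Two changes bring the JSON closer to the old D3MapBuilder output: every
exported node gets a "flags" dict (gateway/client/online), and links
are collected in a dict keyed by the (neighbor id, node id) pair, so
the two directions of a link collapse into a single entry whose metrics
are joined with a comma. A merged link entry then looks roughly like
this (indexes, metrics and ids below are made up):

    link = {
        "source": 3,
        "target": 7,
        "quality": "1.004,1.016",   # metric of both directions
        "type": None,               # or "vpn"
        "id": "aa:bb:cc:dd:ee:01-aa:bb:cc:dd:ee:02",
    }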
---
 ffmap/node.py           |  2 ++
 ffmap/outputs/d3json.py | 61 +++++++++++++++++++++++++----------------
 2 files changed, 39 insertions(+), 24 deletions(-)

diff --git a/ffmap/node.py b/ffmap/node.py
index e2169f2..b89dd19 100644
--- a/ffmap/node.py
+++ b/ffmap/node.py
@@ -86,4 +86,6 @@ class Node(defaultdict):
                     else:
                         new_neighbor[key] = val
                 ret["neighbors"].append(new_neighbor)
+        if "id" not in ret:
+            ret["id"] = self.id
         return ret
diff --git a/ffmap/outputs/d3json.py b/ffmap/outputs/d3json.py
index fd5b267..06e82fb 100644
--- a/ffmap/outputs/d3json.py
+++ b/ffmap/outputs/d3json.py
@@ -25,42 +25,55 @@ class Output:
         nodes = []
         count = 0
         for node in set(nodedb.values()):
-            nodes.append(node.export())
+            node_export = node.export()
+            node_export["flags"] = {
+                "gateway": "vpn" in node and node["vpn"],
+                "client": False,
+                "online": True
+            }
+            nodes.append(node_export)
             indexes[node.id] = count
             count += 1
 
-        links = []
+        links = {}
         for node in set(nodedb.values()):
-            if "neighbors" in node:
-                links.extend(
-                    {
+            for neighbor in node.get("neighbors", []):
+                key = (neighbor["neighbor"].id, node.id)
+                rkey = tuple(reversed(key))
+                if rkey in links:
+                    links[rkey]["quality"] += ","+neighbor["metric"]
+                else:
+                    links[key] = {
                         "source": indexes[node.id],
                         "target": indexes[neighbor["neighbor"].id],
                         "quality": neighbor["metric"],
-                        "type": "vpn" if neighbor["neighbor"]["vpn"] else None,
+                        "type": "vpn" if neighbor["neighbor"]["vpn"] or node["vpn"] else None,
                         "id": "-".join((node.id, neighbor["neighbor"].id)),
-                    } for neighbor in node["neighbors"]
-                )
-            if "clients" in node:
-                for client in node["clients"]:
-                    if not client in indexes:
-                        nodes.append({
-                            "id": client,
-                        })
-                        indexes[client] = count
-                        count += 1
-
-                    links.append({
-                        "source": indexes[node.id],
-                        "target": indexes[client],
-                        "quality": "TT",
-                        "type": "client",
-                        "id": "-".join((node.id, client)),
+                    }
+            for client in node.get("clients", []):
+                if not client in indexes:
+                    nodes.append({
+                        "id": client,
+                        "flags": {
+                            "client": True,
+                            "online": True,
+                            "gateway": False
+                        }
                     })
+                    indexes[client] = count
+                    count += 1
+
+                links[(node.id, client)] = {
+                    "source": indexes[node.id],
+                    "target": indexes[client],
+                    "quality": "TT",
+                    "type": "client",
+                    "id": "-".join((node.id, client)),
+                }
 
         return {
             "nodes": nodes,
-            "links": links,
+            "links": list(links.values()),
             "meta": {
                 "timestamp": datetime.utcnow()
                                      .replace(microsecond=0)

From 6d452fc1495120e7767572535c387fc6223b4e53 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sat, 6 Sep 2014 13:48:03 +0200
Subject: [PATCH 11/15] d3json: obscure client MACs

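Client MACs no longer appear in nodes.json. Each client of a node is
exported as "<node id>-<n>", where n counts that node's clients, and
the id of the client link uses the same scheme. An exported client
entry looks roughly like this (the MAC-based node id is made up):

    client_node = {
        "id": "aa:bb:cc:dd:ee:01-0",   # first client of that node
        "flags": {"client": True, "online": True, "gateway": False},
    }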
---
 ffmap/outputs/d3json.py | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/ffmap/outputs/d3json.py b/ffmap/outputs/d3json.py
index 06e82fb..31f03a6 100644
--- a/ffmap/outputs/d3json.py
+++ b/ffmap/outputs/d3json.py
@@ -50,26 +50,27 @@ class Output:
                         "type": "vpn" if neighbor["neighbor"]["vpn"] or node["vpn"] else None,
                         "id": "-".join((node.id, neighbor["neighbor"].id)),
                     }
+            clientcount = 0
             for client in node.get("clients", []):
-                if not client in indexes:
-                    nodes.append({
-                        "id": client,
-                        "flags": {
-                            "client": True,
-                            "online": True,
-                            "gateway": False
-                        }
-                    })
-                    indexes[client] = count
-                    count += 1
+                nodes.append({
+                    "id": "%s-%s" % (node.id, clientcount),
+                    "flags": {
+                        "client": True,
+                        "online": True,
+                        "gateway": False
+                    }
+                })
+                indexes[client] = count
 
                 links[(node.id, client)] = {
                     "source": indexes[node.id],
                     "target": indexes[client],
                     "quality": "TT",
                     "type": "client",
-                    "id": "-".join((node.id, client)),
+                    "id": "%s-%i" % (node.id, clientcount),
                 }
+                count += 1
+                clientcount += 1
 
         return {
             "nodes": nodes,

From f08aaaff4e1a62b9b1dbfb0f699ee2e1e8edc1a8 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sun, 7 Sep 2014 12:17:36 +0200
Subject: [PATCH 12/15] Fix fuzzy MAC matching

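The fuzzy comparison previously allowed the first bytes of two MACs to
differ only in the 0x02 bit (the locally administered bit); ORing with
6 instead of 2 also masks the 0x04 bit, so a difference there no longer
rejects the match. A small worked example:

    assert 0x76 | 6 == 0x72 | 6   # both become 0x76, the bytes now match
    assert 0x76 | 2 != 0x72 | 2   # with the old mask the same bytes did not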
---
 ffmap/inputs/batadv.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ffmap/inputs/batadv.py b/ffmap/inputs/batadv.py
index 576b09a..3a5abc9 100644
--- a/ffmap/inputs/batadv.py
+++ b/ffmap/inputs/batadv.py
@@ -28,8 +28,8 @@ class Input:
         if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]:
             return False
 
-        # First byte must only differ in bit 2
-        if mac_a[0] | 2 != mac_b[0] | 2:
+        # First byte may only differ in bits 2 and 3
+        if mac_a[0] | 6 != mac_b[0] | 6:
             return False
 
         # Count differing bytes after the third

From 66112061d6820265f1d9157f5a62132ae76c6528 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sat, 20 Sep 2014 12:42:40 +0200
Subject: [PATCH 13/15] Fix adding of nodes with multiple matching aliases

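add_or_update() raised an AmbiguityError as soon as two of the given
aliases were already known, even when both already resolved to the very
same Node object, e.g. when the same set of aliases is passed a second
time. Comparing by identity against the node selected so far keeps the
error for real conflicts only. Roughly (the aliases and the hostname
below are placeholders):

    from ffmap.nodedb import NodeDB
    db = NodeDB()
    db.add_or_update(["aa:aa", "bb:bb"], {"hostname": "x"})
    db.add_or_update(["aa:aa", "bb:bb"])  # raised AmbiguityError before, passes now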
---
 ffmap/nodedb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ffmap/nodedb.py b/ffmap/nodedb.py
index 0be76b0..344ed29 100644
--- a/ffmap/nodedb.py
+++ b/ffmap/nodedb.py
@@ -42,7 +42,7 @@ class NodeDB(dict):
             if id_ == node_id:
                 continue
             if id_ in self:
-                if node is not None:
+                if node is not None and node is not self[id_]:
                     raise AmbiguityError([node_id, id_])
                 node = self[id_]
                 node_id = id_

From 322860be7e31e1dae98a525d9dd2963872b181b5 Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Sat, 20 Sep 2014 12:42:53 +0200
Subject: [PATCH 14/15] Add MACs from mesh_interfaces as aliases

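batadv-vis may report a node under one of its mesh interface MACs
rather than under the MAC alfred keys its record by. Registering every
MAC listed in network.mesh_interfaces of the nodeinfo record as an
alias lets both data sets end up in the same node entry. Sketch (the
MACs below are made up):

    node["network"]["mesh_interfaces"] = ["aa:bb:cc:dd:ee:f1", "aa:bb:cc:dd:ee:f2"]
    # -> nodedb.add_or_update([mac, "aa:bb:cc:dd:ee:f1", "aa:bb:cc:dd:ee:f2"], node)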
---
 ffmap/inputs/alfred.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py
index 340c0d9..13730af 100644
--- a/ffmap/inputs/alfred.py
+++ b/ffmap/inputs/alfred.py
@@ -29,4 +29,5 @@ class Input:
                 nodeinfo[mac]['statistics'] = statistics[mac]
 
         for mac, node in nodeinfo.items():
-            nodedb.add_or_update([mac], node)
+            aliases = [mac] + node.get('network', {}).get('mesh_interfaces', [])
+            nodedb.add_or_update(aliases, node)

From cd1329963acc721088843f65c4060fd7ac64897a Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza <janphilipp@litza.de>
Date: Tue, 23 Sep 2014 22:31:51 +0200
Subject: [PATCH 15/15] Alfred input: Pass -z switch to alfred-json

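The two nearly identical subprocess calls are folded into a small
_call_alfred() helper, which now also passes -z on every call. With the
default request_data_type this corresponds to:

    alfred-json -z -r 158 -f json    # nodeinfo
    alfred-json -z -r 159 -f json    # statistics (request_data_type + 1)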
---
 ffmap/inputs/alfred.py | 26 +++++++++++---------------
 1 file changed, 11 insertions(+), 15 deletions(-)

diff --git a/ffmap/inputs/alfred.py b/ffmap/inputs/alfred.py
index 13730af..9df5f5e 100644
--- a/ffmap/inputs/alfred.py
+++ b/ffmap/inputs/alfred.py
@@ -5,23 +5,19 @@ class Input:
     def __init__(self,request_data_type = 158):
         self.request_data_type = request_data_type
 
+    @staticmethod
+    def _call_alfred(request_data_type):
+        return json.loads(subprocess.check_output([
+            "alfred-json",
+            "-z",
+            "-r", str(request_data_type),
+            "-f", "json",
+        ]).decode("utf-8"))
+
     def get_data(self, nodedb):
         """Add data from alfred to the supplied nodedb"""
-        # get nodeinfo
-        output = subprocess.check_output([
-            "alfred-json",
-            "-r", str(self.request_data_type),
-            "-f", "json",
-        ])
-        nodeinfo = json.loads(output.decode("utf-8"))
-
-        # get statistics
-        output = subprocess.check_output([
-            "alfred-json",
-            "-r", str(self.request_data_type+1),
-            "-f", "json",
-        ])
-        statistics = json.loads(output.decode("utf-8"))
+        nodeinfo = self._call_alfred(self.request_data_type)
+        statistics = self._call_alfred(self.request_data_type+1)
 
         # merge statistics into nodeinfo to be compatible with earlier versions
         for mac, node in statistics.items():