Create package ffmap, add wiki input, remove old code

This commit is contained in:
Jan-Philipp Litza 2014-07-08 14:20:47 +02:00
parent f5e3705eec
commit e54e7467fc
21 changed files with 272 additions and 282 deletions

2
.gitignore vendored
View file

@@ -1,3 +1 @@
*.pyc
aliases.json
nodedb/

View file

@@ -1,6 +1,6 @@
# Data for Freifunk Map, Graph and Node List
ffmap-backend gathers information on the batman network by invoking
batctl
and
batadv-vis
@@ -41,13 +41,13 @@ Alias /map /home/ffmap/www/
Alias /firmware /home/freifunk/autoupdates/
</pre>
To execute, run
./mkmap.sh ../www
To execute, run
python3 -mffmap.run --input-alfred --input-batadv --output-d3json ../www/nodes.json
The script expects the sudo wrappers described above in the $HOME directory of the user executing
the script. If those are not available, an error will occur unless the script is executed as root. Also,
the realpath tool optionally allows the script to be executed from anywhere in the directory tree.
For regular execution, add the following to the crontab:
<pre>
*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
*/5 * * * * python3 -mffmap.run --input-alfred --input-batadv --output-d3json /home/ffmap/www/nodes.json
</pre>

View file

@@ -1,9 +0,0 @@
{
"b0:48:7a:e7:d3:64" : {
"name" : "Meute-AP"
},
"8e:3d:c2:10:10:28" : {
"name" : "holstentor",
"vpn" : true
}
}

View file

@@ -1,88 +0,0 @@
#!/usr/bin/env python3
import json
import fileinput
import argparse
import os
import datetime
from batman import batman
from alfred import alfred
from rrd import rrd
from nodedb import NodeDB
from json_encoder import CustomJSONEncoder
# Force encoding to UTF-8
import locale # Ensures that subsequent open()s
locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded.
import sys
#sys.stdin = open('/dev/stdin', 'r')
#sys.stdout = open('/dev/stdout', 'w')
#sys.stderr = open('/dev/stderr', 'w')
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--aliases',
help='read aliases from FILE',
action='append',
metavar='FILE')
parser.add_argument('-m', '--mesh', action='append',
help='batman mesh interface')
parser.add_argument('-o', '--obscure', action='store_true',
help='obscure client macs')
parser.add_argument('-A', '--alfred', action='store_true',
help='retrieve aliases from alfred')
parser.add_argument('-d', '--destination-directory', action='store',
help='destination directory for generated files',required=True)
args = parser.parse_args()
options = vars(args)
db = NodeDB()
if options['mesh']:
for mesh_interface in options['mesh']:
bm = batman(mesh_interface)
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways(gw['mac'])
else:
bm = batman()
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways([gw['mac']])
if options['aliases']:
for aliases in options['aliases']:
db.import_aliases(json.load(open(aliases)))
if options['alfred']:
af = alfred()
db.import_aliases(af.aliases())
db.count_clients()
if options['obscure']:
db.obscure_clients()
scriptdir = os.path.dirname(os.path.realpath(__file__))
exported = db.export()
exported['meta'] = {'timestamp': datetime.datetime.utcnow().replace(microsecond=0).isoformat()}
#Write nodes json
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
json.dump(exported, nodes_json, cls=CustomJSONEncoder)
nodes_json.close()
#Move to destination
os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')
rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes")
rrd.update_database(db)
rrd.update_images()

View file

@@ -1,93 +0,0 @@
#!/usr/bin/env python3
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup
def import_wikigps(url):
def fetch_wikitable(url):
f = urlopen(url)
soup = BeautifulSoup(f)
table = soup.find_all("table")[0]
rows = table.find_all("tr")
headers = []
data = []
def maybe_strip(x):
if isinstance(x.string, str):
return x.string.strip()
else:
return ""
for row in rows:
tds = list([maybe_strip(x) for x in row.find_all("td")])
ths = list([maybe_strip(x) for x in row.find_all("th")])
if any(tds):
data.append(tds)
if any(ths):
headers = ths
nodes = []
for d in data:
nodes.append(dict(zip(headers, d)))
return nodes
nodes = fetch_wikitable(url)
aliases = {}
for node in nodes:
try:
node['MAC'] = node['MAC'].split(',')
except KeyError:
pass
try:
node['GPS'] = node['GPS'].split(',')
except KeyError:
pass
try:
node['Knotenname'] = node['Knotenname'].split(',')
except KeyError:
pass
nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
for data in nodes:
alias = {}
mac = data[0].strip()
if data[1]:
alias['geo'] = [float(x) for x in data[1].strip().split(' ')]
if data[2]:
alias['name'] = data[2].strip()
aliases[mac] = alias
return aliases
parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)
aliases = import_wikigps(options['url'])
print(json.dumps(aliases))

42
ffmap/__init__.py Normal file
View file

@@ -0,0 +1,42 @@
import importlib
from ffmap.nodedb import NodeDB
def run(inputs, outputs):
"""Fill the database with given inputs and give it to given outputs.
Arguments:
inputs -- list of Input instances (with a compatible get_data(nodedb) method)
outputs -- list of Output instances (with a compatible output(nodedb) method)
"""
db = NodeDB()
for input_ in inputs:
input_.get_data(db)
for output in outputs:
output.output(db)
def run_names(inputs, outputs):
"""Fill the database with inputs and give it to outputs, each given
by names.
In contrast to run(inputs, outputs), this method expects only the
names of the modules to use, not instances thereof.
Arguments:
inputs -- list of dicts, each dict having the keys "name" with the
name of the input to use (module name in ffmap/inputs/), and
the key "options" with a dict of input-dependent options.
outputs -- list of dicts, see inputs.
"""
input_instances = []
output_instances = []
for input_ in inputs:
module = importlib.import_module(".inputs." + input_["name"], "ffmap")
input_instances.append(module.Input(**input_["options"]))
for output in outputs:
module = importlib.import_module(".outputs." + output["name"], "ffmap")
output_instances.append(module.Output(**output["options"]))
run(input_instances, output_instances)
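
A minimal usage sketch of run_names(): the option keys follow the lowercased metavars defined in ffmap/run.py, the wiki URL is just that input's default, and it is assumed that the alfred input and d3json output modules added in this commit accept exactly these options.

# Sketch only: fill the node database from the wiki and A.L.F.R.E.D. inputs,
# then write an ffmap-d3 compatible JSON file.
from ffmap import run_names

run_names(
    inputs=[
        {"name": "wiki", "options": {"url": "http://luebeck.freifunk.net/wiki/Knoten"}},
        {"name": "alfred", "options": {}},
    ],
    outputs=[
        {"name": "d3json", "options": {"filepath": "/home/ffmap/www/nodes.json"}},
    ],
)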

71
ffmap/inputs/wiki.py Executable file
View file

@@ -0,0 +1,71 @@
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup
class Input:
def __init__(self, url="http://luebeck.freifunk.net/wiki/Knoten"):
self.url = url
def fetch_wikitable(self):
f = urlopen(self.url)
soup = BeautifulSoup(f)
table = soup.find("table")
rows = table.find_all("tr")
headers = []
data = []
def maybe_strip(x):
if isinstance(x.string, str):
return x.string.strip()
else:
return ""
for row in rows:
tds = list([maybe_strip(x) for x in row.find_all("td")])
ths = list([maybe_strip(x) for x in row.find_all("th")])
if any(tds):
data.append(tds)
if any(ths):
headers = ths
return [dict(zip(headers, d)) for d in data]
def get_data(self, nodedb):
nodes = self.fetch_wikitable()
for node in nodes:
if "MAC" not in node or not node["MAC"]:
# without MAC, we cannot merge this data with others, so
# we might as well ignore it
continue
newnode = {
"network": {
"mac": node.get("MAC").lower(),
},
"location": {
"latitude": float(node.get("GPS", " ").split(" ")[0]),
"longitude": float(node.get("GPS", " ").split(" ")[1]),
"description": node.get("Ort"),
} if " " in node.get("GPS", "") else None,
"hostname": node.get("Knotenname"),
"hardware": {
"model": node["Router"],
} if node.get("Router") else None,
"software": {
"firmware": {
"base": "LFF",
"release": node.get("LFF Version"),
},
},
"owner": {
"contact": node["Betreiber"],
} if node.get("Betreiber") else None,
}
# remove keys with None as value
newnode = {k: v for k,v in newnode.items() if v is not None}
nodedb.add_or_update([newnode["network"]["mac"]], newnode)
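
To make the mapping concrete, here is a sketch of what get_data() builds from one hypothetical wiki row (the column names match the code above; the values are invented):

# Hypothetical row as returned by fetch_wikitable(), keyed by the table headers.
row = {
    "MAC": "8E:3D:C2:10:10:28",
    "GPS": "53.866 10.684",
    "Ort": "Holstentor",
    "Knotenname": "holstentor",
    "Router": "TP-Link WR841N",
    "LFF Version": "0.5",
    "Betreiber": "someone@example.org",
}

# get_data() would turn it into roughly this structure before calling
# nodedb.add_or_update(["8e:3d:c2:10:10:28"], newnode):
newnode = {
    "network": {"mac": "8e:3d:c2:10:10:28"},
    "location": {"latitude": 53.866, "longitude": 10.684, "description": "Holstentor"},
    "hostname": "holstentor",
    "hardware": {"model": "TP-Link WR841N"},
    "software": {"firmware": {"base": "LFF", "release": "0.5"}},
    "owner": {"contact": "someone@example.org"},
}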

View file

@@ -7,11 +7,20 @@ class NoneDict:
even previously inexistent keys can be accessed, but nothing is
stored permanently in this class.
"""
__repr__ = lambda self: 'NoneDict()'
__bool__ = lambda self: False
__getitem__ = lambda self, k: NoneDict()
__json__ = lambda self: None
__float__ = lambda self: float('NaN')
def __repr__(self):
return 'NoneDict()'
def __bool__(self):
return False
def __getitem__(self, k):
return NoneDict()
def __json__(self):
return None
def __float__(self):
return float('NaN')
def __iter__(self):
# empty generator
return
yield
def __setitem__(self, key, value):
raise RuntimeError("NoneDict is readonly")
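
A quick sketch of the NoneDict semantics described in the docstring above (assuming the class lives next to Node in ffmap/node.py):

from ffmap.node import NoneDict

n = NoneDict()
n["location"]["latitude"]           # another NoneDict(); missing keys never raise
bool(n["hostname"])                 # False
float(n["location"]["longitude"])   # nan
# n["hostname"] = "x"               # would raise RuntimeError("NoneDict is readonly")
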
@@ -39,6 +48,16 @@ class Node(defaultdict):
"""
return hash(self.id)
def deep_update(self, other):
"""Update the dictionary like dict.update() but recursively."""
def dmerge(a, b):
for k, v in b.items():
if isinstance(v, dict) and isinstance(a.get(k), dict):
dmerge(a[k], v)
else:
a[k] = v
dmerge(self, other)
@property
def vpn_neighbors(self):
try:
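
The new deep_update() merges nested dicts instead of replacing them wholesale as dict.update() would; a minimal sketch of the behavior, using plain dicts and the same recursion:

node = {"network": {"mac": "8e:3d:c2:10:10:28"}, "hostname": "holstentor"}
other = {"network": {"addresses": ["fe80::1"]}, "hostname": "holstentor-neu"}

def dmerge(a, b):
    # same recursion as Node.deep_update() above
    for k, v in b.items():
        if isinstance(v, dict) and isinstance(a.get(k), dict):
            dmerge(a[k], v)
        else:
            a[k] = v

dmerge(node, other)
# node == {"network": {"mac": "8e:3d:c2:10:10:28", "addresses": ["fe80::1"]},
#          "hostname": "holstentor-neu"}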

View file

@@ -1,6 +1,6 @@
from node import Node
from .node import Node
class AmbiguityException(Exception):
class AmbiguityError(Exception):
"""Indicate the ambiguity of identifiers.
This exception is raised if there is more than one match for a set
@@ -43,7 +43,7 @@ class NodeDB(dict):
continue
if id_ in self:
if node is not None:
raise AmbiguityException([node_id, id_])
raise AmbiguityError([node_id, id_])
node = self[id_]
node_id = id_
@@ -53,7 +53,7 @@ class NodeDB(dict):
# Update the node with the given properties using its own update method.
if other is not None:
node.update(other)
node.deep_update(other)
# Add new aliases if any
for id_ in ids:

View file

@@ -0,0 +1 @@

View file

@@ -1,11 +1,12 @@
import json
from datetime import datetime
__all__ = ["Exporter"]
class CustomJSONEncoder(json.JSONEncoder):
"""
JSON encoder that uses an object's __json__() method to convert it to
something JSON-compatible.
JSON encoder that uses an object's __json__() method to convert it
to something JSON-compatible.
"""
def default(self, obj):
try:
@@ -14,7 +15,7 @@ class CustomJSONEncoder(json.JSONEncoder):
pass
return super().default(obj)
class Exporter:
class Output:
def __init__(self, filepath="nodes.json"):
self.filepath = filepath
@@ -60,9 +61,14 @@ class Exporter:
return {
"nodes": nodes,
"links": links,
"meta": {
"timestamp": datetime.utcnow()
.replace(microsecond=0)
.isoformat()
}
}
def export(self, nodedb):
def output(self, nodedb):
with open(self.filepath, "w") as nodes_json:
json.dump(
self.generate(nodedb),

View file

@@ -1,8 +1,7 @@
import os
from .NodeRRD import NodeRRD
from .GlobalRRD import GlobalRRD
from ffmap.rrd.rrds import NodeRRD, GlobalRRD
class Exporter:
class Output:
def __init__(self, directory="nodedb"):
self.directory = directory
try:
@@ -10,7 +9,7 @@ class Exporter:
except OSError:
pass
def export(self, nodedb):
def output(self, nodedb):
nodes = set(nodedb.values())
clients = 0
nodecount = 0

View file

@@ -1,7 +1,7 @@
import os
import subprocess
from node import Node
from .RRD import RRD, DS, RRA
from ffmap.node import Node
from . import RRD, DS, RRA
class NodeRRD(RRD):
ds_list = [
@@ -81,3 +81,35 @@ class NodeRRD(RRD):
'LINE1:c#00F:clients connected\\l',
]
subprocess.check_output(args)
class GlobalRRD(RRD):
ds_list = [
# Number of nodes available
DS('nodes', 'GAUGE', 120, 0, float('NaN')),
# Number of clients available
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples
RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
]
def __init__(self, filepath):
super().__init__(filepath)
self.ensureSanity(self.ds_list, self.rra_list, step=60)
def update(self, nodeCount, clientCount):
super().update({'nodes': nodeCount, 'clients': clientCount})
def graph(self, filename, timeframe):
args = ["rrdtool", 'graph', filename,
'-s', '-' + timeframe,
'-w', '800',
'-h', '400',
'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
'LINE1:nodes#F00:nodes\\l',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'LINE2:clients#00F:clients',
]
subprocess.check_output(args)

69
ffmap/run.py Normal file
View file

@@ -0,0 +1,69 @@
#!/usr/bin/env python3
import argparse
import sys
from ffmap import run_names
class MyAction(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
if self.dest.startswith(("input_", "output_")):
collection_name = self.dest.split("_")[0] + "s"
name = self.dest.split("_", 1)[1]
if not hasattr(namespace, collection_name):
setattr(namespace, collection_name, [])
collection = getattr(namespace, collection_name)
collection.append({
"name": name,
"options": {self.metavar.lower(): values}
if values is not None else {}
})
else:
raise Exception("Unexpected dest=" + self.dest)
def parser_add_myarg(parser, name, metavar="OPT", help=None):
parser.add_argument("--" + name,
metavar=metavar,
type=str,
nargs='?',
const=None,
action=MyAction,
help=help)
parser = argparse.ArgumentParser(
description="""Merge node data from multiple sources and generate
various output formats from this data""",
)
input_group = parser.add_argument_group("Inputs", description="""
Inputs are used in the order given on the command line: later
inputs can overwrite identically named attributes set by earlier inputs,
but the first input that encounters a node sets its id, which is
immutable afterwards.
The same input can be given multiple times, possibly with different
options.
""")
output_group = parser.add_argument_group("Outputs")
parser_add_myarg(input_group, 'input-alfred', metavar="REQUEST_DATA_TYPE",
help="read node details from A.L.F.R.E.D.")
parser_add_myarg(input_group, 'input-wiki', metavar="URL",
help="read node details from a Wiki page")
parser_add_myarg(input_group, 'input-batadv', metavar="MESH_INTERFACE",
help="add node's neighbors and clients from batadv-vis")
parser_add_myarg(output_group, 'output-d3json', metavar="FILEPATH",
help="generate JSON file compatible with ffmap-d3")
parser_add_myarg(output_group, 'output-rrd', metavar="DIRECTORY",
help="update RRDs with statistics, one global and one per node")
args = parser.parse_args()
if "inputs" not in args or not args.inputs:
parser.print_help(sys.stderr)
sys.stderr.write("\nERROR: No input has been defined!\n")
sys.exit(1)
if "outputs" not in args or not args.outputs:
parser.print_help(sys.stderr)
sys.stderr.write("\nERROR: No output has been defined!\n")
sys.exit(1)
run_names(inputs=args.inputs, outputs=args.outputs)
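
For illustration, an assumed invocation like
python3 -mffmap.run --input-alfred --input-batadv bat0 --output-d3json /home/ffmap/www/nodes.json
would make MyAction build roughly the following lists on the namespace (each option key is the lowercased metavar of its flag):

expected_inputs = [
    {"name": "alfred", "options": {}},                           # no value given -> empty options
    {"name": "batadv", "options": {"mesh_interface": "bat0"}},
]
expected_outputs = [
    {"name": "d3json", "options": {"filepath": "/home/ffmap/www/nodes.json"}},
]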

View file

@@ -1,32 +0,0 @@
#!/bin/bash
set -e
DEST=$1
LOCKFILE="/run/lock/ffmap"
[ "$DEST" ] || exit 1
cd "$(dirname "$0")"/
if lockfile-check "$LOCKFILE"; then
exit
fi
lockfile-create "$LOCKFILE"
lockfile-touch "$LOCKFILE" &
LOCKPID="$!"
./bat2nodes.py -A -a aliases.json -d $DEST
kill "$LOCKPID"
lockfile-remove "$LOCKFILE"
if lockfile-check "$LOCKFILE-sync"; then
exit
fi
lockfile-create "$LOCKFILE-sync"
lockfile-touch "$LOCKFILE-sync" &
LOCKPID="$!"
kill "$LOCKPID"
lockfile-remove "$LOCKFILE-sync"

View file

@@ -1,35 +0,0 @@
import os
import subprocess
from .RRD import RRD, DS, RRA
class GlobalRRD(RRD):
ds_list = [
# Number of nodes available
DS('nodes', 'GAUGE', 120, 0, float('NaN')),
# Number of client available
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
RRA('AVERAGE', 0.5, 1, 120), # 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 60, 744), # 31 days of 1 hour samples
RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
]
def __init__(self, filepath):
super().__init__(filepath)
self.ensureSanity(self.ds_list, self.rra_list, step=60)
def update(self, nodeCount, clientCount):
super().update({'nodes': nodeCount, 'clients': clientCount})
def graph(self, filename, timeframe):
args = ["rrdtool", 'graph', filename,
'-s', '-' + timeframe,
'-w', '800',
'-h' '400',
'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
'LINE1:nodes#F00:nodes\\l',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'LINE2:clients#00F:clients',
]
subprocess.check_output(args)

10
setup.py Normal file
View file

@@ -0,0 +1,10 @@
#!/usr/bin/env python3
from distutils.core import setup
setup(name='FFmap',
version='0.1',
description='Freifunk map backend',
url='https://github.com/ffnord/ffmap-backend',
packages=['ffmap', 'ffmap.inputs', 'ffmap.outputs', 'ffmap.rrd'],
)