Compare commits

..

2 commits

Author SHA1 Message Date
Daniel Ehlers 01fa5e7a0b nodedb.py: Transport new data to node entry 2014-02-09 15:27:25 +01:00
Daniel Ehlers 02a2862ca9 alfred.py: Transfer more data from alfred into the alias list 2014-02-09 15:27:25 +01:00
33 changed files with 963 additions and 1707 deletions

8
.gitignore vendored
View file

@@ -1,8 +1,2 @@
# script-generated
aliases*.json
nodedb/
# python bytecode / cache
*.pyc
pycache/
__pycache__/
aliases.json

.travis.yml
View file

@@ -1,6 +0,0 @@
sudo: false
language: python
python:
- "3.4"
install: "pip install pep8"
script: "pep8 --ignore=E501 *.py lib/*.py"

118
README.md
View file

@@ -1,117 +1 @@
# Data for Freifunk Map, Graph and Node List
[![Build Status](https://travis-ci.org/ffnord/ffmap-backend.svg?branch=master)](https://travis-ci.org/ffnord/ffmap-backend)
ffmap-backend gathers information on the batman network by invoking:
* batctl (might require root),
* alfred-json and
* batadv-vis
The output will be written to a directory (`-d output`).
Run `backend.py --help` for a quick overview of all available options.
For the script's regular execution add the following to the crontab:
<pre>
* * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da
</pre>
# Dependencies
- Python 3
- Python 3 Package [Networkx](https://networkx.github.io/)
- [alfred-json](https://github.com/tcatm/alfred-json)
- rrdtool (if run with `--with-rrd`)
# Running as unprivileged user
Some information collected by ffmap-backend requires access to specific system resources.
Make sure the user you are running this under is part of the group that owns the alfred socket, so
alfred-json can access the alfred daemon.
<pre>
# ls -al /var/run/alfred.sock
srw-rw---- 1 root alfred 0 Mar 19 22:00 /var/run/alfred.sock=

# adduser map alfred
Adding user `map' to group `alfred' ...
Adding user map to group alfred
Done.

$ groups
map alfred
</pre>
Running batctl requires passwordless sudo access, because it needs to access the debugfs to retrieve
the gateway list.
<pre>
# echo 'map ALL = NOPASSWD: /usr/sbin/batctl' | tee /etc/sudoers.d/map
map ALL = NOPASSWD: /usr/sbin/batctl
# chmod 0440 /etc/sudoers.d/map
</pre>
That should be everything. The script automatically detects if it is run in unprivileged mode and
will prefix `sudo` where necessary.
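A minimal sketch of that detection (the helper name is illustrative; the euid check mirrors what `lib/batman.py` does):

```python
import os
import subprocess

def run_batctl(args):
    # batctl reads the gateway list via debugfs, which needs root;
    # prepend sudo when not already running as root.
    cmd = ['batctl'] + args
    if os.geteuid() > 0:
        cmd.insert(0, 'sudo')
    return subprocess.check_output(cmd)
```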
# Data format
## nodes.json
<pre>
{ 'nodes': {
    node_id: { 'flags': { flags },
               'firstseen': isoformat,
               'lastseen': isoformat,
               'nodeinfo': {...},         # copied from alfred type 158
               'statistics': {
                  'uptime': double,       # seconds
                  'memory_usage': double, # 0..1
                  'clients': double,
                  'rootfs_usage': double, # 0..1
                  'loadavg': double,
                  'gateway': mac
               }
             },
    ...
  }
  'timestamp': isoformat
}
</pre>
### flags (bool)
- online
- gateway
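For illustration, a consumer of this format could count online nodes and clients like this (hypothetical snippet, not part of the backend):

```python
import json

with open('nodedb/nodes.json') as f:
    nodedb = json.load(f)

online = [n for n in nodedb['nodes'].values() if n['flags']['online']]
clients = sum(n['statistics']['clients'] for n in online)
print('%d nodes online, %d clients' % (len(online), clients))
```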
## Old data format
If you want to still use the old [ffmap-d3](https://github.com/ffnord/ffmap-d3)
front end, you can use the file `ffmap-d3.jq` to convert the new output to the
old one:
```
jq -n -f ffmap-d3.jq \
--argfile nodes nodedb/nodes.json \
--argfile graph nodedb/graph.json \
> nodedb/ffmap-d3.json
```
Then point your ffmap-d3 instance to the `ffmap-d3.json` file.
# Removing owner information
If you'd like to redact information about the node owner from `nodes.json`,
you may use a filter like [jq]. In this case, specify an output directory
different from your webserver directory, e.g.:
<pre>
./backend.py -d /ffmap-data
</pre>
Don't write to files generated in there. ffmap-backend uses them as its
database.
After running ffmap-backend, copy `graph.json` to your webserver. Then,
filter `nodes.json` using `jq` like this:
<pre>
jq '.nodes = (.nodes | with_entries(del(.value.nodeinfo.owner)))' \
  < /ffmap-data/nodes.json > /var/www/data/nodes.json
</pre>
This will remove owner information from nodes.json before copying the data
to your webserver.
[jq]: https://stedolan.github.io/jq/
See mkmap.sh for now :)

58
alfred.py Executable file
View file

@@ -0,0 +1,58 @@
#!/usr/bin/env python3
import subprocess
import json
class alfred:
def __init__(self,request_data_type = 158):
self.request_data_type = request_data_type
def aliases(self):
output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"])
alfred_data = json.loads(output.decode("utf-8"))
alias = {}
for mac,node in alfred_data.items():
node_alias = {}
if 'location' in node:
if 'latitude' in node['location'] and 'longitude' in node['location']:
node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
if 'software' in node:
if 'firmware' in node['software']:
if 'base' in node['software']['firmware']:
node_alias['firmware-base'] = node['software']['firmware']['base']
if 'release' in node['software']['firmware']:
node_alias['firmware'] = node['software']['firmware']['release']
if 'autoupdater' in node['software']:
if 'branch' in node['software']['autoupdater']:
node_alias['autoupdater-branch'] = node['software']['autoupdater']['branch']
if 'enabled' in node['software']['autoupdater']:
node_alias['autoupdater-enabled'] = node['software']['autoupdater']['enabled']
if 'fastd' in node['software']:
if 'enabled' in node['software']['fastd']:
node_alias['fastd-enabled'] = node['software']['fastd']['enabled']
if 'version' in node['software']['fastd']:
node_alias['fastd-version'] = node['software']['fastd']['version']
if 'hardware' in node:
if 'model' in node['hardware']:
node_alias['hardware-model'] = node['hardware']['model']
if 'network' in node:
if 'gateway' in node['network']:
node_alias['selected-gateway'] = node['network']['gateway']
if 'hostname' in node:
node_alias['name'] = node['hostname']
elif 'name' in node:
node_alias['name'] = node['name']
if len(node_alias):
alias[mac] = node_alias
return alias
if __name__ == "__main__":
ad = alfred()
al = ad.aliases()
print(al)

alfred_merge.py
View file

@@ -1,42 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json
from collections import MutableMapping
def rec_merge(d1, d2):
'''
Update two dicts of dicts recursively,
if either mapping has leaves that are non-dicts,
the second's leaf overwrites the first's.
'''
for k, v in d1.items(): # in Python 2, use .iteritems()!
if k in d2:
# this next check is the only difference!
if all(isinstance(e, MutableMapping) for e in (v, d2[k])):
d2[k] = rec_merge(v, d2[k])
# we could further check types and merge as appropriate here.
d3 = d1.copy()
d3.update(d2)
return d3
class alfred_merge:
def __init__(self,request_data_type_1 = 158, request_data_type_2 = 159):
self.request_data_type_1 = request_data_type_1
self.request_data_type_2 = request_data_type_2
def aliases(self):
output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"])
alfred_data_1 = json.loads(output.decode("utf-8"))
output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"])
alfred_data_2 = json.loads(output.decode("utf-8"))
return json.dumps(rec_merge(alfred_data_1, alfred_data_2))
if __name__ == "__main__":
ad = alfred_merge()
al = ad.aliases()
print(al)

aliases.json.example
View file

@@ -1,36 +1,9 @@
-[
-  {
-    "node_id": "krtek",
-    "hostname": "krtek",
-    "location": {
-      "longitude": 10.74,
-      "latitude": 53.86
-    },
-    "network": {
-      "mesh": {
-        "bat0": {
-          "interfaces": {
-            "tunnel": [
-              "00:25:86:e6:f1:bf"
-            ]
-          }
-        }
-      }
-    }
-  },
-  {
-    "node_id": "gw1",
-    "hostname": "burgtor",
-    "network": {
-      "mesh": {
-        "bat0": {
-          "interfaces": {
-            "tunnel": [
-              "52:54:00:f3:62:d9"
-            ]
-          }
-        }
-      }
-    }
-  }
-]
+{
+  "b0:48:7a:e7:d3:64" : {
+    "name" : "Meute-AP"
+  },
+  "8e:3d:c2:10:10:28" : {
+    "name" : "holstentor",
+    "vpn" : true
+  }
+}

backend.py
View file

@@ -1,194 +0,0 @@
#!/usr/bin/env python3
"""
backend.py - ffmap-backend runner
https://github.com/ffnord/ffmap-backend
"""
import argparse
import json
import os
import sys
from datetime import datetime
import networkx as nx
from networkx.readwrite import json_graph
from lib import graph, nodes
from lib.alfred import Alfred
from lib.batman import Batman
from lib.rrddb import RRD
from lib.nodelist import export_nodelist
from lib.validate import validate_nodeinfos
NODES_VERSION = 1
GRAPH_VERSION = 1
def main(params):
os.makedirs(params['dest_dir'], exist_ok=True)
nodes_fn = os.path.join(params['dest_dir'], 'nodes.json')
tmp_nodes_fn = os.path.join(params['dest_dir'], 'nodes.json.tmp')
graph_fn = os.path.join(params['dest_dir'], 'graph.json')
tmp_graph_fn = os.path.join(params['dest_dir'], 'graph.json.tmp')
nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json')
tmp_nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json.tmp')
now = datetime.utcnow().replace(microsecond=0)
# parse mesh param and instantiate Alfred/Batman instances
alfred_instances = []
batman_instances = []
for value in params['mesh']:
# (1) only batman-adv if, no alfred sock
if ':' not in value:
if len(params['mesh']) > 1:
raise ValueError(
'Multiple mesh interfaces require the use of '
'alfred socket paths.')
alfred_instances.append(Alfred(unix_sockpath=None))
batman_instances.append(Batman(mesh_interface=value))
else:
# (2) batman-adv if + alfred socket
try:
batif, alfredsock = value.split(':')
alfred_instances.append(Alfred(unix_sockpath=alfredsock))
batman_instances.append(Batman(mesh_interface=batif,
alfred_sockpath=alfredsock))
except ValueError:
raise ValueError(
'Unparseable value "{0}" in --mesh parameter.'.
format(value))
# read nodedb state from node.json
try:
with open(nodes_fn, 'r') as nodedb_handle:
nodedb = json.load(nodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
# flush nodedb if it uses the old format
if 'links' in nodedb:
nodedb = {'nodes': dict()}
# set version we're going to output
nodedb['version'] = NODES_VERSION
# update timestamp and assume all nodes are offline
nodedb['timestamp'] = now.isoformat()
for node_id, node in nodedb['nodes'].items():
node['flags']['online'] = False
# integrate alfred nodeinfo
for alfred in alfred_instances:
nodeinfo = validate_nodeinfos(alfred.nodeinfo())
nodes.import_nodeinfo(nodedb['nodes'], nodeinfo,
now, assume_online=True)
# integrate static aliases data
for aliases in params['aliases']:
with open(aliases, 'r') as f:
# nodeinfo = validate_nodeinfos(json.load(f))
nodes.import_nodeinfo(nodedb['nodes'], json.load(f),
now, assume_online=False, statics=True)
nodes.reset_statistics(nodedb['nodes'])
for alfred in alfred_instances:
nodes.import_statistics(nodedb['nodes'], alfred.statistics())
# acquire gwl and visdata for each batman instance
mesh_info = []
for batman in batman_instances:
vd = batman.vis_data()
gwl = batman.gateway_list()
mesh_info.append((vd, gwl))
# update nodedb from batman-adv data
for vd, gwl in mesh_info:
nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vd)
nodes.import_vis_clientcount(nodedb['nodes'], vd)
nodes.mark_vis_data_online(nodedb['nodes'], vd, now)
nodes.mark_gateways(nodedb['nodes'], gwl)
# clear the nodedb from nodes that have not been online in $prune days
if params['prune']:
nodes.prune_nodes(nodedb['nodes'], now, params['prune'])
# build nxnetworks graph from nodedb and visdata
batadv_graph = nx.DiGraph()
for vd, gwl in mesh_info:
graph.import_vis_data(batadv_graph, nodedb['nodes'], vd)
# force mac addresses to be vpn-link only (like gateways for example)
if params['vpn']:
graph.mark_vpn(batadv_graph, frozenset(params['vpn']))
def extract_tunnel(nodes):
macs = set()
for id, node in nodes.items():
try:
for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]:
macs.add(mac)
for mac in node["nodeinfo"]["network"]["mesh"]["bat-ffhh"]["interfaces"]["tunnel"]:
macs.add(mac)
except KeyError:
pass
return macs
graph.mark_vpn(batadv_graph, extract_tunnel(nodedb['nodes']))
batadv_graph = graph.merge_nodes(batadv_graph)
batadv_graph = graph.to_undirected(batadv_graph)
# write processed data to dest dir
with open(tmp_nodes_fn, 'w') as f:
json.dump(nodedb, f)
graph_out = {'batadv': json_graph.node_link_data(batadv_graph),
'version': GRAPH_VERSION}
with open(tmp_graph_fn, 'w') as f:
json.dump(graph_out, f)
with open(tmp_nodelist_fn, 'w') as f:
json.dump(export_nodelist(now, nodedb), f)
os.rename(tmp_nodes_fn, nodes_fn)
os.rename(tmp_graph_fn, graph_fn)
os.rename(tmp_nodelist_fn, nodelist_fn)
# optional rrd graphs (trigger with --rrd)
if params['rrd']:
script_directory = os.path.dirname(os.path.realpath(__file__))
rrd = RRD(os.path.join(script_directory, 'nodedb'),
os.path.join(params['dest_dir'], 'nodes'))
rrd.update_database(nodedb['nodes'])
rrd.update_images()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--aliases',
help='Read aliases from FILE',
nargs='+', default=[], metavar='FILE')
parser.add_argument('-m', '--mesh',
default=['bat0'], nargs='+',
help='Use given batman-adv mesh interface(s) (defaults '
'to bat0); specify alfred unix socket like '
'bat0:/run/alfred0.sock.')
parser.add_argument('-d', '--dest-dir', action='store',
help='Write output to destination directory',
required=True)
parser.add_argument('-V', '--vpn', nargs='+', metavar='MAC',
help='Assume MAC addresses are part of vpn')
parser.add_argument('-p', '--prune', metavar='DAYS', type=int,
help='forget nodes offline for at least DAYS')
parser.add_argument('--with-rrd', dest='rrd', action='store_true',
default=False,
help='enable the rendering of RRD graphs (cpu '
'intensive)')
options = vars(parser.parse_args())
main(options)

84
bat2nodes.py Executable file
View file

@@ -0,0 +1,84 @@
#!/usr/bin/env python3
import json
import fileinput
import argparse
import os
from batman import batman
from alfred import alfred
from rrd import rrd
from nodedb import NodeDB
from d3mapbuilder import D3MapBuilder
# Force encoding to UTF-8
import locale # Ensures that subsequent open()s
locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded.
import sys
#sys.stdin = open('/dev/stdin', 'r')
#sys.stdout = open('/dev/stdout', 'w')
#sys.stderr = open('/dev/stderr', 'w')
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--aliases',
help='read aliases from FILE',
action='append',
metavar='FILE')
parser.add_argument('-m', '--mesh', action='append',
help='batman mesh interface')
parser.add_argument('-o', '--obscure', action='store_true',
help='obscure client macs')
parser.add_argument('-A', '--alfred', action='store_true',
help='retrieve aliases from alfred')
parser.add_argument('-d', '--destination-directory', action='store',
help='destination directory for generated files',required=True)
args = parser.parse_args()
options = vars(args)
db = NodeDB()
if options['mesh']:
for mesh_interface in options['mesh']:
bm = batman(mesh_interface)
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways(gw.mac)
else:
bm = batman()
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways([gw['mac']])
if options['aliases']:
for aliases in options['aliases']:
db.import_aliases(json.load(open(aliases)))
if options['alfred']:
af = alfred()
db.import_aliases(af.aliases())
if options['obscure']:
db.obscure_clients()
scriptdir = os.path.dirname(os.path.realpath(__file__))
rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes")
rrd.update_database(db)
rrd.update_images()
m = D3MapBuilder(db)
#Write nodes json
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
nodes_json.write(m.build())
nodes_json.close()
#Move to destination
os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')

84
batman.py Executable file
View file

@@ -0,0 +1,84 @@
#!/usr/bin/env python3
import subprocess
import json
import re
class batman:
""" Bindings for B.A.T.M.A.N. advanced batctl tool
"""
def __init__(self, mesh_interface = "bat0"):
self.mesh_interface = mesh_interface
def vis_data(self,batadv_vis=False):
vds = self.vis_data_batctl_legacy()
if batadv_vis:
vds += self.vis_data_batadv_vis()
return vds
def vis_data_helper(self,lines):
vd = []
for line in lines:
try:
utf8_line = line.decode("utf-8")
vd.append(json.loads(utf8_line))
except (UnicodeDecodeError, ValueError):
pass
return vd
def vis_data_batctl_legacy(self):
""" Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
lines = output.splitlines()
vds = self.vis_data_helper(lines)
for vd in vds:
vd['legacy'] = True
return vds
def vis_data_batadv_vis(self):
""" Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
"""
output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
lines = output.splitlines()
return self.vis_data_helper(lines)
def gateway_list(self):
""" Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
output_utf8 = output.decode("utf-8")
# TODO Parse information
lines = output_utf8.splitlines()
own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1)
# Remove header line
del lines[0]
# Fill gateway list
gw = []
gw_mode = self.gateway_mode()
if gw_mode['mode'] == 'server':
gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']})
for line in lines:
gw_line = line.split()
# When in client gateway mode, gw_line[0] may not be the right field.
gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]})
return gw
def gateway_mode(self):
""" Parse "batctl -m <mesh_interface> gw"
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
elements = output.decode("utf-8").split()
mode = elements[0]
if mode == "server":
return {'mode': 'server', 'bandwidth': elements[3]}
else:
return {'mode': mode}
if __name__ == "__main__":
bc = batman()
vd = bc.vis_data()
gw = bc.gateway_list()
for x in vd:
print(x)
print(gw)
print(bc.gateway_mode())

35
d3mapbuilder.py Normal file
View file

@@ -0,0 +1,35 @@
import json
import datetime
class D3MapBuilder:
def __init__(self, db):
self._db = db
def build(self):
output = dict()
now = datetime.datetime.utcnow().replace(microsecond=0)
nodes = self._db.get_nodes()
output['nodes'] = [{'name': x.name, 'id': x.id,
'macs': ', '.join(x.macs),
'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None,
'firmware': x.firmware,
'flags': x.flags
} for x in nodes]
links = self._db.get_links()
output['links'] = [{'source': x.source.id, 'target': x.target.id,
'quality': x.quality,
'type': x.type,
'id': x.id
} for x in links]
output['meta'] = {
'timestamp': now.isoformat()
}
return json.dumps(output)

93
ffhlwiki.py Executable file
View file

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup
def import_wikigps(url):
def fetch_wikitable(url):
f = urlopen(url)
soup = BeautifulSoup(f)
table = soup.find_all("table")[0]
rows = table.find_all("tr")
headers = []
data = []
def maybe_strip(x):
if isinstance(x.string, str):
return x.string.strip()
else:
return ""
for row in rows:
tds = list([maybe_strip(x) for x in row.find_all("td")])
ths = list([maybe_strip(x) for x in row.find_all("th")])
if any(tds):
data.append(tds)
if any(ths):
headers = ths
nodes = []
for d in data:
nodes.append(dict(zip(headers, d)))
return nodes
nodes = fetch_wikitable(url)
aliases = {}
for node in nodes:
try:
node['MAC'] = node['MAC'].split(',')
except KeyError:
pass
try:
node['GPS'] = node['GPS'].split(',')
except KeyError:
pass
try:
node['Knotenname'] = node['Knotenname'].split(',')
except KeyError:
pass
entries = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
for data in entries:
alias = {}
mac = data[0].strip()
if data[1]:
alias['gps'] = data[1].strip()
if data[2]:
alias['name'] = data[2].strip()
aliases[mac] = alias
return aliases
parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)
aliases = import_wikigps(options['url'])
print(json.dumps(aliases))

ffmap-d3.jq
View file

@@ -1,52 +0,0 @@
{
"meta": {
"timestamp": $nodes.timestamp
},
"nodes": (
$graph.batadv.nodes
| map(
if has("node_id") and .node_id
then (
$nodes.nodes[.node_id] as $node
| {
"id": .id,
"uptime": $node.statistics.uptime,
"flags": ($node.flags + {"client": false}),
"name": $node.nodeinfo.hostname,
"clientcount": (if $node.statistics.clients >= 0 then $node.statistics.clients else 0 end),
"hardware": $node.nodeinfo.hardware.model,
"firmware": $node.nodeinfo.software.firmware.release,
"geo": (if $node.nodeinfo.location then [$node.nodeinfo.location.latitude, $node.nodeinfo.location.longitude] else null end),
#"lastseen": $node.lastseen,
"network": $node.nodeinfo.network
}
)
else
{
"flags": {},
"id": .id,
"geo": null,
"clientcount": 0
}
end
)
),
"links": (
$graph.batadv.links
| map(
$graph.batadv.nodes[.source].node_id as $source_id
| $graph.batadv.nodes[.target].node_id as $target_id
| select(
$source_id and $target_id and
($nodes.nodes | (has($source_id) and has($target_id)))
)
| {
"target": .target,
"source": .source,
"quality": "\(.tq), \(.tq)",
"id": ($source_id + "-" + $target_id),
"type": (if .vpn then "vpn" else null end)
}
)
)
}

gateway.json
View file

@@ -1,29 +0,0 @@
[
{
"node_id": "deadbfff0101",
"hostname": "gw01"
},
{
"node_id": "deadbeef0505",
"hostname": "gw02.hamburg.freifunk.net",
"network": {
"mac": "de:ad:be:ef:05:05",
"mesh": {
"bat0": {
"interfaces": {
"tunnel": [
"de:ad:be:ff:05:05",
"de:ad:be:fc:05:05",
"de:ad:bf:ff:05:05"
]
}
}
}
}
},
{
"node_id": "00163efb9d8d",
"hostname": "gw03"
}
]

generate_aliases.py
View file

@@ -1,110 +0,0 @@
#!/usr/bin/env python2
from __future__ import print_function
import json
import os
import sys
if len(sys.argv) != 2:
print('usage: ' + sys.argv[0] + ' /path/to/peers')
sys.exit(1)
peersDir = sys.argv[1]
def normalizeMac(mac):
mac = mac.lower()
normalized = ''
n = 0
for c in mac:
if c != ':':
if n > 0 and n % 2 == 0:
normalized = normalized + ':'
normalized = normalized + c
n += 1
return normalized
def toAlias(peer):
alias = {}
if not (peer.has_key('name') and peer.has_key('mac')):
return None
name = peer['name']
mac = peer['mac']
alias['node_id'] = mac.replace(':', '')
alias['hostname'] = name
if peer.has_key('geo'):
geo = peer['geo']
location = {}
if geo.has_key('lon'): location['longitude'] = geo['lon']
if geo.has_key('lat'): location['latitude'] = geo['lat']
alias['location'] = location
#alias['network'] = {}
#alias['network']['mesh_interfaces'] = [mac]
return alias
aliases = []
for filename in os.listdir(peersDir):
if len(filename) == 0 or filename[0] == '.':
continue
isGateway = False
absFilename = peersDir + '/' + filename
if os.path.isfile(absFilename):
peerFile = open(absFilename, 'r')
try:
peerLines = peerFile.readlines()
peer = {}
for line in peerLines:
parts = line.split()
if len(parts) > 2:
if parts[1] == 'Knotenname:':
peer['name'] = parts[2]
elif parts[0] == 'remote':
isGateway = True
elif parts[1] == 'MAC:':
peer['mac'] = normalizeMac(parts[2])
elif parts[1] == 'Koordinaten:' and len(parts) > 3:
try:
peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])}
except ValueError:
print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr)
elif len(parts) == 2 and parts[0] == 'key':
keyParts = parts[1].split('"')
if len(keyParts) > 1:
peer['vpn'] = keyParts[1].lower()
if isGateway:
continue
alias = toAlias(peer)
if alias:
aliases.append(alias)
except Exception as e:
print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr)
finally:
peerFile.close()
print(json.dumps(aliases))

View file

@@ -1,112 +0,0 @@
#!/usr/bin/env python2
from __future__ import print_function
import json
import os
import sys
if len(sys.argv) != 2:
print('usage: ' + sys.argv[0] + ' /path/to/peers')
sys.exit(1)
peersDir = sys.argv[1]
def normalizeMac(mac):
mac = mac.lower()
normalized = ''
n = 0
for c in mac:
if c != ':':
if n > 0 and n % 2 == 0:
normalized = normalized + ':'
normalized = normalized + c
n += 1
return normalized
def toAlias(peer):
alias = {}
if not (peer.has_key('name') and peer.has_key('mac')):
return None
name = peer['name']
mac = peer['mac']
alias['node_id'] = mac.replace(':', '')
alias['hostname'] = name
if peer.has_key('geo'):
geo = peer['geo']
location = {}
if geo.has_key('lon'): location['longitude'] = geo['lon']
if geo.has_key('lat'): location['latitude'] = geo['lat']
alias['location'] = location
#alias['network'] = {}
#alias['network']['mesh_interfaces'] = [mac]
return {'nodeinfo':alias}
aliases = {}
for filename in os.listdir(peersDir):
if len(filename) == 0 or filename[0] == '.':
continue
isGateway = False
absFilename = peersDir + '/' + filename
if os.path.isfile(absFilename):
peerFile = open(absFilename, 'r')
try:
peerLines = peerFile.readlines()
peer = {}
for line in peerLines:
parts = line.split()
if len(parts) > 2:
if parts[1] == 'Knotenname:':
peer['name'] = parts[2]
elif parts[0] == 'remote':
isGateway = True
elif parts[1] == 'MAC:':
peer['mac'] = normalizeMac(parts[2])
elif parts[1] == 'Koordinaten:' and len(parts) > 3:
try:
peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])}
except ValueError:
print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr)
elif len(parts) == 2 and parts[0] == 'key':
keyParts = parts[1].split('"')
if len(keyParts) > 1:
peer['vpn'] = keyParts[1].lower()
if isGateway:
continue
alias = toAlias(peer)
if alias:
tmpid = alias['nodeinfo']['node_id']
# alias['nodeinfo'].pop('node_id')
aliases[tmpid] = alias
except Exception as e:
print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr)
finally:
peerFile.close()
print(json.dumps(aliases))

13
hostid.py Normal file
View file

@@ -0,0 +1,13 @@
from functools import reduce
def mac_to_hostid(mac):
    # Derive the modified EUI-64 host identifier from a MAC address:
    # flip the universal/local bit in the first octet, insert ff:fe
    # between the OUI and the device bytes, then join the eight bytes
    # into four 16-bit groups.
    int_mac = list(map(lambda x: int(x, 16), mac.split(":")))
    int_mac[0] ^= 2
    hexbytes = ["%02x" % x for x in int_mac[0:3] + [0xff, 0xfe] + int_mac[3:]]
    return reduce(lambda acc, i: acc + ("" if i == 0 else ":") +
                  hexbytes[2 * i] + hexbytes[2 * i + 1],
                  range(0, 4), "")
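# Example (hypothetical MAC): the universal/local bit is flipped and ff:fe
# is inserted, as in modified EUI-64 interface identifiers:
# mac_to_hostid("02:ca:ff:ee:ba:be") -> "00ca:ffff:feee:babe"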

lib/GlobalRRD.py
View file

@@ -1,40 +0,0 @@
import os
import subprocess
from lib.RRD import DS, RRA, RRD
class GlobalRRD(RRD):
ds_list = [
# Number of nodes available
DS('nodes', 'GAUGE', 120, 0, float('NaN')),
# Number of client available
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
# 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 1, 120),
# 31 days of 1 hour samples
RRA('AVERAGE', 0.5, 60, 744),
# ~5 years of 1 day samples
RRA('AVERAGE', 0.5, 1440, 1780),
]
def __init__(self, directory):
super().__init__(os.path.join(directory, "nodes.rrd"))
self.ensure_sanity(self.ds_list, self.rra_list, step=60)
# TODO: fix this, python does not support function overloading
def update(self, node_count, client_count):
super().update({'nodes': node_count, 'clients': client_count})
def graph(self, filename, timeframe):
args = ["rrdtool", 'graph', filename,
'-s', '-' + timeframe,
'-w', '800',
'-h', '400',
'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
'LINE1:nodes#F00:nodes\\l',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'LINE2:clients#00F:clients']
subprocess.check_output(args)

lib/NodeRRD.py
View file

@@ -1,61 +0,0 @@
import os
import subprocess
from lib.RRD import DS, RRA, RRD
class NodeRRD(RRD):
ds_list = [
DS('upstate', 'GAUGE', 120, 0, 1),
DS('clients', 'GAUGE', 120, 0, float('NaN')),
]
rra_list = [
# 2 hours of 1 minute samples
RRA('AVERAGE', 0.5, 1, 120),
# 5 days of 5 minute samples
RRA('AVERAGE', 0.5, 5, 1440),
# 30 days of 1 hour samples
RRA('AVERAGE', 0.5, 60, 720),
# 1 year of 12 hour samples
RRA('AVERAGE', 0.5, 720, 730),
]
def __init__(self, filename, node=None):
"""
Create a new RRD for a given node.
If the RRD isn't supposed to be updated, the node can be omitted.
"""
self.node = node
super().__init__(filename)
self.ensure_sanity(self.ds_list, self.rra_list, step=60)
@property
def imagename(self):
return "{basename}.png".format(
basename=os.path.basename(self.filename).rsplit('.', 2)[0])
# TODO: fix this, python does not support function overloading
def update(self):
super().update({'upstate': int(self.node['flags']['online']),
'clients': self.node['statistics']['clients']})
def graph(self, directory, timeframe):
"""
Create a graph in the given directory. The file will be named
basename.png if the RRD file is named basename.rrd
"""
args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
'-s', '-' + timeframe,
'-w', '800',
'-h', '400',
'-l', '0',
'-y', '1:1',
'DEF:clients=' + self.filename + ':clients:AVERAGE',
'VDEF:maxc=clients,MAXIMUM',
'CDEF:c=0,clients,ADDNAN',
'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
'AREA:c#0F0:up\\l',
'AREA:d#F00:down\\l',
'LINE1:c#00F:clients connected\\l']
subprocess.check_output(args)

lib/RRD.py
View file

@@ -1,346 +0,0 @@
import subprocess
import re
import os
from operator import xor, eq
from functools import reduce
from itertools import starmap
import math
class RRDIncompatibleException(Exception):
"""
Is raised when an RRD doesn't have the desired definition and cannot be
upgraded to it.
"""
pass
class RRDOutdatedException(Exception):
"""
Is raised when an RRD doesn't have the desired definition, but can be
upgraded to it.
"""
pass
if not hasattr(__builtins__, "FileNotFoundError"):
class FileNotFoundError(Exception):
pass
class RRD(object):
"""
An RRD is a Round Robin Database, a database which forgets old data and
aggregates multiple records into new ones.
It contains multiple Data Sources (DS) which can be thought of as columns,
and Round Robin Archives (RRA) which can be thought of as tables with the
DS as columns and time-dependant rows.
"""
# rra[2].cdp_prep[0].value = 1,8583033333e+03
_info_regex = re.compile("""
(?P<section>[a-z_]+)
\[ (?P<key>[a-zA-Z0-9_]+) \]
\.
|
(?P<name>[a-z_]+)
\s*=\s*
"? (?P<value>.*?) "?
$""", re.X)
_cached_info = None
def _exec_rrdtool(self, cmd, *args, **kwargs):
pargs = ["rrdtool", cmd, self.filename]
for k, v in kwargs.items():
pargs.extend(["--" + k, str(v)])
pargs.extend(args)
subprocess.check_output(pargs)
def __init__(self, filename):
self.filename = filename
def ensure_sanity(self, ds_list, rra_list, **kwargs):
"""
Create or upgrade the RRD file if necessary to contain all DS in
ds_list. If it needs to be created, the RRAs in rra_list and any kwargs
will be used for creation. Note that RRAs and options of an existing
database are NOT modified!
"""
try:
self.check_sanity(ds_list)
except FileNotFoundError:
self.create(ds_list, rra_list, **kwargs)
except RRDOutdatedException:
self.upgrade(ds_list)
def check_sanity(self, ds_list=()):
"""
Check if the RRD file exists and contains (at least) the DS listed in
ds_list.
"""
if not os.path.exists(self.filename):
raise FileNotFoundError(self.filename)
info = self.info()
if set(ds_list) - set(info['ds'].values()) != set():
for ds in ds_list:
if ds.name in info['ds'] and\
ds.type != info['ds'][ds.name].type:
raise RRDIncompatibleException(
"{} is {} but should be {}".format(
ds.name, ds.type, info['ds'][ds.name].type))
else:
raise RRDOutdatedException()
def upgrade(self, dss):
"""
Upgrade the DS definitions (!) of this RRD.
(To update its values, use update())
The list dss contains DSS objects to be updated or added. The
parameters of a DS can be changed, but not its type. New DS are always
added at the end in the order of their appearance in the list.
This is done internally via an rrdtool dump -> rrdtool restore and
modifying the dump on the fly.
"""
info = self.info()
new_ds = list(info['ds'].values())
new_ds.sort(key=lambda ds: ds.index)
for ds in dss:
if ds.name in info['ds']:
old_ds = info['ds'][ds.name]
if info['ds'][ds.name].type != ds.type:
raise RuntimeError(
"Cannot convert existing DS '{}'"
"from type '{}' to '{}'".format(
ds.name, old_ds.type, ds.type))
ds.index = old_ds.index
new_ds[ds.index] = ds
else:
ds.index = len(new_ds)
new_ds.append(ds)
added_ds_num = len(new_ds) - len(info['ds'])
dump = subprocess.Popen(
["rrdtool", "dump", self.filename],
stdout=subprocess.PIPE)
restore = subprocess.Popen(
["rrdtool", "restore", "-", self.filename + ".new"],
stdin=subprocess.PIPE)
echo = True
ds_definitions = True
for line in dump.stdout:
if ds_definitions and b'<ds>' in line:
echo = False
if b'<!-- Round Robin Archives -->' in line:
ds_definitions = False
for ds in new_ds:
restore.stdin.write(bytes("""
<ds>
<name> %s </name>
<type> %s </type>
<minimal_heartbeat>%i</minimal_heartbeat>
<min>%s</min>
<max>%s</max>
<!-- PDP Status -->
<last_ds>%s</last_ds>
<value>%s</value>
<unknown_sec> %i </unknown_sec>
</ds>
""" % (ds.name,
ds.type,
ds.args[0],
ds.args[1],
ds.args[2],
ds.last_ds,
ds.value,
ds.unknown_sec), "utf-8"))
if b'</cdp_prep>' in line:
restore.stdin.write(added_ds_num * b"""
<ds>
<primary_value> NaN </primary_value>
<secondary_value> NaN </secondary_value>
<value> NaN </value>
<unknown_datapoints> 0 </unknown_datapoints>
</ds>
""")
# echoing of input line
if echo:
restore.stdin.write(
line.replace(
b'</row>',
(added_ds_num * b'<v>NaN</v>') + b'</row>'
)
)
if ds_definitions and b'</ds>' in line:
echo = True
dump.stdout.close()
restore.stdin.close()
dump.wait()
restore.wait()
os.rename(self.filename + ".new", self.filename)
self._cached_info = None
def create(self, ds_list, rra_list, **kwargs):
"""
Create a new RRD file with the specified list of RRAs and DSs.
Any kwargs are passed as --key=value to rrdtool create.
"""
self._exec_rrdtool(
"create",
*map(str, rra_list + ds_list),
**kwargs
)
self._cached_info = None
def update(self, V):
"""
Update the RRD with new values V.
V can be either list or dict:
* If it is a dict, its keys must be DS names in the RRD and it is
ensured that the correct DS are updated with the correct values, by
passing a "template" to rrdtool update (see man rrdupdate).
* If it is a list, no template is generated and the order of the
values in V must be the same as that of the DS in the RRD.
"""
try:
args = ['N:' + ':'.join(map(str, V.values()))]
kwargs = {'template': ':'.join(V.keys())}
except AttributeError:
args = ['N:' + ':'.join(map(str, V))]
kwargs = {}
self._exec_rrdtool("update", *args, **kwargs)
self._cached_info = None
def info(self):
"""
Return a dictionary with information about the RRD.
See `man rrdinfo` for more details.
"""
if self._cached_info:
return self._cached_info
env = os.environ.copy()
env["LC_ALL"] = "C"
proc = subprocess.Popen(
["rrdtool", "info", self.filename],
stdout=subprocess.PIPE,
env=env
)
out, err = proc.communicate()
out = out.decode()
info = {}
for line in out.splitlines():
base = info
for match in self._info_regex.finditer(line):
section, key, name, value = match.group(
"section", "key", "name", "value")
if section and key:
try:
key = int(key)
except ValueError:
pass
if section not in base:
base[section] = {}
if key not in base[section]:
base[section][key] = {}
base = base[section][key]
if name and value:
try:
base[name] = int(value)
except ValueError:
try:
base[name] = float(value)
except ValueError:
base[name] = value
dss = {}
for name, ds in info['ds'].items():
ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'],
ds['min'], ds['max'])
ds_obj.index = ds['index']
ds_obj.last_ds = ds['last_ds']
ds_obj.value = ds['value']
ds_obj.unknown_sec = ds['unknown_sec']
dss[name] = ds_obj
info['ds'] = dss
rras = []
for rra in info['rra'].values():
rras.append(RRA(rra['cf'], rra['xff'],
rra['pdp_per_row'], rra['rows']))
info['rra'] = rras
self._cached_info = info
return info
class DS(object):
"""
DS stands for Data Source and represents one line of data points in a Round
Robin Database (RRD).
"""
name = None
type = None
args = []
index = -1
last_ds = 'U'
value = 0
unknown_sec = 0
def __init__(self, name, dst, *args):
self.name = name
self.type = dst
self.args = args
def __str__(self):
return "DS:%s:%s:%s" % (
self.name,
self.type,
":".join(map(str, self._nan_to_u_args()))
)
def __repr__(self):
return "%s(%r, %r, %s)" % (
self.__class__.__name__,
self.name,
self.type,
", ".join(map(repr, self.args))
)
def __eq__(self, other):
return all(starmap(eq, zip(self.compare_keys(), other.compare_keys())))
def __hash__(self):
return reduce(xor, map(hash, self.compare_keys()))
def _nan_to_u_args(self):
return tuple(
'U' if type(arg) is float and math.isnan(arg)
else arg
for arg in self.args
)
def compare_keys(self):
return self.name, self.type, self._nan_to_u_args()
class RRA(object):
def __init__(self, cf, *args):
self.cf = cf
self.args = args
def __str__(self):
return "RRA:%s:%s" % (self.cf, ":".join(map(str, self.args)))
def __repr__(self):
return "%s(%r, %s)" % (
self.__class__.__name__,
self.cf,
", ".join(map(repr, self.args))
)
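# Illustrative string forms, matching __str__ above (not part of the original
# file): str(DS('clients', 'GAUGE', 120, 0, float('NaN'))) yields
# "DS:clients:GAUGE:120:0:U", and str(RRA('AVERAGE', 0.5, 1, 120)) yields
# "RRA:AVERAGE:0.5:1:120".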

lib/__init__.py
View file

@@ -1 +0,0 @@
__author__ = 'hexa'

lib/alfred.py
View file

@@ -1,33 +0,0 @@
import subprocess
import json
import os
class Alfred(object):
"""
Bindings for the alfred-json utility
"""
def __init__(self, unix_sockpath=None):
self.unix_sock = unix_sockpath
if unix_sockpath is not None and not os.path.exists(unix_sockpath):
raise RuntimeError('alfred: invalid unix socket path given')
def _fetch(self, data_type):
cmd = ['/usr/local/bin/alfred-json',
'-z',
'-f', 'json',
'-r', str(data_type)]
if self.unix_sock:
cmd.extend(['-s', self.unix_sock])
output = subprocess.check_output(cmd)
return json.loads(output.decode("utf-8")).values()
def nodeinfo(self):
return self._fetch(158)
def statistics(self):
return self._fetch(159)
def vis(self):
return self._fetch(160)
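# Illustrative usage (socket path taken from the README; not part of the
# original file):
# a = Alfred(unix_sockpath='/var/run/alfred.sock')
# nodeinfos = a.nodeinfo()   # alfred type 158 records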

lib/batman.py
View file

@@ -1,98 +0,0 @@
import subprocess
import json
import os
import re
class Batman(object):
"""
Bindings for B.A.T.M.A.N. Advanced
commandline interface "batctl"
"""
def __init__(self, mesh_interface='bat0', alfred_sockpath=None):
self.mesh_interface = mesh_interface
self.alfred_sock = alfred_sockpath
# ensure /usr/sbin and /usr/local/sbin are in PATH
env = os.environ
path = set(env['PATH'].split(':'))
path.add('/usr/sbin/')
path.add('/usr/local/sbin')
env['PATH'] = ':'.join(path)
self.environ = env
# compile regular expressions only once on startup
self.mac_addr_pattern = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})')
def vis_data(self):
return self.vis_data_batadv_vis()
@staticmethod
def vis_data_helper(lines):
vd_tmp = []
for line in lines:
try:
utf8_line = line.decode('utf-8')
vd_tmp.append(json.loads(utf8_line))
except UnicodeDecodeError:
pass
return vd_tmp
def vis_data_batadv_vis(self):
"""
Parse "batadv-vis -i <mesh_interface> -f json"
into an array of dictionaries.
"""
cmd = ['batadv-vis', '-i', self.mesh_interface, '-f', 'json']
if self.alfred_sock:
cmd.extend(['-u', self.alfred_sock])
output = subprocess.check_output(cmd, env=self.environ)
lines = output.splitlines()
return self.vis_data_helper(lines)
def gateway_list(self):
"""
Parse "batctl -m <mesh_interface> gwl -n"
into an array of dictionaries.
"""
cmd = ['batctl', '-m', self.mesh_interface, 'gwl', '-n']
if os.geteuid() > 0:
cmd.insert(0, 'sudo')
output = subprocess.check_output(cmd, env=self.environ)
output_utf8 = output.decode('utf-8')
rows = output_utf8.splitlines()
gateways = []
# local gateway
header = rows.pop(0)
mode, bandwidth = self.gateway_mode()
if mode == 'server':
local_gw_mac = self.mac_addr_pattern.search(header).group(0)
gateways.append(local_gw_mac)
# remote gateway(s)
for row in rows:
match = self.mac_addr_pattern.search(row)
if match:
gateways.append(match.group(1))
return gateways
def gateway_mode(self):
"""
Parse "batctl -m <mesh_interface> gw"
return: tuple mode, bandwidth, if mode != server then bandwidth is None
"""
cmd = ['batctl', '-m', self.mesh_interface, 'gw']
if os.geteuid() > 0:
cmd.insert(0, 'sudo')
output = subprocess.check_output(cmd, env=self.environ)
chunks = output.decode("utf-8").split()
return chunks[0], chunks[3] if len(chunks) > 3 else None
if __name__ == "__main__":
bc = Batman()
vd = bc.vis_data()
gw = bc.gateway_list()

lib/graph.py
View file

@@ -1,84 +0,0 @@
from functools import reduce
from itertools import chain
import networkx as nx
from lib.nodes import build_mac_table
def import_vis_data(graph, nodes, vis_data):
macs = build_mac_table(nodes)
nodes_a = map(lambda d: 2 * [d['primary']],
filter(lambda d: 'primary' in d, vis_data))
nodes_b = map(lambda d: [d['secondary'], d['of']],
filter(lambda d: 'secondary' in d, vis_data))
graph.add_nodes_from(map(lambda a, b:
(a, dict(primary=b, node_id=macs.get(b))),
*zip(*chain(nodes_a, nodes_b))))
edges = filter(lambda d: 'neighbor' in d, vis_data)
graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'],
dict(tq=float(d['label']))), edges))
def mark_vpn(graph, vpn_macs):
components = map(frozenset, nx.weakly_connected_components(graph))
components = filter(vpn_macs.intersection, components)
nodes = reduce(lambda a, b: a | b, components, set())
for node in nodes:
for k, v in graph[node].items():
v['vpn'] = True
def to_multigraph(graph):
def f(a):
node = graph.node[a]
return node['primary'] if node else a
def map_node(node, data):
return (data['primary'],
dict(node_id=data['node_id'])) if data else (node, dict())
digraph = nx.MultiDiGraph()
digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data),
*zip(*graph.edges_iter(data=True))))
return digraph
def merge_nodes(graph):
def merge_edges(data):
tq = min(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn)
multigraph = to_multigraph(graph)
digraph = nx.DiGraph()
digraph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(
multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
digraph.add_edges_from(edges)
return digraph
def to_undirected(graph):
multigraph = nx.MultiGraph()
multigraph.add_nodes_from(graph.nodes_iter(data=True))
multigraph.add_edges_from(graph.edges_iter(data=True))
def merge_edges(data):
tq = max(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)
graph = nx.Graph()
graph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(
multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
graph.add_edges_from(edges)
return graph

lib/nodelist.py
View file

@@ -1,27 +0,0 @@
def export_nodelist(now, nodedb):
nodelist = list()
for node_id, node in nodedb["nodes"].items():
node_out = dict()
node_out["id"] = node_id
node_out["name"] = node["nodeinfo"]["hostname"]
if "location" in node["nodeinfo"]:
node_out["position"] = {"lat": node["nodeinfo"]["location"]["latitude"],
"long": node["nodeinfo"]["location"]["longitude"]}
node_out["status"] = dict()
node_out["status"]["online"] = node["flags"]["online"]
if "firstseen" in node:
node_out["status"]["firstcontact"] = node["firstseen"]
if "lastseen" in node:
node_out["status"]["lastcontact"] = node["lastseen"]
if "clients" in node["statistics"]:
node_out["status"]["clients"] = node["statistics"]["clients"]
nodelist.append(node_out)
return {"version": "1.0.1", "nodes": nodelist, "updated_at": now.isoformat()}

lib/nodes.py
View file

@@ -1,202 +0,0 @@
from collections import Counter, defaultdict
from datetime import datetime
from functools import reduce
def build_mac_table(nodes):
macs = dict()
for node_id, node in nodes.items():
try:
macs[node['network']['mac']] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh_interfaces']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']:
macs[mac] = node_id
except KeyError:
pass
return macs
def prune_nodes(nodes, now, days):
prune = []
for node_id, node in nodes.items():
if 'lastseen' not in node:
prune.append(node_id)
continue
lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S')
delta = (now - lastseen).days
if delta >= days:
prune.append(node_id)
for node_id in prune:
del nodes[node_id]
def mark_online(node, now):
node['lastseen'] = now.isoformat()
node.setdefault('firstseen', now.isoformat())
node['flags']['online'] = True
def overrideFields(dest, src, fields):
for field in fields:
if field in src:
dest[field] = src[field]
else:
dest.pop(field, None)
def import_nodeinfo(nodes, nodeinfos, now, assume_online=False, statics=False):
for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos):
node = nodes.setdefault(nodeinfo['node_id'], {'flags': {'online': False, 'gateway': False}})
if statics:
node['nodeinfo'] = node.setdefault('nodeinfo', {})
overrideFields(node['nodeinfo'], nodeinfo, ['hostname', 'location', 'node_id'])
else:
node['nodeinfo'] = nodeinfo
if assume_online:
mark_online(node, now)
def reset_statistics(nodes):
for node in nodes.values():
node['statistics'] = {'clients': 0}
def import_statistics(nodes, stats):
def add(node, statistics, target, source, f=lambda d: d):
try:
node['statistics'][target] = f(reduce(dict.__getitem__,
source,
statistics))
except (KeyError, TypeError, ZeroDivisionError):
pass
macs = build_mac_table(nodes)
stats = filter(lambda d: 'node_id' in d, stats)
stats = filter(lambda d: d['node_id'] in nodes, stats)
for node, stats in map(lambda d: (nodes[d['node_id']], d), stats):
add(node, stats, 'clients', ['clients', 'total'])
add(node, stats, 'gateway', ['gateway'], lambda d: macs.get(d, d))
add(node, stats, 'uptime', ['uptime'])
add(node, stats, 'loadavg', ['loadavg'])
add(node, stats, 'memory_usage', ['memory'],
lambda d: 1 - d['free'] / d['total'])
add(node, stats, 'rootfs_usage', ['rootfs_usage'])
add(node, stats, 'traffic', ['traffic'])
def import_mesh_ifs_vis_data(nodes, vis_data):
macs = build_mac_table(nodes)
mesh_ifs = defaultdict(lambda: set())
for line in filter(lambda d: 'secondary' in d, vis_data):
primary = line['of']
mesh_ifs[primary].add(primary)
mesh_ifs[primary].add(line['secondary'])
def if_to_node(ifs):
a = filter(lambda d: d in macs, ifs)
a = map(lambda d: nodes[macs[d]], a)
try:
return next(a), ifs
except StopIteration:
return None
mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values()))
for v in mesh_nodes:
node = v[0]
ifs = set()
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh_interfaces']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']))
except KeyError:
pass
node['nodeinfo']['network']['mesh_interfaces'] = list(ifs | v[1])
def import_vis_clientcount(nodes, vis_data):
macs = build_mac_table(nodes)
data = filter(lambda d: d.get('label', None) == 'TT', vis_data)
data = filter(lambda d: d['router'] in macs, data)
data = map(lambda d: macs[d['router']], data)
for node_id, clientcount in Counter(data).items():
nodes[node_id]['statistics'].setdefault('clients', clientcount)
def mark_gateways(nodes, gateways):
macs = build_mac_table(nodes)
gateways = filter(lambda d: d in macs, gateways)
for node in map(lambda d: nodes[macs[d]], gateways):
node['flags']['gateway'] = True
def mark_vis_data_online(nodes, vis_data, now):
macs = build_mac_table(nodes)
online = set()
for line in vis_data:
if 'primary' in line:
online.add(line['primary'])
elif 'secondary' in line:
online.add(line['secondary'])
elif 'gateway' in line:
# This matches clients' MACs.
# On pre-Gluon nodes the primary MAC will be one of them.
online.add(line['gateway'])
for mac in filter(lambda d: d in macs, online):
mark_online(nodes[macs[mac]], now)

lib/rrddb.py
View file

@@ -1,54 +0,0 @@
#!/usr/bin/env python3
import time
import os
from lib.GlobalRRD import GlobalRRD
from lib.NodeRRD import NodeRRD
class RRD(object):
def __init__(self,
database_directory,
image_path,
display_time_global="7d",
display_time_node="1d"):
self.dbPath = database_directory
self.globalDb = GlobalRRD(self.dbPath)
self.imagePath = image_path
self.displayTimeGlobal = display_time_global
self.displayTimeNode = display_time_node
self.currentTimeInt = (int(time.time()) // 60) * 60
self.currentTime = str(self.currentTimeInt)
try:
os.stat(self.imagePath)
except OSError:
os.mkdir(self.imagePath)
def update_database(self, nodes):
online_nodes = dict(filter(
lambda d: d[1]['flags']['online'], nodes.items()))
client_count = sum(map(
lambda d: d['statistics']['clients'], online_nodes.values()))
self.globalDb.update(len(online_nodes), client_count)
for node_id, node in online_nodes.items():
rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
rrd.update()
def update_images(self):
self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"),
self.displayTimeGlobal)
nodedb_files = os.listdir(self.dbPath)
for file_name in nodedb_files:
if not os.path.isfile(os.path.join(self.dbPath, file_name)):
continue
node_name = os.path.basename(file_name).split('.')
if node_name[1] == 'rrd' and not node_name[0] == "nodes":
rrd = NodeRRD(os.path.join(self.dbPath, file_name))
rrd.graph(self.imagePath, self.displayTimeNode)

lib/validate.py
View file

@@ -1,19 +0,0 @@
import json
def validate_nodeinfos(nodeinfos):
result = []
for nodeinfo in nodeinfos:
if validate_nodeinfo(nodeinfo):
result.append(nodeinfo)
return result
def validate_nodeinfo(nodeinfo):
if 'location' in nodeinfo:
if 'latitude' not in nodeinfo['location'] or 'longitude' not in nodeinfo['location']:
return False
return True

15
link.py Normal file
View file

@@ -0,0 +1,15 @@
class Link():
def __init__(self):
self.id = None
self.source = None
self.target = None
self.quality = None
self.type = None
class LinkConnector():
def __init__(self):
self.id = None
self.interface = None
def __repr__(self):
return "LinkConnector(%d, %s)" % (self.id, self.interface)

mkmap.sh
View file

@@ -1,7 +1,15 @@
#!/bin/bash
FFMAPPATH='/opt/ffmap-backend/'
PEERS="/etc/fastd/ffhh-mesh-vpn/peers"
python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json
#python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 --vpn 00:16:3e:fb:9d:8d --vpn 00:16:3e:fb:9d:9d
python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 de:ad:be:ff:05:05 de:ad:be:ff:05:06 de:ad:be:ff:03:03 de:ad:be:ff:22:22 de:ad:be:ff:22:23 de:ad:be:ff:88:88 de:ad:be:ff:88:89 de:ad:bf:ff:88:88 de:ad:bf:ff:22:22 de:ad:bf:ff:03:03 de:ad:bf:ff:05:05 de:ad:bf:ff:01:01 de:ad:be:fc:03:03 00:16:3e:53:75:0d de:ad:be:fc:05:05 de:ad:be:fc:01:01 de:ad:be:ef:03:03 de:ad:be:ef:01:01 de:ad:be:ef:05:05 00:16:3e:fb:9d:8d 00:16:3e:fb:9d:9d
set -e
DEST=$1
[ "$DEST" ] || exit 1
cd "$(dirname "$0")"/
./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json
./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json
./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST

30
node.py Normal file
View file

@@ -0,0 +1,30 @@
class Node():
def __init__(self):
self.name = ""
self.id = ""
self.macs = set()
self.interfaces = dict()
self.flags = dict({
"online": False,
"gateway": False,
"client": False
})
self.gps = None
self.firmware = None
def add_mac(self, mac):
mac = mac.lower()
if len(self.macs) == 0:
self.id = mac
self.macs.add(mac)
self.interfaces[mac] = Interface()
def __repr__(self):
return self.macs.__repr__()
class Interface():
def __init__(self):
self.vpn = False

View file

@@ -1,32 +0,0 @@
#!/usr/bin/env python
# Import libraries
import time
import datetime
import json
import urllib2
# Open the files
Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json')
Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json')
Text = Datei.read()
Knotenzahl = Text.count('"online": true')
Text = Datei_Sued.read()
Knotenzahl = Knotenzahl + Text.count('"online":true')
# Get the time
thetime = datetime.datetime.now().isoformat()
ffhh = None
# Load the Freifunk API file and parse the JSON
with open('/var/www/meta/ffhh.json', 'r') as fp:
ffhh = json.load(fp)
# Set the timestamp and node count attributes
ffhh['state']['lastchange'] = thetime
ffhh['state']['nodes'] = Knotenzahl
# Write the Freifunk API file with the changed values
with open('/var/www/meta/ffhh.json', 'w') as fp:
json.dump(ffhh, fp, indent=2, separators=(',', ': '))

374
nodedb.py Normal file
View file

@@ -0,0 +1,374 @@
import json
from functools import reduce
from collections import defaultdict
from node import Node, Interface
from link import Link, LinkConnector
class NodeDB:
def __init__(self):
self._nodes = []
self._links = []
# fetch list of links
def get_links(self):
self.update_vpn_links()
return self.reduce_links()
# fetch list of nodes
def get_nodes(self):
return self._nodes
def maybe_node_by_fuzzy_mac(self, mac):
mac_a = mac.lower()
for node in self._nodes:
for mac_b in node.macs:
if is_derived_mac(mac_a, mac_b):
return node
raise KeyError
def maybe_node_by_mac(self, macs):
for node in self._nodes:
for mac in macs:
if mac.lower() in node.macs:
return node
raise KeyError
def maybe_node_by_id(self, mac):
for node in self._nodes:
if mac.lower() == node.id:
return node
raise KeyError
def parse_vis_data(self,vis_data):
for x in vis_data:
if 'of' in x:
try:
node = self.maybe_node_by_mac((x['of'], x['secondary']))
except:
node = Node()
node.flags['online'] = True
if 'legacy' in x:
node.flags['legacy'] = True
self._nodes.append(node)
node.add_mac(x['of'])
node.add_mac(x['secondary'])
for x in vis_data:
if 'router' in x:
try:
node = self.maybe_node_by_mac((x['router'], ))
except:
node = Node()
node.flags['online'] = True
if 'legacy' in x:
node.flags['legacy'] = True
node.add_mac(x['router'])
self._nodes.append(node)
# If it's a TT link and the MAC is very similar
# consider this MAC as one of the routers
# MACs
if 'gateway' in x and x['label'] == "TT":
if is_similar(x['router'], x['gateway']):
node.add_mac(x['gateway'])
# skip processing as regular link
continue
try:
if 'neighbor' in x:
try:
node = self.maybe_node_by_mac((x['neighbor'], ))
except:
continue
if 'gateway' in x:
x['neighbor'] = x['gateway']
node = self.maybe_node_by_mac((x['neighbor'], ))
except:
node = Node()
node.flags['online'] = True
if x['label'] == 'TT':
node.flags['client'] = True
node.add_mac(x['neighbor'])
self._nodes.append(node)
for x in vis_data:
if 'router' in x:
try:
if 'gateway' in x:
x['neighbor'] = x['gateway']
router = self.maybe_node_by_mac((x['router'], ))
neighbor = self.maybe_node_by_mac((x['neighbor'], ))
except:
continue
# filter TT links merged in previous step
if router == neighbor:
continue
link = Link()
link.source = LinkConnector()
link.source.interface = x['router']
link.source.id = self._nodes.index(router)
link.target = LinkConnector()
link.target.interface = x['neighbor']
link.target.id = self._nodes.index(neighbor)
link.quality = x['label']
link.id = "-".join(sorted((link.source.interface, link.target.interface)))
if x['label'] == "TT":
link.type = "client"
self._links.append(link)
for x in vis_data:
if 'primary' in x:
try:
node = self.maybe_node_by_mac((x['primary'], ))
except:
continue
node.id = x['primary']
def reduce_links(self):
tmp_links = defaultdict(list)
for link in self._links:
tmp_links[link.id].append(link)
links = []
def reduce_link(a, b):
a.id = b.id
a.source = b.source
a.target = b.target
a.type = b.type
a.quality = ", ".join([x for x in (a.quality, b.quality) if x])
return a
for k, v in tmp_links.items():
new_link = reduce(reduce_link, v, Link())
links.append(new_link)
return links
def import_aliases(self, aliases):
for mac, alias in aliases.items():
try:
node = self.maybe_node_by_fuzzy_mac(mac)
except:
# create an offline node
node = Node()
node.add_mac(mac)
self._nodes.append(node)
if 'name' in alias:
node.name = alias['name']
if 'vpn' in alias and alias['vpn']:
node.interfaces[mac].vpn = True
if 'gps' in alias:
node.gps = alias['gps']
if 'firmware' in alias:
node.firmware = alias['firmware']
# hyphens are not valid in Python attribute names, so use underscores
if 'firmware-base' in alias:
node.firmware_base = alias['firmware-base']
if 'hardware-model' in alias:
node.hardware_model = alias['hardware-model']
if 'selected-gateway' in alias:
node.selected_gateway = alias['selected-gateway']
if 'autoupdater-branch' in alias:
node.autoupdater_branch = alias['autoupdater-branch']
if 'autoupdater-enabled' in alias:
node.autoupdater_enabled = alias['autoupdater-enabled']
# list of macs
# if options['gateway']:
# mark_gateways(options['gateway'])
    def mark_gateways(self, gateways):
        for gateway in gateways:
            try:
                node = self.maybe_node_by_mac((gateway, ))
            except KeyError:
                continue

            node.flags['gateway'] = True
    def update_vpn_links(self):
        changes = 1
        while changes > 0:
            changes = 0
            for link in self._links:
                if link.type == "client":
                    continue

                source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
                target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
                if source_interface.vpn or target_interface.vpn:
                    source_interface.vpn = True
                    target_interface.vpn = True
                    if link.type != "vpn":
                        changes += 1
                    link.type = "vpn"
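    # The while loop above is a fixpoint iteration: flagging one link's
    # endpoints as VPN interfaces can qualify other links that share those
    # interface objects, so the link list is rescanned until a full pass
    # produces no changes. E.g. with links A-B and B-C over the same
    # interface B, a pass that happens to process B-C before A-B needs a
    # second pass to mark B-C as "vpn" as well.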
    def obscure_clients(self):
        globalIdCounter = 0
        nodeCounters = {}
        clientIds = {}

        for node in self._nodes:
            if node.flags['client']:
                node.macs = set()
                clientIds[node.id] = None

        for link in self._links:
            ids = link.source.interface
            idt = link.target.interface

            try:
                node_source = self.maybe_node_by_fuzzy_mac(ids)
                node_target = self.maybe_node_by_id(idt)

                if not node_source.flags['client'] and not node_target.flags['client']:
                    # if neither of the nodes associated with this link
                    # is a client, we do not want to obscure it
                    continue

                if ids in clientIds and idt in clientIds:
                    # This is for corner cases, when a client
                    # is linked to another client.
                    clientIds[ids] = str(globalIdCounter)
                    ids = str(globalIdCounter)
                    globalIdCounter += 1

                    clientIds[idt] = str(globalIdCounter)
                    idt = str(globalIdCounter)
                    globalIdCounter += 1

                elif ids in clientIds:
                    newId = generateId(idt, nodeCounters)
                    clientIds[ids] = newId
                    ids = newId

                    link.source.interface = ids
                    node_source.id = ids

                elif idt in clientIds:
                    newId = generateId(ids, nodeCounters)
                    clientIds[idt] = newId
                    idt = newId

                    link.target.interface = idt
                    node_target.id = idt

                link.id = ids + "-" + idt

            except KeyError:
                pass
# extends a node id by an incremented per-node counter
def generateId(nodeId, nodeCounters):
    if nodeId in nodeCounters:
        n = nodeCounters[nodeId]
        nodeCounters[nodeId] = n + 1
    else:
        nodeCounters[nodeId] = 1
        n = 0

    return nodeId + "_" + str(n)
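# For example (counter state is threaded through the dict):
#
#     counters = {}
#     generateId("de:ad:be:ef:00:01", counters)  # -> "de:ad:be:ef:00:01_0"
#     generateId("de:ad:be:ef:00:01", counters)  # -> "de:ad:be:ef:00:01_1"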
# compares two MACs and decides whether they are
# similar, i.e. could belong to the same node
def is_similar(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    # the first byte must only differ in bit 2
    if mac_a[0] | 2 == mac_b[0] | 2:
        # count differing bytes
        c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
    else:
        return False

    # no more than two additional bytes may differ
    if len(c) <= 2:
        delta = 0

        if len(c) > 0:
            delta = sum(abs(i[0] - i[1]) for i in c)

        # These addresses look pretty similar!
        return delta < 8

    return False
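# Worked example: "52:54:00:12:34:56" and "50:54:00:12:34:57" count as
# similar: the first bytes 0x52 and 0x50 differ only in bit 2, exactly one
# further byte differs, and its distance is 1 < 8. Swapping the last byte
# for 0x5f instead would give a distance of 9 and the pair would no longer
# be considered similar.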
def is_derived_mac(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    # bytes 1, 2 and 4 never change between a primary MAC
    # and the interface MACs derived from it
    if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]:
        return False

    # last byte incremented by one
    x = list(mac_a)
    x[5] += 1
    x[5] %= 255

    if mac_b == x:
        return True

    # additionally, the locally administered bit set
    x[0] |= 2
    if mac_b == x:
        return True

    # additionally, byte 3 incremented by one
    x[3] += 1
    x[3] %= 255

    if mac_b == x:
        return True

    # locally administered bit set, last byte incremented by two
    x = list(mac_a)
    x[0] |= 2
    x[5] += 2
    x[5] %= 255

    if mac_b == x:
        return True

    # locally administered bit set, byte 3 incremented by one
    x = list(mac_a)
    x[0] |= 2
    x[3] += 1
    x[3] %= 255

    if mac_b == x:
        return True

    return False
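# Worked example: with a = "00:11:22:33:44:55", setting the locally
# administered bit (0x02) in the first byte and adding 2 to the last byte
# yields "02:11:22:33:44:57", which the fourth pattern above accepts, so
# is_derived_mac(a, "02:11:22:33:44:57") returns True.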

153
rrd.py Executable file
View file

@ -0,0 +1,153 @@
#!/usr/bin/env python3
import subprocess
import time
import os


class rrd:
    def __init__(self,
                 databaseDirectory,
                 imagePath,
                 displayTimeGlobal="7d",
                 displayTimeNode="1d"):
        self.dbPath = databaseDirectory
        self.globalDbFile = databaseDirectory + "/nodes.rrd"
        self.imagePath = imagePath
        self.displayTimeGlobal = displayTimeGlobal
        self.displayTimeNode = displayTimeNode

        # align the timestamp to a full minute so it matches the 60 s step
        # (integer division; true division would hand rrdtool a float)
        self.currentTimeInt = (int(time.time()) // 60) * 60
        self.currentTime = str(self.currentTimeInt)

        try:
            os.stat(self.imagePath)
        except OSError:
            os.mkdir(self.imagePath)
    def checkAndCreateIfNeededGlobalDatabase(self):
        """ Creates the global database file if it does not exist yet.
        """
        if not os.path.exists(self.globalDbFile):
            # Create the database with rrdtool. The RRAs keep one-minute
            # samples for 31 days (44640 rows), hourly samples for 31 days
            # (744 rows) and daily samples for about five years (1780 rows).
            args = ["rrdtool", 'create', self.globalDbFile,
                    '--start', str(round(self.currentTimeInt - 60)),
                    '--step', '60',
                    # number of nodes available
                    'DS:nodes:GAUGE:120:0:U',
                    'RRA:LAST:0:1:44640',
                    'RRA:LAST:0:60:744',
                    'RRA:LAST:0:1440:1780',
                    # number of clients available
                    'DS:clients:GAUGE:120:0:U',
                    'RRA:LAST:0:1:44640',
                    'RRA:LAST:0:60:744',
                    'RRA:LAST:0:1440:1780',
                    ]
            subprocess.call(args)
    def updateGlobalDatabase(self, nodeCount, clientCount):
        """ Adds a new (#nodes, #clients) entry to the global database.
        """
        # update the global RRD database
        args = ["rrdtool", 'updatev', self.globalDbFile,
                # timestamp:#nodes:#clients
                self.currentTime + ":" + str(nodeCount) + ":" + str(clientCount),
                ]
        subprocess.check_output(args)
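    # With illustrative numbers (timestamp, 250 nodes, 1337 clients), the
    # call above boils down to:
    #
    #     rrdtool updatev <dbPath>/nodes.rrd 1391954400:250:1337
    #
    # i.e. one timestamped sample carrying both gauge values at once.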
    def createGlobalGraph(self):
        nodeGraph = self.imagePath + "/" + "globalGraph.png"
        args = ["rrdtool", 'graph', nodeGraph,
                '-s', '-' + self.displayTimeGlobal,
                '-w', '800', '-h', '400',
                'DEF:nodes=' + self.globalDbFile + ':nodes:LAST',
                'LINE1:nodes#F00:nodes\\l',
                'DEF:clients=' + self.globalDbFile + ':clients:LAST',
                'LINE2:clients#00F:clients',
                ]
        subprocess.check_output(args)
    def nodeMACToRRDFile(self, nodeMAC):
        return self.dbPath + "/" + str(nodeMAC).replace(":", "") + ".rrd"

    def nodeMACToPNGFile(self, nodeMAC):
        return self.imagePath + "/" + str(nodeMAC).replace(":", "") + ".png"
    def checkAndCreateIfNeededNodeDatabase(self, nodePrimaryMAC):
        # TODO check for bad node names
        nodeFile = self.nodeMACToRRDFile(nodePrimaryMAC)
        if not os.path.exists(nodeFile):
            # TODO adjust the scales
            args = ["rrdtool", 'create', nodeFile,
                    '--start', str(round(self.currentTimeInt - 60)),
                    '--step', '60',
                    'DS:upstate:GAUGE:120:0:1',
                    'RRA:LAST:0:1:44640',
                    # number of clients available
                    'DS:clients:GAUGE:120:0:U',
                    'RRA:LAST:0:1:44640',
                    ]
            subprocess.check_output(args)
    # call only if the node is up
    def updateNodeDatabase(self, nodePrimaryMAC, clientCount):
        nodeFile = self.nodeMACToRRDFile(nodePrimaryMAC)
        # update the node's RRD database
        args = ["rrdtool", 'updatev', nodeFile,
                # timestamp:upstate:#clients
                self.currentTime + ":" + str(1) + ":" + str(clientCount),
                ]
        subprocess.check_output(args)
    def createNodeGraph(self, nodePrimaryMAC, displayTimeNode):
        nodeGraph = self.nodeMACToPNGFile(nodePrimaryMAC)
        nodeFile = self.nodeMACToRRDFile(nodePrimaryMAC)
        args = ['rrdtool', 'graph', nodeGraph,
                '-s', '-' + displayTimeNode,
                '-w', '800', '-h', '400', '-l', '0', '-y', '1:1',
                'DEF:clients=' + nodeFile + ':clients:LAST',
                'VDEF:maxc=clients,MAXIMUM',
                'CDEF:c=0,clients,ADDNAN',
                'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                'AREA:c#0F0:up\\l',
                'AREA:d#F00:down\\l',
                'LINE1:c#00F:clients connected\\l',
                ]
        subprocess.check_output(args)
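    # Reading the RPN above: "clients,UN" is 1 while the sample is unknown
    # (node down), so d rises to maxc (or to 1 if no maximum exists yet)
    # and paints the red "down" area, while c substitutes 0 for unknown
    # samples via ADDNAN so the green "up" area and the client line stay
    # continuous.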
    def update_database(self, db):
        nodes = {}
        clientCount = 0

        for node in db.get_nodes():
            if node.flags['online']:
                if not node.flags['client']:
                    nodes[node.id] = node
                    node.clients = 0
                    if 'legacy' in node.flags and node.flags['legacy']:
                        clientCount -= 1
                else:
                    clientCount += 1

        for link in db.get_links():
            source = link.source.interface
            target = link.target.interface
            if source in nodes and target not in nodes:
                nodes[source].clients += 1
            elif target in nodes and source not in nodes:
                nodes[target].clients += 1

        self.checkAndCreateIfNeededGlobalDatabase()
        self.updateGlobalDatabase(len(nodes), clientCount)
        for mac in nodes:
            self.checkAndCreateIfNeededNodeDatabase(mac)
            self.updateNodeDatabase(mac, nodes[mac].clients)
    def update_images(self):
        """ Creates an image for every rrd file in the database directory.
        """
        self.createGlobalGraph()

        nodeDbFiles = os.listdir(self.dbPath)

        for fileName in nodeDbFiles:
            if not os.path.isfile(os.path.join(self.dbPath, fileName)):
                continue

            nodeName = os.path.basename(fileName).split('.')
            if len(nodeName) == 2 and nodeName[1] == 'rrd' and nodeName[0] != "nodes":
                self.createNodeGraph(nodeName[0], self.displayTimeNode)
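# A minimal wiring sketch (the variable names and the "nodedb"/"images"
# paths are assumptions; db stands for any object exposing get_nodes()
# and get_links() as consumed by update_database above):
#
#     rrd_handler = rrd("nodedb", "images")
#     rrd_handler.update_database(db)
#     rrd_handler.update_images()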