Compare commits

15 commits:

cd1329963a
322860be7e
66112061d6
f08aaaff4e
6d452fc149
5fba69de7a
446bc98403
e54e7467fc
f5e3705eec
54402ce089
ee51547664
89e4c63700
7075d8481c
43e70191f1
6fc1423124
.gitignore (vendored, 2 changes)
@@ -1,3 +1 @@
 *.pyc
-aliases.json
-nodedb/
GlobalRRD.py (deleted, 35 lines)
@@ -1,35 +0,0 @@
import os
import subprocess
from RRD import RRD, DS, RRA

class GlobalRRD(RRD):
    ds_list = [
        # Number of nodes available
        DS('nodes', 'GAUGE', 120, 0, float('NaN')),
        # Number of client available
        DS('clients', 'GAUGE', 120, 0, float('NaN')),
    ]
    rra_list = [
        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
        RRA('AVERAGE', 0.5, 60, 744),   # 31 days of 1 hour samples
        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
    ]

    def __init__(self, directory):
        super().__init__(os.path.join(directory, "nodes.rrd"))
        self.ensureSanity(self.ds_list, self.rra_list, step=60)

    def update(self, nodeCount, clientCount):
        super().update({'nodes': nodeCount, 'clients': clientCount})

    def graph(self, filename, timeframe):
        args = ["rrdtool", 'graph', filename,
                '-s', '-' + timeframe,
                '-w', '800',
                '-h' '400',
                'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
                'LINE1:nodes#F00:nodes\\l',
                'DEF:clients=' + self.filename + ':clients:AVERAGE',
                'LINE2:clients#00F:clients',
                ]
        subprocess.check_output(args)
NodeRRD.py (deleted, 54 lines)
@@ -1,54 +0,0 @@
import os
import subprocess
from node import Node
from RRD import RRD, DS, RRA

class NodeRRD(RRD):
    ds_list = [
        DS('upstate', 'GAUGE', 120, 0, 1),
        DS('clients', 'GAUGE', 120, 0, float('NaN')),
    ]
    rra_list = [
        RRA('AVERAGE', 0.5, 1, 120),   # 2 hours of 1 minute samples
        RRA('AVERAGE', 0.5, 5, 1440),  # 5 days of 5 minute samples
        RRA('AVERAGE', 0.5, 60, 720),  # 30 days of 1 hour samples
        RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples
    ]

    def __init__(self, filename, node = None):
        """
        Create a new RRD for a given node.

        If the RRD isn't supposed to be updated, the node can be omitted.
        """
        self.node = node
        super().__init__(filename)
        self.ensureSanity(self.ds_list, self.rra_list, step=60)

    @property
    def imagename(self):
        return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"

    def update(self):
        super().update({'upstate': 1, 'clients': self.node.clients})

    def graph(self, directory, timeframe):
        """
        Create a graph in the given directory. The file will be named
        basename.png if the RRD file is named basename.rrd
        """
        args = ['rrdtool','graph', os.path.join(directory, self.imagename),
                '-s', '-' + timeframe,
                '-w', '800',
                '-h', '400',
                '-l', '0',
                '-y', '1:1',
                'DEF:clients=' + self.filename + ':clients:AVERAGE',
                'VDEF:maxc=clients,MAXIMUM',
                'CDEF:c=0,clients,ADDNAN',
                'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                'AREA:c#0F0:up\\l',
                'AREA:d#F00:down\\l',
                'LINE1:c#00F:clients connected\\l',
                ]
        subprocess.check_output(args)
@@ -42,12 +42,12 @@ Alias /firmware /home/freifunk/autoupdates/
 </pre>
 
 To execute, run
-    ./mkmap.sh ../www
+    python3 -mffmap.run --input-alfred --input-badadv --output-d3json ../www/nodes.json
 The script expects above described sudo-wrappers in the $HOME directory of the user executing
 the script. If those are not available, an error will occurr if not executed as root. Also,
 the tool realpath optionally allows to execute the script from anywhere in the directory tree.
 
 For the script's regular execution add the following to the crontab:
 <pre>
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+*/5 * * * * python3 -mffmap.run --input-alfred --input-badadv --output-d3json /home/ffmap/www/nodes.json
 </pre>
alfred.py (deleted, 42 lines)
@@ -1,42 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json

class alfred:
    def __init__(self,request_data_type = 158):
        self.request_data_type = request_data_type

    def aliases(self):
        output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"])
        alfred_data = json.loads(output.decode("utf-8"))
        alias = {}
        for mac,node in alfred_data.items():
            node_alias = {}
            if 'location' in node:
                try:
                    node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
                except:
                    pass

            try:
                node_alias['firmware'] = node['software']['firmware']['release']
            except KeyError:
                pass

            try:
                node_alias['id'] = node['network']['mac']
            except KeyError:
                pass

            if 'hostname' in node:
                node_alias['name'] = node['hostname']
            elif 'name' in node:
                node_alias['name'] = node['name']
            if len(node_alias):
                alias[mac] = node_alias
        return alias

if __name__ == "__main__":
    ad = alfred()
    al = ad.aliases()
    print(al)
@@ -1,9 +0,0 @@
{
    "b0:48:7a:e7:d3:64" : {
        "name" : "Meute-AP"
    },
    "8e:3d:c2:10:10:28" : {
        "name" : "holstentor",
        "vpn" : true
    }
}
bat2nodes.py (deleted, 86 lines)
@@ -1,86 +0,0 @@
#!/usr/bin/env python3

import json
import fileinput
import argparse
import os

from batman import batman
from alfred import alfred
from rrd import rrd
from nodedb import NodeDB
from d3mapbuilder import D3MapBuilder

# Force encoding to UTF-8
import locale  # Ensures that subsequent open()s
locale.getpreferredencoding = lambda _=None: 'UTF-8'  # are UTF-8 encoded.

import sys
#sys.stdin = open('/dev/stdin', 'r')
#sys.stdout = open('/dev/stdout', 'w')
#sys.stderr = open('/dev/stderr', 'w')

parser = argparse.ArgumentParser()

parser.add_argument('-a', '--aliases',
                    help='read aliases from FILE',
                    action='append',
                    metavar='FILE')

parser.add_argument('-m', '--mesh', action='append',
                    help='batman mesh interface')

parser.add_argument('-o', '--obscure', action='store_true',
                    help='obscure client macs')

parser.add_argument('-A', '--alfred', action='store_true',
                    help='retrieve aliases from alfred')

parser.add_argument('-d', '--destination-directory', action='store',
                    help='destination directory for generated files',required=True)

args = parser.parse_args()

options = vars(args)

db = NodeDB()
if options['mesh']:
    for mesh_interface in options['mesh']:
        bm = batman(mesh_interface)
        db.parse_vis_data(bm.vis_data(options['alfred']))
        for gw in bm.gateway_list():
            db.mark_gateways(gw['mac'])
else:
    bm = batman()
    db.parse_vis_data(bm.vis_data(options['alfred']))
    for gw in bm.gateway_list():
        db.mark_gateways([gw['mac']])

if options['aliases']:
    for aliases in options['aliases']:
        db.import_aliases(json.load(open(aliases)))

if options['alfred']:
    af = alfred()
    db.import_aliases(af.aliases())

db.count_clients()

if options['obscure']:
    db.obscure_clients()

scriptdir = os.path.dirname(os.path.realpath(__file__))

m = D3MapBuilder(db)

#Write nodes json
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
nodes_json.write(m.build())
nodes_json.close()

#Move to destination
os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')

rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes")
rrd.update_database(db)
rrd.update_images()
batman.py (deleted, 86 lines)
@@ -1,86 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json
import re

class batman:
    """ Bindings for B.A.T.M.A.N. advanced batctl tool
    """
    def __init__(self, mesh_interface = "bat0"):
        self.mesh_interface = mesh_interface

    def vis_data(self,batadv_vis=False):
        vds = self.vis_data_batctl_legacy()
        if batadv_vis:
            vds += self.vis_data_batadv_vis()
        return vds

    def vis_data_helper(self,lines):
        vd = []
        for line in lines:
            try:
                utf8_line = line.decode("utf-8")
                vd.append(json.loads(utf8_line))
            except e:
                pass
        return vd

    def vis_data_batctl_legacy(self):
        """ Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
        """
        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
        lines = output.splitlines()
        vds = self.vis_data_helper(lines)
        for vd in vds:
            vd['legacy'] = True
        return vds

    def vis_data_batadv_vis(self):
        """ Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
        """
        output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
        lines = output.splitlines()
        return self.vis_data_helper(lines)

    def gateway_list(self):
        """ Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
        """
        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
        output_utf8 = output.decode("utf-8")
        # TODO Parse information
        lines = output_utf8.splitlines()
        own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1)
        # Remove header line
        del lines[0]
        # Fill gateway list
        gw = []
        gw_mode = self.gateway_mode()
        if gw_mode['mode'] == 'server':
            gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']})
        for line in lines:
            gw_line = line.split()
            if (gw_line[0] == 'No'):
                continue
            # When in client gateway mode maybe gw_line[0] is not the right.
            gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]})
        return gw

    def gateway_mode(self):
        """ Parse "batctl -m <mesh_interface> gw"
        """
        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
        elements = output.decode("utf-8").split()
        mode = elements[0]
        if mode == "server":
            return {'mode': 'server', 'bandwidth': elements[3]}
        else:
            return {'mode': mode}

if __name__ == "__main__":
    bc = batman()
    vd = bc.vis_data()
    gw = bc.gateway_list()
    for x in vd:
        print(x)
    print(gw)
    print(bc.gateway_mode())
@@ -1,36 +0,0 @@
import json
import datetime

class D3MapBuilder:
    def __init__(self, db):
        self._db = db

    def build(self):
        output = dict()

        now = datetime.datetime.utcnow().replace(microsecond=0)

        nodes = self._db.get_nodes()

        output['nodes'] = [{'name': x.name, 'id': x.id,
                            'macs': ', '.join(x.macs),
                            'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None,
                            'firmware': x.firmware,
                            'flags': x.flags,
                            'clientcount': x.clientcount
                           } for x in nodes]

        links = self._db.get_links()

        output['links'] = [{'source': x.source.id, 'target': x.target.id,
                            'quality': x.quality,
                            'type': x.type,
                            'id': x.id
                           } for x in links]

        output['meta'] = {
            'timestamp': now.isoformat()
        }

        return json.dumps(output)
ffhlwiki.py (deleted, 93 lines)
@@ -1,93 +0,0 @@
#!/usr/bin/env python3

import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup

def import_wikigps(url):
    def fetch_wikitable(url):
        f = urlopen(url)

        soup = BeautifulSoup(f)

        table = soup.find_all("table")[0]

        rows = table.find_all("tr")

        headers = []

        data = []

        def maybe_strip(x):
            if isinstance(x.string, str):
                return x.string.strip()
            else:
                return ""

        for row in rows:
            tds = list([maybe_strip(x) for x in row.find_all("td")])
            ths = list([maybe_strip(x) for x in row.find_all("th")])

            if any(tds):
                data.append(tds)

            if any(ths):
                headers = ths

        nodes = []

        for d in data:
            nodes.append(dict(zip(headers, d)))

        return nodes

    nodes = fetch_wikitable(url)

    aliases = {}

    for node in nodes:
        try:
            node['MAC'] = node['MAC'].split(',')
        except KeyError:
            pass

        try:
            node['GPS'] = node['GPS'].split(',')
        except KeyError:
            pass

        try:
            node['Knotenname'] = node['Knotenname'].split(',')
        except KeyError:
            pass

        nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])

        for data in nodes:
            alias = {}

            mac = data[0].strip()

            if data[1]:
                alias['gps'] = data[1].strip()

            if data[2]:
                alias['name'] = data[2].strip()

            aliases[mac] = alias

    return aliases

parser = argparse.ArgumentParser()

parser.add_argument('url', help='wiki URL')

args = parser.parse_args()

options = vars(args)

aliases = import_wikigps(options['url'])

print(json.dumps(aliases))
ffmap/__init__.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import importlib

from ffmap.nodedb import NodeDB

def run(inputs, outputs):
    """Fill the database with given inputs and give it to given outputs.

    Arguments:
    inputs -- list of Input instances (with a compatible get_data(nodedb) method)
    outputs -- list of Output instances (with a compatible output(nodedb) method)
    """
    db = NodeDB()
    for input_ in inputs:
        input_.get_data(db)

    for output in outputs:
        output.output(db)

def run_names(inputs, outputs):
    """Fill the database with inputs and give it to outputs, each given
    by names.

    In contrast to run(inputs, outputs), this method expects only the
    names of the modules to use, not instances thereof.
    Arguments:
    inputs -- list of dicts, each dict having the keys "name" with the
              name of the input to use (directory name in inputs/), and
              the key "options" with a dict of input-dependent options.
    outputs -- list of dicts, see inputs.
    """
    input_instances = []
    output_instances = []

    for input_ in inputs:
        module = importlib.import_module(".inputs." + input_["name"], "ffmap")
        input_instances.append(module.Input(**input_["options"]))

    for output in outputs:
        module = importlib.import_module(".outputs." + output["name"], "ffmap")
        output_instances.append(module.Output(**output["options"]))

    run(input_instances, output_instances)
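A rough usage sketch of run_names(), assuming the ffmap package from this commit is on the import path; the option keys mirror the Input/Output constructor arguments in the modules below, and the concrete values are made up:

# Hypothetical direct call; roughly equivalent to the CLI invocation
# "python3 -mffmap.run --input-batadv --output-d3json nodes.json".
from ffmap import run_names

run_names(
    inputs=[{"name": "batadv", "options": {"mesh_interface": "bat0"}}],
    outputs=[{"name": "d3json", "options": {"filepath": "nodes.json"}}],
)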
ffmap/inputs/alfred.py (new file, 29 lines)
@@ -0,0 +1,29 @@
import subprocess
import json

class Input:
    def __init__(self,request_data_type = 158):
        self.request_data_type = request_data_type

    @staticmethod
    def _call_alfred(request_data_type):
        return json.loads(subprocess.check_output([
            "alfred-json",
            "-z",
            "-r", str(request_data_type),
            "-f", "json",
        ]).decode("utf-8"))

    def get_data(self, nodedb):
        """Add data from alfred to the supplied nodedb"""
        nodeinfo = self._call_alfred(self.request_data_type)
        statistics = self._call_alfred(self.request_data_type+1)

        # merge statistics into nodeinfo to be compatible with earlier versions
        for mac, node in statistics.items():
            if mac in nodeinfo:
                nodeinfo[mac]['statistics'] = statistics[mac]

        for mac, node in nodeinfo.items():
            aliases = [mac] + node.get('network', {}).get('mesh_interfaces', [])
            nodedb.add_or_update(aliases, node)
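The statistics merge in get_data() is easy to demonstrate in isolation; the payloads below are fabricated stand-ins for what alfred-json returns for the two consecutive data types:

# Fabricated alfred payloads; real data comes from the nodeinfo and
# statistics request types (158 and 159 by default).
nodeinfo = {"aa:bb:cc:dd:ee:ff": {"hostname": "node1"}}
statistics = {"aa:bb:cc:dd:ee:ff": {"clients": {"total": 3}}}

for mac in statistics:
    if mac in nodeinfo:
        nodeinfo[mac]["statistics"] = statistics[mac]

print(nodeinfo)
# {'aa:bb:cc:dd:ee:ff': {'hostname': 'node1', 'statistics': {'clients': {'total': 3}}}}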
ffmap/inputs/batadv.py (new file, 100 lines)
@@ -0,0 +1,100 @@
import subprocess
import json

class Input:
    """Fill the NodeDB with links from batadv-vis.

    The links are added as lists containing the neighboring nodes, not
    only their identifiers! Mind this when exporting the database, as
    it probably leads to recursion.
    """
    def __init__(self, mesh_interface="bat0"):
        self.mesh_interface = mesh_interface

    @staticmethod
    def _is_similar_mac(a, b):
        """Determine if two MAC addresses are similar."""
        if a == b:
            return True

        # Split the address into bytes
        try:
            mac_a = list(int(i, 16) for i in a.split(":"))
            mac_b = list(int(i, 16) for i in b.split(":"))
        except ValueError:
            return False

        # Second and third byte musn't differ
        if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]:
            return False

        # First byte must only differ in bits 2 and 3
        if mac_a[0] | 6 != mac_b[0] | 6:
            return False

        # Count differing bytes after the third
        c = [x for x in zip(mac_a[3:], mac_b[3:]) if x[0] != x[1]]

        # No more than two additional bytes must differ
        if len(c) > 2:
            return False

        # If no more bytes differ, they are very similar
        if len(c) == 0:
            return True

        # If the sum of absolute differences isn't greater than 2, they
        # are pretty similar
        delta = sum(abs(i[0] - i[1]) for i in c)
        return delta < 2

    def get_data(self, nodedb):
        """Add data from batadv-vis to the supplied nodedb"""
        output = subprocess.check_output([
            "batadv-vis",
            "-i", str(self.mesh_interface),
            "-f", "jsondoc",
        ])
        data = json.loads(output.decode("utf-8"))

        # First pass
        for node in data["vis"]:
            # Determine possible other MAC addresses of this node by
            # comparing all its client's MAC addresses to its primary
            # MAC address. If they are similar, it probably is another
            # address of the node itself! If it isn't, it is a real
            # client.
            node['aliases'] = [node["primary"]]
            if 'secondary' in node:
                node['aliases'].extend(node['secondary'])
            real_clients = []
            for mac in node["clients"]:
                if self._is_similar_mac(mac, node["primary"]):
                    node['aliases'].append(mac)
                else:
                    real_clients.append(mac)
            node['clients'] = real_clients

            # Add nodes and aliases without any information at first.
            # This way, we can later link the objects themselves.
            nodedb.add_or_update(node['aliases'])

        # Second pass
        for node in data["vis"]:
            # We only need the primary address now, all aliases are
            # already present in the database. Furthermore, we can be
            # sure that all neighbors are in the database as well. If
            # a neighbor isn't added already, we simply ignore it.
            nodedb.add_or_update(
                [node["primary"]],
                {
                    "clients": node["clients"],
                    "neighbors": [
                        {
                            "metric": neighbor['metric'],
                            "neighbor": nodedb[neighbor['neighbor']],
                        } for neighbor in node["neighbors"]
                        if neighbor['neighbor'] in nodedb
                    ]
                }
            )
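The similarity heuristic can be exercised without a mesh; the addresses below are invented, and the import assumes the package layout added in this commit:

# A flip of the locally-administered bits in the first byte plus a
# small drift in the last byte passes; a different vendor prefix fails.
from ffmap.inputs.batadv import Input

print(Input._is_similar_mac("de:ad:be:ef:00:01", "dc:ad:be:ef:00:02"))  # True
print(Input._is_similar_mac("de:ad:be:ef:00:01", "12:34:56:78:9a:bc"))  # False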
ffmap/inputs/wiki.py (new executable file, 71 lines)
@@ -0,0 +1,71 @@
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup

class Input:
    def __init__(self, url="http://luebeck.freifunk.net/wiki/Knoten"):
        self.url = url

    def fetch_wikitable(self):
        f = urlopen(self.url)
        soup = BeautifulSoup(f)
        table = soup.find("table")
        rows = table.find_all("tr")
        headers = []
        data = []

        def maybe_strip(x):
            if isinstance(x.string, str):
                return x.string.strip()
            else:
                return ""

        for row in rows:
            tds = list([maybe_strip(x) for x in row.find_all("td")])
            ths = list([maybe_strip(x) for x in row.find_all("th")])

            if any(tds):
                data.append(tds)

            if any(ths):
                headers = ths

        return [dict(zip(headers, d)) for d in data]

    def get_data(self, nodedb):
        nodes = self.fetch_wikitable()

        for node in nodes:
            if "MAC" not in node or not node["MAC"]:
                # without MAC, we cannot merge this data with others, so
                # we might as well ignore it
                continue

            newnode = {
                "network": {
                    "mac": node.get("MAC").lower(),
                },
                "location": {
                    "latitude": float(node.get("GPS", " ").split(" ")[0]),
                    "longitude": float(node.get("GPS", " ").split(" ")[1]),
                    "description": node.get("Ort"),
                } if " " in node.get("GPS", "") else None,
                "hostname": node.get("Knotenname"),
                "hardware": {
                    "model": node["Router"],
                } if node.get("Router") else None,
                "software": {
                    "firmware": {
                        "base": "LFF",
                        "release": node.get("LFF Version"),
                    },
                },
                "owner": {
                    "contact": node["Betreiber"],
                } if node.get("Betreiber") else None,
            }
            # remove keys with None as value
            newnode = {k: v for k,v in newnode.items() if v is not None}
            nodedb.add_or_update([newnode["network"]["mac"]], newnode)
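The header-to-dict mapping done by fetch_wikitable() boils down to a zip(); a minimal sketch with fabricated table cells (the real keys come from the wiki page's column headers):

# One header row and one data row, as maybe_strip() would deliver them.
headers = ["Knotenname", "MAC", "GPS"]
data = [["holstentor", "8e:3d:c2:10:10:28", "53.87 10.69"]]

nodes = [dict(zip(headers, d)) for d in data]
print(nodes[0]["MAC"])  # 8e:3d:c2:10:10:28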
ffmap/node.py (new file, 91 lines)
@@ -0,0 +1,91 @@
from collections import defaultdict

class NoneDict:
    """Act like None but return a NoneDict for every item request.

    This is similar to the behaviour of collections.defaultdict in that
    even previously inexistent keys can be accessed, but nothing is
    stored permanently in this class.
    """
    def __repr__(self):
        return 'NoneDict()'
    def __bool__(self):
        return False
    def __getitem__(self, k):
        return NoneDict()
    def __json__(self):
        return None
    def __float__(self):
        return float('NaN')
    def __iter__(self):
        # empty generator
        return
        yield
    def __setitem__(self, key, value):
        raise RuntimeError("NoneDict is readonly")

class Node(defaultdict):
    _id = None
    def __init__(self, id_=None):
        self._id = id_
        super().__init__(NoneDict)

    def __repr__(self):
        return "Node(%s)" % self.id

    @property
    def id(self):
        return self._id

    def __hash__(self):
        """Generate hash from the node's id.

        WARNING: Obviously this hash doesn't cover all of the node's
        data, but we need nodes to be hashable in order to eliminate
        duplicates in the NodeDB.

        At least the id cannot change after initialization...
        """
        return hash(self.id)

    def deep_update(self, other):
        """Update the dictionary like dict.update() but recursively."""
        def dmerge(a, b):
            for k, v in b.items():
                if isinstance(v, dict) and isinstance(a.get(k), dict):
                    dmerge(a[k], v)
                else:
                    a[k] = v
        dmerge(self, other)

    @property
    def vpn_neighbors(self):
        try:
            vpn_neighbors = []
            for neighbor in self['neighbors']:
                if neighbor['neighbor']['vpn']:
                    vpn_neighbors.append(neighbor)
            return vpn_neighbors
        except TypeError:
            return []

    def export(self):
        """Generate a serializable dict of the node.

        In particular, this replaces any references to other nodes by
        their id to prevent circular references.
        """
        ret = dict(self)
        if "neighbors" in self:
            ret["neighbors"] = []
            for neighbor in self["neighbors"]:
                new_neighbor = {}
                for key, val in neighbor.items():
                    if isinstance(val, Node):
                        new_neighbor[key] = val.id
                    else:
                        new_neighbor[key] = val
                ret["neighbors"].append(new_neighbor)
        if "id" not in ret:
            ret["id"] = self.id
        return ret
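A short demonstration of the NoneDict fallback and the deep_update() semantics described above; the MAC and values are made up:

from ffmap.node import Node

n = Node("aa:bb:cc:dd:ee:ff")
# Missing keys yield a falsy NoneDict instead of raising KeyError.
print(bool(n["statistics"]["loadavg"]))  # False
# Successive deep updates merge subtrees instead of overwriting them.
n.deep_update({"software": {"firmware": {"base": "gluon"}}})
n.deep_update({"software": {"firmware": {"release": "1.0"}}})
print(n["software"]["firmware"])  # {'base': 'gluon', 'release': '1.0'}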
ffmap/nodedb.py (new file, 60 lines)
@@ -0,0 +1,60 @@
from .node import Node

class AmbiguityError(Exception):
    """Indicate the ambiguity of identifiers.

    This exception is raised if there is more than one match for a set
    of identifiers.

    Attributes:
    identifiers -- set of ambiguous identifiers
    """

    identifiers = []

    def __init__(self, identifiers):
        self.identifiers = identifiers

    def __str__(self):
        return "Ambiguous identifiers: %s" % ", ".join(self.identifiers)

class NodeDB(dict):
    def add_or_update(self, ids, other=None):
        """Add or update a node in the database.

        Searches for an already existing node and updates it, or adds a new
        one if no existing one is found. Raises an AmbiguityException if
        more than one different nodes are found matching the criteria.

        Arguments:
        ids -- list of possible identifiers (probably MAC addresses) of the
               node
        other -- dict of values to update in an existing node or add to
                 the new one. Defaults to None, in which case no values
                 are added or updated, only the aliases of the
                 (possibly freshly created) node are updated.
        """

        # Find existing node, if any
        node = None
        node_id = None
        for id_ in ids:
            if id_ == node_id:
                continue
            if id_ in self:
                if node is not None and node is not self[id_]:
                    raise AmbiguityError([node_id, id_])
                node = self[id_]
                node_id = id_

        # If no node was found, create a new one
        if node is None:
            node = Node(ids[0])

        # Update the node with the given properties using its own update method.
        if other is not None:
            node.deep_update(other)

        # Add new aliases if any
        for id_ in ids:
            self[id_] = node
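The alias handling is the interesting part of add_or_update(): with fabricated MACs, both identifiers end up pointing at one shared Node object, and later updates through either alias land on the same node:

from ffmap.nodedb import NodeDB

db = NodeDB()
db.add_or_update(["aa:aa:aa:aa:aa:01", "aa:aa:aa:aa:aa:02"], {"hostname": "node1"})
db.add_or_update(["aa:aa:aa:aa:aa:02"], {"vpn": True})

assert db["aa:aa:aa:aa:aa:01"] is db["aa:aa:aa:aa:aa:02"]
print(db["aa:aa:aa:aa:aa:01"]["hostname"], db["aa:aa:aa:aa:aa:01"]["vpn"])  # node1 True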
ffmap/outputs/__init__.py (new file, 1 empty line)
@@ -0,0 +1 @@
ffmap/outputs/d3json.py (new file, 91 lines)
@@ -0,0 +1,91 @@
import json
from datetime import datetime

__all__ = ["Exporter"]

class CustomJSONEncoder(json.JSONEncoder):
    """
    JSON encoder that uses an object's __json__() method to convert it
    to something JSON-compatible.
    """
    def default(self, obj):
        try:
            return obj.__json__()
        except AttributeError:
            pass
        return super().default(obj)

class Output:
    def __init__(self, filepath="nodes.json"):
        self.filepath = filepath

    @staticmethod
    def generate(nodedb):
        indexes = {}
        nodes = []
        count = 0
        for node in set(nodedb.values()):
            node_export = node.export()
            node_export["flags"] = {
                "gateway": "vpn" in node and node["vpn"],
                "client": False,
                "online": True
            }
            nodes.append(node_export)
            indexes[node.id] = count
            count += 1

        links = {}
        for node in set(nodedb.values()):
            for neighbor in node.get("neighbors", []):
                key = (neighbor["neighbor"].id, node.id)
                rkey = tuple(reversed(key))
                if rkey in links:
                    links[rkey]["quality"] += ","+neighbor["metric"]
                else:
                    links[key] = {
                        "source": indexes[node.id],
                        "target": indexes[neighbor["neighbor"].id],
                        "quality": neighbor["metric"],
                        "type": "vpn" if neighbor["neighbor"]["vpn"] or node["vpn"] else None,
                        "id": "-".join((node.id, neighbor["neighbor"].id)),
                    }
            clientcount = 0
            for client in node.get("clients", []):
                nodes.append({
                    "id": "%s-%s" % (node.id, clientcount),
                    "flags": {
                        "client": True,
                        "online": True,
                        "gateway": False
                    }
                })
                indexes[client] = count

                links[(node.id, client)] = {
                    "source": indexes[node.id],
                    "target": indexes[client],
                    "quality": "TT",
                    "type": "client",
                    "id": "%s-%i" % (node.id, clientcount),
                }
                count += 1
                clientcount += 1

        return {
            "nodes": nodes,
            "links": list(links.values()),
            "meta": {
                "timestamp": datetime.utcnow()
                             .replace(microsecond=0)
                             .isoformat()
            }
        }

    def output(self, nodedb):
        with open(self.filepath, "w") as nodes_json:
            json.dump(
                self.generate(nodedb),
                nodes_json,
                cls=CustomJSONEncoder
            )
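The CustomJSONEncoder hook is generic: any object exposing a __json__() method serializes through it (NoneDict from ffmap.node is the in-tree user). The Point class here is invented for illustration:

import json
from ffmap.outputs.d3json import CustomJSONEncoder

class Point:
    def __init__(self, lat, lon):
        self.lat, self.lon = lat, lon
    def __json__(self):
        # Called by CustomJSONEncoder.default() instead of raising TypeError.
        return {"latitude": self.lat, "longitude": self.lon}

print(json.dumps({"location": Point(53.87, 10.69)}, cls=CustomJSONEncoder))
# {"location": {"latitude": 53.87, "longitude": 10.69}}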
ffmap/outputs/rrd.py (new file, 30 lines)
@@ -0,0 +1,30 @@
import os
from ffmap.rrd.rrds import NodeRRD, GlobalRRD

class Output:
    def __init__(self, directory="nodedb"):
        self.directory = directory
        try:
            os.mkdir(self.directory)
        except OSError:
            pass

    def output(self, nodedb):
        nodes = set(nodedb.values())
        clients = 0
        nodecount = 0
        for node in nodes:
            clients += len(node.get("clients", []))
            nodecount += 1
            NodeRRD(
                os.path.join(
                    self.directory,
                    str(node.id).replace(':', '') + '.rrd'
                ),
                node
            ).update()

        GlobalRRD(os.path.join(self.directory, "nodes.rrd")).update(
            nodecount,
            clients
        )
@@ -80,9 +80,9 @@ class RRD:
             raise FileNotFoundError(self.filename)
         info = self.info()
         if set(ds_list) - set(info['ds'].values()) != set():
-            if set((ds.name, ds.type) for ds in ds_list) \
-                    - set((ds.name, ds.type) for ds in info['ds'].values()) != set():
-                raise RRDIncompatibleException()
+            for ds in ds_list:
+                if ds.name in info['ds'] and ds.type != info['ds'][ds.name].type:
+                    raise RRDIncompatibleException("%s is %s but should be %s" % (ds.name, ds.type, info['ds'][ds.name].type))
             else:
                 raise RRDOutdatedException()
 
@@ -177,15 +177,8 @@ class RRD:
         echo = True
         dump.stdout.close()
         restore.stdin.close()
-        try:
-            dump.wait(1)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-        try:
-            restore.wait(2)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-            raise RuntimeError("rrdtool restore process killed")
+        dump.wait()
+        restore.wait()
 
         os.rename(self.filename + ".new", self.filename)
         self._cached_info = None
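The removed lines used the standard Popen.wait(timeout)/kill pattern; a self-contained sketch of that pattern, with "sleep" standing in for the rrdtool dump/restore pipeline:

import subprocess

proc = subprocess.Popen(["sleep", "10"])
try:
    proc.wait(timeout=1)   # raises if the process outlives the timeout
except subprocess.TimeoutExpired:
    proc.kill()
    proc.wait()            # reap the killed process
    print("process killed after timeout")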
ffmap/rrd/rrds.py (new file, 115 lines)
@@ -0,0 +1,115 @@
import os
import subprocess
from ffmap.node import Node
from . import RRD, DS, RRA

class NodeRRD(RRD):
    ds_list = [
        DS('upstate', 'GAUGE', 120, 0, 1),
        DS('clients', 'GAUGE', 120, 0, float('NaN')),
        DS('neighbors', 'GAUGE', 120, 0, float('NaN')),
        DS('vpn_neighbors', 'GAUGE', 120, 0, float('NaN')),
        DS('loadavg', 'GAUGE', 120, 0, float('NaN')),
        DS('rx_bytes', 'DERIVE', 120, 0, float('NaN')),
        DS('rx_packets', 'DERIVE', 120, 0, float('NaN')),
        DS('tx_bytes', 'DERIVE', 120, 0, float('NaN')),
        DS('tx_packets', 'DERIVE', 120, 0, float('NaN')),
        DS('mgmt_rx_bytes', 'DERIVE', 120, 0, float('NaN')),
        DS('mgmt_rx_packets', 'DERIVE', 120, 0, float('NaN')),
        DS('mgmt_tx_bytes', 'DERIVE', 120, 0, float('NaN')),
        DS('mgmt_tx_packets', 'DERIVE', 120, 0, float('NaN')),
        DS('forward_bytes', 'DERIVE', 120, 0, float('NaN')),
        DS('forward_packets', 'DERIVE', 120, 0, float('NaN')),
    ]
    rra_list = [
        RRA('AVERAGE', 0.5, 1, 120),   # 2 hours of 1 minute samples
        RRA('AVERAGE', 0.5, 5, 1440),  # 5 days of 5 minute samples
        RRA('AVERAGE', 0.5, 60, 720),  # 30 days of 1 hour samples
        RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples
    ]

    def __init__(self, filename, node = None):
        """
        Create a new RRD for a given node.

        If the RRD isn't supposed to be updated, the node can be omitted.
        """
        self.node = node
        super().__init__(filename)
        self.ensureSanity(self.ds_list, self.rra_list, step=60)

    @property
    def imagename(self):
        return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"

    def update(self):
        values = {
            'upstate': 1,
            'clients': float(len(self.node.get('clients', []))),
            'neighbors': float(len(self.node.get('neighbors', []))),
            'vpn_neighbors': float(len(self.node.vpn_neighbors)),
            'loadavg': float(self.node['statistics']['loadavg']),
        }
        for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'):
            try:
                values[item + '_bytes'] = int(self.node['statistics']['traffic'][item]['bytes'])
            except TypeError:
                pass
            try:
                values[item + '_packets'] = int(self.node['statistics']['traffic'][item]['packets'])
            except TypeError:
                pass
        super().update(values)

    def graph(self, directory, timeframe):
        """
        Create a graph in the given directory. The file will be named
        basename.png if the RRD file is named basename.rrd
        """
        args = ['rrdtool','graph', os.path.join(directory, self.imagename),
                '-s', '-' + timeframe,
                '-w', '800',
                '-h', '400',
                '-l', '0',
                '-y', '1:1',
                'DEF:clients=' + self.filename + ':clients:AVERAGE',
                'VDEF:maxc=clients,MAXIMUM',
                'CDEF:c=0,clients,ADDNAN',
                'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                'AREA:c#0F0:up\\l',
                'AREA:d#F00:down\\l',
                'LINE1:c#00F:clients connected\\l',
                ]
        subprocess.check_output(args)

class GlobalRRD(RRD):
    ds_list = [
        # Number of nodes available
        DS('nodes', 'GAUGE', 120, 0, float('NaN')),
        # Number of client available
        DS('clients', 'GAUGE', 120, 0, float('NaN')),
    ]
    rra_list = [
        RRA('AVERAGE', 0.5, 1, 120),    # 2 hours of 1 minute samples
        RRA('AVERAGE', 0.5, 60, 744),   # 31 days of 1 hour samples
        RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
    ]

    def __init__(self, filepath):
        super().__init__(filepath)
        self.ensureSanity(self.ds_list, self.rra_list, step=60)

    def update(self, nodeCount, clientCount):
        super().update({'nodes': nodeCount, 'clients': clientCount})

    def graph(self, filename, timeframe):
        args = ["rrdtool", 'graph', filename,
                '-s', '-' + timeframe,
                '-w', '800',
                '-h' '400',
                'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
                'LINE1:nodes#F00:nodes\\l',
                'DEF:clients=' + self.filename + ':clients:AVERAGE',
                'LINE2:clients#00F:clients',
                ]
        subprocess.check_output(args)
ffmap/run.py (new file, 69 lines)
@@ -0,0 +1,69 @@
#!/usr/bin/env python3
import argparse
import sys

from ffmap import run_names

class MyAction(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        if self.dest.startswith(("input_", "output_")):
            collection_name = self.dest.split("_")[0] + "s"
            name = self.dest.split("_", 1)[1]
            if not hasattr(namespace, collection_name):
                setattr(namespace, collection_name, [])
            collection = getattr(namespace, collection_name)
            collection.append({
                "name": name,
                "options": {self.metavar.lower(): values}
                           if values is not None else {}
            })
        else:
            raise Exception("Unexpected dest=" + self.dest)

def parser_add_myarg(parser, name, metavar="OPT", help=None):
    parser.add_argument("--" + name,
                        metavar=metavar,
                        type=str,
                        nargs='?',
                        const=None,
                        action=MyAction,
                        help=help)

parser = argparse.ArgumentParser(
    description="""Merge node data from multiple sources and generate
                   various output formats from this data""",
)
input_group = parser.add_argument_group("Inputs", description="""
    Inputs are used in the order given on the command line, where later
    inputs can overwrite attributes of earlier inputs if named equally,
    but the first input encountering a node sets its id, which is
    immutable afterwards.

    The same input can be given multiple times, probably with different
    options.
    """)
output_group = parser.add_argument_group("Outputs")
parser_add_myarg(input_group, 'input-alfred', metavar="REQUEST_DATA_TYPE",
                 help="read node details from A.L.F.R.E.D.")
parser_add_myarg(input_group, 'input-wiki', metavar="URL",
                 help="read node details from a Wiki page")
parser_add_myarg(input_group, 'input-batadv', metavar="MESH_INTERFACE",
                 help="add node's neighbors and clients from batadv-vis")
parser_add_myarg(output_group, 'output-d3json', metavar="FILEPATH",
                 help="generate JSON file compatible with ffmap-d3")
parser_add_myarg(output_group, 'output-rrd', metavar="DIRECTORY",
                 help="update RRDs with statistics, one global and one per node")

args = parser.parse_args()

if "inputs" not in args or not args.inputs:
    parser.print_help(sys.stderr)
    sys.stderr.write("\nERROR: No input has been defined!\n")
    sys.exit(1)

if "outputs" not in args or not args.outputs:
    parser.print_help(sys.stderr)
    sys.stderr.write("\nERROR: No output has been defined!\n")
    sys.exit(1)

run_names(inputs=args.inputs, outputs=args.outputs)
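Given the argument handling above, a typical invocation chains one or more inputs with at least one output; the interface name and paths below are examples, not values from this commit:

# Read the mesh topology from bat0 and node details from alfred, then
# write both the d3 JSON and the per-node RRD statistics.
python3 -mffmap.run --input-batadv bat0 --input-alfred \
    --output-d3json /var/www/nodes.json --output-rrd /var/lib/ffmap/nodedb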
hostid.py (deleted, 13 lines)
@@ -1,13 +0,0 @@
import re
from functools import reduce

def mac_to_hostid(mac):
    int_mac = list(map(lambda x: int(x, 16), mac.split(":")))
    int_mac[0] ^= 2
    bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:])
    return reduce(lambda a, i:
                      [a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:],
                  range(0, 4),
                  [""] + list(bytes)
                 )
link.py (deleted, 15 lines)
@@ -1,15 +0,0 @@
class Link():
    def __init__(self):
        self.id = None
        self.source = None
        self.target = None
        self.quality = None
        self.type = None

class LinkConnector():
    def __init__(self):
        self.id = None
        self.interface = None

    def __repr__(self):
        return "LinkConnector(%d, %s)" % (self.id, self.interface)
mkmap.sh (deleted, 15 lines)
@@ -1,15 +0,0 @@
#!/bin/bash

set -e

DEST=$1

[ "$DEST" ] || exit 1

cd "$(dirname "$0")"/

./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json
./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json

./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST
node.py (deleted, 31 lines)
@@ -1,31 +0,0 @@
class Node():
    def __init__(self):
        self.name = ""
        self.id = ""
        self.macs = set()
        self.interfaces = dict()
        self.flags = dict({
            "online": False,
            "gateway": False,
            "client": False
        })
        self.gps = None
        self.firmware = None
        self.clientcount = 0

    def add_mac(self, mac):
        mac = mac.lower()
        if len(self.macs) == 0:
            self.id = mac

        self.macs.add(mac)

        self.interfaces[mac] = Interface()

    def __repr__(self):
        return self.macs.__repr__()

class Interface():
    def __init__(self):
        self.vpn = False
nodedb.py (deleted, 385 lines)
@@ -1,385 +0,0 @@
import json
from functools import reduce
from collections import defaultdict
from node import Node, Interface
from link import Link, LinkConnector

class NodeDB:
    def __init__(self):
        self._nodes = []
        self._links = []

    # fetch list of links
    def get_links(self):
        self.update_vpn_links()
        return self.reduce_links()

    # fetch list of nodes
    def get_nodes(self):
        return self._nodes

    def maybe_node_by_fuzzy_mac(self, mac):
        mac_a = mac.lower()

        for node in self._nodes:
            for mac_b in node.macs:
                if is_derived_mac(mac_a, mac_b):
                    return node

        raise KeyError

    def maybe_node_by_mac(self, macs):
        for node in self._nodes:
            for mac in macs:
                if mac.lower() in node.macs:
                    return node

        raise KeyError

    def maybe_node_by_id(self, mac):
        for node in self._nodes:
            if mac.lower() == node.id:
                return node

        raise KeyError

    def parse_vis_data(self,vis_data):
        for x in vis_data:

            if 'of' in x:
                try:
                    node = self.maybe_node_by_mac((x['of'], x['secondary']))
                except:
                    node = Node()
                    node.flags['online'] = True
                    if 'legacy' in x:
                        node.flags['legacy'] = True
                    self._nodes.append(node)

                node.add_mac(x['of'])
                node.add_mac(x['secondary'])

        for x in vis_data:

            if 'router' in x:
                try:
                    node = self.maybe_node_by_mac((x['router'], ))
                except:
                    node = Node()
                    node.flags['online'] = True
                    if 'legacy' in x:
                        node.flags['legacy'] = True
                    node.add_mac(x['router'])
                    self._nodes.append(node)

                # If it's a TT link and the MAC is very similar
                # consider this MAC as one of the routers
                # MACs
                if 'gateway' in x and x['label'] == "TT":
                    if is_similar(x['router'], x['gateway']):
                        node.add_mac(x['gateway'])

                        # skip processing as regular link
                        continue

            try:
                if 'neighbor' in x:
                    try:
                        node = self.maybe_node_by_mac((x['neighbor']))
                    except:
                        continue

                if 'gateway' in x:
                    x['neighbor'] = x['gateway']

                node = self.maybe_node_by_mac((x['neighbor'], ))
            except:
                node = Node()
                node.flags['online'] = True
                if x['label'] == 'TT':
                    node.flags['client'] = True

                node.add_mac(x['neighbor'])
                self._nodes.append(node)

        for x in vis_data:

            if 'router' in x:
                try:
                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    router = self.maybe_node_by_mac((x['router'], ))
                    neighbor = self.maybe_node_by_mac((x['neighbor'], ))
                except:
                    continue

                # filter TT links merged in previous step
                if router == neighbor:
                    continue

                link = Link()
                link.source = LinkConnector()
                link.source.interface = x['router']
                link.source.id = self._nodes.index(router)
                link.target = LinkConnector()
                link.target.interface = x['neighbor']
                link.target.id = self._nodes.index(neighbor)
                link.quality = x['label']
                link.id = "-".join(sorted((link.source.interface, link.target.interface)))

                if x['label'] == "TT":
                    link.type = "client"

                self._links.append(link)

        for x in vis_data:

            if 'primary' in x:
                try:
                    node = self.maybe_node_by_mac((x['primary'], ))
                except:
                    continue

                node.id = x['primary']

    def reduce_links(self):
        tmp_links = defaultdict(list)

        for link in self._links:
            tmp_links[link.id].append(link)

        links = []

        def reduce_link(a, b):
            a.id = b.id
            a.source = b.source
            a.target = b.target
            a.type = b.type
            a.quality = ", ".join([x for x in (a.quality, b.quality) if x])

            return a

        for k, v in tmp_links.items():
            new_link = reduce(reduce_link, v, Link())
            links.append(new_link)

        return links

    def import_aliases(self, aliases):
        for mac, alias in aliases.items():
            try:
                node = self.maybe_node_by_mac([mac])
            except:
                try:
                    node = self.maybe_node_by_fuzzy_mac(mac)
                except:
                    # create an offline node
                    node = Node()
                    node.add_mac(mac)
                    self._nodes.append(node)

            if 'name' in alias:
                node.name = alias['name']

            if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
                node.interfaces[mac].vpn = True

            if 'gps' in alias:
                node.gps = alias['gps']

            if 'firmware' in alias:
                node.firmware = alias['firmware']

            if 'id' in alias:
                node.id = alias['id']

    # list of macs
    # if options['gateway']:
    #     mark_gateways(options['gateway'])
    def mark_gateways(self, gateways):
        for gateway in gateways:
            try:
                node = self.maybe_node_by_mac((gateway, ))
            except:
                print("WARNING: did not find gateway '",gateway,"' in node list")
                continue

            node.flags['gateway'] = True

    def update_vpn_links(self):
        changes = 1
        while changes > 0:
            changes = 0
            for link in self._links:
                if link.type == "client":
                    continue

                source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
                target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
                if source_interface.vpn or target_interface.vpn:
                    source_interface.vpn = True
                    target_interface.vpn = True
                    if link.type != "vpn":
                        changes += 1

                    link.type = "vpn"

    def count_clients(self):
        for link in self._links:
            try:
                a = self.maybe_node_by_id(link.source.interface)
                b = self.maybe_node_by_id(link.target.interface)

                if a.flags['client']:
                    client = a
                    node = b
                elif b.flags['client']:
                    client = b
                    node = a
                else:
                    continue

                node.clientcount += 1
            except:
                pass

    def obscure_clients(self):

        globalIdCounter = 0
        nodeCounters = {}
        clientIds = {}

        for node in self._nodes:
            if node.flags['client']:
                node.macs = set()
                clientIds[node.id] = None

        for link in self._links:
            ids = link.source.interface
            idt = link.target.interface

            try:
                node_source = self.maybe_node_by_fuzzy_mac(ids)
                node_target = self.maybe_node_by_id(idt)

                if not node_source.flags['client'] and not node_target.flags['client']:
                    # if none of the nodes associated with this link are clients,
                    # we do not want to obscure
                    continue

                if ids in clientIds and idt in clientIds:
                    # This is for corner cases, when a client
                    # is linked to another client.
                    clientIds[ids] = str(globalIdCounter)
                    ids = str(globalIdCounter)
                    globalIdCounter += 1

                    clientIds[idt] = str(globalIdCounter)
                    idt = str(globalIdCounter)
                    globalIdCounter += 1

                elif ids in clientIds:
                    newId = generateId(idt)
                    clientIds[ids] = newId
                    ids = newId

                    link.source.interface = ids;
                    node_source.id = ids;

                elif idt in clientIds:
                    newId = generateId(ids,nodeCounters)
                    clientIds[idt] = newId
                    idt = newId

                    link.target.interface = idt;
                    node_target.id = idt;

                link.id = ids + "-" + idt

            except KeyError:
                pass

# extends node id by incremented node counter
def generateId(nodeId,nodeCounters):
    if nodeId in nodeCounters:
        n = nodeCounters[nodeId]
        nodeCounters[nodeId] = n + 1
    else:
        nodeCounters[nodeId] = 1
        n = 0

    return nodeId + "_" + str(n)

# compares two MACs and decides whether they are
# similar and could be from the same node
def is_similar(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    # first byte must only differ in bit 2
    if mac_a[0] | 2 == mac_b[0] | 2:
        # count different bytes
        c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
    else:
        return False

    # no more than two additional bytes must differ
    if len(c) <= 2:
        delta = 0

    if len(c) > 0:
        delta = sum(abs(i[0] -i[1]) for i in c)

    # These addresses look pretty similar!
    return delta < 8

def is_derived_mac(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]:
        return False

    x = list(mac_a)
    x[5] += 1
    x[5] %= 255
    if mac_b == x:
        return True

    x[0] |= 2
    if mac_b == x:
        return True

    x[3] += 1
    x[3] %= 255
    if mac_b == x:
        return True

    x = list(mac_a)
    x[0] |= 2
    x[5] += 2
    x[5] %= 255
    if mac_b == x:
        return True

    x = list(mac_a)
    x[0] |= 2
    x[3] += 1
    x[3] %= 255
    if mac_b == x:
        return True

    return False
rrd.py (deleted, 72 lines)
@@ -1,72 +0,0 @@
#!/usr/bin/env python3
import subprocess
import time
import os
from GlobalRRD import GlobalRRD
from NodeRRD import NodeRRD

class rrd:
    def __init__( self
                , databaseDirectory
                , imagePath
                , displayTimeGlobal = "7d"
                , displayTimeNode = "1d"
                ):
        self.dbPath = databaseDirectory
        self.globalDb = GlobalRRD(self.dbPath)
        self.imagePath = imagePath
        self.displayTimeGlobal = displayTimeGlobal
        self.displayTimeNode = displayTimeNode

        self.currentTimeInt = (int(time.time())/60)*60
        self.currentTime = str(self.currentTimeInt)

        try:
            os.stat(self.imagePath)
        except:
            os.mkdir(self.imagePath)

    def update_database(self,db):
        nodes = {}
        clientCount = 0
        for node in db.get_nodes():
            if node.flags['online']:
                if not node.flags['client']:
                    nodes[node.id] = node
                    node.clients = 0;
                    if 'legacy' in node.flags and node.flags['legacy']:
                        clientCount -= 1
                else:
                    clientCount += 1
        for link in db.get_links():
            source = link.source.interface
            target = link.target.interface
            if source in nodes and not target in nodes:
                nodes[source].clients += 1
            elif target in nodes and not source in nodes:
                nodes[target].clients += 1

        self.globalDb.update(len(nodes), clientCount)
        for node in nodes.values():
            rrd = NodeRRD(
                os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'),
                node
            )
            rrd.update()

    def update_images(self):
        """ Creates an image for every rrd file in the database directory.
        """

        self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)

        nodeDbFiles = os.listdir(self.dbPath)

        for fileName in nodeDbFiles:
            if not os.path.isfile(os.path.join(self.dbPath, fileName)):
                continue

            nodeName = os.path.basename(fileName).split('.')
            if nodeName[1] == 'rrd' and not nodeName[0] == "nodes":
                rrd = NodeRRD(os.path.join(self.dbPath, fileName))
                rrd.graph(self.imagePath, self.displayTimeNode)