import json
from node import Node
from link import Link
from itertools import zip_longest
from bs4 import BeautifulSoup
from urllib.request import urlopen


class NodeDB:
    def __init__(self):
        self._nodes = []
        self._links = []

    # fetch list of links
    def get_links(self):
        return [self.map_link(x) for x in self._links]

    # fetch list of nodes
    def get_nodes(self):
        return self._nodes

    def add_link(self, a, b, q):
        l = tuple(sorted((a, b)))

        for link in self._links:
            if l == link[0]:
                if link[1] != str(q):
                    # keep the better quality value (stored as a string)
                    link[1] = str(max(float(link[1]), float(q)))
                return

        self._links.append([l, str(q)])
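
        # (internal format note) each entry appended above has the form
        # [(index_a, index_b), "<quality>"] with the node-index pair sorted;
        # map_link() further down consumes exactly this structure.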

    def maybe_node_by_mac(self, macs):
        for node in self._nodes:
            for mac in macs:
                if mac.lower() in node.macs:
                    return node

        raise KeyError("no node with MAC in " + ", ".join(macs))

    # expects a list of JSON lines (one object per line); the list is
    # iterated several times, e.g.
    # import_batman(list(fileinput.input(options['batmanjson'])))
    def import_batman(self, lines):
        # first pass: entries with 'of'/'secondary' join two MACs on one node
        for line in lines:
            x = json.loads(line)

            if 'of' in x:
                try:
                    node = self.maybe_node_by_mac((x['of'], x['secondary']))
                except:
                    node = Node()
                    node.online = True
                    self._nodes.append(node)

                node.add_mac(x['of'])
                node.add_mac(x['secondary'])

        # second pass: router entries; merge very similar TT MACs into the router
        for line in lines:
            x = json.loads(line)

            if 'router' in x:
                try:
                    node = self.maybe_node_by_mac((x['router'], ))
                except:
                    node = Node()
                    node.online = True
                    node.add_mac(x['router'])
                    self._nodes.append(node)

                # If it's a TT entry and the gateway MAC is very similar to
                # the router MAC, consider it just another MAC of the router.
                if 'gateway' in x and x['label'] == "TT":
                    router = list(int(i, 16) for i in x['router'].split(":"))
                    gateway = list(int(i, 16) for i in x['gateway'].split(":"))

                    # the first bytes may only differ in bit 2
                    # (the locally administered bit)
                    if router[0] == gateway[0] | 2:
                        # count the differing bytes in the remainder
                        a = [p for p in zip(router[1:], gateway[1:]) if p[0] != p[1]]

                        # no more than two of the remaining bytes may differ
                        if len(a) <= 2:
                            delta = 0

                            if len(a) > 0:
                                delta = sum(abs(i[0] - i[1]) for i in a)

                            if delta < 8:
                                # this TT entry looks like a MAC of the router
                                node.add_mac(x['gateway'])

                                # skip processing as a regular link
                                continue
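
                # Illustrative example of the heuristic above (made-up
                # addresses): router 02:11:22:33:44:55 and gateway
                # 00:11:22:33:44:53 match, because the first bytes differ
                # only in bit 2 (0x00 | 2 == 0x02), exactly one later byte
                # differs, and the total difference (2) is below 8.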

                try:
                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    node = self.maybe_node_by_mac((x['neighbor'], ))
                except:
                    node = Node()
                    node.online = True
                    if x['label'] == 'TT':
                        node.group = 3

                    node.add_mac(x['neighbor'])
                    self._nodes.append(node)

        # third pass: create links between known nodes
        for line in lines:
            x = json.loads(line)

            if 'router' in x:
                try:
                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    router = self.maybe_node_by_mac((x['router'], ))
                    neighbor = self.maybe_node_by_mac((x['neighbor'], ))
                except:
                    continue

                a = self._nodes.index(router)
                b = self._nodes.index(neighbor)

                if a != b:
                    self.add_link(a, b, x['label'])

        # fourth pass: 'primary' entries provide the node id
        for line in lines:
            x = json.loads(line)

            if 'primary' in x:
                try:
                    node = self.maybe_node_by_mac((x['primary'], ))
                except:
                    continue

                node.id = x['primary']

    def import_aliases(self, aliases):
        for mac, alias in aliases.items():
            try:
                node = self.maybe_node_by_mac((mac, ))
            except:
                continue

            node.name = alias['name']

            if 'group' in alias:
                node.group = alias['group']

    # takes a list of gateway MACs, e.g.
    # if options['gateway']:
    #     mark_gateways(options['gateway'])
    def mark_gateways(self, gateways):
        for gateway in gateways:
            try:
                node = self.maybe_node_by_mac((gateway, ))
            except:
                continue

            node.group = 2

    def map_link(self, pair):
        distance = 80
        strength = 0.2

        # links involving TT entries (group 3) get a short distance and full strength
        if any(self._nodes[x].group == 3 for x in pair[0]):
            distance = 10
            strength = 1

        link = Link()
        link.pair = pair[0]
        link.distance = distance
        link.strength = strength
        link.quality = pair[1]

        return link

    def import_wikigps(self, url):
        def fetch_wikitable(url):
            f = urlopen(url)

            # explicit parser avoids bs4's "no parser specified" warning
            soup = BeautifulSoup(f, "html.parser")

            table = soup.find_all("table")[0]
            rows = table.find_all("tr")

            headers = []
            data = []

            def maybe_strip(x):
                if isinstance(x.string, str):
                    return x.string.strip()
                else:
                    return ""

            for row in rows:
                tds = [maybe_strip(x) for x in row.find_all("td")]
                ths = [maybe_strip(x) for x in row.find_all("th")]

                if any(tds):
                    data.append(tds)

                if any(ths):
                    headers = ths

            nodes = []

            for d in data:
                nodes.append(dict(zip(headers, d)))

            return nodes
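
        # fetch_wikitable() yields one dict per table row, keyed by the
        # header cells; the code below expects (at least) "MAC", "GPS" and
        # "Nick" columns, each possibly containing a comma-separated list.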

        nodes = fetch_wikitable(url)

        for node in nodes:
            try:
                node['MAC'] = node['MAC'].split(',')
            except KeyError:
                pass

            try:
                node['GPS'] = node['GPS'].split(',')
            except KeyError:
                pass

            try:
                node['Nick'] = node['Nick'].split(',')
            except KeyError:
                pass

            # pair up the i-th MAC with the i-th GPS/Nick entry;
            # missing values are filled with None
            for mac, gps, nick in zip_longest(node['MAC'], node['GPS'], node['Nick']):
                if not mac:
                    continue

                try:
                    entry = self.maybe_node_by_mac((mac, ))
                except:
                    entry = Node()
                    entry.add_mac(mac)
                    self._nodes.append(entry)

                if gps:
                    entry.gps = gps

                if nick:
                    entry.name = nick
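

# Minimal usage sketch (illustrative only; 'batman.json', the alias MAC and
# the gateway MAC below are made-up examples, not part of this module):
if __name__ == '__main__':
    import fileinput

    db = NodeDB()

    # one JSON object per line, as in the comment above import_batman()
    db.import_batman(list(fileinput.input('batman.json')))

    db.import_aliases({'00:11:22:33:44:55': {'name': 'example-node', 'group': 1}})
    db.mark_gateways(['00:11:22:33:44:66'])

    print(len(db.get_nodes()), 'nodes,', len(db.get_links()), 'links')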