import json

from collections import defaultdict
from functools import reduce
from itertools import zip_longest
from urllib.request import urlopen

from bs4 import BeautifulSoup

from node import Node, Interface
from link import Link, LinkConnector


class NodeDB:
    def __init__(self):
        self._nodes = []
        self._links = []

    # fetch list of links
    def get_links(self):
        self.update_vpn_links()
        return self.reduce_links()

    # fetch list of nodes
    def get_nodes(self):
        return self._nodes

    def maybe_node_by_fuzzy_mac(self, mac):
        mac_a = mac.lower()

        for node in self._nodes:
            for mac_b in node.macs:
                if is_derived_mac(mac_a, mac_b):
                    return node

        raise KeyError

    def maybe_node_by_mac(self, macs):
        for node in self._nodes:
            for mac in macs:
                if mac.lower() in node.macs:
                    return node

        raise KeyError
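
    # Illustrative usage sketch (not part of the original module; the MAC below
    # is a placeholder): both lookup helpers take an iterable of MAC strings
    # and raise KeyError when no known node matches, so callers in this class
    # wrap them like this:
    #
    #   try:
    #       node = self.maybe_node_by_mac(("00:11:22:33:44:55", ))
    #   except KeyError:
    #       node = Node()
    #       self._nodes.append(node)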

    # import_batman(list(fileinput.input(options['batmanjson'])))
    def import_batman(self, lines):
        # first pass: create nodes for originator/secondary MAC pairs
        for line in lines:
            x = json.loads(line)

            if 'of' in x:
                try:
                    node = self.maybe_node_by_mac((x['of'], x['secondary']))
                except KeyError:
                    node = Node()
                    node.flags['online'] = True
                    self._nodes.append(node)

                node.add_mac(x['of'])
                node.add_mac(x['secondary'])

        # second pass: nodes announced as routers, plus their TT (client) entries
        for line in lines:
            x = json.loads(line)

            if 'router' in x:
                try:
                    node = self.maybe_node_by_mac((x['router'], ))
                except KeyError:
                    node = Node()
                    node.flags['online'] = True
                    node.add_mac(x['router'])
                    self._nodes.append(node)

                # If it's a TT link and the MAC is very similar,
                # consider this MAC as one of the router's MACs
                if 'gateway' in x and x['label'] == "TT":
                    if is_similar(x['router'], x['gateway']):
                        node.add_mac(x['gateway'])

                        # skip processing as regular link
                        continue

                try:
                    if 'neighbor' in x:
                        try:
                            node = self.maybe_node_by_mac((x['neighbor'], ))
                        except KeyError:
                            continue

                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    node = self.maybe_node_by_mac((x['neighbor'], ))
                except KeyError:
                    node = Node()
                    node.flags['online'] = True
                    if x['label'] == 'TT':
                        node.flags['client'] = True

                    node.add_mac(x['neighbor'])
                    self._nodes.append(node)

        # third pass: turn the announced adjacencies into Link objects
        for line in lines:
            x = json.loads(line)

            if 'router' in x:
                try:
                    if 'gateway' in x:
                        x['neighbor'] = x['gateway']

                    router = self.maybe_node_by_mac((x['router'], ))
                    neighbor = self.maybe_node_by_mac((x['neighbor'], ))
                except KeyError:
                    continue

                # filter TT links merged in previous step
                if router == neighbor:
                    continue

                link = Link()
                link.source = LinkConnector()
                link.source.interface = x['router']
                link.source.id = self._nodes.index(router)
                link.target = LinkConnector()
                link.target.interface = x['neighbor']
                link.target.id = self._nodes.index(neighbor)
                link.quality = x['label']
                link.id = "-".join(sorted((link.source.interface, link.target.interface)))

                if x['label'] == "TT":
                    link.type = "client"

                self._links.append(link)

        # fourth pass: use the primary MAC as the node id
        for line in lines:
            x = json.loads(line)

            if 'primary' in x:
                try:
                    node = self.maybe_node_by_mac((x['primary'], ))
                except KeyError:
                    continue

                node.id = x['primary']
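
    # Illustrative sketch (not part of the original module) of the input
    # import_batman() above expects: every element of `lines` is one JSON
    # object from a batman-adv vis dump. The exact field set depends on that
    # dump; judging only by the keys read above, lines might look roughly like
    # these (all MACs and the quality value are placeholders):
    #
    #   {"of": "02:11:22:33:44:55", "secondary": "02:11:22:33:44:56"}
    #   {"router": "02:11:22:33:44:55", "neighbor": "02:aa:bb:cc:dd:ee", "label": "1.000"}
    #   {"router": "02:11:22:33:44:55", "gateway": "02:11:22:33:44:56", "label": "TT"}
    #   {"primary": "02:11:22:33:44:55"}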

    def reduce_links(self):
        tmp_links = defaultdict(list)

        for link in self._links:
            tmp_links[link.id].append(link)

        links = []

        def reduce_link(a, b):
            a.id = b.id
            a.source = b.source
            a.target = b.target
            a.type = b.type
            a.quality = ", ".join([x for x in (a.quality, b.quality) if x])

            return a

        for k, v in tmp_links.items():
            new_link = reduce(reduce_link, v, Link())
            links.append(new_link)

        return links
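
    # Illustrative note (not part of the original module): links that share an
    # id, i.e. the two directions of the same connection, are folded into a
    # single Link here. Assuming a fresh Link() starts with an empty quality,
    # two directed links with qualities "1.000" and "1.042" (placeholder
    # values) end up as one link whose quality is the string "1.000, 1.042".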

    def import_aliases(self, aliases):
        for mac, alias in aliases.items():
            try:
                node = self.maybe_node_by_mac((mac, ))
            except KeyError:
                continue

            node.name = alias['name']

            if 'vpn' in alias and alias['vpn']:
                node.interfaces[mac].vpn = True

            if 'gps' in alias:
                node.gps = alias['gps']
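
    # Illustrative sketch (not part of the original module) of the aliases
    # mapping this method reads; the MAC and all values are placeholders,
    # only the key names come from the code above:
    #
    #   {
    #       "02:11:22:33:44:55": {
    #           "name": "some-node",
    #           "vpn": True,
    #           "gps": "53.56 10.03"
    #       }
    #   }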

    # list of macs
    # if options['gateway']:
    #     mark_gateways(options['gateway'])
    def mark_gateways(self, gateways):
        for gateway in gateways:
            try:
                node = self.maybe_node_by_mac((gateway, ))
            except KeyError:
                continue

            node.flags['gateway'] = True
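
    # Illustrative usage sketch (not part of the original module; `db` and the
    # MACs are placeholders): gateways is simply an iterable of MAC strings.
    #
    #   db.mark_gateways(["02:11:22:33:44:55", "02:aa:bb:cc:dd:ee"])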

    def update_vpn_links(self):
        # propagate the VPN flag along links until nothing changes anymore:
        # whenever one end of a non-client link is a VPN interface, the other
        # end is marked as VPN as well and the link becomes a "vpn" link
        changes = 1
        while changes > 0:
            changes = 0
            for link in self._links:
                if link.type == "client":
                    continue

                source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
                target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
                if source_interface.vpn or target_interface.vpn:
                    source_interface.vpn = True
                    target_interface.vpn = True
                    if link.type != "vpn":
                        changes += 1

                    link.type = "vpn"

    def import_wikigps(self, url):
        def fetch_wikitable(url):
            f = urlopen(url)

            soup = BeautifulSoup(f)

            table = soup.find_all("table")[0]

            rows = table.find_all("tr")

            headers = []
            data = []

            def maybe_strip(x):
                if isinstance(x.string, str):
                    return x.string.strip()
                else:
                    return ""

            for row in rows:
                tds = [maybe_strip(x) for x in row.find_all("td")]
                ths = [maybe_strip(x) for x in row.find_all("th")]

                if any(tds):
                    data.append(tds)

                if any(ths):
                    headers = ths

            nodes = []

            for d in data:
                nodes.append(dict(zip(headers, d)))

            return nodes

        nodes = fetch_wikitable(url)

        for node in nodes:
            try:
                node['MAC'] = node['MAC'].split(',')
            except KeyError:
                pass

            try:
                node['GPS'] = node['GPS'].split(',')
            except KeyError:
                pass

            try:
                node['Knotenname'] = node['Knotenname'].split(',')
            except KeyError:
                pass

            # cells may contain several comma-separated values; recombine them
            # into (mac, gps, name) tuples
            entries = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])

            for data in entries:
                if not data[0]:
                    continue

                try:
                    node = self.maybe_node_by_fuzzy_mac(data[0])
                except KeyError:
                    node = Node()
                    self._nodes.append(node)

                node.add_mac(data[0])

                if data[1]:
                    node.gps = data[1]

                if data[2]:
                    node.name = data[2]
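
    # Illustrative sketch (not part of the original module) of one row dict as
    # returned by fetch_wikitable(); the column names are the ones read above,
    # the values are placeholders. A cell may hold several comma-separated
    # entries, which is why the values are split and recombined via zip_longest:
    #
    #   {
    #       "Knotenname": "node-a,node-b",
    #       "MAC": "02:11:22:33:44:55,02:aa:bb:cc:dd:ee",
    #       "GPS": "53.56 10.03,53.57 10.04"
    #   }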


# compares two MACs and decides whether they are
# similar and could be from the same node
def is_similar(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    # the first byte must only differ in bit 2 (the locally administered bit)
    if mac_a[0] | 2 == mac_b[0] | 2:
        # collect the differing bytes among the remaining five
        c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
    else:
        return False

    # no more than two additional bytes may differ
    if len(c) > 2:
        return False

    delta = 0

    if len(c) > 0:
        delta = sum(abs(i[0] - i[1]) for i in c)

    # These addresses look pretty similar!
    return delta < 8
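
# A couple of illustrative checks (not part of the original module; the MACs
# are made up, the results follow from the rules above):
#
#   is_similar("00:11:22:33:44:55", "02:11:22:33:44:56")
#       -> True: the first byte differs only in bit 2, one later byte differs by 1
#   is_similar("00:11:22:33:44:55", "01:11:22:33:44:55")
#       -> False: the first byte differs in a bit other than bit 2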


# checks whether MAC b could have been derived from MAC a by the small set of
# byte tweaks tried below (incrementing the last or fourth byte, optionally
# with the locally administered bit set in the first byte)
def is_derived_mac(a, b):
    if a == b:
        return True

    try:
        mac_a = list(int(i, 16) for i in a.split(":"))
        mac_b = list(int(i, 16) for i in b.split(":"))
    except ValueError:
        return False

    x = list(mac_a)
    x[5] += 1
    x[5] %= 255
    if mac_b == x:
        return True

    x[0] |= 2
    if mac_b == x:
        return True

    x[3] += 1
    x[3] %= 255
    if mac_b == x:
        return True

    x = list(mac_a)
    x[0] |= 2
    x[5] += 2
    x[5] %= 255
    if mac_b == x:
        return True

    x = list(mac_a)
    x[0] |= 2
    x[3] += 1
    x[3] %= 255
    if mac_b == x:
        return True

    return False
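
# A couple of illustrative checks (not part of the original module; the MACs
# are made up, the results follow from the checks above):
#
#   is_derived_mac("00:11:22:33:44:55", "00:11:22:33:44:56")
#       -> True: last byte incremented by one
#   is_derived_mac("00:11:22:33:44:55", "02:11:22:33:44:57")
#       -> True: locally administered bit set and last byte incremented by two
#   is_derived_mac("00:11:22:33:44:55", "00:11:22:33:44:58")
#       -> False: none of the tried variants matches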