From f5e3705eec4888ec0d40f98333df3d447f1f842f Mon Sep 17 00:00:00 2001
From: Jan-Philipp Litza
Date: Mon, 7 Jul 2014 23:27:21 +0200
Subject: [PATCH] Began rewrite with more modular design

---
 alfred.py                                |  37 --
 batman.py                                |  86 -----
 hostid.py                                |  13 -
 inputs/alfred/__init__.py                |  18 +
 inputs/batadv/__init__.py                | 100 +++++
 json_encoder.py                          |  13 -
 link.py                                  |  24 --
 node.py                                  | 128 +++---
 nodedb.py                                | 441 +++---------------------
 outputs/json/__init__.py                 |  71 ++++
 GlobalRRD.py => outputs/rrd/GlobalRRD.py |   6 +-
 NodeRRD.py => outputs/rrd/NodeRRD.py     |  14 +-
 RRD.py => outputs/rrd/RRD.py             |   0
 outputs/rrd/__init__.py                  |  31 ++
 rrd.py                                   |  80 ----
 15 files changed, 354 insertions(+), 708 deletions(-)
 delete mode 100755 alfred.py
 delete mode 100755 batman.py
 delete mode 100644 hostid.py
 create mode 100644 inputs/alfred/__init__.py
 create mode 100644 inputs/batadv/__init__.py
 delete mode 100644 json_encoder.py
 delete mode 100644 link.py
 create mode 100644 outputs/json/__init__.py
 rename GlobalRRD.py => outputs/rrd/GlobalRRD.py (89%)
 rename NodeRRD.py => outputs/rrd/NodeRRD.py (85%)
 rename RRD.py => outputs/rrd/RRD.py (100%)
 create mode 100644 outputs/rrd/__init__.py
 delete mode 100755 rrd.py

diff --git a/alfred.py b/alfred.py
deleted file mode 100755
index 06ee1f7..0000000
--- a/alfred.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import json
-
-class alfred:
-    def __init__(self,request_data_type = 158):
-        self.request_data_type = request_data_type
-
-    def aliases(self):
-        output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"])
-        alfred_data = json.loads(output.decode("utf-8"))
-        alias = {}
-        for mac,node in alfred_data.items():
-            node_alias = {}
-            for key in node:
-                node_alias[key] = node[key]
-
-            try:
-                node_alias['geo'] = [node['location']['latitude'], node['location']['longitude']]
-            except (TypeError, KeyError):
-                pass
-
-            try:
-                node_alias['id'] = node['network']['mac']
-            except KeyError:
-                pass
-
-            if 'hostname' in node:
-                node_alias['name'] = node['hostname']
-            if len(node_alias):
-                alias[mac] = node_alias
-        return alias
-
-if __name__ == "__main__":
-    ad = alfred()
-    al = ad.aliases()
-    print(al)
diff --git a/batman.py b/batman.py
deleted file mode 100755
index c9b3db6..0000000
--- a/batman.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python3
-import subprocess
-import json
-import re
-
-class batman:
-    """ Bindings for B.A.T.M.A.N. advanced batctl tool
-    """
-    def __init__(self, mesh_interface = "bat0"):
-        self.mesh_interface = mesh_interface
-
-    def vis_data(self,batadv_vis=False):
-        vds = self.vis_data_batctl_legacy()
-        if batadv_vis:
-            vds += self.vis_data_batadv_vis()
-        return vds
-
-    def vis_data_helper(self,lines):
-        vd = []
-        for line in lines:
-            try:
-                utf8_line = line.decode("utf-8")
-                vd.append(json.loads(utf8_line))
-            except e:
-                pass
-        return vd
-
-    def vis_data_batctl_legacy(self):
-        """ Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
-        """
-        output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
-        lines = output.splitlines()
-        vds = self.vis_data_helper(lines)
-        for vd in vds:
-            vd['legacy'] = True
-        return vds
-
-    def vis_data_batadv_vis(self):
-        """ Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
-        """
-        output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
-        lines = output.splitlines()
-        return self.vis_data_helper(lines)
-
-    def gateway_list(self):
-        """ Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
- """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"]) - output_utf8 = output.decode("utf-8") - # TODO Parse information - lines = output_utf8.splitlines() - own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1) - # Remove header line - del lines[0] - # Fill gateway list - gw = [] - gw_mode = self.gateway_mode() - if gw_mode['mode'] == 'server': - gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']}) - for line in lines: - gw_line = line.split() - if (gw_line[0] == 'No'): - continue - # When in client gateway mode maybe gw_line[0] is not the right. - gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]}) - return gw - - def gateway_mode(self): - """ Parse "batctl -m gw" - """ - output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"]) - elements = output.decode("utf-8").split() - mode = elements[0] - if mode == "server": - return {'mode': 'server', 'bandwidth': elements[3]} - else: - return {'mode': mode} - -if __name__ == "__main__": - bc = batman() - vd = bc.vis_data() - gw = bc.gateway_list() - for x in vd: - print(x) - print(gw) - print(bc.gateway_mode()) diff --git a/hostid.py b/hostid.py deleted file mode 100644 index 2b4038e..0000000 --- a/hostid.py +++ /dev/null @@ -1,13 +0,0 @@ -import re -from functools import reduce - -def mac_to_hostid(mac): - int_mac = list(map(lambda x: int(x, 16), mac.split(":"))) - int_mac[0] ^= 2 - bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:]) - return reduce(lambda a, i: - [a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:], - range(0, 4), - [""] + list(bytes) - ) - diff --git a/inputs/alfred/__init__.py b/inputs/alfred/__init__.py new file mode 100644 index 0000000..6c0f66e --- /dev/null +++ b/inputs/alfred/__init__.py @@ -0,0 +1,18 @@ +import subprocess +import json + +class Input: + def __init__(self,request_data_type = 158): + self.request_data_type = request_data_type + + def get_data(self, nodedb): + """Add data from alfred to the supplied nodedb""" + output = subprocess.check_output([ + "alfred-json", + "-r", str(self.request_data_type), + "-f", "json", + ]) + alfred_data = json.loads(output.decode("utf-8")) + + for mac, node in alfred_data.items(): + nodedb.add_or_update([mac], node) diff --git a/inputs/batadv/__init__.py b/inputs/batadv/__init__.py new file mode 100644 index 0000000..576b09a --- /dev/null +++ b/inputs/batadv/__init__.py @@ -0,0 +1,100 @@ +import subprocess +import json + +class Input: + """Fill the NodeDB with links from batadv-vis. + + The links are added as lists containing the neighboring nodes, not + only their identifiers! Mind this when exporting the database, as + it probably leads to recursion. 
+ """ + def __init__(self, mesh_interface="bat0"): + self.mesh_interface = mesh_interface + + @staticmethod + def _is_similar_mac(a, b): + """Determine if two MAC addresses are similar.""" + if a == b: + return True + + # Split the address into bytes + try: + mac_a = list(int(i, 16) for i in a.split(":")) + mac_b = list(int(i, 16) for i in b.split(":")) + except ValueError: + return False + + # Second and third byte musn't differ + if mac_a[1] != mac_b[1] or mac_a[2] != mac_b[2]: + return False + + # First byte must only differ in bit 2 + if mac_a[0] | 2 != mac_b[0] | 2: + return False + + # Count differing bytes after the third + c = [x for x in zip(mac_a[3:], mac_b[3:]) if x[0] != x[1]] + + # No more than two additional bytes must differ + if len(c) > 2: + return False + + # If no more bytes differ, they are very similar + if len(c) == 0: + return True + + # If the sum of absolute differences isn't greater than 2, they + # are pretty similar + delta = sum(abs(i[0] - i[1]) for i in c) + return delta < 2 + + def get_data(self, nodedb): + """Add data from batadv-vis to the supplied nodedb""" + output = subprocess.check_output([ + "batadv-vis", + "-i", str(self.mesh_interface), + "-f", "jsondoc", + ]) + data = json.loads(output.decode("utf-8")) + + # First pass + for node in data["vis"]: + # Determine possible other MAC addresses of this node by + # comparing all its client's MAC addresses to its primary + # MAC address. If they are similar, it probably is another + # address of the node itself! If it isn't, it is a real + # client. + node['aliases'] = [node["primary"]] + if 'secondary' in node: + node['aliases'].extend(node['secondary']) + real_clients = [] + for mac in node["clients"]: + if self._is_similar_mac(mac, node["primary"]): + node['aliases'].append(mac) + else: + real_clients.append(mac) + node['clients'] = real_clients + + # Add nodes and aliases without any information at first. + # This way, we can later link the objects themselves. + nodedb.add_or_update(node['aliases']) + + # Second pass + for node in data["vis"]: + # We only need the primary address now, all aliases are + # already present in the database. Furthermore, we can be + # sure that all neighbors are in the database as well. If + # a neighbor isn't added already, we simply ignore it. + nodedb.add_or_update( + [node["primary"]], + { + "clients": node["clients"], + "neighbors": [ + { + "metric": neighbor['metric'], + "neighbor": nodedb[neighbor['neighbor']], + } for neighbor in node["neighbors"] + if neighbor['neighbor'] in nodedb + ] + } + ) diff --git a/json_encoder.py b/json_encoder.py deleted file mode 100644 index 8d62771..0000000 --- a/json_encoder.py +++ /dev/null @@ -1,13 +0,0 @@ -from json import JSONEncoder - -class CustomJSONEncoder(JSONEncoder): - """ - JSON encoder that uses an object's __json__() method to convert it to - something JSON-compatible. 
- """ - def default(self, obj): - try: - return obj.__json__() - except AttributeError: - pass - return super().default(obj) diff --git a/link.py b/link.py deleted file mode 100644 index b161608..0000000 --- a/link.py +++ /dev/null @@ -1,24 +0,0 @@ -class Link(): - def __init__(self): - self.id = None - self.source = LinkConnector() - self.target = LinkConnector() - self.quality = None - self.type = None - - def export(self): - return { - 'source': self.source.id, - 'target': self.target.id, - 'quality': self.quality, - 'type': self.type, - 'id': self.id - } - -class LinkConnector(): - def __init__(self): - self.id = None - self.interface = None - - def __repr__(self): - return "LinkConnector(%d, %s)" % (self.id, self.interface) diff --git a/node.py b/node.py index 83531b2..5fa58f6 100644 --- a/node.py +++ b/node.py @@ -1,70 +1,70 @@ from collections import defaultdict class NoneDict: - """ - A NoneDict acts like None but returns a NoneDict for every item in it. + """Act like None but return a NoneDict for every item request. - This is similar to the behaviour of collections.defaultdict in that even - previously inexistent keys can be accessed, but there is nothing stored - permanently. - """ - __repr__ = lambda self: 'NoneDict()' - __bool__ = lambda self: False - __getitem__ = lambda self, k: NoneDict() - __json__ = lambda self: None - __float__ = lambda self: float('NaN') - def __setitem__(self, key, value): - raise RuntimeError("NoneDict is readonly") - -class casualdict(defaultdict): - """ - This special defaultdict returns a NoneDict for inexistent items. Also, its - items can be accessed as attributed as well. - """ - def __init__(self): - super().__init__(NoneDict) - __getattr__ = defaultdict.__getitem__ - __setattr__ = defaultdict.__setitem__ - -class Node(casualdict): - def __init__(self): - self.name = "" - self.id = "" - self.macs = set() - self.interfaces = dict() - self.flags = dict({ - "online": False, - "gateway": False, - "client": False - }) - super().__init__() - - def add_mac(self, mac): - mac = mac.lower() - if len(self.macs) == 0: - self.id = mac - - self.macs.add(mac) - - self.interfaces[mac] = Interface() - - def __repr__(self): - return self.macs.__repr__() - - def export(self): + This is similar to the behaviour of collections.defaultdict in that + even previously inexistent keys can be accessed, but nothing is + stored permanently in this class. """ - Return a dict that contains all attributes of the Node that are supposed to - be exported to other applications. - """ - return { - "name": self.name, - "id": self.id, - "macs": list(self.macs), - "geo": self.geo, - "firmware": self.software['firmware']['release'], - "flags": self.flags - } + __repr__ = lambda self: 'NoneDict()' + __bool__ = lambda self: False + __getitem__ = lambda self, k: NoneDict() + __json__ = lambda self: None + __float__ = lambda self: float('NaN') + def __setitem__(self, key, value): + raise RuntimeError("NoneDict is readonly") -class Interface(): - def __init__(self): - self.vpn = False +class Node(defaultdict): + _id = None + def __init__(self, id_=None): + self._id = id_ + super().__init__(NoneDict) + + def __repr__(self): + return "Node(%s)" % self.id + + @property + def id(self): + return self._id + + def __hash__(self): + """Generate hash from the node's id. + + WARNING: Obviously this hash doesn't cover all of the node's + data, but we need nodes to be hashable in order to eliminate + duplicates in the NodeDB. + + At least the id cannot change after initialization... 
+ """ + return hash(self.id) + + @property + def vpn_neighbors(self): + try: + vpn_neighbors = [] + for neighbor in self['neighbors']: + if neighbor['neighbor']['vpn']: + vpn_neighbors.append(neighbor) + return vpn_neighbors + except TypeError: + return [] + + def export(self): + """Generate a serializable dict of the node. + + In particular, this replaces any references to other nodes by + their id to prevent circular references. + """ + ret = dict(self) + if "neighbors" in self: + ret["neighbors"] = [] + for neighbor in self["neighbors"]: + new_neighbor = {} + for key, val in neighbor.items(): + if isinstance(val, Node): + new_neighbor[key] = val.id + else: + new_neighbor[key] = val + ret["neighbors"].append(new_neighbor) + return ret diff --git a/nodedb.py b/nodedb.py index e5ff30e..a056184 100644 --- a/nodedb.py +++ b/nodedb.py @@ -1,381 +1,60 @@ -from functools import reduce -from collections import defaultdict -from node import Node, Interface -from link import Link, LinkConnector - -class NodeDB: - def __init__(self): - self._nodes = [] - self._links = [] - - # fetch list of links - def get_links(self): - self.update_vpn_links() - return self.reduce_links() - - # fetch list of nodes - def get_nodes(self): - return self._nodes - - def export(self): - return { - 'nodes': [node.export() for node in self.get_nodes()], - 'links': [link.export() for link in self.get_links()], - } - - def maybe_node_by_fuzzy_mac(self, mac): - mac_a = mac.lower() - - for node in self._nodes: - for mac_b in node.macs: - if is_derived_mac(mac_a, mac_b): - return node - - raise KeyError - - def maybe_node_by_mac(self, macs): - for node in self._nodes: - for mac in macs: - if mac.lower() in node.macs: - return node - - raise KeyError - - def maybe_node_by_id(self, mac): - for node in self._nodes: - if mac.lower() == node.id: - return node - - raise KeyError - - def parse_vis_data(self,vis_data): - for x in vis_data: - - if 'of' in x: - try: - node = self.maybe_node_by_mac((x['of'], x['secondary'])) - except: - node = Node() - node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True - self._nodes.append(node) - - node.add_mac(x['of']) - node.add_mac(x['secondary']) - - for x in vis_data: - - if 'router' in x: - try: - node = self.maybe_node_by_mac((x['router'], )) - except: - node = Node() - node.flags['online'] = True - if 'legacy' in x: - node.flags['legacy'] = True - node.add_mac(x['router']) - self._nodes.append(node) - - # If it's a TT link and the MAC is very similar - # consider this MAC as one of the routers - # MACs - if 'gateway' in x and x['label'] == "TT": - if is_similar(x['router'], x['gateway']): - node.add_mac(x['gateway']) - - # skip processing as regular link - continue - - try: - if 'neighbor' in x: - try: - node = self.maybe_node_by_mac((x['neighbor'])) - except: - continue - - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - node = self.maybe_node_by_mac((x['neighbor'], )) - except: - node = Node() - node.flags['online'] = True - if x['label'] == 'TT': - node.flags['client'] = True - - node.add_mac(x['neighbor']) - self._nodes.append(node) - - for x in vis_data: - - if 'router' in x: - try: - if 'gateway' in x: - x['neighbor'] = x['gateway'] - - router = self.maybe_node_by_mac((x['router'], )) - neighbor = self.maybe_node_by_mac((x['neighbor'], )) - except: - continue - - # filter TT links merged in previous step - if router == neighbor: - continue - - link = Link() - link.source = LinkConnector() - link.source.interface = x['router'] - link.source.id = 
-
-    def reduce_links(self):
-        tmp_links = defaultdict(list)
-
-        for link in self._links:
-            tmp_links[link.id].append(link)
-
-        links = []
-
-        def reduce_link(a, b):
-            a.id = b.id
-            a.source = b.source
-            a.target = b.target
-            a.type = b.type
-            a.quality = ", ".join([x for x in (a.quality, b.quality) if x])
-
-            return a
-
-        for k, v in tmp_links.items():
-            new_link = reduce(reduce_link, v, Link())
-            links.append(new_link)
-
-        return links
-
-    def import_aliases(self, aliases):
-        for mac, alias in aliases.items():
-            try:
-                node = self.maybe_node_by_mac([mac])
-            except:
-                try:
-                    node = self.maybe_node_by_fuzzy_mac(mac)
-                except:
-                    # create an offline node
-                    node = Node()
-                    node.add_mac(mac)
-                    self._nodes.append(node)
-
-            for key in alias:
-                node[key] = alias[key]
-
-            if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
-                node.interfaces[mac].vpn = True
-
-    # list of macs
-    # if options['gateway']:
-    #     mark_gateways(options['gateway'])
-    def mark_gateways(self, gateways):
-        for gateway in gateways:
-            try:
-                node = self.maybe_node_by_mac((gateway, ))
-            except:
-                print("WARNING: did not find gateway '",gateway,"' in node list")
-                continue
-
-            node.flags['gateway'] = True
-
-    def update_vpn_links(self):
-        changes = 1
-        while changes > 0:
-            changes = 0
-            for link in self._links:
-                if link.type == "client":
-                    continue
-
-                source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
-                target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
-                if source_interface.vpn or target_interface.vpn:
-                    source_interface.vpn = True
-                    target_interface.vpn = True
-                    if link.type != "vpn":
-                        changes += 1
-
-                    link.type = "vpn"
-
-    def count_clients(self):
-        for link in self._links:
-            try:
-                a = self.maybe_node_by_id(link.source.interface)
-                b = self.maybe_node_by_id(link.target.interface)
-
-                if a.flags['client']:
-                    client = a
-                    node = b
-                elif b.flags['client']:
-                    client = b
-                    node = a
-                else:
-                    continue
-
-                node.clientcount += 1
-            except:
-                pass
-
-    def obscure_clients(self):
-
-        globalIdCounter = 0
-        nodeCounters = {}
-        clientIds = {}
-
-        for node in self._nodes:
-            if node.flags['client']:
-                node.macs = set()
-                clientIds[node.id] = None
-
-        for link in self._links:
-            ids = link.source.interface
-            idt = link.target.interface
-
-            try:
-                node_source = self.maybe_node_by_fuzzy_mac(ids)
-                node_target = self.maybe_node_by_id(idt)
-
-                if not node_source.flags['client'] and not node_target.flags['client']:
-                    # if none of the nodes associated with this link are clients,
-                    # we do not want to obscure
-                    continue
-
-                if ids in clientIds and idt in clientIds:
-                    # This is for corner cases, when a client
-                    # is linked to another client.
-                    clientIds[ids] = str(globalIdCounter)
-                    ids = str(globalIdCounter)
-                    globalIdCounter += 1
-
-                    clientIds[idt] = str(globalIdCounter)
-                    idt = str(globalIdCounter)
-                    globalIdCounter += 1
-
-                elif ids in clientIds:
-                    newId = generateId(idt)
-                    clientIds[ids] = newId
-                    ids = newId
-
-                    link.source.interface = ids;
-                    node_source.id = ids;
-
-                elif idt in clientIds:
-                    newId = generateId(ids,nodeCounters)
-                    clientIds[idt] = newId
-                    idt = newId
-
-                    link.target.interface = idt;
-                    node_target.id = idt;
-
-                link.id = ids + "-" + idt
-
-            except KeyError:
-                pass
-
-# extends node id by incremented node counter
-def generateId(nodeId,nodeCounters):
-    if nodeId in nodeCounters:
-        n = nodeCounters[nodeId]
-        nodeCounters[nodeId] = n + 1
-    else:
-        nodeCounters[nodeId] = 1
-        n = 0
-
-    return nodeId + "_" + str(n)
-
-# compares two MACs and decides whether they are
-# similar and could be from the same node
-def is_similar(a, b):
-    if a == b:
-        return True
-
-    try:
-        mac_a = list(int(i, 16) for i in a.split(":"))
-        mac_b = list(int(i, 16) for i in b.split(":"))
-    except ValueError:
-        return False
-
-    # first byte must only differ in bit 2
-    if mac_a[0] | 2 == mac_b[0] | 2:
-        # count different bytes
-        c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
-    else:
-        return False
-
-    # no more than two additional bytes must differ
-    if len(c) <= 2:
-        delta = 0
-
-        if len(c) > 0:
-            delta = sum(abs(i[0] - i[1]) for i in c)
-
-        # These addresses look pretty similar!
-        return delta < 8
-
-def is_derived_mac(a, b):
-    if a == b:
-        return True
-
-    try:
-        mac_a = list(int(i, 16) for i in a.split(":"))
-        mac_b = list(int(i, 16) for i in b.split(":"))
-    except ValueError:
-        return False
-
-    if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]:
-        return False
-
-    x = list(mac_a)
-    x[5] += 1
-    x[5] %= 255
-    if mac_b == x:
-        return True
-
-    x[0] |= 2
-    if mac_b == x:
-        return True
-
-    x[3] += 1
-    x[3] %= 255
-    if mac_b == x:
-        return True
-
-    x = list(mac_a)
-    x[0] |= 2
-    x[5] += 2
-    x[5] %= 255
-    if mac_b == x:
-        return True
-
-    x = list(mac_a)
-    x[0] |= 2
-    x[3] += 1
-    x[3] %= 255
-    if mac_b == x:
-        return True
-
-    return False
+from node import Node
+
+class AmbiguityException(Exception):
+    """Indicate the ambiguity of identifiers.
+
+    This exception is raised if there is more than one match for a set
+    of identifiers.
+
+    Attributes:
+        identifiers -- set of ambiguous identifiers
+    """
+
+    identifiers = []
+
+    def __init__(self, identifiers):
+        self.identifiers = identifiers
+
+    def __str__(self):
+        return "Ambiguous identifiers: %s" % ", ".join(self.identifiers)
+
+class NodeDB(dict):
+    def add_or_update(self, ids, other=None):
+        """Add or update a node in the database.
+
+        Searches for an already existing node and updates it, or adds a
+        new one if no existing one is found. Raises an AmbiguityException
+        if more than one distinct node matches the given identifiers.
+
+        Arguments:
+        ids -- list of possible identifiers (probably MAC addresses) of
+               the node
+        other -- dict of values to update in an existing node or add to
+                 the new one. Defaults to None, in which case no values
+                 are added or updated, only the aliases of the
+                 (possibly freshly created) node are updated.
+        """
+
+        # Find the existing node, if any
+        node = None
+        node_id = None
+        for id_ in ids:
+            if id_ == node_id:
+                continue
+            if id_ in self:
+                if node is not None and self[id_] is not node:
+                    raise AmbiguityException([node_id, id_])
+                node = self[id_]
+                node_id = id_
+
+        # If no node was found, create a new one
+        if node is None:
+            node = Node(ids[0])
+
+        # Update the node with the given properties using its own update method.
+        if other is not None:
+            node.update(other)
+
+        # Add new aliases if any
+        for id_ in ids:
+            self[id_] = node
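This is the method the two-pass batadv input above relies on. A quick sketch of the alias-merging behaviour, with identifiers invented for illustration:

    from nodedb import NodeDB

    db = NodeDB()
    # First sighting: register one node under two aliases.
    db.add_or_update(["aa:bb:cc:00:00:01", "aa:bb:cc:00:00:02"])
    # A later sighting under either alias updates the same Node object.
    db.add_or_update(["aa:bb:cc:00:00:02"], {"hostname": "node01"})
    assert db["aa:bb:cc:00:00:01"]["hostname"] == "node01"

    # Passing ids that already belong to two distinct nodes raises
    # AmbiguityException instead of silently merging them.
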
+ """ + + # Find existing node, if any + node = None + node_id = None + for id_ in ids: + if id_ == node_id: + continue + if id_ in self: + if node is not None: + raise AmbiguityException([node_id, id_]) + node = self[id_] + node_id = id_ + + # If no node was found, create a new one + if node is None: + node = Node(ids[0]) + + # Update the node with the given properties using its own update method. + if other is not None: + node.update(other) + + # Add new aliases if any + for id_ in ids: + self[id_] = node diff --git a/outputs/json/__init__.py b/outputs/json/__init__.py new file mode 100644 index 0000000..f005c38 --- /dev/null +++ b/outputs/json/__init__.py @@ -0,0 +1,71 @@ +import json + +__all__ = ["Exporter"] + +class CustomJSONEncoder(json.JSONEncoder): + """ + JSON encoder that uses an object's __json__() method to convert it to + something JSON-compatible. + """ + def default(self, obj): + try: + return obj.__json__() + except AttributeError: + pass + return super().default(obj) + +class Exporter: + def __init__(self, filepath="nodes.json"): + self.filepath = filepath + + @staticmethod + def generate(nodedb): + indexes = {} + nodes = [] + count = 0 + for node in set(nodedb.values()): + nodes.append(node.export()) + indexes[node.id] = count + count += 1 + + links = [] + for node in set(nodedb.values()): + if "neighbors" in node: + links.extend( + { + "source": indexes[node.id], + "target": indexes[neighbor["neighbor"].id], + "quality": neighbor["metric"], + "type": "vpn" if neighbor["neighbor"]["vpn"] else None, + "id": "-".join((node.id, neighbor["neighbor"].id)), + } for neighbor in node["neighbors"] + ) + if "clients" in node: + for client in node["clients"]: + if not client in indexes: + nodes.append({ + "id": client, + }) + indexes[client] = count + count += 1 + + links.append({ + "source": indexes[node.id], + "target": indexes[client], + "quality": "TT", + "type": "client", + "id": "-".join((node.id, client)), + }) + + return { + "nodes": nodes, + "links": links, + } + + def export(self, nodedb): + with open(self.filepath, "w") as nodes_json: + json.dump( + self.generate(nodedb), + nodes_json, + cls=CustomJSONEncoder + ) diff --git a/GlobalRRD.py b/outputs/rrd/GlobalRRD.py similarity index 89% rename from GlobalRRD.py rename to outputs/rrd/GlobalRRD.py index f3f3960..b114418 100644 --- a/GlobalRRD.py +++ b/outputs/rrd/GlobalRRD.py @@ -1,6 +1,6 @@ import os import subprocess -from RRD import RRD, DS, RRA +from .RRD import RRD, DS, RRA class GlobalRRD(RRD): ds_list = [ @@ -15,8 +15,8 @@ class GlobalRRD(RRD): RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples ] - def __init__(self, directory): - super().__init__(os.path.join(directory, "nodes.rrd")) + def __init__(self, filepath): + super().__init__(filepath) self.ensureSanity(self.ds_list, self.rra_list, step=60) def update(self, nodeCount, clientCount): diff --git a/NodeRRD.py b/outputs/rrd/NodeRRD.py similarity index 85% rename from NodeRRD.py rename to outputs/rrd/NodeRRD.py index 0118234..fc8aef1 100644 --- a/NodeRRD.py +++ b/outputs/rrd/NodeRRD.py @@ -1,7 +1,7 @@ import os import subprocess from node import Node -from RRD import RRD, DS, RRA +from .RRD import RRD, DS, RRA class NodeRRD(RRD): ds_list = [ @@ -45,18 +45,18 @@ class NodeRRD(RRD): def update(self): values = { 'upstate': 1, - 'clients': float(self.node.clients), - 'neighbors': float(self.node.neighbors), - 'vpn_neighbors': float(self.node.vpn_neighbors), - 'loadavg': float(self.node.statistics['loadavg']), + 'clients': float(len(self.node.get('clients', 
diff --git a/GlobalRRD.py b/outputs/rrd/GlobalRRD.py
similarity index 89%
rename from GlobalRRD.py
rename to outputs/rrd/GlobalRRD.py
index f3f3960..b114418 100644
--- a/GlobalRRD.py
+++ b/outputs/rrd/GlobalRRD.py
@@ -1,6 +1,6 @@
 import os
 import subprocess
-from RRD import RRD, DS, RRA
+from .RRD import RRD, DS, RRA
 
 class GlobalRRD(RRD):
     ds_list = [
@@ -15,8 +15,8 @@ class GlobalRRD(RRD):
         RRA('AVERAGE', 0.5, 1440, 1780),# ~5 years of 1 day samples
     ]
 
-    def __init__(self, directory):
-        super().__init__(os.path.join(directory, "nodes.rrd"))
+    def __init__(self, filepath):
+        super().__init__(filepath)
         self.ensureSanity(self.ds_list, self.rra_list, step=60)
 
     def update(self, nodeCount, clientCount):
diff --git a/NodeRRD.py b/outputs/rrd/NodeRRD.py
similarity index 85%
rename from NodeRRD.py
rename to outputs/rrd/NodeRRD.py
index 0118234..fc8aef1 100644
--- a/NodeRRD.py
+++ b/outputs/rrd/NodeRRD.py
@@ -1,7 +1,7 @@
 import os
 import subprocess
 from node import Node
-from RRD import RRD, DS, RRA
+from .RRD import RRD, DS, RRA
 
 class NodeRRD(RRD):
     ds_list = [
@@ -45,18 +45,18 @@ class NodeRRD(RRD):
     def update(self):
         values = {
             'upstate': 1,
-            'clients': float(self.node.clients),
-            'neighbors': float(self.node.neighbors),
-            'vpn_neighbors': float(self.node.vpn_neighbors),
-            'loadavg': float(self.node.statistics['loadavg']),
+            'clients': float(len(self.node.get('clients', []))),
+            'neighbors': float(len(self.node.get('neighbors', []))),
+            'vpn_neighbors': float(len(self.node.vpn_neighbors)),
+            'loadavg': float(self.node['statistics']['loadavg']),
         }
         for item in ('rx', 'tx', 'mgmt_rx', 'mgmt_tx', 'forward'):
             try:
-                values['%s_bytes' % item] = int(self.node.statistics['traffic'][item]['bytes'])
+                values[item + '_bytes'] = int(self.node['statistics']['traffic'][item]['bytes'])
             except TypeError:
                 pass
             try:
-                values['%s_packets' % item] = int(self.node.statistics['traffic'][item]['packets'])
+                values[item + '_packets'] = int(self.node['statistics']['traffic'][item]['packets'])
             except TypeError:
                 pass
         super().update(values)
diff --git a/RRD.py b/outputs/rrd/RRD.py
similarity index 100%
rename from RRD.py
rename to outputs/rrd/RRD.py
diff --git a/outputs/rrd/__init__.py b/outputs/rrd/__init__.py
new file mode 100644
index 0000000..5e9fbc1
--- /dev/null
+++ b/outputs/rrd/__init__.py
@@ -0,0 +1,31 @@
+import os
+from .NodeRRD import NodeRRD
+from .GlobalRRD import GlobalRRD
+
+class Exporter:
+    def __init__(self, directory="nodedb"):
+        self.directory = directory
+        try:
+            os.mkdir(self.directory)
+        except OSError:
+            pass
+
+    def export(self, nodedb):
+        nodes = set(nodedb.values())
+        clients = 0
+        nodecount = 0
+        for node in nodes:
+            clients += len(node.get("clients", []))
+            nodecount += 1
+            NodeRRD(
+                os.path.join(
+                    self.directory,
+                    str(node.id).replace(':', '') + '.rrd'
+                ),
+                node
+            ).update()
+
+        GlobalRRD(os.path.join(self.directory, "nodes.rrd")).update(
+            nodecount,
+            clients
+        )
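GlobalRRD now takes the full file path instead of a directory, so the placement of nodes.rrd is entirely the caller's choice. A one-line usage sketch, assuming rrdtool is installed and with a path invented for illustration:

    from outputs.rrd.GlobalRRD import GlobalRRD

    # Record one sample of 100 nodes and 250 clients.
    GlobalRRD("/var/lib/ffmap/nodes.rrd").update(100, 250)
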
- """ - - self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal) - - nodeDbFiles = os.listdir(self.dbPath) - - for fileName in nodeDbFiles: - if not os.path.isfile(os.path.join(self.dbPath, fileName)): - continue - - nodeName = os.path.basename(fileName).split('.') - if nodeName[1] == 'rrd' and not nodeName[0] == "nodes": - rrd = NodeRRD(os.path.join(self.dbPath, fileName)) - rrd.graph(self.imagePath, self.displayTimeNode)