From 666cd20c04c38c265179a5ff9c10e0027954afcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Sun, 23 Apr 2017 22:57:07 +0200 Subject: [PATCH 01/10] Wokring on support for hopglass server (raw.json). --- NodeHierarchy.py | 14 +++++---- parser/Hopglass.py | 74 ++++++++++++++++++++++++++++++++++++++++++++ parser/JsonParser.py | 4 +-- requirements.txt | 1 + 4 files changed, 85 insertions(+), 8 deletions(-) create mode 100644 parser/Hopglass.py create mode 100644 requirements.txt diff --git a/NodeHierarchy.py b/NodeHierarchy.py index 27b1331..bd31eb9 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -1,7 +1,8 @@ #!/usr/bin/python3 import argparse -from parser.NodesParser import NodesParser -from parser.GraphParser import GraphParser +# from parser.NodesParser import NodesParser +# from parser.GraphParser import GraphParser +from parser.Hopglass import Hopglass from cloud.Node import Node from cloud.Link import Link from cloud.GlobalGraph import GlobalGraph @@ -13,8 +14,9 @@ from info.Info import Info class NodeHierarchy(object): def __init__(self): self.__args__ = self.__parseArguments__() - self.__nodesJson__ = NodesParser(self.__args__.json_path) - self.__graphJson__ = GraphParser(self.__args__.json_path) + self.__hopglass = Hopglass(self.__args__.raw_json) + # self.__nodesJson__ = NodesParser(self.__args__.json_path) + # self.__graphJson__ = GraphParser(self.__args__.json_path) self.__shapesJson__ = self.__parseShapes__() self.nodes = self.__createNodeObjects__() self.links = self.__createLinkObjects__() @@ -48,7 +50,7 @@ class NodeHierarchy(object): def __createLinkObjects__(self): links = [] - for link in self.__graphJson__.links: + for link in self.__hopglass.links: try: srcNode = self.nodes[link['source']['node_id']] except: @@ -68,7 +70,7 @@ class NodeHierarchy(object): def __parseArguments__(self): parser = argparse.ArgumentParser(description='This Script generates a hierarchical nodes list for node migration using nginx geo feature.') - parser.add_argument('-j', '--json-path', required=False, default='https://service.freifunk-muensterland.de/maps/data/', help='Path of nodes.json and graph.json (can be local folder or remote URL).') + parser.add_argument('-r', '--raw-json', required=False, default='https://karte.freifunk-muensterland.de/data/raw.json', help='Location of raw.json file (can be local folder or remote URL).') parser.add_argument('-s', '--shapes-path', required=False, default='https://freifunk-muensterland.de/md-fw-dl/shapes/', help='Path of shapefiles (can be local folder or remote URL).') parser.add_argument('-t', '--targets', nargs='+', required=True, help='List of targets which should be proceeded. 
Example: -t citya cityb ...') parser.add_argument('-o', '--out-file', default='./webserver-configuration', required=False, help='Filename where the generated Output should stored.') diff --git a/parser/Hopglass.py b/parser/Hopglass.py new file mode 100644 index 0000000..aa11eef --- /dev/null +++ b/parser/Hopglass.py @@ -0,0 +1,74 @@ +from parser.JsonParser import JsonParser +import collections +import json + +class Hopglass(JsonParser): + def __init__(self, filePath): + super().__init__(filePath) + self.ifIDs = {} + self.links = collections.defaultdict(dict) + self.nodes = {} + self.__aggregateData__() + #print(self.ifIDs) + for k, v in self.links.items(): + print(k,v,'\n') + + def __aggregateData__(self): + for nodeID, nodeData in self.__jsonData__.items(): + + # let pass nodes that provide all required informations only + if not set(('nodeinfo', 'neighbours')) <= set(nodeData): + continue + + nodeInfo = nodeData['nodeinfo'] + neighbours = nodeData['neighbours'] + + if not 'batadv' in neighbours: + continue + + if not 'mesh' in nodeInfo.get('network', {}): + continue + + for batID, batVal in nodeInfo['network']['mesh'].items(): + if not 'interfaces' in batVal: + continue + for ifType, ifVal in batVal['interfaces'].items(): + for mac in ifVal: + self.ifIDs[mac] = { + 'type' : ifType, + 'nodeID' : nodeID + } + + self.nodes[nodeID] = nodeData + + for nodeID, nodeData in self.nodes.items(): + for iname, ivalue in nodeData['neighbours']['batadv'].items(): + if 'neighbours' not in ivalue: + continue + if not iname in self.ifIDs: + continue + for nname, nvalue in ivalue['neighbours'].items(): + if nname not in self.ifIDs: + continue + nifID = self.ifIDs[nname]['nodeID'] + partID = (nodeID, nifID) if nodeID > nifID else (nifID, nodeID) + linkID = (iname, nname) if iname > nname else (nname, iname) + + linkNode = { + 'nodeID' : nodeID, + 'type' : self.ifIDs[iname]['type'], + 'tq' : nvalue['tq'] + } + + if linkID in self.links[partID]: + self.links[partID][linkID].append(linkNode) + else: + self.links[partID][linkID] = [linkNode] + + + def getLinksForNodeID(self, nodeID): + links = [] + for link in self.links: + if link['target']['node_id'] == nodeID or link['source']['node_id'] == nodeID: + links.append(link) + return links diff --git a/parser/JsonParser.py b/parser/JsonParser.py index 96f914f..d9bed53 100644 --- a/parser/JsonParser.py +++ b/parser/JsonParser.py @@ -9,7 +9,7 @@ class JsonParser(object): def __getFile__(self, fileName): if fileName.startswith('https://') or fileName.startswith('http://'): if self.printStatus: - print('Download', fileName.rsplit('/', 1)[1] , 'from URL:', fileName) + print('Download', fileName, 'from URL:', fileName) resource = urllib.request.urlopen(fileName) try: data = json.loads(resource.read().decode('utf-8')) @@ -19,7 +19,7 @@ class JsonParser(object): resource.close() else: if self.printStatus: - print('Open', fileName.rsplit('/', 1)[1] , 'from file:', fileName) + print('Open', fileName, 'from file:', fileName) with open(fileName) as data_file: try: data = json.load(data_file) diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..231726c --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +shapely \ No newline at end of file From 1a017185389afe0270f1d09f9b53ef1be8d66c15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Sun, 23 Apr 2017 23:09:31 +0200 Subject: [PATCH 02/10] Wokring on support for hopglass server (raw.json). 
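For reference, a minimal sketch (not part of the diff) of the link keying used in parser/Hopglass.py: every neighbour relation is filed under an ordered node-ID pair plus an ordered interface-MAC pair, so both directions of a batman-adv link collapse into a single entry. Node IDs and MAC addresses below are made up.

```
def link_keys(node_a, node_b, mac_a, mac_b):
    # Sort each pair so that the (a, b) and (b, a) directions yield identical keys.
    part_id = (node_a, node_b) if node_a > node_b else (node_b, node_a)
    link_id = (mac_a, mac_b) if mac_a > mac_b else (mac_b, mac_a)
    return part_id, link_id

# Both directions of the same neighbour relation map to the same entry:
assert link_keys('node1', 'node2', 'aa:bb:cc:00:00:01', 'aa:bb:cc:00:00:02') == \
       link_keys('node2', 'node1', 'aa:bb:cc:00:00:02', 'aa:bb:cc:00:00:01')
```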
--- NodeHierarchy.py | 2 +- cloud/NodeInit.py | 9 +++------ parser/Hopglass.py | 12 +++++++++++- 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/NodeHierarchy.py b/NodeHierarchy.py index bd31eb9..8e2b574 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -42,7 +42,7 @@ class NodeHierarchy(object): def __createNodeObjects__(self): nodes = {} - for nodeID, nodeValue in self.__nodesJson__.nodes.items(): + for nodeID, nodeValue in self.__hopglass.nodes.items(): print('Create Node object #',len(nodes), '\r',end = '') nodes[nodeID] = Node(nodeValue) print('') diff --git a/cloud/NodeInit.py b/cloud/NodeInit.py index 2ad9a16..aa70ee9 100644 --- a/cloud/NodeInit.py +++ b/cloud/NodeInit.py @@ -4,11 +4,11 @@ class NodeInit(object): self.nodeID = self.__jsonObject__['nodeinfo']['node_id'] self.interfaces = self.__getInterfaces__() self.hostname = self.__jsonObject__['nodeinfo']['hostname'] - self.isGateway = self.__jsonObject__['flags']['gateway'] + self.isGateway = self.__jsonObject__['nodeinfo']['isGateway'] self.geo = self.__getGeo__() self.isAutoupdaterEnabled = self.__getAutoupdaterStatus__() self.autoupdaterBranch = self.__getBranch__() - self.isOnline = self.__jsonObject__['flags']['online'] + self.isOnline = self.__jsonObject__['nodeinfo']['isOnline'] self.publicIPv6Addresses = self.__getPublicAddresses__() self.domID = self.__getSiteCode__() @@ -25,10 +25,7 @@ class NodeInit(object): return False def __getBranch__(self): - if 'autoupdater' in self.__jsonObject__['nodeinfo']['software']: - return self.__jsonObject__['nodeinfo']['software']['autoupdater']['branch'] - else: - return None + return self.__jsonObject__.get('nodeinfo', {}).get('software', {}).get('autoupdater', {}).get('branch', None) def __getGeo__(self): geo = {} diff --git a/parser/Hopglass.py b/parser/Hopglass.py index aa11eef..8bca9f2 100644 --- a/parser/Hopglass.py +++ b/parser/Hopglass.py @@ -8,6 +8,8 @@ class Hopglass(JsonParser): self.ifIDs = {} self.links = collections.defaultdict(dict) self.nodes = {} + self.gatewayMacs = [] + self.gateways = [] self.__aggregateData__() #print(self.ifIDs) for k, v in self.links.items(): @@ -17,11 +19,12 @@ class Hopglass(JsonParser): for nodeID, nodeData in self.__jsonData__.items(): # let pass nodes that provide all required informations only - if not set(('nodeinfo', 'neighbours')) <= set(nodeData): + if not set(('nodeinfo', 'neighbours', 'statistics')) <= set(nodeData): continue nodeInfo = nodeData['nodeinfo'] neighbours = nodeData['neighbours'] + statistics = nodeData['statistics'] if not 'batadv' in neighbours: continue @@ -29,6 +32,9 @@ class Hopglass(JsonParser): if not 'mesh' in nodeInfo.get('network', {}): continue + if statistics.get('gateway', False): + self.gatewayMacs.append(statistics['gateway']) + for batID, batVal in nodeInfo['network']['mesh'].items(): if not 'interfaces' in batVal: continue @@ -42,9 +48,13 @@ class Hopglass(JsonParser): self.nodes[nodeID] = nodeData for nodeID, nodeData in self.nodes.items(): + nodeData['nodeinfo']['isGateway'] = False + nodeData['nodeinfo']['isOnline'] = True # Todo: implement detection for iname, ivalue in nodeData['neighbours']['batadv'].items(): if 'neighbours' not in ivalue: continue + if iname in self.gatewayMacs: + nodeData['nodeinfo']['isGateway'] = True if not iname in self.ifIDs: continue for nname, nvalue in ivalue['neighbours'].items(): From cce68e8da6ae7efe6a05bfe93d9e596ff563f1ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Mon, 24 Apr 2017 11:13:00 +0200 Subject: [PATCH 03/10] 
Wokring on support for hopglass server (raw.json). --- NodeHierarchy.py | 16 +++------ cloud/Link.py | 82 ++++++++++++--------------------------------- cloud/LocalGraph.py | 7 ++-- parser/Hopglass.py | 6 ++-- 4 files changed, 34 insertions(+), 77 deletions(-) diff --git a/NodeHierarchy.py b/NodeHierarchy.py index 8e2b574..7ae922f 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -50,18 +50,10 @@ class NodeHierarchy(object): def __createLinkObjects__(self): links = [] - for link in self.__hopglass.links: - try: - srcNode = self.nodes[link['source']['node_id']] - except: - srcNode = None - try: - dstNode = self.nodes[link['target']['node_id']] - except: - dstNode = None - - print('Create Link object #',len(links), '\r',end = '') - links.append(Link(link, srcNode, dstNode)) + for linkParID, linkPar in self.__hopglass.links.items(): + for linkID, link in linkPar.items(): + print('Create Link object #',len(links), '\r',end = '') + links.append(Link(link, (self.nodes[linkParID[0]], self.nodes[linkParID[1]]))) print('') return links diff --git a/cloud/Link.py b/cloud/Link.py index 214da49..c9915c1 100644 --- a/cloud/Link.py +++ b/cloud/Link.py @@ -1,69 +1,31 @@ class Link(object): - def __init__(self, LinkJsonObject, srcNode, dstNode): - self.__jsonObject__ = LinkJsonObject - self.__srcNode__ = srcNode - self.__dstNode__ = dstNode - self.linkType = self.__getLinkType__() - self.isVpn = self.__getLinkVpnState__() - - + def __init__(self, LinkJsonObject, nodes): + self.__jsonObject = LinkJsonObject + self.linkType, self.isVpn = self.__getLinkType__() + self.__nodes = nodes + def __getLinkType__(self): - type_src = None - type_dst = None - if self.__srcNode__ != None: - for k, v in self.__srcNode__.interfaces.items(): - if self.__jsonObject__['source']['interface_mac'] in v: - type_src = k - if self.__dstNode__ != None: - for k, v in self.__dstNode__.interfaces.items(): - if self.__jsonObject__['target']['interface_mac'] in v: - type_dst = k - - if type_src == type_dst: - if type_src == None: - return 'unknown' - return type_src - else: - if type_src == None: - return type_dst - elif type_dst == None: - return type_src - else: - #print(self.__srcNode__.hostname, type_src, '<-->', self.__dstNode__.hostname, type_dst) - if type_src == 'wireless': - return type_dst - else: - return type_src - - def __getLinkVpnState__(self): - if self.__jsonObject__['vpn'] == True: - return True - for node in self.getEndpointNodes(getGateways = True): - if node.isGateway == True: - return True - return False - + types = [x['type'] for x in self.__jsonObject] + ltype = types[0] + lvpn = False + for x in types: + if x != 'unknown' and x != 'other': + if x == 'l2tp' or x == 'tunnel': + lvpn = True + val = x + return ltype, lvpn + def getEndpointNodes(self, getGateways = False): - nodes = [] - if self.__srcNode__ != None: - if getGateways == True or self.__srcNode__.isGateway == False: - nodes.append(self.__srcNode__) - if self.__dstNode__ != None: - if getGateways == True or self.__dstNode__.isGateway == False: - nodes.append(self.__dstNode__) - return nodes - + return self.__nodes + def getEndpointNodeIDs(self, getGateways = True): - nodeIDs = [] - for node in self.getEndpointNodes(getGateways): - nodeIDs.append(node.nodeID) - return nodeIDs - + return [x.nodeID for x in self.__nodes] + def isNodeIDinLink(self, nodeID): - for endpoint in self.getEndpointNodes(): - if endpoint.nodeID == nodeID: + for x in self.__nodes: + if nodeID == x.nodeID: return True return False - + def isNodeInLink(self, node): return 
self.isNodeIDinLink(node.nodeID) diff --git a/cloud/LocalGraph.py b/cloud/LocalGraph.py index e81eaf8..29ff1f4 100644 --- a/cloud/LocalGraph.py +++ b/cloud/LocalGraph.py @@ -125,6 +125,9 @@ class LocalGraph(Graph): print('BranchesThatExistsInCloud:', self.getBranchesThatExistsInCloud()) print('lan links in cloud:') for link in self.getLanLinksInCloud(): - if link.__srcNode__ != None and link.__dstNode__ != None: - print(' ', link.__srcNode__.hostname, '<--->', link.__dstNode__.hostname) + hosts = link.getEndpointNodes() + if len(hosts) == 1: + print(' ', hosts.hostname, 'has unknown neighbour.') + else: + print(' ', hosts[0].hostname, '<--->', hosts[1].hostname) print('=====') diff --git a/parser/Hopglass.py b/parser/Hopglass.py index 8bca9f2..25b1736 100644 --- a/parser/Hopglass.py +++ b/parser/Hopglass.py @@ -11,9 +11,9 @@ class Hopglass(JsonParser): self.gatewayMacs = [] self.gateways = [] self.__aggregateData__() - #print(self.ifIDs) - for k, v in self.links.items(): - print(k,v,'\n') + # print(self.ifIDs) + # for k, v in self.links.items(): + # print(k,v,'\n') def __aggregateData__(self): for nodeID, nodeData in self.__jsonData__.items(): From 1c8600435e9ffaf46b99f464eec727b8cecf27dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Mon, 24 Apr 2017 15:29:54 +0200 Subject: [PATCH 04/10] Updated README.md --- README.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index dc5f9d1..3d15f86 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ Die Hilfe liefert folgendes: ``` $ ./NodeHierarchy.py --help -usage: NodeHierarchy.py [-h] [-j JSON_PATH] [-s SHAPES_PATH] -t TARGETS +usage: NodeHierarchy.py [-h] [-r RAW_JSON] [-s SHAPES_PATH] -t TARGETS [TARGETS ...] [-o OUT_FILE] [-v] [-f [{exclude_clouds_with_lan_links,no_lan} [{exclude_clouds_with_lan_links,no_lan} ...]]] [-i [{get_offline_nodes,offline} [{get_offline_nodes,offline} ...]]] @@ -41,9 +41,10 @@ geo feature. optional arguments: -h, --help show this help message and exit - -j JSON_PATH, --json-path JSON_PATH - Path of nodes.json and graph.json (can be local folder - or remote URL). + -r RAW_JSON, --raw-json RAW_JSON + Location of raw.json file (can be local folder or + remote URL). Default: https://karte.freifunk- + muensterland.de/data/raw.json -s SHAPES_PATH, --shapes-path SHAPES_PATH Path of shapefiles (can be local folder or remote URL). @@ -72,7 +73,7 @@ optional arguments: ### Anmerkungen - ``--targets`` Gibt die Namen der Ziele (Zieldomänen) an. Der Geo-Schalter in der nginx-Konfiguration wird ebenfalls diesen Namen tragen. -- ``--json-path`` Gibt das Daten-Verzeichnis eures Meshviewers an. Default: ``https://service.freifunk-muensterland.de/maps/data/`` +- ``--raw-json`` Gibt den Ort der raw.json (hopglass-server) an. Default: ``https://karte.freifunk-muensterland.de/data/raw.json`` - ``--shapes-path`` Verzeichnis an dem die Shapefiles der einzelnen Ziel-Domänen liegen. Default: ``https://freifunk-muensterland.de/md-fw-dl/shapes/`` - *Anmerkung:* Es werden Dateien in Abhängigkeit mit den Target-Namen im Verzeichnis erwartet. - *Beispiel:* Bei ``-targets domaene01 domaene02`` werden die Dateien ``domaene01.geojson`` und ``domaene02.geojson`` erwartet. From 9c16434c0ec0da74522106156be61742964e2af8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Fri, 18 Aug 2017 22:07:40 +0200 Subject: [PATCH 05/10] Improved handling of invalid nodes data. 
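The NodeInit changes below move deeply nested lookups into try/except so that nodes announcing incomplete nodeinfo fall back to a default instead of raising. A small sketch of the pattern (not part of the diff; the sample dict is made up, and the patch itself uses a bare except):

```
incomplete = {'nodeinfo': {'software': {}}}  # no autoupdater section announced

def get_branch(node):
    try:
        return node['nodeinfo']['software']['autoupdater']['branch']
    except (KeyError, TypeError):
        return None

print(get_branch(incomplete))  # prints None instead of raising a KeyError
```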
--- NodeHierarchy.py | 5 +++-- cloud/GlobalGraph.py | 9 +++++---- cloud/Graph.py | 2 +- cloud/NodeInit.py | 9 ++++++--- 4 files changed, 15 insertions(+), 10 deletions(-) diff --git a/NodeHierarchy.py b/NodeHierarchy.py index 7ae922f..414bb19 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -43,8 +43,9 @@ class NodeHierarchy(object): def __createNodeObjects__(self): nodes = {} for nodeID, nodeValue in self.__hopglass.nodes.items(): - print('Create Node object #',len(nodes), '\r',end = '') - nodes[nodeID] = Node(nodeValue) + if nodeValue['nodeinfo']['node_id']: + print('Create Node object #',len(nodes), '\r',end = '') + nodes[nodeID] = Node(nodeValue) print('') return nodes diff --git a/cloud/GlobalGraph.py b/cloud/GlobalGraph.py index d5c42d1..f369cce 100644 --- a/cloud/GlobalGraph.py +++ b/cloud/GlobalGraph.py @@ -26,18 +26,19 @@ class GlobalGraph(Graph): def __getConnectedNodes__(self, nodeID, trace = []): neighNodeIDs = self.getNeighbourNodeIDsForNodeID(nodeID) - trace_new = trace[:] + [x for x in neighNodeIDs if x not in trace] + trace_new = list(set(trace + neighNodeIDs)) for neighNodeID in neighNodeIDs: if neighNodeID not in trace: - trace_new = trace_new + [x for x in self.__getConnectedNodes__(neighNodeID, trace_new) if x not in trace_new] + trace_new = list(set(trace_new + self.__getConnectedNodes__(neighNodeID, trace_new))) return trace_new def __createLocalCloudByNodesList__(self, nodesIDList): nodes = {} links = [] for nodeID in nodesIDList: - nodes[nodeID] = self.__nodes__[nodeID] - links = links + [x for x in self.getLinksByNodeID(nodeID) if x not in links] + if nodeID: + nodes[nodeID] = self.__nodes__[nodeID] + links = list(set(links + self.getLinksByNodeID(nodeID))) return LocalGraph(nodes, links, self.__enableDebugPrinting__) def __debugPrint__(self): diff --git a/cloud/Graph.py b/cloud/Graph.py index 6f7b9e1..03d7d48 100644 --- a/cloud/Graph.py +++ b/cloud/Graph.py @@ -26,7 +26,7 @@ class Graph(object): if link.isVpn == False: endpoints = link.getEndpointNodeIDs(getGateways = False) if nodeID in endpoints: - neighNodeIDs = neighNodeIDs + [x for x in endpoints if x not in neighNodeIDs] + neighNodeIDs = list(set(neighNodeIDs + endpoints)) return neighNodeIDs def getLinksByNodeID(self, nodeID): diff --git a/cloud/NodeInit.py b/cloud/NodeInit.py index aa70ee9..a7abf1e 100644 --- a/cloud/NodeInit.py +++ b/cloud/NodeInit.py @@ -19,13 +19,16 @@ class NodeInit(object): return {} def __getAutoupdaterStatus__(self): - if 'autoupdater' in self.__jsonObject__['nodeinfo']['software']: + try: return self.__jsonObject__['nodeinfo']['software']['autoupdater']['enabled'] - else: + except: return False def __getBranch__(self): - return self.__jsonObject__.get('nodeinfo', {}).get('software', {}).get('autoupdater', {}).get('branch', None) + try: + return self.__jsonObject__['nodeinfo']['software']['autoupdater']['branch'] + except: + return None def __getGeo__(self): geo = {} From 67da82fb3800a4bbf8cc5b180f0b075ce591b307 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Fri, 18 Aug 2017 22:10:27 +0200 Subject: [PATCH 06/10] Updated README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 3d15f86..1f34579 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Node Hierarchy -Dieses Tool generiert auf Basis einer ``graph.json`` und ``nodes.json`` des [Meshviewers](https://github.com/ffnord/meshviewer/) sowie (Multi-)Polygonen (im [geojson](http://geojson.org/) Format) der einzelnen Zieldomänen 
eine [nginx](http://nginx.org/) Konfigurationsdatei (auf Basis des [Geo-Moduls](http://nginx.org/en/docs/http/ngx_http_geo_module.html)), um Knoten in der richtigen Reihenfolge umzuziehen. +Dieses Tool generiert auf Basis einer ``raw.json`` des [hopglass-server](https://github.com/hopglass/hopglass-server) sowie (Multi-)Polygonen (im [geojson](http://geojson.org/) Format) der einzelnen Zieldomänen eine [nginx](http://nginx.org/) Konfigurationsdatei (auf Basis des [Geo-Moduls](http://nginx.org/en/docs/http/ngx_http_geo_module.html)), um Knoten in der richtigen Reihenfolge umzuziehen. ## Vorgehensweise @@ -131,9 +131,9 @@ schreibt in die Datei ``./offline_nodes.csv`` (default-Einstellung der Schalter ## Bekannte Probleme -Wenn es sich bei der Quell-Domäne um eine L2TP-Domäne handelt, läuft das Tool derzeit nur, wenn [alfred](https://github.com/ffnord/ffnord-alfred-announce) auf allen Gateway-Servern läuft. +Wenn es sich bei der Quell-Domäne um eine L2TP-Domäne handelt, läuft das Tool derzeit nur, wenn [alfred](https://github.com/ffnord/ffnord-alfred-announce) oder respondd auf allen Gateway-Servern läuft. -*Anmerkung:* Wenn in der ``nodes.json`` und ``graph.json`` mehrere Domänen vorhanden sind und dort teilweise L2TP-Domänen vorhanden sind (dieses aber nicht das Gebiet eurer Zieldomäne betrifft), kann das sehr negative Auswirkungen auf die Laufzeit haben (> 30 Sekunden). +*Anmerkung:* Wenn in der ``graph.json`` mehrere Domänen vorhanden sind und dort teilweise L2TP-Domänen vorhanden sind (dieses aber nicht das Gebiet eurer Zieldomäne betrifft), kann das sehr negative Auswirkungen auf die Laufzeit haben (> 30 Sekunden). ## Lizenz From 0632593675a548d8a570bc043654d33905a7fe8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Fri, 18 Aug 2017 22:27:00 +0200 Subject: [PATCH 07/10] Ignore links where not both sides are known. --- NodeHierarchy.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/NodeHierarchy.py b/NodeHierarchy.py index 414bb19..c914c81 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -54,7 +54,8 @@ class NodeHierarchy(object): for linkParID, linkPar in self.__hopglass.links.items(): for linkID, link in linkPar.items(): print('Create Link object #',len(links), '\r',end = '') - links.append(Link(link, (self.nodes[linkParID[0]], self.nodes[linkParID[1]]))) + if linkParID[0] != 'null' and linkParID[1] != 'null': + links.append(Link(link, (self.nodes[linkParID[0]], self.nodes[linkParID[1]]))) print('') return links From 8d4021f122505602b6f7e7938fecb662907c2818 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Sat, 19 Aug 2017 23:25:30 +0200 Subject: [PATCH 08/10] Several updates: - Implemented option to translate sitecode to domainname. - Added filter: Skip nodes if they are already in the target domain. - Several minor changes. 
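A short sketch (not part of the diff) of the prefix translation that --site-to-target-prefix applies to a node's site_code before it is compared against the target names; the values are only examples:

```
site_to_target_prefix = 'ffmsd,domaene'            # value passed via -sttp
old_prefix, new_prefix = site_to_target_prefix.split(',')

site_code = 'ffmsd01'                              # example site_code from nodeinfo
print(site_code.replace(old_prefix, new_prefix))   # -> domaene01
```

With the prefixes aligned, the new domain_transitions_only filter can skip nodes whose translated site_code already matches the target domain.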
--- NodeHierarchy.py | 16 +++++++++++++--- README.md | 16 ++++++++++------ cloud/NodeInit.py | 19 +++++++++++-------- generator/Filter.py | 10 ++++++++-- generator/NginxConfGen.py | 4 ++-- 5 files changed, 44 insertions(+), 21 deletions(-) diff --git a/NodeHierarchy.py b/NodeHierarchy.py index c914c81..aeb0721 100755 --- a/NodeHierarchy.py +++ b/NodeHierarchy.py @@ -45,10 +45,19 @@ class NodeHierarchy(object): for nodeID, nodeValue in self.__hopglass.nodes.items(): if nodeValue['nodeinfo']['node_id']: print('Create Node object #',len(nodes), '\r',end = '') - nodes[nodeID] = Node(nodeValue) + nodes[nodeID] = Node(self.__prepareNodeData__(nodeValue)) print('') return nodes - + + def __prepareNodeData__(self, nodeValue): + if self.__args__.site_to_target_prefix: + pref = self.__args__.site_to_target_prefix.split(',') + try: + nodeValue['nodeinfo']['system']['site_code'] = nodeValue['nodeinfo']['system']['site_code'].replace(pref[0],pref[1]) + except: + pass + return nodeValue + def __createLinkObjects__(self): links = [] for linkParID, linkPar in self.__hopglass.links.items(): @@ -67,9 +76,10 @@ class NodeHierarchy(object): parser.add_argument('-r', '--raw-json', required=False, default='https://karte.freifunk-muensterland.de/data/raw.json', help='Location of raw.json file (can be local folder or remote URL).') parser.add_argument('-s', '--shapes-path', required=False, default='https://freifunk-muensterland.de/md-fw-dl/shapes/', help='Path of shapefiles (can be local folder or remote URL).') parser.add_argument('-t', '--targets', nargs='+', required=True, help='List of targets which should be proceeded. Example: -t citya cityb ...') + parser.add_argument('-sttp', '--site-to-target-prefix', required=False, help='Used to match site and target also when prefixes are different. Example: -sttp "ffmsd,domaene"') parser.add_argument('-o', '--out-file', default='./webserver-configuration', required=False, help='Filename where the generated Output should stored.') parser.add_argument('-v', '--debug', required=False, action='store_true', help='Enable debugging output.') - parser.add_argument('-f', '--filters', nargs='*', required=False, choices=('exclude_clouds_with_lan_links', 'no_lan'), help='Filter out nodes and local clouds based on filter rules.') + parser.add_argument('-f', '--filters', nargs='*', required=False, choices=('exclude_clouds_with_lan_links', 'no_lan', 'domain_transitions_only'), help='Filter out nodes and local clouds based on filter rules.') parser.add_argument('-i', '--info', nargs='*', required=False, choices=('get_offline_nodes','offline'), help='Get infos about the graph, links and nodes.') parser.add_argument('-if', '--info-filters', nargs='*', required=False, help='Filter info results. Currently supported: min_age:TIME_RANGE, max_age:TIME_RANGE. Examples: -if min_age:1d max_age:2w') parser.add_argument('-iop', '--info-out-path', required=False, default='./', help='Folder where info files should be written. Default: ./') diff --git a/README.md b/README.md index 1f34579..bdcbf1d 100644 --- a/README.md +++ b/README.md @@ -29,8 +29,9 @@ Die Hilfe liefert folgendes: ``` $ ./NodeHierarchy.py --help usage: NodeHierarchy.py [-h] [-r RAW_JSON] [-s SHAPES_PATH] -t TARGETS - [TARGETS ...] [-o OUT_FILE] [-v] - [-f [{exclude_clouds_with_lan_links,no_lan} [{exclude_clouds_with_lan_links,no_lan} ...]]] + [TARGETS ...] 
[-sttp SITE_TO_TARGET_PREFIX] + [-o OUT_FILE] [-v] + [-f [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} ...]]] [-i [{get_offline_nodes,offline} [{get_offline_nodes,offline} ...]]] [-if [INFO_FILTERS [INFO_FILTERS ...]]] [-iop INFO_OUT_PATH] @@ -43,18 +44,20 @@ optional arguments: -h, --help show this help message and exit -r RAW_JSON, --raw-json RAW_JSON Location of raw.json file (can be local folder or - remote URL). Default: https://karte.freifunk- - muensterland.de/data/raw.json + remote URL). -s SHAPES_PATH, --shapes-path SHAPES_PATH Path of shapefiles (can be local folder or remote URL). -t TARGETS [TARGETS ...], --targets TARGETS [TARGETS ...] List of targets which should be proceeded. Example: -t citya cityb ... + -sttp SITE_TO_TARGET_PREFIX, --site-to-target-prefix SITE_TO_TARGET_PREFIX + Used to match site and target also when prefixes are + different. Example: -sttp "ffmsd,domaene" -o OUT_FILE, --out-file OUT_FILE Filename where the generated Output should stored. -v, --debug Enable debugging output. - -f [{exclude_clouds_with_lan_links,no_lan} [{exclude_clouds_with_lan_links,no_lan} ...]], --filters [{exclude_clouds_with_lan_links,no_lan} [{exclude_clouds_with_lan_links,no_lan} ...]] + -f [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} ...]], --filters [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} [{exclude_clouds_with_lan_links,no_lan,domain_transitions_only} ...]] Filter out nodes and local clouds based on filter rules. -i [{get_offline_nodes,offline} [{get_offline_nodes,offline} ...]], --info [{get_offline_nodes,offline} [{get_offline_nodes,offline} ...]] @@ -90,7 +93,8 @@ Weitere Filterungen lassen sich über das ``--filters`` Attribut aktivieren. Folgende Filter sind derzeit implementiert (zukünftig folgen noch weitere): -- ``exclude_clouds_with_lan_links`` bzw. ``no_lan`` Filtert alle lokalen Wolken aus, in denen sich mindestens ein Mesh-on-LAN Link befindet +- ``exclude_clouds_with_lan_links`` bzw. 
``no_lan`` filtert alle lokalen Wolken aus, in denen sich mindestens ein Mesh-on-LAN Link befindet +- ``domain_transitions_only`` filtert alle Knoten aus, die sich bereits in der richtigen Domäne befinden / die Firmware der richtigen Domäne besitzen ## Nginx Konfiguration diff --git a/cloud/NodeInit.py b/cloud/NodeInit.py index a7abf1e..131f10e 100644 --- a/cloud/NodeInit.py +++ b/cloud/NodeInit.py @@ -10,7 +10,7 @@ class NodeInit(object): self.autoupdaterBranch = self.__getBranch__() self.isOnline = self.__jsonObject__['nodeinfo']['isOnline'] self.publicIPv6Addresses = self.__getPublicAddresses__() - self.domID = self.__getSiteCode__() + self.domName = self.__getSiteCode__() def __getInterfaces__(self): try: @@ -31,19 +31,22 @@ class NodeInit(object): return None def __getGeo__(self): - geo = {} - if 'location' in self.__jsonObject__['nodeinfo'] and 'latitude' in self.__jsonObject__['nodeinfo']['location'] and 'longitude' in self.__jsonObject__['nodeinfo']['location']: - geo['lat'] = self.__jsonObject__['nodeinfo']['location']['latitude'] - geo['lon'] = self.__jsonObject__['nodeinfo']['location']['longitude'] - return geo - return None + try: + return { + 'lat' : self.__jsonObject__['nodeinfo']['location']['latitude'], + 'lon' : self.__jsonObject__['nodeinfo']['location']['longitude'] + } + except: + return None def __getPublicAddresses__(self): addresses = [] - if 'addresses' in self.__jsonObject__['nodeinfo']['network']: + try: for address in self.__jsonObject__['nodeinfo']['network']['addresses']: if not address.startswith('fe80'): addresses.append(address) + except: + pass return addresses def __getSiteCode__(self): diff --git a/generator/Filter.py b/generator/Filter.py index fad49c6..c141a9e 100644 --- a/generator/Filter.py +++ b/generator/Filter.py @@ -3,7 +3,7 @@ class Filter(object): self.__args__ = args self.__filters__ = self.__getFilters() - def filterLocalGraphs(self, localGraphs): + def filterLocalGraphs(self, domain, localGraphs): filteredGraphs = [] for localGraph in localGraphs: if localGraph.isAutoupdaterEnabledOnAllNodes() == False: @@ -21,8 +21,14 @@ class Filter(object): def __getFilters(self): return [] if self.__args__.filters == None else self.__args__.filters - def filterNodes(self, nodes): + def filterNodes(self, domain, nodes): filteredNodes = [] for node in nodes: + if 'domain_transitions_only' in self.__filters__: + try: + if domain.name == node.domName: + continue + except: + pass filteredNodes.append(node) return filteredNodes diff --git a/generator/NginxConfGen.py b/generator/NginxConfGen.py index 583ac4d..aedc895 100644 --- a/generator/NginxConfGen.py +++ b/generator/NginxConfGen.py @@ -17,9 +17,9 @@ class NginxConfGen(object): def __genDomain__(self, domain): nodes = {} - for localGraph in self.__filter__.filterLocalGraphs(domain.localGraphs): + for localGraph in self.__filter__.filterLocalGraphs(domain, domain.localGraphs): try: - for node in self.__filter__.filterNodes(localGraph.getNodesWithNoDependencies()): + for node in self.__filter__.filterNodes(domain, localGraph.getNodesWithNoDependencies()): nodes[node.nodeID] = { 'hostname' : node.hostname, 'ipv6_addresses' : node.publicIPv6Addresses From 07223d6cb678a44dd2dc8b263d5c3a1849a64a53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Sun, 24 Sep 2017 23:04:24 +0200 Subject: [PATCH 09/10] Updated parser for shapefiles. geometry collections can be used now, too. 
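A minimal sketch (not part of the diff) of the two GeoJSON layouts the parser accepts after this change, a FeatureCollection carrying a features list and a bare GeometryCollection carrying a geometries list, both routed through shapely's shape(); the coordinates are made up:

```
from shapely.geometry import shape

feature_collection = {
    'type': 'FeatureCollection',
    'features': [{'type': 'Feature', 'properties': {},
                  'geometry': {'type': 'Point', 'coordinates': [7.63, 51.96]}}],
}
geometry_collection = {
    'type': 'GeometryCollection',
    'geometries': [{'type': 'Point', 'coordinates': [7.63, 51.96]}],
}

for data in (feature_collection, geometry_collection):
    if 'features' in data:
        shapes = [shape(f['geometry']) for f in data['features'] if f['geometry']]
    elif 'geometries' in data:
        shapes = [shape(g) for g in data['geometries']]
    print(shapes)  # one Point geometry in both cases
```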
--- parser/ShapesParser.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/parser/ShapesParser.py b/parser/ShapesParser.py index a0748b4..080468f 100644 --- a/parser/ShapesParser.py +++ b/parser/ShapesParser.py @@ -7,6 +7,11 @@ class ShapesParser(JsonParser): def __createShapes__(self): shapes = [] - for feature in self.__jsonData__['features']: - shapes.append(shape(feature['geometry'])) + if 'features' in self.__jsonData__: + for feature in self.__jsonData__['features']: + shapes.append(shape(feature['geometry'])) + elif 'geometries' in self.__jsonData__: + for geometry in self.__jsonData__['geometries']: + shapes.append(shape(geometry)) + return shapes From 95b3b20a43a90b468fd595aff0a97ae47bf16f00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Simon=20W=C3=BCllhorst?= Date: Sun, 24 Sep 2017 23:20:42 +0200 Subject: [PATCH 10/10] Updated shapes parser: ignore features without geometry --- parser/ShapesParser.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/parser/ShapesParser.py b/parser/ShapesParser.py index 080468f..d9191ae 100644 --- a/parser/ShapesParser.py +++ b/parser/ShapesParser.py @@ -9,7 +9,8 @@ class ShapesParser(JsonParser): shapes = [] if 'features' in self.__jsonData__: for feature in self.__jsonData__['features']: - shapes.append(shape(feature['geometry'])) + if feature['geometry']: + shapes.append(shape(feature['geometry'])) elif 'geometries' in self.__jsonData__: for geometry in self.__jsonData__['geometries']: shapes.append(shape(geometry))
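For completeness, a hedged example invocation combining the options added in this series; the target names and filter selection are placeholders, and -r, -s and -o simply repeat their defaults:

```
./NodeHierarchy.py \
    -r https://karte.freifunk-muensterland.de/data/raw.json \
    -s https://freifunk-muensterland.de/md-fw-dl/shapes/ \
    -t domaene01 domaene02 \
    -sttp "ffmsd,domaene" \
    -f no_lan domain_transitions_only \
    -o ./webserver-configuration
```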