Initial commit of version 2
commit ff7105eedc
7 .gitignore vendored Normal file
@@ -0,0 +1,7 @@
.project
.pydevproject
./webserver-configuration
*.pyc
*.pyo
21 LICENSE.txt Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2016 Simon Wüllhorst

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
78 NodeHierarchy.py Executable file
@@ -0,0 +1,78 @@
#!/usr/bin/python3
import argparse
from parser.NodesParser import NodesParser
from parser.GraphParser import GraphParser
from cloud.Node import Node
from cloud.Link import Link
from cloud.GlobalGraph import GlobalGraph
from parser.ShapesParser import ShapesParser
from cloud.Domaene import Domaene
from generator.NginxConfGen import NginxConfGen

class NodeHierarchy(object):
    def __init__(self):
        self.__args__ = self.__parseArguments__()
        self.__nodesJson__ = NodesParser(self.__args__.json_path)
        self.__graphJson__ = GraphParser(self.__args__.json_path)
        self.__shapesJson__ = self.__parseShapes__()
        self.nodes = self.__createNodeObjects__()
        self.links = self.__createLinkObjects__()
        self.globalGraph = self.__createGlobalGraph__()
        self.domains = self.__createDomains__()
        self.nginxConf = NginxConfGen(self.domains, self.__args__)
        self.nginxConf.writeNginxConfigFile()

    def __parseShapes__(self):
        shapesJson = {}
        for targetName in self.__args__.targets:
            shapesJson[targetName] = ShapesParser(self.__args__.shapes_path, targetName)
        return shapesJson

    def __createDomains__(self):
        domains = {}
        for k, v in self.__shapesJson__.items():
            print('Create Domain object #', len(domains), '\r', end = '')
            domains[k] = Domaene(k, v, self.globalGraph)
        print('')
        return domains

    def __createNodeObjects__(self):
        nodes = {}
        for nodeID, nodeValue in self.__nodesJson__.nodes.items():
            print('Create Node object #', len(nodes), '\r', end = '')
            nodes[nodeID] = Node(nodeValue)
        print('')
        return nodes

    def __createLinkObjects__(self):
        links = []
        for link in self.__graphJson__.links:
            # A link endpoint may reference a node that is missing from nodes.json.
            try:
                srcNode = self.nodes[link['source']['node_id']]
            except (KeyError, TypeError):
                srcNode = None
            try:
                dstNode = self.nodes[link['target']['node_id']]
            except (KeyError, TypeError):
                dstNode = None

            print('Create Link object #', len(links), '\r', end = '')
            links.append(Link(link, srcNode, dstNode))
        print('')
        return links

    def __createGlobalGraph__(self):
        return GlobalGraph(self.nodes, self.links, self.__args__.debug)

    def __parseArguments__(self):
        parser = argparse.ArgumentParser(description='This script generates a hierarchical nodes list for node migration using the nginx geo feature.')
        parser.add_argument('-j', '--json-path', required=False, default='https://service.freifunk-muensterland.de/maps/data/', help='Path of nodes.json and graph.json (can be local folder or remote URL).')
        parser.add_argument('-s', '--shapes-path', required=False, default='https://freifunk-muensterland.de/md-fw-dl/shapes/', help='Path of shapefiles (can be local folder or remote URL).')
        parser.add_argument('-t', '--targets', nargs='+', required=True, help='List of targets which should be processed. Example: -t citya cityb ...')
        parser.add_argument('-o', '--out-file', required=False, help='Filename where the generated output should be stored.', default='./webserver-configuration')
        parser.add_argument('-v', '--debug', required=False, action='store_true', help='Enable debugging output.')
        parser.add_argument('-f', '--filters', nargs='*', required=False, help='Filter out nodes and local clouds based on filter rules.')

        return parser.parse_args()

NodeHierarchy()
105 README.md Normal file
@@ -0,0 +1,105 @@
# Node Hierarchy
Based on a graph.json and nodes.json from the [Meshviewer](https://github.com/ffnord/meshviewer/) and on (multi-)polygons (in [geojson](http://geojson.org/) format) for the individual target domains, this tool generates an [nginx](http://nginx.org/) configuration file (using the [geo module](http://nginx.org/en/docs/http/ngx_http_geo_module.html)) so that nodes can be migrated in the correct order.


## Approach
The tool splits the (global) graph into many local graphs, i.e. the sets of nodes (and links) that form a mesh on site. Based on the shapefiles, each local graph is assigned to one of the target domains (the geo positions of its nodes are "averaged"). The tool then checks which nodes have no dependencies, i.e. no other node has to route through them to reach a gateway server, and writes these nodes into the configuration. Once they have been updated, the dependency on the node that was previously needed to reach the gateway disappears.
The tool therefore has to be run regularly, and its output has to be merged into the nginx configuration each time.
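
For example, such a periodic run could be wired up with a cron job that regenerates the geo switches and then reloads nginx. This is only a minimal sketch: the repository path, the target names and the output path are assumptions, not part of this repository.

```
# /etc/cron.d/node-hierarchy (hypothetical schedule)
# Regenerate the geo switches every 15 minutes and reload nginx afterwards.
*/15 * * * * root cd /opt/node-hierarchy && ./NodeHierarchy.py -t domaene01 domaene02 -o /etc/nginx/ffms-geo.conf && nginx -s reload
```
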
## Dependencies
The tool requires **Python >= 3**.
The following (Python) dependencies are needed:

- [shapely](https://pypi.python.org/pypi/Shapely)

They can be installed via [pip](https://pypi.python.org/pypi/pip) as follows:

```
pip3 install shapely
```


## Usage
The tool is configured exclusively through command-line arguments.

The help output is:

```
$ ./NodeHierarchy.py --help
usage: NodeHierarchy.py [-h] [-j JSON_PATH] [-s SHAPES_PATH] -t TARGETS
                        [TARGETS ...] [-o OUT_FILE] [-v]
                        [-f [FILTERS [FILTERS ...]]]

This script generates a hierarchical nodes list for node migration using the
nginx geo feature.

optional arguments:
  -h, --help            show this help message and exit
  -j JSON_PATH, --json-path JSON_PATH
                        Path of nodes.json and graph.json (can be local folder
                        or remote URL).
  -s SHAPES_PATH, --shapes-path SHAPES_PATH
                        Path of shapefiles (can be local folder or remote
                        URL).
  -t TARGETS [TARGETS ...], --targets TARGETS [TARGETS ...]
                        List of targets which should be processed. Example: -t
                        citya cityb ...
  -o OUT_FILE, --out-file OUT_FILE
                        Filename where the generated output should be stored.
  -v, --debug           Enable debugging output.
  -f [FILTERS [FILTERS ...]], --filters [FILTERS [FILTERS ...]]
                        Filter out nodes and local clouds based on filter
                        rules.
```


### Notes

- ``--targets`` sets the names of the targets (target domains). The geo switch in the nginx configuration will carry the same name.
- ``--json-path`` points to the data directory of your Meshviewer. Default: ``https://service.freifunk-muensterland.de/maps/data/``
- ``--shapes-path`` is the directory that holds the shapefiles of the individual target domains. Default: ``https://freifunk-muensterland.de/md-fw-dl/shapes/``
  - *Note:* Files named after the target names are expected in this directory.
  - *Example:* With ``--targets domaene01 domaene02``, the files ``domaene01.geojson`` and ``domaene02.geojson`` are expected.
  - If you need more flexibility here, open an issue and I will add something.
- ``--filters`` See the *Filters* section.

The rest is self-explanatory.

### Filters
By default, all nodes that are offline are filtered out.
In addition, all local clouds that contain at least one node with a disabled autoupdater are filtered out.

Further filtering can be enabled via the ``--filters`` argument.

The following filters are currently implemented (more will follow):

- ``exclude_clouds_with_lan_links`` or ``no_lan``: filters out all local clouds that contain at least one mesh-on-LAN link (see the example below)
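
A possible invocation that enables this filter could look like this (the target names and output path are only placeholders):

```
./NodeHierarchy.py -t domaene01 domaene02 -f no_lan -o ./webserver-configuration
```
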
## Nginx configuration
The tool only generates configuration code containing switches based on IPv6 addresses. You still have to define yourself what effect these switches should have. Typically you want to perform a rewrite based on a switch.

Example:

```
if ($domaene01) {
    rewrite ^/site-ffms/(.*)$ /domaene01/$1;
}
```

*Note:* ``$domaene01`` is the generated switch; it corresponds to ``--targets domaene01``.
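
To make the generated switches available to such a rewrite, the output file has to be included at the `http` level of your nginx configuration. A minimal sketch, assuming the tool writes its output to ``/etc/nginx/ffms-geo.conf`` and the firmware is served under ``/site-ffms/`` (both are assumptions, not part of this repository):

```
http {
    # geo blocks generated by NodeHierarchy.py (one "geo $<target> { ... }" block per target)
    include /etc/nginx/ffms-geo.conf;

    server {
        listen [::]:80;

        location /site-ffms/ {
            # $domaene01 is 1 for requests from the node addresses listed in the generated block
            if ($domaene01) {
                rewrite ^/site-ffms/(.*)$ /domaene01/$1;
            }
        }
    }
}
```
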
## Known issues
If the source domain is an L2TP domain, the tool currently only works if [alfred](https://github.com/ffnord/ffnord-alfred-announce) is running on all gateway servers.

*Note:* If the ``nodes.json`` and ``graph.json`` contain several domains, some of which are L2TP domains (even if this does not affect the area of your target domain), this can have a very negative impact on the runtime (> 30 seconds).


## License
This tool is released under the MIT license.
If you have a problem with this licensing, just contact me. ;)

2016 - Simon Wüllhorst
23 cloud/Domaene.py Normal file
@@ -0,0 +1,23 @@
from cloud.LocalGraph import LocalGraph

class Domaene(object):
    def __init__(self, name, shapes, globalGraph):
        self.name = name
        self.__shapes__ = shapes
        self.__globalGraph__ = globalGraph
        self.localGraphs = self.__getLocalGraphsInDomaene__()

    def __getLocalGraphsInDomaene__(self):
        graphs = []
        for localGraph in self.__globalGraph__.localGraphs:
            if self.isLocalGraphInDomaene(localGraph) == True:
                graphs.append(localGraph)
        return graphs

    def isLocalGraphInDomaene(self, localGraph):
        return self.isPointInDomaene(localGraph.getGeoCenterOfNodeCloud())

    def isPointInDomaene(self, point):
        for shape in self.__shapes__.shapes:
            if point.within(shape):
                return True
        return False
46 cloud/GlobalGraph.py Normal file
@@ -0,0 +1,46 @@
from cloud.LocalGraph import LocalGraph
from cloud.Graph import Graph
from exceptions.HieraException import HieraException

class GlobalGraph(Graph):
    def __init__(self, nodes, links, debugPrint = False):
        super().__init__(nodes, links)
        self.__enableDebugPrinting__ = debugPrint
        self.localGraphs = self.__buildLocalGraphs__()

        if self.__enableDebugPrinting__:
            self.__debugPrint__()

    def __buildLocalGraphs__(self):
        nodeIDs = self.getListOfNodeIDsOnline()
        localGraphs = []
        while len(nodeIDs) > 0:
            connectedNodes = self.__getConnectedNodes__(nodeIDs[0])
            try:
                localGraphs.append(self.__createLocalCloudByNodesList__(connectedNodes))
                print('Create LocalGraph object #', len(localGraphs), '\r', end = '')
            except HieraException:
                print('Was not able to add local cloud, because no VPN link was found.')
            nodeIDs = [x for x in nodeIDs if x not in connectedNodes]
        print('')
        return localGraphs

    def __getConnectedNodes__(self, nodeID, trace = []):
        neighNodeIDs = self.getNeighbourNodeIDsForNodeID(nodeID)
        trace_new = trace[:] + [x for x in neighNodeIDs if x not in trace]
        for neighNodeID in neighNodeIDs:
            if neighNodeID not in trace:
                trace_new = trace_new + [x for x in self.__getConnectedNodes__(neighNodeID, trace_new) if x not in trace_new]
        return trace_new

    def __createLocalCloudByNodesList__(self, nodesIDList):
        nodes = {}
        links = []
        for nodeID in nodesIDList:
            nodes[nodeID] = self.__nodes__[nodeID]
            links = links + [x for x in self.getLinksByNodeID(nodeID) if x not in links]
        return LocalGraph(nodes, links, self.__enableDebugPrinting__)

    def __debugPrint__(self):
        for localGraph in self.localGraphs:
            for node in localGraph.getNodesWithNoDependencies():
                print(node.hostname, node.publicIPv6Addresses)
45 cloud/Graph.py Normal file
@@ -0,0 +1,45 @@
class Graph(object):
    def __init__(self, nodes, links):
        self.__nodes__ = nodes
        self.__links__ = links

    def getListOfNodeIDs(self, getGateways = False):
        nodeIDs = []
        for k, v in self.__nodes__.items():
            if getGateways == True or v.isGateway == False:
                nodeIDs.append(k)
        return nodeIDs

    def getListOfNodeIDsOnline(self, getGateways = False):
        onlineNodeIDs = []
        nodeIDs = self.getListOfNodeIDs(getGateways)
        for nodeID in nodeIDs:
            if self.__nodes__[nodeID].isOnline == True:
                onlineNodeIDs.append(nodeID)
        return onlineNodeIDs

    def getNeighbourNodeIDsForNodeID(self, nodeID):
        neighNodeIDs = [nodeID]
        endpoints = []
        for link in self.__links__:
            if link.isVpn == False:
                endpoints = link.getEndpointNodeIDs(getGateways = False)
                if nodeID in endpoints:
                    neighNodeIDs = neighNodeIDs + [x for x in endpoints if x not in neighNodeIDs]
        return neighNodeIDs

    def getLinksByNodeID(self, nodeID):
        links = []
        for link in self.__links__:
            endpoints = link.getEndpointNodeIDs()
            if nodeID in endpoints:
                if link not in links:
                    links.append(link)
        return links

    def getLinkCount(self):
        return len(self.__links__)

    def getNodesCount(self):
        return len(self.__nodes__)
52 cloud/Link.py Normal file
@@ -0,0 +1,52 @@
class Link(object):
    def __init__(self, LinkJsonObject, srcNode, dstNode):
        self.__jsonObject__ = LinkJsonObject
        self.__srcNode__ = srcNode
        self.__dstNode__ = dstNode
        self.linkType = self.__getLinkType__()
        self.isVpn = self.__getLinkVpnState__()


    def __getLinkType__(self):
        if self.__srcNode__ != None:
            for k, v in self.__srcNode__.interfaces.items():
                if self.__jsonObject__['source']['interface_mac'] in v:
                    return k
        if self.__dstNode__ != None:
            for k, v in self.__dstNode__.interfaces.items():
                if self.__jsonObject__['target']['interface_mac'] in v:
                    return k
        return 'unknown'

    def __getLinkVpnState__(self):
        if self.__jsonObject__['vpn'] == True:
            return True
        for node in self.getEndpointNodes(getGateways = True):
            if node.isGateway == True:
                return True
        return False

    def getEndpointNodes(self, getGateways = False):
        nodes = []
        if self.__srcNode__ != None:
            if getGateways == True or self.__srcNode__.isGateway == False:
                nodes.append(self.__srcNode__)
        if self.__dstNode__ != None:
            if getGateways == True or self.__dstNode__.isGateway == False:
                nodes.append(self.__dstNode__)
        return nodes

    def getEndpointNodeIDs(self, getGateways = True):
        nodeIDs = []
        for node in self.getEndpointNodes(getGateways):
            nodeIDs.append(node.nodeID)
        return nodeIDs

    def isNodeIDinLink(self, nodeID):
        for endpoint in self.getEndpointNodes():
            if endpoint.nodeID == nodeID:
                return True
        return False

    def isNodeInLink(self, node):
        return self.isNodeIDinLink(node.nodeID)
118 cloud/LocalGraph.py Normal file
@@ -0,0 +1,118 @@
from cloud.Graph import Graph
from shapely.geometry import MultiPoint
from exceptions.HieraException import HieraException

class LocalGraph(Graph):
    def __init__(self, nodes, links, debugPrint = False):
        super().__init__(nodes, links)
        self.__enableDebugPrinting__ = debugPrint
        if self.__enableDebugPrinting__:
            self.__debugPrint__()

    def getNodesWithVpn(self):
        nodes = []
        for link in self.__links__:
            if link.isVpn == True:
                nodes = nodes + [x for x in link.getEndpointNodes() if x not in nodes]
        return nodes

    def getCountOfNodesWithVpn(self):
        return len(self.getNodesWithVpn())

    def getDeptOfNode(self, node):
        return self.getDeptOfNodeByID(node.nodeID, [])

    def getDeptOfNodeByID(self, nodeID, trace):
        if self.getCountOfNodesWithVpn() == 0:
            raise HieraException('No VPN Node in LocalCloud was found!')
        new_trace = trace[:]
        new_trace.append(nodeID)
        lowestDepth = None
        currentDept = None
        links = self.getLinksByNodeID(nodeID)
        endpoints = []
        for link in links:
            endpoints = endpoints + [x for x in link.getEndpointNodeIDs() if x not in endpoints]
            if link.isVpn == True:
                return 0
        for childNodeID in endpoints:
            if childNodeID not in new_trace:
                currentDept = self.getDeptOfNodeByID(childNodeID, new_trace + endpoints)
                if currentDept != None:
                    currentDept = currentDept + 1
                    if lowestDepth == None or currentDept < lowestDepth:
                        lowestDepth = currentDept
        return lowestDepth

    def getMaxDepth(self):
        maxDepth = 0
        for k, v in self.__nodes__.items():
            nodeDepth = self.getDeptOfNode(v)
            maxDepth = nodeDepth if nodeDepth > maxDepth else maxDepth
        return maxDepth

    def getAllNodesWithDepthEquals(self, depth):
        nodes = []
        for k, v in self.__nodes__.items():
            if self.getDeptOfNode(v) == depth:
                nodes.append(v)
        return nodes

    def getNodesWithNoDependencies(self):
        #TODO: Implement smarter selection
        return self.getAllNodesWithDepthEquals(self.getMaxDepth())

    def isAutoupdaterEnabledOnAllNodes(self):
        for k, v in self.__nodes__.items():
            if v.isAutoupdaterEnabled == False:
                return False
        return True

    def getLanLinksInCloud(self):
        links = []
        for link in self.__links__:
            if link.linkType == 'other' and link.isVpn == False:
                links.append(link)
        return links

    def areLanLinksInCloud(self):
        for link in self.__links__:
            if link.linkType == 'other' and link.isVpn == False:
                return True
        return False

    def getBranchesThatExistsInCloud(self):
        branches = []
        for k, v in self.__nodes__.items():
            if v.autoupdaterBranch not in branches:
                branches.append(v.autoupdaterBranch)
        return branches

    def getGeoCenterOfNodeCloud(self):
        geoPoints = []
        for k, v in self.__nodes__.items():
            if v.geo != None:
                geoPoints.append((v.geo['lon'], v.geo['lat']))
        return MultiPoint(geoPoints).representative_point()

    def __debugPrint__(self):
        print('nodes:')
        for k, v in self.__nodes__.items():
            print('>', v.hostname)

        print('nodes with vpn:')
        for node in self.getNodesWithVpn():
            print('>', node.hostname)

        print('nodes with no dependencies:')
        for node in self.getNodesWithNoDependencies():
            print('>', node.hostname)

        print('maxdepth:', self.getMaxDepth())
        print('isAutoupdaterEnabledOnAllNodes:', self.isAutoupdaterEnabledOnAllNodes())
        print('areLanLinksInCloud:', self.areLanLinksInCloud())
        print('BranchesThatExistsInCloud:', self.getBranchesThatExistsInCloud())
        print('lan links in cloud:')
        for link in self.getLanLinksInCloud():
            if link.__srcNode__ != None and link.__dstNode__ != None:
                print(' ', link.__srcNode__.hostname, '<--->', link.__dstNode__.hostname)
        print('=====')
4 cloud/Node.py Normal file
@@ -0,0 +1,4 @@
from cloud.NodeInit import NodeInit

class Node(NodeInit):
    def __init__(self, NodeJsonObject):
        super().__init__(NodeJsonObject)
54 cloud/NodeInit.py Normal file
@@ -0,0 +1,54 @@
class NodeInit(object):
    def __init__(self, NodeJsonObject):
        self.__jsonObject__ = NodeJsonObject
        self.nodeID = self.__jsonObject__['nodeinfo']['node_id']
        self.interfaces = self.__getInterfaces__()
        self.hostname = self.__jsonObject__['nodeinfo']['hostname']
        self.isGateway = self.__jsonObject__['flags']['gateway']
        self.geo = self.__getGeo__()
        self.isAutoupdaterEnabled = self.__getAutoupdaterStatus__()
        self.autoupdaterBranch = self.__getBranch__()
        self.isOnline = self.__jsonObject__['flags']['online']
        self.publicIPv6Addresses = self.__getPublicAddresses__()
        self.domID = self.__getSiteCode__()

    def __getInterfaces__(self):
        try:
            return self.__jsonObject__['nodeinfo']['network']['mesh']['bat0']['interfaces']
        except (KeyError, TypeError):
            # Nodes without batman-adv mesh information provide no interface list.
            return {}

    def __getAutoupdaterStatus__(self):
        if 'autoupdater' in self.__jsonObject__['nodeinfo']['software']:
            return self.__jsonObject__['nodeinfo']['software']['autoupdater']['enabled']
        else:
            return False

    def __getBranch__(self):
        if 'autoupdater' in self.__jsonObject__['nodeinfo']['software']:
            return self.__jsonObject__['nodeinfo']['software']['autoupdater']['branch']
        else:
            return None

    def __getGeo__(self):
        geo = {}
        if 'location' in self.__jsonObject__['nodeinfo'] and 'latitude' in self.__jsonObject__['nodeinfo']['location'] and 'longitude' in self.__jsonObject__['nodeinfo']['location']:
            geo['lat'] = self.__jsonObject__['nodeinfo']['location']['latitude']
            geo['lon'] = self.__jsonObject__['nodeinfo']['location']['longitude']
            return geo
        return None

    def __getPublicAddresses__(self):
        addresses = []
        if 'addresses' in self.__jsonObject__['nodeinfo']['network']:
            for address in self.__jsonObject__['nodeinfo']['network']['addresses']:
                #TODO: make more generic
                if address.startswith('2a03'):
                    addresses.append(address)
        return addresses

    def __getSiteCode__(self):
        try:
            return self.__jsonObject__['nodeinfo']['system']['site_code']
        except (KeyError, TypeError):
            return None
0 cloud/__init__.py Normal file
3 exceptions/HieraException.py Normal file
@@ -0,0 +1,3 @@
class HieraException(Exception):
    def __init__(self, message = ''):
        super().__init__(message)
0 exceptions/__init__.py Normal file
28 generator/Filter.py Normal file
@@ -0,0 +1,28 @@
class Filter(object):
    def __init__(self, args):
        self.__args__ = args
        self.__filters__ = self.__getFilters()

    def filterLocalGraphs(self, localGraphs):
        filteredGraphs = []
        for localGraph in localGraphs:
            if localGraph.isAutoupdaterEnabledOnAllNodes() == False:
                continue
            if self.__allowCloudsWithLanLinks__() == False and len(localGraph.getLanLinksInCloud()) > 0:
                continue
            filteredGraphs.append(localGraph)
        return filteredGraphs

    def __allowCloudsWithLanLinks__(self):
        if 'exclude_clouds_with_lan_links' in self.__filters__ or 'no_lan' in self.__filters__:
            return False
        return True

    def __getFilters(self):
        return [] if self.__args__.filters == None else self.__args__.filters

    def filterNodes(self, nodes):
        filteredNodes = []
        for node in nodes:
            filteredNodes.append(node)
        return filteredNodes
44 generator/NginxConfGen.py Normal file
@@ -0,0 +1,44 @@
from exceptions.HieraException import HieraException
from generator.Filter import Filter

class NginxConfGen(object):
    def __init__(self, domains, args):
        self.__domains__ = domains
        self.__args__ = args
        self.__filter__ = Filter(self.__args__)
        self.__generatedDomains__ = self.__genDomains__()

    def __genDomains__(self):
        domains = {}
        for k, v in self.__domains__.items():
            domains[k] = self.__genDomain__(v)
        return domains

    def __genDomain__(self, domain):
        nodes = {}
        for localGraph in self.__filter__.filterLocalGraphs(domain.localGraphs):
            try:
                for node in self.__filter__.filterNodes(localGraph.getNodesWithNoDependencies()):
                    nodes[node.nodeID] = {
                        'hostname' : node.hostname,
                        'ipv6_addresses' : node.publicIPv6Addresses
                    }
            except HieraException:
                print('Was not able to add local cloud, because no VPN link was found.')

        return nodes

    def writeNginxConfigFile(self):
        f = open(self.__args__.out_file, 'w')
        f.write(self.__genNginxConfigFileContent__())
        f.close()

    def __genNginxConfigFileContent__(self):
        content = ''
        for k, v in self.__generatedDomains__.items():
            content += 'geo $' + k + ' {\n    default 0;'
            for ksub, vsub in v.items():
                for address in vsub['ipv6_addresses']:
                    content += '\n    ' + address + ' 1; #' + vsub['hostname']
            content += '\n}\n'
        return content
0 generator/__init__.py Normal file
34 parser/GraphParser.py Normal file
@@ -0,0 +1,34 @@
from parser.JsonParser import JsonParser


class GraphParser(JsonParser):
    def __init__(self, filePath):
        super().__init__(filePath.rstrip('/') + '/graph.json')
        self.links = self.__prettyFormGraph__()

    def __prettyFormGraph__(self):
        links = []
        for link in self.__jsonData__['batadv']['links']:
            prettyLink = link
            prettyLink['target'] = self.__getEndpointData__(self.__jsonData__['batadv']['nodes'][link['target']])
            prettyLink['source'] = self.__getEndpointData__(self.__jsonData__['batadv']['nodes'][link['source']])
            links.append(prettyLink)
        return links

    def __getEndpointData__(self, endpoint):
        data = {}
        if endpoint:
            if 'id' in endpoint:
                data['interface_mac'] = endpoint['id']
            if 'node_id' in endpoint:
                data['node_id'] = endpoint['node_id']
            return data
        else:
            return None


    def getLinksForNodeID(self, nodeID):
        links = []
        for link in self.links:
            if link['target']['node_id'] == nodeID or link['source']['node_id'] == nodeID:
                links.append(link)
        return links
25 parser/JsonParser.py Normal file
@@ -0,0 +1,25 @@
import json, urllib.request
from exceptions.HieraException import HieraException


class JsonParser(object):
    def __init__(self, fileName):
        self.printStatus = True
        self.__jsonData__ = self.__getFile__(fileName)

    def __getFile__(self, fileName):
        if fileName.startswith('https://') or fileName.startswith('http://'):
            if self.printStatus:
                print('Download', fileName.rsplit('/', 1)[1], 'from URL:', fileName)
            resource = urllib.request.urlopen(fileName)
        else:
            if self.printStatus:
                print('Open', fileName.rsplit('/', 1)[1], 'from file:', fileName)
            # Open local files in binary mode so the decode() below works like the URL case.
            resource = open(fileName, 'rb')
        try:
            data = json.loads(resource.read().decode('utf-8'))
        except ValueError:
            raise HieraException('Error while parsing a json file (perhaps misformed file): ' + fileName)
        finally:
            resource.close()

        return data
20 parser/NodesParser.py Normal file
@@ -0,0 +1,20 @@
from parser.JsonParser import JsonParser


class NodesParser(JsonParser):
    def __init__(self, filePath):
        super().__init__(filePath.rstrip('/') + '/nodes.json')
        self.nodes = self.__jsonData__['nodes']

    def getNodeByID(self, nodeID):
        if nodeID in self.nodes:
            return self.nodes[nodeID]
        else:
            return None

    def getListOfNodeIDs(self):
        IDlist = []
        for k, v in self.nodes.items():
            IDlist.append(k)
        return IDlist
12 parser/ShapesParser.py Normal file
@@ -0,0 +1,12 @@
from parser.JsonParser import JsonParser
from shapely.geometry import shape

class ShapesParser(JsonParser):
    def __init__(self, filePath, targetName):
        super().__init__(filePath.rstrip('/') + '/' + targetName + '.geojson')
        self.shapes = self.__createShapes__()

    def __createShapes__(self):
        shapes = []
        for feature in self.__jsonData__['features']:
            shapes.append(shape(feature['geometry']))
        return shapes
0 parser/__init__.py Normal file
63 webserver-configuration Normal file
@@ -0,0 +1,63 @@
geo $domaene21 {
    default 0;
    2a03:2260:115:400:62e3:27ff:fe81:fd08 1; #Mesum-0009-Schuhhaus-Franke
    2a03:2260:115:400:c66e:1fff:fefe:c0c2 1; #R.h.eine_Raeder
    2a03:2260:115:400:62e3:27ff:fe82:156 1; #Steinfurt-Elte-Britta-Buelter-0018
    2a03:2260:115:400:32b5:c2ff:fe82:8b2b 1; #Mesum-0025
    2a03:2260:115:400:c6e9:84ff:feb5:f052 1; #Steinfurt-Elte-0004
    2a03:2260:115:400:62e3:27ff:fe59:e0ae 1; #Steinfurt-Elte-Ferienhaus-Wueller-0008
    2a03:2260:115:400:c66e:1fff:fe2e:14fe 1; #klick.news
    2a03:2260:115:400:62e3:27ff:fe57:4a4a 1; #Fahrschule_Hinzmann
    2a03:2260:115:400:62e3:27ff:fe9f:5c82 1; #freifunk-RheineTFH
    2a03:2260:115:400:62e3:27ff:fe82:194 1; #Steinfurt-Elte-Jays-Pizza-0020
    2a03:2260:115:400:c6e9:84ff:fe5a:fefc 1; #www.ev-jugendhilfe.de-13
    2a03:2260:115:400:62e3:27ff:fe59:d494 1; #Elte-Landgasthaus-Hotel-Eggert-0013
    2a03:2260:115:400:62e3:27ff:fe5f:fe68 1; #Mesum-0011-Blumenhaus-Behnen
    2a03:2260:115:400:32b5:c2ff:fe0e:aa6c 1; #Freifunk_NetZwerg
    2a03:2260:115:400:62e3:27ff:fe60:50c2 1; #Steinfurt-Elte-Hellhuegel
    2a03:2260:115:400:62e3:27ff:fe2a:573c 1; #Mesum-0020-LVM-Schipp
    2a03:2260:115:400:62e3:27ff:fe2a:55ea 1; #Mesum-0018-Haarstudio-Baglivi
    2a03:2260:115:400:32b5:c2ff:feb0:42e2 1; #Connys-Koeppken
    2a03:2260:115:400:62e3:27ff:fe2a:6552 1; #Mesum-0021-Johannes-Apotheke
    2a03:2260:115:400:f6f2:6dff:fe3e:7118 1; #Restaurant_Kreta
    2a03:2260:115:400:c24a:ff:fe8c:726c 1; #Steinfurt-Mesum-0002
    2a03:2260:115:400:62e3:27ff:fe60:4f7c 1; #Mesum-0024
    2a03:2260:115:400:62e3:27ff:fe59:e38e 1; #Steinfurt-Elte-0015
    2a03:2260:115:400:62e3:27ff:fe22:ccd2 1; #Steinfurt0271-Pizzeria-Valentino
    2a03:2260:115:400:f6f2:6dff:fed5:3926 1; #Steinfurt0270-Trattoria-La-Rustica
    2a03:2260:115:400:62e3:27ff:fe82:130 1; #Steinfurt-Elte-Ferienwohnung-Waldblick-0021
    2a03:2260:115:400:32b5:c2ff:fe81:2320 1; #Steinfurt-Elte-0001
    2a03:2260:115:400:62e3:27ff:fe59:ff96 1; #Steinfurt-Mesum-0003
    2a03:2260:115:400:16cc:20ff:fecd:1086 1; #Steinfurt-Mesum-0005
    2a03:2260:115:400:c66e:1fff:fefe:4e6a 1; #Steinfurt-Elte-Bauernkate
    2a03:2260:115:400:32b5:c2ff:fed9:feb2 1; #Steinfurt0046
    2a03:2260:115:400:c66e:1fff:fe97:629a 1; #Bronco
    2a03:2260:115:400:32b5:c2ff:feb3:a7b6 1; #Eschendorf6
    2a03:2260:115:400:32b5:c2ff:feee:8b06 1; #Eschendorf_02
    2a03:2260:115:400:16cc:20ff:fe63:8374 1; #Hardy_Schmitz_Rheine_02
    2a03:2260:115:400:62e3:27ff:febd:fdcc 1; #Steinfurt-Mesum-0028-Gabriels-Gartenmarkt
    2a03:2260:115:400:f6f2:6dff:fe40:370a 1; #Steinfurt-Mesum-0031-Wilp
    2a03:2260:115:400:c24a:ff:fe84:8c2e 1; #Freifunk_Reitstall_Schulte_Mesum
    2a03:2260:115:400:62e3:27ff:fe2a:567c 1; #Mesum-0019-Fleischerei-Ende
    2a03:2260:115:400:62e3:27ff:fe57:6b40 1; #Freifunk-Elte-Wortmann
    2a03:2260:115:400:32b5:c2ff:fe81:2822 1; #Steinfurt-Elte-0003
    2a03:2260:115:400:32b5:c2ff:fe81:25ba 1; #Steinfurt-Elte-0002
    2a03:2260:115:400:62e3:27ff:fe5f:fece 1; #Steinfurt-Mesum-0001
    2a03:2260:115:400:62e3:27ff:fe59:ff00 1; #Steinfurt-Elte-0010
    2a03:2260:115:400:62e3:27ff:fe76:8850 1; #DieZauberfee
    2a03:2260:115:400:12fe:edff:feb7:49a0 1; #steinfurt-elte-ls1478
    2a03:2260:115:400:62e3:27ff:fece:f9e8 1; #BroncoRepeater
    2a03:2260:115:400:c6e9:84ff:fef0:7df4 1; #Best_Pizza_Doepihaus
    2a03:2260:115:400:62e3:27ff:fe59:d7f4 1; #Steinfurt-Elte-SGELTE-0018
    2a03:2260:115:400:62e3:27ff:fe5f:ff0c 1; #Steinfurt-Elte-0025
    2a03:2260:115:400:62e3:27ff:fe5a:314 1; #Steinfurt-Elte-Zum-Splenterkotten-0023
    2a03:2260:115:400:62e3:27ff:fe59:ff52 1; #Steinfurt-Elte-0012
    2a03:2260:115:400:f6f2:6dff:fe3e:7108 1; #Hueseck
    2a03:2260:115:400:62e3:27ff:fe59:ff6e 1; #Steinfurt-Elte-Fachwerkhofanlage-Poepping-0009
    2a03:2260:115:400:c66e:1fff:feaf:6002 1; #Mesum-0050
    2a03:2260:115:400:62e3:27ff:fe5f:ff06 1; #Mesum-0012-Provinzial-Huesing
    2a03:2260:115:400:62e3:27ff:fe60:512c 1; #Steinfurt-Elte-0026
    2a03:2260:115:400:16cc:20ff:fe63:809a 1; #Hardy_Schmitz_Rheine_03
    2a03:2260:115:400:c66e:1fff:fec9:e6de 1; #Freifunk-Schlifski
    2a03:2260:115:400:f6f2:6dff:fe3f:e2fe 1; #BS.NetworX.Rheine
}