Merge branch 'master' of ../tools

Simon Wüllhorst 2016-04-29 16:15:19 +02:00
commit a2fce2b217
12 changed files with 891 additions and 0 deletions

.gitignore vendored Normal file (+4 lines)

@@ -0,0 +1,4 @@
nodes_*
*.pyc
geo-cache/
contact/

README.md Normal file (+46 lines)

@@ -0,0 +1,46 @@
# Node Migration Tool
This tool generates nginx configuration files for migrating nodes from one domain to another. It takes into account which nodes depend on other nodes to reach the internet.
## Configuration
Domains (or similar targets) are defined in the ``targets`` file. This is a JSON file in which the area relations for the Nominatim geocoder are entered, as shown in the excerpt below.
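A minimal excerpt from the ``targets.json`` contained in this commit; each target maps to a list of Nominatim address attributes (such as ``town``, ``suburb``, or ``county``) that are matched against the reverse-geocoded node location:
```
{
    "domaene_02" : [
        {"county" : "Kreis Coesfeld"}
    ],
    "domaene_07" : [
        {"town" : "Telgte"}
    ]
}
```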
## Usage
A ``targets`` file must exist.
In the simplest case the program is then invoked as follows:
```
./node_hierarchy.py --all
```
If specific ``nodes.json`` and ``graph.json`` files are to be used, the data path can be specified as follows (it can be a local file path as well as an http or https URL):
```
./node_hierarchy.py --all --json-path https://service.freifunk-muensterland.de/maps/data/
```
Progress output is printed with the ``-p`` or ``--print-status`` switch:
```
./node_hierarchy.py --all -p
```
Processing can be limited to a selection of targets from the targets file with the ``-t`` or ``--targets`` switch:
```
./node_hierarchy.py -t domaene_01 domaene_02 --print-status
```
Further help is available via ``-h`` or ``--help``:
```
./node_hierarchy.py --help
```
### Input and Output
By default a file named ``targets.json`` is expected. If this file is to be loaded from another location, the ``--targets-file`` argument can be used:
```
./node_hierarchy.py --targets-file /root/targets.json
```
By default the generated nginx configuration files are written to the ``./webserver-configuration/`` directory. This can be changed with the ``--out-path`` switch:
```
./node_hierarchy.py --out-path /root/config/
```
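For reference, ``writeConfigFiles`` in ``domain_selector.py`` writes one file per target and hop level (named ``<target>_node_level<N>``), each containing an nginx ``geo`` block of roughly the following shape; the IPv6 address and hostname here are placeholders:
```
geo $switch {
 default 0;
 2a03:xxxx:xxxx::1 1; #example-node
}
```
The resulting ``$switch`` variable can then presumably be used in the nginx configuration to decide which response a node receives during migration.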

domain_selector.py Normal file (+93 lines)

@@ -0,0 +1,93 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:
import json, urllib, os, glob
from graph import Graph
from hieraException import HieraException
class DomainSelector:
def __init__(self, nodesFile, graphFile, dataPath = './', printStatus = False, targets = None, branch = 'stable'):
if not os.path.isdir(dataPath):
print "\033[91mError:\033[0m Output folder was not found or is not writable. Given path:", dataPath
raise HieraException
self.printStatus = printStatus
self.targets = targets
self.dataPath = dataPath.rstrip('/')
self.nodesData = self.__getFile__(nodesFile)
self.graphData = self.__getFile__(graphFile)
self.__prepareOutDir__()
self.graph = Graph(self.nodesData, self.graphData)
if self.targets == None:
self.writeConfigFiles(self.graph.nodes_list,"all")
self.writeDumpFile(self.graph.nodes_list,"all")
else:
nodes = {}
for k,v in self.targets.iteritems():
nodes = self.graph.getNodeCloudsIn(v,branch)
self.writeConfigFiles(nodes,k)
self.writeDumpFile(nodes,k)
nodes = {}
self.writeConfigFiles(self.graph.getProblemNodes(noAutoupdater = True),"no_autoupdater")
self.writeConfigFiles(self.graph.getProblemNodes(noGeodata = True),"no_geo")
self.writeConfigFiles(self.graph.getProblemNodes(noGeodata = True, noAutoupdater = True),"no_nothing")
def __prepareOutDir__(self):
files = glob.glob(self.dataPath+'/*')
for f in files:
os.remove(f)
def __getFile__(self, nodesFile):
if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
if self.printStatus:
print 'Download', nodesFile.rsplit('/', 1)[1] , 'from URL:', nodesFile
resource = urllib.urlopen(nodesFile)
else:
if self.printStatus:
print 'Open', nodesFile.rsplit('/', 1)[1] , 'from file:', nodesFile
resource = open(nodesFile)
try:
data = json.loads(resource.read())
except:
print "\033[91mError:\033[0m Error while parsing a json file (perhapes misformed file): ", nodesFile
raise HieraException
finally:
resource.close()
return data
def writeConfigFiles(self, nodes, name):
maxDepth = self.maxDepth(nodes)
if len(nodes) > 0:
for i in range(0,maxDepth):
content = 'geo $switch {\n default 0;'
f = open(self.dataPath.rstrip('/')+'/'+name+'_node_level'+str(i),'w')
for node in nodes.itervalues():
if node.stepsToVpn == i:
if node.ipv6 and node.hostname:
content += '\n '+node.ipv6+' 1; #'+node.hostname
content += '\n}'
f.write(content.encode('utf8'))
f.close()
def writeDumpFile(self, nodes, name):
content = {}
for node in nodes.itervalues():
if node.ipv6 and node.hostname:
content[node.nodeid] = {
'nodeid' : node.nodeid,
'ipv6' : node.ipv6,
'hostname' : node.hostname,
'level' : node.stepsToVpn,
}
with open(self.dataPath+'/'+name+'_node_statistics.json', 'w') as outfile:
json.dump(content, outfile)
def maxDepth(self, nodes):
maxDepth = 0
for v in nodes.itervalues():
if v.stepsToVpn > maxDepth:
maxDepth = v.stepsToVpn
return maxDepth+1

geocode.py Normal file (+39 lines)

@@ -0,0 +1,39 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#import time
from geopy.geocoders import Nominatim
from blitzdb import Document, FileBackend
class GeoAssign(Document):
pass
class Geocode:
def __init__(self, geocoderCache = True, printStatus = False):
self.printStatus = printStatus
self.geocoderCache = geocoderCache
if self.geocoderCache:
self.db = FileBackend('./geo-cache')
def getGeo(self, lon, lat):
if self.geocoderCache:
try:
nodeObj = self.db.get(GeoAssign,{'lat' : lat, 'lon' : lon})
nodeObj['cached'] = True
return nodeObj
except GeoAssign.DoesNotExist:
pass
if self.printStatus:
print('lon: '+str(lon)+', lat: '+str(lat)+' not in cache - start lookup at Nominatim-API')
geolocator = Nominatim()
location = geolocator.reverse([lat, lon], timeout=20)
if 'address' in location.raw:
location = location.raw['address']
nodeObj = GeoAssign({
'lat' : lat,
'lon' : lon,
'payload' : location
})
self.db.save(nodeObj)
self.db.commit()
nodeObj['cached'] = False
return nodeObj
else:
# got no results (i.e. coordinates are incorrect)
return None
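A minimal usage sketch (not part of this commit) for the cache-backed reverse geocoder above; the coordinates are placeholder values:
```
from geocode import Geocode

coder = Geocode(geocoderCache = True, printStatus = True)
result = coder.getGeo(7.63, 51.96)  # lon, lat
if result:
    print result['payload']  # Nominatim address fields as a dict
    print result['cached']   # True if the answer came from the ./geo-cache file backend
```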

get_state.py Normal file (+100 lines)

@@ -0,0 +1,100 @@
#!/usr/bin/python
#
# (c) 2016 descilla <mail@simon-wuellhorst.de>
#
# This script is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License or any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY. See the
# GNU General Public License for more details.
#
# For a copy of the GNU General Public License
# see <http://www.gnu.org/licenses/>.
#
import glob, os, json, collections, argparse, urllib
class OfflineChecker:
def __init__(self, fileName):
self.printStatus = True
self.fileNames = []
self.results = {}
self.data = self.__getFile__(fileName)
self.addresses = self.__getFile__('nodes_legacy_adresses.json')
self.addressesOld = self.__getFile__('nodes_legacy_adresses_old.json')
self.parseJson(self.data)
self.getFwState()
#self.printResults()
def __getFile__(self, nodesFile):
if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
if self.printStatus:
print "Download node.json from URL: " + nodesFile
resource = urllib.urlopen(nodesFile)
else:
if self.printStatus:
print "Open node.json file: " + nodesFile
resource = open(nodesFile)
data = json.loads(resource.read())
resource.close()
return data
def searchInLog(self, key, arg):
files = ['/var/log/nginx/access.log', '/var/log/nginx/access.log.1']
for fil in files:
with open(fil) as lg:
for line in lg:
if key and key in line:
if arg in line:
date = line.split('[')[1].split(']')[0]
dest_dom = line.split('gluon-')[1].split('-')[0]
return date, dest_dom
return None, None
def parseJson(self, data):
nodes_online = 0
users_online = 0
day_stamp = data['timestamp'].split('T')[0]
for key, node in data['nodes'].iteritems():
if 'statistics' in node:
users_online += node['statistics']['clients']
if 'flags' in node:
if node['flags']['online'] == False:
if 'system' in node['nodeinfo']:
siteCode = node['nodeinfo']['system']['site_code']
if siteCode not in self.results:
self.results[siteCode] = {}
self.results[siteCode][key] = {
'lastseen' : node['lastseen'],
'id' : key,
'mac' : node['nodeinfo']['network']['mac'],
'pub_v6' : self.getPublicAddress(node),
'name' : node['nodeinfo']['hostname']
}
def getFwState(self):
print 'fw_geladen\tlastseen\tziel_dom\tipv6_adresse\tnodeid\thostname\tmailaddress'
for node, val in self.results['ffms'].iteritems():
date, dest_dom = self.searchInLog(val['pub_v6'], "sysupgrade.bin")
if date and dest_dom:
#mail = self.addresses['nodes'][node]['nodeinfo']['owner']['contact'] if node in self.addresses['nodes'] and 'owner' in self.addresses['nodes'][node]['nodeinfo'] else ''
mail = 'JA' if (node in self.addresses['nodes'] and 'owner' in self.addresses['nodes'][node]['nodeinfo']) or (node in self.addressesOld['nodes'] and 'owner' in self.addressesOld['nodes'][node]['nodeinfo']) else 'NEIN'
print date +'\t'+ val['lastseen'] + '\t' + dest_dom + '\t' + val['pub_v6'] + '\t' + node + '\t' + val['name'] + '\t' + mail
def printResults(self):
ordered = collections.OrderedDict(sorted(self.results.items()))
print "date\tnodes_online\tusers_online"
for k,v in ordered.iteritems():
print k+'\t'+str(v['nodes_online'])+'\t'+str(v['users_online'])
def getPublicAddress(self,node):
if 'addresses' in node['nodeinfo']['network']:
for address in node['nodeinfo']['network']['addresses']:
if address.startswith('2a03'):
return address
return None
dmax = OfflineChecker('http://karte.freifunk-muensterland.org/data/nodes.json')

graph.py Normal file (+152 lines)

@@ -0,0 +1,152 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:
import urllib
import json
from pprint import pprint
from node import Node
from geocode import Geocode
class Graph:
def __init__(self, nodesData, graphData):
self.coder = Geocode(geocoderCache = True, printStatus = True)
self.data = graphData
self.nodes = nodesData
self.nodes_list = {}
self.nodes_no_autoupdater = {}
self.nodes_no_geo = {}
self.parseNodes()
self.parseLinks()
self.calculateStepsToVpn()
self.findMissingGeo()
def parseNodes(self):
for k,v in self.nodes['nodes'].iteritems():
lat, lon = self.getGeo(k)
node = Node(k, ipv6 = self.getPublicAddress(k), hostname = self.getHostname(k), isOnline = self.getOnlineState(k), lat=lat, lon=lon, coder = self.coder, autoupdater = self.getAutoupdaterStatus(k), branch = self.getBranch(k), isGateway = self.getIsGateway(k))
self.nodes_list[k] = node
def parseLinks(self):
link_nodes = self.data['batadv']['nodes']
for link in self.data['batadv']['links']:
if 'node_id' in link_nodes[link['source']] and 'node_id' in link_nodes[link['target']]:#else it is a vpn link
if self.nodes_list[link_nodes[link['source']]['node_id']].isGateway == True or self.nodes_list[link_nodes[link['target']]['node_id']].isGateway:
self.setVpnLink(link['source'], link['target'])
else:
self.setLinkBetween(link_nodes[link['source']]['node_id'], link_nodes[link['target']]['node_id'])
else:
self.setVpnLink(link['source'], link['target'])
def setLinkBetween(self, src, dst, stateOnline = True, lastSeen = None):
if src and dst:
self.nodes_list[src].links[dst] = {
'node' : self.nodes_list[dst],
'state_online' : stateOnline,
'last_seen' : lastSeen
}
self.nodes_list[dst].links[src] = {
'node' : self.nodes_list[src],
'state_online' : stateOnline,
'last_seen' : lastSeen
}
def setVpnLink(self, src, dst):
if 'node_id' not in self.data['batadv']['nodes'][src] or (self.data['batadv']['nodes'][src]['node_id'] and self.nodes_list[self.data['batadv']['nodes'][src]['node_id']].isGateway == True):
if 'node_id' in self.data['batadv']['nodes'][dst] and self.data['batadv']['nodes'][dst]['node_id']:
self.nodes_list[self.data['batadv']['nodes'][dst]['node_id']].stepsToVpn = 0
elif 'node_id' not in self.data['batadv']['nodes'][dst] or (self.data['batadv']['nodes'][dst]['node_id'] and self.nodes_list[self.data['batadv']['nodes'][dst]['node_id']].isGateway == True):
if 'node_id' in self.data['batadv']['nodes'][src] and self.data['batadv']['nodes'][src]['node_id']:
self.nodes_list[self.data['batadv']['nodes'][src]['node_id']].stepsToVpn = 0
def calculateStepsToVpn(self):
for node in self.nodes_list.itervalues():
node.calculateStepsToVpn()
def findMissingGeo(self):
for node in self.nodes_list.itervalues():
node.findMissingGeo()
def getAllLevelXNodes(self, level, online = True):
zmap = {}
for k,v in self.nodes_list.iteritems():
if v.isOnline or online == False:
if v.stepsToVpn == level:
zmap[k] = v
return zmap
def getHostname(self,node_id):
return self.nodes['nodes'][node_id]['nodeinfo']['hostname']
def getIsGateway(self,node_id):
return self.nodes['nodes'][node_id]['flags']['gateway']
def getAutoupdaterStatus(self, node_id):
#return True
if 'autoupdater' in self.nodes['nodes'][node_id]['nodeinfo']['software']:
return self.nodes['nodes'][node_id]['nodeinfo']['software']['autoupdater']['enabled']
else:
#if node is offline for a long time sometimes no autoupdater status can be found
return False
def getBranch(self, node_id):
#return True
if 'autoupdater' in self.nodes['nodes'][node_id]['nodeinfo']['software']:
return self.nodes['nodes'][node_id]['nodeinfo']['software']['autoupdater']['branch']
else:
#if node is offline for a long time sometimes no autoupdater status can be found
return None
def getGeo(self, node_id):
if 'location' in self.nodes['nodes'][node_id]['nodeinfo'] and 'latitude' in self.nodes['nodes'][node_id]['nodeinfo']['location'] and 'longitude' in self.nodes['nodes'][node_id]['nodeinfo']['location']:
return self.nodes['nodes'][node_id]['nodeinfo']['location']['latitude'], self.nodes['nodes'][node_id]['nodeinfo']['location']['longitude']
return None, None
def getPublicAddress(self,node_id):
if node_id in self.nodes['nodes']:
if 'addresses' in self.nodes['nodes'][node_id]['nodeinfo']['network']:
for address in self.nodes['nodes'][node_id]['nodeinfo']['network']['addresses']:
if address.startswith('2a03'):
return address
return None
def getOnlineState(self,node_id):
return self.nodes['nodes'][node_id]['flags']['online']
def getProblemNodes(self, noAutoupdater = False, noGeodata = False, online = True):
results = {}
for k,v in self.nodes_list.iteritems():
if v.isOnline or online == False:
if noAutoupdater and noGeodata:
if not v.autoupdater and not v.geodata:
results[k] = v
elif noAutoupdater:
if not v.autoupdater and v.geodata:
results[k] = v
elif noGeodata:
if not v.geodata and v.autoupdater:
results[k] = v
return results
def getNodeCloudsIn(self, region, branch = 'stable'):
results = {}
# noAuto = False
for k,v in self.getAllLevelXNodes(0).iteritems():
if v.isOnline == True:
if v.geodata != None:
if v.isInRegion(region):
for ksub,vsub in v.getNodeCloud({}).iteritems():
if not vsub.autoupdater or (branch and vsub.branch != branch):
break
else:
results.update(v.getNodeCloud({}))
print "Result:",len(results), region
return results
def maxDepth(self):
maxDepth = 0
for v in self.nodes_list.itervalues():
if v.stepsToVpn > maxDepth:
maxDepth = v.stepsToVpn
return maxDepth+1

hieraException.py Normal file (+7 lines)

@@ -0,0 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:
class HieraException(Exception):
pass

no_coords.py Executable file (+116 lines)

@@ -0,0 +1,116 @@
#!/usr/bin/python
#
# (c) 2016 descilla <mail@simon-wuellhorst.de>
#
# This script is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License or any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY. See the
# GNU General Public License for more details.
#
# For a copy of the GNU General Public License
# see <http://www.gnu.org/licenses/>.
#
import glob, os, json, collections, argparse, urllib, datetime
from collections import OrderedDict
class OfflineChecker:
def __init__(self, dataFile):
self.printStatus = False
self.dataSet = self.__getFile__(dataFile)
self.fileNames = []
self.results = {}
self.addresses = []
self.getAddressFiles()
self.getFwState()
def __getFile__(self, nodesFile):
if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
if self.printStatus:
print "Download node.json from URL: " + nodesFile
resource = urllib.urlopen(nodesFile)
else:
if self.printStatus:
print "Open node.json file: " + nodesFile
resource = open(nodesFile)
data = json.loads(resource.read())
resource.close()
return data
def getAddressFiles(self):
for file in glob.iglob('./nodes_adresses_*'):
self.addresses.append(self.__getFile__(file))
def readFile(self):
fil = 'operate.txt'
results = []
with open(fil) as lg:
for line in lg:
results.append(line)
return results
def getNodeAddressItem(self,ipv6):
pub = ""
i = 0
for adr in self.addresses:
i +=1
for val in adr['nodes'].itervalues():
pub = self.getPublicAddress(val)
if pub and pub in ipv6:
if 'owner' in val['nodeinfo']:
return val
def getNodeItem(self,ipv6):
pub = ""
for val in self.dataSet['nodes'].itervalues():
pub = self.getPublicAddress(val)
if pub and pub in ipv6:
return val
def getFwState(self):
lastDay = datetime.datetime.today() - datetime.timedelta(hours = 48)
onlyoldernodes = False
results = {}
for nodeIP in self.readFile():
nodeAddresses = self.getNodeAddressItem(nodeIP)
node = self.getNodeItem(nodeIP)
if node:
nodeLastSeen = datetime.datetime.strptime(node['lastseen'],'%Y-%m-%dT%H:%M:%S')
if nodeLastSeen < lastDay or onlyoldernodes == False:
au = node['nodeinfo']['software']['autoupdater']['branch']
loca = 'JA' if 'location' in node['nodeinfo'] else 'NEIN'
if nodeAddresses:
mail = nodeAddresses['nodeinfo']['owner']['contact'] if 'owner' in nodeAddresses['nodeinfo'] else 'NEIN'
#mail = 'JA' if 'owner' in nodeAddresses['nodeinfo'] else 'NEIN'
else:
mail = 'NEIN'
results[node['nodeinfo']['node_id']] = {
'lastseen' : node['lastseen'],
'ipv6' : self.getPublicAddress(node),
'node_id' : node['nodeinfo']['node_id'],
'name' : node['nodeinfo']['hostname'],
'contact' : mail,
'fw_base' : node['nodeinfo']['software']['firmware']['base'],
'fw_release' : node['nodeinfo']['software']['firmware']['release'],
'au_enabled' : str(node['nodeinfo']['software']['autoupdater']['enabled']),
'au_branch' : au,
'router_modell' : node['nodeinfo']['hardware']['model'],
'geo' : loca,
}
#print node['lastseen'] + ';' + self.getPublicAddress(node) + ';' + node['nodeinfo']['node_id'] + ';' + node['nodeinfo']['hostname'] + ';' + mail + ';' + node['nodeinfo']['software']['firmware']['base'] + ';' + node['nodeinfo']['software']['firmware']['release'] + ';' + str(node['nodeinfo']['software']['autoupdater']['enabled']) + ';' + au + ';' + node['nodeinfo']['hardware']['model'] + ';' + loca
self.printCSV(results)
def printCSV(self, data):
od = OrderedDict(sorted(data.items(), key=lambda x: x[1]['lastseen'], reverse=True))
print 'zuletzt online;nodeid;Knotenname;mailaddress;Firmware Base;Firmware Release;Autoupdater;AU-Branch;Router-Modell;geo'
for item in od.itervalues():
print item['lastseen'] + ';' + item['node_id'] + ';' + item['name'] + ';' + item['contact'] + ';' + item['fw_base'] + ';' + item['fw_release'] + ';' + item['au_enabled'] + ';' + item['au_branch'] + ';' + item['router_modell'] + ';' + item['geo']
def getPublicAddress(self,node):
if 'addresses' in node['nodeinfo']['network']:
for address in node['nodeinfo']['network']['addresses']:
if address.startswith('2a03'):
return address
return None
dmax = OfflineChecker('https://service.freifunk-muensterland.de/maps/data/nodes.json')

node.py Normal file (+108 lines)

@@ -0,0 +1,108 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from geocode import Geocode
import time
class Node(object):
def __init__(self, nodeid, ipv6 = None, hostname = None, isOnline = False, lastSeen = None, lat = None, lon = None, coder = None, autoupdater = False, branch = None, isGateway = False):
self.coder = coder
if self.coder == None:
self.coder = Geocode(geocoderCache = True, printStatus = True)
self.links = {}
self.nodeid = nodeid
self.ipv6 = ipv6
self.hostname = hostname
self.stepsToVpn = -1
self.isOnline = isOnline
self.lastSeen = lastSeen
self.autoupdater = autoupdater
self.branch = branch
self._geo = None
self.geodata = None
self.isGateway = isGateway
if lat != None and lon != None:
self.geo = {
'lat' : lat,
'lon' : lon
}
def addLink(self,nodeid, node):
if not nodeid in self.links:
self.links[nodeid] = node
else:
print "link still exists"
def calculateStepsToVpn(self, trace = []):
if self.stepsToVpn != 0:#self.stepsToVpn == -1 doesn't work, cause the shortest path could be the path to a former trace member
own_trace = trace[:]#clone - trace for preventing loops in pathfinding in graph
own_trace.append(self.nodeid)
lowest = -1
current = -1
for k,v in self.links.iteritems():
if k not in own_trace:
current = v['node'].calculateStepsToVpn(own_trace)
if lowest == -1 or current < lowest:
lowest = current
if lowest > -1:
self.stepsToVpn = lowest+1
return self.stepsToVpn
def findMissingGeo(self, trace = []):
if self.geo == None:
own_trace = trace[:]
own_trace.append(self.nodeid)
geo = None
for k,v in self.links.iteritems():
if k not in own_trace:
geo = v['node'].findMissingGeo(own_trace)
if geo != None:
self.geo = geo.copy()
break
return geo
else:
return self.geo
def getNodeCloud(self, nodes = {}):
nodes[self.nodeid] = self
for k,v in self.links.iteritems():
if k not in nodes:
nodes = v['node'].getNodeCloud(nodes)
return nodes
def isInRegion(self, regions):
#AND and OR Conditions are possible
val = False
if self.geodata == None:
return False
for region in regions:
val = False
for k,v in region.iteritems():
if k in self.geodata and self.geodata[k] == v:
val = True
else:
val = False
if val:
return True
return val
@property
def geo(self):
return self._geo
@geo.setter
def geo(self, value):
self._geo = value
self.__get_geodata__()
def __get_geodata__(self):
if self.geo != None:
result = self.coder.getGeo(self.geo['lon'], self.geo['lat'])
if result:
self.geodata = result['payload']
if result['cached'] == False:
time.sleep(1)
else:
self.geodata = None

node_hierarchy.py Executable file (+44 lines)

@@ -0,0 +1,44 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:
import argparse, json, sys
from domain_selector import DomainSelector
from hieraException import HieraException
parser = argparse.ArgumentParser(description='This script generates a hierarchical node list for node migration using the nginx geo feature.')
parser.add_argument('--json-path', required=False, default='https://service.freifunk-muensterland.de/maps/data/', help='Path of nodes.json and graph.json (can be local folder or remote URL).')
parser.add_argument('--targets-file', required=False, help='Json file of targets for nominatim geocoder.', default='./targets.json')
parser.add_argument('-t', '--targets', nargs='*', required=False, help='List of target names from the targets file which should be processed. Example: -t citya cityb ...')
parser.add_argument('-a', '--all', '--all-targets', required=False, help='Process all targets from the targets file.', action='store_true')
parser.add_argument('--out-path', required=False, help='Directory where the generated output should be stored.', default='./webserver-configuration/')
parser.add_argument('--only-specific-branch', required=False, help='Only consider nodes from a specific branch.', default=None)
parser.add_argument('-p', '--print-status', required=False, action='store_true', help='Print status (like geocoder tasks).')
args = parser.parse_args()
def prepareTargets(args):
resource = open(args.targets_file)
targets = json.loads(resource.read())
resource.close()
if len(targets) == 0:
print "\033[91mError:\033[0m No targets were found in targets file."
sys.exit(1)
if args.all == True:
return targets
elif args.targets == None or len(args.targets) == 0:
print "\033[91mError:\033[0m No target was given as argument and even --all switch was not enabled."
sys.exit(1)
else:
specific_targets = {}
for k, v in targets.iteritems():
if k in args.targets:
specific_targets[k] = v
return specific_targets
targets = prepareTargets(args)
try:
ds = DomainSelector(nodesFile = args.json_path.rstrip('/')+'/nodes.json', graphFile = args.json_path.rstrip('/')+'/graph.json', printStatus = args.print_status, dataPath = args.out_path, targets = targets, branch = args.only_specific_branch)
except HieraException:
print "\033[93mFailed:\033[0m Process was interrupted by HieraException-Exception (see error messages above)."

targets.json Normal file (+91 lines)

@@ -0,0 +1,91 @@
{
"domaene_01" : [
{"city_district" : "Münster-Mitte"},
{"city_district" : "Münster-Nord"},
{"city_district" : "Münster-Ost"},
{"suburb" : "Berg Fidel"},
{"suburb" : "Gremmendorf"},
{"suburb" : "Mecklenbeck"},
{"suburb" : "Gievenbeck"},
{"suburb" : "Nienberge"},
{"suburb" : "Roxel"},
{"suburb" : "Sentruper Höhe"}
],
"domaene_02" : [
{"county" : "Kreis Coesfeld"}
],
"domaene_03" : [
{"town" : "48565"},
{"village" : "Wettringen"},
{"town" : "Ochtrup"},
{"village" : "Metelen"},
{"town" : "Horstmar"},
{"village" : "Laer"},
{"village" : "Nordwalde"},
{"village" : "Altenberge"}
],
"domaene_04" : [
{"town" : "Emsdetten"},
{"town" : "Neuenkirchen"},
{"town" : "Rheine"},
{"town" : "Greven"},
{"village" : "Ladbergen"},
{"town" : "Lengerich"},
{"town" : "Tecklenburg"},
{"village" : "Lienen"}
],
"domaene_05" : [
{"suburb" : "Amelsbüren"},
{"suburb" : "Hiltrup"},
{"suburb" : "Albachten"}
],
"domaene_06" : [
{"town" : "Ahaus"},
{"town" : "Bocholt"},
{"town" : "Borken"},
{"town" : "Gescher"},
{"village" : "Heek"},
{"town" : "Heiden"},
{"town" : "Isselburg"},
{"village" : "Legden"},
{"town" : "Raesfeld"},
{"town" : "Reken"},
{"town" : "Rhede"},
{"village" : "Schöppingen"},
{"town" : "Stadtlohn"},
{"village" : "Südlohn"},
{"town" : "Velen"},
{"town" : "Vreden"}
],
"domaene_07" : [
{"town" : "Telgte"}
],
"domaene_08" : [
{"town" : "Gescher"}
],
"domaene_09" : [
{"town" : "Stadtlohn"}
],
"domaene_11" : [
{"town" : "Bocholt"}
],
"domaene_12" : [
{
"town" : "Dülmen",
"suburb" : "Mitte"
},
{"suburb" : "Hausdülmen"},
{"suburb" : "Merfeld"},
{"suburb" : "Buldern"},
{"suburb" : "Hiddingsel"}
],
"domaene_13" : [
{"suburb" : "Rorup"}
],
"stadt_warendorf" : [
{"town" : "Warendorf"}
],
"domaene_14" : [
{"county" : "Kreis Warendorf"}
]
}

track_statistics.py Executable file (+91 lines)

@@ -0,0 +1,91 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:
import json, urllib
import operator
class TrackStatistics:
def __init__(self,nodesFile,statisticsFiles, printStatus = False):
self.printStatus = printStatus
self.nodesData = self.__getFile__(nodesFile)
self.statisticsData = []
for entry in statisticsFiles:
self.statisticsData.append({'data' : self.__getFile__(entry['data']), 'name' : entry['name']})
#print self.statisticsData
for domain in self.statisticsData:
self.printDomainStatisticsPerLevel(domain['data'], domain['name'])
def printDomainStatisticsPerLevel(self,data, name = "not set"):
#firmwareVersion = {}
print '-'*50
print 'Printing statistics for domain:', name
for level in range(0,self.maxDepth(data)):
firmwareVersion = {}
for nodeid, node in data.iteritems():
if level == node['level']:
fwver = self.nodesData['nodes'][nodeid]['nodeinfo']['software']['firmware']['release']
if fwver in firmwareVersion:
firmwareVersion[fwver] += 1
else:
firmwareVersion[fwver] = 1
print '\tLevel:',level
for k,v in sorted(firmwareVersion.items(), key=operator.itemgetter(1), reverse = True):
print '\t\t '+k+':\t'+str(v)
print '-'*50
def maxDepth(self, nodes):
maxDepth = 0
for v in nodes.itervalues():
if v['level'] > maxDepth:
maxDepth = v['level']
return maxDepth+1
def __getFile__(self, nodesFile):
if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
if self.printStatus:
print "Download node.json from URL: " + nodesFile
resource = urllib.urlopen(nodesFile)
else:
if self.printStatus:
print "Open node.json file: " + nodesFile
resource = open(nodesFile)
data = json.loads(resource.read())
resource.close()
return data
data = [
{
'data' : '../domaenensplit_webserver_config/muenster_sued_node_statistics.json',
'name' : 'Münster Süd'
},
{
'data' : '../domaenensplit_webserver_config/muenster_stadt_node_statistics.json',
'name' : 'Münster Stadt'
},
{
'data' : '../domaenensplit_webserver_config/kreis_coesfeld_node_statistics.json',
'name' : 'Kreis Coesfeld'
},
{
'data' : '../domaenensplit_webserver_config/kreis_warendorf_node_statistics.json',
'name' : 'Kreis Warendorf'
},
{
'data' : '../domaenensplit_webserver_config/kreis_steinfurt_ost_node_statistics.json',
'name' : 'Kreis Steinfurt Ost'
},
{
'data' : '../domaenensplit_webserver_config/kreis_steinfurt_west_node_statistics.json',
'name' : 'Kreis Steinfurt West'
},
{
'data' : '../domaenensplit_webserver_config/kreis_borken_node_statistics.json',
'name' : 'Kreis Borken'
},
]
#stat = TrackStatistics('nodes.json', data, printStatus = True)
stat = TrackStatistics('https://freifunk-muensterland.de/map/data/nodes.json', data, printStatus = True)