Added an info module for generating info files about nodes, links, graphs and domains. Currently only offline node info is available. Furthermore, there is an attribute for appending filters to the info module.

This commit is contained in:
Simon Wüllhorst 2016-07-19 23:28:01 +02:00
parent 779087bb99
commit f617f3b2be
5 changed files with 169 additions and 4 deletions

View file

@ -8,6 +8,7 @@ from cloud.GlobalGraph import GlobalGraph
from parser.ShapesParser import ShapesParser
from cloud.Domaene import Domaene
from generator.NginxConfGen import NginxConfGen
from info.Info import Info
class NodeHierarchy(object):
def __init__(self):
@ -21,6 +22,7 @@ class NodeHierarchy(object):
self.domains = self.__createDomains__()
self.nginxConf = NginxConfGen(self.domains, self.__args__)
self.nginxConf.writeNginxConfigFile()
self.infos = Info(self.__args__.info, self.__args__.info_out_path, self.__args__.info_out_type, self.__args__.info_filters, self.nodes, self.globalGraph, self.domains)
def __parseShapes__(self):
shapesJson = {}
@ -69,10 +71,12 @@ class NodeHierarchy(object):
parser.add_argument('-j', '--json-path', required=False, default='https://service.freifunk-muensterland.de/maps/data/', help='Path of nodes.json and graph.json (can be local folder or remote URL).')
parser.add_argument('-s', '--shapes-path', required=False, default='https://freifunk-muensterland.de/md-fw-dl/shapes/', help='Path of shapefiles (can be local folder or remote URL).')
parser.add_argument('-t', '--targets', nargs='+', required=True, help='List of targets which should be proceeded. Example: -t citya cityb ...')
parser.add_argument('-o', '--out-file', required=False, help='Filename where the generated Output should stored.', default='./webserver-configuration')
parser.add_argument('-o', '--out-file', default='./webserver-configuration', required=False, help='Filename where the generated Output should stored.')
parser.add_argument('-v', '--debug', required=False, action='store_true', help='Enable debugging output.')
parser.add_argument('-f', '--filters', nargs='*', required=False, choices=('exclude_clouds_with_lan_links', 'no_lan'), help='Filter out nodes and local clouds based on filter rules')
parser.add_argument('-f', '--filters', nargs='*', required=False, choices=('exclude_clouds_with_lan_links', 'no_lan'), help='Filter out nodes and local clouds based on filter rules.')
parser.add_argument('-i', '--info', nargs='*', required=False, choices=('get_offline_nodes','offline'), help='Get infos about the graph, links and nodes.')
parser.add_argument('-if', '--info-filters', nargs='*', required=False, help='Filter info results. Currently supported: min_age:TIME_RANGE, max_age:TIME_RANGE. Examples: -if min_age:1d max_age:2w')
parser.add_argument('-iop', '--info-out-path', required=False, default='./', help='Folder where info files should be written. Default: ./')
parser.add_argument('-iot', '--info-out-type', nargs='+', required=False, default='csv', choices=('json', 'csv'), help='Defines the format of info output. Default: csv')
return parser.parse_args()
NodeHierarchy()

18
info/Info.py Normal file
View file

@ -0,0 +1,18 @@
from info.OfflineInfo import OfflineInfo
class Info(object):
    """Dispatcher that generates the requested info files about nodes,
    links, graphs and domains.

    Currently only the 'get_offline_nodes' info type is implemented; it
    delegates to OfflineInfo and writes the collected offline nodes as
    CSV and/or JSON files into the configured output folder.
    """

    def __init__(self, infoTypes, infoOutFolder, infoOutType, infoFilters, nodes, globalGraph, domains):
        # Raw values coming from the command line (-i, -iop, -iot, -if).
        self.__infoTypes__ = infoTypes
        self.__infoOutFolder__ = infoOutFolder
        self.__infoOutType__ = infoOutType
        self.__infoFilters__ = infoFilters
        self.__nodes__ = nodes
        self.__globalGraph__ = globalGraph
        self.__domains__ = domains
        # -i / --info was not given at all -> nothing to generate.
        if self.__infoTypes__ is None:
            return
        if 'get_offline_nodes' in self.__infoTypes__:
            self.__offlineNodes__ = OfflineInfo(self.__infoFilters__, self.__nodes__, self.__domains__)
            # NOTE: 'in' works for both a list of formats and the
            # default plain string 'csv' from argparse.
            if 'csv' in self.__infoOutType__:
                self.__offlineNodes__.writeCSVtoFile(self.__infoOutFolder__ + '/offline_nodes.csv')
            if 'json' in self.__infoOutType__:
                self.__offlineNodes__.writeJsonToFile(self.__infoOutFolder__ + '/offline_nodes.json')

75
info/InfoMeta.py Normal file
View file

@ -0,0 +1,75 @@
import csv
import io
import json
class InfoMeta(object):
    """Base class for info generators.

    Subclasses fill ``resultNodes`` (a list of node objects exposing
    ``__jsonObject__`` and ``__getPublicAddresses__()``); this class
    renders those nodes as JSON or CSV and writes the result to disk.
    """

    # Column order of the generated CSV file.
    __CSV_HEADER__ = ['hostname', 'site', 'nodeid', 'ipv6addresses', 'status',
                      'lastseen', 'firstseen', 'autoupdater', 'branch',
                      'firmware', 'hardware']

    def __init__(self):
        # Filled by subclasses; None means "no result computed yet".
        self.resultNodes = None
        self.resultGraph = None

    @staticmethod
    def __dig__(mapping, *keys):
        """Walk nested dict keys; return the string 'none' (the CSV
        placeholder) if any level of the path is missing."""
        try:
            for key in keys:
                mapping = mapping[key]
        except (KeyError, TypeError):
            return 'none'
        return mapping

    def __generateNodesJson__(self):
        """Return the raw json objects of all result nodes ([] if unset)."""
        if self.resultNodes is None:
            return []
        return [node.__jsonObject__ for node in self.resultNodes]

    def __nodeToRow__(self, node):
        """Build one CSV row (list of string fields) for a single node.

        Optional fields (site code, autoupdater state/branch, firmware
        release, hardware model) fall back to 'none' when missing.
        """
        nodeData = node.__jsonObject__
        nodeinfo = nodeData['nodeinfo']
        row = [nodeinfo['hostname'],
               self.__dig__(nodeinfo, 'system', 'site_code'),
               nodeinfo['node_id'],
               # all public IPv6 addresses joined into one field
               ','.join(node.__getPublicAddresses__()),
               'online' if nodeData['flags']['online'] else 'offline',
               nodeData['lastseen'],
               nodeData['firstseen']]
        enabled = self.__dig__(nodeinfo, 'software', 'autoupdater', 'enabled')
        if enabled == 'none':
            row.append('none')
        else:
            row.append('enabled' if enabled else 'disabled')
        row.append(self.__dig__(nodeinfo, 'software', 'autoupdater', 'branch'))
        # Fallback added for consistency: previously a missing firmware
        # release raised a KeyError while all sibling fields used 'none'.
        row.append(self.__dig__(nodeinfo, 'software', 'firmware', 'release'))
        row.append(self.__dig__(nodeinfo, 'hardware', 'model'))
        return row

    def __generateNodesCSV__(self):
        """Render the result nodes as CSV text ('' if unset).

        Uses the csv module so that quotes inside field values (e.g.
        hostnames) are escaped correctly; every field is quoted to keep
        the historic output format.
        """
        if self.resultNodes is None:
            return ''
        buf = io.StringIO()
        writer = csv.writer(buf, quoting=csv.QUOTE_ALL, lineterminator='\n')
        writer.writerow(self.__CSV_HEADER__)
        for node in self.resultNodes:
            writer.writerow(self.__nodeToRow__(node))
        return buf.getvalue()

    def writeCSVtoFile(self, filename):
        """Write the CSV rendering of the result nodes to *filename*."""
        with open(filename, 'w') as out:
            out.write(self.__generateNodesCSV__())

    def writeJsonToFile(self, filename):
        """Write the JSON rendering of the result nodes to *filename*."""
        with open(filename, 'w') as out:
            out.write(json.dumps(self.__generateNodesJson__(), sort_keys=True, indent=4, ensure_ascii=False))

68
info/OfflineInfo.py Normal file
View file

@ -0,0 +1,68 @@
from info.InfoMeta import InfoMeta
from shapely.geometry import Point
import datetime, re
from datetime import timedelta
class OfflineInfo(InfoMeta):
    """Collects all offline nodes whose geo coordinates lie inside one
    of the given domains, optionally restricted by min_age / max_age
    filters applied to the node's 'lastseen' timestamp.
    """

    # Matches filter values such as '1d', '2w' or '3months': count + unit.
    __filterRegex__ = re.compile(r'([0-9]+)([a-zA-Z]+)')

    # Days per supported age unit (months/years are approximated).
    __unitDays__ = {'d': 1, 'day': 1, 'days': 1,
                    'w': 7, 'week': 7, 'weeks': 7,
                    'm': 30, 'month': 30, 'months': 30,
                    'y': 365, 'year': 365, 'years': 365}

    def __init__(self, filters, nodes, domains):
        """filters: raw strings like 'min_age:1d' (or None);
        nodes: dict of node-id -> node; domains: dict of name -> domain."""
        super().__init__()
        self.__filters__ = filters
        self.__nodes__ = nodes
        self.__domains__ = domains
        self.__minAge__, self.__maxAge__ = self.__parseFilters__()
        self.resultNodes = self.__filterNodes__()

    def __filterNodes__(self):
        """Return the offline nodes located in any domain that satisfy
        the parsed min/max age cutoffs."""
        offlineNodes = []
        for nodeId, node in self.__nodes__.items():
            if node.isOnline:
                continue
            if node.geo is None:
                continue
            # Loop-invariant: build the point once, not per domain.
            point = Point((node.geo['lon'], node.geo['lat']))
            for domainName, domain in self.__domains__.items():
                if not domain.isPointInDomaene(point):
                    continue
                # NOTE(review): assumes 'lastseen' is a naive local
                # timestamp, consistent with datetime.now() below.
                nodeLastSeen = datetime.datetime.strptime(node.__jsonObject__['lastseen'], '%Y-%m-%dT%H:%M:%S')
                # min_age cutoff lies min_age in the past: skip nodes
                # that have been seen more recently than that.
                if self.__minAge__ is not None and self.__minAge__ < nodeLastSeen:
                    continue
                # max_age cutoff lies even further back: skip nodes
                # that have been offline longer than allowed.
                if self.__maxAge__ is not None and self.__maxAge__ > nodeLastSeen:
                    continue
                offlineNodes.append(node)
                # Bugfix: without this break a node inside several
                # (overlapping) domains was listed once per domain.
                break
        return offlineNodes

    def __ageToCutoff__(self, val, unit):
        """Translate (count, unit) into the datetime 'now - span';
        returns None for unknown units."""
        days = self.__unitDays__.get(unit)
        if days is None:
            return None
        return datetime.datetime.now() - timedelta(days=val * days)

    def __parseFilters__(self):
        """Parse filter strings like 'min_age:1d' / 'max_age:2w'.

        Returns a (minAge, maxAge) tuple of datetime cutoffs, where an
        entry is None if no (or an invalid) filter was supplied.
        Unrecognized filter names and malformed values are ignored.
        """
        if self.__filters__ is None:
            return None, None
        minAge = None
        maxAge = None
        # renamed loop variable from 'filter' to avoid shadowing the builtin
        for rule in self.__filters__:
            attr = rule.split(':')
            if len(attr) != 2 or attr[0] not in ('min_age', 'max_age'):
                continue
            match = self.__filterRegex__.match(attr[1])
            if match is None:
                continue
            date = self.__ageToCutoff__(int(match.group(1)), match.group(2))
            if attr[0] == 'min_age':
                minAge = date
            else:
                maxAge = date
        return minAge, maxAge

0
info/__init__.py Normal file
View file