Updated node_hierarchy configuration
- Added a JSON file for target configuration
- Added command-line argument handling
- Added some error handling
- Several other minor changes
parent e051d64780, commit c6a167dde5
.gitignore (new file, vendored)
@@ -0,0 +1,4 @@
nodes_*
*.pyc
geo-cache/
contact/
README.md (new file)
@@ -0,0 +1,46 @@

# Node Migration Tool

This tool generates nginx configuration files for migrating nodes from one domain to another domain. It takes into account which nodes depend on other nodes for their internet uplink.

## Configuration

The ``targets`` file defines domains (or similar targets). It is a JSON file that lists, for each target, the area relations used by the Nominatim geocoder.
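For reference, an excerpt from the ``targets.json`` included in this commit; each key names a target and maps it to a list of Nominatim area relations:

```
{
    "domaene_02" : [
        {"county" : "Kreis Coesfeld"}
    ],
    "domaene_07" : [
        {"town" : "Telgte"}
    ]
}
```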
## Invocation

A ``targets`` file must exist.

In the simplest case the program is invoked as follows:

```
./node_hierarcy.py --all
```

If specific ``nodes.json`` and ``graph.json`` files are to be used, the data path can be given as follows (it can be a local file path as well as an http or https URL):

```
./node_hierarcy.py --all --json-path https://service.freifunk-muensterland.de/maps/data/
```

Progress output is enabled with the ``-p`` or ``--print-status`` switch:

```
./node_hierarcy.py --all -p
```

Processing can be limited to a selection of targets from the targets file with the ``-t`` or ``--targets`` switch:

```
./node_hierarcy.py -t domaene_01 domaene_02 --print-status
```

Further help is available with ``-h`` or ``--help``:

```
./node_hierarcy.py -h
```

### Input and output

By default a ``targets.json`` file is expected. To read this file from a different location, the ``--targets-file`` argument can be used:

```
./node_hierarcy.py --targets-file /root/targets.json
```

By default the generated nginx configuration files are written to the directory ``./webserver-configuration/``. This can be changed with the ``--out-path`` switch:

```
./node_hierarcy.py --out-path /root/config/
```
domain_selector.py (changed)
@@ -1,10 +1,16 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
 #Imports:
-import json, urllib
+import json, urllib, os
 from graph import Graph
+from hieraException import HieraException
+
 class DomainSelector:
     def __init__(self, nodesFile, graphFile, dataPath = './', printStatus = False, targets = None, branch = 'stable'):
+        if not os.path.isdir(dataPath):
+            print "\033[91mError:\033[0m Output folder was not found or is not writable. Given path:", dataPath
+            raise HieraException
+
         self.printStatus = printStatus
         self.targets = targets
         self.nodesData = self.__getFile__(nodesFile)
@@ -28,21 +34,27 @@ class DomainSelector:
     def __getFile__(self, nodesFile):
         if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
             if self.printStatus:
-                print "Download node.json from URL: " + nodesFile
+                print 'Download', nodesFile.rsplit('/', 1)[1], 'from URL:', nodesFile
             resource = urllib.urlopen(nodesFile)
         else:
             if self.printStatus:
-                print "Open node.json file: " + nodesFile
+                print 'Open', nodesFile.rsplit('/', 1)[1], 'from file:', nodesFile
             resource = open(nodesFile)
-        data = json.loads(resource.read())
-        resource.close()
+        try:
+            data = json.loads(resource.read())
+        except:
+            print "\033[91mError:\033[0m Error while parsing a json file (perhaps malformed file): ", nodesFile
+            raise HieraException
+        finally:
+            resource.close()

         return data

     def writeConfigFiles(self, nodes, name):
         maxDepth = self.maxDepth(nodes)
         for i in range(0,maxDepth):
             content = 'geo $switch {\n default 0;'
-            f = open(self.dataPath+'/'+name+'_node_level'+str(i),'w')
+            f = open(self.dataPath.rstrip('/')+'/'+name+'_node_level'+str(i),'w')
             for node in nodes.itervalues():
                 if node.stepsToVpn == i:
                     if node.ipv6 and node.hostname:
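For orientation, a sketch of what one generated per-level file (e.g. ``domaene_01_node_level0``) might contain. Only the ``geo $switch`` header string is visible in this hunk; the per-node entries and the closing brace are assumptions:

```
geo $switch {
 default 0;
    2a03:2260::1 1;    # hypothetical node IPv6 address
}
```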
get_state.py (new file)
@@ -0,0 +1,100 @@
#!/usr/bin/python
#
# (c) 2016 descilla <mail@simon-wuellhorst.de>
#
# This script is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License or any later version.
#
# This script is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY. See the
# GNU General Public License for more details.
#
# For a copy of the GNU General Public License
# see <http://www.gnu.org/licenses/>.
#
import glob, os, json, collections, argparse, urllib

class OfflineChecker:
    def __init__(self, fileName):
        self.printStatus = True
        self.fileNames = []
        self.results = {}
        self.data = self.__getFile__(fileName)
        self.addresses = self.__getFile__('nodes_legacy_adresses.json')
        self.addressesOld = self.__getFile__('nodes_legacy_adresses_old.json')
        self.parseJson(self.data)
        self.getFwState()
        #self.printResults()

    def __getFile__(self, nodesFile):
        if nodesFile.startswith('https://') or nodesFile.startswith('http://'):
            if self.printStatus:
                print "Download node.json from URL: " + nodesFile
            resource = urllib.urlopen(nodesFile)
        else:
            if self.printStatus:
                print "Open node.json file: " + nodesFile
            resource = open(nodesFile)
        data = json.loads(resource.read())
        resource.close()
        return data

    def searchInLog(self, key, arg):
        files = ['/var/log/nginx/access.log', '/var/log/nginx/access.log.1']
        for fil in files:
            with open(fil) as lg:
                for line in lg:
                    if key and key in line:
                        if arg in line:
                            date = line.split('[')[1].split(']')[0]
                            dest_dom = line.split('gluon-')[1].split('-')[0]
                            return date, dest_dom
        return None, None

    def parseJson(self, data):
        nodes_online = 0
        users_online = 0
        day_stamp = data['timestamp'].split('T')[0]
        for key, node in data['nodes'].iteritems():
            if 'statistics' in node:
                users_online += node['statistics']['clients']
            if 'flags' in node:
                if node['flags']['online'] == False:
                    if 'system' in node['nodeinfo']:
                        siteCode = node['nodeinfo']['system']['site_code']
                        if siteCode not in self.results:
                            self.results[siteCode] = {}
                        self.results[siteCode][key] = {
                            'lastseen' : node['lastseen'],
                            'id' : key,
                            'mac' : node['nodeinfo']['network']['mac'],
                            'pub_v6' : self.getPublicAddress(node),
                            'name' : node['nodeinfo']['hostname']
                        }

    def getFwState(self):
        print 'fw_geladen\tlastseen\tziel_dom\tipv6_adresse\tnodeid\thostname\tmailaddress'
        for node, val in self.results['ffms'].iteritems():
            date, dest_dom = self.searchInLog(val['pub_v6'], "sysupgrade.bin")
            if date and dest_dom:
                #mail = self.addresses['nodes'][node]['nodeinfo']['owner']['contact'] if node in self.addresses['nodes'] and 'owner' in self.addresses['nodes'][node]['nodeinfo'] else ''
                mail = 'JA' if (node in self.addresses['nodes'] and 'owner' in self.addresses['nodes'][node]['nodeinfo']) or (node in self.addressesOld['nodes'] and 'owner' in self.addressesOld['nodes'][node]['nodeinfo']) else 'NEIN'
                print date +'\t'+ val['lastseen'] + '\t' + dest_dom + '\t' + val['pub_v6'] + '\t' + node + '\t' + val['name'] + '\t' + mail

    def printResults(self):
        ordered = collections.OrderedDict(sorted(self.results.items()))
        print "date\tnodes_online\tusers_online"
        for k,v in ordered.iteritems():
            print k+'\t'+str(v['nodes_online'])+'\t'+str(v['users_online'])

    def getPublicAddress(self,node):
        if 'addresses' in node['nodeinfo']['network']:
            for address in node['nodeinfo']['network']['addresses']:
                if address.startswith('2a03'):
                    return address
        return None

dmax = OfflineChecker('http://karte.freifunk-muensterland.org/data/nodes.json')
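``get_state.py`` takes no command-line arguments: the ``OfflineChecker`` instance at module level fetches ``nodes.json``, expects ``nodes_legacy_adresses.json`` and ``nodes_legacy_adresses_old.json`` in the working directory, and prints a tab-separated report to stdout. A typical invocation (the output file name is just an example) could be:

```
./get_state.py > firmware_state.tsv
```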
hieraException.py (new file)
@@ -0,0 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
#Imports:


class HieraException(Exception):
    pass
node_hierarcy.py (changed)
@@ -1,97 +1,52 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
 #Imports:
+import argparse, json, sys
 from domain_selector import DomainSelector
+from hieraException import HieraException
+
+parser = argparse.ArgumentParser(description='This script generates a hierarchical node list for node migration using the nginx geo feature.')
+parser.add_argument('--json-path', required=False, default='https://service.freifunk-muensterland.de/maps/data/', help='Path of nodes.json and graph.json (can be a local folder or a remote URL).')
+parser.add_argument('--targets-file', required=False, help='JSON file of targets for the Nominatim geocoder.', default='./targets.json')
+parser.add_argument('-t', '--targets', nargs='*', required=False, help='List of target names from the targets file that should be processed. Example: -t citya cityb ...')
+parser.add_argument('-a', '--all', '--all-targets', required=False, help='Process all targets from the targets file.', action='store_true')
+parser.add_argument('--out-path', required=False, help='Directory where the generated output should be stored.', default='./webserver-configuration/')
+parser.add_argument('--only-specific-branch', required=False, help='Only include nodes from a specific branch.', default=None)
+parser.add_argument('-p', '--print-status', required=False, action='store_true', help='Print status (like geocoder tasks).')
+args = parser.parse_args()
+
+def prepareTargets(args):
+
+    resource = open(args.targets_file)
+    targets = json.loads(resource.read())
+    resource.close()
+
+    if len(targets) == 0:
+        print "\033[91mError:\033[0m No targets were found in targets file."
+        sys.exit(1)
+    if args.all == True:
+        return targets
+    elif args.targets == None or len(args.targets) == 0:
+        print "\033[91mError:\033[0m No target was given as argument and the --all switch was not enabled either."
+        sys.exit(1)
+    else:
+        specific_targets = {}
+        for k, v in targets.iteritems():
+            if k in args.targets:
+                specific_targets[k] = v
+        return specific_targets
+
+
+print args
+
+targets = prepareTargets(args)
+
-targets = {
-#    'muenster' : [
-#        {'city' : u'Münster'},
-#        {'county' : u'Münster'},
-#    ],
-#    'kreis_warendorf' : [
-#        {'county' : u'Kreis Warendorf'},
-#    ],
-#    'kreis_coesfeld' : [
-#        {'county' : u'Kreis Coesfeld'},
-#    ],
-#    'kreis_steinfurt_west' : [
-#        {'town' : u'48565'},
-#        {'village' : u'Wettringen'},
-#        {'town' : u'Ochtrup'},
-#        {'village' : u'Metelen'},
-#        {'town' : u'Horstmar'},
-#        {'village' : u'Laer'},
-#        {'village' : u'Nordwalde'},
-#        {'village' : u'Altenberge'},
-#    ],
-#    'kreis_steinfurt_ost' : [
-#        {'town' : u'Emsdetten'},
-#        {'town' : u'Neuenkirchen'},
-#        {'town' : u'Rheine'},
-#        {'town' : u'Greven'},
-#        {'village' : u'Ladbergen'},
-#        {'town' : u'Lengerich'},
-#        {'town' : u'Tecklenburg'},
-#        {'village' : u'Lienen'},
-#    ],
-#    'muenster_stadt' : [
-#        {'city_district' : u'Münster-Mitte'},
-#        {'city_district' : u'Münster-Nord'},
-#        {'city_district' : u'Münster-Ost'},
-#        {'suburb' : u'Berg Fidel'},
-#        {'suburb' : u'Gremmendorf'},
-#        {'suburb' : u'Mecklenbeck'},
-#        {'suburb' : u'Gievenbeck'},
-#        {'suburb' : u'Nienberge'},
-#        {'suburb' : u'Roxel'},
-#        {'suburb' : u'Sentruper Höhe'},
-#    ],
-#    'muenster_sued' : [
-#        {'suburb' : u'Amelsbüren'},
-#        {'suburb' : u'Hiltrup'},
-#        {'suburb' : u'Albachten'},
-#    ],
-#    'kreis_borken' : [
-#        {'town' : u'Ahaus'},
-#        {'town' : u'Bocholt'},
-#        {'town' : u'Borken'},
-#        {'town' : u'Gescher'},
-#        {'village' : u'Heek'},
-#        {'town' : u'Heiden'},
-#        {'town' : u'Isselburg'},
-#        {'village' : u'Legden'},
-#        {'town' : u'Raesfeld'},
-#        {'town' : u'Reken'},
-#        {'town' : u'Rhede'},
-#        {'village' : u'Schöppingen'},
-#        {'town' : u'Stadtlohn'},
-#        {'village' : u'Südlohn'},
-#        {'town' : u'Velen'},
-#        {'town' : u'Vreden'},
-#    ],
-#    'sassenberg' : [
-#        {'town' : u'Sassenberg'},
-#    ],
-#    'telgte' : [
-#        {'town' : u'Telgte'},
-#    ],
-#    'warendorf_stadt' : [
-#        {'town' : u'Warendorf'},
-#    ]
-    'stadt_stadtlohn' : [
-        {'town' : u'Stadtlohn'},
-    ],
-    'stadt_bocholt' : [
-        {'town' : u'Bocholt'},
-    ],
-    'stadt_telgte' : [
-        {'town' : u'Telgte'},
-    ],
-    'stadt_warendorf' : [
-        {'town' : u'Warendorf'},
-    ],
-}
-
 #ds = DomainSelector(nodesFile = 'nodes.json', graphFile = 'graph.json', printStatus = True, dataPath = '../domaenensplit_webserver_config/', targets = targets)
 #ds = DomainSelector(nodesFile = 'https://service.freifunk-muensterland.de/maps/data_legacy/nodes.json', graphFile = 'https://service.freifunk-muensterland.de/maps/data_legacy/graph.json', printStatus = True, dataPath = '../domaenensplit_webserver_config/', targets = targets, branch = None)
-ds = DomainSelector(nodesFile = 'https://service.freifunk-muensterland.de/maps/data/nodes.json', graphFile = 'https://service.freifunk-muensterland.de/maps/data/graph.json', printStatus = True, dataPath = '../domaenensplit_webserver_config/', targets = targets, branch = None)
+try:
+    ds = DomainSelector(nodesFile = args.json_path.rstrip('/')+'/nodes.json', graphFile = args.json_path.rstrip('/')+'/graph.json', printStatus = args.print_status, dataPath = args.out_path, targets = targets, branch = args.only_specific_branch)
+except HieraException:
+    print "\033[93mFailed:\033[0m Process was interrupted by a HieraException (see error messages above)."
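Combining the new arguments, a full invocation of the rewritten script could look like this (paths taken from the README examples):

```
./node_hierarcy.py --all --json-path https://service.freifunk-muensterland.de/maps/data/ --out-path /root/config/ --print-status
```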
targets.json (new file)
@@ -0,0 +1,75 @@
{
    "domaene_01" : [
        {"city_district" : "Münster-Mitte"},
        {"city_district" : "Münster-Nord"},
        {"city_district" : "Münster-Ost"},
        {"suburb" : "Berg Fidel"},
        {"suburb" : "Gremmendorf"},
        {"suburb" : "Mecklenbeck"},
        {"suburb" : "Gievenbeck"},
        {"suburb" : "Nienberge"},
        {"suburb" : "Roxel"},
        {"suburb" : "Sentruper Höhe"}
    ],
    "domaene_02" : [
        {"county" : "Kreis Coesfeld"}
    ],
    "domaene_03" : [
        {"town" : "48565"},
        {"village" : "Wettringen"},
        {"town" : "Ochtrup"},
        {"village" : "Metelen"},
        {"town" : "Horstmar"},
        {"village" : "Laer"},
        {"village" : "Nordwalde"},
        {"village" : "Altenberge"}
    ],
    "domaene_04" : [
        {"town" : "Emsdetten"},
        {"town" : "Neuenkirchen"},
        {"town" : "Rheine"},
        {"town" : "Greven"},
        {"village" : "Ladbergen"},
        {"town" : "Lengerich"},
        {"town" : "Tecklenburg"},
        {"village" : "Lienen"}
    ],
    "domaene_05" : [
        {"suburb" : "Amelsbüren"},
        {"suburb" : "Hiltrup"},
        {"suburb" : "Albachten"}
    ],
    "domaene_06" : [
        {"town" : "Ahaus"},
        {"town" : "Bocholt"},
        {"town" : "Borken"},
        {"town" : "Gescher"},
        {"village" : "Heek"},
        {"town" : "Heiden"},
        {"town" : "Isselburg"},
        {"village" : "Legden"},
        {"town" : "Raesfeld"},
        {"town" : "Reken"},
        {"town" : "Rhede"},
        {"village" : "Schöppingen"},
        {"town" : "Stadtlohn"},
        {"village" : "Südlohn"},
        {"town" : "Velen"},
        {"town" : "Vreden"}
    ],
    "domaene_07" : [
        {"town" : "Telgte"}
    ],
    "domaene_09" : [
        {"town" : "Stadtlohn"}
    ],
    "domaene_11" : [
        {"town" : "Bocholt"}
    ],
    "stadt_warendorf" : [
        {"town" : "Warendorf"}
    ],
    "domaene_14" : [
        {"county" : "Kreis Warendorf"}
    ]
}