remove wikiimport from nodedb, move to ffhlwiki.py and call it from mkmap.sh

parent 9b236c462b
commit 3ffbac8136

ffhlwiki.py | 93 +++++++++++ (new executable file)
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+
+import json
+import argparse
+from itertools import zip_longest
+from urllib.request import urlopen
+from bs4 import BeautifulSoup
+
+def import_wikigps(url):
+    def fetch_wikitable(url):
+        f = urlopen(url)
+
+        soup = BeautifulSoup(f)
+
+        table = soup.find_all("table")[0]
+
+        rows = table.find_all("tr")
+
+        headers = []
+
+        data = []
+
+        def maybe_strip(x):
+            if isinstance(x.string, str):
+                return x.string.strip()
+            else:
+                return ""
+
+        for row in rows:
+            tds = list([maybe_strip(x) for x in row.find_all("td")])
+            ths = list([maybe_strip(x) for x in row.find_all("th")])
+
+            if any(tds):
+                data.append(tds)
+
+            if any(ths):
+                headers = ths
+
+        nodes = []
+
+        for d in data:
+            nodes.append(dict(zip(headers, d)))
+
+        return nodes
+
+    nodes = fetch_wikitable(url)
+
+    aliases = []
+
+    for node in nodes:
+        try:
+            node['MAC'] = node['MAC'].split(',')
+        except KeyError:
+            pass
+
+        try:
+            node['GPS'] = node['GPS'].split(',')
+        except KeyError:
+            pass
+
+        try:
+            node['Knotenname'] = node['Knotenname'].split(',')
+        except KeyError:
+            pass
+
+        nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
+
+        for data in nodes:
+            alias = {}
+
+            alias['mac'] = data[0].strip()
+
+            if data[1]:
+                alias['gps'] = data[1].strip()
+
+            if data[2]:
+                alias['name'] = data[2].strip()
+
+            aliases.append(alias)
+
+    return aliases
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('url', help='wiki URL')
+
+args = parser.parse_args()
+
+options = vars(args)
+
+aliases = import_wikigps(options['url'])
+
+print(json.dumps(aliases))
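A note on the zip_longest step above: one wiki row can list several comma-separated MACs, coordinates, and names, and each position becomes its own alias. zip_longest pads the shorter columns with None, so every MAC still yields an alias; the script implicitly assumes MAC is the longest column, since a row with more GPS entries than MACs would make data[0] None and crash on .strip(). A minimal sketch of the expansion (row values invented, assuming the wiki keeps one space-separated coordinate pair per entry):

from itertools import zip_longest

# Hypothetical wiki row after the split(',') calls above:
macs = ['00:11:22:33:44:55', ' 66:77:88:99:aa:bb']
gps = ['53.86 10.68', ' 53.87 10.69']
names = ['alpha']  # fewer names than MACs

# zip_longest pads the shorter lists with None, so the second MAC
# still produces an alias, just without a 'name' key.
for mac, coord, name in zip_longest(macs, gps, names):
    alias = {'mac': mac.strip()}
    if coord:
        alias['gps'] = coord.strip()
    if name:
        alias['name'] = name.strip()
    print(alias)
# {'mac': '00:11:22:33:44:55', 'gps': '53.86 10.68', 'name': 'alpha'}
# {'mac': '66:77:88:99:aa:bb', 'gps': '53.87 10.69'}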
mkmap.sh | 5 ++++-
@@ -17,7 +17,10 @@ if [ `cat /sys/class/net/bat0/mesh/gw_mode` = server ]; then
   GWS="$GWS -g $SELF"
 fi
 
-batctl vd json | "$(dirname "$0")"/bat2nodes.py -a "$(dirname "$0")"/aliases.json $GWS - > $DEST/nodes.json.new
+"$(dirname "$0")"/ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > "$(dirname "$0")"/aliases_hl.json
+"$(dirname "$0")"/ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > "$(dirname "$0")"/aliases_moelln.json
+
+batctl vd json | "$(dirname "$0")"/bat2nodes.py -a "$(dirname "$0")"/aliases.json -a aliases_hl.json -a aliases_moelln.json $GWS - > $DEST/nodes.json.new
 
 mv $DEST/nodes.json.new $DEST/nodes.json
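mkmap.sh simply redirects the script's stdout into the two alias files, so their content is whatever json.dumps(aliases) printed. The shape follows directly from the script above (the values here are invented):

import json

# 'mac' is always present; 'gps' and 'name' are only set when the wiki
# row had a value at that position. Presumably this array is what
# bat2nodes.py's -a option consumes (bat2nodes.py is not in this diff).
aliases = [
    {'mac': '00:11:22:33:44:55', 'gps': '53.86 10.68', 'name': 'alpha'},
    {'mac': '66:77:88:99:aa:bb', 'gps': '53.87 10.69'},
]
print(json.dumps(aliases))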
nodedb.py | 81 ----------
@@ -3,10 +3,6 @@ from functools import reduce
 from collections import defaultdict
 from node import Node, Interface
 from link import Link, LinkConnector
-from itertools import zip_longest
-
-from bs4 import BeautifulSoup
-from urllib.request import urlopen
 
 class NodeDB:
     def __init__(self):
@@ -213,83 +209,6 @@
 
         link.type = "vpn"
 
-    def import_wikigps(self, url):
-        def fetch_wikitable(url):
-            f = urlopen(url)
-
-            soup = BeautifulSoup(f)
-
-            table = soup.find_all("table")[0]
-
-            rows = table.find_all("tr")
-
-            headers = []
-
-            data = []
-
-            def maybe_strip(x):
-                if isinstance(x.string, str):
-                    return x.string.strip()
-                else:
-                    return ""
-
-            for row in rows:
-                tds = list([maybe_strip(x) for x in row.find_all("td")])
-                ths = list([maybe_strip(x) for x in row.find_all("th")])
-
-                if any(tds):
-                    data.append(tds)
-
-                if any(ths):
-                    headers = ths
-
-            nodes = []
-
-            for d in data:
-                nodes.append(dict(zip(headers, d)))
-
-            return nodes
-
-        nodes = fetch_wikitable(url)
-
-        for node in nodes:
-            try:
-                node['MAC'] = node['MAC'].split(',')
-            except KeyError:
-                pass
-
-            try:
-                node['GPS'] = node['GPS'].split(',')
-            except KeyError:
-                pass
-
-            try:
-                node['Knotenname'] = node['Knotenname'].split(',')
-            except KeyError:
-                pass
-
-            nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
-
-
-            for data in nodes:
-                mac = data[0].strip()
-                if not mac:
-                    continue
-
-                try:
-                    node = self.maybe_node_by_fuzzy_mac(mac)
-                except KeyError:
-                    node = Node()
-                    self._nodes.append(node)
-
-                node.add_mac(mac)
-
-                if data[1]:
-                    node.gps = data[1].strip()
-
-                if data[2]:
-                    node.name = data[2].strip()
-
     # compares two MACs and decides whether they are
     # similar and could be from the same node
     def is_similar(a, b):
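The trailing context cuts off at is_similar; its body is not part of this hunk. For orientation only, a guess at what such a fuzzy MAC comparison could look like; the heuristic below is an assumption, not the repository's actual rule:

def is_similar(a, b):
    # Assumed heuristic (not the real implementation): treat two MACs
    # as possibly belonging to the same node when only the last octet
    # differs by a small offset, as with addresses assigned to several
    # interfaces of one device.
    a_parts = [int(x, 16) for x in a.split(':')]
    b_parts = [int(x, 16) for x in b.split(':')]
    return a_parts[:5] == b_parts[:5] and abs(a_parts[5] - b_parts[5]) <= 2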