From 3ffbac81364c30626e3b0ffb1dd8a676b4cfc09b Mon Sep 17 00:00:00 2001
From: Nils Schneider
Date: Sat, 2 Feb 2013 01:21:29 +0100
Subject: [PATCH] remove wikiimport from nodedb, move to ffhlwiki.py and call
 it from mkmap.sh

---
 ffhlwiki.py | 93 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 mkmap.sh    |  5 ++-
 nodedb.py   | 81 ----------------------------------------------
 3 files changed, 97 insertions(+), 82 deletions(-)
 create mode 100755 ffhlwiki.py

diff --git a/ffhlwiki.py b/ffhlwiki.py
new file mode 100755
index 0000000..ce4d3cb
--- /dev/null
+++ b/ffhlwiki.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+
+import json
+import argparse
+from itertools import zip_longest
+from urllib.request import urlopen
+from bs4 import BeautifulSoup
+
+def import_wikigps(url):
+    def fetch_wikitable(url):
+        f = urlopen(url)
+
+        soup = BeautifulSoup(f)
+
+        table = soup.find_all("table")[0]
+
+        rows = table.find_all("tr")
+
+        headers = []
+
+        data = []
+
+        def maybe_strip(x):
+            if isinstance(x.string, str):
+                return x.string.strip()
+            else:
+                return ""
+
+        for row in rows:
+            tds = list([maybe_strip(x) for x in row.find_all("td")])
+            ths = list([maybe_strip(x) for x in row.find_all("th")])
+
+            if any(tds):
+                data.append(tds)
+
+            if any(ths):
+                headers = ths
+
+        nodes = []
+
+        for d in data:
+            nodes.append(dict(zip(headers, d)))
+
+        return nodes
+
+    nodes = fetch_wikitable(url)
+
+    aliases = []
+
+    for node in nodes:
+        try:
+            node['MAC'] = node['MAC'].split(',')
+        except KeyError:
+            pass
+
+        try:
+            node['GPS'] = node['GPS'].split(',')
+        except KeyError:
+            pass
+
+        try:
+            node['Knotenname'] = node['Knotenname'].split(',')
+        except KeyError:
+            pass
+
+        nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
+
+        for data in nodes:
+            alias = {}
+
+            alias['mac'] = data[0].strip()
+
+            if data[1]:
+                alias['gps'] = data[1].strip()
+
+            if data[2]:
+                alias['name'] = data[2].strip()
+
+            aliases.append(alias)
+
+    return aliases
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('url', help='wiki URL')
+
+args = parser.parse_args()
+
+options = vars(args)
+
+aliases = import_wikigps(options['url'])
+
+print(json.dumps(aliases))
diff --git a/mkmap.sh b/mkmap.sh
index f43886c..5542303 100755
--- a/mkmap.sh
+++ b/mkmap.sh
@@ -17,7 +17,10 @@ if [ `cat /sys/class/net/bat0/mesh/gw_mode` = server ]; then
   GWS="$GWS -g $SELF"
 fi
 
-batctl vd json | "$(dirname "$0")"/bat2nodes.py -a "$(dirname "$0")"/aliases.json $GWS - > $DEST/nodes.json.new
+"$(dirname "$0")"/ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > "$(dirname "$0")"/aliases_hl.json
+"$(dirname "$0")"/ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > "$(dirname "$0")"/aliases_moelln.json
+
+batctl vd json | "$(dirname "$0")"/bat2nodes.py -a "$(dirname "$0")"/aliases.json -a aliases_hl.json -a aliases_moelln.json $GWS - > $DEST/nodes.json.new
 
 mv $DEST/nodes.json.new $DEST/nodes.json
 
diff --git a/nodedb.py b/nodedb.py
index e7ba62e..38b4adc 100644
--- a/nodedb.py
+++ b/nodedb.py
@@ -3,10 +3,6 @@ from functools import reduce
 from collections import defaultdict
 from node import Node, Interface
 from link import Link, LinkConnector
-from itertools import zip_longest
-
-from bs4 import BeautifulSoup
-from urllib.request import urlopen
 
 class NodeDB:
     def __init__(self):
@@ -213,83 +209,6 @@ class NodeDB:
 
                     link.type = "vpn"
 
-    def import_wikigps(self, url):
-        def fetch_wikitable(url):
-            f = urlopen(url)
-
-            soup = BeautifulSoup(f)
-
-            table = soup.find_all("table")[0]
-
-            rows = table.find_all("tr")
-
-            headers = []
-
-            data = []
-
-            def maybe_strip(x):
-                if isinstance(x.string, str):
-                    return x.string.strip()
-                else:
-                    return ""
-
-            for row in rows:
-                tds = list([maybe_strip(x) for x in row.find_all("td")])
-                ths = list([maybe_strip(x) for x in row.find_all("th")])
-
-                if any(tds):
-                    data.append(tds)
-
-                if any(ths):
-                    headers = ths
-
-            nodes = []
-
-            for d in data:
-                nodes.append(dict(zip(headers, d)))
-
-            return nodes
-
-        nodes = fetch_wikitable(url)
-
-        for node in nodes:
-            try:
-                node['MAC'] = node['MAC'].split(',')
-            except KeyError:
-                pass
-
-            try:
-                node['GPS'] = node['GPS'].split(',')
-            except KeyError:
-                pass
-
-            try:
-                node['Knotenname'] = node['Knotenname'].split(',')
-            except KeyError:
-                pass
-
-            nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
-
-
-            for data in nodes:
-                mac = data[0].strip()
-                if not mac:
-                    continue
-
-                try:
-                    node = self.maybe_node_by_fuzzy_mac(mac)
-                except KeyError:
-                    node = Node()
-                    self._nodes.append(node)
-
-                node.add_mac(mac)
-
-                if data[1]:
-                    node.gps = data[1].strip()
-
-                if data[2]:
-                    node.name = data[2].strip()
-
     # compares two MACs and decides whether they are
     # similar and could be from the same node
     def is_similar(a, b):
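
For reference: ffhlwiki.py takes a single wiki URL as its positional argument, scrapes the first table on that page and prints a JSON list of alias objects to stdout; mkmap.sh redirects that output into aliases_hl.json and aliases_moelln.json and passes both files to bat2nodes.py via -a. A manual run might look like the sketch below; the MAC address, coordinates and node name are made-up illustration values, not data from the wiki:

    $ ./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json
    $ cat aliases_hl.json
    [{"mac": "00:11:22:33:44:55", "gps": "53.86 10.68", "name": "examplenode"}]

Each object always carries "mac"; "gps" and "name" are only emitted when the corresponding cell of the wiki table is non-empty (see import_wikigps above).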