Compare commits

...

98 commits
rewrite ... dev

Author SHA1 Message Date
Alexander Dietrich 325f6cd1f4 Support for "bat-ffhh" interface, add generate_aliases_v2.py 2017-07-10 21:57:20 +02:00
Alexander Dietrich 209271cbf7 Use tempfiles when updating JSON 2017-07-10 21:48:25 +02:00
4ndr3 b343748fe8 simplified code 2017-06-16 23:41:11 +02:00
4ndr3 0c0fa78200 Added the Süd domain
- Switched from nodelist.json to nodes.json
- Added a nodes.json for the Süd domain
- Now reads the nodes.json files from URLs, since they live on different servers
- Searches the nodes.json files as plain strings instead of parsing them as JSON
2017-06-16 23:35:42 +02:00
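
(For illustration: the commit describes fetching each domain's nodes.json from its URL and searching it as a raw string rather than decoding the JSON. A hypothetical sketch of that approach; the URLs are placeholders, not the project's real endpoints:)

    from urllib.request import urlopen

    NODES_URLS = ['https://map.example.net/nodes.json',       # placeholder
                  'https://map.example.net/sued/nodes.json']  # placeholder

    def mac_known(mac):
        # plain substring search over the raw document, no JSON parsing
        return any(mac in urlopen(url).read().decode('utf-8')
                   for url in NODES_URLS)
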
kantorkel 4b5bad262c added node_number.py for meta.hamburg.freifunk.net 2015-12-01 21:44:45 +01:00
kantorkel 793486ff65 working setup 2015-12-01 19:29:57 +01:00
kantorkel 2043c88c03 fastd2aliases 2015-11-30 20:00:15 +01:00
kantorkel b0b6f8e0cd status srv01 2015-11-30 19:45:14 +01:00
Nils Schneider dcd6609030 Merge pull request #61 from Freifunk-Troisdorf/master
Added traffic to Statistics
2015-06-08 11:50:49 +02:00
stebifan 64dee31ebb Added traffic to Statistics 2015-06-07 23:52:32 +02:00
Nils Schneider b53a94ec0a Merge pull request #59 from ffnord/feature-ffmap-d3-jq
Added jq filter to convert new format to old format
2015-05-16 13:21:23 +02:00
Jan-Philipp Litza 11ef32178d Added jq filter to convert new format to old format
This makes it easily possible to continue using the legacy ffmap-d3
front end with the new backend while migrating.
2015-05-16 13:10:55 +02:00
Jan-Philipp Litza 71ced22b0f README.md: Extend dependencies 2015-05-15 18:20:49 +02:00
Nils Schneider dafad3df4c update aliases.json_sample 2015-05-09 22:16:44 +02:00
Nils Schneider 8fd0b73418 remove dependency on mesh_interfaces 2015-05-09 22:04:45 +02:00
Nils Schneider 3caf00be07 extract VPN interfaces from nodeinfo 2015-05-09 21:54:54 +02:00
Nils Schneider 1141aa766f nodes.py: catch ZeroDivisionError in statistics 2015-05-03 13:16:26 +02:00
Nils Schneider 1835abac7f basic nodeinfo validation (location) 2015-05-03 13:11:22 +02:00
Nils Schneider 8b8b2cc324 Merge pull request #57 from foertel/master
[DOC] include dependencies
2015-04-30 17:43:50 +02:00
Felix Oertel dccfb8c27a [DOC] include dependencies 2015-04-30 17:37:19 +02:00
Nils Schneider e3b15f61df Merge pull request #55 from mweinelt/master
Fixes a regression in --aliases argument, create env used for subprocess
2015-04-12 20:13:20 +02:00
Martin Weinelt dfcb9a3940 batman: ensure /usr/sbin and /usr/local/sbin are in PATH 2015-04-12 19:59:04 +02:00
Martin Weinelt 1ee17c0440 partially revert 3ec0874b77 2015-04-12 19:38:21 +02:00
Nils Schneider 4071a67541 Merge pull request #54 from mweinelt/master
Update README.md, Change --aliases to nargs=+
2015-04-12 19:29:37 +02:00
Martin Weinelt 3ec0874b77 Update --aliases (-a) switch to use nargs=+
This breaks calls with multiple --aliases params specified and
introduces --aliases FILE1 FILE2 FILE3 [...] instead
2015-04-12 19:26:45 +02:00
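
(The breaking change is easy to see with a minimal argparse sketch using the same option definition as backend.py below:)

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-a', '--aliases', nargs='+', default=[],
                        metavar='FILE')

    # new style: one flag, several files
    print(parser.parse_args(['-a', 'f1.json', 'f2.json']).aliases)
    # -> ['f1.json', 'f2.json']

    # old style no longer accumulates; the last occurrence wins
    print(parser.parse_args(['-a', 'f1.json', '-a', 'f2.json']).aliases)
    # -> ['f2.json']
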
Martin Weinelt 6f97932ea2 README.md: add instructions to run under unprivileged user 2015-04-12 19:24:40 +02:00
Nils Schneider 5a891c1232 Merge pull request #53 from mweinelt/master
batman: prefix sudo for batctl if not executed as root
2015-04-12 19:13:16 +02:00
Martin Weinelt 7322a14274 batman: prefix sudo for batctl if not executed as root
depends on proper sudo rule, like:
mapuser ALL = NOPASSWD: /usr/sbin/batctl
2015-04-12 18:59:10 +02:00
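
(A minimal sketch of that behaviour; lib/batman.py further down in this diff performs the same euid check before calling batctl:)

    import os
    import subprocess

    def batctl(*args):
        cmd = ['batctl'] + list(args)
        if os.geteuid() > 0:
            # not root: rely on the NOPASSWD sudoers rule shown above
            cmd.insert(0, 'sudo')
        return subprocess.check_output(cmd)
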
Nils Schneider 9a652c429c README: use new --vpn syntax 2015-04-12 12:07:48 +02:00
Nils Schneider fa740273bb output nodelist.json 2015-04-08 12:54:46 +02:00
Nils Schneider 4b88a196ac README: drop sudo explanation 2015-04-02 18:52:00 +02:00
Nils Schneider 428a9731e0 README: removing owner info using jq 2015-04-01 23:33:00 +02:00
Nils Schneider 7f198980b6 introduce GRAPH_VERSION and NODES_VERSION 2015-04-01 18:06:12 +02:00
Nils Schneider c9098b1793 set version of nodes.json to 1 2015-04-01 17:55:27 +02:00
Nils Schneider ebde2fcba2 create dest_dir if needed
fixes #48
2015-04-01 17:41:40 +02:00
Nils Schneider bb2aa112c3 make pep8 happy 2015-04-01 17:34:35 +02:00
Nils Schneider 29e2647ad5 ignored the wrong pep8 error code 2015-04-01 17:16:32 +02:00
Nils Schneider 206ea3d6ef update travis.yml to ignore PEP8 E113 Line-Length 2015-04-01 01:50:35 +02:00
Nils Schneider dd8f6b92af drop batctl vd json legacy support
fixes #47
2015-04-01 01:36:31 +02:00
Nils Schneider 98d4618156 fix pruning 2015-03-26 14:21:11 +01:00
Nils Schneider c93d6c0192 Merge pull request #45 from mweinelt/pull2
Update argument parser
2015-03-26 14:14:34 +01:00
Martin Weinelt 5b5f4a5d74 fix pep8 line-length in argparser help 2015-03-26 14:12:50 +01:00
Martin Weinelt a1fe27fc51 Update argument parser
* --mesh (-m) now accepts the interface:alfred_sock syntax to add multiple batman/alfred instances. Also multiple instances can be added at once now. Only one interface can be added without alfred socket support (available since 2014.4.0) though.
* --alfred-sock (-s) was dropped in favor of the new --mesh syntax, which adds the interface to socket relationship
* --vpn (-V) now accepts multiple mac addresses, ATTENTION: update your calls accordingly
* --prune defaults to int now
* --with-rrd was renamed from --rrd, to better reflect its boolean/toggle like state
2015-03-26 01:53:44 +01:00
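
(A sketch of the new --mesh syntax, modelled on the parsing done in backend.py later in this diff; the socket paths are examples:)

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--mesh', default=['bat0'], nargs='+')

    args = parser.parse_args(
        ['-m', 'bat0:/var/run/alfred.sock', 'bat1:/var/run/alfred1.sock'])

    for value in args.mesh:
        # either a plain interface name or "interface:alfred_sock"
        interface, sep, sock = value.partition(':')
        print(interface, sock if sep else None)
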
Nils Schneider e9c693c4c0 Merge pull request #43 from mweinelt/master
update batman interface handling in backend.py, add commenting
2015-03-25 15:38:03 +01:00
Martin Weinelt 9df369e88a update batman interface handling in backend.py, add commenting 2015-03-25 15:14:58 +01:00
Nils Schneider 30670feb31 Merge pull request #42 from mweinelt/master
pep8, batadv-vis socket support, batman gateway handling refactored
2015-03-25 14:43:04 +01:00
Martin Weinelt eb26ea9a5f pep8: fix remaining line length issues and update travis to include lib/*.py 2015-03-25 14:33:54 +01:00
Martin Weinelt 8d4856db56 lib/batman: refactor gateway handling 2015-03-25 14:11:00 +01:00
Martin Weinelt e3e5ae1615 Merge branch 'master' of https://github.com/ffnord/ffmap-backend 2015-03-25 14:07:45 +01:00
Martin Weinelt b143e3f2e5 batman: add batadv-vis socket support, needs at least alfred 2014.4.0 2015-03-25 13:27:54 +01:00
Nils Schneider dfccd01b69 Merge pull request #41 from mweinelt/master
fix regressions in alfred.py
2015-03-24 23:26:55 +01:00
Martin Weinelt 90ab26d50b fix regressions in alfred.py 2015-03-24 23:17:24 +01:00
Nils Schneider 79ec0eb9d9 Merge pull request #40 from mweinelt/master
Update package structure, add alfred socket support
2015-03-24 22:50:51 +01:00
Martin Weinelt 6fba8ad21b add alfred socket support (--alfred-sock) 2015-03-24 22:48:00 +01:00
Martin Weinelt c74b7b95fb update package structure, move non-executables to lib 2015-03-24 22:10:54 +01:00
Nils Schneider 629adc13cb create RRDs only when --rrd 2015-03-24 18:48:05 +01:00
Nils Schneider bd943e4360 Merge branch 'master' of https://github.com/mweinelt/ffmap-backend into mweinelt-master 2015-03-24 18:36:11 +01:00
Martin Weinelt 3c1140ebdf travis-ci: add pep8 check 2015-03-24 18:31:23 +01:00
Martin Weinelt 84746de048 backend.py: use argparser to set mesh default, join paths with os.path.join 2015-03-24 18:31:23 +01:00
Martin Weinelt 10e10944a5 fix alot of pep8 2015-03-24 18:31:23 +01:00
Martin Weinelt 9a8d40ea9a update .gitignore 2015-03-24 18:20:28 +01:00
Martin Weinelt efcefd8928 alfred.py: remove superfluous shebang 2015-03-24 18:19:31 +01:00
Nils Schneider 15a0f71847 alfred.py: remove unused function aliases 2015-03-24 18:18:40 +01:00
Martin Weinelt 3dd2a9e325 travis-ci: add pep8 check 2015-03-24 18:06:54 +01:00
Martin Weinelt 309971f1b0 batman.py: fix broken indentation caused by previous commit 2015-03-24 17:58:06 +01:00
Martin Weinelt 1fb61db963 backend.py: use argparser to set mesh default, join paths with os.path.join 2015-03-24 17:50:36 +01:00
Martin Weinelt d4a7c83553 update .gitignore 2015-03-24 17:41:12 +01:00
Martin Weinelt e66731154b pep8: some line length fixes 2015-03-24 17:41:02 +01:00
Martin Weinelt 5b14ed5ad9 alfred.py: remove superfluous shebang 2015-03-24 17:31:44 +01:00
Martin Weinelt e098cd8d77 alfred.py: better ask for forgiveness, than permission 2015-03-24 17:00:35 +01:00
Martin Weinelt 3291b2b6ba fix alot of pep8 2015-03-24 16:49:37 +01:00
Nils Schneider 9195ea9650 make mesh_interfaces optional 2015-03-21 15:17:50 +01:00
Nils Schneider 3ddecd26bf fix lastseen handling of non-alfred nodes 2015-03-21 10:37:09 +01:00
Nils Schneider 2a2db65bc6 update README 2015-03-21 01:26:56 +01:00
Nils Schneider 41ee81d92c alfred: restructure code, add nodeinfo, statistics, vis 2015-02-24 13:34:22 +01:00
Nils Schneider 9257aa01a6 drop ffhlwiki and mkmap 2015-02-24 13:34:22 +01:00
Nils Schneider 474a374cd6 Merge pull request #37 from NoMoKeTo/patch-1
How about a less confusing repr?
2015-02-21 19:40:43 +01:00
Nils Martin Klünder 71c2417b9d How about a less confusing repr?
„WTF, why is this a string?”
2015-02-21 19:17:49 +01:00
Nils Schneider 45d920850e Merge pull request #36 from thisco-de/master
Fixes iteration over chars in maybe_node_by_mac() when processing neighbor entries
2015-02-01 14:25:54 +01:00
Stefan Laudemann b3c629264a Changes try-except blocks around maybe_node_by_*() calls to only catch KeyError exceptions.
Semantically, all the error handling in the try-except blocks around
calls to "maybe_node_by_mac()" or "maybe_node_by_id()" in nodedb.py only
handles the case that a particular MAC address cannot be found in the
list of known nodes. If such a MAC address cannot be found in this list,
the methods properly indicate this by raising a KeyError. However, the
try-except blocks generically catch all exceptions and may therefore
mask other problems. Besides hiding real errors, such blanket
try-except blocks also make debugging quite painful.
Hence, these try-except blocks should only catch KeyError, or at least
handle it differently from other exceptions.
2015-02-01 13:50:01 +01:00
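
(In short, the pattern changes from a blanket except to catching only the expected error; a simplified stand-in, not the actual nodedb.py code:)

    nodes = {'de:ad:be:ef:00:01': 'node-a'}

    def maybe_node_by_mac(macs):
        for mac in macs:
            if mac in nodes:
                return nodes[mac]
        raise KeyError(macs)

    try:
        node = maybe_node_by_mac(('de:ad:be:ef:00:02',))
    except KeyError:
        node = None  # unknown MAC; any other exception still propagates
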
Stefan Laudemann 94f7256564 Adds missing comma to pass (x['neighbor'], ) as tuple (not as str).
As Python interprets "(elem)" as a string and not as a tuple,
maybe_node_by_mac() iterates over the single characters of the MAC
address passed as a parameter when called from parse_vis_data(). Most of
the calls already use the "(elem, )" syntax to indicate that a tuple is
passed. However, there is still one call for which this is not the case,
causing a noticeably longer runtime due to calls to maybe_node_by_mac()
that cannot yield any useful result.
2015-02-01 13:47:07 +01:00
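
(The one-character difference in behaviour:)

    mac = 'de:ad:be:ef:00:01'
    for x in (mac):   # "(mac)" is just the string: 17 one-character steps
        pass
    for x in (mac,):  # "(mac,)" is a one-element tuple: a single iteration
        pass
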
Nils Schneider ee8bbd8b3e simplify clientcount 2014-09-23 09:23:27 +02:00
Nils Schneider 6e101bc6de simplify mark_gateway 2014-09-23 09:23:27 +02:00
Nils Schneider 878267ca0e Merge pull request #31 from sargon/master
global rrd: Count online nodes instead of nodes in state
2014-09-23 09:16:51 +02:00
Daniel Ehlers b570d8956f global rrd: Count online nodes instead of nodes in state 2014-09-22 23:34:21 +02:00
Nils Schneider b7a079d418 remove dead code 2014-09-20 21:37:00 +02:00
Nils Schneider 48a1744639 firstseen 2014-09-20 21:17:15 +02:00
Jan-Philipp Litza 65655a38bb RRD: Fix updating of DS 2014-08-17 21:01:50 +02:00
Nils Schneider 26e57117ff rename rrd.py to rrddb.py 2014-08-17 19:32:13 +02:00
Nils Schneider 3780fb6cb1 fix rrd.py for real 2014-08-17 19:31:14 +02:00
Nils Schneider a5cb5f0fdb remove fuzzy matching 2014-08-17 19:10:32 +02:00
Nils Schneider 0d71de7091 fix rrd 2014-08-17 19:10:19 +02:00
Nils Schneider 2dfd11189d count clients, instead of nodes 2014-08-17 18:53:09 +02:00
Nils Schneider 663539c206 Revert "remove fuzzy matching"
This reverts commit a88b207cf1.
2014-08-12 20:51:36 +02:00
Nils Schneider a88b207cf1 remove fuzzy matching 2014-08-10 09:36:29 +02:00
Nils Schneider 263dd4ceff alfred.py: use gzip (requires alfred-json v0.2) 2014-07-05 20:23:06 +02:00
Nils Schneider 9f546be0c7 persistent state (mostly for gluon nodes), prune after 30d 2014-06-29 22:47:29 +02:00
Nils Schneider 56b884b810 remove dead code 2014-06-29 11:20:56 +02:00
33 changed files with 1336 additions and 997 deletions

.gitignore (vendored; 9 changed lines)

@ -1,3 +1,8 @@
-*.pyc
-aliases.json
+# script-generated
+aliases*.json
 nodedb/
+
+# python bytecode / cache
+*.pyc
+pycache/
+__pycache__/

.travis.yml (new file, 6 lines)

@ -0,0 +1,6 @@
sudo: false
language: python
python:
- "3.4"
install: "pip install pep8"
script: "pep8 --ignore=E501 *.py lib/*.py"

README.md (150 changed lines)

@ -1,53 +1,117 @@
 # Data for Freifunk Map, Graph and Node List
-ffmap-backend gathers information on the batman network by invoking
-batctl
-and
-batadv-vis
-as root (via sudo) and has this information placed into a target directory
-as the file "nodes.json" and also updates the directory "nodes" with graphical
-representations of uptimes and the number of clients connecting.
-The target directory is suggested to host all information for interpreting those
-node descriptions, e.g. as provided by https://github.com/ffnord/ffmap-d3.git .
-When executed without root privileges, we suggest to grant sudo permissions
-within wrappers of those binaries, so no further changes are required in other
-scripts:
-<pre>
-$ cat <<EOCAT > $HOME/batctl
-#!/bin/sh
-exec sudo /usr/sbin/batctl $*
-EOCAT
-</pre>
-and analogously for batadv-vis. The entry for /etc/sudoers could be
-whateveruser ALL=(ALL:ALL) NOPASSWD: /usr/sbin/batctl,/usr/sbin/batadv-vis,/usr/sbin/alfred-json
-The destination directory can be made directly available through apache:
-<pre>
-$ cat /etc/apache2/site-enabled/000-default
-...
-<Directory /home/whateverusername/www/>
-    Options Indexes FollowSymLinks MultiViews
-    AllowOverride None
-    Order allow,deny
-    allow from all
-</Directory>
-...
-$ cat /etc/apache2/conf.d/freifunk
-Alias /map /home/ffmap/www/
-Alias /firmware /home/freifunk/autoupdates/
-</pre>
-To execute, run
-./mkmap.sh ../www
-The script expects above described sudo-wrappers in the $HOME directory of the user executing
-the script. If those are not available, an error will occur if not executed as root. Also,
-the tool realpath optionally allows to execute the script from anywhere in the directory tree.
+[![Build Status](https://travis-ci.org/ffnord/ffmap-backend.svg?branch=master)](https://travis-ci.org/ffnord/ffmap-backend)
+
+ffmap-backend gathers information on the batman network by invoking:
+
+ * batctl (might require root),
+ * alfred-json and
+ * batadv-vis
+
+The output will be written to a directory (`-d output`).
+
+Run `backend.py --help` for a quick overview of all available options.
+
 For the script's regular execution add the following to the crontab:
 <pre>
-*/5 * * * * /home/ffmap/ffmap-backend/mkmap.sh /home/ffmap/www
+* * * * * backend.py -d /path/to/output -a /path/to/aliases.json --vpn ae:7f:58:7d:6c:2a d2:d0:93:63:f7:da
 </pre>
+
+# Dependencies
+
+- Python 3
+- Python 3 Package [Networkx](https://networkx.github.io/)
+- [alfred-json](https://github.com/tcatm/alfred-json)
+- rrdtool (if run with `--with-rrd`)
+
+# Running as unprivileged user
+
+Some information collected by ffmap-backend requires access to specific system resources.
+
+Make sure the user you are running this under is part of the group that owns the alfred socket, so
+alfred-json can access the alfred daemon.
+
+    # ls -al /var/run/alfred.sock
+    srw-rw---- 1 root alfred 0 Mar 19 22:00 /var/run/alfred.sock=
+
+    # adduser map alfred
+    Adding user `map' to group `alfred' ...
+    Adding user map to group alfred
+    Done.
+
+    $ groups
+    map alfred
+
+Running batctl requires passwordless sudo access, because it needs to access the debugfs to retrieve
+the gateway list.
+
+    # echo 'map ALL = NOPASSWD: /usr/sbin/batctl' | tee /etc/sudoers.d/map
+    map ALL = NOPASSWD: /usr/sbin/batctl
+    # chmod 0440 /etc/sudoers.d/map
+
+That should be everything. The script automatically detects if it is run in unprivileged mode and
+will prefix `sudo` where necessary.
+
+# Data format
+
+## nodes.json
+
+    { 'nodes': {
+        node_id: { 'flags': { flags },
+                   'firstseen': isoformat,
+                   'lastseen': isoformat,
+                   'nodeinfo': {...},          # copied from alfred type 158
+                   'statistics': {
+                       'uptime': double,       # seconds
+                       'memory_usage': double, # 0..1
+                       'clients': double,
+                       'rootfs_usage': double, # 0..1
+                       'loadavg': double,
+                       'gateway': mac
+                   }
+                 },
+        ...
+      }
+      'timestamp': isoformat
+    }
+
+### flags (bool)
+
+- online
+- gateway
+
+## Old data format
+
+If you want to still use the old [ffmap-d3](https://github.com/ffnord/ffmap-d3)
+front end, you can use the file `ffmap-d3.jq` to convert the new output to the
+old one:
+
+```
+jq -n -f ffmap-d3.jq \
+  --argfile nodes nodedb/nodes.json \
+  --argfile graph nodedb/graph.json \
+  > nodedb/ffmap-d3.json
+```
+
+Then point your ffmap-d3 instance to the `ffmap-d3.json` file.
+
+# Removing owner information
+
+If you'd like to redact information about the node owner from `nodes.json`,
+you may use a filter like [jq]. In this case, specify an output directory
+different from your webserver directory, e.g.:
+
+    ./backend.py -d /ffmap-data
+
+Don't write to files generated in there. ffmap-backend uses them as its
+database.
+
+After running ffmap-backend, copy `graph.json` to your webserver. Then,
+filter `nodes.json` using `jq` like this:
+
+    jq '.nodes = (.nodes | with_entries(del(.value.nodeinfo.owner)))' \
+        < /ffmap-data/nodes.json > /var/www/data/nodes.json
+
+This will remove owner information from nodes.json before copying the data
+to your webserver.
+
+[jq]: https://stedolan.github.io/jq/
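
(A minimal consumer of the nodes.json layout documented above, using the README's example output path; 'statistics' or 'clients' may be absent, hence the .get() calls:)

    import json

    with open('/ffmap-data/nodes.json') as f:
        nodedb = json.load(f)

    online = [n for n in nodedb['nodes'].values() if n['flags']['online']]
    clients = sum(n.get('statistics', {}).get('clients', 0) for n in online)
    print(len(online), 'nodes online,', clients, 'clients')
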

alfred.py (deleted file, 42 lines)

@ -1,42 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json
class alfred:
def __init__(self,request_data_type = 158):
self.request_data_type = request_data_type
def aliases(self):
output = subprocess.check_output(["alfred-json","-r",str(self.request_data_type),"-f","json"])
alfred_data = json.loads(output.decode("utf-8"))
alias = {}
for mac,node in alfred_data.items():
node_alias = {}
if 'location' in node:
try:
node_alias['gps'] = str(node['location']['latitude']) + ' ' + str(node['location']['longitude'])
except:
pass
try:
node_alias['firmware'] = node['software']['firmware']['release']
except KeyError:
pass
try:
node_alias['id'] = node['network']['mac']
except KeyError:
pass
if 'hostname' in node:
node_alias['name'] = node['hostname']
elif 'name' in node:
node_alias['name'] = node['name']
if len(node_alias):
alias[mac] = node_alias
return alias
if __name__ == "__main__":
ad = alfred()
al = ad.aliases()
print(al)

alfred_merge.py (new executable file, 42 lines)

@ -0,0 +1,42 @@
#!/usr/bin/env python3
import subprocess
import json
from collections import MutableMapping
def rec_merge(d1, d2):
'''
Update two dicts of dicts recursively,
if either mapping has leaves that are non-dicts,
the second's leaf overwrites the first's.
'''
for k, v in d1.items(): # in Python 2, use .iteritems()!
if k in d2:
# this next check is the only difference!
if all(isinstance(e, MutableMapping) for e in (v, d2[k])):
d2[k] = rec_merge(v, d2[k])
# we could further check types and merge as appropriate here.
d3 = d1.copy()
d3.update(d2)
return d3
class alfred_merge:
def __init__(self,request_data_type_1 = 158, request_data_type_2 = 159):
self.request_data_type_1 = request_data_type_1
self.request_data_type_2 = request_data_type_2
def aliases(self):
output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_1),"-f","json"])
alfred_data_1 = json.loads(output.decode("utf-8"))
output = subprocess.check_output(["/usr/local/bin/alfred-json","-z", "-r",str(self.request_data_type_2),"-f","json"])
alfred_data_2 = json.loads(output.decode("utf-8"))
return json.dumps(rec_merge(alfred_data_1, alfred_data_2))
if __name__ == "__main__":
ad = alfred_merge()
al = ad.aliases()
print(al)

aliases.json_sample (changed)

@ -1,9 +1,36 @@
-{
-    "b0:48:7a:e7:d3:64" : {
-        "name" : "Meute-AP"
-    },
-    "8e:3d:c2:10:10:28" : {
-        "name" : "holstentor",
-        "vpn" : true
-    }
-}
+[
+  {
+    "node_id": "krtek",
+    "hostname": "krtek",
+    "location": {
+      "longitude": 10.74,
+      "latitude": 53.86
+    },
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "00:25:86:e6:f1:bf"
+            ]
+          }
+        }
+      }
+    }
+  },
+  {
+    "node_id": "gw1",
+    "hostname": "burgtor",
+    "network": {
+      "mesh": {
+        "bat0": {
+          "interfaces": {
+            "tunnel": [
+              "52:54:00:f3:62:d9"
+            ]
+          }
+        }
+      }
+    }
+  }
+]

backend.py (new executable file, 194 lines)

@ -0,0 +1,194 @@
#!/usr/bin/env python3
"""
backend.py - ffmap-backend runner
https://github.com/ffnord/ffmap-backend
"""
import argparse
import json
import os
import sys
from datetime import datetime
import networkx as nx
from networkx.readwrite import json_graph
from lib import graph, nodes
from lib.alfred import Alfred
from lib.batman import Batman
from lib.rrddb import RRD
from lib.nodelist import export_nodelist
from lib.validate import validate_nodeinfos
NODES_VERSION = 1
GRAPH_VERSION = 1
def main(params):
os.makedirs(params['dest_dir'], exist_ok=True)
nodes_fn = os.path.join(params['dest_dir'], 'nodes.json')
tmp_nodes_fn = os.path.join(params['dest_dir'], 'nodes.json.tmp')
graph_fn = os.path.join(params['dest_dir'], 'graph.json')
tmp_graph_fn = os.path.join(params['dest_dir'], 'graph.json.tmp')
nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json')
tmp_nodelist_fn = os.path.join(params['dest_dir'], 'nodelist.json.tmp')
now = datetime.utcnow().replace(microsecond=0)
# parse mesh param and instantiate Alfred/Batman instances
alfred_instances = []
batman_instances = []
for value in params['mesh']:
# (1) only batman-adv if, no alfred sock
if ':' not in value:
if len(params['mesh']) > 1:
raise ValueError(
'Multiple mesh interfaces require the use of '
'alfred socket paths.')
alfred_instances.append(Alfred(unix_sockpath=None))
batman_instances.append(Batman(mesh_interface=value))
else:
# (2) batman-adv if + alfred socket
try:
batif, alfredsock = value.split(':')
alfred_instances.append(Alfred(unix_sockpath=alfredsock))
batman_instances.append(Batman(mesh_interface=batif,
alfred_sockpath=alfredsock))
except ValueError:
raise ValueError(
'Unparseable value "{0}" in --mesh parameter.'.
format(value))
# read nodedb state from node.json
try:
with open(nodes_fn, 'r') as nodedb_handle:
nodedb = json.load(nodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
# flush nodedb if it uses the old format
if 'links' in nodedb:
nodedb = {'nodes': dict()}
# set version we're going to output
nodedb['version'] = NODES_VERSION
# update timestamp and assume all nodes are offline
nodedb['timestamp'] = now.isoformat()
for node_id, node in nodedb['nodes'].items():
node['flags']['online'] = False
# integrate alfred nodeinfo
for alfred in alfred_instances:
nodeinfo = validate_nodeinfos(alfred.nodeinfo())
nodes.import_nodeinfo(nodedb['nodes'], nodeinfo,
now, assume_online=True)
# integrate static aliases data
for aliases in params['aliases']:
with open(aliases, 'r') as f:
# nodeinfo = validate_nodeinfos(json.load(f))
nodes.import_nodeinfo(nodedb['nodes'], json.load(f),
now, assume_online=False, statics=True)
nodes.reset_statistics(nodedb['nodes'])
for alfred in alfred_instances:
nodes.import_statistics(nodedb['nodes'], alfred.statistics())
# acquire gwl and visdata for each batman instance
mesh_info = []
for batman in batman_instances:
vd = batman.vis_data()
gwl = batman.gateway_list()
mesh_info.append((vd, gwl))
# update nodedb from batman-adv data
for vd, gwl in mesh_info:
nodes.import_mesh_ifs_vis_data(nodedb['nodes'], vd)
nodes.import_vis_clientcount(nodedb['nodes'], vd)
nodes.mark_vis_data_online(nodedb['nodes'], vd, now)
nodes.mark_gateways(nodedb['nodes'], gwl)
# clear the nodedb from nodes that have not been online in $prune days
if params['prune']:
nodes.prune_nodes(nodedb['nodes'], now, params['prune'])
# build nxnetworks graph from nodedb and visdata
batadv_graph = nx.DiGraph()
for vd, gwl in mesh_info:
graph.import_vis_data(batadv_graph, nodedb['nodes'], vd)
# force mac addresses to be vpn-link only (like gateways for example)
if params['vpn']:
graph.mark_vpn(batadv_graph, frozenset(params['vpn']))
def extract_tunnel(nodes):
macs = set()
for id, node in nodes.items():
try:
for mac in node["nodeinfo"]["network"]["mesh"]["bat0"]["interfaces"]["tunnel"]:
macs.add(mac)
for mac in node["nodeinfo"]["network"]["mesh"]["bat-ffhh"]["interfaces"]["tunnel"]:
macs.add(mac)
except KeyError:
pass
return macs
graph.mark_vpn(batadv_graph, extract_tunnel(nodedb['nodes']))
batadv_graph = graph.merge_nodes(batadv_graph)
batadv_graph = graph.to_undirected(batadv_graph)
# write processed data to dest dir
with open(tmp_nodes_fn, 'w') as f:
json.dump(nodedb, f)
graph_out = {'batadv': json_graph.node_link_data(batadv_graph),
'version': GRAPH_VERSION}
with open(tmp_graph_fn, 'w') as f:
json.dump(graph_out, f)
with open(tmp_nodelist_fn, 'w') as f:
json.dump(export_nodelist(now, nodedb), f)
os.rename(tmp_nodes_fn, nodes_fn)
os.rename(tmp_graph_fn, graph_fn)
os.rename(tmp_nodelist_fn, nodelist_fn)
# optional rrd graphs (trigger with --rrd)
if params['rrd']:
script_directory = os.path.dirname(os.path.realpath(__file__))
rrd = RRD(os.path.join(script_directory, 'nodedb'),
os.path.join(params['dest_dir'], 'nodes'))
rrd.update_database(nodedb['nodes'])
rrd.update_images()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--aliases',
help='Read aliases from FILE',
nargs='+', default=[], metavar='FILE')
parser.add_argument('-m', '--mesh',
default=['bat0'], nargs='+',
help='Use given batman-adv mesh interface(s) (defaults'
'to bat0); specify alfred unix socket like '
'bat0:/run/alfred0.sock.')
parser.add_argument('-d', '--dest-dir', action='store',
help='Write output to destination directory',
required=True)
parser.add_argument('-V', '--vpn', nargs='+', metavar='MAC',
help='Assume MAC addresses are part of vpn')
parser.add_argument('-p', '--prune', metavar='DAYS', type=int,
help='forget nodes offline for at least DAYS')
parser.add_argument('--with-rrd', dest='rrd', action='store_true',
default=False,
help='enable the rendering of RRD graphs (cpu '
'intensive)')
options = vars(parser.parse_args())
main(options)

(deleted file, 86 lines: the old entry-point script wiring batman, alfred, rrd and nodedb together)

@ -1,86 +0,0 @@
#!/usr/bin/env python3
import json
import fileinput
import argparse
import os
from batman import batman
from alfred import alfred
from rrd import rrd
from nodedb import NodeDB
from d3mapbuilder import D3MapBuilder
# Force encoding to UTF-8
import locale # Ensures that subsequent open()s
locale.getpreferredencoding = lambda _=None: 'UTF-8' # are UTF-8 encoded.
import sys
#sys.stdin = open('/dev/stdin', 'r')
#sys.stdout = open('/dev/stdout', 'w')
#sys.stderr = open('/dev/stderr', 'w')
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--aliases',
help='read aliases from FILE',
action='append',
metavar='FILE')
parser.add_argument('-m', '--mesh', action='append',
help='batman mesh interface')
parser.add_argument('-o', '--obscure', action='store_true',
help='obscure client macs')
parser.add_argument('-A', '--alfred', action='store_true',
help='retrieve aliases from alfred')
parser.add_argument('-d', '--destination-directory', action='store',
help='destination directory for generated files',required=True)
args = parser.parse_args()
options = vars(args)
db = NodeDB()
if options['mesh']:
for mesh_interface in options['mesh']:
bm = batman(mesh_interface)
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways(gw['mac'])
else:
bm = batman()
db.parse_vis_data(bm.vis_data(options['alfred']))
for gw in bm.gateway_list():
db.mark_gateways([gw['mac']])
if options['aliases']:
for aliases in options['aliases']:
db.import_aliases(json.load(open(aliases)))
if options['alfred']:
af = alfred()
db.import_aliases(af.aliases())
db.count_clients()
if options['obscure']:
db.obscure_clients()
scriptdir = os.path.dirname(os.path.realpath(__file__))
m = D3MapBuilder(db)
#Write nodes json
nodes_json = open(options['destination_directory'] + '/nodes.json.new','w')
nodes_json.write(m.build())
nodes_json.close()
#Move to destination
os.rename(options['destination_directory'] + '/nodes.json.new',options['destination_directory'] + '/nodes.json')
rrd = rrd(scriptdir + "/nodedb/", options['destination_directory'] + "/nodes")
rrd.update_database(db)
rrd.update_images()

batman.py (deleted file, 86 lines)

@ -1,86 +0,0 @@
#!/usr/bin/env python3
import subprocess
import json
import re
class batman:
""" Bindings for B.A.T.M.A.N. advanced batctl tool
"""
def __init__(self, mesh_interface = "bat0"):
self.mesh_interface = mesh_interface
def vis_data(self,batadv_vis=False):
vds = self.vis_data_batctl_legacy()
if batadv_vis:
vds += self.vis_data_batadv_vis()
return vds
def vis_data_helper(self,lines):
vd = []
for line in lines:
try:
utf8_line = line.decode("utf-8")
vd.append(json.loads(utf8_line))
except e:
pass
return vd
def vis_data_batctl_legacy(self):
""" Parse "batctl -m <mesh_interface> vd json -n" into an array of dictionaries.
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"vd","json","-n"])
lines = output.splitlines()
vds = self.vis_data_helper(lines)
for vd in vds:
vd['legacy'] = True
return vds
def vis_data_batadv_vis(self):
""" Parse "batadv-vis -i <mesh_interface> -f json" into an array of dictionaries.
"""
output = subprocess.check_output(["batadv-vis","-i",self.mesh_interface,"-f","json"])
lines = output.splitlines()
return self.vis_data_helper(lines)
def gateway_list(self):
""" Parse "batctl -m <mesh_interface> gwl -n" into an array of dictionaries.
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gwl","-n"])
output_utf8 = output.decode("utf-8")
# TODO Parse information
lines = output_utf8.splitlines()
own_mac = re.match(r"^.*MainIF/MAC: [^/]+/([0-9a-f:]+).*$",lines[0]).group(1)
# Remove header line
del lines[0]
# Fill gateway list
gw = []
gw_mode = self.gateway_mode()
if gw_mode['mode'] == 'server':
gw.append({'mac': own_mac, 'bandwidth': gw_mode['bandwidth']})
for line in lines:
gw_line = line.split()
if (gw_line[0] == 'No'):
continue
# When in client gateway mode maybe gw_line[0] is not the right.
gw.append({'mac':gw_line[0], 'bandwidth': gw_line[-1]})
return gw
def gateway_mode(self):
""" Parse "batctl -m <mesh_interface> gw"
"""
output = subprocess.check_output(["batctl","-m",self.mesh_interface,"gw"])
elements = output.decode("utf-8").split()
mode = elements[0]
if mode == "server":
return {'mode': 'server', 'bandwidth': elements[3]}
else:
return {'mode': mode}
if __name__ == "__main__":
bc = batman()
vd = bc.vis_data()
gw = bc.gateway_list()
for x in vd:
print(x)
print(gw)
print(bc.gateway_mode())

d3mapbuilder.py (deleted file, 36 lines)

@ -1,36 +0,0 @@
import json
import datetime
class D3MapBuilder:
def __init__(self, db):
self._db = db
def build(self):
output = dict()
now = datetime.datetime.utcnow().replace(microsecond=0)
nodes = self._db.get_nodes()
output['nodes'] = [{'name': x.name, 'id': x.id,
'macs': ', '.join(x.macs),
'geo': [float(x) for x in x.gps.split(" ")] if x.gps else None,
'firmware': x.firmware,
'flags': x.flags,
'clientcount': x.clientcount
} for x in nodes]
links = self._db.get_links()
output['links'] = [{'source': x.source.id, 'target': x.target.id,
'quality': x.quality,
'type': x.type,
'id': x.id
} for x in links]
output['meta'] = {
'timestamp': now.isoformat()
}
return json.dumps(output)

ffhlwiki.py (deleted file, 93 lines)

@ -1,93 +0,0 @@
#!/usr/bin/env python3
import json
import argparse
from itertools import zip_longest
from urllib.request import urlopen
from bs4 import BeautifulSoup
def import_wikigps(url):
def fetch_wikitable(url):
f = urlopen(url)
soup = BeautifulSoup(f)
table = soup.find_all("table")[0]
rows = table.find_all("tr")
headers = []
data = []
def maybe_strip(x):
if isinstance(x.string, str):
return x.string.strip()
else:
return ""
for row in rows:
tds = list([maybe_strip(x) for x in row.find_all("td")])
ths = list([maybe_strip(x) for x in row.find_all("th")])
if any(tds):
data.append(tds)
if any(ths):
headers = ths
nodes = []
for d in data:
nodes.append(dict(zip(headers, d)))
return nodes
nodes = fetch_wikitable(url)
aliases = {}
for node in nodes:
try:
node['MAC'] = node['MAC'].split(',')
except KeyError:
pass
try:
node['GPS'] = node['GPS'].split(',')
except KeyError:
pass
try:
node['Knotenname'] = node['Knotenname'].split(',')
except KeyError:
pass
nodes = zip_longest(node['MAC'], node['GPS'], node['Knotenname'])
for data in nodes:
alias = {}
mac = data[0].strip()
if data[1]:
alias['gps'] = data[1].strip()
if data[2]:
alias['name'] = data[2].strip()
aliases[mac] = alias
return aliases
parser = argparse.ArgumentParser()
parser.add_argument('url', help='wiki URL')
args = parser.parse_args()
options = vars(args)
aliases = import_wikigps(options['url'])
print(json.dumps(aliases))

ffmap-d3.jq (new file, 52 lines)

@ -0,0 +1,52 @@
{
"meta": {
"timestamp": $nodes.timestamp
},
"nodes": (
$graph.batadv.nodes
| map(
if has("node_id") and .node_id
then (
$nodes.nodes[.node_id] as $node
| {
"id": .id,
"uptime": $node.statistics.uptime,
"flags": ($node.flags + {"client": false}),
"name": $node.nodeinfo.hostname,
"clientcount": (if $node.statistics.clients >= 0 then $node.statistics.clients else 0 end),
"hardware": $node.nodeinfo.hardware.model,
"firmware": $node.nodeinfo.software.firmware.release,
"geo": (if $node.nodeinfo.location then [$node.nodeinfo.location.latitude, $node.nodeinfo.location.longitude] else null end),
#"lastseen": $node.lastseen,
"network": $node.nodeinfo.network
}
)
else
{
"flags": {},
"id": .id,
"geo": null,
"clientcount": 0
}
end
)
),
"links": (
$graph.batadv.links
| map(
$graph.batadv.nodes[.source].node_id as $source_id
| $graph.batadv.nodes[.target].node_id as $target_id
| select(
$source_id and $target_id and
($nodes.nodes | (has($source_id) and has($target_id)))
)
| {
"target": .target,
"source": .source,
"quality": "\(.tq), \(.tq)",
"id": ($source_id + "-" + $target_id),
"type": (if .vpn then "vpn" else null end)
}
)
)
}

gateway.json (new file, 29 lines)

@ -0,0 +1,29 @@
[
{
"node_id": "deadbfff0101",
"hostname": "gw01"
},
{
"node_id": "deadbeef0505",
"hostname": "gw02.hamburg.freifunk.net",
"network": {
"mac": "de:ad:be:ef:05:05",
"mesh": {
"bat0": {
"interfaces": {
"tunnel": [
"de:ad:be:ff:05:05",
"de:ad:be:fc:05:05",
"de:ad:bf:ff:05:05"
]
}
}
}
}
},
{
"node_id": "00163efb9d8d",
"hostname": "gw03"
}
]

generate_aliases.py (new executable file, 110 lines)

@ -0,0 +1,110 @@
#!/usr/bin/env python2
from __future__ import print_function
import json
import os
import sys
if len(sys.argv) != 2:
print('usage: ' + sys.argv[0] + ' /path/to/peers')
sys.exit(1)
peersDir = sys.argv[1]
def normalizeMac(mac):
mac = mac.lower()
normalized = ''
n = 0
for c in mac:
if c != ':':
if n > 0 and n % 2 == 0:
normalized = normalized + ':'
normalized = normalized + c
n += 1
return normalized
def toAlias(peer):
alias = {}
if not (peer.has_key('name') and peer.has_key('mac')):
return None
name = peer['name']
mac = peer['mac']
alias['node_id'] = mac.replace(':', '')
alias['hostname'] = name
if peer.has_key('geo'):
geo = peer['geo']
location = {}
if geo.has_key('lon'): location['longitude'] = geo['lon']
if geo.has_key('lat'): location['latitude'] = geo['lat']
alias['location'] = location
#alias['network'] = {}
#alias['network']['mesh_interfaces'] = [mac]
return alias
aliases = []
for filename in os.listdir(peersDir):
if len(filename) == 0 or filename[0] == '.':
continue
isGateway = False
absFilename = peersDir + '/' + filename
if os.path.isfile(absFilename):
peerFile = open(absFilename, 'r')
try:
peerLines = peerFile.readlines()
peer = {}
for line in peerLines:
parts = line.split()
if len(parts) > 2:
if parts[1] == 'Knotenname:':
peer['name'] = parts[2]
elif parts[0] == 'remote':
isGateway = True
elif parts[1] == 'MAC:':
peer['mac'] = normalizeMac(parts[2])
elif parts[1] == 'Koordinaten:' and len(parts) > 3:
try:
peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])}
except ValueError:
print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr)
elif len(parts) == 2 and parts[0] == 'key':
keyParts = parts[1].split('"')
if len(keyParts) > 1:
peer['vpn'] = keyParts[1].lower()
if isGateway:
continue
alias = toAlias(peer)
if alias:
aliases.append(alias)
except Exception as e:
print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr)
finally:
peerFile.close()
print(json.dumps(aliases))

generate_aliases_v2.py (new executable file, 112 lines)

@ -0,0 +1,112 @@
#!/usr/bin/env python2
from __future__ import print_function
import json
import os
import sys
if len(sys.argv) != 2:
print('usage: ' + sys.argv[0] + ' /path/to/peers')
sys.exit(1)
peersDir = sys.argv[1]
def normalizeMac(mac):
mac = mac.lower()
normalized = ''
n = 0
for c in mac:
if c != ':':
if n > 0 and n % 2 == 0:
normalized = normalized + ':'
normalized = normalized + c
n += 1
return normalized
def toAlias(peer):
alias = {}
if not (peer.has_key('name') and peer.has_key('mac')):
return None
name = peer['name']
mac = peer['mac']
alias['node_id'] = mac.replace(':', '')
alias['hostname'] = name
if peer.has_key('geo'):
geo = peer['geo']
location = {}
if geo.has_key('lon'): location['longitude'] = geo['lon']
if geo.has_key('lat'): location['latitude'] = geo['lat']
alias['location'] = location
#alias['network'] = {}
#alias['network']['mesh_interfaces'] = [mac]
return {'nodeinfo':alias}
aliases = {}
for filename in os.listdir(peersDir):
if len(filename) == 0 or filename[0] == '.':
continue
isGateway = False
absFilename = peersDir + '/' + filename
if os.path.isfile(absFilename):
peerFile = open(absFilename, 'r')
try:
peerLines = peerFile.readlines()
peer = {}
for line in peerLines:
parts = line.split()
if len(parts) > 2:
if parts[1] == 'Knotenname:':
peer['name'] = parts[2]
elif parts[0] == 'remote':
isGateway = True
elif parts[1] == 'MAC:':
peer['mac'] = normalizeMac(parts[2])
elif parts[1] == 'Koordinaten:' and len(parts) > 3:
try:
peer['geo'] = {'lat': float(parts[2]), 'lon': float(parts[3])}
except ValueError:
print('Error in %s: Invalid coordinates: %s' % (absFilename, parts[2:4]), file = sys.stderr)
elif len(parts) == 2 and parts[0] == 'key':
keyParts = parts[1].split('"')
if len(keyParts) > 1:
peer['vpn'] = keyParts[1].lower()
if isGateway:
continue
alias = toAlias(peer)
if alias:
tmpid = alias['nodeinfo']['node_id']
# alias['nodeinfo'].pop('node_id')
aliases[tmpid] = alias
except Exception as e:
print('Error in %s, ignoring peer: %s' % (absFilename, e), file = sys.stderr)
finally:
peerFile.close()
print(json.dumps(aliases))

(deleted file, 13 lines: the mac_to_hostid helper)

@ -1,13 +0,0 @@
import re
from functools import reduce
def mac_to_hostid(mac):
int_mac = list(map(lambda x: int(x, 16), mac.split(":")))
int_mac[0] ^= 2
bytes = map(lambda x: "%02x" % x, int_mac[0:3] + [0xff, 0xfe] + int_mac[3:])
return reduce(lambda a, i:
[a[0] + ("" if i == 0 else ":") + a[1] + a[2]] + a[3:],
range(0, 4),
[""] + list(bytes)
)

lib/GlobalRRD.py (moved from GlobalRRD.py)

@ -1,6 +1,8 @@
 import os
 import subprocess
-from RRD import RRD, DS, RRA
+from lib.RRD import DS, RRA, RRD
 class GlobalRRD(RRD):
@ -10,17 +12,21 @@ class GlobalRRD(RRD):
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),     # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 60, 744),    # 31 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 1440, 1780), # ~5 years of 1 day samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 31 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 744),
+        # ~5 years of 1 day samples
+        RRA('AVERAGE', 0.5, 1440, 1780),
     ]
     def __init__(self, directory):
         super().__init__(os.path.join(directory, "nodes.rrd"))
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)
-    def update(self, nodeCount, clientCount):
-        super().update({'nodes': nodeCount, 'clients': clientCount})
+    # TODO: fix this, python does not support function overloading
+    def update(self, node_count, client_count):
+        super().update({'nodes': node_count, 'clients': client_count})
     def graph(self, filename, timeframe):
@ -30,6 +36,5 @@ class GlobalRRD(RRD):
             'DEF:nodes=' + self.filename + ':nodes:AVERAGE',
             'LINE1:nodes#F00:nodes\\l',
             'DEF:clients=' + self.filename + ':clients:AVERAGE',
-            'LINE2:clients#00F:clients',
-            ]
+            'LINE2:clients#00F:clients']
         subprocess.check_output(args)

lib/NodeRRD.py (moved from NodeRRD.py)

@ -1,7 +1,8 @@
 import os
 import subprocess
-from node import Node
-from RRD import RRD, DS, RRA
+from lib.RRD import DS, RRA, RRD
 class NodeRRD(RRD):
@ -9,13 +10,17 @@ class NodeRRD(RRD):
         DS('clients', 'GAUGE', 120, 0, float('NaN')),
     ]
     rra_list = [
-        RRA('AVERAGE', 0.5, 1, 120),   # 2 hours of 1 minute samples
-        RRA('AVERAGE', 0.5, 5, 1440),  # 5 days of 5 minute samples
-        RRA('AVERAGE', 0.5, 60, 720),  # 30 days of 1 hour samples
-        RRA('AVERAGE', 0.5, 720, 730), # 1 year of 12 hour samples
+        # 2 hours of 1 minute samples
+        RRA('AVERAGE', 0.5, 1, 120),
+        # 5 days of 5 minute samples
+        RRA('AVERAGE', 0.5, 5, 1440),
+        # 30 days of 1 hour samples
+        RRA('AVERAGE', 0.5, 60, 720),
+        # 1 year of 12 hour samples
+        RRA('AVERAGE', 0.5, 720, 730),
    ]
-    def __init__(self, filename, node = None):
+    def __init__(self, filename, node=None):
         """
         Create a new RRD for a given node.
@ -23,22 +28,25 @@ class NodeRRD(RRD):
         """
         self.node = node
         super().__init__(filename)
-        self.ensureSanity(self.ds_list, self.rra_list, step=60)
+        self.ensure_sanity(self.ds_list, self.rra_list, step=60)
     @property
     def imagename(self):
-        return os.path.basename(self.filename).rsplit('.', 2)[0] + ".png"
+        return "{basename}.png".format(
+            basename=os.path.basename(self.filename).rsplit('.', 2)[0])
+    # TODO: fix this, python does not support function overloading
     def update(self):
-        super().update({'upstate': 1, 'clients': self.node.clients})
+        super().update({'upstate': int(self.node['flags']['online']),
+                        'clients': self.node['statistics']['clients']})
     def graph(self, directory, timeframe):
         """
         Create a graph in the given directory. The file will be named
         basename.png if the RRD file is named basename.rrd
         """
-        args = ['rrdtool','graph', os.path.join(directory, self.imagename),
-                '-s', '-' + timeframe ,
+        args = ['rrdtool', 'graph', os.path.join(directory, self.imagename),
+                '-s', '-' + timeframe,
                 '-w', '800',
                 '-h', '400',
                 '-l', '0',
@ -49,6 +57,5 @@ class NodeRRD(RRD):
                 'CDEF:d=clients,UN,maxc,UN,1,maxc,IF,*',
                 'AREA:c#0F0:up\\l',
                 'AREA:d#F00:down\\l',
-                'LINE1:c#00F:clients connected\\l',
-                ]
+                'LINE1:c#00F:clients connected\\l']
         subprocess.check_output(args)

lib/RRD.py (moved from RRD.py)

@ -1,19 +1,20 @@
 import subprocess
 import re
-import io
 import os
-from tempfile import TemporaryFile
 from operator import xor, eq
 from functools import reduce
 from itertools import starmap
 import math
 class RRDIncompatibleException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition and cannot be
     upgraded to it.
     """
     pass
 class RRDOutdatedException(Exception):
     """
     Is raised when an RRD doesn't have the desired definition, but can be
@ -25,7 +26,8 @@ if not hasattr(__builtins__, "FileNotFoundError"):
     class FileNotFoundError(Exception):
         pass
-class RRD:
+class RRD(object):
     """
     An RRD is a Round Robin Database, a database which forgets old data and
     aggregates multiple records into new ones.
@ -49,7 +51,7 @@ class RRD:
     def _exec_rrdtool(self, cmd, *args, **kwargs):
         pargs = ["rrdtool", cmd, self.filename]
-        for k,v in kwargs.items():
+        for k, v in kwargs.items():
             pargs.extend(["--" + k, str(v)])
         pargs.extend(args)
         subprocess.check_output(pargs)
@ -57,7 +59,7 @@ class RRD:
     def __init__(self, filename):
         self.filename = filename
-    def ensureSanity(self, ds_list, rra_list, **kwargs):
+    def ensure_sanity(self, ds_list, rra_list, **kwargs):
         """
         Create or upgrade the RRD file if necessary to contain all DS in
         ds_list. If it needs to be created, the RRAs in rra_list and any kwargs
@ -65,13 +67,13 @@ class RRD:
         database are NOT modified!
         """
         try:
-            self.checkSanity(ds_list)
+            self.check_sanity(ds_list)
         except FileNotFoundError:
             self.create(ds_list, rra_list, **kwargs)
         except RRDOutdatedException:
             self.upgrade(ds_list)
-    def checkSanity(self, ds_list=()):
+    def check_sanity(self, ds_list=()):
         """
         Check if the RRD file exists and contains (at least) the DS listed in
         ds_list.
@ -80,9 +82,12 @@ class RRD:
             raise FileNotFoundError(self.filename)
         info = self.info()
         if set(ds_list) - set(info['ds'].values()) != set():
-            if set((ds.name, ds.type) for ds in ds_list) \
-                    - set((ds.name, ds.type) for ds in info['ds'].values()) != set():
-                raise RRDIncompatibleException()
+            for ds in ds_list:
+                if ds.name in info['ds'] and\
+                   ds.type != info['ds'][ds.name].type:
+                    raise RRDIncompatibleException(
+                        "{} is {} but should be {}".format(
+                            ds.name, ds.type, info['ds'][ds.name].type))
             else:
                 raise RRDOutdatedException()
@ -105,8 +110,10 @@ class RRD:
             if ds.name in info['ds']:
                 old_ds = info['ds'][ds.name]
                 if info['ds'][ds.name].type != ds.type:
-                    raise RuntimeError('Cannot convert existing DS "%s" from type "%s" to "%s"' %
-                                       (ds.name, old_ds.type, ds.type))
+                    raise RuntimeError(
+                        "Cannot convert existing DS '{}'"
+                        "from type '{}' to '{}'".format(
+                            ds.name, old_ds.type, ds.type))
                 ds.index = old_ds.index
                 new_ds[ds.index] = ds
             else:
@ -116,12 +123,11 @@ class RRD:
         dump = subprocess.Popen(
             ["rrdtool", "dump", self.filename],
-            stdout=subprocess.PIPE
-        )
+            stdout=subprocess.PIPE)
         restore = subprocess.Popen(
             ["rrdtool", "restore", "-", self.filename + ".new"],
-            stdin=subprocess.PIPE
-        )
+            stdin=subprocess.PIPE)
         echo = True
         ds_definitions = True
         for line in dump.stdout:
@ -143,19 +149,17 @@ class RRD:
                     <value>%s</value>
                     <unknown_sec> %i </unknown_sec>
                     </ds>
-                    """ % (
-                    ds.name,
-                    ds.type,
-                    ds.args[0],
-                    ds.args[1],
-                    ds.args[2],
-                    ds.last_ds,
-                    ds.value,
-                    ds.unknown_sec)
-                    , "utf-8"))
+                    """ % (ds.name,
+                           ds.type,
+                           ds.args[0],
+                           ds.args[1],
+                           ds.args[2],
+                           ds.last_ds,
+                           ds.value,
+                           ds.unknown_sec), "utf-8"))
             if b'</cdp_prep>' in line:
-                restore.stdin.write(added_ds_num*b"""
+                restore.stdin.write(added_ds_num * b"""
                 <ds>
                 <primary_value> NaN </primary_value>
                 <secondary_value> NaN </secondary_value>
@ -169,7 +173,7 @@ class RRD:
                 restore.stdin.write(
                     line.replace(
                         b'</row>',
-                        (added_ds_num*b'<v>NaN</v>')+b'</row>'
+                        (added_ds_num * b'<v>NaN</v>') + b'</row>'
                     )
                 )
@ -177,15 +181,8 @@ class RRD:
                 echo = True
         dump.stdout.close()
         restore.stdin.close()
-        try:
-            dump.wait(1)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-        try:
-            restore.wait(2)
-        except subprocess.TimeoutExpired:
-            dump.kill()
-            raise RuntimeError("rrdtool restore process killed")
+        dump.wait()
+        restore.wait()
         os.rename(self.filename + ".new", self.filename)
         self._cached_info = None
@ -244,7 +241,8 @@ class RRD:
         for line in out.splitlines():
             base = info
             for match in self._info_regex.finditer(line):
-                section, key, name, value = match.group("section", "key", "name", "value")
+                section, key, name, value = match.group(
+                    "section", "key", "name", "value")
                 if section and key:
                     try:
                         key = int(key)
@ -265,7 +263,8 @@ class RRD:
                     base[name] = value
         dss = {}
         for name, ds in info['ds'].items():
-            ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'], ds['min'], ds['max'])
+            ds_obj = DS(name, ds['type'], ds['minimal_heartbeat'],
+                        ds['min'], ds['max'])
             ds_obj.index = ds['index']
             ds_obj.last_ds = ds['last_ds']
             ds_obj.value = ds['value']
@ -274,12 +273,14 @@ class RRD:
         info['ds'] = dss
         rras = []
         for rra in info['rra'].values():
-            rras.append(RRA(rra['cf'], rra['xff'], rra['pdp_per_row'], rra['rows']))
+            rras.append(RRA(rra['cf'], rra['xff'],
+                            rra['pdp_per_row'], rra['rows']))
         info['rra'] = rras
         self._cached_info = info
         return info
-class DS:
+class DS(object):
     """
     DS stands for Data Source and represents one line of data points in a Round
     Robin Database (RRD).
@ -291,6 +292,7 @@ class DS:
     last_ds = 'U'
     value = 0
     unknown_sec = 0
     def __init__(self, name, dst, *args):
         self.name = name
         self.type = dst
@ -300,7 +302,7 @@ class DS:
         return "DS:%s:%s:%s" % (
             self.name,
             self.type,
-            ":".join(map(str, self._nan_to_U_args()))
+            ":".join(map(str, self._nan_to_u_args()))
         )
     def __repr__(self):
@ -312,22 +314,23 @@ class DS:
         )
     def __eq__(self, other):
-        return all(starmap(eq, zip(self._compare_keys(), other._compare_keys())))
+        return all(starmap(eq, zip(self.compare_keys(), other.compare_keys())))
     def __hash__(self):
-        return reduce(xor, map(hash, self._compare_keys()))
+        return reduce(xor, map(hash, self.compare_keys()))
-    def _nan_to_U_args(self):
+    def _nan_to_u_args(self):
         return tuple(
             'U' if type(arg) is float and math.isnan(arg)
             else arg
             for arg in self.args
         )
-    def _compare_keys(self):
+    def compare_keys(self):
-        return (self.name, self.type, self._nan_to_U_args())
+        return self.name, self.type, self._nan_to_u_args()
-class RRA:
+class RRA(object):
     def __init__(self, cf, *args):
         self.cf = cf
         self.args = args

lib/__init__.py (new file, 1 line)

@ -0,0 +1 @@
__author__ = 'hexa'

lib/alfred.py (new file, 33 lines)

@ -0,0 +1,33 @@
import subprocess
import json
import os
class Alfred(object):
"""
Bindings for the alfred-json utility
"""
def __init__(self, unix_sockpath=None):
self.unix_sock = unix_sockpath
if unix_sockpath is not None and not os.path.exists(unix_sockpath):
raise RuntimeError('alfred: invalid unix socket path given')
def _fetch(self, data_type):
cmd = ['/usr/local/bin/alfred-json',
'-z',
'-f', 'json',
'-r', str(data_type)]
if self.unix_sock:
cmd.extend(['-s', self.unix_sock])
output = subprocess.check_output(cmd)
return json.loads(output.decode("utf-8")).values()
def nodeinfo(self):
return self._fetch(158)
def statistics(self):
return self._fetch(159)
def vis(self):
return self._fetch(160)

lib/batman.py (new file, 98 lines)

@ -0,0 +1,98 @@
import subprocess
import json
import os
import re
class Batman(object):
"""
Bindings for B.A.T.M.A.N. Advanced
commandline interface "batctl"
"""
def __init__(self, mesh_interface='bat0', alfred_sockpath=None):
self.mesh_interface = mesh_interface
self.alfred_sock = alfred_sockpath
# ensure /usr/sbin and /usr/local/sbin are in PATH
env = os.environ
path = set(env['PATH'].split(':'))
path.add('/usr/sbin/')
path.add('/usr/local/sbin')
env['PATH'] = ':'.join(path)
self.environ = env
# compile regular expressions only once on startup
self.mac_addr_pattern = re.compile(r'(([a-z0-9]{2}:){5}[a-z0-9]{2})')
def vis_data(self):
return self.vis_data_batadv_vis()
@staticmethod
def vis_data_helper(lines):
vd_tmp = []
for line in lines:
try:
utf8_line = line.decode('utf-8')
vd_tmp.append(json.loads(utf8_line))
except UnicodeDecodeError:
pass
return vd_tmp
def vis_data_batadv_vis(self):
"""
Parse "batadv-vis -i <mesh_interface> -f json"
into an array of dictionaries.
"""
cmd = ['batadv-vis', '-i', self.mesh_interface, '-f', 'json']
if self.alfred_sock:
cmd.extend(['-u', self.alfred_sock])
output = subprocess.check_output(cmd, env=self.environ)
lines = output.splitlines()
return self.vis_data_helper(lines)
def gateway_list(self):
"""
Parse "batctl -m <mesh_interface> gwl -n"
into an array of dictionaries.
"""
cmd = ['batctl', '-m', self.mesh_interface, 'gwl', '-n']
if os.geteuid() > 0:
cmd.insert(0, 'sudo')
output = subprocess.check_output(cmd, env=self.environ)
output_utf8 = output.decode('utf-8')
rows = output_utf8.splitlines()
gateways = []
# local gateway
header = rows.pop(0)
mode, bandwidth = self.gateway_mode()
if mode == 'server':
local_gw_mac = self.mac_addr_pattern.search(header).group(0)
gateways.append(local_gw_mac)
# remote gateway(s)
for row in rows:
match = self.mac_addr_pattern.search(row)
if match:
gateways.append(match.group(1))
return gateways
def gateway_mode(self):
"""
Parse "batctl -m <mesh_interface> gw"
return: tuple mode, bandwidth, if mode != server then bandwidth is None
"""
cmd = ['batctl', '-m', self.mesh_interface, 'gw']
if os.geteuid() > 0:
cmd.insert(0, 'sudo')
output = subprocess.check_output(cmd, env=self.environ)
chunks = output.decode("utf-8").split()
return chunks[0], chunks[3] if 3 in chunks else None
if __name__ == "__main__":
bc = Batman()
vd = bc.vis_data()
gw = bc.gateway_list()

lib/graph.py (new file, 84 lines)

@ -0,0 +1,84 @@
from functools import reduce
from itertools import chain
import networkx as nx
from lib.nodes import build_mac_table
def import_vis_data(graph, nodes, vis_data):
macs = build_mac_table(nodes)
nodes_a = map(lambda d: 2 * [d['primary']],
filter(lambda d: 'primary' in d, vis_data))
nodes_b = map(lambda d: [d['secondary'], d['of']],
filter(lambda d: 'secondary' in d, vis_data))
graph.add_nodes_from(map(lambda a, b:
(a, dict(primary=b, node_id=macs.get(b))),
*zip(*chain(nodes_a, nodes_b))))
edges = filter(lambda d: 'neighbor' in d, vis_data)
graph.add_edges_from(map(lambda d: (d['router'], d['neighbor'],
dict(tq=float(d['label']))), edges))
def mark_vpn(graph, vpn_macs):
components = map(frozenset, nx.weakly_connected_components(graph))
components = filter(vpn_macs.intersection, components)
nodes = reduce(lambda a, b: a | b, components, set())
for node in nodes:
for k, v in graph[node].items():
v['vpn'] = True
def to_multigraph(graph):
def f(a):
node = graph.node[a]
return node['primary'] if node else a
def map_node(node, data):
return (data['primary'],
dict(node_id=data['node_id'])) if data else (node, dict())
digraph = nx.MultiDiGraph()
digraph.add_nodes_from(map(map_node, *zip(*graph.nodes_iter(data=True))))
digraph.add_edges_from(map(lambda a, b, data: (f(a), f(b), data),
*zip(*graph.edges_iter(data=True))))
return digraph
def merge_nodes(graph):
def merge_edges(data):
tq = min(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn)
multigraph = to_multigraph(graph)
digraph = nx.DiGraph()
digraph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(
multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
digraph.add_edges_from(edges)
return digraph
def to_undirected(graph):
multigraph = nx.MultiGraph()
multigraph.add_nodes_from(graph.nodes_iter(data=True))
multigraph.add_edges_from(graph.edges_iter(data=True))
def merge_edges(data):
tq = max(map(lambda d: d['tq'], data))
vpn = all(map(lambda d: d.get('vpn', False), data))
return dict(tq=tq, vpn=vpn, bidirect=len(data) == 2)
graph = nx.Graph()
graph.add_nodes_from(multigraph.nodes_iter(data=True))
edges = chain.from_iterable([[(e, d, merge_edges(
multigraph[e][d].values()))
for d in multigraph[e]] for e in multigraph])
graph.add_edges_from(edges)
return graph
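# Taken together these helpers form a small pipeline; a hedged sketch of how a
# caller could chain them (nodes, vis_data and vpn_macs are assumed inputs):
#     graph = nx.DiGraph()
#     import_vis_data(graph, nodes, vis_data)    # one directed edge per vis entry
#     mark_vpn(graph, frozenset(vpn_macs))       # flag links touching VPN MACs
#     graph = to_undirected(merge_nodes(graph))  # collapse ifaces, merge directions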

27
lib/nodelist.py Normal file

@@ -0,0 +1,27 @@
def export_nodelist(now, nodedb):
nodelist = list()
for node_id, node in nodedb["nodes"].items():
node_out = dict()
node_out["id"] = node_id
node_out["name"] = node["nodeinfo"]["hostname"]
if "location" in node["nodeinfo"]:
node_out["position"] = {"lat": node["nodeinfo"]["location"]["latitude"],
"long": node["nodeinfo"]["location"]["longitude"]}
node_out["status"] = dict()
node_out["status"]["online"] = node["flags"]["online"]
if "firstseen" in node:
node_out["status"]["firstcontact"] = node["firstseen"]
if "lastseen" in node:
node_out["status"]["lastcontact"] = node["lastseen"]
if "clients" in node["statistics"]:
node_out["status"]["clients"] = node["statistics"]["clients"]
nodelist.append(node_out)
return {"version": "1.0.1", "nodes": nodelist, "updated_at": now.isoformat()}

202
lib/nodes.py Normal file

@@ -0,0 +1,202 @@
from collections import Counter, defaultdict
from datetime import datetime
from functools import reduce
def build_mac_table(nodes):
macs = dict()
for node_id, node in nodes.items():
try:
macs[node['network']['mac']] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh_interfaces']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']:
macs[mac] = node_id
except KeyError:
pass
try:
for mac in node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']:
macs[mac] = node_id
except KeyError:
pass
return macs
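# A hedged example of the lookup this builds (node id and MAC invented):
#     build_mac_table({'deadbeef0100': {'nodeinfo': {'network': {'mesh': {
#         'bat0': {'interfaces': {'tunnel': ['de:ad:be:ef:01:00']}}}}}}})
#     -> {'de:ad:be:ef:01:00': 'deadbeef0100'}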
def prune_nodes(nodes, now, days):
prune = []
for node_id, node in nodes.items():
if 'lastseen' not in node:
prune.append(node_id)
continue
lastseen = datetime.strptime(node['lastseen'], '%Y-%m-%dT%H:%M:%S')
delta = (now - lastseen).days
if delta >= days:
prune.append(node_id)
for node_id in prune:
del nodes[node_id]
def mark_online(node, now):
node['lastseen'] = now.isoformat()
node.setdefault('firstseen', now.isoformat())
node['flags']['online'] = True
def override_fields(dest, src, fields):
for field in fields:
if field in src:
dest[field] = src[field]
else:
dest.pop(field, None)
def import_nodeinfo(nodes, nodeinfos, now, assume_online=False, statics=False):
for nodeinfo in filter(lambda d: 'node_id' in d, nodeinfos):
node = nodes.setdefault(nodeinfo['node_id'], {'flags': {'online': False, 'gateway': False}})
if statics:
node['nodeinfo'] = node.setdefault('nodeinfo', {})
override_fields(node['nodeinfo'], nodeinfo, ['hostname', 'location', 'node_id'])
else:
node['nodeinfo'] = nodeinfo
if assume_online:
mark_online(node, now)
def reset_statistics(nodes):
for node in nodes.values():
node['statistics'] = {'clients': 0}
def import_statistics(nodes, stats):
def add(node, statistics, target, source, f=lambda d: d):
try:
node['statistics'][target] = f(reduce(dict.__getitem__,
source,
statistics))
except (KeyError, TypeError, ZeroDivisionError):
pass
macs = build_mac_table(nodes)
stats = filter(lambda d: 'node_id' in d, stats)
stats = filter(lambda d: d['node_id'] in nodes, stats)
for node, stats in map(lambda d: (nodes[d['node_id']], d), stats):
add(node, stats, 'clients', ['clients', 'total'])
add(node, stats, 'gateway', ['gateway'], lambda d: macs.get(d, d))
add(node, stats, 'uptime', ['uptime'])
add(node, stats, 'loadavg', ['loadavg'])
add(node, stats, 'memory_usage', ['memory'],
lambda d: 1 - d['free'] / d['total'])
add(node, stats, 'rootfs_usage', ['rootfs_usage'])
add(node, stats, 'traffic', ['traffic'])
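# A hedged example (record invented): a stats entry
#     {'node_id': 'deadbeef0100', 'clients': {'total': 5},
#      'memory': {'free': 25600, 'total': 102400}}
# yields node['statistics'] == {'clients': 5, 'memory_usage': 0.75};
# missing source keys are simply skipped via the KeyError handler.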
def import_mesh_ifs_vis_data(nodes, vis_data):
macs = build_mac_table(nodes)
mesh_ifs = defaultdict(lambda: set())
for line in filter(lambda d: 'secondary' in d, vis_data):
primary = line['of']
mesh_ifs[primary].add(primary)
mesh_ifs[primary].add(line['secondary'])
def if_to_node(ifs):
a = filter(lambda d: d in macs, ifs)
a = map(lambda d: nodes[macs[d]], a)
try:
return next(a), ifs
except StopIteration:
return None
mesh_nodes = filter(lambda d: d, map(if_to_node, mesh_ifs.values()))
for v in mesh_nodes:
node = v[0]
ifs = set()
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh_interfaces']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['wireless']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['tunnel']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat-ffhh']['interfaces']['tunnel']))
except KeyError:
pass
try:
ifs = ifs.union(set(node['nodeinfo']['network']['mesh']['bat0']['interfaces']['other']))
except KeyError:
pass
node['nodeinfo']['network']['mesh_interfaces'] = list(ifs | v[1])
def import_vis_clientcount(nodes, vis_data):
macs = build_mac_table(nodes)
data = filter(lambda d: d.get('label', None) == 'TT', vis_data)
data = filter(lambda d: d['router'] in macs, data)
data = map(lambda d: macs[d['router']], data)
for node_id, clientcount in Counter(data).items():
nodes[node_id]['statistics'].setdefault('clients', clientcount)
def mark_gateways(nodes, gateways):
macs = build_mac_table(nodes)
gateways = filter(lambda d: d in macs, gateways)
for node in map(lambda d: nodes[macs[d]], gateways):
node['flags']['gateway'] = True
def mark_vis_data_online(nodes, vis_data, now):
macs = build_mac_table(nodes)
online = set()
for line in vis_data:
if 'primary' in line:
online.add(line['primary'])
elif 'secondary' in line:
online.add(line['secondary'])
elif 'gateway' in line:
# This matches clients' MACs.
# On pre-Gluon nodes the primary MAC will be one of them.
online.add(line['gateway'])
for mac in filter(lambda d: d in macs, online):
mark_online(nodes[macs[mac]], now)
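# A hedged sketch of the order a backend run could apply these helpers in
# (the backend.py wiring and the input variables are assumed):
#     reset_statistics(nodes)
#     import_nodeinfo(nodes, nodeinfos, now)
#     import_statistics(nodes, statistics)
#     import_mesh_ifs_vis_data(nodes, vis_data)
#     import_vis_clientcount(nodes, vis_data)
#     mark_vis_data_online(nodes, vis_data, now)
#     mark_gateways(nodes, gateways)
#     prune_nodes(nodes, now, 30)   # drop nodes unseen for 30 days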

54
lib/rrddb.py Normal file

@@ -0,0 +1,54 @@
#!/usr/bin/env python3
import time
import os
from lib.GlobalRRD import GlobalRRD
from lib.NodeRRD import NodeRRD
class RRD(object):
def __init__(self,
database_directory,
image_path,
display_time_global="7d",
display_time_node="1d"):
self.dbPath = database_directory
self.globalDb = GlobalRRD(self.dbPath)
self.imagePath = image_path
self.displayTimeGlobal = display_time_global
self.displayTimeNode = display_time_node
        # round down to the last full minute (integer division, not /)
        self.currentTimeInt = (int(time.time()) // 60) * 60
self.currentTime = str(self.currentTimeInt)
try:
os.stat(self.imagePath)
except OSError:
os.mkdir(self.imagePath)
def update_database(self, nodes):
online_nodes = dict(filter(
lambda d: d[1]['flags']['online'], nodes.items()))
client_count = sum(map(
lambda d: d['statistics']['clients'], online_nodes.values()))
self.globalDb.update(len(online_nodes), client_count)
for node_id, node in online_nodes.items():
rrd = NodeRRD(os.path.join(self.dbPath, node_id + '.rrd'), node)
rrd.update()
def update_images(self):
self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"),
self.displayTimeGlobal)
nodedb_files = os.listdir(self.dbPath)
for file_name in nodedb_files:
if not os.path.isfile(os.path.join(self.dbPath, file_name)):
continue
            node_name = os.path.basename(file_name).split('.')
            if len(node_name) == 2 and node_name[1] == 'rrd' and node_name[0] != 'nodes':
rrd = NodeRRD(os.path.join(self.dbPath, file_name))
rrd.graph(self.imagePath, self.displayTimeNode)
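# A minimal usage sketch (paths invented):
#     rrd = RRD('/var/lib/ffmap/db', '/var/www/meshviewer/image')
#     rrd.update_database(nodes)   # one .rrd per online node plus the global DB
#     rrd.update_images()          # globalGraph.png plus one graph per node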

19
lib/validate.py Normal file

@@ -0,0 +1,19 @@
def validate_nodeinfos(nodeinfos):
result = []
for nodeinfo in nodeinfos:
if validate_nodeinfo(nodeinfo):
result.append(nodeinfo)
return result
def validate_nodeinfo(nodeinfo):
if 'location' in nodeinfo:
if 'latitude' not in nodeinfo['location'] or 'longitude' not in nodeinfo['location']:
return False
return True
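# Hedged examples (coordinates invented):
#     validate_nodeinfo({'location': {'latitude': 53.56}})     # False, longitude missing
#     validate_nodeinfo({'location': {'latitude': 53.56,
#                                     'longitude': 10.01}})    # True
#     validate_nodeinfo({'hostname': 'no-location-node'})      # True, location is optional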

15
link.py

@@ -1,15 +0,0 @@
class Link():
def __init__(self):
self.id = None
self.source = None
self.target = None
self.quality = None
self.type = None
class LinkConnector():
def __init__(self):
self.id = None
self.interface = None
def __repr__(self):
return "LinkConnector(%d, %s)" % (self.id, self.interface)


@@ -1,15 +1,7 @@
 #!/bin/bash
-
-set -e
-
-DEST=$1
-
-[ "$DEST" ] || exit 1
-
-cd "$(dirname "$0")"/
-
-./ffhlwiki.py http://freifunk.metameute.de/wiki/Knoten > aliases_hl.json
-./ffhlwiki.py http://freifunk.metameute.de/wiki/Moelln:Knoten > aliases_moelln.json
-
-./bat2nodes.py -A -a aliases.json -a aliases_hl.json -a aliases_moelln.json -d $DEST
+FFMAPPATH='/opt/ffmap-backend/'
+PEERS="/etc/fastd/ffhh-mesh-vpn/peers"
+
+python2 $FFMAPPATH/generate_aliases.py $PEERS > $FFMAPPATH/aliases.json
+#python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 --vpn de:ad:be:ff:05:05 --vpn de:ad:be:ff:05:06 --vpn de:ad:be:ff:03:03 --vpn de:ad:be:ff:22:22 --vpn de:ad:be:ff:22:23 --vpn de:ad:be:ff:88:88 --vpn de:ad:be:ff:88:89 --vpn de:ad:bf:ff:88:88 --vpn de:ad:bf:ff:22:22 --vpn de:ad:bf:ff:03:03 --vpn de:ad:bf:ff:05:05 --vpn de:ad:bf:ff:01:01 --vpn de:ad:be:fc:03:03 --vpn 00:16:3e:53:75:0d --vpn de:ad:be:fc:05:05 --vpn de:ad:be:fc:01:01 --vpn de:ad:be:ef:03:03 --vpn de:ad:be:ef:01:01 --vpn de:ad:be:ef:05:05 --vpn 00:16:3e:fb:9d:8d --vpn 00:16:3e:fb:9d:9d
+python3 $FFMAPPATH/backend.py -d /var/www/meshviewer/ --aliases $FFMAPPATH/aliases.json $FFMAPPATH/gateway.json -m bat0:/var/run/alfred.sock -p 30 --vpn de:ad:be:ff:01:01 de:ad:be:ff:05:05 de:ad:be:ff:05:06 de:ad:be:ff:03:03 de:ad:be:ff:22:22 de:ad:be:ff:22:23 de:ad:be:ff:88:88 de:ad:be:ff:88:89 de:ad:bf:ff:88:88 de:ad:bf:ff:22:22 de:ad:bf:ff:03:03 de:ad:bf:ff:05:05 de:ad:bf:ff:01:01 de:ad:be:fc:03:03 00:16:3e:53:75:0d de:ad:be:fc:05:05 de:ad:be:fc:01:01 de:ad:be:ef:03:03 de:ad:be:ef:01:01 de:ad:be:ef:05:05 00:16:3e:fb:9d:8d 00:16:3e:fb:9d:9d

31
node.py

@@ -1,31 +0,0 @@
class Node():
def __init__(self):
self.name = ""
self.id = ""
self.macs = set()
self.interfaces = dict()
self.flags = dict({
"online": False,
"gateway": False,
"client": False
})
self.gps = None
self.firmware = None
self.clientcount = 0
def add_mac(self, mac):
mac = mac.lower()
if len(self.macs) == 0:
self.id = mac
self.macs.add(mac)
self.interfaces[mac] = Interface()
def __repr__(self):
return self.macs.__repr__()
class Interface():
def __init__(self):
self.vpn = False

32
node_number.py Executable file

@@ -0,0 +1,32 @@
#!/usr/bin/env python
# import libraries
import time
import datetime
import json
import urllib2
# fetch both nodes.json files
Datei = urllib2.urlopen('https://map.hamburg.freifunk.net/nodes.json')
Datei_Sued = urllib2.urlopen('https://map.hamburg.freifunk.net/hhsued/mv1/nodes.json')
Text = Datei.read()
Knotenzahl = Text.count('"online": true')
Text = Datei_Sued.read()
# the Sued map serves its JSON without a space after the colon, hence the different needle
Knotenzahl = Knotenzahl + Text.count('"online":true')
# get the current time
thetime = datetime.datetime.now().isoformat()
ffhh = None
# load the Freifunk API file and parse its JSON
with open('/var/www/meta/ffhh.json', 'r') as fp:
ffhh = json.load(fp)
# set the timestamp and node count attributes
ffhh['state']['lastchange'] = thetime
ffhh['state']['nodes'] = Knotenzahl
# write the Freifunk API file back with the updated values
with open('/var/www/meta/ffhh.json', 'w') as fp:
json.dump(ffhh, fp, indent=2, separators=(',', ': '))

385
nodedb.py

@@ -1,385 +0,0 @@
import json
from functools import reduce
from collections import defaultdict
from node import Node, Interface
from link import Link, LinkConnector
class NodeDB:
def __init__(self):
self._nodes = []
self._links = []
# fetch list of links
def get_links(self):
self.update_vpn_links()
return self.reduce_links()
# fetch list of nodes
def get_nodes(self):
return self._nodes
def maybe_node_by_fuzzy_mac(self, mac):
mac_a = mac.lower()
for node in self._nodes:
for mac_b in node.macs:
if is_derived_mac(mac_a, mac_b):
return node
raise KeyError
def maybe_node_by_mac(self, macs):
for node in self._nodes:
for mac in macs:
if mac.lower() in node.macs:
return node
raise KeyError
def maybe_node_by_id(self, mac):
for node in self._nodes:
if mac.lower() == node.id:
return node
raise KeyError
def parse_vis_data(self,vis_data):
for x in vis_data:
if 'of' in x:
try:
node = self.maybe_node_by_mac((x['of'], x['secondary']))
except:
node = Node()
node.flags['online'] = True
if 'legacy' in x:
node.flags['legacy'] = True
self._nodes.append(node)
node.add_mac(x['of'])
node.add_mac(x['secondary'])
for x in vis_data:
if 'router' in x:
try:
node = self.maybe_node_by_mac((x['router'], ))
except:
node = Node()
node.flags['online'] = True
if 'legacy' in x:
node.flags['legacy'] = True
node.add_mac(x['router'])
self._nodes.append(node)
# If it's a TT link and the MAC is very similar,
# consider this MAC as one of the router's MACs
if 'gateway' in x and x['label'] == "TT":
if is_similar(x['router'], x['gateway']):
node.add_mac(x['gateway'])
# skip processing as regular link
continue
try:
if 'neighbor' in x:
try:
node = self.maybe_node_by_mac((x['neighbor']))
except:
continue
if 'gateway' in x:
x['neighbor'] = x['gateway']
node = self.maybe_node_by_mac((x['neighbor'], ))
except:
node = Node()
node.flags['online'] = True
if x['label'] == 'TT':
node.flags['client'] = True
node.add_mac(x['neighbor'])
self._nodes.append(node)
for x in vis_data:
if 'router' in x:
try:
if 'gateway' in x:
x['neighbor'] = x['gateway']
router = self.maybe_node_by_mac((x['router'], ))
neighbor = self.maybe_node_by_mac((x['neighbor'], ))
except:
continue
# filter TT links merged in previous step
if router == neighbor:
continue
link = Link()
link.source = LinkConnector()
link.source.interface = x['router']
link.source.id = self._nodes.index(router)
link.target = LinkConnector()
link.target.interface = x['neighbor']
link.target.id = self._nodes.index(neighbor)
link.quality = x['label']
link.id = "-".join(sorted((link.source.interface, link.target.interface)))
if x['label'] == "TT":
link.type = "client"
self._links.append(link)
for x in vis_data:
if 'primary' in x:
try:
node = self.maybe_node_by_mac((x['primary'], ))
except:
continue
node.id = x['primary']
def reduce_links(self):
tmp_links = defaultdict(list)
for link in self._links:
tmp_links[link.id].append(link)
links = []
def reduce_link(a, b):
a.id = b.id
a.source = b.source
a.target = b.target
a.type = b.type
a.quality = ", ".join([x for x in (a.quality, b.quality) if x])
return a
for k, v in tmp_links.items():
new_link = reduce(reduce_link, v, Link())
links.append(new_link)
return links
def import_aliases(self, aliases):
for mac, alias in aliases.items():
try:
node = self.maybe_node_by_mac([mac])
except:
try:
node = self.maybe_node_by_fuzzy_mac(mac)
except:
# create an offline node
node = Node()
node.add_mac(mac)
self._nodes.append(node)
if 'name' in alias:
node.name = alias['name']
if 'vpn' in alias and alias['vpn'] and mac and node.interfaces and mac in node.interfaces:
node.interfaces[mac].vpn = True
if 'gps' in alias:
node.gps = alias['gps']
if 'firmware' in alias:
node.firmware = alias['firmware']
if 'id' in alias:
node.id = alias['id']
# list of macs
# if options['gateway']:
# mark_gateways(options['gateway'])
def mark_gateways(self, gateways):
for gateway in gateways:
try:
node = self.maybe_node_by_mac((gateway, ))
except:
print("WARNING: did not find gateway '",gateway,"' in node list")
continue
node.flags['gateway'] = True
def update_vpn_links(self):
changes = 1
while changes > 0:
changes = 0
for link in self._links:
if link.type == "client":
continue
source_interface = self._nodes[link.source.id].interfaces[link.source.interface]
target_interface = self._nodes[link.target.id].interfaces[link.target.interface]
if source_interface.vpn or target_interface.vpn:
source_interface.vpn = True
target_interface.vpn = True
if link.type != "vpn":
changes += 1
link.type = "vpn"
def count_clients(self):
for link in self._links:
try:
a = self.maybe_node_by_id(link.source.interface)
b = self.maybe_node_by_id(link.target.interface)
if a.flags['client']:
client = a
node = b
elif b.flags['client']:
client = b
node = a
else:
continue
node.clientcount += 1
except:
pass
def obscure_clients(self):
globalIdCounter = 0
nodeCounters = {}
clientIds = {}
for node in self._nodes:
if node.flags['client']:
node.macs = set()
clientIds[node.id] = None
for link in self._links:
ids = link.source.interface
idt = link.target.interface
try:
node_source = self.maybe_node_by_fuzzy_mac(ids)
node_target = self.maybe_node_by_id(idt)
if not node_source.flags['client'] and not node_target.flags['client']:
# if none of the nodes associated with this link are clients,
# we do not want to obscure
continue
if ids in clientIds and idt in clientIds:
# This is for corner cases, when a client
# is linked to another client.
clientIds[ids] = str(globalIdCounter)
ids = str(globalIdCounter)
globalIdCounter += 1
clientIds[idt] = str(globalIdCounter)
idt = str(globalIdCounter)
globalIdCounter += 1
elif ids in clientIds:
newId = generateId(idt)
clientIds[ids] = newId
ids = newId
link.source.interface = ids;
node_source.id = ids;
elif idt in clientIds:
newId = generateId(ids,nodeCounters)
clientIds[idt] = newId
idt = newId
link.target.interface = idt;
node_target.id = idt;
link.id = ids + "-" + idt
except KeyError:
pass
# extends node id by incremented node counter
def generateId(nodeId,nodeCounters):
if nodeId in nodeCounters:
n = nodeCounters[nodeId]
nodeCounters[nodeId] = n + 1
else:
nodeCounters[nodeId] = 1
n = 0
return nodeId + "_" + str(n)
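# A hedged example (id invented): with counters = {},
#     generateId('de:ad:be:ef:01:01', counters) -> 'de:ad:be:ef:01:01_0'
# and a second call with the same dict -> 'de:ad:be:ef:01:01_1'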
# compares two MACs and decides whether they are
# similar and could be from the same node
def is_similar(a, b):
if a == b:
return True
try:
mac_a = list(int(i, 16) for i in a.split(":"))
mac_b = list(int(i, 16) for i in b.split(":"))
except ValueError:
return False
# first byte must only differ in bit 2
if mac_a[0] | 2 == mac_b[0] | 2:
# count different bytes
c = [x for x in zip(mac_a[1:], mac_b[1:]) if x[0] != x[1]]
else:
return False
# no more than two additional bytes must differ
if len(c) <= 2:
delta = 0
if len(c) > 0:
delta = sum(abs(i[0] -i[1]) for i in c)
# These addresses look pretty similar!
return delta < 8
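# A hedged worked example (MACs invented): for 'de:ad:be:ef:01:00' and
# 'dc:ad:be:ef:01:01' the first bytes match once bit 2 is masked in
# (0xde | 2 == 0xdc | 2), only the last byte differs, and |0x00 - 0x01| = 1 < 8,
# so the pair is considered similar.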
def is_derived_mac(a, b):
if a == b:
return True
try:
mac_a = list(int(i, 16) for i in a.split(":"))
mac_b = list(int(i, 16) for i in b.split(":"))
except ValueError:
return False
if mac_a[4] != mac_b[4] or mac_a[2] != mac_b[2] or mac_a[1] != mac_b[1]:
return False
x = list(mac_a)
x[5] += 1
x[5] %= 255
if mac_b == x:
return True
x[0] |= 2
if mac_b == x:
return True
x[3] += 1
x[3] %= 255
if mac_b == x:
return True
x = list(mac_a)
x[0] |= 2
x[5] += 2
x[5] %= 255
if mac_b == x:
return True
x = list(mac_a)
x[0] |= 2
x[3] += 1
x[3] %= 255
if mac_b == x:
return True
return False

72
rrd.py

@@ -1,72 +0,0 @@
#!/usr/bin/env python3
import subprocess
import time
import os
from GlobalRRD import GlobalRRD
from NodeRRD import NodeRRD
class rrd:
def __init__( self
, databaseDirectory
, imagePath
, displayTimeGlobal = "7d"
, displayTimeNode = "1d"
):
self.dbPath = databaseDirectory
self.globalDb = GlobalRRD(self.dbPath)
self.imagePath = imagePath
self.displayTimeGlobal = displayTimeGlobal
self.displayTimeNode = displayTimeNode
self.currentTimeInt = (int(time.time())/60)*60
self.currentTime = str(self.currentTimeInt)
try:
os.stat(self.imagePath)
except:
os.mkdir(self.imagePath)
def update_database(self,db):
nodes = {}
clientCount = 0
for node in db.get_nodes():
if node.flags['online']:
if not node.flags['client']:
nodes[node.id] = node
node.clients = 0;
if 'legacy' in node.flags and node.flags['legacy']:
clientCount -= 1
else:
clientCount += 1
for link in db.get_links():
source = link.source.interface
target = link.target.interface
if source in nodes and not target in nodes:
nodes[source].clients += 1
elif target in nodes and not source in nodes:
nodes[target].clients += 1
self.globalDb.update(len(nodes), clientCount)
for node in nodes.values():
rrd = NodeRRD(
os.path.join(self.dbPath, str(node.id).replace(':', '') + '.rrd'),
node
)
rrd.update()
def update_images(self):
""" Creates an image for every rrd file in the database directory.
"""
self.globalDb.graph(os.path.join(self.imagePath, "globalGraph.png"), self.displayTimeGlobal)
nodeDbFiles = os.listdir(self.dbPath)
for fileName in nodeDbFiles:
if not os.path.isfile(os.path.join(self.dbPath, fileName)):
continue
nodeName = os.path.basename(fileName).split('.')
if nodeName[1] == 'rrd' and not nodeName[0] == "nodes":
rrd = NodeRRD(os.path.join(self.dbPath, fileName))
rrd.graph(self.imagePath, self.displayTimeNode)