Browse Source

fix issues identified by pylint

Especially change indenting from tabs to 4 spaces.
Helge Jung 9 years ago
parent
commit
9fcbb2ea24
8 changed files with 990 additions and 924 deletions
  1. 84 72
      batcave.py
  2. 16 10
      ffstatus/__init__.py
  3. 105 92
      ffstatus/alfred.py
  4. 80 79
      ffstatus/batman.py
  5. 23 22
      ffstatus/dashing.py
  6. 43 39
      ffstatus/graphite.py
  7. 607 582
      ffstatus/server.py
  8. 32 28
      ffstatus/storage.py

+ 84 - 72
batcave.py

@@ -1,4 +1,6 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-
+
 from __future__ import print_function
 import argparse
 import daemon
@@ -7,7 +9,12 @@ import sys
 import time
 import threading
 
-from ffstatus import *
+from ffstatus import \
+    dict_merge, merge_alfred_batman, \
+    ApiServer, \
+    AlfredParser, BatmanParser, \
+    DashingClient, GraphitePush, \
+    Storage
 
 DEFAULT_INTERVAL = 15
 
@@ -29,8 +36,8 @@ parser.add_argument('-S', '--storage-dir', default='.', help='Path where to stor
 args = parser.parse_args()
 
 if args.interval < 5:
-	print('A poll interval lower than 5s is not supported.')
-	sys.exit(1)
+    print('A poll interval lower than 5s is not supported.')
+    sys.exit(1)
 
 shall_daemonize = not args.no_detach
 
@@ -38,14 +45,14 @@ logger = logging.getLogger()
 logger.setLevel(logging.DEBUG if args.verbose else logging.INFO)
 
 if not args.logfile is None:
-	fh = logging.FileHandler(args.logfile)
-	fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
-	logger.addHandler(fh)
+    fh = logging.FileHandler(args.logfile)
+    fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
+    logger.addHandler(fh)
 
 if args.no_detach:
-	ch = logging.StreamHandler(sys.stdout)
-	ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
-	logger.addHandler(ch)
+    ch = logging.StreamHandler(sys.stdout)
+    ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
+    logger.addHandler(ch)
 
 logger.info('Starting up')
 
@@ -58,7 +65,7 @@ d = DashingClient(args.dashing_url, args.dashing_token) if not args.dashing_url
 g = GraphitePush(args.graphite_host, args.graphite_port) if not args.graphite_host is None else None
 
 if args.no_send:
-	if not g is None: g.dont_send = True
+    if not g is None: g.dont_send = True
 
 if not args.alfred_json is None: a.alfred_json = args.alfred_json
 if not args.batadv_vis is None: b.batadv_vis = args.batadv_vis
@@ -68,13 +75,14 @@ logger.debug('Configured B.A.T.M.A.N. source: ' + str(b))
 logger.debug('Configured Dashing: ' + str(d))
 logger.debug('Configured Graphite: ' + str(g))
 
-for i in [ ('AlfredParser', a), ('BatmanParser', b) ]:
-	try:
-		i[1].sanitycheck()
-	except Exception as err:
-		logger.critical(i[0] + '.sanitycheck() failed: ' + str(err))
-		print('FAILED SANITY CHECK: ' + str(err))
-		sys.exit(1)
+# execute sanitycheck() where possible
+for i in [('AlfredParser', a), ('BatmanParser', b)]:
+    try:
+        i[1].sanitycheck()
+    except Exception as err:
+        logger.critical(i[0] + '.sanitycheck() failed: ' + str(err))
+        print('FAILED SANITY CHECK: ' + str(err))
+        sys.exit(1)
 
 server = ApiServer((args.api_bind_host, args.api_bind_port), storage)
 server_thread = threading.Thread(target=server.serve_forever)
@@ -83,64 +91,68 @@ server_thread.start()
 logger.info('Started server: ' + str(server))
 
 if shall_daemonize:
-	daemon_context = daemon.DaemonContext(
-		files_preserve = [ fh.stream ],
-	)
+    daemon_context = daemon.DaemonContext(
+        files_preserve=[fh.stream],
+    )
 
-	daemon_context.open()
+    daemon_context.open()
 
 while True:
-	try:
-		ts = int(time.time())
-		logger.debug('Step 1/3: Fetching data ...')
-		alfreddata = a.fetch()
-		batmandata = b.fetch()
-		newdata = merge_alfred_batman(alfreddata, batmandata)
-		logger.debug('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(len(alfreddata), len(batmandata), len(newdata)))
-
-		logger.debug('Step 2/3: Pushing update data ...')
-		if not g is None:
-			graphitedata = g.push(newdata, ts=ts)
-			logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
-		if not d is None:
-			d.push(newdata)
-
-		logger.debug('Step 3/3: Merging current data ...')
-		temp = dict_merge(storage.data, {})
-		for x in temp:
-			if not x in newdata: continue
-			temp[x]['aliases'] = []
-			temp[x]['clients'] = []
-			temp[x]['neighbours'] = []
-			if not '__RAW__' in temp[x]:
-				temp[x]['__RAW__'] = { }
-			if '__RAW__' in newdata[x]:
-				for key in newdata[x]['__RAW__']:
-					if key in temp[x]['__RAW__']:
-						del(temp[x]['__RAW__'][key])
-		storage.data = dict_merge(temp, newdata)
-		# sanitize each item's data
-		for itemid in storage.data:
-			if itemid.startswith('__'): continue
-			item = storage.data[itemid]
-
-			# remove node's MACs from clients list
-			clients = [ x for x in item['clients'] ] if 'clients' in item else []
-			if 'mac' in item and item['mac'] in clients: clients.remove(item['mac'])
-			if 'macs' in item:
-				for x in item['macs']:
-					if x in clients: clients.remove(x)
-			storage.data[itemid]['clientcount'] = len(clients)
-		logger.debug('I have data for ' + str(len(storage.data)) + ' nodes.')
-
-		storage.save()
-
-	except Exception as err:
-		logger.error(str(err))
-
-	logger.debug('Sleeping for {0} seconds'.format(args.interval))
-	time.sleep(args.interval)
+    try:
+        ts = int(time.time())
+        logger.debug('Step 1/3: Fetching data ...')
+        alfreddata = a.fetch()
+        batmandata = b.fetch()
+        newdata = merge_alfred_batman(alfreddata, batmandata)
+        logger.debug('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(len(alfreddata), len(batmandata), len(newdata)))
+
+        logger.debug('Step 2/3: Pushing update data ...')
+        if not g is None:
+            graphitedata = g.push(newdata, ts=ts)
+            logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
+        if not d is None:
+            d.push(newdata)
+
+        logger.debug('Step 3/3: Merging current data ...')
+        temp = dict_merge(storage.data, {})
+        for x in temp:
+            if not x in newdata:
+                continue
+            temp[x]['aliases'] = []
+            temp[x]['clients'] = []
+            temp[x]['neighbours'] = []
+            if not '__RAW__' in temp[x]:
+                temp[x]['__RAW__'] = {}
+            if '__RAW__' in newdata[x]:
+                for key in newdata[x]['__RAW__']:
+                    if key in temp[x]['__RAW__']:
+                        del temp[x]['__RAW__'][key]
+
+        storage.data = dict_merge(temp, newdata)
+        # sanitize each item's data
+        for itemid in storage.data:
+            if itemid.startswith('__'):
+                continue
+            item = storage.data[itemid]
+
+            # remove node's MACs from clients list
+            clients = [x for x in item['clients']] if 'clients' in item else []
+            if 'mac' in item and item['mac'] in clients:
+                clients.remove(item['mac'])
+            if 'macs' in item:
+                for x in item['macs']:
+                    if x in clients:
+                        clients.remove(x)
+            storage.data[itemid]['clientcount'] = len(clients)
+        logger.debug('I have data for ' + str(len(storage.data)) + ' nodes.')
+
+        storage.save()
+
+    except Exception as err:
+        logger.error(str(err))
+
+    logger.debug('Sleeping for {0} seconds'.format(args.interval))
+    time.sleep(args.interval)
 
 storage.close()
 logger.info('Shut down.')
-

+ 16 - 10
ffstatus/__init__.py

@@ -1,3 +1,6 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
 from copy import deepcopy
 import json
 import logging
@@ -35,7 +38,7 @@ def dict_merge(a, b):
         if k in result and isinstance(result[k], dict):
             result[k] = dict_merge(result[k], v)
         elif k in result and isinstance(result[k], list):
-            result[k] = result[k] + [ deepcopy(x) for x in v if x not in result[k] ]
+            result[k] = result[k] + [deepcopy(x) for x in v if x not in result[k]]
         else:
             result[k] = deepcopy(v)
     return result
@@ -53,19 +56,22 @@ def merge_alfred_batman(alfreddata, batmandata):
         nodeinfo = alfreddata[nodeid]
         candidates = set()
         candidates.add(nodeid)
-        if 'mac' in nodeinfo: candidates.add(mac2id(nodeinfo['mac']))
+        if 'mac' in nodeinfo:
+            candidates.add(mac2id(nodeinfo['mac']))
         if 'macs' in nodeinfo:
             for mac in nodeinfo['macs']:
                 candidates.add(mac2id(mac))
 
         if 'network' in nodeinfo:
-            n = nodeinfo['network']
-            if 'mac' in n: candidates.add(mac2id(n['mac']))
-            if 'mesh_interfaces' in n:
-                for mac in n['mesh_interfaces']:
+            net = nodeinfo['network']
+            if 'mac' in net:
+                candidates.add(mac2id(net['mac']))
+            if 'mesh_interfaces' in net:
+                for mac in net['mesh_interfaces']:
                     candidates.add(mac2id(mac))
 
-        if not 'neighbours' in nodeinfo: nodeinfo['neighbours'] = []
+        if not 'neighbours' in nodeinfo:
+            nodeinfo['neighbours'] = []
 
         for candidate_raw in candidates:
             candidate = batlookup[candidate_raw] if candidate_raw in batlookup else candidate_raw
@@ -94,11 +100,11 @@ def resolve_ipblock(ipaddr):
         response = json.load(urllib2.urlopen(url))
         assert isinstance(response, dict)
 
-        obj = [x for x in response['objects']['object'] if x['type'] in ['inetnum','inet6num']][0]
+        obj = [x for x in response['objects']['object'] if x['type'] in ['inetnum', 'inet6num']][0]
 
         attrib = obj['attributes']['attribute']
         netname = '\n'.join([x['value'] for x in attrib if x['name'] == 'netname'])
-        netblock = '\n'.join([x['value'] for x in attrib if x['name'] in ['inetnum','inet6num']])
+        netblock = '\n'.join([x['value'] for x in attrib if x['name'] in ['inetnum', 'inet6num']])
         desc = '\n'.join([x['value'] for x in attrib if x['name'] == 'descr'])
 
         return {
@@ -108,7 +114,7 @@ def resolve_ipblock(ipaddr):
         }
 
     except urllib2.URLError as err:
-	output = err.read()
+        output = err.read()
         logger.error('Error "{1}" querying ip \'{0}\' from RIPE API: {2}'.format(ipaddr, err, output))
         if 'Retry-After' in err.headers:
             retry = int(err.headers['Retry-After'])

+ 105 - 92
ffstatus/alfred.py

@@ -1,4 +1,5 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-
 
 from __future__ import print_function
 import io
@@ -7,97 +8,109 @@ import subprocess
 import time
 
 class AlfredParser:
-	alfred_json = 'alfred-json'
-	alfred_datatypes = [ ('static', 158), ('dynamic', 159) ]
-
-	def __str__(self):
-		return 'AlfredParser \'{0}\' {1}'.format(self.alfred_json, str.join(' ', [ '{1}=>{0}'.format(x[0], x[1]) for x in self.alfred_datatypes ]))
-
-	def sanitycheck(self):
-		testdata = None
-		try:
-			testdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(self.alfred_datatypes[0][1]))])
-		except Exception as err:
-			raise Exception("alfred-json not found or incompatible: " + str(err))
-
-		try:
-			json.loads(testdata)
-		except Exception as err:
-			raise Exception("alfred-json does not return valid JSON data: " + str(err))
-
-		return True
-
-	def fetch(self, alfred_dump=None, include_rawdata=False):
-		data = { }
-		ts = int(time.time())
-
-		alfreddata = { }
-		for datatype in self.alfred_datatypes:
-			rawdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(datatype[1]))])
-			newdata = json.loads(rawdata)
-			
-			for item in newdata:
-				if not item in alfreddata:
-					alfreddata[item] = { }
-
-				alfreddata[item][datatype[0]] = newdata[item]
-
-		if not alfred_dump is None:
-			jsondata = json.dumps(alfreddata, ensure_ascii=False)
-			f = io.open(alfred_dump, 'w')
-			f.write(jsondata)
-			f.close()
-
-		for alfredid in alfreddata:
-			alfredinfo = alfreddata[alfredid]
-
-			myid = alfredinfo['static']['node_id'] if 'node_id' in alfredinfo['static'] else alfredid.lower().replace(':', '')
-
-			nodeinfo = {
-				'hostname': None,
-				'mac': None,
-				'software': {},
-				'statistics': {},
-				'__UPDATED__': { 'alfred': ts, },
-				'__RAW__': { 'alfred': alfredinfo, },
-			}
-			data[myid] = nodeinfo
-
-			nodestatic = alfredinfo['static']
-			if 'hostname' in nodestatic: nodeinfo['hostname'] = nodestatic['hostname']
-			if 'network' in nodestatic:
-				if 'mac' in nodestatic['network']:
-					nodeinfo['mac'] = nodestatic['network']['mac']
-				if 'mesh_interfaces' in nodestatic['network']:
-					nodeinfo['macs'] = [ x for x in nodestatic['network']['mesh_interfaces'] ]
-				else:
-					nodeinfo['macs'] = []
-			if 'software' in nodestatic:
-				sw = nodestatic['software']
-
-				nodeinfo['software']['firmware'] = sw['firmware']['release'] if 'firmware' in sw and 'release' in sw['firmware'] else None
-				nodeinfo['software']['autoupdater'] = sw['autoupdater']['branch'] if sw['autoupdater']['enabled'] else 'off'
-
-			nodedyn = alfredinfo['dynamic'] if 'dynamic' in alfredinfo else nodestatic['statistics']
-			if 'uptime' in nodedyn: nodeinfo['uptime'] = int(float(nodedyn['uptime']))
-			if 'gateway' in nodedyn: nodeinfo['gateway'] = nodedyn['gateway']
-				
-			traffic = nodedyn["traffic"] if "traffic" in nodedyn else None
-			if not traffic is None:
-				if not 'traffic' in nodeinfo['statistics']: nodeinfo['statistics']['traffic'] = { }
-				t = nodeinfo['statistics']['traffic']
-				t['rxbytes'] = int(traffic["rx"]["bytes"])
-				t['txbytes'] = int(traffic["tx"]["bytes"])
-
-		return data
+    alfred_json = 'alfred-json'
+    alfred_datatypes = [('static', 158), ('dynamic', 159)]
+
+    def __str__(self):
+        types = ['{1}=>{0}'.format(x[0], x[1]) for x in self.alfred_datatypes]
+        return 'AlfredParser \'{0}\' {1}'.format(
+            self.alfred_json,
+            str.join(' ', types),
+        )
+
+    def sanitycheck(self):
+        testdata = None
+        try:
+            cmd = [self.alfred_json, '-z', '-r', str(int(self.alfred_datatypes[0][1]))]
+            testdata = subprocess.check_output(cmd)
+        except Exception as err:
+            raise Exception("alfred-json not found or incompatible: " + str(err))
+
+        try:
+            json.loads(testdata)
+        except Exception as err:
+            raise Exception("alfred-json does not return valid JSON data: " + str(err))
+
+        return True
+
+    def fetch(self, alfred_dump=None, include_rawdata=False):
+        data = { }
+        ts = int(time.time())
+
+        alfreddata = {}
+        for datatype in self.alfred_datatypes:
+            rawdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(datatype[1]))])
+            newdata = json.loads(rawdata)
+            
+            for item in newdata:
+                if not item in alfreddata:
+                    alfreddata[item] = {}
+
+                alfreddata[item][datatype[0]] = newdata[item]
+
+        if not alfred_dump is None:
+            jsondata = json.dumps(alfreddata, ensure_ascii=False)
+            dumpfile = io.open(alfred_dump, 'w')
+            dumpfile.write(jsondata)
+            dumpfile.close()
+
+        for alfredid in alfreddata:
+            alfredinfo = alfreddata[alfredid]
+
+            if 'node_id' in alfredinfo['static']:
+                myid = alfredinfo['static']['node_id']
+            else:
+                myid = alfredid.lower().replace(':', '')
+
+            nodeinfo = {
+                'hostname': None,
+                'mac': None,
+                'software': {},
+                'statistics': {},
+                '__UPDATED__': {'alfred': ts,},
+                '__RAW__': {'alfred': alfredinfo,},
+            }
+            data[myid] = nodeinfo
+
+            nodestatic = alfredinfo['static']
+            if 'hostname' in nodestatic:
+                nodeinfo['hostname'] = nodestatic['hostname']
+            if 'network' in nodestatic:
+                if 'mac' in nodestatic['network']:
+                    nodeinfo['mac'] = nodestatic['network']['mac']
+                if 'mesh_interfaces' in nodestatic['network']:
+                    nodeinfo['macs'] = [x for x in nodestatic['network']['mesh_interfaces']]
+                else:
+                    nodeinfo['macs'] = []
+            if 'software' in nodestatic:
+                sw = nodestatic['software']
+
+                nodeinfo['software']['firmware'] = sw['firmware']['release'] if 'firmware' in sw and 'release' in sw['firmware'] else None
+                nodeinfo['software']['autoupdater'] = sw['autoupdater']['branch'] if sw['autoupdater']['enabled'] else 'off'
+
+            nodedyn = alfredinfo['dynamic'] if 'dynamic' in alfredinfo else nodestatic['statistics']
+            if 'uptime' in nodedyn:
+                nodeinfo['uptime'] = int(float(nodedyn['uptime']))
+            if 'gateway' in nodedyn:
+                nodeinfo['gateway'] = nodedyn['gateway']
+                
+            traffic = nodedyn["traffic"] if "traffic" in nodedyn else None
+            if not traffic is None:
+                if not 'traffic' in nodeinfo['statistics']:
+                    nodeinfo['statistics']['traffic'] = { }
+                t = nodeinfo['statistics']['traffic']
+                t['rxbytes'] = int(traffic["rx"]["bytes"])
+                t['txbytes'] = int(traffic["tx"]["bytes"])
+
+        return data
 
 if __name__ == "__main__":
-	a = AlfredParser()
-	try:
-		a.sanitycheck()
-	except Exception  as err:
-		print('SANITY-CHECK failed:', str(err))
-		import sys
-		sys.exit(1)
-	data = a.fetch()
-	print(json.dumps(data))
+    a = AlfredParser()
+    try:
+        a.sanitycheck()
+    except Exception  as err:
+        print('SANITY-CHECK failed:', str(err))
+        import sys
+        sys.exit(1)
+    adata = a.fetch()
+    print(json.dumps(adata))

+ 80 - 79
ffstatus/batman.py

@@ -1,4 +1,5 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-
 
 from __future__ import print_function
 import io
@@ -9,85 +10,85 @@ import time
 
 
 class BatmanParser:
-	batadv_vis = 'batadv-vis'
-	mactranslation = string.maketrans('2367abef', '014589cd')
-
-	def __str__(self):
-		return 'BatmanParser \'{0}\''.format(self.batadv_vis)
-
-	def sanitycheck(self):
-		"""Checks that batadv-vis is executable and gives sane output."""
-
-		testdata = None
-		try:
-			testdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
-		except Exception as err:
-			raise Exception("batadv-vis not found or incompatible: " + str(err))
-
-		try:
-			json.loads(testdata)
-		except Exception as err:
-			raise Exception("batadv-vis does not return valid JSON data: " + str(err))
-
-		return True
-
-	def mac2id(self, mac):
-		"""Derives a nodeid from the given MAC address."""
-
-		temp = str(mac.lower().replace(':', ''))
-#		temp = temp[0] + temp[1].translate(self.mactranslation) + temp[2:]
-		return temp
-
-	def fetch(self, batadv_dump=None, include_rawdata=False):
-		"""Fetches the current data from batadv-vis and returns it."""
-
-		data = { }
-		ts = int(time.time())
-
-		# call batadv-vis and parse output as JSON
-		rawdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
-		batmandata = json.loads(rawdata)
-
-		# dump raw data into file if requested
-		if not batadv_dump is None:
-			f = io.open(batadv_dump, 'w')
-			f.write(rawdata)
-			f.close()
-
-		# parse raw data, convert all MAC into nodeid
-		for item in batmandata['vis']:
-			itemid = self.mac2id(item['primary'])
-			aliases = []
-			if 'secondary' in item:
-				for mac in item['secondary']:
-					aliases.append(self.mac2id(mac))
-
-			neighbours = {}
-			if 'neighbors' in item:
-				for neighbour in item['neighbors']:
-					#if neighbour['router'] != item['primary']:
-					#	print('node {0}\'s neighbor {1} has unexpected router {2}'.format(itemid, neighbour['neighbor'], neighbour['router']))
-					neighbours[neighbour['neighbor']] = float(neighbour['metric'])
-
-			# construct dict entry as expected by BATCAVE
-			data[itemid] = {
-				'aliases': aliases,
-				'neighbours': neighbours,
-				'clients': [ x for x in item['clients'] ] if 'clients' in item else [],
-				'__UPDATED__': { 'batadv': ts, },
-				'__RAW__': { 'batadv': { itemid: item, } },
-			}
-
-		return data
+    batadv_vis = 'batadv-vis'
+    mactranslation = string.maketrans('2367abef', '014589cd')
+
+    def __str__(self):
+        return 'BatmanParser \'{0}\''.format(self.batadv_vis)
+
+    def sanitycheck(self):
+        """Checks that batadv-vis is executable and gives sane output."""
+
+        testdata = None
+        try:
+            testdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
+        except Exception as err:
+            raise Exception("batadv-vis not found or incompatible: " + str(err))
+
+        try:
+            json.loads(testdata)
+        except Exception as err:
+            raise Exception("batadv-vis does not return valid JSON data: " + str(err))
+
+        return True
+
+    def mac2id(self, mac):
+        """Derives a nodeid from the given MAC address."""
+
+        temp = str(mac.lower().replace(':', ''))
+#       temp = temp[0] + temp[1].translate(self.mactranslation) + temp[2:]
+        return temp
+
+    def fetch(self, batadv_dump=None, include_rawdata=False):
+        """Fetches the current data from batadv-vis and returns it."""
+
+        data = {}
+        ts = int(time.time())
+
+        # call batadv-vis and parse output as JSON
+        rawdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
+        batmandata = json.loads(rawdata)
+
+        # dump raw data into file if requested
+        if not batadv_dump is None:
+            dumpfile = io.open(batadv_dump, 'w')
+            dumpfile.write(rawdata)
+            dumpfile.close()
+
+        # parse raw data, convert all MAC into nodeid
+        for item in batmandata['vis']:
+            itemid = self.mac2id(item['primary'])
+            aliases = []
+            if 'secondary' in item:
+                for mac in item['secondary']:
+                    aliases.append(self.mac2id(mac))
+
+            neighbours = {}
+            if 'neighbors' in item:
+                for neighbour in item['neighbors']:
+                    #if neighbour['router'] != item['primary']:
+                    #   print('node {0}\'s neighbor {1} has unexpected router {2}'.format(itemid, neighbour['neighbor'], neighbour['router']))
+                    neighbours[neighbour['neighbor']] = float(neighbour['metric'])
+
+            # construct dict entry as expected by BATCAVE
+            data[itemid] = {
+                'aliases': aliases,
+                'neighbours': neighbours,
+                'clients': [x for x in item['clients']] if 'clients' in item else [],
+                '__UPDATED__': {'batadv': ts,},
+                '__RAW__': {'batadv': {itemid: item,}},
+            }
+
+        return data
 
 # standalone test mode
 if __name__ == "__main__":
-	b = BatmanParser()
-	try:
-		b.sanitycheck()
-	except Exception  as err:
-		print('SANITY-CHECK failed:', str(err))
-		import sys
-		sys.exit(1)
-	data = b.fetch()
-	print(json.dumps(data))
+    b = BatmanParser()
+    try:
+        b.sanitycheck()
+    except Exception  as err:
+        print('SANITY-CHECK failed:', str(err))
+        import sys
+        sys.exit(1)
+    bdata = b.fetch()
+    print(json.dumps(bdata))

+ 23 - 22
ffstatus/dashing.py

@@ -1,37 +1,38 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-
 
 import json
 import logging
 import requests
 
 class DashingClient:
-	base_url = None
+    base_url = None
 
-	def __init__(self, base_url, auth_token):
-		self.base_url = base_url
-		self.auth_token = auth_token
-		self.logger = logging.getLogger('dashing')
+    def __init__(self, base_url, auth_token):
+        self.base_url = base_url
+        self.auth_token = auth_token
+        self.logger = logging.getLogger('dashing')
 
-	def __str__(self):
-		return 'Dashing at \'{0}\''.format(self.base_url)
+    def __str__(self):
+        return 'Dashing at \'{0}\''.format(self.base_url)
 
-	def send(self, metric, current, previous=None):
-		info = {
-			'auth_token': self.auth_token,
-			'current': int(current),
-		}
-		if not previous is None:
-			info['previous'] = previous
+    def send(self, metric, current, previous=None):
+        info = {
+            'auth_token': self.auth_token,
+            'current': int(current),
+        }
+        if not previous is None:
+            info['previous'] = previous
 
-		url = self.base_url + metric
-		r = requests.post(url, data=json.dumps(info))
-		self.logger.debug('Sent metric "{0}" = "{1}"'.format(metric, current))
-		return r
+        url = self.base_url + metric
+        r = requests.post(url, data=json.dumps(info))
+        self.logger.debug('Sent metric "{0}" = "{1}"'.format(metric, current))
+        return r
 
-	def push(self, data):
-		self.logger.warn('push() not implemented yet')
+    def push(self, data):
+        self.logger.warn('push() not implemented yet')
 
 if __name__ == "__main__":
-	d = DashingClient('http://dashing.krombel.de:3030/widgets/', 'bitnhmlj47hamrftxkiug')
-	d.send('testNumber', 42)
+    d = DashingClient('http://dashing.krombel.de:3030/widgets/', 'bitnhmlj47hamrftxkiug')
+    d.send('testNumber', 42)
 

+ 43 - 39
ffstatus/graphite.py

@@ -1,4 +1,5 @@
 #!/usr/bin/python
+# -*- coding: utf-8 -*-
 
 from __future__ import print_function
 import socket
@@ -6,55 +7,58 @@ import time
 import StringIO
 
 class GraphitePush:
-	dont_send = False
+    dont_send = False
 
-	prefix = 'ffpb.nodes.'
-	target_host = None
-	target_port = 2003
+    prefix = 'ffpb.nodes.'
+    target_host = None
+    target_port = 2003
 
-	whitelist = None #[ '24a43cf85efa', '24a43cf85edb', '24a43cd94f69', '24a43ca367f0', '24a43ca36807', '24a43cd221d5' ]
+    whitelist = None #[ '24a43cf85efa', '24a43cf85edb', '24a43cd94f69', '24a43ca367f0', '24a43ca36807', '24a43cd221d5' ]
 
-	def __init__(self, host, port=2003):
-		self.target_host = host
-		self.target_port = port
+    def __init__(self, host, port=2003):
+        self.target_host = host
+        self.target_port = port
 
-	def __str__(self):
-		return 'Graphite at [{0}]:{1} (prefix=\'{2}\', whitelist={3})'.format(
-			self.target_host, self.target_port,
-			self.prefix, self.whitelist)
+    def __str__(self):
+        return 'Graphite at [{0}]:{1} (prefix=\'{2}\', whitelist={3})'.format(
+            self.target_host, self.target_port,
+            self.prefix, self.whitelist)
 
-	def push(self, data, ts=None):
-		if ts is None: ts = time.time()
-		ts = int(ts)
+    def push(self, data, ts=None):
+        if ts is None:
+            ts = time.time()
+        ts = int(ts)
 
-		output = StringIO.StringIO()
-		whitelist = [ x for x in self.whitelist ] if not self.whitelist is None and len(self.whitelist) > 0 else None
-		
-		for nodeid in data:
-			if (not whitelist is None) and (not nodeid in whitelist):
-				#print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
-				continue
+        output = StringIO.StringIO()
+        whitelist = None
+        if not self.whitelist is None and len(self.whitelist) > 0:
+            whitelist = [x for x in self.whitelist]
 
-			nodeinfo = data[nodeid]
+        for nodeid in data:
+            if (not whitelist is None) and (not nodeid in whitelist):
+                #print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
+                continue
 
-			for item in ['uptime']:
-				if item in nodeinfo:
-					print(self.prefix, nodeid, '.', item, ' ', nodeinfo[item], ' ', ts, sep='', file=output)
+            nodeinfo = data[nodeid]
 
-			traffic = nodeinfo['statistics']['traffic'] if 'statistics' in nodeinfo and 'traffic' in nodeinfo['statistics'] else None
-			if not traffic is None:
-				for item in ['rxbytes', 'txbytes']:
-					print(self.prefix, nodeid, '.', item, ' ', traffic[item], ' ', ts, sep='', file=output)
+            for item in ['uptime']:
+                if item in nodeinfo:
+                    print(self.prefix, nodeid, '.', item, ' ', nodeinfo[item], ' ', ts, sep='', file=output)
 
-		all_output = output.getvalue()
+            traffic = nodeinfo['statistics']['traffic'] if 'statistics' in nodeinfo and 'traffic' in nodeinfo['statistics'] else None
+            if not traffic is None:
+                for item in ['rxbytes', 'txbytes']:
+                    print(self.prefix, nodeid, '.', item, ' ', traffic[item], ' ', ts, sep='', file=output)
 
-		if not self.dont_send:
-			s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
-			s.connect((self.target_host, self.target_port))
-			s.sendall(all_output)
-			s.shutdown(socket.SHUT_WR)
-			s.close()
+        all_output = output.getvalue()
 
-		output.close()
+        if not self.dont_send:
+            sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+            sock.connect((self.target_host, self.target_port))
+            sock.sendall(all_output)
+            sock.shutdown(socket.SHUT_WR)
+            sock.close()
 
-		return all_output
+        output.close()
+
+        return all_output

+ 607 - 582
ffstatus/server.py

@@ -17,591 +17,616 @@ import time
 import ffstatus
 
 class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
-	DATAKEY_VPN = '__VPN__'
-	FIELDKEY_UPDATED = '__UPDATED__'
-
-	def __init__(self, request, client_address, server):
-		self.logger = logging.getLogger('API')
-		BaseHTTPRequestHandler.__init__(self, request, client_address, server)
-
-	def parse_url_pathquery(self):
-		"""Extracts the query parameters from the request path."""
-		url = re.match(r'^/(?P<path>.*?)(\?(?P<query>.+))?$', self.path.strip())
-		if url is None:
-			logging.warn('Failed to parse URL \'' + str(self.path) + '\'.')
-			return ( None, None )
-
-		path = url.group('path')
-		query = {}
-		if not url.group('query') is None:
-			for m in re.finditer(r'(?P<key>.+?)=(?P<value>.+?)(&|$)', url.group('query')):
-				query[m.group('key')] = m.group('value')
-		return ( path, query )
-
-	def do_GET(self):
-		"""Handles all HTTP GET requests."""
-
-		path, query = self.parse_url_pathquery()
-		if path is None:
-			self.send_error(400, 'Could not parse URL (' + str(self.path) + ')')
-                        return
-
-		# / - index page, shows generic help
-		if path == '':
-			self.respond_index(query)
-			return
-
-		# /list - list stored nodes
-		if path == 'list':
-			self.respond_list(query)
-			return
-
-		# /vpn - notification endpoint for gateway's VPN connections
-		if path == 'vpn':
-			self.respond_vpn(query)
-			return
-
-		# /providers
-		if path == 'providers':
-			self.respond_providers(query)
-			return
-
-		# /node/<id>.json - node's data
-		# /node/<id>/field - return specific field from node's data
-		m = re.match(r'node/(?P<id>[a-fA-F0-9]{12})(?P<cmd>\.json|/[a-zA-Z0-9_\-\.]+)$', path)
-		if m != None:
-			cmd = m.group('cmd')
-			nodeid = m.group('id').lower()
-			if cmd == '.json':
-				self.respond_node(nodeid)
-			else:
-				self.respond_nodedetail(nodeid, cmd[1:])
-			return
-
-		# /status/<id> - node's status
-		m = re.match(r'status/([a-f0-9]{12})$', path)
-		if m != None:
-			self.respond_nodestatus(m.group(1))
-			return
-
-		# no match -> 404
-		self.send_error(404, 'The URL \'{0}\' was not found here.'.format(path))
-
-	def do_POST(self):
-		"""Handles all HTTP POST requests."""
-
-		path, query = self.parse_url_pathquery()
-		if path is None:
-			self.send_error(400, 'Could not parse URL (' + str(self.path) + ')')
-                        return
-		params = self.parse_post_params()
-
-		# node id/mac to name mapping
-		if path == 'idmac2name':
-			self.respond_nodeidmac2name(params)
-			return
-
-		# no match -> 404
-		self.send_error(404, 'The URL \'{0}\' was not found here.'.format(path))
-
-	def send_nocache_headers(self):
-		"""Sets HTTP headers indicating that this response shall not be cached."""
-
-		self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
-		self.send_header('Pragma', 'no-cache')
-		self.send_header('Expires', '0')
-
-	def send_headers(self, content_type='text/html; charset=utf-8', nocache=True):
-		"""Send HTTP 200 Response header with the given Content-Type.
-		Optionally send no-caching headers, too."""
-
-		self.send_response(200)
-		self.send_header('Content-Type', content_type)
-		if nocache: self.send_nocache_headers()
-		self.end_headers()
-
-	def parse_post_params(self):
-		ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
-		if ctype == 'multipart/form-data':
-			postvars = cgi.parse_multipart(self.rfile, pdict)
-		elif ctype == 'application/x-www-form-urlencoded':
-			length = int(self.headers.getheader('content-length'))
-			postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
-		else:
-			postvars = {}
-		return postvars
-
-	def respond_index(self, query):
-		"""Display the index page."""
-
-		storage = self.server.storage
-		self.send_headers()
-
-		self.wfile.write('<!DOCTYPE html><html><head><title>BATCAVE</title></head>\n')
-		self.wfile.write('<body>\n')
-		self.wfile.write('<H1 title="Batman/Alfred Transmission Collection, Aggregation & Value Engine">BATCAVE</H1>\n')
-
-		self.wfile.write('<p>Dies ist ein interner Hintergrund-Dienst. Er wird nur von anderen Diensten\n')
-		self.wfile.write('angesprochen und sollte aus einer Mehrzahl von Gr&uuml;nden nicht &ouml;ffentlich\n')
-		self.wfile.write('zug&auml;nglich sein.</p>\n')
-
-		self.wfile.write('<H2>Status</H2>\n')
-		self.wfile.write('Daten: <span id="datacount" class="value">')
-		self.wfile.write(len(storage.data))
-		self.wfile.write('</span>\n')
-
-		self.wfile.write('<H2>API</H2>\n')
-		self.wfile.write('<p>Grundsätzlich ist das Antwort-Format JSON und alle Daten sind Live-Daten (kein Cache) die ggf. etwas Bearbeitungs-Zeit erfordern.</p>')
-		self.wfile.write('<dl>\n')
-		self.wfile.write('<dt><a href="/nodes.json">nodes.json</a></dt><dd>zur Verwendung mit ffmap (MACs anonymisiert)</dd>\n')
-		self.wfile.write('<dt><a href="/node/ff00ff00ff00.json">/node/&lt;id&gt;.json</a></dt><dd><u>alle</u> vorhandenen Information zu der gewünschten Node</dd>\n')
-		self.wfile.write('</dl>\n')
-		self.wfile.write('</body></html>')
-
-	def respond_list(self, query):
-		"""List stored data."""
-
-		storage = self.server.storage
-		self.send_headers()
-
-		self.wfile.write('<!DOCTYPE html><html><head><title>BATCAVE</title></head>\n')
-		self.wfile.write('<body>\n')
-		self.wfile.write('<H1>BATCAVE - LIST</H1>\n')
-
-		self.wfile.write('<table>\n')
-		self.wfile.write('<thead><tr><th>ID</th><th>Name</th></tr></thead>\n')
-		self.wfile.write('<tbody>\n')
-
-		data = storage.data
-		if 'sort' in query:
-			if query['sort'] == 'name':
-				sorteddata = sorted(data, key=lambda x: data[x]['hostname'].lower())
-				data = sorteddata
-			elif query['sort'] == 'id':
-				sorteddata = sorted(data)
-				data = sorteddata
-
-		for nodeid in data:
-			if nodeid.startswith('__'): continue
-			nodename = storage.data[nodeid]['hostname'] if 'hostname' in storage.data[nodeid] else '&lt;?&gt;'
-			self.wfile.write('<tr><td><a href="/node/' + nodeid + '.json">' + nodeid + '</a></td><td>' + nodename + '</td></tr>')
-
-		self.wfile.write('</tbody>\n')
-		self.wfile.write('</table>\n')
-
-	def find_node(self, rawid):
-		"""Fetch node data from storage by given id, if necessary looking thorugh node aliases."""
-
-		storage = self.server.storage
-
-		# if we have a direct hit, return it immediately
-		if rawid in storage.data:
-			return storage.data[rawid]
-
-		# no direct hit -> search via aliases
-		nodeid = rawid
-		for n in storage.data:
-			if 'aliases' in storage.data[n] and rawid in storage.data[n]['aliases']:
-				nodeid = n
-
-		# return found node
-		return storage.data[nodeid] if nodeid in storage.data else None
-
-	def find_node_by_mac(self, mac):
-		"""Fetch node data from storage by given MAC address."""
-
-		storage = self.server.storage
-		needle = mac.lower()
-
-		# iterate over all nodes
-		for nodeid in storage.data:
-			if nodeid.startswith('__'): continue
-			node = storage.data[nodeid]
-
-			# check node's primary MAC
-			if 'mac' in node and needle == node['mac'].lower():
-				return node
-
-			# check alias MACs
-			if 'macs' in node:
-				haystack = [ x.lower() for x in node['macs'] ]
-				if mac in haystack:
-					return node
-
-		# MAC address not found
-		return None
-
-	def respond_node(self, rawid):
-		"""Display node data."""
-
-		# handle API example linked on index page
-		if rawid == 'ff00ff00ff00':
-			self.send_headers('text/json')
-			self.wfile.write(json.dumps({
-				'name': 'API-Example',
-				'nodeid': rawid,
-				'META': 'Dies ist ein minimaler Beispiel-Datensatz. Herzlichen Glückwunsch, du hast das Prinzip der API kapiert.',
-			}))
-			return
-
-		# search node by the given id
-		node = self.find_node(rawid)
-
-		# handle unknown nodes
-		if node is None:
-			self.send_error(404, 'No node with id \'' + rawid + '\' present.')
-			return
-
-		# remove fields from output: __RAW__
-		export = ffstatus.dict_merge({}, node)
-		if '__RAW__' in export:
-			del(export['__RAW__'])
-
-		# dump node data as JSON
-		self.send_headers('text/json')
-		self.wfile.write(json.dumps(export))
-
-	def get_nodestatus(self, rawid):
-		"""Determine node's status."""
-
-		# search node by the given id
-		node = self.find_node(rawid)
-
-		# handle unknown nodes
-		if node is None:
-			return None
-
-		# check that the last batadv update is noted in the data
-		updated = node[self.FIELDKEY_UPDATED] if self.FIELDKEY_UPDATED in node else None
-		if updated is None or not 'batadv' in updated:
-			return 'unknown'
-
-		# make decision based on time of last batadv update
-		diff = time.time() - updated['batadv']
-		if diff < 150:
-			return 'active'
-		elif diff < 300:
-			return 'stale'
-		else:
-			return 'offline'
-
-	def respond_nodestatus(self, rawid):
-		"""Display node status."""
-
-		status = self.get_nodestatus(rawid)
-
-		if status is None:
-			self.send_error(404, 'No node with id \'' + rawid + '\' present.')
-
-		self.send_headers('text/plain')
-		self.wfile.write(status)
-
-	def respond_nodeidmac2name(self, ids):
-		"""Return a mapping of the given IDs (or MACs) into their hostname."""
-
-		self.send_headers('text/plain')
-		for nodeid in ids:
-			node = self.find_node(nodeid) if not ':' in nodeid else self.find_node_by_mac(nodeid)
-			nodename = node['hostname'] if (not node is None) and 'hostname' in node else nodeid
-			self.wfile.write('{0}={1}\n'.format(nodeid, nodename))
-			
-	def respond_nodedetail(self, nodeid, field):
-		"""Return a field from the given node - a string is returned as text, all other as JSON."""
-
-		node = self.find_node(nodeid)
-		if node is None:
-			self.send_error(404, 'No node with id \'' + nodeid + '\' present.')
-			return
-
-		return_count = False
-		if field.endswith('.count'):
-			return_count = True
-			field = field[0:-6]
-
-		if not field in node:
-			self.send_error(404, 'The node \'' + nodeid + '\' does not have a field named \'' + str(field) + '\'.')
-			return
-
-		value = node[field]
-		if return_count: value = len(value)
-
-		self.send_headers('text/plain' if isinstance(value, basestring) or isinstance(value, int) else 'text/json')
-		self.wfile.write(value if isinstance(value, basestring) else json.dumps(value))
-
-	def respond_vpn(self, query):
-		storage = self.server.storage
-		peername = query['peer'] if 'peer' in query else None
-		key = query['key'] if 'key' in query else None
-		action = query['action'] if 'action' in query else None
-		remote = query['remote'] if 'remote' in query else None
-		gw = query['gw'] if 'gw' in query else None
-		ts = query['ts'] if 'ts' in query else time.time()
-
-		if action == 'list':
-			self.respond_vpnlist()
-			return
-
-		if action != 'establish' and action != 'disestablish':
-			self.logger.error('VPN: unknown action \'{0}\''.format(action))
-			self.send_error(400, 'Invalid action.')
-			return
-
-		for k,v in { 'peername': peername, 'key': key, 'remote': remote, 'gw': gw }.items():
-			if v is None or len(v.strip()) == 0:
-				self.logger.error('VPN {0}: no or empty {1}'.format(action, k))
-				self.send_error(400, 'Missing value for ' + str(k))
-				return
-
-		if key is None or re.match(r'^[a-fA-F0-9]+$', key) is None:
-			self.logger.error('VPN peer \'{0}\' {1}: bad key \'{2}\''.format(peername, action, key))
-			self.send_error(400, 'Bad key.')
-			return
-
-		if not self.DATAKEY_VPN in storage.data: storage.data[self.DATAKEY_VPN] = {}
-		if not key in storage.data[self.DATAKEY_VPN]: storage.data[self.DATAKEY_VPN][key] = { 'active': {}, 'last': {} }
-		item = storage.data[self.DATAKEY_VPN][key]
-
-		# resolve remote addr to its netblock
-		remote_raw = remote
-		remote_resolved = None
-		if not remote is None:
-			remote_resolved = ffstatus.resolve_ipblock(remote)
-			if not remote_resolved is None:
-				self.logger.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(remote, remote_resolved['name']))
-				remote = remote_resolved
-
-		if action == 'establish':
-			item['active'][gw] = {
-				'establish': ts,
-				'peer': peername,
-				'remote': remote,
-				'remote_raw': remote_raw,
-			}
-
-		elif action == 'disestablish':
-			active = {}
-			if gw in item['active']:
-				active = item['active'][gw]
-				del(item['active'][gw])
-			active['disestablish'] = ts
-			item['last'][gw] = active
-		else:
-			self.send_error(500, 'Unknown action not filtered (' + str(action) + ')')
-			return
-
-		self.send_headers('text/plain')
-		self.wfile.write('OK')
-
-		storage.save()
-
-	def respond_vpnlist(self):
-		storage = self.server.storage
-
-		gateways = ['gw01','gw02','gw03','gw04','gw05','gw06']
-
-		self.send_headers()
-		self.wfile.write('<!DOCTYPE html>\n')
-		self.wfile.write('<html><head><title>BATCAVE - VPN LIST</title></head>\n')
-		self.wfile.write('<body>\n')
-		self.wfile.write('<style type="text/css">\n')
-		self.wfile.write('table { border: 2px solid #999; border-collapse: collapse; }\n')
-		self.wfile.write('th, td { border: 1px solid #CCC; }\n')
-		self.wfile.write('table tbody tr.online { background-color: #CFC; }\n')
-		self.wfile.write('table tbody tr.offline { background-color: #FCC; }\n')
-		self.wfile.write('</style>\n')
-		self.wfile.write('<table>\n<thead>\n')
-		self.wfile.write('<tr><th rowspan="2">names (key)</th><th colspan="' + str(len(gateways)) + '">active</th><th colspan="' + str(len(gateways)) + '">last</th></tr>\n')
-		self.wfile.write('<tr><th>' + '</th><th>'.join(gateways) + '</th><th>' + '</th><th>'.join(gateways) + '</th></tr>\n')
-		self.wfile.write('</thead>\n')
-
-		if self.DATAKEY_VPN in storage.data:
-			for key in storage.data[self.DATAKEY_VPN]:
-				item = storage.data[self.DATAKEY_VPN][key]
-				if not isinstance(item, dict):
-					continue
-
-				names = set()
-				count = {}
-				for t in [ 'active', 'last' ]:
-					count[t] = 0
-					if t in item:
-						for gw in item[t]:
-							if 'remote' in item[t][gw] and len(item[t][gw]['remote']) > 0:
-								count[t] += 1
-							if 'peer' in item[t][gw]:
-								names.add(item[t][gw]['peer'])
-
-				self.wfile.write('<tr class="online">' if count['active'] > 0 else '<tr class="offline">')
-				self.wfile.write('<td title="' + str(key) + '">' + (' / '.join(names) if len(names) > 0 else '?') + '</td>')
-				for t in [ 'active', 'last' ]:
-					for gw in gateways:
-						ip = ''
-						if t in item and gw in item[t]:
-							ip = item[t][gw]['remote'] if 'remote' in item[t][gw] else ''
-							if isinstance(ip, dict):
-								ip = ip['name']
-						self.wfile.write('<td title="' + ip + '">' + ('&check;' if len(ip) > 0 else '&times;') + '</td>')
-
-				self.wfile.write('</tr>\n')
-
-		self.wfile.write('</table>\n')
-		self.wfile.write('</body>')
-		self.wfile.write('</html>')
-
-	def respond_providers(self, query):
-		"""Return a summary of providers."""
-
-		vpn = self.server.storage.data[self.DATAKEY_VPN]
-		outputformat = query['format'].lower() if 'format' in query else 'html'
-
-		isps = {}
-		ispblocks = {}
-		vpnstorage_updated = False
-		vpnstorage_update_allowed = 'update' in query and query['update'] == 'allowed'
-		for key in vpn:
-			if key is None: continue
-			item = vpn[key]
-			if not isinstance(item, dict): continue
-			if not 'active' in item: continue
-
-			ips = []
-			for gw in item['active']:
-				if 'remote' in item['active'][gw]:
-					ip = item['active'][gw]['remote']
-					if vpnstorage_update_allowed and not isinstance(ip, dict):
-						# try to resolve ip now
-						resolved = ffstatus.resolve_ipblock(ip)
-						if not resolved is None:
-							self.logger.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(ip, resolved))
-							item['active'][gw]['remote'] = resolved
-							vpnstorage_updated = True
-							ip = resolved
-						else:
-							self.logger.debug('Failed to resolve IP \'{0}\'.'.format(ip))
-					ips.append(ip)
-
-			if len(ips) == 0:
-				# no active dialins -> no need to process this key any further
-				continue
-
-			item_isps = set()
-			for ip in ips:
-				isp = "UNKNOWN"
-				ispblock = ip
-				if isinstance(ip, dict):
-					ispblock = ip['name']
-					desc_lines = ip['description'].split('\n')
-					isp = desc_lines[0].strip()
-
-					# normalize name: strip company indication
-					isp = re.sub(r'(AG|UG|G?mbH( ?& ?Co\.? ?(OH|K)G)?)$', '', isp, flags=re.IGNORECASE).strip()
-
-					# normalize name: strip "pool" suffixes
-					isp = re.sub(r'(dynamic )?(customer |subscriber )?(ip )?(pool|(address )?range|addresses)$', '', isp, flags=re.IGNORECASE).strip()
-
-					# normalize name: strip "B2B" and aggregation suffixes
-					isp = re.sub(r'(aggregate|aggregation)?$', '', isp, flags=re.IGNORECASE).strip()
-					isp = re.sub(r'(B2B)?$', '', isp, flags=re.IGNORECASE).strip()
-
-					# normalize name: strip country suffixes (in Germany)
-					isp = re.sub(r'(DE|Deutschland|Germany|Nordrhein[- ]Westfalen|NRW|Baden[- ]Wuerttemburg|BW|Hessen|Niedersachsen|Rheinland[- ]Pfalz|RLP)$', '', isp, flags=re.IGNORECASE).strip()
-
-				isp = str(isp)
-				if not isp in ispblocks:
-					ispblocks[isp] = set()
-				ispblocks[isp].add(ispblock)
-
-				item_isps.add(isp)
-
-			if len(item_isps) == 0:
-				item_isps.add('unknown')
-
-			elif len(item_isps) > 1:
-				self.logger.warn('VPN key \'{0}\' has {1} active IPs which resolved to {2} ISPs: \'{3}\''.format(key, len(ips), len(item_isps), '\', \''.join(item_isps)))
-
-			for isp in item_isps:
-				if not isp in isps: isps[isp] = 0
-				isps[isp] += 1.0 / len(item_isps)
-
-		isps_sum = sum([isps[x] for x in isps])
-
-		if vpnstorage_updated:
-			self.server.storage.save()
+    DATAKEY_VPN = '__VPN__'
+    FIELDKEY_UPDATED = '__UPDATED__'
+
+    def __init__(self, request, client_address, server):
+        self.logger = logging.getLogger('API')
+        BaseHTTPRequestHandler.__init__(self, request, client_address, server)
+
+    def parse_url_pathquery(self):
+        """Extracts the query parameters from the request path."""
+        url = re.match(r'^/(?P<path>.*?)(\?(?P<query>.+))?$', self.path.strip())
+        if url is None:
+            logging.warn('Failed to parse URL \'' + str(self.path) + '\'.')
+            return ( None, None )
+
+        path = url.group('path')
+        query = {}
+        if not url.group('query') is None:
+            for m in re.finditer(r'(?P<key>.+?)=(?P<value>.+?)(&|$)', url.group('query')):
+                query[m.group('key')] = m.group('value')
+        return ( path, query )
+
+    def do_GET(self):
+        """Handles all HTTP GET requests."""
+
+        path, query = self.parse_url_pathquery()
+        if path is None:
+            self.send_error(400, 'Could not parse URL (' + str(self.path) + ')')
+            return
+
+        # / - index page, shows generic help
+        if path == '':
+            self.respond_index(query)
+            return
+
+        # /list - list stored nodes
+        if path == 'list':
+            self.respond_list(query)
+            return
+
+        # /vpn - notification endpoint for gateway's VPN connections
+        if path == 'vpn':
+            self.respond_vpn(query)
+            return
+
+        # /providers
+        if path == 'providers':
+            self.respond_providers(query)
+            return
+
+        # /node/<id>.json - node's data
+        # /node/<id>/field - return specific field from node's data
+        m = re.match(r'node/(?P<id>[a-fA-F0-9]{12})(?P<cmd>\.json|/[a-zA-Z0-9_\-\.]+)$', path)
+        if m != None:
+            cmd = m.group('cmd')
+            nodeid = m.group('id').lower()
+            if cmd == '.json':
+                self.respond_node(nodeid)
+            else:
+                self.respond_nodedetail(nodeid, cmd[1:])
+            return
+
+        # /status/<id> - node's status
+        m = re.match(r'status/([a-f0-9]{12})$', path)
+        if m != None:
+            self.respond_nodestatus(m.group(1))
+            return
+
+        # no match -> 404
+        self.send_error(404, 'The URL \'{0}\' was not found here.'.format(path))
+
+    def do_POST(self):
+        """Handles all HTTP POST requests."""
+
+        path, query = self.parse_url_pathquery()
+        if path is None:
+            self.send_error(400, 'Could not parse URL (' + str(self.path) + ')')
+            return
+        params = self.parse_post_params()
+
+        # node id/mac to name mapping
+        if path == 'idmac2name':
+            self.respond_nodeidmac2name(params)
+            return
+
+        # no match -> 404
+        self.send_error(404, 'The URL \'{0}\' was not found here.'.format(path))
+
+    def send_nocache_headers(self):
+        """Sets HTTP headers indicating that this response shall not be cached."""
+
+        self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
+        self.send_header('Pragma', 'no-cache')
+        self.send_header('Expires', '0')
+
+    def send_headers(self, content_type='text/html; charset=utf-8', nocache=True):
+        """Send HTTP 200 Response header with the given Content-Type.
+        Optionally send no-caching headers, too."""
+
+        self.send_response(200)
+        self.send_header('Content-Type', content_type)
+        if nocache:
+            self.send_nocache_headers()
+        self.end_headers()
+
+    def parse_post_params(self):
+        ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
+        if ctype == 'multipart/form-data':
+            postvars = cgi.parse_multipart(self.rfile, pdict)
+        elif ctype == 'application/x-www-form-urlencoded':
+            length = int(self.headers.getheader('content-length'))
+            postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
+        else:
+            postvars = {}
+        return postvars
+
+    def respond_index(self, query):
+        """Display the index page."""
+
+        storage = self.server.storage
+        self.send_headers()
+
+        self.wfile.write('<!DOCTYPE html><html><head><title>BATCAVE</title></head>\n')
+        self.wfile.write('<body>\n')
+        self.wfile.write('<H1 title="Batman/Alfred Transmission Collection, Aggregation & Value Engine">BATCAVE</H1>\n')
+
+        self.wfile.write('<p>Dies ist ein interner Hintergrund-Dienst. Er wird nur von anderen Diensten\n')
+        self.wfile.write('angesprochen und sollte aus einer Mehrzahl von Gr&uuml;nden nicht &ouml;ffentlich\n')
+        self.wfile.write('zug&auml;nglich sein.</p>\n')
+
+        self.wfile.write('<H2>Status</H2>\n')
+        self.wfile.write('Daten: <span id="datacount" class="value">')
+        self.wfile.write(len(storage.data))
+        self.wfile.write('</span>\n')
+
+        self.wfile.write('<H2>API</H2>\n')
+        self.wfile.write('<p>Grundsätzlich ist das Antwort-Format JSON und alle Daten sind Live-Daten (kein Cache) die ggf. etwas Bearbeitungs-Zeit erfordern.</p>')
+        self.wfile.write('<dl>\n')
+        self.wfile.write('<dt><a href="/nodes.json">nodes.json</a></dt><dd>zur Verwendung mit ffmap (MACs anonymisiert)</dd>\n')
+        self.wfile.write('<dt><a href="/node/ff00ff00ff00.json">/node/&lt;id&gt;.json</a></dt><dd><u>alle</u> vorhandenen Information zu der gewünschten Node</dd>\n')
+        self.wfile.write('</dl>\n')
+        self.wfile.write('</body></html>')
+
+    def respond_list(self, query):
+        """List stored data."""
+
+        storage = self.server.storage
+        self.send_headers()
+
+        self.wfile.write('<!DOCTYPE html><html>\n')
+        self.wfile.write('<head><title>BATCAVE</title></head>\n')
+        self.wfile.write('<body>\n')
+        self.wfile.write('<H1>BATCAVE - LIST</H1>\n')
+
+        self.wfile.write('<table>\n')
+        self.wfile.write('<thead><tr><th>ID</th><th>Name</th></tr></thead>\n')
+        self.wfile.write('<tbody>\n')
+
+        data = storage.data
+        if 'sort' in query:
+            if query['sort'] == 'name':
+                sorteddata = sorted(data, key=lambda x: data[x]['hostname'].lower())
+                data = sorteddata
+            elif query['sort'] == 'id':
+                sorteddata = sorted(data)
+                data = sorteddata
+
+        for nodeid in data:
+            if nodeid.startswith('__'):
+                continue
+            nodename = storage.data[nodeid]['hostname'] if 'hostname' in storage.data[nodeid] else '&lt;?&gt;'
+            self.wfile.write('<tr><td><a href="/node/' + nodeid + '.json">' + nodeid + '</a></td><td>' + nodename + '</td></tr>')
+
+        self.wfile.write('</tbody>\n')
+        self.wfile.write('</table>\n')
+
+    def find_node(self, rawid):
+        """Fetch node data from storage by given id, if necessary looking through node aliases."""
+
+        storage = self.server.storage
+
+        # if we have a direct hit, return it immediately
+        if rawid in storage.data:
+            return storage.data[rawid]
+
+        # no direct hit -> search via aliases
+        nodeid = rawid
+        for nid in storage.data:
+            if 'aliases' in storage.data[nid] and rawid in storage.data[nid]['aliases']:
+                nodeid = nid
+
+        # return found node
+        return storage.data[nodeid] if nodeid in storage.data else None
+
+    def find_node_by_mac(self, mac):
+        """Fetch node data from storage by given MAC address."""
+
+        storage = self.server.storage
+        needle = mac.lower()
+
+        # iterate over all nodes
+        for nodeid in storage.data:
+            if nodeid.startswith('__'):
+                continue
+            node = storage.data[nodeid]
+
+            # check node's primary MAC
+            if 'mac' in node and needle == node['mac'].lower():
+                return node
+
+            # check alias MACs
+            if 'macs' in node:
+                haystack = [x.lower() for x in node['macs']]
+                if mac in haystack:
+                    return node
+
+        # MAC address not found
+        return None
+
+    def respond_node(self, rawid):
+        """Display node data."""
+
+        # handle API example linked on index page
+        if rawid == 'ff00ff00ff00':
+            self.send_headers('text/json')
+            self.wfile.write(json.dumps({
+                'name': 'API-Example',
+                'nodeid': rawid,
+                'META': 'Dies ist ein minimaler Beispiel-Datensatz. Herzlichen Glückwunsch, du hast das Prinzip der API kapiert.',
+            }))
+            return
+
+        # search node by the given id
+        node = self.find_node(rawid)
+
+        # handle unknown nodes
+        if node is None:
+            self.send_error(404, 'No node with id \'' + rawid + '\' present.')
+            return
+
+        # remove fields from output: __RAW__
+        export = ffstatus.dict_merge({}, node)
+        if '__RAW__' in export:
+            del export['__RAW__']
+
+        # dump node data as JSON
+        self.send_headers('text/json')
+        self.wfile.write(json.dumps(export))
+
+    def get_nodestatus(self, rawid):
+        """Determine node's status."""
+
+        # search node by the given id
+        node = self.find_node(rawid)
+
+        # handle unknown nodes
+        if node is None:
+            return None
+
+        # check that the last batadv update is noted in the data
+        updated = node[self.FIELDKEY_UPDATED] if self.FIELDKEY_UPDATED in node else None
+        if updated is None or not 'batadv' in updated:
+            return 'unknown'
+
+        # make decision based on time of last batadv update
+        diff = time.time() - updated['batadv']
+        if diff < 150:
+            return 'active'
+        elif diff < 300:
+            return 'stale'
+        else:
+            return 'offline'
+
+    def respond_nodestatus(self, rawid):
+        """Display node status."""
+
+        status = self.get_nodestatus(rawid)
+
+        if status is None:
+            self.send_error(404, 'No node with id \'' + rawid + '\' present.')
+
+        self.send_headers('text/plain')
+        self.wfile.write(status)
+
+    def respond_nodeidmac2name(self, ids):
+        """Return a mapping of the given IDs (or MACs) into their hostname."""
+
+        self.send_headers('text/plain')
+        for nodeid in ids:
+            node = self.find_node(nodeid) if not ':' in nodeid else self.find_node_by_mac(nodeid)
+            nodename = node['hostname'] if (not node is None) and 'hostname' in node else nodeid
+            self.wfile.write('{0}={1}\n'.format(nodeid, nodename))
+
+    def respond_nodedetail(self, nodeid, field):
+        """Return a field from the given node - a string is returned as text, all other as JSON."""
+
+        node = self.find_node(nodeid)
+        if node is None:
+            self.send_error(404, 'No node with id \'' + nodeid + '\' present.')
+            return
+
+        return_count = False
+        if field.endswith('.count'):
+            return_count = True
+            field = field[0:-6]
+
+        if not field in node:
+            self.send_error(404, 'The node \'' + nodeid + '\' does not have a field named \'' + str(field) + '\'.')
+            return
+
+        value = node[field]
+        if return_count:
+            value = len(value)
+
+        self.send_headers('text/plain' if isinstance(value, basestring) or isinstance(value, int) else 'text/json')
+        self.wfile.write(value if isinstance(value, basestring) else json.dumps(value))
+
+    def respond_vpn(self, query):
+        storage = self.server.storage
+        peername = query['peer'] if 'peer' in query else None
+        key = query['key'] if 'key' in query else None
+        action = query['action'] if 'action' in query else None
+        remote = query['remote'] if 'remote' in query else None
+        gw = query['gw'] if 'gw' in query else None
+        ts = query['ts'] if 'ts' in query else time.time()
+
+        if action == 'list':
+            self.respond_vpnlist()
+            return
+
+        if action != 'establish' and action != 'disestablish':
+            self.logger.error('VPN: unknown action \'{0}\''.format(action))
+            self.send_error(400, 'Invalid action.')
+            return
+
+        check = {'peername': peername, 'key': key, 'remote': remote, 'gw': gw}
+        for k, val in check.items():
+            if val is None or len(val.strip()) == 0:
+                self.logger.error('VPN {0}: no or empty {1}'.format(action, k))
+                self.send_error(400, 'Missing value for ' + str(k))
+                return
+
+        if key is None or re.match(r'^[a-fA-F0-9]+$', key) is None:
+            self.logger.error('VPN peer \'{0}\' {1}: bad key \'{2}\''.format(peername, action, key))
+            self.send_error(400, 'Bad key.')
+            return
+
+        if not self.DATAKEY_VPN in storage.data:
+            storage.data[self.DATAKEY_VPN] = {}
+        if not key in storage.data[self.DATAKEY_VPN]:
+            storage.data[self.DATAKEY_VPN][key] = {'active': {}, 'last': {}}
+        item = storage.data[self.DATAKEY_VPN][key]
+
+        # resolve remote addr to its netblock
+        remote_raw = remote
+        remote_resolved = None
+        if not remote is None:
+            remote_resolved = ffstatus.resolve_ipblock(remote)
+            if not remote_resolved is None:
+                self.logger.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(remote, remote_resolved['name']))
+                remote = remote_resolved
+
+        if action == 'establish':
+            item['active'][gw] = {
+                'establish': ts,
+                'peer': peername,
+                'remote': remote,
+                'remote_raw': remote_raw,
+            }
+
+        elif action == 'disestablish':
+            active = {}
+            if gw in item['active']:
+                active = item['active'][gw]
+                del item['active'][gw]
+            active['disestablish'] = ts
+            item['last'][gw] = active
+
+        else:
+            self.send_error(500, 'Unknown action not filtered: ' + str(action))
+            return
+
+        self.send_headers('text/plain')
+        self.wfile.write('OK')
+
+        storage.save()
+
+    def respond_vpnlist(self):
+        storage = self.server.storage
+
+        gateways = ['gw01', 'gw02', 'gw03', 'gw04', 'gw05', 'gw06']
+
+        self.send_headers()
+        self.wfile.write('<!DOCTYPE html>\n')
+        self.wfile.write('<html><head><title>BATCAVE - VPN LIST</title></head>\n')
+        self.wfile.write('<body>\n')
+        self.wfile.write('<style type="text/css">\n')
+        self.wfile.write('table { border: 2px solid #999; border-collapse: collapse; }\n')
+        self.wfile.write('th, td { border: 1px solid #CCC; }\n')
+        self.wfile.write('table tbody tr.online { background-color: #CFC; }\n')
+        self.wfile.write('table tbody tr.offline { background-color: #FCC; }\n')
+        self.wfile.write('</style>\n')
+        self.wfile.write('<table>\n<thead>\n')
+        self.wfile.write('<tr><th rowspan="2">names (key)</th><th colspan="' + str(len(gateways)) + '">active</th><th colspan="' + str(len(gateways)) + '">last</th></tr>\n')
+        self.wfile.write('<tr><th>' + '</th><th>'.join(gateways) + '</th><th>' + '</th><th>'.join(gateways) + '</th></tr>\n')
+        self.wfile.write('</thead>\n')
+
+        if self.DATAKEY_VPN in storage.data:
+            for key in storage.data[self.DATAKEY_VPN]:
+                item = storage.data[self.DATAKEY_VPN][key]
+                if not isinstance(item, dict):
+                    continue
+
+                names = set()
+                count = {}
+                for t in [ 'active', 'last' ]:
+                    count[t] = 0
+                    if t in item:
+                        for gw in item[t]:
+                            if 'remote' in item[t][gw] and len(item[t][gw]['remote']) > 0:
+                                count[t] += 1
+                            if 'peer' in item[t][gw]:
+                                names.add(item[t][gw]['peer'])
+
+                self.wfile.write('<tr class="online">' if count['active'] > 0 else '<tr class="offline">')
+                self.wfile.write('<td title="' + str(key) + '">' + (' / '.join(names) if len(names) > 0 else '?') + '</td>')
+                for t in [ 'active', 'last' ]:
+                    for gw in gateways:
+                        ip = ''
+                        if t in item and gw in item[t]:
+                            ip = item[t][gw]['remote'] if 'remote' in item[t][gw] else ''
+                            if isinstance(ip, dict):
+                                ip = ip['name']
+                        self.wfile.write('<td title="' + ip + '">' + ('&check;' if len(ip) > 0 else '&times;') + '</td>')
+
+                self.wfile.write('</tr>\n')
+
+        self.wfile.write('</table>\n')
+        self.wfile.write('</body>')
+        self.wfile.write('</html>')
+
+    def respond_providers(self, query):
+        """Return a summary of providers."""
+
+        vpn = self.server.storage.data[self.DATAKEY_VPN]
+        outputformat = query['format'].lower() if 'format' in query else 'html'
+
+        isps = {}
+        ispblocks = {}
+        vpnstorage_updated = False
+        vpnstorage_update_allowed = 'update' in query and query['update'] == 'allowed'
+        for key in vpn:
+            if key is None:
+                continue
+            item = vpn[key]
+            if not isinstance(item, dict):
+                continue
+            if not 'active' in item:
+                continue
+
+            ips = []
+            for gw in item['active']:
+                if 'remote' in item['active'][gw]:
+                    ip = item['active'][gw]['remote']
+                    if vpnstorage_update_allowed and not isinstance(ip, dict):
+                        # try to resolve ip now
+                        resolved = ffstatus.resolve_ipblock(ip)
+                        if not resolved is None:
+                            self.logger.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(ip, resolved))
+                            item['active'][gw]['remote'] = resolved
+                            vpnstorage_updated = True
+                            ip = resolved
+                        else:
+                            self.logger.debug('Failed to resolve IP \'{0}\'.'.format(ip))
+                    ips.append(ip)
+
+            if len(ips) == 0:
+                # no active dialins -> no need to process this key any further
+                continue
+
+            item_isps = set()
+            for ip in ips:
+                isp = "UNKNOWN"
+                ispblock = ip
+                if isinstance(ip, dict):
+                    ispblock = ip['name']
+                    desc_lines = ip['description'].split('\n')
+                    isp = desc_lines[0].strip()
+
+                    # normalize name: strip company indication
+                    isp = re.sub(r'(AG|UG|G?mbH( ?& ?Co\.? ?(OH|K)G)?)$', '', isp, flags=re.IGNORECASE).strip()
+
+                    # normalize name: strip "pool" suffixes
+                    isp = re.sub(r'(dynamic )?(customer |subscriber )?(ip )?(pool|(address )?range|addresses)$', '', isp, flags=re.IGNORECASE).strip()
+
+                    # normalize name: strip "B2B" and aggregation suffixes
+                    isp = re.sub(r'(aggregate|aggregation)?$', '', isp, flags=re.IGNORECASE).strip()
+                    isp = re.sub(r'(B2B)?$', '', isp, flags=re.IGNORECASE).strip()
+
+                    # normalize name: strip country suffixes (in Germany)
+                    isp = re.sub(r'(DE|Deutschland|Germany|Nordrhein[- ]Westfalen|NRW|Baden[- ]Wuerttemburg|BW|Hessen|Niedersachsen|Rheinland[- ]Pfalz|RLP)$', '', isp, flags=re.IGNORECASE).strip()
+
+                isp = str(isp)
+                if not isp in ispblocks:
+                    ispblocks[isp] = set()
+                ispblocks[isp].add(ispblock)
+
+                item_isps.add(isp)
+
+            if len(item_isps) == 0:
+                item_isps.add('unknown')
+
+            elif len(item_isps) > 1:
+                self.logger.warn('VPN key \'{0}\' has {1} active IPs which resolved to {2} ISPs: \'{3}\''.format(key, len(ips), len(item_isps), '\', \''.join(item_isps)))
+
+            for isp in item_isps:
+                if not isp in isps:
+                    isps[isp] = 0
+                isps[isp] += 1.0 / len(item_isps)
+
+        isps_sum = sum([isps[x] for x in isps])
+
+        if vpnstorage_updated:
+            self.server.storage.save()
  
-		if outputformat == 'csv':
-			self.send_headers('text/csv')
-
-			self.wfile.write('Count;Name\n')
-			for isp in isps:
-				self.wfile.write('{0};"{1}"\n'.format(isps[isp], isp))
-
-		elif outputformat == 'json':
-			self.send_headers('text/json')
-
-			data = [ { 'name': isp, 'count': isps[isp], 'percentage': isps[isp]*100.0/isps_sum, 'blocks': [block for block in ispblocks[isp]] } for isp in isps ]
-			self.wfile.write(json.dumps(data))
-
-		elif outputformat == 'html':
-			self.send_headers()
-
-			self.wfile.write('<!DOCTYPE html><html><head><title>BATCAVE - PROVIDERS</title></head><body>\n')
-			self.wfile.write('<table border="2"><thead><tr><th>Count</th><th>Percentage</th><th>Name</th><th>Blocks</th></tr></thead><tbody>\n')
-
-			for isp in sorted(isps, key=lambda x: isps[x], reverse=True):
-				self.wfile.write('<tr><td>{0}</td><td>{1:.1f}%</td><td>{2}</td><td>{3}</td></tr>\n'.format(
-					isps[isp],
-					isps[isp]*100.0/isps_sum,
-					isp,
-					', '.join(sorted(ispblocks[isp])) if isp in ispblocks else '?',
-				))
-
-			self.wfile.write('</tbody></table>\n')
-			self.wfile.write('<p>Totals: {0} ISPs, {1} connections</p>\n'.format(len(isps), isps_sum))
-			self.wfile.write('</body></html>')
-
-		else:
-			self.send_error(400, 'Unknown output format.')
+        if outputformat == 'csv':
+            self.send_headers('text/csv')
+
+            self.wfile.write('Count;Name\n')
+            for isp in isps:
+                self.wfile.write('{0};"{1}"\n'.format(isps[isp], isp))
+
+        elif outputformat == 'json':
+            self.send_headers('text/json')
+
+            data = [
+                {
+                    'name': isp,
+                    'count': isps[isp],
+                    'percentage': isps[isp]*100.0/isps_sum,
+                    'blocks': [block for block in ispblocks[isp]],
+                } for isp in isps
+            ]
+            self.wfile.write(json.dumps(data))
+
+        elif outputformat == 'html':
+            self.send_headers()
+
+            self.wfile.write('<!DOCTYPE html><html>\n')
+            self.wfile.write('<head><title>BATCAVE - PROVIDERS</title></head>\n')
+            self.wfile.write('<body>\n')
+            self.wfile.write('<table border="2">\n')
+            self.wfile.write('<thead><tr><th>Count</th><th>Percentage</th><th>Name</th><th>Blocks</th></tr></thead>\n')
+            self.wfile.write('<tbody>\n')
+
+            for isp in sorted(isps, key=lambda x: isps[x], reverse=True):
+                self.wfile.write('<tr><td>{0}</td><td>{1:.1f}%</td><td>{2}</td><td>{3}</td></tr>\n'.format(
+                    isps[isp],
+                    isps[isp]*100.0/isps_sum,
+                    isp,
+                    ', '.join(sorted(ispblocks[isp])) if isp in ispblocks else '?',
+                ))
+
+            self.wfile.write('</tbody></table>\n')
+            self.wfile.write('<p>Totals: {0} ISPs, {1} connections</p>\n'.format(len(isps), isps_sum))
+            self.wfile.write('</body></html>')
+
+        else:
+            self.send_error(400, 'Unknown output format.')
 
 class ApiServer(ThreadingMixIn, HTTPServer):
-	def __init__(self, endpoint, storage):
-		if ':' in endpoint[0]: self.address_family = socket.AF_INET6
-		HTTPServer.__init__(self, endpoint, BatcaveHttpRequestHandler)
-		self.storage = storage
-
-		# check all entries for a proper 'remote' entry
-		vpn = storage.data[BatcaveHttpRequestHandler.DATAKEY_VPN] if BatcaveHttpRequestHandler.DATAKEY_VPN in storage.data else {}
-		init_vpn_cache = {}
-		for key in vpn:
-			if not isinstance(vpn[key], dict):
-				continue
-			for mode in vpn[key]:
-				if not isinstance(vpn[key][mode], dict):
-					continue
-				for gw in vpn[key][mode]:
-					if not isinstance(vpn[key][mode][gw], dict):
-						continue
-					item = vpn[key][mode][gw]
-					if 'remote' in item and not 'remote_raw' in item:
-						item['remote_raw'] = item['remote']
-						resolved = None
-						if item['remote'] in init_vpn_cache:
-							resolved = init_vpn_cache[item['remote']]
-						else:
-							resolved = ffstatus.resolve_ipblock(item['remote'])
-							init_vpn_cache[item['remote']] = resolved
-							if not resolved is None:
-								self.logger.info('Startup: resolved VPN entry \'{0}\' to net \'{1}\'.'.format(item['remote'], resolved['name']))
-						if not resolved is None:
-							item['remote'] = resolved
-		storage.save()
-
-
-	def __str__(self):
-		return 'ApiServer on {0}'.format(self.server_address)
+    def __init__(self, endpoint, storage):
+        if ':' in endpoint[0]:
+            self.address_family = socket.AF_INET6
+        HTTPServer.__init__(self, endpoint, BatcaveHttpRequestHandler)
+        self.storage = storage
+
+        # check all entries for a proper 'remote' entry
+        vpn = storage.data[BatcaveHttpRequestHandler.DATAKEY_VPN] if BatcaveHttpRequestHandler.DATAKEY_VPN in storage.data else {}
+        init_vpn_cache = {}
+        for key in vpn:
+            if not isinstance(vpn[key], dict):
+                continue
+            for mode in vpn[key]:
+                if not isinstance(vpn[key][mode], dict):
+                    continue
+                for gw in vpn[key][mode]:
+                    if not isinstance(vpn[key][mode][gw], dict):
+                        continue
+                    item = vpn[key][mode][gw]
+                    if 'remote' in item and not 'remote_raw' in item:
+                        item['remote_raw'] = item['remote']
+                        resolved = None
+                        if item['remote'] in init_vpn_cache:
+                            resolved = init_vpn_cache[item['remote']]
+                        else:
+                            resolved = ffstatus.resolve_ipblock(item['remote'])
+                            init_vpn_cache[item['remote']] = resolved
+                            if not resolved is None:
+                                logging.info('Startup: resolved VPN entry \'{0}\' to net \'{1}\'.'.format(item['remote'], resolved['name']))
+                        if not resolved is None:
+                            item['remote'] = resolved
+        storage.save()
+
+
+    def __str__(self):
+        return 'ApiServer on {0}'.format(self.server_address)
 
 if __name__ == '__main__':
-	dummystorage = Storage()
-	server = ApiServer(('0.0.0.0', 8888), dummystorage)
+    dummystorage = Storage()
+    server = ApiServer(('0.0.0.0', 8888), dummystorage)
 
-	print("Server:", str(server))
-	server.serve_forever()
+    print("Server:", str(server))
+    server.serve_forever()

+ 32 - 28
ffstatus/storage.py

@@ -1,37 +1,41 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
 import cPickle as pickle
 import logging
 import os
 
 class Storage:
-	data = None
+    data = None
 
-	def __init__(self, storage_dir):
-		self.logger = logging.getLogger('Storage')
-		self.storage_dir = storage_dir
+    def __init__(self, storage_dir):
+        self.logger = logging.getLogger('Storage')
+        self.storage_dir = storage_dir
 
-		self.logger.debug('Using storage at \'{0}\''.format(storage_dir))
-		self.storage_file = open(os.path.join(storage_dir, 'storage.dat'), 'a+b')
-		try:
-			self.storage_file.seek(0, os.SEEK_SET)
-			self.data = pickle.load(self.storage_file)
-		except EOFError:
-			self.logger.warn('The storage file was empty. I\'ll assume this is the first start with this storage directory.')
-		except Exception as err:
-			self.logger.error('Loading the storage failed. Please resolve the error and/or remove the storage file \'{0}\': {1}'.format(
-				os.path.join(storage_dir, self.storage_file.name),
-				str(err),
-			))
-			raise err
-		if self.data is None: self.data = {}
-		self.logger.info('Opened storage with ' + str(len(self.data)) + ' entries.')
+        self.logger.debug('Using storage at \'{0}\''.format(storage_dir))
+        self.storage_file = open(os.path.join(storage_dir, 'storage.dat'), 'a+b')
+        try:
+            self.storage_file.seek(0, os.SEEK_SET)
+            self.data = pickle.load(self.storage_file)
+        except EOFError:
+            self.logger.warn('The storage file was empty. I\'ll assume this is the first start with this storage directory.')
+        except Exception as err:
+            self.logger.error('Loading the storage failed. Please resolve the error and/or remove the storage file \'{0}\': {1}'.format(
+                os.path.join(storage_dir, self.storage_file.name),
+                str(err),
+            ))
+            raise err
+        if self.data is None:
+            self.data = {}
+        self.logger.info('Opened storage with ' + str(len(self.data)) + ' entries.')
 
-	def save(self):
-		self.storage_file.seek(0, os.SEEK_SET)
-		self.storage_file.truncate()
-		pickle.dump(self.data, self.storage_file, protocol=2)
-		self.storage_file.flush()
+    def save(self):
+        self.storage_file.seek(0, os.SEEK_SET)
+        self.storage_file.truncate()
+        pickle.dump(self.data, self.storage_file, protocol=2)
+        self.storage_file.flush()
 
-	def close(self):
-		self.save()
-		self.storage_file.close()
-		self.data = None
+    def close(self):
+        self.save()
+        self.storage_file.close()
+        self.data = None