Browse Source

improve code readability

* shorten lines
* use more expressive variable names
* change `if not var is None` to `if var is not None`
* use `dict.get(key)` instead of `dict[key] if key in dict else None`
Helge Jung 9 years ago
parent
commit
b3027aed8c
8 changed files with 229 additions and 129 deletions
  1. 64 38
      batcave.py
  2. 1 1
      ffstatus/__init__.py
  3. 44 23
      ffstatus/alfred.py
  4. 5 5
      ffstatus/basestorage.py
  5. 20 21
      ffstatus/batman.py
  6. 7 6
      ffstatus/dashing.py
  7. 22 12
      ffstatus/graphite.py
  8. 66 23
      ffstatus/server.py

+ 64 - 38
batcave.py

@@ -22,6 +22,8 @@ DEFAULT_INTERVAL = 15
 
 
 def get_args():
+    """Parse commandline arguments."""
+
     parser = argparse.ArgumentParser(description=BATCAVE)
     parser.add_argument('--logfile',
                         help='path for log file')
@@ -54,52 +56,71 @@ def get_args():
     return parser.parse_args()
 
 
-def main():
-    args = get_args()
-
-    if args.interval < 5:
-        print('A poll interval lower than 5s is not supported.')
-        sys.exit(1)
-
-    shall_daemonize = not args.no_detach
+def prepare_logging(args):
+    """Configures Python's logging according to args."""
 
     logger = logging.getLogger()
     logger.setLevel(logging.DEBUG if args.verbose else logging.INFO)
 
+    fmt = logging.Formatter(
+        '%(asctime)s [%(levelname)s] %(message)s',
+        '%Y-%m-%d %H:%M:%S')
+
     if not args.logfile is None:
-        fh = logging.FileHandler(args.logfile)
-        fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
-        logger.addHandler(fh)
+        file_handler = logging.FileHandler(args.logfile)
+        file_handler.setFormatter(fmt)
+        logger.addHandler(file_handler)
 
     if args.no_detach:
-        ch = logging.StreamHandler(sys.stdout)
-        ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
-        logger.addHandler(ch)
+        console_handler = logging.StreamHandler(sys.stdout)
+        console_handler.setFormatter(fmt)
+        logger.addHandler(console_handler)
+
+    return logger
+
 
+def main():
+    args = get_args()
+    if args.interval < 5:
+        print('A poll interval lower than 5s is not supported.')
+        sys.exit(1)
+
+    shall_daemonize = not args.no_detach
+
+    logger = prepare_logging(args)
     logger.info('Starting up')
 
     storage = Storage(args.storage_dir)
     storage.open()
     logger.info('Storage: ' + str(storage))
 
-    a = AlfredParser()
-    b = BatmanParser()
-    d = DashingClient(args.dashing_url, args.dashing_token) if not args.dashing_url is None else None
-    g = GraphitePush(args.graphite_host, args.graphite_port) if not args.graphite_host is None else None
+    alfred = AlfredParser()
+    batman = BatmanParser()
+
+    dashing = None
+    if args.dashing_url is not None:
+        dashing = DashingClient(args.dashing_url, args.dashing_token)
+
+    graphite = None
+    if args.graphite_host is not None:
+        graphite = GraphitePush(args.graphite_host, args.graphite_port)
 
     if args.no_send:
-        if not g is None: g.dont_send = True
+        if graphite is not None:
+            graphite.dont_send = True
 
-    if not args.alfred_json is None: a.alfred_json = args.alfred_json
-    if not args.batadv_vis is None: b.batadv_vis = args.batadv_vis
+    if not args.alfred_json is None:
+        alfred.alfred_json = args.alfred_json
+    if not args.batadv_vis is None:
+        batman.batadv_vis = args.batadv_vis
 
-    logger.debug('Configured A.L.F.R.E.D. source: ' + str(a))
-    logger.debug('Configured B.A.T.M.A.N. source: ' + str(b))
-    logger.debug('Configured Dashing: ' + str(d))
-    logger.debug('Configured Graphite: ' + str(g))
+    logger.debug('Configured A.L.F.R.E.D. source: %s', alfred)
+    logger.debug('Configured B.A.T.M.A.N. source: %s', batman)
+    logger.debug('Configured Dashing: %s', dashing)
+    logger.debug('Configured Graphite: %s', graphite)
 
     # execute sanitycheck() where possible
-    for i in [('AlfredParser', a), ('BatmanParser', b)]:
+    for i in [('AlfredParser', alfred), ('BatmanParser', batman)]:
         try:
             i[1].sanitycheck()
         except SanityCheckError as err:
@@ -109,32 +130,37 @@ def main():
 
     server = ApiServer((args.api_bind_host, args.api_bind_port), storage)
     server_thread = threading.Thread(target=server.serve_forever)
-    server_thread.daemon = True # exit thread when main thread terminates
+    server_thread.daemon = True  # exit thread when main thread terminates
     server_thread.start()
     logger.info('Started server: ' + str(server))
 
     if shall_daemonize:
+        streams = [handler.stream for handler in logger.handlers
+                   if isinstance(handler, logging.FileHandler)]
         daemon_context = daemon.DaemonContext(
-            files_preserve=[fh.stream],
+            files_preserve=streams,
         )
-
         daemon_context.open()
 
     while True:
         try:
-            ts = int(time.time())
+            now = int(time.time())
             logger.debug('Step 1/3: Fetching data ...')
-            alfreddata = a.fetch()
-            batmandata = b.fetch()
+            alfreddata = alfred.fetch()
+            batmandata = batman.fetch()
             newdata = merge_alfred_batman(alfreddata, batmandata)
-            logger.debug('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(len(alfreddata), len(batmandata), len(newdata)))
+            logger.debug(
+                'Fetched data: %d ALFRED with %d BATMAN makes %d total',
+                len(alfreddata), len(batmandata), len(newdata))
 
             logger.debug('Step 2/3: Pushing update data ...')
-            if not g is None:
-                graphitedata = g.push(newdata, ts=ts)
-                logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
-            if not d is None:
-                d.push(newdata)
+            if graphite is not None:
+                graphitedata = graphite.push(newdata, timestamp=now)
+                logger.info(
+                    'Sent %d lines to Graphite.',
+                    graphitedata.count('\n')+1)
+            if dashing is not None:
+                dashing.push(newdata)
 
             logger.debug('Step 3/3: Merging current data ...')
             storage.merge_new_data(newdata)

+ 1 - 1
ffstatus/__init__.py

@@ -88,7 +88,7 @@ def resolve_ipblock(ipaddr):
     """Resolve the given IP address to its inetnum entry at RIPE."""
     global no_ipblock_resolves_until
 
-    if not no_ipblock_resolves_until is None:
+    if no_ipblock_resolves_until is not None:
         if no_ipblock_resolves_until < time.time():
             no_ipblock_resolves_until = None
         else:

+ 44 - 23
ffstatus/alfred.py

@@ -21,9 +21,15 @@ class AlfredParser:
         )
 
     def sanitycheck(self):
+        """
+        Checks that the necessary tool 'alfred-json' is callable
+        and outputs syntactically correct JSON data.
+        """
+
         testdata = None
         try:
-            cmd = [self.alfred_json, '-z', '-r', str(int(self.alfred_datatypes[0][1]))]
+            first_datatype = int(self.alfred_datatypes[0][1])
+            cmd = [self.alfred_json, '-z', '-r', str(first_datatype)]
             testdata = subprocess.check_output(cmd)
         except Exception as err:
             raise SanityCheckError(
@@ -37,22 +43,25 @@ class AlfredParser:
 
         return True
 
-    def fetch(self, alfred_dump=None, include_rawdata=False):
-        data = { }
-        ts = int(time.time())
+    def fetch(self, alfred_dump=None):
+        """Fetches the current data from alfred-json."""
+
+        data = {}
+        timestamp = int(time.time())
 
         alfreddata = {}
         for datatype in self.alfred_datatypes:
-            rawdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(datatype[1]))])
+            cmd = [self.alfred_json, '-z', '-r', str(int(datatype[1]))]
+            rawdata = subprocess.check_output(cmd)
             newdata = json.loads(rawdata)
-            
+
             for item in newdata:
                 if not item in alfreddata:
                     alfreddata[item] = {}
 
                 alfreddata[item][datatype[0]] = newdata[item]
 
-        if not alfred_dump is None:
+        if alfred_dump is not None:
             jsondata = json.dumps(alfreddata, ensure_ascii=False)
             dumpfile = io.open(alfred_dump, 'w')
             dumpfile.write(jsondata)
@@ -71,50 +80,62 @@ class AlfredParser:
                 'mac': None,
                 'software': {},
                 'statistics': {},
-                '__UPDATED__': {'alfred': ts,},
-                '__RAW__': {'alfred': alfredinfo,},
+                '__UPDATED__': {'alfred': timestamp},
+                '__RAW__': {'alfred': alfredinfo},
             }
             data[myid] = nodeinfo
 
             nodestatic = alfredinfo['static']
+
             if 'hostname' in nodestatic:
                 nodeinfo['hostname'] = nodestatic['hostname']
+
             if 'network' in nodestatic:
                 if 'mac' in nodestatic['network']:
                     nodeinfo['mac'] = nodestatic['network']['mac']
                 if 'mesh_interfaces' in nodestatic['network']:
-                    nodeinfo['macs'] = [x for x in nodestatic['network']['mesh_interfaces']]
+                    macs = [x for x in nodestatic['network']['mesh_interfaces']]
+                    nodeinfo['macs'] = macs
                 else:
                     nodeinfo['macs'] = []
+
             if 'software' in nodestatic:
-                sw = nodestatic['software']
+                software = nodestatic['software']
+
+                nodeinfo['software']['firmware'] = \
+                    software.get('firmware', {}).get('release')
 
-                nodeinfo['software']['firmware'] = sw['firmware']['release'] if 'firmware' in sw and 'release' in sw['firmware'] else None
-                nodeinfo['software']['autoupdater'] = sw['autoupdater']['branch'] if sw['autoupdater']['enabled'] else 'off'
+                autoupdater = software.get('autoupdater', {})
+                autoupdater_enabled = autoupdater.get('enabled', False)
+                nodeinfo['software']['autoupdater'] = \
+                    autoupdater.get('branch') if autoupdater_enabled else 'off'
 
-            nodedyn = alfredinfo['dynamic'] if 'dynamic' in alfredinfo else nodestatic['statistics']
+            nodedyn = alfredinfo.get('dynamic',
+                                     nodestatic.get('statistics', {}))
             if 'uptime' in nodedyn:
                 nodeinfo['uptime'] = int(float(nodedyn['uptime']))
             if 'gateway' in nodedyn:
                 nodeinfo['gateway'] = nodedyn['gateway']
-                
+
             traffic = nodedyn["traffic"] if "traffic" in nodedyn else None
-            if not traffic is None:
+            if traffic is not None:
                 if not 'traffic' in nodeinfo['statistics']:
-                    nodeinfo['statistics']['traffic'] = { }
-                t = nodeinfo['statistics']['traffic']
-                t['rxbytes'] = int(traffic["rx"]["bytes"])
-                t['txbytes'] = int(traffic["tx"]["bytes"])
+                    nodeinfo['statistics']['traffic'] = {}
+                item = nodeinfo['statistics']['traffic']
+                item['rxbytes'] = int(traffic["rx"]["bytes"])
+                item['txbytes'] = int(traffic["tx"]["bytes"])
 
         return data
 
 if __name__ == "__main__":
-    a = AlfredParser()
+    parser = AlfredParser()
+
     try:
-        a.sanitycheck()
+        parser.sanitycheck()
     except SanityCheckError as err:
         print('SANITY-CHECK failed:', str(err))
         import sys
         sys.exit(1)
-    adata = a.fetch()
+
+    adata = parser.fetch()
     print(json.dumps(adata))

+ 5 - 5
ffstatus/basestorage.py

@@ -123,7 +123,7 @@ class BaseStorage(object):
         """Gets a list of all known nodes."""
 
         sorted_ids = self.data.keys()
-        if not sortby is None:
+        if sortby is not None:
             if sortby == 'name':
                 sortkey = lambda x: self.data[x]['hostname'].lower()
                 sorted_ids = sorted(self.data, key=sortkey)
@@ -237,14 +237,14 @@ class BaseStorage(object):
                         else:
                             resolved = ffstatus.resolve_ipblock(item['remote'])
                             init_vpn_cache[item['remote']] = resolved
-                            if not resolved is None:
+                            if resolved is not None:
                                 logging.info(
                                     'Resolved VPN entry \'%s\' to net \'%s\'.',
                                     item['remote'],
                                     resolved['name'],
                                 )
 
-                        if not resolved is None:
+                        if resolved is not None:
                             item['remote'] = resolved
 
         self.save()
@@ -304,7 +304,7 @@ class BaseStorage(object):
                     for gateway in vpn_entry[conntype]:
                         if 'remote' in vpn_entry[conntype][gateway]:
                             remote = vpn_entry[conntype][gateway]['remote']
-                            if isinstance(remote, basestring) and len(remote) == 0:
+                            if remote is None or remote == '':
                                 continue
 
                             item['count'][conntype] += 1
@@ -326,7 +326,7 @@ class BaseStorage(object):
         remote_resolved = None
         if remote is not None:
             remote_resolved = ffstatus.resolve_ipblock(remote)
-            if not remote_resolved is None:
+            if remote_resolved is not None:
                 logging.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(
                     remote, remote_resolved['name'],
                 ))

+ 20 - 21
ffstatus/batman.py

@@ -8,6 +8,7 @@ import string
 import subprocess
 import time
 
+import ffstatus
 from .exceptions import SanityCheckError
 
 
@@ -23,7 +24,8 @@ class BatmanParser:
 
         testdata = None
         try:
-            testdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
+            cmd = [self.batadv_vis, '-f', 'jsondoc']
+            testdata = subprocess.check_output(cmd)
         except Exception as err:
             raise SanityCheckError(
                 self, "batadv-vis not found or incompatible", err)
@@ -36,63 +38,60 @@ class BatmanParser:
 
         return True
 
-    def mac2id(self, mac):
-        """Derives a nodeid from the given MAC address."""
-
-        temp = str(mac.lower().replace(':', ''))
-#       temp = temp[0] + temp[1].translate(self.mactranslation) + temp[2:]
-        return temp
-
-    def fetch(self, batadv_dump=None, include_rawdata=False):
-        """Fetches the current data from batadv-vis and returns it."""
+    def fetch(self, batadv_dump=None):
+        """Fetches the current data from batadv-vis."""
 
         data = {}
-        ts = int(time.time())
+        timestamp = int(time.time())
 
         # call batadv-vis and parse output as JSON
         rawdata = subprocess.check_output([self.batadv_vis, '-f', 'jsondoc'])
         batmandata = json.loads(rawdata)
 
         # dump raw data into file if requested
-        if not batadv_dump is None:
+        if batadv_dump is not None:
             dumpfile = io.open(batadv_dump, 'w')
             dumpfile.write(rawdata)
             dumpfile.close()
 
         # parse raw data, convert all MAC into nodeid
         for item in batmandata['vis']:
-            itemid = self.mac2id(item['primary'])
+            itemid = ffstatus.mac2id(item['primary'])
             aliases = []
             if 'secondary' in item:
                 for mac in item['secondary']:
-                    aliases.append(self.mac2id(mac))
+                    aliases.append(ffstatus.mac2id(mac))
 
             neighbours = {}
             if 'neighbors' in item:
                 for neighbour in item['neighbors']:
                     #if neighbour['router'] != item['primary']:
                     #   print('node {0}\'s neighbor {1} has unexpected router {2}'.format(itemid, neighbour['neighbor'], neighbour['router']))
-                    neighbours[neighbour['neighbor']] = float(neighbour['metric'])
+
+                    metric = float(neighbour['metric'])
+                    neighbours[neighbour['neighbor']] = metric
 
             # construct dict entry as expected by BATCAVE
             data[itemid] = {
                 'aliases': aliases,
                 'neighbours': neighbours,
-                'clients': [x for x in item['clients']] if 'clients' in item else [],
-                '__UPDATED__': {'batadv': ts,},
-                '__RAW__': {'batadv': {itemid: item,}},
+                'clients': [x for x in item.get('clients', [])],
+                '__UPDATED__': {'batadv': timestamp},
+                '__RAW__': {'batadv': {itemid: item}},
             }
 
         return data
 
 # standalone test mode
 if __name__ == "__main__":
-    b = BatmanParser()
+    parser = BatmanParser()
+
     try:
-        b.sanitycheck()
+        parser.sanitycheck()
     except SanityCheckError as err:
         print('SANITY-CHECK failed:', str(err))
         import sys
         sys.exit(1)
-    bdata = b.fetch()
+
+    bdata = parser.fetch()
     print(json.dumps(bdata))

+ 7 - 6
ffstatus/dashing.py

@@ -21,18 +21,19 @@ class DashingClient:
             'auth_token': self.auth_token,
             'current': int(current),
         }
-        if not previous is None:
+        if previous is not None:
             info['previous'] = previous
 
         url = self.base_url + metric
-        r = requests.post(url, data=json.dumps(info))
+        resp = requests.post(url, data=json.dumps(info)).read()
         self.logger.debug('Sent metric "{0}" = "{1}"'.format(metric, current))
-        return r
+        return resp
 
     def push(self, data):
         self.logger.warn('push() not implemented yet')
 
 if __name__ == "__main__":
-    d = DashingClient('http://dashing.krombel.de:3030/widgets/', 'bitnhmlj47hamrftxkiug')
-    d.send('testNumber', 42)
-
+    client = DashingClient(
+        'http://dashing.krombel.de:3030/widgets/',
+        'bitnhmlj47hamrftxkiug')
+    client.send('testNumber', 42)

+ 22 - 12
ffstatus/graphite.py

@@ -2,6 +2,7 @@
 # -*- coding: utf-8 -*-
 
 from __future__ import print_function
+import logging
 import socket
 import time
 import StringIO
@@ -13,7 +14,7 @@ class GraphitePush:
     target_host = None
     target_port = 2003
 
-    whitelist = None #[ '24a43cf85efa', '24a43cf85edb', '24a43cd94f69', '24a43ca367f0', '24a43ca36807', '24a43cd221d5' ]
+    whitelist = None
 
     def __init__(self, host, port=2003):
         self.target_host = host
@@ -24,31 +25,40 @@ class GraphitePush:
             self.target_host, self.target_port,
             self.prefix, self.whitelist)
 
-    def push(self, data, ts=None):
-        if ts is None:
-            ts = time.time()
-        ts = int(ts)
+    def __print_graphite_line(self, output, nodeid, item, value, timestamp):
+        print(
+            self.prefix, nodeid, '.', item, ' ', value, ' ', timestamp,
+            sep='', file=output)
+
+    def push(self, data, timestamp=None):
+        if timestamp is None:
+            timestamp = time.time()
+        timestamp = int(timestamp)
 
         output = StringIO.StringIO()
         whitelist = None
-        if not self.whitelist is None and len(self.whitelist) > 0:
+        if self.whitelist is not None and len(self.whitelist) > 0:
             whitelist = [x for x in self.whitelist]
 
         for nodeid in data:
-            if (not whitelist is None) and (not nodeid in whitelist):
-                #print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
+            if whitelist is not None and nodeid not in whitelist:
+                logging.debug(
+                    'Skipping node \'%s\' (it is not in the whitelist).',
+                    nodeid)
                 continue
 
             nodeinfo = data[nodeid]
 
             for item in ['uptime']:
                 if item in nodeinfo:
-                    print(self.prefix, nodeid, '.', item, ' ', nodeinfo[item], ' ', ts, sep='', file=output)
+                    self.__print_graphite_line(
+                        output, nodeid, item, nodeinfo[item], timestamp)
 
-            traffic = nodeinfo['statistics']['traffic'] if 'statistics' in nodeinfo and 'traffic' in nodeinfo['statistics'] else None
-            if not traffic is None:
+            traffic = nodeinfo.get('statistics', {}).get('traffic')
+            if traffic is not None:
                 for item in ['rxbytes', 'txbytes']:
-                    print(self.prefix, nodeid, '.', item, ' ', traffic[item], ' ', ts, sep='', file=output)
+                    self.__print_graphite_line(
+                        output, nodeid, item, traffic[item], timestamp)
 
         all_output = output.getvalue()
 

+ 66 - 23
ffstatus/server.py

@@ -57,24 +57,26 @@ def normalize_ispname(isp):
 
 
 class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
+    """Handles a single HTTP request to the BATCAVE."""
 
-    def __init__(self, request, client_address, server):
+    def __init__(self, request, client_address, sockserver):
         self.logger = logging.getLogger('API')
-        BaseHTTPRequestHandler.__init__(self, request, client_address, server)
+        BaseHTTPRequestHandler.__init__(
+            self, request, client_address, sockserver)
 
     def parse_url_pathquery(self):
         """Extracts the query parameters from the request path."""
         url = re.match(r'^/(?P<path>.*?)(\?(?P<query>.+))?$', self.path.strip())
         if url is None:
             logging.warn('Failed to parse URL \'' + str(self.path) + '\'.')
-            return ( None, None )
+            return (None, None)
 
         path = url.group('path')
         query = {}
        if not url.group('query') is None:
            for match in REGEX_QUERYPARAM.finditer(url.group('query')):
                query[match.group('key')] = match.group('value')
-        return ( path, query )
+        return (path, query)
 
     def do_GET(self):
         """Handles all HTTP GET requests."""
@@ -143,13 +145,17 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
         self.send_error(404, 'The URL \'{0}\' was not found here.'.format(path))
 
     def send_nocache_headers(self):
-        """Sets HTTP headers indicating that this response shall not be cached."""
+        """
+        Sets HTTP headers indicating that this response shall not be cached.
+        """
 
         self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
         self.send_header('Pragma', 'no-cache')
         self.send_header('Expires', '0')
 
-    def send_headers(self, content_type='text/html; charset=utf-8', nocache=True):
+    def send_headers(self,
+                     content_type='text/html; charset=utf-8',
+                     nocache=True):
         """Send HTTP 200 Response header with the given Content-Type.
         Optionally send no-caching headers, too."""
 
@@ -161,13 +167,20 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
 
     def parse_post_params(self):
         ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
+
         if ctype == 'multipart/form-data':
             postvars = cgi.parse_multipart(self.rfile, pdict)
+
         elif ctype == 'application/x-www-form-urlencoded':
             length = int(self.headers.getheader('content-length'))
-            postvars = cgi.parse_qs(self.rfile.read(length), keep_blank_values=1)
+            postvars = cgi.parse_qs(
+                self.rfile.read(length),
+                keep_blank_values=1,
+            )
+
         else:
             postvars = {}
+
         return postvars
 
     def respond_index(self, query):
@@ -217,7 +230,10 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
             nodeid = node['node_id']
             nodename = node['hostname'] if 'hostname' in node else '&lt;?&gt;'
 
-            self.wfile.write('<tr><td><a href="/node/' + nodeid + '.json">' + nodeid + '</a></td><td>' + nodename + '</td></tr>')
+            self.wfile.write('<tr>\n')
+            self.wfile.write('  <td><a href="/node/{0}.json">{0}</a></td>\n'.format(nodeid))
+            self.wfile.write('  <td>{0}</td>\n'.format(nodename))
+            self.wfile.write('</tr>\n')
 
         self.wfile.write('</tbody>\n')
         self.wfile.write('</table>\n')
@@ -253,12 +269,20 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
 
         self.send_headers('text/plain')
         for nodeid in ids:
-            node = self.server.storage.find_node(nodeid) if not ':' in nodeid else self.server.storage.find_node_by_mac(nodeid)
-            nodename = node['hostname'] if (not node is None) and 'hostname' in node else nodeid
+            node = None
+            if not ':' in nodeid:
+                node = self.server.storage.find_node(nodeid)
+            else:
+                node = self.server.storage.find_node_by_mac(nodeid)
+            nodename = node.get('hostname', nodeid) if node is not None else nodeid
             self.wfile.write('{0}={1}\n'.format(nodeid, nodename))
 
     def respond_nodedetail(self, nodeid, field):
-        """Return a field from the given node - a string is returned as text, all other as JSON."""
+        """
+        Return a field from the given node.
+        String and integers are returned as text/plain,
+        all other as JSON.
+        """
 
         node = self.server.storage.find_node(nodeid)
         if node is None:
@@ -271,24 +295,30 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
             field = field[0:-6]
 
         if not field in node:
-            self.send_error(404, 'The node \'' + nodeid + '\' does not have a field named \'' + str(field) + '\'.')
+            self.send_error(
+                404,
+                'The node \'{0}\' does not have a field named \'{1}\'.'.format(
+                    nodeid, field
+                )
+            )
             return
 
         value = node[field]
         if return_count:
             value = len(value)
 
-        self.send_headers('text/plain' if isinstance(value, basestring) or isinstance(value, int) else 'text/json')
-        self.wfile.write(value if isinstance(value, basestring) else json.dumps(value))
+        no_json = isinstance(value, basestring) or isinstance(value, int)
+        self.send_headers('text/plain' if no_json else 'text/json')
+        self.wfile.write(value if no_json else json.dumps(value))
 
     def respond_vpn(self, query):
         storage = self.server.storage
-        peername = query['peer'] if 'peer' in query else None
-        key = query['key'] if 'key' in query else None
-        action = query['action'] if 'action' in query else None
-        remote = query['remote'] if 'remote' in query else None
-        gw = query['gw'] if 'gw' in query else None
-        ts = query['ts'] if 'ts' in query else time.time()
+        peername = query.get('peer')
+        key = query.get('key')
+        action = query.get('action')
+        remote = query.get('remote')
+        gateway = query.get('gw')
+        timestamp = query.get('ts', time.time())
 
         if action == 'list':
             self.respond_vpnlist()
@@ -299,7 +329,12 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
             self.send_error(400, 'Invalid action.')
             return
 
-        check = {'peername': peername, 'key': key, 'remote': remote, 'gw': gw}
+        check = {
+            'peername': peername,
+            'key': key,
+            'remote': remote,
+            'gw': gateway,
+        }
         for k, val in check.items():
             if val is None or len(val.strip()) == 0:
                 self.logger.error('VPN {0}: no or empty {1}'.format(action, k))
@@ -343,8 +378,10 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
         self.wfile.write('table tbody tr.online { background-color: #CFC; }\n')
         self.wfile.write('table tbody tr.offline { background-color: #FCC; }\n')
         self.wfile.write('</style>\n')
-        self.wfile.write('<table>\n<thead>\n')
+        self.wfile.write('<table>\n')
+
         gateways = self.server.storage.get_vpn_gateways()
+        self.wfile.write('<thead>\n')
         self.wfile.write('<tr><th rowspan="2">names (key)</th><th colspan="' + str(len(gateways)) + '">active</th><th colspan="' + str(len(gateways)) + '">last</th></tr>\n')
         self.wfile.write('<tr><th>' + '</th><th>'.join(gateways) + '</th><th>' + '</th><th>'.join(gateways) + '</th></tr>\n')
         self.wfile.write('</thead>\n')
@@ -414,7 +451,13 @@ class BatcaveHttpRequestHandler(BaseHTTPRequestHandler):
                 item_isps.add('unknown')
 
             elif len(item_isps) > 1:
-                self.logger.warn('VPN key \'{0}\' has {1} active IPs which resolved to {2} ISPs: \'{3}\''.format(key, len(ips), len(item_isps), '\', \''.join(item_isps)))
+                self.logger.warn(
+                    'VPN key \'%s\' has %d active IPs which resolved to %d ISPs: \'%s\'',
+                    item['key'],
+                    len(remotes),
+                    len(item_isps),
+                    '\', \''.join(item_isps)
+                )
 
             for isp in item_isps:
                 if not isp in isps: