#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function, unicode_literals

import logging
import re
import time

import ffstatus

from .exceptions import VpnKeyFormatError


def sanitize_node(data, include_raw_data=False):
    """
    Filters potentially harmful entries from the node's data.
    """

    export = ffstatus.dict_merge({}, data)

    # remove fields from output: __RAW__
    if '__RAW__' in export and not include_raw_data:
        del export['__RAW__']

    return export
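
# Illustrative sketch (comments only, not executed): sanitize_node() returns
# a copy of the node dict without its '__RAW__' section unless that data is
# explicitly requested. The node shape shown here is an assumed example.
#
#   node = {'hostname': 'node01', 'mac': '02:ca:fe:00:00:01',
#           '__RAW__': {'alfred': {}}}
#   sanitize_node(node)                         # -> copy without '__RAW__'
#   sanitize_node(node, include_raw_data=True)  # -> copy incl. '__RAW__'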


class BaseStorage(object):
    """
    Provides operations on the storage data.
    This class gets subclassed to actually write the data
    to a file, database, whatever.
    """

    DATAKEY_VPN = '__VPN__'
    FIELDKEY_UPDATED = '__UPDATED__'

    __data = None

    @property
    def data(self):
        """Contains the data handled by this storage."""
        return self.__data

    def init_data(self, data):
        """setter for data property"""
        if self.__data is not None:
            msg = 'Tried to initialize storage data a second time.'
            logging.error(msg)
            raise Exception(msg)

        logging.debug('Setting initial storage data (%d items).',
                      len(data) if data is not None else 0)
        self.__data = data

    def open(self):
        """
        When overridden in a subclass,
        opens the persistent storage.
        """
        pass

    def save(self):
        """
        When overridden in a subclass,
        stores the data to a persistent storage.
        """
        pass

    def close(self):
        """
        When overridden in a subclass,
        closes the persistent storage.
        """
        pass

    def merge_new_data(self, newdata):
        """Updates data in the storage by merging the new data."""
        if newdata is None or not isinstance(newdata, dict):
            raise ValueError("Expected a dict as new data.")

        # start merge on a copy of the current data
        current = ffstatus.dict_merge(self.data, {})
        for item_id in current:
            if item_id not in newdata:
                continue

            # clear fields which the new data provides anew,
            # so they get replaced rather than merged
            current[item_id]['aliases'] = []
            current[item_id]['clients'] = []
            current[item_id]['neighbours'] = []
            if '__RAW__' not in current[item_id]:
                current[item_id]['__RAW__'] = {}
            if '__RAW__' in newdata[item_id]:
                for key in newdata[item_id]['__RAW__']:
                    if key in current[item_id]['__RAW__']:
                        del current[item_id]['__RAW__'][key]

        # merge the dictionaries
        updated = ffstatus.dict_merge(current, newdata)

        # sanitize each item's data
        for itemid in updated:
            if itemid.startswith('__'):
                continue
            item = updated[itemid]

            # remove node's MACs from clients list
            clients = [x for x in item.get('clients', [])]
            if 'mac' in item and item['mac'] in clients:
                clients.remove(item['mac'])
            for mac in item.get('macs', []):
                if mac in clients:
                    clients.remove(mac)

            # set clientcount
            updated[itemid]['clientcount'] = len(clients)

        # set the new data
        self.__data = updated
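
    # Usage sketch for merge_new_data() (comments only; the data values are
    # assumed examples): merging a fresh poll recomputes 'clientcount' with
    # the node's own MAC(s) filtered from the clients list.
    #
    #   storage.merge_new_data({
    #       'n1': {'mac': '02:ca:fe:00:00:01',
    #              'clients': ['02:ca:fe:00:00:01', '66:77:88:99:aa:bb']},
    #   })
    #   storage.data['n1']['clientcount']  # -> 1 (own MAC not counted)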

    def get_nodes(self, sortby=None, include_raw_data=False):
        """Gets a list of all known nodes."""
        sorted_ids = self.data.keys()
        if sortby is not None:
            if sortby == 'name':
                sortkey = lambda x: self.data[x]['hostname'].lower()
                sorted_ids = sorted(self.data, key=sortkey)
            elif sortby == 'id':
                sorted_ids = sorted(self.data)

        result = []
        for nodeid in sorted_ids:
            if nodeid.startswith('__'):
                continue
            node = sanitize_node(self.data[nodeid], include_raw_data)
            result.append(node)

        return result

    def find_node(self, rawid):
        """
        Fetch node data by given id.
        If necessary, look through node aliases.
        """

        # if we have a direct hit, return it immediately
        if rawid in self.data:
            return sanitize_node(self.data[rawid])

        # no direct hit -> search via aliases
        nodeid = rawid
        for nid in self.data:
            node = self.data[nid]
            if 'aliases' in node and rawid in node['aliases']:
                nodeid = nid

        # return found node
        if nodeid in self.data:
            return sanitize_node(self.data[nodeid])
        else:
            return None

    def find_node_by_mac(self, mac):
        """Fetch node data by given MAC address."""
        needle = mac.lower()

        # iterate over all nodes
        for nodeid in self.data:
            if nodeid.startswith('__'):
                continue
            node = self.data[nodeid]

            # check node's primary MAC
            if 'mac' in node and needle == node['mac'].lower():
                return sanitize_node(node)

            # check alias MACs (compare lowercased on both sides)
            if 'macs' in node:
                haystack = [x.lower() for x in node['macs']]
                if needle in haystack:
                    return sanitize_node(node)

        # MAC address not found
        return None

    def get_nodestatus(self, rawid):
        """Determine node's status."""

        # search node by the given id
        node = self.find_node(rawid)

        # handle unknown nodes
        if node is None:
            return None

        # check that the last batadv update is noted in the data
        updated = node.get(self.FIELDKEY_UPDATED, None)
        if updated is None or 'batadv' not in updated:
            return 'unknown'

        # make decision based on time of last batadv update
        diff = time.time() - updated['batadv']
        if diff < 150:
            return 'active'
        elif diff < 300:
            return 'stale'
        else:
            return 'offline'
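
    # For reference, the thresholds above map a node's last batadv update to
    # its status: younger than 150 s -> 'active', younger than 300 s ->
    # 'stale', anything older -> 'offline', no timestamp at all -> 'unknown'.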

    def resolve_vpn_remotes(self):
        """Resolves the VPN entries' remote addresses to their netblocks."""
        if self.DATAKEY_VPN not in self.data:
            return
        vpn = self.data[self.DATAKEY_VPN]

        init_vpn_cache = {}
        for key in vpn:
            if not isinstance(vpn[key], dict):
                continue
            for mode in vpn[key]:
                if not isinstance(vpn[key][mode], dict):
                    continue
                for gateway in vpn[key][mode]:
                    if not isinstance(vpn[key][mode][gateway], dict):
                        continue
                    item = vpn[key][mode][gateway]
                    if 'remote' in item and 'remote_raw' not in item:
                        item['remote_raw'] = item['remote']
                        resolved = None
                        if item['remote'] in init_vpn_cache:
                            resolved = init_vpn_cache[item['remote']]
                        else:
                            resolved = ffstatus.resolve_ipblock(item['remote'])
                            init_vpn_cache[item['remote']] = resolved
                            if resolved is not None:
                                logging.info(
                                    'Resolved VPN entry \'%s\' to net \'%s\'.',
                                    item['remote'],
                                    resolved['name'],
                                )
                        if resolved is not None:
                            item['remote'] = resolved

        self.save()

    def __get_vpn_item(self, key, create=False):
        """Fetches (and optionally creates) the VPN entry for a key."""
        if key is None or re.match(r'^[a-fA-F0-9]+$', key) is None:
            raise VpnKeyFormatError(key)

        if self.DATAKEY_VPN not in self.data:
            if not create:
                return None
            self.data[self.DATAKEY_VPN] = {}

        if key not in self.data[self.DATAKEY_VPN]:
            if not create:
                return None
            self.data[self.DATAKEY_VPN][key] = {'active': {}, 'last': {}}

        return self.data[self.DATAKEY_VPN][key]

    def get_vpn_gateways(self):
        """Returns a sorted list of all gateways seen in VPN entries."""
        if self.DATAKEY_VPN not in self.data:
            return []

        gateways = set()
        vpn = self.data[self.DATAKEY_VPN]
        for key in vpn:
            for conntype in vpn[key]:
                for gateway in vpn[key][conntype]:
                    gateways.add(gateway)

        return sorted(gateways)

    def get_vpn_connections(self):
        """Returns a list of VPN connection summaries, one per key."""
        if self.DATAKEY_VPN not in self.data:
            return []

        conntypes = ['active', 'last']

        result = []
        vpn = self.data[self.DATAKEY_VPN]
        for key in vpn:
            vpn_entry = vpn[key]
            if not isinstance(vpn_entry, dict):
                continue
            item = {
                'key': key,
                'count': {},
                'remote': {},
            }
            names = set()
            for conntype in conntypes:
                item['count'][conntype] = 0
                item['remote'][conntype] = {}
                if conntype in vpn_entry:
                    for gateway in vpn_entry[conntype]:
                        if 'remote' in vpn_entry[conntype][gateway]:
                            remote = vpn_entry[conntype][gateway]['remote']
                            if isinstance(remote, basestring) and not remote:
                                continue
                            item['count'][conntype] += 1
                            item['remote'][conntype][gateway] = remote
                        if 'peer' in vpn_entry[conntype][gateway]:
                            names.add(vpn_entry[conntype][gateway]['peer'])
            item['names'] = sorted(names)
            item['online'] = item['count']['active'] > 0
            result.append(item)

        return result
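
    # Shape of one element returned by get_vpn_connections(), assembled from
    # the fields set above (values are placeholders):
    #
    #   {'key': 'abcdef0123456789',
    #    'count': {'active': 1, 'last': 0},
    #    'remote': {'active': {'gw01': {'name': 'some-net'}}, 'last': {}},
    #    'names': ['some-peer'],
    #    'online': True}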

    def log_vpn_connect(self, key, peername, remote, gateway, timestamp):
        """Stores a new VPN connection for the given key at a gateway."""
        item = self.__get_vpn_item(key, create=True)

        # resolve remote addr to its netblock
        remote_raw = remote
        remote_resolved = None
        if remote is not None:
            remote_resolved = ffstatus.resolve_ipblock(remote)
            if remote_resolved is not None:
                logging.debug('Resolved IP \'%s\' to block \'%s\'.',
                              remote, remote_resolved['name'])
                remote = remote_resolved

        # store connection info
        item['active'][gateway] = {
            'establish': timestamp,
            'peer': peername,
            'remote': remote,
            'remote_raw': remote_raw,
        }

    def log_vpn_disconnect(self, key, gateway, timestamp):
        """Records the end of a VPN connection at a gateway."""
        item = self.__get_vpn_item(key, create=True)

        active = {}
        if gateway in item['active']:
            active = item['active'][gateway]
            del item['active'][gateway]

        active['disestablish'] = timestamp
        item['last'][gateway] = active
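

# Minimal usage sketch. BaseStorage's open/save/close are no-ops, so the base
# class works as an in-memory store for demonstration. The package name in
# `python -m ffstatus.storage` is an assumption; adjust it to wherever this
# module actually lives (the relative import above requires running it as
# part of its package).
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)

    storage = BaseStorage()
    storage.init_data({})
    storage.merge_new_data({
        'n1': {
            'hostname': 'demo-node',
            'mac': '02:ca:fe:00:00:01',
            'clients': ['02:ca:fe:00:00:01', '66:77:88:99:aa:bb'],
            BaseStorage.FIELDKEY_UPDATED: {'batadv': time.time()},
        },
    })

    for entry in storage.get_nodes(sortby='name'):
        print(entry['hostname'], entry['clientcount'])

    print(storage.get_nodestatus('n1'))  # expected: 'active'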