basestorage.py 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. from __future__ import print_function, unicode_literals
  4. import logging
  5. import re
  6. import time
  7. import ffstatus
  8. from .exceptions import VpnKeyFormatError
  9. def sanitize_node(data, include_raw_data=False):
  10. """
  11. Filters potentially harmful entries from the node's data.
  12. """
  13. export = ffstatus.dict_merge({}, data)
  14. # remove fields from output: __RAW__
  15. if '__RAW__' in export and not include_raw_data:
  16. del export['__RAW__']
  17. return export
class BaseStorage(object):
    """
    Provides operations on the storage data.

    This class gets subclassed to actually write the data
    to a file, database, whatever.
    """

    # key under which VPN connection data lives inside self.data
    DATAKEY_VPN = '__VPN__'
    # per-node field holding the timestamps of the last data updates
    FIELDKEY_UPDATED = '__UPDATED__'

    # backing store for the 'data' property; initialized via init_data()
    __data = None
    @property
    def data(self):
        """Contains the data handled by this storage."""
        return self.__data
  31. def init_data(self, data):
  32. """setter for data property"""
  33. if self.__data is not None:
  34. msg = 'Tried to initialize storage data a second time.'
  35. logging.error(msg)
  36. raise Exception(msg)
  37. logging.debug('Setting initial storage data (%d items).',
  38. len(data) if data is not None else 0)
  39. self.__data = data
    def open(self):
        """
        When overridden in a subclass,
        opens the persistent storage.
        """
        pass
    def save(self):
        """
        When overridden in a subclass,
        stores the data to a persistent storage.
        """
        pass
    def close(self):
        """
        When overridden in a subclass,
        closes the persistent storage.
        """
        pass
  58. @property
  59. def status(self):
  60. """Gets status information on the storage."""
  61. nodes = 0
  62. nodes_active = 0
  63. sum_clients = 0
  64. clients = set()
  65. for item_id in self.data:
  66. if item_id.startswith('__'):
  67. continue
  68. node = self.data[item_id]
  69. nodes += 1
  70. if self.get_nodestatus(item_id) == 'active':
  71. nodes_active += 1
  72. sum_clients += node.get('clientcount', 0)
  73. nodemacs = [x for x in node.get('macs', [])]
  74. if 'mac' in node:
  75. nodemacs.append(node['mac'])
  76. for client in node.get('clients', []):
  77. if client in nodemacs:
  78. continue
  79. clients.add(client)
  80. return {
  81. 'clients_sum': sum_clients,
  82. 'clients_unique': len(clients),
  83. 'nodes': nodes,
  84. 'nodes_active': nodes_active,
  85. }
    def merge_new_data(self, newdata):
        """
        Updates data in the storage by merging the new data.

        Volatile per-node lists (aliases, clients, neighbours) and any
        raw sections that 'newdata' is about to replace are cleared
        first, so the dict merge does not accumulate stale entries.
        Afterwards every node gets a 'node_id' and a recomputed
        'clientcount'.

        Raises:
            ValueError: if 'newdata' is not a dict.
        """
        if newdata is None or not isinstance(newdata, dict):
            raise ValueError("Expected a dict as new data.")
        # start merge on a copy of the current data
        current = ffstatus.dict_merge(self.data, {})
        for item_id in current:
            # only reset items for which new data actually arrived
            if not item_id in newdata:
                continue
            # clear volatile lists so the merge replaces instead of appends
            current[item_id]['aliases'] = []
            current[item_id]['clients'] = []
            current[item_id]['neighbours'] = []
            if not '__RAW__' in current[item_id]:
                current[item_id]['__RAW__'] = {}
            if '__RAW__' in newdata[item_id]:
                # drop raw sections that the new data will supply again
                for key in newdata[item_id]['__RAW__']:
                    if key in current[item_id]['__RAW__']:
                        del current[item_id]['__RAW__'][key]
        # merge the dictionaries
        updated = ffstatus.dict_merge(current, newdata)
        # sanitize each item's data
        for itemid in updated:
            if itemid.startswith('__'):
                continue
            item = updated[itemid]
            # ensure 'node_id' is set
            if not 'node_id' in item:
                item['node_id'] = itemid
            # remove node's MACs from clients list
            clients = [x for x in item.get('clients', [])]
            if 'mac' in item and item['mac'] in clients:
                clients.remove(item['mac'])
            for mac in item.get('macs', []):
                if mac in clients:
                    clients.remove(mac)
            # set clientcount
            updated[itemid]['clientcount'] = len(clients)
        # set the new data
        self.__data = updated
  125. def get_nodes(self, sortby=None, include_raw_data=False):
  126. """Gets a list of all known nodes."""
  127. sorted_ids = self.data.keys()
  128. if sortby is not None:
  129. if sortby == 'name':
  130. sortkey = lambda x: self.data[x]['hostname'].lower()
  131. sorted_ids = sorted(self.data, key=sortkey)
  132. elif sortby == 'id':
  133. sorted_ids = sorted(self.data)
  134. result = []
  135. for nodeid in sorted_ids:
  136. if nodeid.startswith('__'):
  137. continue
  138. node = sanitize_node(self.data[nodeid], include_raw_data)
  139. result.append(node)
  140. return result
  141. def find_node(self, rawid):
  142. """
  143. Fetch node data by given id.
  144. If necessary, look through node aliases.
  145. """
  146. # if we have a direct hit, return it immediately
  147. if rawid in self.data:
  148. return sanitize_node(self.data[rawid])
  149. # no direct hit -> search via aliases
  150. nodeid = rawid
  151. for nid in self.data:
  152. node = self.data[nid]
  153. if 'aliases' in node and rawid in node['aliases']:
  154. nodeid = nid
  155. # return found node
  156. if nodeid in self.data:
  157. return sanitize_node(self.data[nodeid])
  158. else:
  159. return None
  160. def find_node_by_mac(self, mac):
  161. """Fetch node data by given MAC address."""
  162. needle = mac.lower()
  163. # iterate over all nodes
  164. for nodeid in self.data:
  165. if nodeid.startswith('__'):
  166. continue
  167. node = self.data[nodeid]
  168. # check node's primary MAC
  169. if 'mac' in node and needle == node['mac'].lower():
  170. return sanitize_node(node)
  171. # check alias MACs
  172. if 'macs' in node:
  173. haystack = [x.lower() for x in node['macs']]
  174. if mac in haystack:
  175. return sanitize_node(node)
  176. # MAC address not found
  177. return None
  178. def get_nodestatus(self, rawid):
  179. """Determine node's status."""
  180. # search node by the given id
  181. node = self.find_node(rawid)
  182. # handle unknown nodes
  183. if node is None:
  184. return None
  185. # check that the last batadv update is noted in the data
  186. updated = node.get(self.FIELDKEY_UPDATED, None)
  187. if updated is None or not 'batadv' in updated:
  188. return 'unknown'
  189. # make decision based on time of last batadv update
  190. diff = time.time() - updated['batadv']
  191. if diff < 150:
  192. return 'active'
  193. elif diff < 300:
  194. return 'stale'
  195. else:
  196. return 'offline'
    def resolve_vpn_remotes(self):
        """
        Resolve all VPN 'remote' addresses to their network blocks.

        The original address is preserved in 'remote_raw', which also
        marks an entry as already resolved. Lookups are cached per run
        so duplicate addresses are resolved only once. The storage is
        saved afterwards.
        """
        if not self.DATAKEY_VPN in self.data:
            return
        vpn = self.data[self.DATAKEY_VPN]
        # cache: raw remote address -> resolved netblock (or None)
        init_vpn_cache = {}
        for key in vpn:
            if not isinstance(vpn[key], dict):
                continue
            for mode in vpn[key]:
                if not isinstance(vpn[key][mode], dict):
                    continue
                for gateway in vpn[key][mode]:
                    if not isinstance(vpn[key][mode][gateway], dict):
                        continue
                    item = vpn[key][mode][gateway]
                    # only touch entries not resolved before
                    # ('remote_raw' marks an already-resolved entry)
                    if 'remote' in item and not 'remote_raw' in item:
                        item['remote_raw'] = item['remote']
                        resolved = None
                        if item['remote'] in init_vpn_cache:
                            resolved = init_vpn_cache[item['remote']]
                        else:
                            resolved = ffstatus.resolve_ipblock(item['remote'])
                            init_vpn_cache[item['remote']] = resolved
                            # log only freshly resolved addresses
                            if resolved is not None:
                                logging.info(
                                    'Resolved VPN entry \'%s\' to net \'%s\'.',
                                    item['remote'],
                                    resolved['name'],
                                )
                        if resolved is not None:
                            item['remote'] = resolved
        self.save()
  229. def __get_vpn_item(self, key, create=False):
  230. if key is None or re.match(r'^[a-fA-F0-9]+$', key) is None:
  231. raise VpnKeyFormatError(key)
  232. return
  233. if not self.DATAKEY_VPN in self.data:
  234. if not create:
  235. return None
  236. self.data[self.DATAKEY_VPN] = {}
  237. if not key in self.data[self.DATAKEY_VPN]:
  238. if not create:
  239. return None
  240. self.data[self.DATAKEY_VPN][key] = {'active': {}, 'last': {}}
  241. return self.data[self.DATAKEY_VPN][key]
  242. def get_vpn_gateways(self):
  243. if not self.DATAKEY_VPN in self.data:
  244. return []
  245. gateways = set()
  246. vpn = self.data[self.DATAKEY_VPN]
  247. for key in vpn:
  248. for conntype in vpn[key]:
  249. for gateway in vpn[key][conntype]:
  250. gateways.add(gateway)
  251. return sorted(gateways)
    def get_vpn_connections(self):
        """
        Compile a per-key summary of VPN connections.

        Each result item carries the key, per-conntype ('active',
        'last') connection counts and gateway->remote maps, the sorted
        peer names, and an 'online' flag (at least one active remote).
        """
        if not self.DATAKEY_VPN in self.data:
            return []
        conntypes = ['active', 'last']
        result = []
        vpn = self.data[self.DATAKEY_VPN]
        for key in vpn:
            vpn_entry = vpn[key]
            if not isinstance(vpn_entry, dict):
                continue
            item = {
                'key': key,
                'count': {},
                'remote': {},
            }
            names = set()
            for conntype in conntypes:
                item['count'][conntype] = 0
                item['remote'][conntype] = {}
                if conntype in vpn_entry:
                    for gateway in vpn_entry[conntype]:
                        if 'remote' in vpn_entry[conntype][gateway]:
                            remote = vpn_entry[conntype][gateway]['remote']
                            # skip entries without a usable remote
                            if remote is None or remote == '':
                                continue
                            item['count'][conntype] += 1
                            item['remote'][conntype][gateway] = remote
                            # NOTE(review): peer names appear to be collected
                            # only for gateways carrying a non-empty 'remote'
                            # — confirm this nesting against upstream history
                            if 'peer' in vpn_entry[conntype][gateway]:
                                names.add(vpn_entry[conntype][gateway]['peer'])
            item['names'] = sorted(names)
            item['online'] = item['count']['active'] > 0
            result.append(item)
        return result
  285. def log_vpn_connect(self, key, peername, remote, gateway, timestamp):
  286. item = self.__get_vpn_item(key, create=True)
  287. # resolve remote addr to its netblock
  288. remote_raw = remote
  289. remote_resolved = None
  290. if remote is not None:
  291. remote_resolved = ffstatus.resolve_ipblock(remote)
  292. if remote_resolved is not None:
  293. logging.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(
  294. remote, remote_resolved['name'],
  295. ))
  296. remote = remote_resolved
  297. # store connection info
  298. item['active'][gateway] = {
  299. 'establish': timestamp,
  300. 'peer': peername,
  301. 'remote': remote,
  302. 'remote_raw': remote_raw,
  303. }
  304. def log_vpn_disconnect(self, key, gateway, timestamp):
  305. item = self.__get_vpn_item(key, create=True)
  306. active = {}
  307. if gateway in item['active']:
  308. active = item['active'][gateway]
  309. del item['active'][gateway]
  310. active['disestablish'] = timestamp
  311. item['last'][gateway] = active