basestorage.py

#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function, unicode_literals

import logging
import re
import time

import ffstatus

from .exceptions import VpnKeyFormatError


def sanitize_node(data, include_raw_data=False):
    """
    Filters potentially harmful entries from the node's data.
    """

    export = ffstatus.dict_merge({}, data)

    # remove fields from output: __RAW__
    if '__RAW__' in export and not include_raw_data:
        del export['__RAW__']

    return export
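
# Illustration only (the nested field names below are an assumption made for
# this example): given node data such as
#     {'node_id': 'abc123', 'hostname': 'node-one', '__RAW__': {'alfred': {}}}
# sanitize_node() returns a copy without the '__RAW__' key, unless
# include_raw_data=True is passed.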


class BaseStorage(object):
    """
    Provides operations on the storage data.
    This class gets subclassed to actually write the data
    to a file, database, whatever.
    """

    def open(self):
        """
        When overridden in a subclass,
        opens the persistent storage.
        """
        pass

    def save(self):
        """
        When overridden in a subclass,
        stores the data to a persistent storage.
        """
        pass

    def close(self):
        """
        When overridden in a subclass,
        closes the persistent storage.
        """
        pass

    @property
    def status(self):
        """Gets status information on the storage."""

        nodes = 0
        nodes_active = 0
        sum_clients = 0
        clients = set()

        for node in self.get_nodes():
            nodes += 1
            if self.get_nodestatus(node=node) == 'active':
                nodes_active += 1

            sum_clients += node.get('clientcount', 0)

            nodemacs = [x for x in node.get('macs', [])]
            if 'mac' in node:
                nodemacs.append(node['mac'])

            for client in node.get('clients', []):
                if client in nodemacs:
                    continue
                clients.add(client)

        return {
            'clients_sum': sum_clients,
            'clients_unique': len(clients),
            'nodes': nodes,
            'nodes_active': nodes_active,
        }
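
    # Shape of the dict returned by the `status` property (the numbers are
    # invented for illustration):
    #     {'clients_sum': 42, 'clients_unique': 40, 'nodes': 10, 'nodes_active': 8}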

    def merge_new_data(self, newdata):
        """Updates data in the storage by merging the new data."""

        if newdata is None or not isinstance(newdata, dict):
            raise ValueError("Expected a dict as new data.")

        # start merge on a copy of the current data
        current = {}
        for node in self.get_nodes():
            item_id = node['node_id']
            if item_id not in newdata:
                continue

            current[item_id] = ffstatus.dict_merge(node, {})
            current[item_id]['aliases'] = []
            current[item_id]['clients'] = []
            current[item_id]['neighbours'] = []

            if '__RAW__' not in current[item_id]:
                current[item_id]['__RAW__'] = {}

            if '__RAW__' in newdata[item_id]:
                for key in newdata[item_id]['__RAW__']:
                    if key in current[item_id]['__RAW__']:
                        del current[item_id]['__RAW__'][key]

        # merge the dictionaries
        updated = {}
        for itemid in newdata:
            if itemid not in current:
                # new element which did not exist in storage before, that's easy
                updated[itemid] = newdata[itemid]
                continue

            # merge the old and new element
            update = ffstatus.dict_merge(current[itemid], newdata[itemid])
            updated[itemid] = update

        # sanitize each item's data
        for itemid in updated:
            if itemid.startswith('__'):
                continue
            item = updated[itemid]

            # ensure 'node_id' is set
            if 'node_id' not in item:
                item['node_id'] = itemid

            # remove the node's own MACs from the clients list
            clients = [x for x in item.get('clients', [])]
            if 'mac' in item and item['mac'] in clients:
                clients.remove(item['mac'])
            for mac in item.get('macs', []):
                if mac in clients:
                    clients.remove(mac)

            # set clientcount
            item['clientcount'] = len(clients)

            # finally, store the item's new data
            self.set_node_data(itemid, item)

    def get_nodes(self, sortby=None, include_raw_data=False):
        """Gets a list of all known nodes."""

        nodes = self.get_all_nodes_raw()

        sorted_ids = [x for x in nodes]
        if sortby is not None:
            if sortby == 'name':
                sortkey = lambda x: nodes[x]['hostname'].lower()
                sorted_ids = sorted(sorted_ids, key=sortkey)
            elif sortby == 'id':
                sorted_ids = sorted(sorted_ids)

        result = []
        for nodeid in sorted_ids:
            if nodeid.startswith('__'):
                continue
            node = sanitize_node(nodes[nodeid], include_raw_data)
            result.append(node)

        return result

    def find_node(self, rawid):
        """
        Fetch node data by given id.
        If necessary, look through node aliases.
        """

        # look through all nodes
        found = None
        for node in self.get_nodes():
            # if we have a direct hit, return it immediately
            if node['node_id'] == rawid:
                return sanitize_node(node)

            # search through aliases
            if 'aliases' in node and rawid in node['aliases']:
                found = node

        # return found node
        if found is not None:
            return sanitize_node(found)
        else:
            return None

    def find_node_by_mac(self, mac):
        """Fetch node data by given MAC address."""

        needle = mac.lower()

        # iterate over all nodes
        for node in self.get_nodes():
            # check node's primary MAC
            if 'mac' in node and needle == node['mac'].lower():
                return sanitize_node(node)

            # check alias MACs
            if 'macs' in node:
                haystack = [x.lower() for x in node['macs']]
                if needle in haystack:
                    return sanitize_node(node)

        # MAC address not found
        return None

    def get_nodestatus(self, rawid=None, node=None):
        """Determine node's status."""

        # search node by the given id
        if node is None and rawid is not None:
            node = self.find_node(rawid)

        # handle unknown nodes
        if node is None:
            return None

        # check that the last batadv update is noted in the data
        updated = node.get(self.FIELDKEY_UPDATED, None)
        if updated is None or 'batadv' not in updated:
            return 'unknown'

        # make decision based on time of last batadv update
        diff = time.time() - updated['batadv']
        if diff < 150:
            return 'active'
        elif diff < 300:
            return 'stale'
        else:
            return 'offline'
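
    # Worked example for the thresholds above (the ages are invented): a node
    # whose last 'batadv' update happened 60 s ago is 'active', one updated
    # 200 s ago is 'stale', and anything older than 300 s is 'offline'.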

    def set_node_data(self, key, data):
        """Overwrite data for the node with the given key."""
        raise NotImplementedError("set_node_data was not overridden")

    def check_vpn_key(self, key):
        """Ensures the key is a hexadecimal string, raising VpnKeyFormatError otherwise."""
        if key is None or re.match(r'^[a-fA-F0-9]+$', key) is None:
            raise VpnKeyFormatError(key)
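
    # For illustration: check_vpn_key('0123abcDEF') passes silently, while
    # check_vpn_key('not-a-key') or check_vpn_key(None) raises VpnKeyFormatError.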

    def get_vpn_keys(self):
        """Gets a list of VPN keys."""
        raise NotImplementedError("get_vpn_keys was not overridden")

    def get_vpn_item(self, key, create=False):
        """Gets the stored data for the given VPN key."""
        self.check_vpn_key(key)
        raise NotImplementedError("get_vpn_item was not overridden")

    def store_vpn_item(self, key, data):
        """Stores the given data for the given VPN key."""
        raise NotImplementedError("store_vpn_item was not overridden")

    def resolve_vpn_remotes(self):
        """Iterates all remotes and resolves IP blocks not yet resolved."""

        vpn = self.get_vpn_keys()
        init_vpn_cache = {}

        for key in vpn:
            entry = self.get_vpn_item(key)
            entry_modified = False

            for mode in entry:
                if not isinstance(entry[mode], dict):
                    continue

                for gateway in entry[mode]:
                    if not isinstance(entry[mode][gateway], dict):
                        continue

                    item = entry[mode][gateway]
                    if 'remote' in item and 'remote_raw' not in item:
                        item['remote_raw'] = item['remote']

                        if item['remote'] in init_vpn_cache:
                            resolved = init_vpn_cache[item['remote']]
                        else:
                            resolved = ffstatus.resolve_ipblock(item['remote'])
                            init_vpn_cache[item['remote']] = resolved

                        if resolved is not None:
                            logging.info(
                                'Resolved VPN entry \'%s\' to net \'%s\'.',
                                item['remote'],
                                resolved['name'],
                            )
                            item['remote'] = resolved
                            entry_modified = True

            if entry_modified:
                self.store_vpn_item(key, entry)

    def get_vpn_gateways(self):
        """Gets a sorted list of all gateways seen in the VPN data."""

        gateways = set()
        vpn = self.get_vpn_keys()

        for key in vpn:
            entry = self.get_vpn_item(key)
            for conntype in entry:
                for gateway in entry[conntype]:
                    gateways.add(gateway)

        return sorted(gateways)

    def get_vpn_connections(self):
        """Summarizes the 'active' and 'last' VPN connections per key."""

        conntypes = ['active', 'last']

        result = []
        vpnkeys = self.get_vpn_keys()

        for key in vpnkeys:
            vpn_entry = self.get_vpn_item(key)
            if not isinstance(vpn_entry, dict):
                continue

            item = {
                'key': key,
                'count': {},
                'remote': {},
            }

            names = set()
            for conntype in conntypes:
                item['count'][conntype] = 0
                item['remote'][conntype] = {}

                if conntype in vpn_entry:
                    for gateway in vpn_entry[conntype]:
                        if 'remote' in vpn_entry[conntype][gateway]:
                            remote = vpn_entry[conntype][gateway]['remote']
                            if remote is None or remote == '':
                                continue
                            item['count'][conntype] += 1
                            item['remote'][conntype][gateway] = remote

                        if 'peer' in vpn_entry[conntype][gateway]:
                            names.add(vpn_entry[conntype][gateway]['peer'])

            item['names'] = sorted(names)
            item['online'] = item['count']['active'] > 0
            result.append(item)

        return result
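
    # Each element returned by get_vpn_connections() has this shape (the key,
    # gateway and peer names are invented; 'remote' values may be raw strings
    # or resolved netblock dicts):
    #     {'key': 'abc123', 'names': ['some-peer'], 'online': True,
    #      'count': {'active': 1, 'last': 0},
    #      'remote': {'active': {'gw01': '198.51.100.0/24'}, 'last': {}}}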

    def log_vpn_connect(self, key, peername, remote, gateway, timestamp):
        """Records a newly established VPN connection for the given key."""

        item = self.get_vpn_item(key, create=True)

        # resolve remote addr to its netblock
        remote_raw = remote
        remote_resolved = None
        if remote is not None:
            remote_resolved = ffstatus.resolve_ipblock(remote)
            if remote_resolved is not None:
                logging.debug('Resolved IP \'{0}\' to block \'{1}\'.'.format(
                    remote, remote_resolved['name'],
                ))
                remote = remote_resolved

        # store connection info
        item['active'][gateway] = {
            'establish': timestamp,
            'peer': peername,
            'remote': remote,
            'remote_raw': remote_raw,
        }

        self.store_vpn_item(key, item)

    def log_vpn_disconnect(self, key, gateway, timestamp):
        """Moves the given gateway's connection from 'active' to 'last'."""

        item = self.get_vpn_item(key, create=True)

        active = {}
        if gateway in item['active']:
            active = item['active'][gateway]
            del item['active'][gateway]

        active['disestablish'] = timestamp
        item['last'][gateway] = active

        self.store_vpn_item(key, item)
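

# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: a minimal in-memory
# subclass plus a tiny demo run. It assumes a concrete storage only needs the
# methods BaseStorage itself calls (get_all_nodes_raw, set_node_data and the
# VPN accessors) and that FIELDKEY_UPDATED names the per-node update-timestamp
# field; the class name, demo data and MAC addresses are made up.
if __name__ == '__main__':
    class MemoryStorageSketch(BaseStorage):
        """Keeps everything in plain dicts; nothing is persisted."""

        # assumed key under which per-node update timestamps are stored
        FIELDKEY_UPDATED = '__UPDATED__'

        def __init__(self):
            self.nodes = {}
            self.vpn = {}

        def get_all_nodes_raw(self):
            return self.nodes

        def set_node_data(self, key, data):
            self.nodes[key] = data

        def get_vpn_keys(self):
            return list(self.vpn)

        def get_vpn_item(self, key, create=False):
            self.check_vpn_key(key)
            if create:
                self.vpn.setdefault(key, {'active': {}, 'last': {}})
            return self.vpn.get(key, {})

        def store_vpn_item(self, key, data):
            self.vpn[key] = data

    storage = MemoryStorageSketch()
    storage.merge_new_data({
        'n1': {
            'hostname': 'node-one',
            'mac': '02:ca:fe:00:00:01',
            'clients': ['02:ca:fe:00:00:01', '02:de:ad:be:ef:01'],
            MemoryStorageSketch.FIELDKEY_UPDATED: {'batadv': time.time()},
        },
    })
    # expected: 1 node, active, with a single (non-node) client MAC
    print(storage.status)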