__init__.py 5.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. from copy import deepcopy
  4. import json
  5. import logging
  6. import time
  7. import urllib2
  8. from .alfred import AlfredParser
  9. from .batman import BatmanParser
  10. from .dashing import DashingClient
  11. from .graphite import GraphitePush
  12. from .server import ApiServer
  13. from .filestorage import FileStorage
  14. from .redisstorage import RedisStorage
# Public names exported by the ffstatus package: the parser/client/storage
# classes re-exported from the submodules above, plus the helper functions
# defined below in this module.
__all__ = [
    'AlfredParser', 'BatmanParser',
    'DashingClient', 'GraphitePush',
    'FileStorage', 'ApiServer',
    'RedisStorage',
    'dict_merge', 'merge_alfred_batman',
    'resolve_ipblock', 'mac2id',
]

# Package-wide logger used by the helpers in this module.
logger = logging.getLogger('ffstatus')
  24. def mac2id(mac):
  25. if mac is None:
  26. return None
  27. return mac.lower().replace(':', '')
  28. def guess_mac_from_nodeid(nodeid):
  29. if len(nodeid) != 12:
  30. return None
  31. return ':'.join([
  32. nodeid[0:2],
  33. nodeid[2:4],
  34. nodeid[4:6],
  35. nodeid[6:8],
  36. nodeid[8:10],
  37. nodeid[10:12],
  38. ])
  39. def dict_merge(a, b, overwrite_lists=True):
  40. '''recursively merges dict's. not just simple a['key'] = b['key'], if
  41. both a and bhave a key who's value is a dict then dict_merge is called
  42. on both values and the result stored in the returned dictionary.'''
  43. if not isinstance(b, dict):
  44. return b
  45. result = deepcopy(a)
  46. for k, v in b.iteritems():
  47. if k in result:
  48. if isinstance(result[k], dict):
  49. result[k] = dict_merge(result[k], v,
  50. overwrite_lists=overwrite_lists)
  51. continue
  52. if isinstance(result[k], list):
  53. if overwrite_lists:
  54. result[k] = [deepcopy(x) for x in v]
  55. else:
  56. for x in v:
  57. if x not in result[k]:
  58. result[k].append(deepcopy(x))
  59. continue
  60. result[k] = deepcopy(v)
  61. return result
  62. def merge_alfred_batman(alfreddata, batmandata):
  63. merged = {}
  64. # lookup dict to map MACs to node ids
  65. batlookup = {}
  66. # list of (yet un-)handled BATMAN nodes
  67. unhandled_batnodes = set()
  68. # fill above variables from BATMAN data
  69. for nodeid in batmandata:
  70. batlookup[nodeid] = nodeid
  71. unhandled_batnodes.add(str(nodeid))
  72. for bda in batmandata[nodeid]['aliases']:
  73. batlookup[bda] = nodeid
  74. # iterate over ALFRED data
  75. for nodeid in alfreddata:
  76. nodeinfo = dict_merge({}, alfreddata[nodeid])
  77. candidates = set()
  78. candidates.add(nodeid)
  79. if 'mac' in nodeinfo:
  80. candidates.add(mac2id(nodeinfo['mac']))
  81. if 'macs' in nodeinfo:
  82. for mac in nodeinfo['macs']:
  83. candidates.add(mac2id(mac))
  84. if 'network' in nodeinfo:
  85. net = nodeinfo['network']
  86. if 'mac' in net:
  87. candidates.add(mac2id(net['mac']))
  88. if 'mesh_interfaces' in net:
  89. for mac in net['mesh_interfaces']:
  90. candidates.add(mac2id(mac))
  91. if not 'neighbours' in nodeinfo:
  92. nodeinfo['neighbours'] = {}
  93. for candidate_raw in candidates:
  94. candidate = batlookup.get(candidate_raw, candidate_raw)
  95. if candidate in batmandata:
  96. nodeinfo = dict_merge(nodeinfo, batmandata[candidate])
  97. if candidate in unhandled_batnodes:
  98. unhandled_batnodes.remove(str(candidate))
  99. merged[nodeid] = nodeinfo
  100. # handle BATMAN nodes which aren't in ALFRED
  101. for nodeid in unhandled_batnodes:
  102. logger.debug("unhandled BATMAN node '%s'", nodeid)
  103. nodeinfo = dict_merge({}, batmandata[nodeid])
  104. if not 'node_id' in nodeinfo:
  105. nodeinfo['node_id'] = nodeid
  106. merged[nodeid] = nodeinfo
  107. return merged
  108. no_ipblock_resolves_until = None
  109. def resolve_ipblock(ipaddr):
  110. """Resolve the given IP address to its inetnum entry at RIPE."""
  111. global no_ipblock_resolves_until
  112. if no_ipblock_resolves_until is not None:
  113. if no_ipblock_resolves_until < time.time():
  114. no_ipblock_resolves_until = None
  115. else:
  116. logger.info('IP-Block-Resolving suspended for %d seconds. ' +
  117. 'Won\'t resolve \'%s\' now.',
  118. int(no_ipblock_resolves_until-time.time()), ipaddr)
  119. return None
  120. url = 'http://rest.db.ripe.net/search.json?query-string=' + str(ipaddr)
  121. try:
  122. response = json.load(urllib2.urlopen(url))
  123. assert isinstance(response, dict)
  124. obj = [x for x in response['objects']['object'] if x['type'] in ['inetnum', 'inet6num']][0]
  125. attrib = obj['attributes']['attribute']
  126. netname = '\n'.join([x['value'] for x in attrib if x['name'] == 'netname'])
  127. netblock = '\n'.join([x['value'] for x in attrib if x['name'] in ['inetnum', 'inet6num']])
  128. desc = '\n'.join([x['value'] for x in attrib if x['name'] == 'descr'])
  129. return {
  130. 'name': netname,
  131. 'block': netblock,
  132. 'description': desc,
  133. }
  134. except urllib2.URLError as err:
  135. output = err.read()
  136. logger.error('Error "%s" querying ip \'%s\' from RIPE API: %s',
  137. err, ipaddr, output)
  138. if 'Retry-After' in err.headers:
  139. retry = int(err.headers['Retry-After'])
  140. logger.warn(
  141. 'I won\'t resolve IPs for %d seconds as requested by RIPE API' +
  142. ' (header=\'%s\').',
  143. retry, err.header['Retry-After'])
  144. no_ipblock_resolves_until = \
  145. time.time() + int(err.headers['Retry-After'])
  146. else:
  147. logger.warn('I won\'t resolve IPs for the next hour ' +
  148. '(API didn\'t give better hint).')
  149. no_ipblock_resolves_until = time.time() + 3600