# ffstatus package (__init__.py)
  1. #!/usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. from copy import deepcopy
  4. import json
  5. import logging
  6. import time
  7. import urllib2
  8. from .alfred import AlfredParser
  9. from .batman import BatmanParser
  10. from .dashing import DashingClient
  11. from .graphite import GraphitePush
  12. from .server import ApiServer
  13. from .filestorage import FileStorage
  14. from .redisstorage import RedisStorage
  15. __all__ = [
  16. 'AlfredParser', 'BatmanParser',
  17. 'DashingClient', 'GraphitePush',
  18. 'FileStorage', 'ApiServer',
  19. 'RedisStorage',
  20. 'dict_merge', 'merge_alfred_batman',
  21. 'resolve_ipblock', 'mac2id',
  22. ]
  23. logger = logging.getLogger('ffstatus')
  24. def mac2id(mac):
  25. if mac is None:
  26. return None
  27. return mac.lower().replace(':', '')
  28. def dict_merge(a, b, overwrite_lists=True):
  29. '''recursively merges dict's. not just simple a['key'] = b['key'], if
  30. both a and bhave a key who's value is a dict then dict_merge is called
  31. on both values and the result stored in the returned dictionary.'''
  32. if not isinstance(b, dict):
  33. return b
  34. result = deepcopy(a)
  35. for k, v in b.iteritems():
  36. if k in result:
  37. if isinstance(result[k], dict):
  38. result[k] = dict_merge(result[k], v,
  39. overwrite_lists=overwrite_lists)
  40. continue
  41. if isinstance(result[k], list):
  42. if overwrite_lists:
  43. result[k] = [deepcopy(x) for x in v]
  44. else:
  45. for x in v:
  46. if x not in result[k]:
  47. result[k].append(deepcopy(x))
  48. continue
  49. result[k] = deepcopy(v)
  50. return result
  51. def merge_alfred_batman(alfreddata, batmandata):
  52. merged = {}
  53. # lookup dict to map MACs to node ids
  54. batlookup = {}
  55. # list of (yet un-)handled BATMAN nodes
  56. unhandled_batnodes = set()
  57. # fill above variables from BATMAN data
  58. for nodeid in batmandata:
  59. batlookup[nodeid] = nodeid
  60. unhandled_batnodes.add(str(nodeid))
  61. for bda in batmandata[nodeid]['aliases']:
  62. batlookup[bda] = nodeid
  63. # iterate over ALFRED data
  64. for nodeid in alfreddata:
  65. nodeinfo = dict_merge({}, alfreddata[nodeid])
  66. candidates = set()
  67. candidates.add(nodeid)
  68. if 'mac' in nodeinfo:
  69. candidates.add(mac2id(nodeinfo['mac']))
  70. if 'macs' in nodeinfo:
  71. for mac in nodeinfo['macs']:
  72. candidates.add(mac2id(mac))
  73. if 'network' in nodeinfo:
  74. net = nodeinfo['network']
  75. if 'mac' in net:
  76. candidates.add(mac2id(net['mac']))
  77. if 'mesh_interfaces' in net:
  78. for mac in net['mesh_interfaces']:
  79. candidates.add(mac2id(mac))
  80. if not 'neighbours' in nodeinfo:
  81. nodeinfo['neighbours'] = {}
  82. for candidate_raw in candidates:
  83. candidate = batlookup.get(candidate_raw, candidate_raw)
  84. if candidate in batmandata:
  85. nodeinfo = dict_merge(nodeinfo, batmandata[candidate])
  86. if candidate in unhandled_batnodes:
  87. unhandled_batnodes.remove(str(candidate))
  88. merged[nodeid] = nodeinfo
  89. # handle BATMAN nodes which aren't in ALFRED
  90. for nodeid in unhandled_batnodes:
  91. logger.debug("unhandled BATMAN node '%s'", nodeid)
  92. nodeinfo = dict_merge({}, batmandata[nodeid])
  93. if not 'node_id' in nodeinfo:
  94. nodeinfo['node_id'] = nodeid
  95. merged[nodeid] = nodeinfo
  96. return merged
  97. no_ipblock_resolves_until = None
  98. def resolve_ipblock(ipaddr):
  99. """Resolve the given IP address to its inetnum entry at RIPE."""
  100. global no_ipblock_resolves_until
  101. if no_ipblock_resolves_until is not None:
  102. if no_ipblock_resolves_until < time.time():
  103. no_ipblock_resolves_until = None
  104. else:
  105. logger.info('IP-Block-Resolving suspended for %d seconds. ' +
  106. 'Won\'t resolve \'%s\' now.',
  107. int(no_ipblock_resolves_until-time.time()), ipaddr)
  108. return None
  109. url = 'http://rest.db.ripe.net/search.json?query-string=' + str(ipaddr)
  110. try:
  111. response = json.load(urllib2.urlopen(url))
  112. assert isinstance(response, dict)
  113. obj = [x for x in response['objects']['object'] if x['type'] in ['inetnum', 'inet6num']][0]
  114. attrib = obj['attributes']['attribute']
  115. netname = '\n'.join([x['value'] for x in attrib if x['name'] == 'netname'])
  116. netblock = '\n'.join([x['value'] for x in attrib if x['name'] in ['inetnum', 'inet6num']])
  117. desc = '\n'.join([x['value'] for x in attrib if x['name'] == 'descr'])
  118. return {
  119. 'name': netname,
  120. 'block': netblock,
  121. 'description': desc,
  122. }
  123. except urllib2.URLError as err:
  124. output = err.read()
  125. logger.error('Error "%s" querying ip \'%s\' from RIPE API: %s',
  126. err, ipaddr, output)
  127. if 'Retry-After' in err.headers:
  128. retry = int(err.headers['Retry-After'])
  129. logger.warn(
  130. 'I won\'t resolve IPs for %d seconds as requested by RIPE API' +
  131. ' (header=\'%s\').',
  132. retry, err.header['Retry-After'])
  133. no_ipblock_resolves_until = \
  134. time.time() + int(err.headers['Retry-After'])
  135. else:
  136. logger.warn('I won\'t resolve IPs for the next hour ' +
  137. '(API didn\'t give better hint).')
  138. no_ipblock_resolves_until = time.time() + 3600