#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Freifunk status helpers.

Re-exports the parser/client classes of the package and provides utilities
for merging per-node data from Alfred and B.A.T.M.A.N. as well as resolving
IP addresses to their RIPE inetnum blocks.
"""

from copy import deepcopy
import json
import logging
import time
import urllib2

from .alfred import AlfredParser
from .batman import BatmanParser
from .dashing import DashingClient
from .graphite import GraphitePush
from .server import ApiServer
from .storage import Storage

__all__ = [
    'AlfredParser',
    'BatmanParser',
    'DashingClient',
    'GraphitePush',
    'Storage',
    'ApiServer',
    'dict_merge',
    'merge_alfred_batman',
    'resolve_ipblock',
    'mac2id',
]

logger = logging.getLogger('ffstatus')


def mac2id(mac):
    """Normalize a MAC address to a node id: lowercase, colons stripped."""
    return mac.lower().replace(':', '')


def dict_merge(a, b):
    """Recursively merge dicts.

    Not just simple ``a['key'] = b['key']``: if both a and b have a key
    whose value is a dict then dict_merge is called on both values and the
    result stored in the returned dictionary. Values that are lists are
    merged by appending the items of b's list that are not already present.
    Neither input is modified; if b is not a dict it wins outright.
    """
    if not isinstance(b, dict):
        return b
    result = deepcopy(a)
    for key, value in b.iteritems():
        if key in result and isinstance(result[key], dict):
            result[key] = dict_merge(result[key], value)
        elif key in result and isinstance(result[key], list):
            result[key] = result[key] + \
                [deepcopy(item) for item in value if item not in result[key]]
        else:
            result[key] = deepcopy(value)
    return result


def merge_alfred_batman(alfreddata, batmandata):
    """Merge per-node Alfred data with B.A.T.M.A.N. data.

    Nodes are matched by any of their known MAC-derived ids (top-level
    'mac'/'macs' as well as 'network.mac'/'network.mesh_interfaces'),
    honouring the alias list of each batman entry. Returns a new dict
    keyed by the Alfred node id.
    """
    merged = {}

    # Map every batman node id AND each of its aliases to the canonical id.
    batlookup = {}
    for nodeid in batmandata:
        batlookup[nodeid] = nodeid
        for alias in batmandata[nodeid]['aliases']:
            batlookup[alias] = nodeid

    for nodeid in alfreddata:
        nodeinfo = alfreddata[nodeid]

        # Collect every id under which this node might appear in batmandata.
        candidates = set([nodeid])
        if 'mac' in nodeinfo:
            candidates.add(mac2id(nodeinfo['mac']))
        if 'macs' in nodeinfo:
            for mac in nodeinfo['macs']:
                candidates.add(mac2id(mac))
        if 'network' in nodeinfo:
            net = nodeinfo['network']
            if 'mac' in net:
                candidates.add(mac2id(net['mac']))
            if 'mesh_interfaces' in net:
                for mac in net['mesh_interfaces']:
                    candidates.add(mac2id(mac))

        if 'neighbours' not in nodeinfo:
            nodeinfo['neighbours'] = []

        for candidate_raw in candidates:
            # Translate aliases to their canonical batman id, if known.
            candidate = batlookup.get(candidate_raw, candidate_raw)
            if candidate in batmandata:
                nodeinfo = dict_merge(nodeinfo, batmandata[candidate])

        merged[nodeid] = nodeinfo

    return merged


# Epoch timestamp until which RIPE lookups are suspended (None = not suspended).
no_ipblock_resolves_until = None


def resolve_ipblock(ipaddr):
    """Resolve the given IP address to its inetnum entry at RIPE.

    Returns a dict with 'name', 'block' and 'description' on success, or
    None when the lookup failed or lookups are temporarily suspended
    (after the RIPE API asked us to back off).
    """
    global no_ipblock_resolves_until

    if no_ipblock_resolves_until is not None:
        if no_ipblock_resolves_until < time.time():
            # Suspension expired - resume resolving.
            no_ipblock_resolves_until = None
        else:
            logger.info(
                'IP-Block-Resolving suspended for {0} seconds. '
                'Won\'t resolve \'{1}\' now.'.format(
                    int(no_ipblock_resolves_until - time.time()), ipaddr))
            return None

    url = 'http://rest.db.ripe.net/search.json?query-string=' + str(ipaddr)
    try:
        response = json.load(urllib2.urlopen(url))
        assert isinstance(response, dict)

        # Pick the first inet(6)num object and join its relevant attributes.
        obj = [x for x in response['objects']['object']
               if x['type'] in ['inetnum', 'inet6num']][0]
        attrib = obj['attributes']['attribute']
        netname = '\n'.join([x['value'] for x in attrib
                             if x['name'] == 'netname'])
        netblock = '\n'.join([x['value'] for x in attrib
                              if x['name'] in ['inetnum', 'inet6num']])
        desc = '\n'.join([x['value'] for x in attrib
                          if x['name'] == 'descr'])

        return {
            'name': netname,
            'block': netblock,
            'description': desc,
        }
    except urllib2.HTTPError as err:
        # Only HTTPError carries a response body and headers.
        output = err.read()
        logger.error('Error "{1}" querying ip \'{0}\' from RIPE API: {2}'
                     .format(ipaddr, err, output))
        if 'Retry-After' in err.headers:
            # BUGFIX: was err.header[...] (AttributeError) in the log call.
            retry = int(err.headers['Retry-After'])
            logger.warn(
                'I won\'t resolve IPs for {0} seconds as requested '
                'by RIPE API (header=\'{1}\').'.format(
                    retry, err.headers['Retry-After']))
            no_ipblock_resolves_until = time.time() + retry
        else:
            logger.warn('I won\'t resolve IPs for the next hour '
                        '(API didn\'t give better hint).')
            no_ipblock_resolves_until = time.time() + 3600
    except urllib2.URLError as err:
        # Plain URLError (DNS/connection failure) has no body or headers;
        # the old code crashed here calling err.read(). Back off for an hour.
        logger.error('Error "{1}" querying ip \'{0}\' from RIPE API.'
                     .format(ipaddr, err))
        no_ipblock_resolves_until = time.time() + 3600