batcave.py

#!/usr/bin/python
from __future__ import print_function

import argparse
from copy import deepcopy
import daemon
import logging
import sys
import time

from ffstatus import *

DEFAULT_INTERVAL = 15
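
# Command-line interface: log file, poll interval, dry-run mode, paths to the
# helper binaries, and the optional Graphite and Dashing push targets.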
parser = argparse.ArgumentParser(
    description='Batman/Alfred Transmission Collection, Aggregation & Value Engine')
parser.add_argument('--logfile', help='path for log file')
parser.add_argument('--interval', type=int, default=DEFAULT_INTERVAL, help='data poll interval')
parser.add_argument('-n', '--no-send', action='store_true', help='Fetch data but don\'t send it')
parser.add_argument('-A', '--alfred-json', help='executable path for alfred-json')
parser.add_argument('-B', '--batadv-vis', help='executable path for batadv-vis')
parser.add_argument('-G', '--graphite-host', help='Graphite host')
parser.add_argument('--graphite-port', type=int, default=2003, help='Graphite port')
parser.add_argument('--dashing-url', help='Dashing URL')
parser.add_argument('--dashing-token', help='Dashing\'s secret update token')

args = parser.parse_args()
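
# Refuse unreasonably short poll intervals.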
if args.interval < 5:
    print('A poll interval lower than 5s is not supported.')
    sys.exit(1)
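
# Logging: capture everything at DEBUG level; write to a file only when
# --logfile was given.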
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

fh = None  # stays None when no --logfile is configured
if args.logfile is not None:
    fh = logging.FileHandler(args.logfile)
    fh.setFormatter(logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
    logger.addHandler(fh)

logger.info('Starting up')
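
# Data sources (ALFRED and batman-adv) and the optional push targets
# (Dashing, Graphite); `data` accumulates the merged per-node information.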
a = AlfredParser()
b = BatmanParser()
d = DashingClient(args.dashing_url, args.dashing_token) if args.dashing_url is not None else None
g = GraphitePush(args.graphite_host, args.graphite_port) if args.graphite_host is not None else None
data = {}

if args.no_send:
    if g is not None:
        g.dont_send = True

if args.alfred_json is not None:
    a.alfred_json = args.alfred_json
if args.batadv_vis is not None:
    b.batadv_vis = args.batadv_vis
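
# Bail out early if the external helpers (alfred-json, batadv-vis) cannot be run.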
for name, instance in [('AlfredParser', a), ('BatmanParser', b)]:
    try:
        instance.sanitycheck()
    except Exception as err:
        logger.critical(name + '.sanitycheck() failed: ' + str(err))
        print('FAILED SANITY CHECK: ' + str(err))
        sys.exit(1)
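
# Detach into the background; keep the log file handle open across daemonisation.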
daemon_context = daemon.DaemonContext(
    # fh is None when no --logfile was given; passing its stream
    # unconditionally would raise a NameError/AttributeError here.
    files_preserve=[fh.stream] if fh is not None else None,
)
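
# Main poll loop: fetch from both sources, push the snapshot to Graphite and
# Dashing, then merge it into the accumulated per-node data.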
with daemon_context:
    while True:
        try:
            ts = int(time.time())

            logger.debug('Step 1/3: Fetching data ...')
            alfreddata = a.fetch()
            batmandata = b.fetch()
            newdata = merge_alfred_batman(alfreddata, batmandata)
            logger.info('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(
                len(alfreddata), len(batmandata), len(newdata)))

            logger.debug('Step 2/3: Pushing update data ...')
            if g is not None:
                graphitedata = g.push(newdata, ts=ts)
                logger.info('Sent ' + str(graphitedata.count('\n') + 1) + ' lines to Graphite.')
            if d is not None:
                d.push(newdata)

            logger.debug('Step 3/3: Merging current data ...')
            data = dict_merge(data, newdata)
            logger.info('I have data for ' + str(len(data)) + ' nodes.')
        except Exception as err:
            logger.error(str(err))

        logger.debug('Sleeping for {0} seconds'.format(args.interval))
        time.sleep(args.interval)

logger.info('Shutting down')