batcave.py

#!/usr/bin/python
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import daemon
import logging
import sys
import time
import threading

from ffstatus import \
    merge_alfred_batman, \
    ApiServer, \
    AlfredParser, BatmanParser, \
    DashingClient, GraphitePush, \
    FileStorage, RedisStorage
from ffstatus.exceptions import SanityCheckError

BATCAVE = 'Batman/Alfred Transmission Collection, Aggregation & Value Engine'
DEFAULT_INTERVAL = 15
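
# Example invocation (a sketch: host names are placeholders, the flags are
# the ones defined in get_args() below):
#   ./batcave.py --no-detach --verbose \
#       --storage redis:localhost:6379 \
#       --graphite-host graphite.example.net --interval 30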


def get_args():
    """Parse commandline arguments."""

    parser = argparse.ArgumentParser(description=BATCAVE)
    parser.add_argument('--logfile',
                        help='path for log file')
    parser.add_argument('--interval', type=int, default=DEFAULT_INTERVAL,
                        help='data poll interval')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='increase output verbosity')
    parser.add_argument('-d', '--no-detach', action='store_true',
                        help='Don\'t detach (daemonize) ourself')
    parser.add_argument('-n', '--no-send', action='store_true',
                        help='Fetch data but don\'t send it')
    parser.add_argument('-A', '--alfred-json',
                        help='executable path for alfred-json')
    parser.add_argument('-B', '--batadv-vis',
                        help='executable path for batadv-vis')
    parser.add_argument('-G', '--graphite-host',
                        help='Graphite host')
    parser.add_argument('--graphite-port', type=int, default=2003,
                        help='Graphite port')
    parser.add_argument('--dashing-url',
                        help='Dashing URL')
    parser.add_argument('--dashing-token',
                        help='Dashing\'s secret update token')
    parser.add_argument('--api-bind-host', default='',
                        help='API-Server Hostname')
    parser.add_argument('--api-bind-port', type=int, default=8888,
                        help='API-Server Port')
    parser.add_argument('-S', '--storage', default='.',
                        help='Path where to store data or ' +
                             '"redis:[<host>[:<port>[:<password>]]]"')

    return parser.parse_args()


def prepare_logging(args):
    """Configures Python's logging according to args."""

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG if args.verbose else logging.INFO)

    fmt = logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s',
        '%Y-%m-%d %H:%M:%S')

    if args.logfile is not None:
        file_handler = logging.FileHandler(args.logfile)
        file_handler.setFormatter(fmt)
        logger.addHandler(file_handler)

    if args.no_detach:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setFormatter(fmt)
        logger.addHandler(console_handler)

    return logger


def main():
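    """Set up sources, push targets and storage, then run the poll loop."""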
    args = get_args()

    if args.interval < 5:
        print('A poll interval lower than 5s is not supported.')
        sys.exit(1)

    shall_daemonize = not args.no_detach

    logger = prepare_logging(args)
    logger.info('Starting up')

    storage = None
    storage_target = args.storage
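    # "redis:[<host>[:<port>[:<password>]]]" selects Redis-backed storage;
    # any other value is treated as a directory path for FileStorage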
    if storage_target.startswith('redis:'):
        redis_opts = storage_target.split(':')
        redis_host = redis_opts[1] if len(redis_opts) > 1 else 'localhost'
        redis_port = int(redis_opts[2]) if len(redis_opts) > 2 else 6379
        redis_pass = redis_opts[3] if len(redis_opts) > 3 else None
        storage = RedisStorage(redis_host, redis_port, redis_pass)
    else:
        storage = FileStorage(args.storage)
    storage.open()
    logger.info('Storage: ' + str(storage))

    alfred = AlfredParser()
    batman = BatmanParser()
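
    # push targets are optional and only instantiated when configured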
    dashing = None
    if args.dashing_url is not None:
        dashing = DashingClient(args.dashing_url, args.dashing_token)

    graphite = None
    if args.graphite_host is not None:
        graphite = GraphitePush(args.graphite_host, args.graphite_port)

    if args.no_send:
        if graphite is not None:
            graphite.dont_send = True

    if args.alfred_json is not None:
        alfred.alfred_json = args.alfred_json
    if args.batadv_vis is not None:
        batman.batadv_vis = args.batadv_vis

    logger.debug('Configured A.L.F.R.E.D. source: %s', alfred)
    logger.debug('Configured B.A.T.M.A.N. source: %s', batman)
    logger.debug('Configured Dashing: %s', dashing)
    logger.debug('Configured Graphite: %s', graphite)

    # execute sanitycheck() where possible
    for i in [('AlfredParser', alfred), ('BatmanParser', batman)]:
        try:
            i[1].sanitycheck()
        except SanityCheckError as err:
            logger.critical(i[0] + '.sanitycheck() failed: ' + str(err))
            print('FAILED SANITY CHECK: ' + str(err))
            sys.exit(1)
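
    # serve the collected data through the API server in a daemon thread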
    server = ApiServer((args.api_bind_host, args.api_bind_port), storage)
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.daemon = True  # exit thread when main thread terminates
    server_thread.start()
    logger.info('Started server: ' + str(server))
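
    # detach into the background unless --no-detach was given; open log-file
    # streams are preserved so logging keeps working after the fork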
    if shall_daemonize:
        streams = [handler.stream for handler in logger.handlers
                   if isinstance(handler, logging.FileHandler)]
        daemon_context = daemon.DaemonContext(
            files_preserve=streams,
        )
        daemon_context.open()
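
    # poll loop: fetch ALFRED and B.A.T.M.A.N. data, push it to the configured
    # targets, merge it into storage, then sleep for the poll interval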
    while True:
        try:
            now = int(time.time())

            logger.debug('Step 1/3: Fetching data ...')
            alfreddata = alfred.fetch()
            batmandata = batman.fetch()
            newdata = merge_alfred_batman(alfreddata, batmandata)
            logger.debug(
                'Fetched data: %d ALFRED with %d BATMAN makes %d total',
                len(alfreddata), len(batmandata), len(newdata))

            logger.debug('Step 2/3: Pushing update data ...')
            if graphite is not None:
                graphitedata = graphite.push(newdata, timestamp=now)
                logger.info(
                    'Sent %d lines to Graphite.',
                    graphitedata.count('\n') + 1)
            if dashing is not None:
                dashing.push(newdata)

            logger.debug('Step 3/3: Merging current data ...')
            storage.merge_new_data(newdata)
            storage.save()
            logger.debug('I have data for %d nodes.', storage.status['nodes'])
        except Exception as err:
            import traceback
            logger.error(str(err) + "\n" + traceback.format_exc())

        logger.debug('Sleeping for {0} seconds'.format(args.interval))
        time.sleep(args.interval)
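
    # not reached as written: the loop above only ends when the process dies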
    storage.close()
    logger.info('Shut down.')


if __name__ == '__main__':
    main()