ffstatus-daemon.py

#!/usr/bin/python
from __future__ import print_function
from copy import deepcopy
import daemon
import logging
import sys
import time

from ffstatus import *

# Polling interval in seconds, log destination, and a flag that switches the
# daemon into a local test mode (use a local alfred-json path, skip the actual
# Graphite send).
INTERVAL = 15
LOGFILE = '/var/log/ffstatus.log'
DUMMY_MODE = 1

# Log to a file with timestamps; the handler's stream must survive daemonization.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(LOGFILE)
fh.setFormatter(logging.Formatter(
    '%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
logger.addHandler(fh)

logger.info('Starting up')

a = AlfredParser()
g = GraphitePush('fdca:ffee:ff12:a254::da7a', 2003)
data = {}

if DUMMY_MODE:
    a.alfred_json = '/home/ffpb-statusbot/status-daemon/alfred-json'
    g.dont_send = True

# Bail out early if the alfred-json setup is unusable.
try:
    a.sanitycheck()
except Exception as err:
    logger.critical('AlfredParser.sanitycheck() failed: ' + str(err))
    print('FAILED SANITY CHECK: ' + str(err))
    sys.exit(1)

# Detach into the background, but keep the log file handle open.
daemon_context = daemon.DaemonContext(
    files_preserve=[fh.stream],
)

with daemon_context:
    while True:
        try:
            ts = int(time.time())

            logger.debug('Step 1/3: Fetching data ...')
            newdata = a.fetch()

            logger.debug('Step 2/3: Pushing update data ...')
            graphitedata = g.push(newdata, ts=ts)
            logger.info('Sent ' + str(graphitedata.count('\n') + 1) +
                        ' lines to Graphite.')

            logger.debug('Step 3/3: Merging current data ...')
            data = dict_merge(data, newdata)
            logger.info('I have data for ' + str(len(data)) + ' nodes.')
        except Exception as err:
            # Keep the daemon alive; log the error and retry next interval.
            logger.error(str(err))

        time.sleep(INTERVAL)

logger.info('Shutting down')
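The merge step above depends on dict_merge() from the ffstatus package, whose
implementation is not shown here. As an illustration only (a minimal sketch, not
the package's actual code), the loop's behaviour suggests a recursive merge so
that per-node data accumulates across polling intervals instead of each fetch
replacing the previous state wholesale:

def dict_merge(base, update):
    """Recursively merge `update` into a copy of `base` (illustrative sketch)."""
    result = dict(base)
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(result.get(key), dict):
            result[key] = dict_merge(result[key], value)
        else:
            result[key] = value
    return result

With a merge like this, len(data) in the log message counts every node seen so
far, not just the nodes present in the most recent alfred dump.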