  1. #!/usr/bin/python
  2. from __future__ import print_function
  3. from copy import deepcopy
  4. import daemon
  5. import logging
  6. import sys
  7. import time
  8. from ffstatus import *
  9. INTERVAL = 15
  10. LOGFILE = '/var/log/ffstatus.log'
  11. DUMMY_MODE = 1
  12. logger = logging.getLogger()
  13. logger.setLevel(logging.DEBUG)
  14. fh = logging.FileHandler(LOGFILE)
  15. fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
  16. logger.addHandler(fh)
  17. logger.info('Starting up')
  18. a = AlfredParser()
  19. d = DashingClient('dashing.krombel.de', 'TODO')
  20. g = GraphitePush('fdca:ffee:ff12:a254::da7a', 2003)
  21. data = { }
  22. if DUMMY_MODE:
  23. a.alfred_json = '/home/ffpb-statusbot/status-daemon/alfred-json'
  24. g.dont_send = True
  25. try:
  26. a.sanitycheck()
  27. except Exception as err:
  28. logger.critical('AlfredParser.sanitycheck() failed: ' + str(err))
  29. print('FAILED SANITY CHECK: ' + str(err))
  30. sys.exit(1)
  31. daemon_context = daemon.DaemonContext(
  32. files_preserve = [ fh.stream ],
  33. )
  34. with daemon_context:
  35. while True:
  36. try:
  37. ts = int(time.time())
  38. logger.debug('Step 1/3: Fetching data ...')
  39. newdata = a.fetch()
  40. logger.debug('Step 2/3: Pushing update data ...')
  41. graphitedata = g.push(newdata, ts=ts)
  42. d.push(newdata)
  43. logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
  44. logger.debug('Step 3/3: Merging current data ...')
  45. data = dict_merge(data, newdata)
  46. logger.info('I have data for ' + str(len(data)) + ' nodes.')
  47. except Exception as err:
  48. logger.error(str(err))
  49. time.sleep(INTERVAL)
  50. logger.info('Shutting down')