ffstatus.py

#!/usr/bin/python
# Python 2 script: collects node data from alfred-json, merges the configured
# datatypes, and pushes per-node statistics as "metric value timestamp" lines
# (Carbon/Graphite plaintext style) to a stats host over TCP.
from __future__ import print_function
from copy import deepcopy
import io
import json
import socket
import subprocess
import time
import StringIO
def dict_merge(a, b):
    '''Recursively merges dicts. Not just a simple a['key'] = b['key']: if
    both a and b have a key whose value is a dict, then dict_merge is called
    on both values and the result is stored in the returned dictionary.'''
    if not isinstance(b, dict):
        return b
    result = deepcopy(a)
    for k, v in b.iteritems():
        if k in result and isinstance(result[k], dict):
            result[k] = dict_merge(result[k], v)
        else:
            result[k] = deepcopy(v)
    return result
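
# Illustrative sketch (not part of the original script): dict_merge lets the
# per-datatype alfred-json results be combined so later datatypes add to, rather
# than overwrite, the nested per-node dicts. The node id and values below are
# made up:
#
#   >>> a = {'node1': {'statistics': {'uptime': '42.0'}}}
#   >>> b = {'node1': {'statistics': {'traffic': {'rx': {'bytes': 1}}}},
#   ...      'node2': {}}
#   >>> sorted(dict_merge(a, b)['node1']['statistics'].keys())
#   ['traffic', 'uptime']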
class AlfredParser:
    # A.L.F.R.E.D. datatypes to query and the stats target to push to.
    alfred_datatypes = [ 158, 159 ]
    prefix = "ffpb.nodes."
    target_host = "fdca:ffee:ff12:a254::da7a"
    target_port = 2003
    # Optional raw dump of the merged alfred data; set to None to disable.
    alfred_dump = '/www/alfred.json'
    # Only nodes with these IDs are reported; None means "report all nodes".
    whitelist = [ "24:a4:3c:f8:5e:fa", "24:a4:3c:f8:5e:db", "24:a4:3c:d9:4f:69", "24:a4:3c:a3:67:f0", "24:a4:3c:a3:68:07", "24:a4:3c:d2:21:d5" ]
    def sanitycheck(self):
        testdata = None
        try:
            testdata = subprocess.check_output(['alfred-json', '-z', '-r', str(int(self.alfred_datatypes[0]))])
        except Exception as err:
            raise Exception("alfred-json not found or incompatible: " + str(err))
        try:
            json.loads(testdata)
        except Exception as err:
            raise Exception("alfred-json does not return valid JSON data: " + str(err))
        return True
    def execute(self):
        data = { }
        ts = int(time.time())

        # Fetch and merge the JSON output of every configured alfred datatype.
        for datatype in self.alfred_datatypes:
            rawdata = subprocess.check_output(['alfred-json', '-z', '-r', str(int(datatype))])
            newdata = json.loads(rawdata)
            data = dict_merge(data, newdata)

        # Optionally dump the merged data to disk.
        if self.alfred_dump is not None:
            jsondata = json.dumps(data, ensure_ascii=False)
            if not isinstance(jsondata, unicode):
                jsondata = jsondata.decode('utf-8')
            with io.open(self.alfred_dump, 'w', encoding='utf-8') as f:
                f.write(jsondata)

        # Build one metric line per statistic for all whitelisted nodes.
        output = StringIO.StringIO()
        for nodeid in data:
            if (self.whitelist is not None) and (nodeid not in self.whitelist):
                #print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
                continue

            nodeinfo = data[nodeid]
            nodestats = nodeinfo.get("statistics")
            if nodestats is not None:
                print(self.prefix, nodeid, ".uptime", " ", int(float(nodestats["uptime"])), " ", ts, sep='', file=output)

                traffic = nodestats.get("traffic")
                if traffic is not None:
                    print(self.prefix, nodeid, ".rxbytes", " ", int(traffic["rx"]["bytes"]), " ", ts, sep='', file=output)
                    print(self.prefix, nodeid, ".rxpackets", " ", int(traffic["rx"]["packets"]), " ", ts, sep='', file=output)
                    print(self.prefix, nodeid, ".txbytes", " ", int(traffic["tx"]["bytes"]), " ", ts, sep='', file=output)
                    print(self.prefix, nodeid, ".txpackets", " ", int(traffic["tx"]["packets"]), " ", ts, sep='', file=output)
            else:
                print("Node {0} does not provide statistics information.".format(nodeid))

        # Push everything to the stats host in one go.
        s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        s.connect((self.target_host, self.target_port))
        all_output = output.getvalue()
        print(all_output)
        s.sendall(all_output)
        s.shutdown(socket.SHUT_WR)
        s.close()
        output.close()
if __name__ == "__main__":
    a = AlfredParser()
    a.sanitycheck()
    a.execute()
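
# Illustrative sketch of the lines execute() sends for one whitelisted node.
# The counters and timestamp are made up; the real values come from alfred-json
# and time.time():
#
#   ffpb.nodes.24:a4:3c:f8:5e:fa.uptime 86400 1400000000
#   ffpb.nodes.24:a4:3c:f8:5e:fa.rxbytes 123456789 1400000000
#   ffpb.nodes.24:a4:3c:f8:5e:fa.txpackets 424242 1400000000
#
# Each line follows the "metric value timestamp" plaintext format understood by
# Carbon/Graphite-style collectors listening on port 2003.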