2
0
Quellcode durchsuchen

split ffstatus.py into submodules

Helge Jung vor 10 Jahren
Ursprung
Commit
89db997d18
5 geänderte Dateien mit 209 neuen und 113 gelöschten Zeilen
  1. 41 14
      ffstatus-daemon.py
  2. 0 99
      ffstatus.py
  3. 22 0
      ffstatus/__init__.py
  4. 91 0
      ffstatus/alfred.py
  5. 55 0
      ffstatus/graphite.py

+ 41 - 14
ffstatus-daemon.py

@@ -1,37 +1,64 @@
 #!/usr/bin/python
 from __future__ import print_function
+from copy import deepcopy
 import daemon
 import logging
 import sys
 import time
 
-import ffstatus
+from ffstatus import *
 
+INTERVAL = 15
 LOGFILE = '/var/log/ffstatus.log'
 
-a = ffstatus.AlfredParser()
+DUMMY_MODE = 1
+
+logger = logging.getLogger()
+logger.setLevel(logging.DEBUG)
+fh = logging.FileHandler(LOGFILE)
+fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
+logger.addHandler(fh)
+
+logger.info('Starting up')
+
+a = AlfredParser()
+g = GraphitePush('fdca:ffee:ff12:a254::da7a', 2003)
+data = { }
+
+if DUMMY_MODE:
+	a.alfred_json = '/home/ffpb-statusbot/status-daemon/alfred-json'
+	g.dont_send = True
 
 try:
 	a.sanitycheck()
 except Exception as err:
+	logger.critical('AlfredParser.sanitycheck() failed: ' + str(err))
 	print('FAILED SANITY CHECK: ' + str(err))
 	sys.exit(1)
 
-daemon_context = daemon.DaemonContext()
-
+daemon_context = daemon.DaemonContext(
+	files_preserve = [ fh.stream ],
+)
 with daemon_context:
-	logging.basicConfig(
-		filename=LOGFILE,
-		format='%(asctime)s [%(levelname)s] %(message)s',
-		datefmt='%Y-%m-%d %H:%M:%S',
-		level=logging.INFO,
-	)
 
 	while True:
 		try:
-			logging.debug('Execute')
-			a.execute()
-			time.sleep(15)
+			ts = int(time.time())
+			logger.debug('Step 1/3: Fetching data ...')
+			newdata = a.fetch()
+
+			logger.debug('Step 2/3: Pushing update data ...')
+			graphitedata = g.push(newdata, ts=ts)
+
+			logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
+
+			logger.debug('Step 3/3: Merging current data ...')
+			data = dict_merge(data, newdata)
+			logger.info('I have data for ' + str(len(data)) + ' nodes.')
 		except Exception as err:
-			logging.error(str(err))
+			logger.error(str(err))
+
+		time.sleep(INTERVAL)
+
+	logger.info('Shutting down')
 

+ 0 - 99
ffstatus.py

@@ -1,99 +0,0 @@
-#!/usr/bin/python
-
-from __future__ import print_function
-from copy import deepcopy
-import io
-import json
-import socket
-import subprocess
-import time
-import StringIO
-
-def dict_merge(a, b):
-    '''recursively merges dicts. not just simple a['key'] = b['key'], if
-    both a and b have a key whose value is a dict then dict_merge is called
-    on both values and the result stored in the returned dictionary.'''
-    if not isinstance(b, dict):
-        return b
-    result = deepcopy(a)
-    for k, v in b.iteritems():
-        if k in result and isinstance(result[k], dict):
-                result[k] = dict_merge(result[k], v)
-        else:
-            result[k] = deepcopy(v)
-    return result
-
-class AlfredParser:
-	alfred_datatypes = [ 158, 159 ]
-	prefix = "ffpb.nodes."
-	target_host = "fdca:ffee:ff12:a254::da7a"
-	target_port = 2003
-	alfred_dump = '/www/alfred.json'
-	whitelist = [ "24:a4:3c:f8:5e:fa", "24:a4:3c:f8:5e:db", "24:a4:3c:d9:4f:69", "24:a4:3c:a3:67:f0", "24:a4:3c:a3:68:07", "24:a4:3c:d2:21:d5" ]
-
-	def sanitycheck(self):
-		testdata = None
-		try:
-			testdata = subprocess.check_output(['alfred-json', '-z', '-r', str(int(self.alfred_datatypes[0]))])
-		except Exception as err:
-			raise Exception("alfred-json not found or incompatible: " + str(err))
-
-		try:
-			check = json.loads(testdata)
-		except Exception as err:
-			raise Exception("alfred-json does not return valid JSON data: " + str(err))
-
-		return True
-
-	def execute(self):
-		data = { }
-		ts = int(time.time())
-
-		for datatype in self.alfred_datatypes:
-			rawdata = subprocess.check_output(['alfred-json', '-z', '-r', str(int(datatype))])
-			newdata = json.loads(rawdata)
-			data = dict_merge(data, newdata)
-
-		if not self.alfred_dump is None:
-			jsondata = json.dumps(data, ensure_ascii=False)
-			f = io.open(self.alfred_dump, 'w')
-			f.write(jsondata)
-			f.close()
-
-		output = StringIO.StringIO()
-		for nodeid in data:
-			if (not self.whitelist is None) and (not nodeid in self.whitelist):
-				#print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
-				continue
-
-
-			nodeinfo = data[nodeid]
-			nodestats = None
-			if "statistics" in nodeinfo: nodestats = nodeinfo["statistics"]
-
-			if not nodestats is None:
-				print(self.prefix, nodeid, ".uptime", " ", int(float(nodestats["uptime"])), " ", ts, sep='', file=output)
-				traffic = None
-				if "traffic" in nodestats: traffic = nodestats["traffic"]
-				if not traffic is None:
-					print(self.prefix, nodeid, ".rxbytes", " ", int(traffic["rx"]["bytes"]), " ", ts, sep='', file=output)
-					print(self.prefix, nodeid, ".rxpackets", " ", int(traffic["rx"]["packets"]), " ", ts, sep='', file=output)
-					print(self.prefix, nodeid, ".txbytes", " ", int(traffic["tx"]["bytes"]), " ", ts, sep='', file=output)
-					print(self.prefix, nodeid, ".txpackets", " ", int(traffic["tx"]["packets"]), " ", ts, sep='', file=output)
-			else:
-				print("Node {0} does not provide statistics information.".format(nodeid))
-
-		s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
-		s.connect((self.target_host, self.target_port))
-		all_output = output.getvalue()
-		print(all_output)
-		s.sendall(all_output)
-		s.shutdown(socket.SHUT_WR)
-		s.close()
-
-		output.close()
-
-if __name__ == "__main__":
-	a = AlfredParser()
-	a.sanitycheck()
-	a.execute()

+ 22 - 0
ffstatus/__init__.py

@@ -0,0 +1,22 @@
+from copy import deepcopy
+
+from .alfred import AlfredParser
+from .graphite import GraphitePush
+
+__all__ = [ 'AlfredParser', 'GraphitePush', 'dict_merge' ]
+
+def dict_merge(a, b):
+    '''recursively merges dicts. not just simple a['key'] = b['key'], if
+    both a and b have a key whose value is a dict then dict_merge is called
+    on both values and the result stored in the returned dictionary.'''
+    if not isinstance(b, dict):
+        return b
+    result = deepcopy(a)
+    for k, v in b.iteritems():
+        if k in result and isinstance(result[k], dict):
+                result[k] = dict_merge(result[k], v)
+        else:
+            result[k] = deepcopy(v)
+    return result
+
+

+ 91 - 0
ffstatus/alfred.py

@@ -0,0 +1,91 @@
+#!/usr/bin/python
+
+from __future__ import print_function
+from copy import deepcopy
+import io
+import json
+import socket
+import subprocess
+import time
+import StringIO
+
+class AlfredParser:
+	alfred_json = 'alfred-json'
+	alfred_datatypes = [ ('static', 158), ('dynamic', 159) ]
+
+	def sanitycheck(self):
+		testdata = None
+		try:
+			testdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(self.alfred_datatypes[0][1]))])
+		except Exception as err:
+			raise Exception("alfred-json not found or incompatible: " + str(err))
+
+		try:
+			check = json.loads(testdata)
+		except Exception as err:
+			raise Exception("alfred-json does not return valid JSON data: " + str(err))
+
+		return True
+
+	def fetch(self, alfred_dump=None, include_rawdata=False):
+		data = { }
+		ts = int(time.time())
+
+		alfreddata = { }
+		for datatype in self.alfred_datatypes:
+			rawdata = subprocess.check_output([self.alfred_json, '-z', '-r', str(int(datatype[1]))])
+			newdata = json.loads(rawdata)
+			
+			for item in newdata:
+				if not item in alfreddata:
+					alfreddata[item] = { }
+
+				alfreddata[item][datatype[0]] = newdata[item]
+
+		if not alfred_dump is None:
+			jsondata = json.dumps(alfreddata, ensure_ascii=False)
+			f = io.open(alfred_dump, 'w')
+			f.write(jsondata)
+			f.close()
+
+		for alfredid in alfreddata:
+			alfredinfo = alfreddata[alfredid]
+
+			myid = alfredinfo['static']['node_id'] if 'node_id' in alfredinfo['static'] else alfredid.lower().replace(':', '')
+
+			nodeinfo = { 'hostname': None, 'mac': None, 'software': {}, 'statistics': {} }
+			data[myid] = nodeinfo
+
+			nodestatic = alfredinfo['static']
+			if 'hostname' in nodestatic: nodeinfo['hostname'] = nodestatic['hostname']
+			if 'network' in nodestatic and 'mac' in nodestatic['network']:
+				nodeinfo['mac']= nodestatic['network']['mac']
+			if 'software' in nodestatic:
+				sw = nodestatic['software']
+
+				nodeinfo['software']['firmware'] = sw['firmware']['release'] if 'firmware' in sw and 'release' in sw['firmware'] else None
+				nodeinfo['software']['autoupdater'] = sw['autoupdater']['branch'] if sw['autoupdater']['enabled'] else 'off'
+
+			nodedyn = alfredinfo['dynamic'] if 'dynamic' in alfredinfo else nodestatic['statistics']
+			if 'uptime' in nodedyn: nodeinfo['uptime'] = int(float(nodedyn['uptime']))
+			if 'gateway' in nodedyn: nodeinfo['gateway'] = nodedyn['gateway']
+				
+			traffic = nodedyn["traffic"] if "traffic" in nodedyn else None
+			if not traffic is None:
+				if not 'traffic' in nodeinfo['statistics']: nodeinfo['statistics']['traffic'] = { }
+				t = nodeinfo['statistics']['traffic']
+				t['rxbytes'] = int(traffic["rx"]["bytes"])
+				t['txbytes'] = int(traffic["tx"]["bytes"])
+
+		return data
+
+if __name__ == "__main__":
+	a = AlfredParser()
+	try:
+		a.sanitycheck()
+	except Exception  as err:
+		print('SANITY-CHECK failed:', str(err))
+		import sys
+		sys.exit(1)
+	data = a.fetch()
+	print(json.dumps(data))

+ 55 - 0
ffstatus/graphite.py

@@ -0,0 +1,55 @@
+#!/usr/bin/python
+
+from __future__ import print_function
+import socket
+import time
+import StringIO
+
+class GraphitePush:
+	dont_send = False
+
+	prefix = 'ffpb.nodes.'
+	target_host = None
+	target_port = 2003
+
+	whitelist = None #[ '24a43cf85efa', '24a43cf85edb', '24a43cd94f69', '24a43ca367f0', '24a43ca36807', '24a43cd221d5' ]
+
+	def __init__(self, host, port=2003):
+		self.target_host = host
+		self.target_port = port
+
+	def push(self, data, ts=None):
+		if ts is None: ts = time.time()
+		ts = int(ts)
+
+		output = StringIO.StringIO()
+		whitelist = [ x for x in self.whitelist ] if not self.whitelist is None and len(self.whitelist) > 0 else None
+		
+		for nodeid in data:
+			if (not whitelist is None) and (not nodeid in whitelist):
+				#print("Skipping node {0} as it is not in the configured whitelist.".format(nodeid))
+				continue
+
+			nodeinfo = data[nodeid]
+
+			for item in ['uptime']:
+				if item in nodeinfo:
+					print(self.prefix, nodeid, '.', item, ' ', nodeinfo[item], ' ', ts, sep='', file=output)
+
+			traffic = nodeinfo['statistics']['traffic'] if 'statistics' in nodeinfo and 'traffic' in nodeinfo['statistics'] else None
+			if not traffic is None:
+				for item in ['rxbytes', 'txbytes']:
+					print(self.prefix, nodeid, '.', item, ' ', traffic[item], ' ', ts, sep='', file=output)
+
+		all_output = output.getvalue()
+
+		if not self.dont_send:
+			s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
+			s.connect((self.target_host, self.target_port))
+			s.sendall(all_output)
+			s.shutdown(socket.SHUT_WR)
+			s.close()
+
+		output.close()
+
+		return all_output