|
@@ -14,6 +14,7 @@ DEFAULT_INTERVAL = 15
|
|
|
parser = argparse.ArgumentParser(description='Batman/Alfred Transmission Collection, Aggregation & Value Engine')
|
|
|
parser.add_argument('--logfile', help='path for log file')
|
|
|
parser.add_argument('--interval', type=int, default=DEFAULT_INTERVAL, help='data poll interval')
|
|
|
+parser.add_argument('-d', '--no-detach', action='store_true', help='Don\'t detach (daemonize) ourselves')
|
|
|
parser.add_argument('-n', '--no-send', action='store_true', help='Fetch data but don\'t send it')
|
|
|
parser.add_argument('-A', '--alfred-json', help='executable path for alfred-json')
|
|
|
parser.add_argument('-B', '--batadv-vis', help='executable path for batadv-vis')
|
|
@@ -27,6 +28,8 @@ if args.interval < 5:
|
|
|
print('A poll interval lower than 5s is not supported.')
|
|
|
sys.exit(1)
|
|
|
|
|
|
+shall_daemonize = not args.no_detach
|
|
|
+
|
|
|
logger = logging.getLogger()
|
|
|
logger.setLevel(logging.DEBUG)
|
|
|
|
|
@@ -35,6 +38,11 @@ if not args.logfile is None:
|
|
|
fh.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
|
|
|
logger.addHandler(fh)
|
|
|
|
|
|
+if args.no_detach:
|
|
|
+ ch = logging.StreamHandler(sys.stdout)
|
|
|
+ ch.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'))
|
|
|
+ logger.addHandler(ch)
|
|
|
+
|
|
|
logger.info('Starting up')
|
|
|
|
|
|
a = AlfredParser()
|
|
@@ -57,35 +65,37 @@ for i in [ ('AlfredParser', a), ('BatmanParser', b) ]:
|
|
|
print('FAILED SANITY CHECK: ' + str(err))
|
|
|
sys.exit(1)
|
|
|
|
|
|
-daemon_context = daemon.DaemonContext(
|
|
|
- files_preserve = [ fh.stream ],
|
|
|
-)
|
|
|
-with daemon_context:
|
|
|
- while True:
|
|
|
- try:
|
|
|
- ts = int(time.time())
|
|
|
- logger.debug('Step 1/3: Fetching data ...')
|
|
|
- alfreddata = a.fetch()
|
|
|
- batmandata = b.fetch()
|
|
|
- newdata = merge_alfred_batman(alfreddata, batmandata)
|
|
|
- logger.info('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(len(alfreddata), len(batmandata), len(newdata)))
|
|
|
-
|
|
|
- logger.debug('Step 2/3: Pushing update data ...')
|
|
|
- if not g is None:
|
|
|
- graphitedata = g.push(newdata, ts=ts)
|
|
|
- logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
|
|
|
- if not d is None:
|
|
|
- d.push(newdata)
|
|
|
-
|
|
|
-
|
|
|
- logger.debug('Step 3/3: Merging current data ...')
|
|
|
- data = dict_merge(data, newdata)
|
|
|
- logger.info('I have data for ' + str(len(data)) + ' nodes.')
|
|
|
- except Exception as err:
|
|
|
- logger.error(str(err))
|
|
|
-
|
|
|
- logger.debug('Sleeping for {0} seconds'.format(args.interval))
|
|
|
- time.sleep(args.interval)
|
|
|
-
|
|
|
- logger.info('Shutting down')
|
|
|
+if shall_daemonize:
|
|
|
+ daemon_context = daemon.DaemonContext(
|
|
|
+ files_preserve = [ fh.stream ],
|
|
|
+ )
|
|
|
+
|
|
|
+ daemon_context.open()
|
|
|
+
|
|
|
+while True:
|
|
|
+ try:
|
|
|
+ ts = int(time.time())
|
|
|
+ logger.debug('Step 1/3: Fetching data ...')
|
|
|
+ alfreddata = a.fetch()
|
|
|
+ batmandata = b.fetch()
|
|
|
+ newdata = merge_alfred_batman(alfreddata, batmandata)
|
|
|
+ logger.info('Fetched data: {0} ALFRED with {1} BATMAN makes {2} total'.format(len(alfreddata), len(batmandata), len(newdata)))
|
|
|
+
|
|
|
+ logger.debug('Step 2/3: Pushing update data ...')
|
|
|
+ if not g is None:
|
|
|
+ graphitedata = g.push(newdata, ts=ts)
|
|
|
+ logger.info('Sent ' + str(graphitedata.count('\n')+1) + ' lines to Graphite.')
|
|
|
+ if not d is None:
|
|
|
+ d.push(newdata)
|
|
|
+
|
|
|
+ logger.debug('Step 3/3: Merging current data ...')
|
|
|
+ data = dict_merge(data, newdata)
|
|
|
+ logger.info('I have data for ' + str(len(data)) + ' nodes.')
|
|
|
+ except Exception as err:
|
|
|
+ logger.error(str(err))
|
|
|
+
|
|
|
+ logger.debug('Sleeping for {0} seconds'.format(args.interval))
|
|
|
+ time.sleep(args.interval)
|
|
|
+
|
|
|
+logger.info('Shut down.')
|
|
|
|