author     Sridhar K. N. Rao <sridhar.rao@spirent.com>    2018-07-18 19:01:24 +0530
committer  Sridhar K. N. Rao <sridhar.rao@spirent.com>    2018-07-27 10:59:35 +0530
commit     65f40a88070baaa58d999cfb358a81befe6b8cc9 (patch)
tree       19e95644a8efebf268ad5743c3066e9eac21fa90 /tools/collectors/collectd
parent     50f5064b274626e030f3584a17c95ef02d8f4a07 (diff)
Collectd: Additional metrics storing options
In this patch, the following options are added for storing collectd metrics:
1. Collect all the metrics written by collectd into CSV files; a tar.gz
   archive is created, with the timestamp retained in its name.
2. Write selected metrics into a log file.
3. Fix pylint errors.
4. Terminate collectd by PID.
JIRA: VSPERF-588
Change-Id: Ib4d89d3dd6c622066fa0296b4415515fdca12252
Signed-off-by: Sridhar K. N. Rao <sridhar.rao@spirent.com>
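For illustration, the archive-and-cleanup flow behind options 1 and 4 above can be sketched in isolation. This is a minimal sketch, not the VSPERF code: it uses plain subprocess calls in place of the vsperf tasks helpers that appear in the diff below, and it assumes collectd writes its CSV output under /tmp/csv; the paths, timestamp format, and function name are illustrative.

```python
# Minimal sketch (assumptions: collectd CSV output lives under /tmp/csv;
# plain subprocess is used instead of vsperf's tasks helpers).
import glob
import os
import subprocess
import time

CSV_DIR = '/tmp/csv'  # assumed collectd CSV output directory


def archive_and_cleanup_metrics(timestamp=None):
    """Archive collectd CSV metrics into a timestamped tar.gz, then clean up."""
    timestamp = timestamp or time.strftime('%Y-%m-%d_%H%M%S')
    archive = '/tmp/collectd-{}.tar.gz'.format(timestamp)
    # The timestamp is kept in the archive name so per-test results stay distinct.
    subprocess.run(['tar', '-czf', archive, CSV_DIR], check=False)
    # Remove the collected CSV files so the next test starts from a clean slate.
    for name in glob.glob(os.path.join(CSV_DIR, '*')):
        subprocess.run(['rm', '-rf', name], check=False)
    return archive
```

In the actual patch the same steps are driven through the vsperf tasks helpers with sudo, and the timestamp comes from the LOG_TIMESTAMP setting.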
Diffstat (limited to 'tools/collectors/collectd')
-rw-r--r--  tools/collectors/collectd/collectd.py | 35
1 file changed, 32 insertions(+), 3 deletions(-)
diff --git a/tools/collectors/collectd/collectd.py b/tools/collectors/collectd/collectd.py
index 700aef47..5e996d3a 100644
--- a/tools/collectors/collectd/collectd.py
+++ b/tools/collectors/collectd/collectd.py
@@ -20,6 +20,7 @@ Plot the values of the stored samples once the test is completed
 import copy
 import csv
+import glob
 import logging
 import multiprocessing
 import os
@@ -30,6 +31,7 @@ import matplotlib.pyplot as plt
 import numpy as np
 import tools.collectors.collectd.collectd_bucky as cb
 from tools.collectors.collector import collector
+from tools import tasks
 from conf import settings
 
 # The y-lables. Keys in this dictionary are used as y-labels.
@@ -49,6 +51,7 @@ def get_label(sample):
             return label
     return None
 
+
 def plot_graphs(dict_of_arrays):
     """
     Plot the values
@@ -194,6 +197,7 @@ class Receiver(multiprocessing.Process):
                 val = self.pd_dict[sample[1]]
                 val.append((sample[2], sample[3]))
                 self.pd_dict[sample[1]] = val
+                logging.debug("COLLECTD %s", ' '.join(str(p) for p in sample))
 
     def stop(self):
         """
@@ -216,13 +220,27 @@ class Collectd(collector.ICollector):
         """
         Initialize collection of statistics
         """
-        self._log = os.path.join(results_dir,
-                                 settings.getValue('LOG_FILE_COLLECTD') +
-                                 '_' + test_name + '.log')
+        self.logger = logging.getLogger(__name__)
+        self.resultsdir = results_dir
+        self.testname = test_name
         self.results = {}
         self.sample_dict = multiprocessing.Manager().dict()
         self.control = multiprocessing.Value('b', False)
         self.receiver = Receiver(self.sample_dict, self.control)
+        self.cleanup_metrics()
+        # Assumption: collectd is installed at /opt/collectd
+        # and collectd is configured to write CSV output to /tmp/csv
+        self.pid = tasks.run_background_task(
+            ['sudo', '/opt/collectd/sbin/collectd'],
+            self.logger, 'Starting Collectd')
+
+    def cleanup_metrics(self):
+        """
+        Clean up the old or archived metrics
+        """
+        for name in glob.glob(os.path.join('/tmp/csv/', '*')):
+            tasks.run_task(['sudo', 'rm', '-rf', name], self.logger,
+                           'Cleaning up Metrics', True)
 
     def start(self):
         """
@@ -235,6 +253,11 @@ class Collectd(collector.ICollector):
         """
         Stop receiving samples
        """
+        tasks.terminate_task_subtree(self.pid, logger=self.logger)
+        # At times collectd fails to fully terminate.
+        # Killing the process by name helps too.
+        tasks.run_task(['sudo', 'pkill', '--signal', '2', 'collectd'],
+                       self.logger, 'Stopping Collectd', True)
         self.control.value = True
         self.receiver.stop()
         self.receiver.server.join(5)
@@ -244,6 +267,12 @@ class Collectd(collector.ICollector):
         if self.receiver.is_alive():
             self.receiver.terminate()
         self.results = copy.deepcopy(self.sample_dict)
+        # Back up the collectd metrics for this test into a tar.gz archive
+        filename = ('/tmp/collectd-' + settings.getValue('LOG_TIMESTAMP') +
+                    '.tar.gz')
+        tasks.run_task(['sudo', 'tar', '-czvf', filename, '/tmp/csv/'],
+                       self.logger, 'Zipping File', True)
+        self.cleanup_metrics()
 
     def get_results(self):
         """
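As a usage note, here is a hedged sketch of how a test harness might drive this collector. The constructor arguments (results_dir, test_name) and the start()/stop()/get_results() calls are inferred from the diff above and the collector.ICollector interface; the wrapper function itself is hypothetical and not part of the patch.

```python
# Hypothetical driver: not part of the patch, inferred from the diff above.
from tools.collectors.collectd.collectd import Collectd


def run_test_with_collectd(results_dir, test_name, run_traffic):
    # Constructing the collector cleans /tmp/csv and launches collectd
    # in the background (see __init__ in the diff).
    collectd_collector = Collectd(results_dir, test_name)
    collectd_collector.start()
    try:
        run_traffic()  # execute the actual test while samples are collected
    finally:
        # stop() terminates collectd by PID (with a pkill fallback),
        # archives the CSV metrics into /tmp/collectd-<timestamp>.tar.gz,
        # and clears /tmp/csv for the next run.
        collectd_collector.stop()
    return collectd_collector.get_results()
```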