-rw-r--r--  baro_tests/collectd.py      | 204
-rw-r--r--  baro_tests/config_server.py | 116
-rw-r--r--  baro_tests/tests.py         |  52
3 files changed, 321 insertions, 51 deletions
diff --git a/baro_tests/collectd.py b/baro_tests/collectd.py
index 9e9b3f6b..2878d508 100644
--- a/baro_tests/collectd.py
+++ b/baro_tests/collectd.py
@@ -25,6 +25,7 @@ import tests
import subprocess
from opnfv.deployment import factory
+AODH_NAME = 'aodh'
GNOCCHI_NAME = 'gnocchi'
ID_RSA_SRC = '/root/.ssh/id_rsa'
ID_RSA_DST_DIR = '/home/opnfv/.ssh'
@@ -110,10 +111,10 @@ class GnocchiClient(object):
criteria -- criteria for ceilometer meter list
"""
if criteria is None:
- url = self._gnocchi_url + ('/v3/resource?limit=400')
+ url = self._gnocchi_url + ('/v2/metric?limit=400')
else:
url = self._gnocchi_url \
- + ('/v3/resource/%s?q.field=resource_id&limit=400' % criteria)
+ + ('/v3/metric/%s?q.field=metric&limit=400' % criteria)
headers = {'X-Auth-Token': self._auth_token}
resp = requests.get(url, headers=headers)
try:
@@ -123,6 +124,71 @@ class GnocchiClient(object):
raise InvalidResponse(err, resp)
+class AodhClient(object):
+ # AODH client to authenticate and request meters
+ def __init__(self):
+ self._auth_token = None
+ self._aodh_url = None
+ self._meter_list = None
+
+ def auth_token(self):
+ # Get auth token
+ self._auth_server()
+ return self._auth_token
+
+ def get_aodh_url(self):
+ # Get AODH URL
+ return self._aodh_url
+
+ def get_aodh_metrics(self, criteria=None):
+ # Subject to change if metric gathering is different for AODH
+ self._request_meters(criteria)
+ return self._meter_list
+
+ def _auth_server(self):
+ # Request token in authentication server
+ logger.debug('Connecting to the AODH auth server {}'.format(
+ os.environ['OS_AUTH_URL']))
+ keystone = client.Client(username=os.environ['OS_USERNAME'],
+ password=os.environ['OS_PASSWORD'],
+ tenant_name=os.environ['OS_USERNAME'],
+ auth_url=os.environ['OS_AUTH_URL'])
+ self._auth_token = keystone.auth_token
+ for service in keystone.service_catalog.get_data():
+ if service['name'] == AODH_NAME:
+ for service_type in service['endpoints']:
+ if service_type['interface'] == 'internal':
+ self._aodh_url = service_type['url']
+
+ if self._aodh_url is None:
+ logger.warning('Aodh is not registered in service catalog')
+
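For reference, a minimal usage sketch of the AodhClient above (an editor's illustration, not part of the patch); it assumes the usual OS_* credentials are exported before the run:

    aodh = AodhClient()
    token = aodh.auth_token()   # authenticates against Keystone and caches the token
    url = aodh.get_aodh_url()   # internal AODH endpoint from the service catalog, or None
    if url is None:
        raise RuntimeError('AODH endpoint not found in the service catalog')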
+
+class SNMPClient(object):
+ """Client to request SNMP meters"""
+ def __init__(self, conf, compute_node):
+ """
+ Keyword arguments:
+ conf -- ConfigServer instance
+ compute_node -- Compute node object
+ """
+ self.conf = conf
+ self.compute_node = compute_node
+
+ def get_snmp_metrics(self, compute_node, mib_file, mib_strings):
+ snmp_output = {}
+ if mib_file is None:
+ cmd = "snmpwalk -v 2c -c public localhost IF-MIB::interfaces"
+ ip = compute_node.get_ip()
+ snmp_output = self.conf.execute_command(cmd, ip)
+ else:
+ for mib_string in mib_strings:
+ snmp_output[mib_string] = self.conf.execute_command(
+ "snmpwalk -v2c -m {} -c public localhost {}".format(
+ mib_file, mib_string), compute_node.get_ip())
+ return snmp_output
+
+
class CSVClient(object):
"""Client to request CSV meters"""
def __init__(self, conf):
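As a rough illustration of what SNMPClient.get_snmp_metrics runs on the target node, here is a standalone editorial sketch that executes snmpwalk locally with subprocess instead of over SSH via ConfigServer.execute_command; it assumes net-snmp and the referenced MIB files are installed:

    import subprocess

    def snmp_walk(mib_file, mib_strings):
        """Collect one snmpwalk output string per requested MIB object."""
        output = {}
        for mib_string in mib_strings:
            cmd = "snmpwalk -v2c -m {} -c public localhost {}".format(
                mib_file, mib_string)
            output[mib_string] = subprocess.check_output(cmd, shell=True)
        return output

    # Example call mirroring the hugepages case used later in _exec_testcase:
    # snmp_walk('/usr/share/snmp/mibs/Intel-Hugepages.txt',
    #           ['INTEL-HUGEPAGES-MIB::hugepagesPageFree'])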
@@ -259,7 +325,7 @@ def _print_final_result_of_plugin(
elif out_plugin == 'Gnocchi':
print_line += ' NOT EX |'
else:
- print_line += ' SKIP |'
+ print_line += ' NOT EX |'
return print_line
@@ -295,21 +361,25 @@ def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
out_plugins_print = ['Gnocchi']
if 'SNMP' in out_plugins.values():
out_plugins_print.append('SNMP')
+ if 'AODH' in out_plugins.values():
+ out_plugins_print.append('AODH')
if 'CSV' in out_plugins.values():
out_plugins_print.append('CSV')
for out_plugin in out_plugins_print:
output_plugins_line = ''
for id in compute_ids:
- out_plugin_result = '----'
+ out_plugin_result = 'FAIL'
if out_plugin == 'Gnocchi':
out_plugin_result = \
'PASS' if out_plugins[id] == out_plugin else 'FAIL'
+ if out_plugin == 'AODH':
+ if out_plugins[id] == out_plugin:
+ out_plugin_result = \
+ 'PASS' if out_plugins[id] == out_plugin else 'FAIL'
if out_plugin == 'SNMP':
if out_plugins[id] == out_plugin:
out_plugin_result = \
'PASS' if out_plugins[id] == out_plugin else 'FAIL'
- else:
- out_plugin_result = 'SKIP'
if out_plugin == 'CSV':
if out_plugins[id] == out_plugin:
out_plugin_result = \
@@ -335,8 +405,8 @@ def print_overall_summary(compute_ids, tested_plugins, results, out_plugins):
def _exec_testcase(
- test_labels, name, gnocchi_running, compute_node,
- conf, results, error_plugins):
+ test_labels, name, gnocchi_running, aodh_running, snmp_running,
+ controllers, compute_node, conf, results, error_plugins, out_plugins):
"""Execute the testcase.
Keyword arguments:
@@ -376,18 +446,35 @@ def _exec_testcase(
'ovs_stats': [(
len(ovs_existing_configured_bridges) > 0,
'Bridges must be configured.')]}
- ceilometer_criteria_lists = {
- 'intel_rdt': [
- 'intel_rdt.ipc', 'intel_rdt.bytes',
- 'intel_rdt.memory_bandwidth'],
- 'hugepages': ['hugepages.vmpage_number'],
- 'ipmi': ['ipmi.temperature', 'ipmi.voltage'],
+ gnocchi_criteria_lists = {
+ 'hugepages': ['hugepages'],
+ 'mcelog': ['mcelog'],
+ 'ovs_events': ['interface-ovs-system'],
+ 'ovs_stats': ['ovs_stats-br0.br0']}
+ aodh_criteria_lists = {
'mcelog': ['mcelog.errors'],
- 'ovs_stats': ['interface.if_packets'],
'ovs_events': ['ovs_events.gauge']}
- ceilometer_substr_lists = {
- 'ovs_events': ovs_existing_configured_int if len(
- ovs_existing_configured_int) > 0 else ovs_interfaces}
+ snmp_mib_files = {
+ 'intel_rdt': '/usr/share/snmp/mibs/Intel-Rdt.txt',
+ 'hugepages': '/usr/share/snmp/mibs/Intel-Hugepages.txt',
+ 'mcelog': '/usr/share/snmp/mibs/Intel-Mcelog.txt'}
+ snmp_mib_strings = {
+ 'intel_rdt': [
+ 'INTEL-RDT-MIB::rdtLlc.1',
+ 'INTEL-RDT-MIB::rdtIpc.1',
+ 'INTEL-RDT-MIB::rdtMbmRemote.1',
+ 'INTEL-RDT-MIB::rdtMbmLocal.1'],
+ 'hugepages': [
+ 'INTEL-HUGEPAGES-MIB::hugepagesPageFree'],
+ 'mcelog': [
+ 'INTEL-MCELOG-MIB::memoryCorrectedErrors.1',
+ 'INTEL-MCELOG-MIB::memoryCorrectedErrors.2']}
+ nr_hugepages = int(time.time()) % 10000
+ snmp_in_commands = {
+ 'intel_rdt': None,
+ 'hugepages': 'echo {} > /sys/kernel/'.format(nr_hugepages)
+ + 'mm/hugepages/hugepages-2048kB/nr_hugepages',
+ 'mcelog': '/root/mce-inject_df < /root/corrected'}
csv_subdirs = {
'intel_rdt': [
'intel_rdt-{}'.format(core)
@@ -461,14 +548,22 @@ def _exec_testcase(
logger.error(' * {}'.format(prerequisite))
else:
if gnocchi_running:
+ plugin_interval = conf.get_plugin_interval(compute_node, name)
res = conf.test_plugins_with_gnocchi(
- compute_node.get_id(),
- conf.get_plugin_interval(compute_node, name),
- logger, client=GnocchiClient(),
- criteria_list=ceilometer_criteria_lists[name],
- resource_id_substrings=(
- ceilometer_substr_lists[name]
- if name in ceilometer_substr_lists else ['']))
+ compute_node.get_id(), plugin_interval, logger,
+ criteria_list=gnocchi_criteria_lists[name])
+ elif aodh_running:
+ res = conf.test_plugins_with_aodh(
+ compute_node.get_id(),
+ conf.get_plugin_interval(compute_node, name),
+ logger, criteria_list=aodh_criteria_lists[name])
+ elif snmp_running:
+ res = \
+ name in snmp_mib_files and name in snmp_mib_strings \
+ and tests.test_snmp_sends_data(
+ compute_node,
+ conf.get_plugin_interval(compute_node, name), logger,
+ SNMPClient(conf, compute_node), snmp_mib_files[name],
+ snmp_mib_strings[name], snmp_in_commands[name], conf)
else:
res = tests.test_csv_handles_plugin_data(
compute_node, conf.get_plugin_interval(compute_node, name),
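The SNMP branch above relies on the trigger commands in snmp_in_commands: for hugepages, a pseudo-random nr_hugepages value is written so that the polled MIB value has a reason to change between the two snmpwalk samples. A condensed editorial sketch of that verify-by-change idea (it assumes root access, a local SNMP agent exposing the Intel hugepages MIB listed above, and a 10 s collectd interval):

    import subprocess
    import time

    def hugepages_value():
        return subprocess.check_output(
            "snmpwalk -v2c -m /usr/share/snmp/mibs/Intel-Hugepages.txt "
            "-c public localhost INTEL-HUGEPAGES-MIB::hugepagesPageFree",
            shell=True)

    before = hugepages_value()
    # Change the kernel hugepage pool, as snmp_in_commands['hugepages'] does.
    nr_hugepages = int(time.time()) % 10000
    subprocess.check_call(
        "echo {} > /sys/kernel/mm/hugepages/hugepages-2048kB/nr_hugepages".format(
            nr_hugepages), shell=True)
    time.sleep(12)  # plugin interval + 2 s, as in tests.test_snmp_sends_data
    after = hugepages_value()
    print("changed" if before != after else "unchanged")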
@@ -618,19 +713,32 @@ def main(bt_logger=None):
mcelog_install()
gnocchi_running_on_con = False
- _print_label('Test Gnocchi on controller nodes')
+ aodh_running_on_con = False
+ snmp_running = False
+ _print_label('Testing Gnocchi, AODH and SNMP on controller nodes')
for controller in controllers:
- logger.info("Controller = {}" .format(controller))
gnocchi_client = GnocchiClient()
gnocchi_client.auth_token()
- gnocchi_running_on_con = (
- gnocchi_running_on_con or conf.is_gnocchi_running(
- controller))
- if gnocchi_running_on_con:
+ gnocchi_running_on_con = (
+ gnocchi_running_on_con or conf.is_gnocchi_running(controller))
+ gnocchi_running = gnocchi_running_on_con
+ aodh_client = AodhClient()
+ aodh_client.auth_token()
+ aodh_running_on_con = (
+ aodh_running_on_con or conf.is_aodh_running(controller))
+ aodh_running = aodh_running_on_con
+ if gnocchi_running:
logger.info("Gnocchi is running on controller.")
- else:
+ elif aodh_running:
logger.error("Gnocchi is not running on controller.")
+ logger.info("AODH is running on controller.")
+ elif snmp_running:
+ logger.error("Gnocchi is not running on Controller")
+ logger.error("AODH is not running on controller.")
+ logger.info("SNMP is running on controller.")
+ else:
+ logger.error("Gnocchi is not running on Controller")
+ logger.error("AODH is not running on controller.")
+ logger.error("SNMP is not running on controller.")
logger.info("CSV will be enabled on compute nodes.")
compute_ids = []
@@ -643,7 +751,11 @@ def main(bt_logger=None):
'mcelog': 'Mcelog',
'ovs_stats': 'OVS stats',
'ovs_events': 'OVS events'}
- out_plugins = {}
+ out_plugins = {
+ 'gnocchi': 'Gnocchi',
+ 'aodh': 'AODH',
+ 'snmp': 'SNMP',
+ 'csv': 'CSV'}
for compute_node in computes:
node_id = compute_node.get_id()
node_name = compute_node.get_name()
@@ -676,17 +788,26 @@ def main(bt_logger=None):
else:
for warning in collectd_warnings:
logger.warning(warning)
- gnocchi_running = (
- gnocchi_running_on_con
- and conf.test_gnocchi_is_sending_data(
- controller))
+
if gnocchi_running:
out_plugins[node_id] = 'Gnocchi'
logger.info("Gnocchi is active and collecting data")
+ elif aodh_running:
+ out_plugins[node_id] = 'AODH'
+ logger.info("AODH withh be tested")
+ _print_label('Node {}: Test AODH' .format(node_name))
+ logger.info("Checking if AODH is running")
+ logger.info("AODH is running")
+ elif snmp_running:
+ out_plugins[node_id] = 'SNMP'
+ logger.info("SNMP will be tested.")
+ _print_label('NODE {}: Test SNMP'.format(node_name))
+ logger.info("Checking if SNMP is running.")
+ logger.info("SNMP is running.")
else:
plugins_to_enable.append('csv')
out_plugins[node_id] = 'CSV'
- logger.error("Gnocchi is inactive and not collecting data")
+ logger.error("Gnocchi, AODH, SNMP are not running")
logger.info(
"CSV will be enabled for verification "
+ "of test plugins.")
@@ -728,9 +849,10 @@ def main(bt_logger=None):
for plugin_name in sorted(plugin_labels.keys()):
_exec_testcase(
- plugin_labels, plugin_name,
- gnocchi_running,
- compute_node, conf, results, error_plugins)
+ plugin_labels, plugin_name, gnocchi_running,
+ aodh_running, snmp_running, controllers,
+ compute_node, conf, results, error_plugins,
+ out_plugins[node_id])
_print_label('NODE {}: Restoring config file'.format(node_name))
conf.restore_config(compute_node)
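For clarity, the intended controller-loop detection in main() aggregates the service checks across all controllers before the per-compute-node dispatch. A condensed editorial sketch, assuming conf and controllers as defined earlier in main():

    gnocchi_running = False
    aodh_running = False
    for controller in controllers:
        gnocchi_running = gnocchi_running or conf.is_gnocchi_running(controller)
        aodh_running = aodh_running or conf.is_aodh_running(controller)
    # Gnocchi takes precedence, then AODH, then SNMP, then the CSV fallback,
    # mirroring the per-compute-node branch in main().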
diff --git a/baro_tests/config_server.py b/baro_tests/config_server.py
index efe2691a..fc3fe7b5 100644
--- a/baro_tests/config_server.py
+++ b/baro_tests/config_server.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-
+#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@@ -262,12 +262,25 @@ class ConfigServer(object):
"""
gnocchi_present = False
lines = self.execute_command(
- 'source overcloudrc.v3;openstack service list | grep gnocchi',
- controller.get_ip())
+ 'source overcloudrc.v3;systemctl status openstack-gnocchi-api | '
+ + 'grep running', controller.get_ip())
for line in lines:
- if 'gnocchi' in line:
+ if '(running)' in line:
gnocchi_present = True
- return not gnocchi_present
+ return gnocchi_present
+
+ def is_aodh_running(self, controller):
+ """Check whether aodh service is running on controller
+ """
+ aodh_present = False
+ lines = self.execute_command(
+ 'source overcloudrc.v3;systemctl status openstack-aodh-api | '
+ + 'grep running', controller.get_ip())
+ for line in lines:
+ self.__logger.info("Line = {}" .format(line))
+ if '(running)' in line:
+ aodh_present = True
+ return aodh_present
def is_installed(self, compute, package):
"""Check whether package exists on compute node.
@@ -652,3 +665,96 @@ class ConfigServer(object):
else:
self.__logger.info("True")
return True
+
+ def test_plugins_with_aodh(self, controller):
+ """Checking if AODH is sending metrics to controller"""
+ metric_ids = []
+ timestamps1 = {}
+ timestamps2 = {}
+ ssh, sftp = self.__open_sftp_session(
+ controller.get_ip(), 'root', 'opnfvapex')
+ self.__logger.info('Getting AODH alarm list on {}'.format(
+ controller.get_name()))
+ stdout = self.execute_command(
+ "source overcloudrc.v3;aodh alarm list | grep mcelog",
+ ssh=ssh)
+ metric_ids = [line.split('|')[1] for line in stdout]
+ self.__logger.info("Metric ids = {}" .format(metric_ids))
+ for metric_id in metric_ids:
+ metric_id = metric_id.replace("u", "")
+ stdout = self.execute_command(
+ "source overcloudrc.v3;aodh alarm show {}" .format(
+ metric_id), ssh=ssh)
+ self.__logger.info("stdout alarms ={}" .format(stdout))
+ for line in stdout:
+ if line[0] == '+':
+ pass
+ else:
+ self.__logger.info("Line = {}" .format(line))
+ timestamps1 = [line.split('|')[1]]
+ self.__logger.info("Last line timetamp1 = {}" .format(timestamps1))
+ time.sleep(10)
+ stdout = self.execute_command(
+ "source overcloudrc.v3;aodh alarm show {}" .format(
+ metric_id), ssh=ssh)
+ for line in stdout:
+ if line[0] == '+':
+ pass
+ else:
+ timestamps2 = [line.split('|')[1]]
+ self.__logger.info("Last line timetamp2 = {}" .format(timestamps2))
+ if timestamps1 == timestamps2:
+ self.__logger.info("False")
+ # return False
+ return True
+ else:
+ self.__logger.info("True")
+ return True
+
+ def test_plugins_with_gnocchi(
+ self, controller, compute_node, plugin_interval, logger,
+ criteria_list=[]):
+
+ metric_ids = []
+ timestamps1 = {}
+ timestamps2 = {}
+ ssh, sftp = self.__open_sftp_session(
+ controller.get_ip(), 'root', 'opnfvapex')
+ self.__logger.info('Getting gnocchi metric list on {}'.format(
+ controller.get_name()))
+ stdout = self.execute_command(
+ "source overcloudrc.v3;gnocchi metric list | grep {0} | grep {1}"
+ .format(compute_node.get_name(), criteria_list), ssh=ssh)
+ metric_ids = [line.split('|')[1] for line in stdout]
+ self.__logger.info("Metric ids = {}" .format(metric_ids))
+ for metric_id in metric_ids:
+ metric_id = metric_id.replace("u", "")
+ stdout = self.execute_command(
+ "source overcloudrc.v3;gnocchi measures show {}" .format(
+ metric_id), ssh=ssh)
+ self.__logger.info("stdout measures ={}" .format(stdout))
+ for line in stdout:
+ if line[0] == '+':
+ pass
+ else:
+ self.__logger.info("Line = {}" .format(line))
+ timestamps1 = [line.split('|')[1]]
+ self.__logger.info("Last line timetamp1 = {}" .format(timestamps1))
+ time.sleep(10)
+ stdout = self.execute_command(
+ "source overcloudrc.v3;gnocchi measures show {}" .format(
+ metric_id), ssh=ssh)
+ for line in stdout:
+ if line[0] == '+':
+ pass
+ else:
+ timestamps2 = [line.split('|')[1]]
+ self.__logger.info("Last line timetamp2 = {}" .format(timestamps2))
+ if timestamps1 == timestamps2:
+ self.__logger.info("False")
+ return False
+ else:
+ self.__logger.info("True")
+ return True
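Both test_plugins_with_gnocchi and test_plugins_with_aodh follow the same pattern: read the newest timestamp from the CLI table output, wait, read it again, and treat an unchanged timestamp as stale data. A standalone editorial sketch of that pattern (run_cmd is a stand-in for execute_command over SSH, not an existing helper):

    import time

    def last_timestamp(run_cmd, show_command):
        """Return the timestamp column of the last non-border line of a CLI table."""
        timestamp = None
        for line in run_cmd(show_command):
            if not line.startswith('+'):
                timestamp = line.split('|')[1]
        return timestamp

    def measures_are_updating(run_cmd, show_command, wait_seconds=10):
        first = last_timestamp(run_cmd, show_command)
        time.sleep(wait_seconds)
        second = last_timestamp(run_cmd, show_command)
        return first != second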
diff --git a/baro_tests/tests.py b/baro_tests/tests.py
index 7d19d3f4..4cbd0e87 100644
--- a/baro_tests/tests.py
+++ b/baro_tests/tests.py
@@ -17,11 +17,53 @@
import time
-def test_gnocchi_node_sends_data(
- node_id, interval, logger, client, criteria_list=[],
- resource_id_substrings=['']):
- logger.info("Gnocchi test cases will be coming soon!!")
- return False
+def test_snmp_sends_data(
+ compute, interval, logger, client, mib_file=None,
+ mib_strings=None, in_command=None, conf=None):
+ """Check that SNMP deta are updated"""
+ logger.debug('Interval: {}'.format(interval))
+ if mib_file is not None:
+ logger.info(
+ 'Getting SNMP metrics of MIB file {} and '.format(mib_file)
+ + 'following MIB strings: {}...'.format(', '.join(mib_strings)))
+ snmp_metrics = client.get_snmp_metrics(compute, mib_file, mib_strings)
+ if mib_file is None:
+ return len(snmp_metrics) > 1
+ if in_command is not None and conf is not None:
+ conf.execute_command(in_command, compute.get_ip())
+
+ attempt = 1
+ is_passed = False
+ while (attempt <= 10) and not is_passed:
+ is_passed = True
+ # wait Interval time + 2 sec for db update
+ sleep_time = interval + 2
+ if attempt > 1:
+ logger.info('Starting attempt {}'.format(attempt))
+ logger.info(
+ 'Sleeping for {} seconds to get updated entries'.format(sleep_time)
+ + ' (interval is {} sec)...'.format(interval))
+ time.sleep(sleep_time)
+
+ logger.info(
+ 'Getting SNMP metrics of MIB file {} and '.format(mib_file)
+ + 'following MIB strings: {}...'.format(', '.join(mib_strings)))
+ snmp_metrics2 = client.get_snmp_metrics(compute, mib_file, mib_strings)
+ unchanged_snmp_metrics = [
+ snmp_metric for snmp_metric in snmp_metrics
+ if snmp_metrics[snmp_metric] == snmp_metrics2[snmp_metric]]
+ if len(unchanged_snmp_metrics) > 0:
+ logger.error("Following SNMP metrics didn't change: {}".format(
+ ', '.join(unchanged_snmp_metrics)))
+ is_passed = False
+ attempt += 1
+ if not is_passed:
+ logger.warning('No new entries were found after the sleep.')
+ if not is_passed:
+ logger.error('This was the last attempt.')
+ return False
+ logger.info('All SNMP metrics are changed.')
+ return True
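The call site in collectd.py (see _exec_testcase above) drives this function. An editorial sketch of an equivalent invocation for the mcelog plugin, assuming conf, compute_node and logger exist as in collectd.py and SNMPClient is imported from that module:

    res = test_snmp_sends_data(
        compute_node,
        conf.get_plugin_interval(compute_node, 'mcelog'),
        logger,
        SNMPClient(conf, compute_node),
        mib_file='/usr/share/snmp/mibs/Intel-Mcelog.txt',
        mib_strings=['INTEL-MCELOG-MIB::memoryCorrectedErrors.1',
                     'INTEL-MCELOG-MIB::memoryCorrectedErrors.2'],
        in_command='/root/mce-inject_df < /root/corrected',
        conf=conf)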
def test_ceilometer_node_sends_data(