-rw-r--r--  sfc/tests/functest/config.yaml  |   3
-rw-r--r--  sfc/tests/functest/run_tests.py | 290
2 files changed, 133 insertions, 160 deletions
diff --git a/sfc/tests/functest/config.yaml b/sfc/tests/functest/config.yaml
index bc6e1726..77e5ffc1 100644
--- a/sfc/tests/functest/config.yaml
+++ b/sfc/tests/functest/config.yaml
@@ -22,7 +22,6 @@ testcases:
     enabled: true
     order: 1
     description: "ODL-SFC tests"
-    testname_db: "sfc_two_chains_SSH_and_HTTP"
     net_name: example-net
     subnet_name: example-subnet
     router_name: example-router
@@ -36,7 +35,6 @@ testcases:
     enabled: true
     order: 0
     description: "ODL-SFC Testing SFs when they are located on the same chain"
-    testname_db: "sfc_one_chain_two_service_functions"
     net_name: example-net
     subnet_name: example-subnet
     router_name: example-router
@@ -50,7 +48,6 @@ testcases:
     enabled: false
     order: 2
     description: "Verify the behavior of a symmetric service chain"
-    testname_db: "sfc_symmetric_chain"
     net_name: example-net
     subnet_name: example-subnet
     router_name: example-router
diff --git a/sfc/tests/functest/run_tests.py b/sfc/tests/functest/run_tests.py
index 6760e168..c569165b 100644
--- a/sfc/tests/functest/run_tests.py
+++ b/sfc/tests/functest/run_tests.py
@@ -8,175 +8,151 @@
 # http://www.apache.org/licenses/LICENSE-2.0
 #
-import argparse
 import importlib
 import os
-import sys
 import time
+import sys
 import yaml
-import logging
-import functest.utils.functest_utils as ft_utils
-import functest.utils.openstack_utils as os_utils
-import opnfv.utils.ovs_logger as ovs_log
-import sfc.lib.cleanup as sfc_cleanup
-import sfc.lib.config as sfc_config
-import sfc.lib.utils as sfc_utils
-from functest.utils.constants import CONST
-
-from collections import OrderedDict
+from functest.core import testcase
+from functest.utils import openstack_utils as os_utils
+from opnfv.utils import ovs_logger as ovs_log
 from opnfv.deployment.factory import Factory as DeploymentFactory
+from sfc.lib import cleanup as sfc_cleanup
+from sfc.lib import config as sfc_config
+from sfc.lib import utils as sfc_utils
+from collections import OrderedDict
+import logging
 
 logger = logging.getLogger(__name__)
 
 COMMON_CONFIG = sfc_config.CommonConfig()
 
 
-def push_results(testname, start_time, end_time, criteria, details):
-    logger.info("Push testcase '%s' results into the DB...\n" % testname)
-    ft_utils.push_results_to_db("sfc",
-                                testname,
-                                start_time,
-                                end_time,
-                                criteria,
-                                details)
-
-
-def fetch_tackerc_file(controller_node):
-    rc_file = os.path.join(COMMON_CONFIG.sfc_test_dir, 'tackerc')
-    if not os.path.exists(rc_file):
-        logger.info("tackerc file not found, fetching it from controller")
-        controller_node.get_file("/root/tackerc", rc_file)
-    else:
-        logger.info("found tackerc file")
-    return rc_file
-
-
-def disable_heat_resource_finder_cache(nodes):
-    controllers = [node for node in nodes if node.is_controller()]
-    remote_heat_conf = '/etc/heat/heat.conf'
-    local_heat_conf = '/tmp/heat.conf'
-    for controller in controllers:
-        logger.info("Fetch {0} from controller {1}"
-                    .format(remote_heat_conf, controller.ip))
-        controller.get_file(remote_heat_conf, local_heat_conf)
-        with open(local_heat_conf, 'a') as cfg:
-            cfg.write('\n[resource_finder_cache]\n')
-            cfg.write('caching=False\n')
-        logger.info("Replace {0} with {1} in controller {2}"
-                    .format(remote_heat_conf, local_heat_conf, controller.ip))
-        controller.run_cmd('rm -f {0}'.format(remote_heat_conf))
-        controller.put_file(local_heat_conf, remote_heat_conf)
-        logger.info("Restart heat-engine in {0}".format(controller.ip))
-        controller.run_cmd('service heat-engine restart')
-    os.remove(local_heat_conf)
-    logger.info("Waiting for heat-engine to restart in controllers")
-    time.sleep(10)
-
-
-def main(report=False):
-    deploymentHandler = DeploymentFactory.get_handler(
-        COMMON_CONFIG.installer_type,
-        COMMON_CONFIG.installer_ip,
-        COMMON_CONFIG.installer_user,
-        installer_pwd=COMMON_CONFIG.installer_password)
-
-    cluster = COMMON_CONFIG.installer_cluster
-    nodes = (deploymentHandler.get_nodes({'cluster': cluster})
-             if cluster is not None
-             else deploymentHandler.get_nodes())
-
-    a_controller = [node for node in nodes
-                    if node.is_controller()][0]
-
-    disable_heat_resource_finder_cache(nodes)
-
-    rc_file = fetch_tackerc_file(a_controller)
-    os_utils.source_credentials(rc_file)
-
-    logger.info("Updating env with {0}".format(rc_file))
-    logger.info("OS credentials:")
-    for var, value in os.environ.items():
-        if var.startswith("OS_"):
-            logger.info("\t{0}={1}".format(var, value))
-
-    odl_ip, odl_port = sfc_utils.get_odl_ip_port(nodes)
-
-    ovs_logger = ovs_log.OVSLogger(
-        os.path.join(COMMON_CONFIG.sfc_test_dir, 'ovs-logs'),
-        COMMON_CONFIG.functest_results_dir)
-
-    config_file = os.path.join(COMMON_CONFIG.config_file)
-    with open(config_file) as f:
-        config_yaml = yaml.safe_load(f)
-
-    testcases = config_yaml.get("testcases")
-    testcases_ordered = OrderedDict(sorted(testcases.items(),
-                                           key=lambda x: x[1]['order']))
-    overall_details = {}
-    overall_status = "NOT TESTED"
-    overall_start_time = time.time()
-    for testcase, test_cfg in testcases_ordered.items():
-        if test_cfg['enabled']:
-            test_name = testcase
-            test_descr = test_cfg['description']
-            test_name_db = test_cfg['testname_db']
-            title = ("Running '%s - %s'" %
-                     (test_name, test_descr))
-            logger.info(title)
-            logger.info("%s\n" % ("=" * len(title)))
-            t = importlib.import_module(
-                "sfc.tests.functest.{0}".format(testcase),
-                package=None)
-            start_time = time.time()
-            try:
-                result = t.main()
-            except Exception, e:
-                logger.error("Exception when executing: %s" % testcase)
-                logger.error(e)
-                result = {'status': 'FAILED'}
-            end_time = time.time()
-            duration = end_time - start_time
-            logger.info("Results of test case '%s - %s':\n%s\n" %
-                        (test_name, test_descr, result))
-            if result['status'] == 'PASS':
-                status = 'PASS'
-                overall_details.update({test_name_db: "worked"})
-                if overall_status != "FAIL":
-                    overall_status = "PASS"
-            else:
-                status = 'FAIL'
-                overall_status = "FAIL"
-                overall_details.update({test_name_db: "execution error."})
-                ovs_logger.create_artifact_archive()
-
-            if report:
-                details = result.get("details")
-                push_results(
-                    test_name_db, start_time, end_time, status, details)
-
-            dic = {"duration": duration, "status": status}
-            overall_details.update({test_name_db: dic})
-            sfc_cleanup.cleanup(odl_ip=odl_ip, odl_port=odl_port)
-
-    overall_end_time = time.time()
-    if report:
-        push_results(
-            "odl-sfc", overall_start_time, overall_end_time,
-            overall_status, overall_details)
-
-    if overall_status == "FAIL":
-        sys.exit(-1)
-
-    sys.exit(0)
+class SfcFunctest(testcase.OSGCTestCase):
+
+    def __fetch_tackerc_file(self, controller_node):
+        rc_file = os.path.join(COMMON_CONFIG.sfc_test_dir, 'tackerc')
+        if not os.path.exists(rc_file):
+            logger.info("tackerc file not found, fetching it from controller")
+            controller_node.get_file("/root/tackerc", rc_file)
+        else:
+            logger.info("found tackerc file")
+        return rc_file
+
+    def __disable_heat_resource_finder_cache(self, nodes):
+        controllers = [node for node in nodes if node.is_controller()]
+        remote_heat_conf = '/etc/heat/heat.conf'
+        local_heat_conf = '/tmp/heat.conf'
+        for controller in controllers:
+            logger.info("Fetch {0} from controller {1}"
+                        .format(remote_heat_conf, controller.ip))
+            controller.get_file(remote_heat_conf, local_heat_conf)
+            with open(local_heat_conf, 'a') as cfg:
+                cfg.write('\n[resource_finder_cache]\n')
+                cfg.write('caching=False\n')
+            logger.info("Replace {0} with {1} in controller {2}"
+                        .format(remote_heat_conf,
+                                local_heat_conf,
+                                controller.ip))
+            controller.run_cmd('rm -f {0}'.format(remote_heat_conf))
+            controller.put_file(local_heat_conf, remote_heat_conf)
+            logger.info("Restart heat-engine in {0}".format(controller.ip))
+            controller.run_cmd('service heat-engine restart')
+        os.remove(local_heat_conf)
+        logger.info("Waiting for heat-engine to restart in controllers")
+        time.sleep(10)
+
+    def run(self):
+
+        deploymentHandler = DeploymentFactory.get_handler(
+            COMMON_CONFIG.installer_type,
+            COMMON_CONFIG.installer_ip,
+            COMMON_CONFIG.installer_user,
+            installer_pwd=COMMON_CONFIG.installer_password)
+
+        cluster = COMMON_CONFIG.installer_cluster
+        nodes = (deploymentHandler.get_nodes({'cluster': cluster})
+                 if cluster is not None
+                 else deploymentHandler.get_nodes())
+
+        a_controller = [node for node in nodes
+                        if node.is_controller()][0]
+
+        self.__disable_heat_resource_finder_cache(nodes)
+
+        rc_file = self.__fetch_tackerc_file(a_controller)
+        os_utils.source_credentials(rc_file)
+
+        logger.info("Updating env with {0}".format(rc_file))
+        logger.info("OS credentials:")
+        for var, value in os.environ.items():
+            if var.startswith("OS_"):
+                logger.info("\t{0}={1}".format(var, value))
+
+        odl_ip, odl_port = sfc_utils.get_odl_ip_port(nodes)
+
+        ovs_logger = ovs_log.OVSLogger(
+            os.path.join(COMMON_CONFIG.sfc_test_dir, 'ovs-logs'),
+            COMMON_CONFIG.functest_results_dir)
+
+        config_file = os.path.join(COMMON_CONFIG.config_file)
+        with open(config_file) as f:
+            config_yaml = yaml.safe_load(f)
+
+        testcases = config_yaml.get("testcases")
+        testcases_ordered = OrderedDict(sorted(testcases.items(),
+                                               key=lambda x: x[1]['order']))
+        overall_status = "NOT TESTED"
+        self.start_time = time.time()
+        for tc, test_cfg in testcases_ordered.items():
+            if test_cfg['enabled']:
+                test_name = tc
+                test_descr = test_cfg['description']
+                title = ("Running '%s - %s'" %
+                         (test_name, test_descr))
+                logger.info(title)
+                logger.info("%s\n" % ("=" * len(title)))
+                t = importlib.import_module(
+                    "sfc.tests.functest.{0}".format(test_name),
+                    package=None)
+                start_time = time.time()
+                try:
+                    result = t.main()
+                except Exception as e:
+                    logger.error("Exception when executing: %s" % test_name)
+                    logger.error(e)
+                    result = {'status': 'FAILED'}
+                end_time = time.time()
+                duration = end_time - start_time
+                logger.info("Results of test case '%s - %s':\n%s\n" %
+                            (test_name, test_descr, result))
+                if result['status'] == 'PASS':
+                    status = 'PASS'
+                    self.details.update({test_name: "worked"})
+                    if overall_status != "FAIL":
+                        overall_status = "PASS"
+                else:
+                    status = 'FAIL'
+                    overall_status = "FAIL"
+                    self.details.update({test_name: "execution error."})
+                    ovs_logger.create_artifact_archive()
+
+                dic = {"duration": duration, "status": status}
+                self.details.update({test_name: dic})
+                sfc_cleanup.cleanup(odl_ip=odl_ip, odl_port=odl_port)
+
+        self.stop_time = time.time()
+
+        if overall_status == "PASS":
+            self.result = 100
+            return testcase.TestCase.EX_OK
+
+        return testcase.TestCase.EX_RUN_ERROR
 
 
 if __name__ == '__main__':
-    logging.config.fileConfig(
-        CONST.__getattribute__('dir_functest_logging_cfg'))
-    parser = argparse.ArgumentParser()
-    parser.add_argument("-r", "--report",
-                        help="Create json result file",
-                        action="store_true")
-    args = parser.parse_args()
-    main(report=args.report)
+    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s'
+                        '- %(levelname)s - %(message)s')
+    SFC = SfcFunctest()
+    sys.exit(SFC.run())
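
Note on the contract between run_tests.py and the individual test modules: for every enabled entry in config.yaml, the new run() imports sfc.tests.functest.<testcase name> and calls its main(), and only the 'status' key of the returned dict is inspected ('PASS' counts as success; any other value, or an exception escaping main(), is recorded as a failure and triggers the OVS artifact archive). A minimal sketch of a module satisfying that contract is shown below; the module name sfc_example and its body are illustrative only and are not part of this change.

    # sfc/tests/functest/sfc_example.py -- hypothetical example, not part of this patch.
    # run_tests.py only requires a module-level main() that returns a dict whose
    # 'status' key is 'PASS' on success; anything else (or an uncaught exception)
    # is treated as a failure by SfcFunctest.run().
    import logging

    logger = logging.getLogger(__name__)


    def main():
        try:
            # set up the service chain and run the traffic checks here
            return {'status': 'PASS'}
        except Exception as e:
            logger.error("example scenario failed: %s", e)
            return {'status': 'FAILED'}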