Diffstat (limited to 'sfc/tests/functest/run_tests.py')
-rw-r--r--  sfc/tests/functest/run_tests.py  119
1 file changed, 88 insertions(+), 31 deletions(-)
diff --git a/sfc/tests/functest/run_tests.py b/sfc/tests/functest/run_tests.py
index 762e6b3d..a6c218ab 100644
--- a/sfc/tests/functest/run_tests.py
+++ b/sfc/tests/functest/run_tests.py
@@ -19,15 +19,13 @@ import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
import functest.utils.openstack_utils as os_utils
import opnfv.utils.ovs_logger as ovs_log
+import sfc.lib.cleanup as sfc_cleanup
import sfc.lib.config as sfc_config
-from opnfv.deployment.factory import Factory as DeploymentFactory
+import sfc.lib.utils as sfc_utils
+from collections import OrderedDict
+from opnfv.deployment.factory import Factory as DeploymentFactory

-parser = argparse.ArgumentParser()
-parser.add_argument("-r", "--report",
-                    help="Create json result file",
-                    action="store_true")
-args = parser.parse_args()

logger = ft_logger.Logger(__name__).getLogger()

COMMON_CONFIG = sfc_config.CommonConfig()
@@ -53,7 +51,29 @@ def fetch_tackerc_file(controller_node):
    return rc_file


-def main():
+def disable_heat_resource_finder_cache(nodes):
+    controllers = [node for node in nodes if node.is_controller()]
+    remote_heat_conf = '/etc/heat/heat.conf'
+    local_heat_conf = '/tmp/heat.conf'
+    for controller in controllers:
+        logger.info("Fetch {0} from controller {1}"
+                    .format(remote_heat_conf, controller.ip))
+        controller.get_file(remote_heat_conf, local_heat_conf)
+        with open(local_heat_conf, 'a') as cfg:
+            cfg.write('\n[resource_finder_cache]\n')
+            cfg.write('caching=False\n')
+        logger.info("Replace {0} with {1} in controller {2}"
+                    .format(remote_heat_conf, local_heat_conf, controller.ip))
+        controller.run_cmd('rm -f {0}'.format(remote_heat_conf))
+        controller.put_file(local_heat_conf, remote_heat_conf)
+        logger.info("Restart heat-engine in {0}".format(controller.ip))
+        controller.run_cmd('service heat-engine restart')
+        os.remove(local_heat_conf)
+    logger.info("Waiting for heat-engine to restart in controllers")
+    time.sleep(10)
+
+
+def main(report=False):
    deploymentHandler = DeploymentFactory.get_handler(
        COMMON_CONFIG.installer_type,
        COMMON_CONFIG.installer_ip,
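The new disable_heat_resource_finder_cache() helper appends a [resource_finder_cache] section with caching=False to /etc/heat/heat.conf on every controller and then restarts heat-engine. As a quick local sanity check, not part of the patch, the fetched copy could be parsed back; a minimal sketch, assuming the copy still sits at /tmp/heat.conf as in the helper above:

    # Sketch only: confirm the appended section in a locally fetched heat.conf.
    # Uses the Python 2 ConfigParser module, matching this module's runtime.
    import ConfigParser

    cfg = ConfigParser.RawConfigParser()
    cfg.read('/tmp/heat.conf')
    # After the helper appends its two lines, this prints 'False'.
    print(cfg.get('resource_finder_cache', 'caching'))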
@@ -67,10 +87,20 @@ def main():
    a_controller = [node for node in nodes
                    if node.is_controller()][0]
+
+    disable_heat_resource_finder_cache(nodes)
+
    rc_file = fetch_tackerc_file(a_controller)
+    os_utils.source_credentials(rc_file)
+
+    logger.info("Updating env with {0}".format(rc_file))
+    logger.info("OS credentials:")
+    for var, value in os.environ.items():
+        if var.startswith("OS_"):
+            logger.info("\t{0}={1}".format(var, value))
+
+    odl_ip, odl_port = sfc_utils.get_odl_ip_port(nodes)
-    creds = os_utils.source_credentials(rc_file)
-    logger.info("Updating env with {0}".format(creds))
    ovs_logger = ovs_log.OVSLogger(
        os.path.join(COMMON_CONFIG.sfc_test_dir, 'ovs-logs'),
        COMMON_CONFIG.functest_results_dir)
@@ -80,44 +110,66 @@ def main():
        config_yaml = yaml.safe_load(f)
    testcases = config_yaml.get("testcases")
+    testcases_ordered = OrderedDict(sorted(testcases.items(),
+                                           key=lambda x: x[1]['order']))
    overall_details = {}
-    overall_status = "FAIL"
+    overall_status = "NOT TESTED"
    overall_start_time = time.time()
-    for testcase in testcases:
-        if testcases[testcase]['enabled']:
+    for testcase, test_cfg in testcases_ordered.items():
+        if test_cfg['enabled']:
            test_name = testcase
-            test_descr = testcases[testcase]['description']
-            test_name_db = testcases[testcase]['testname_db']
+            test_descr = test_cfg['description']
+            test_name_db = test_cfg['testname_db']
            title = ("Running '%s - %s'" %
                     (test_name, test_descr))
            logger.info(title)
            logger.info("%s\n" % ("=" * len(title)))
-            t = importlib.import_module(testcase, package=None)
+            t = importlib.import_module(
+                "sfc.tests.functest.{0}".format(testcase),
+                package=None)
            start_time = time.time()
-            result = t.main()
+            try:
+                result = t.main()
+            except Exception, e:
+                logger.error("Exception when executing: %s" % testcase)
+                logger.error(e)
+                result = {'status': 'FAILED'}
+                for node in nodes:
+                    if node.get_file("/usr/lib/python2.7/dist-packages/tacker/"
+                                     "sfc/plugin.py", "/tmp/plugin.py"):
+                        node.get_file("/var/log/tacker/tacker-server.log",
+                                      "/tmp/tacker-server.log")
+                        break
+                with open("/tmp/plugin.py") as fd:
+                    logger.info(fd.read())
+                with open("/tmp/tacker-server.log") as fd1:
+                    logger.info(fd1.read())
            end_time = time.time()
            duration = end_time - start_time
-            status = "FAIL"
-            if result != 0:
-                overall_details.update({test_name_db: "execution error."})
+            logger.info("Results of test case '%s - %s':\n%s\n" %
+                        (test_name, test_descr, result))
+            if result['status'] == 'PASS':
+                status = 'PASS'
+                overall_details.update({test_name_db: "worked"})
+                if overall_status != "FAIL":
+                    overall_status = "PASS"
            else:
-                status = result.get("status")
-                if status == "FAIL":
-                    overall_status = "FAIL"
-                    ovs_logger.create_artifact_archive()
-
-            logger.info("Results of test case '%s - %s':\n%s\n" %
-                        (test_name, test_descr, result))
+                status = 'FAIL'
+                overall_status = "FAIL"
+                overall_details.update({test_name_db: "execution error."})
+                ovs_logger.create_artifact_archive()
-            dic = {"duration": duration, "status": overall_status}
-            overall_details.update({test_name_db: dic})
-            if args.report:
+            if report:
                details = result.get("details")
                push_results(
                    test_name_db, start_time, end_time, status, details)
+            dic = {"duration": duration, "status": status}
+            overall_details.update({test_name_db: dic})
+            sfc_cleanup.cleanup(odl_ip=odl_ip, odl_port=odl_port)
+
    overall_end_time = time.time()
-    if args.report:
+    if report:
        push_results(
            "odl-sfc", overall_start_time, overall_end_time,
            overall_status, overall_details)
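The test loop now walks an OrderedDict sorted on each test case's 'order' field, so the YAML referenced by COMMON_CONFIG.config_file controls execution order. A minimal sketch of that ordering, using a hypothetical two-entry testcases dict (names and field values are illustrative only):

    # Sketch only: same sorting expression as the patch, applied to made-up data.
    from collections import OrderedDict

    testcases = {
        'sfc_chain_b': {'enabled': True, 'order': 1,
                        'description': 'second chain', 'testname_db': 'chain_b'},
        'sfc_chain_a': {'enabled': True, 'order': 0,
                        'description': 'first chain', 'testname_db': 'chain_a'},
    }
    testcases_ordered = OrderedDict(sorted(testcases.items(),
                                           key=lambda x: x[1]['order']))
    print(testcases_ordered.keys())  # ['sfc_chain_a', 'sfc_chain_b'] on Python 2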
@@ -129,4 +181,9 @@ def main():
if __name__ == '__main__':
-    main()
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-r", "--report",
+                        help="Create json result file",
+                        action="store_true")
+    args = parser.parse_args()
+    main(report=args.report)
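With the argument parsing moved under __main__ and main() taking a report flag, the runner still works from the command line (for example "python run_tests.py --report") and can also be imported and driven from other Python code. A minimal sketch of such a caller, where the wrapping script is hypothetical:

    # Sketch only: invoking the runner programmatically instead of via the CLI.
    import sfc.tests.functest.run_tests as run_tests

    if __name__ == '__main__':
        # Equivalent to "python run_tests.py --report"
        run_tests.main(report=True)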