Diffstat (limited to 'functest/opnfv_tests/features/domino.py')
-rwxr-xr-x  functest/opnfv_tests/features/domino.py  117
1 file changed, 56 insertions, 61 deletions
diff --git a/functest/opnfv_tests/features/domino.py b/functest/opnfv_tests/features/domino.py
index 7705c07b..445a7a64 100755
--- a/functest/opnfv_tests/features/domino.py
+++ b/functest/opnfv_tests/features/domino.py
@@ -12,76 +12,71 @@
# After successful ping, both the VMs are deleted.
# 0.2: measure test duration and publish results under json format
# 0.3: add report flag to push results when needed
-#
+# 0.4: refactoring to match Test abstraction class
import argparse
+import sys
import time
+from functest.core import TestCasesBase
import functest.utils.functest_logger as ft_logger
import functest.utils.functest_utils as ft_utils
-parser = argparse.ArgumentParser()
-
-parser.add_argument("-r", "--report",
-                    help="Create json result file",
-                    action="store_true")
-args = parser.parse_args()
-
-
-DOMINO_REPO = \
-    ft_utils.get_functest_config('general.directories.dir_repo_domino')
-RESULTS_DIR = \
-    ft_utils.get_functest_config('general.directories.dir_results')
-
-logger = ft_logger.Logger("domino").getLogger()
-
-
-def main():
-    cmd = 'cd %s && ./tests/run_multinode.sh' % DOMINO_REPO
-    log_file = RESULTS_DIR + "/domino.log"
-    start_time = time.time()
-
-    ret = ft_utils.execute_command(cmd,
-                                   output_file=log_file)
-
-    stop_time = time.time()
-    duration = round(stop_time - start_time, 1)
-    if ret == 0 and duration > 1:
-        logger.info("domino OK")
-        test_status = 'OK'
-    elif ret == 0 and duration <= 1:
-        logger.info("domino TEST SKIPPED")
-        test_status = 'SKIPPED'
-    else:
-        logger.info("domino FAILED")
-        test_status = 'NOK'
-
-    details = {
-        'timestart': start_time,
-        'duration': duration,
-        'status': test_status,
-    }
-
-    status = "FAIL"
-    if details['status'] == "OK":
-        status = "PASS"
-    elif details['status'] == "SKIPPED":
-        status = "SKIP"
-    ft_utils.logger_test_results("Domino",
-                                 "domino-multinode",
-                                 status,
-                                 details)
-    if args.report:
+class DominoCases(TestCasesBase.TestCasesBase):
+    DOMINO_REPO = \
+        ft_utils.get_functest_config('general.directories.dir_repo_domino')
+    RESULTS_DIR = \
+        ft_utils.get_functest_config('general.directories.dir_results')
+    logger = ft_logger.Logger("domino").getLogger()
+
+    def __init__(self):
+        self.project_name = "domino"
+        self.case_name = "domino-multinode"
+
+    def main(self, **kwargs):
+        cmd = 'cd %s && ./tests/run_multinode.sh' % self.DOMINO_REPO
+        log_file = self.RESULTS_DIR + "/domino.log"
+        start_time = time.time()
+
+        ret = ft_utils.execute_command(cmd,
+                                       output_file=log_file)
+
+        stop_time = time.time()
+        duration = round(stop_time - start_time, 1)
+        if ret == 0 and duration > 1:
+            self.logger.info("domino OK")
+            status = 'PASS'
+        elif ret == 0 and duration <= 1:
+            self.logger.info("domino TEST SKIPPED")
+            status = 'SKIP'
+        else:
+            self.logger.info("domino FAILED")
+            status = "FAIL"
+
+        # report status only if tests run (FAIL OR PASS)
        if status is not "SKIP":
-            ft_utils.push_results_to_db("domino",
-                                        "domino-multinode",
-                                        start_time,
-                                        stop_time,
-                                        status,
-                                        details)
-            logger.info("Domino results pushed to DB")
+            self.criteria = status
+            self.start_time = start_time
+            self.stop_time = stop_time
+            self.details = {}
+    def run(self):
+        kwargs = {}
+        return self.main(**kwargs)
if __name__ == '__main__':
-    main()
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-r", "--report",
+                        help="Create json result file",
+                        action="store_true")
+    args = vars(parser.parse_args())
+    domino = DominoCases()
+    try:
+        result = domino.main(**args)
+        if result != TestCasesBase.TestCasesBase.EX_OK:
+            sys.exit(result)
+        if args['report']:
+            sys.exit(domino.push_to_db())
+    except Exception:
+        sys.exit(TestCasesBase.TestCasesBase.EX_RUN_ERROR)
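
Note: the refactored class depends on functest.core.TestCasesBase, which is not part of this change. The sketch below is a minimal, hypothetical stand-in for the contract the diff appears to assume from that base class (exit-code constants, result attributes, run() and push_to_db()); it is inferred from the calls above and is not the actual functest implementation.

# Hypothetical stand-in for functest.core.TestCasesBase, reduced to the
# pieces DominoCases uses above; the real class lives in the functest repo.
class TestCasesBase(object):

    EX_OK = 0           # test ran and reported a result
    EX_RUN_ERROR = -1   # test could not run or ended in error

    def __init__(self):
        self.project_name = ""
        self.case_name = ""
        self.criteria = ""
        self.start_time = 0
        self.stop_time = 0
        self.details = {}

    def run(self, **kwargs):
        # Concrete test cases override this and return EX_OK or EX_RUN_ERROR.
        return self.EX_RUN_ERROR

    def push_to_db(self):
        # Assumed behaviour: publish project_name, case_name, criteria,
        # start_time, stop_time and details to the results database and
        # return EX_OK on success. This sketch only checks that the
        # result fields were filled in by main().
        if self.criteria and self.start_time and self.stop_time:
            return self.EX_OK
        return self.EX_RUN_ERROR

With a base class of this shape, the __main__ block above calls DominoCases.main() directly and only pushes results via push_to_db() when the -r/--report flag is given.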