author    Cédric Ollivier <cedric.ollivier@orange.com>  2018-07-15 09:49:57 +0200
committer Cédric Ollivier <cedric.ollivier@orange.com>  2018-07-15 10:27:49 +0200
commit    3aee76caccf9701e2c90652a920c1bca66903bb8 (patch)
tree      94347a35558f5d15030efc3e8e3d3f5d9ff98555 /xtesting
parent    3030d25d2a25347b0b6c7fcdd14bfe31fc5a86b6 (diff)

Allow dynamically skipping testcases (tag: 0.52)

Change-Id: I7cf47f0625bdb8345fef43b24c51bd84ef4486ea
Signed-off-by: Cédric Ollivier <cedric.ollivier@orange.com>
Diffstat (limited to 'xtesting')
-rw-r--r--  xtesting/ci/run_tests.py                    17
-rw-r--r--  xtesting/core/testcase.py                   33
-rw-r--r--  xtesting/tests/unit/core/test_testcase.py   32
3 files changed, 67 insertions(+), 15 deletions(-)
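
The commit introduces a check_requirements() hook that concrete test cases are expected to override in order to skip themselves at runtime. A minimal sketch of a downstream usage follows; the DeploymentTest class and the DEPLOY_SCENARIO check are hypothetical illustrations, not part of this change:

    import os

    from xtesting.core import testcase


    class DeploymentTest(testcase.TestCase):
        """Hypothetical test case bound to one deployment scenario."""

        def check_requirements(self):
            # Flag the case as skipped when its requirement is unmet;
            # run_tests.py then reports SKIP instead of running it.
            if os.environ.get('DEPLOY_SCENARIO') != 'os-nosdn-nofeature-ha':
                self.is_skipped = True
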
diff --git a/xtesting/ci/run_tests.py b/xtesting/ci/run_tests.py
index 31451090..6ecd0cfc 100644
--- a/xtesting/ci/run_tests.py
+++ b/xtesting/ci/run_tests.py
@@ -146,7 +146,6 @@ class Runner(object):
         if not test.is_enabled():
             raise TestNotEnabled(
                 "The test case {} is not enabled".format(test.get_name()))
-        LOGGER.info("Running test case '%s'...", test.get_name())
         result = testcase.TestCase.EX_RUN_ERROR
         run_dict = self.get_run_dict(test.get_name())
         if run_dict:
@@ -156,6 +155,11 @@ class Runner(object):
             test_dict = Runner.get_dict_by_test(test.get_name())
             test_case = cls(**test_dict)
             self.executed_test_cases[test.get_name()] = test_case
+            test_case.check_requirements()
+            if test_case.is_skipped:
+                LOGGER.info("Skipping test case '%s'...", test.get_name())
+                return testcase.TestCase.EX_TESTCASE_SKIPPED
+            LOGGER.info("Running test case '%s'...", test.get_name())
             try:
                 kwargs = run_dict['args']
                 test_case.run(**kwargs)
@@ -177,18 +181,16 @@ class Runner(object):
     def run_tier(self, tier):
         """Run one tier"""
-        tier_name = tier.get_name()
         tests = tier.get_tests()
         if not tests:
             LOGGER.info("There are no supported test cases in this tier "
                         "for the given scenario")
             self.overall_result = Result.EX_ERROR
         else:
-            LOGGER.info("Running tier '%s'", tier_name)
             for test in tests:
                 self.run_test(test)
                 test_case = self.executed_test_cases[test.get_name()]
-                if test_case.is_successful() != testcase.TestCase.EX_OK:
+                if test_case.is_successful() == test_case.EX_TESTCASE_FAILED:
                     LOGGER.error("The test case '%s' failed.", test.get_name())
                     self.overall_result = Result.EX_ERROR
                     if test.is_blocking():
@@ -272,8 +274,11 @@ class Runner(object):
                 msg.add_row([test.get_name(), test.get_project(),
                              each_tier.get_name(), "00:00", "SKIP"])
             else:
-                result = 'PASS' if(test_case.is_successful(
-                ) == test_case.EX_OK) else 'FAIL'
+                if test_case.is_skipped:
+                    result = 'SKIP'
+                else:
+                    result = 'PASS' if(test_case.is_successful(
+                    ) == test_case.EX_OK) else 'FAIL'
                 msg.add_row(
                     [test_case.case_name, test_case.project_name,
                      self.tiers.get_tier_name(test_case.case_name),
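
Note why run_tier now compares against EX_TESTCASE_FAILED instead of the old `!= EX_OK` test: a skipped case returns the new EX_TESTCASE_SKIPPED status from is_successful(), which the old inequality would have miscounted as a failure and could even have aborted a blocking tier. A standalone sketch of the three statuses, reusing the constant values defined in testcase.py below:

    import os

    # Same values as in xtesting/core/testcase.py (see the next diff).
    EX_OK = os.EX_OK                          # results are PASS
    EX_TESTCASE_FAILED = os.EX_SOFTWARE - 2   # results are FAIL
    EX_TESTCASE_SKIPPED = os.EX_SOFTWARE - 3  # requirements are unmet

    for status in (EX_OK, EX_TESTCASE_SKIPPED, EX_TESTCASE_FAILED):
        old_is_error = status != EX_OK               # SKIP counted as error
        new_is_error = status == EX_TESTCASE_FAILED  # SKIP left alone
        print(status, old_is_error, new_is_error)
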
diff --git a/xtesting/core/testcase.py b/xtesting/core/testcase.py
index 4effa932..61a2e8c8 100644
--- a/xtesting/core/testcase.py
+++ b/xtesting/core/testcase.py
@@ -16,17 +16,15 @@ import os
 import re
 
 import requests
-from xtesting.utils import decorators
-from xtesting.utils import env
-
-
 import prettytable
 
+from xtesting.utils import decorators
+from xtesting.utils import env
+
 __author__ = "Cedric Ollivier <cedric.ollivier@orange.com>"
 
 
-class TestCase(object):
+class TestCase(object):  # pylint: disable=too-many-instance-attributes
     """Base model for single test case."""
 
     EX_OK = os.EX_OK
@@ -41,6 +39,9 @@ class TestCase(object):
     EX_TESTCASE_FAILED = os.EX_SOFTWARE - 2
     """results are false"""
 
+    EX_TESTCASE_SKIPPED = os.EX_SOFTWARE - 3
+    """requirements are unmet"""
+
     _job_name_rule = "(dai|week)ly-(.+?)-[0-9]*"
     _headers = {'Content-Type': 'application/json'}
     __logger = logging.getLogger(__name__)
@@ -53,13 +54,17 @@ class TestCase(object):
         self.result = 0
         self.start_time = 0
         self.stop_time = 0
+        self.is_skipped = False
 
     def __str__(self):
         try:
             assert self.project_name
             assert self.case_name
-            result = 'PASS' if(self.is_successful(
-            ) == TestCase.EX_OK) else 'FAIL'
+            if self.is_skipped:
+                result = 'SKIP'
+            else:
+                result = 'PASS' if(self.is_successful(
+                ) == TestCase.EX_OK) else 'FAIL'
             msg = prettytable.PrettyTable(
                 header_style='upper', padding_width=5,
                 field_names=['test case', 'project', 'duration',
@@ -79,6 +84,8 @@ class TestCase(object):
             "XX:XX" otherwise.
         """
         try:
+            if self.is_skipped:
+                return "00:00"
             assert self.start_time
             assert self.stop_time
             if self.stop_time < self.start_time:
@@ -99,9 +106,12 @@ class TestCase(object):
         Returns:
             TestCase.EX_OK if result is 'PASS'.
+            TestCase.EX_TESTCASE_SKIPPED if test case is skipped.
             TestCase.EX_TESTCASE_FAILED otherwise.
         """
         try:
+            if self.is_skipped:
+                return TestCase.EX_TESTCASE_SKIPPED
             assert self.criteria
             assert self.result is not None
             if (not isinstance(self.result, str) and
@@ -121,6 +131,13 @@ class TestCase(object):
             self.__logger.error("Please run test before checking the results")
             return TestCase.EX_TESTCASE_FAILED
 
+    def check_requirements(self):  # pylint: disable=no-self-use
+        """Check the requirements of the test case.
+
+        It can be overridden on purpose.
+        """
+        self.is_skipped = False
+
     def run(self, **kwargs):
         """Run the test case.
@@ -177,6 +194,8 @@ class TestCase(object):
             TestCase.EX_PUSH_TO_DB_ERROR otherwise.
         """
         try:
+            if self.is_skipped:
+                return TestCase.EX_PUSH_TO_DB_ERROR
             assert self.project_name
             assert self.case_name
             assert self.start_time
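
Taken together, the base-class changes can be exercised directly. A quick sketch of the observable behaviour, assuming xtesting is installed and using the same constructor kwargs as the unit tests below:

    from xtesting.core import testcase

    test = testcase.TestCase(case_name='case01', project_name='xtesting')
    test.check_requirements()      # default implementation never skips
    assert test.is_skipped is False

    test.is_skipped = True         # what a subclass override would set
    assert test.is_successful() == testcase.TestCase.EX_TESTCASE_SKIPPED
    assert test.get_duration() == "00:00"
    print(test)                    # the PrettyTable report shows 'SKIP'
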
diff --git a/xtesting/tests/unit/core/test_testcase.py b/xtesting/tests/unit/core/test_testcase.py
index e2f56f8f..6b83b97c 100644
--- a/xtesting/tests/unit/core/test_testcase.py
+++ b/xtesting/tests/unit/core/test_testcase.py
@@ -15,11 +15,10 @@ import logging
 import os
 import unittest
 
-from xtesting.core import testcase
-
 import mock
 import requests
 
+from xtesting.core import testcase
+
 __author__ = "Cedric Ollivier <cedric.ollivier@orange.com>"
@@ -108,6 +107,14 @@ class TestCaseTesting(unittest.TestCase):
                          testcase.TestCase.EX_PUSH_TO_DB_ERROR)
         mock_function.assert_not_called()
 
+    @mock.patch('requests.post')
+    def test_pushdb_skipped_test(self, mock_function=None):
+        self.test.is_skipped = True
+        self.assertEqual(
+            self.test.push_to_db(),
+            testcase.TestCase.EX_PUSH_TO_DB_ERROR)
+        mock_function.assert_not_called()
+
     def _get_data(self):
         return {
             "build_tag": os.environ['BUILD_TAG'],
@@ -158,6 +165,10 @@ class TestCaseTesting(unittest.TestCase):
             data=json.dumps(self._get_data(), sort_keys=True),
             headers=self._headers)
 
+    def test_check_requirements(self):
+        self.test.check_requirements()
+        self.assertEqual(self.test.is_skipped, False)
+
     def test_check_criteria_missing(self):
         self.test.criteria = None
         self.assertEqual(self.test.is_successful(),
@@ -184,6 +195,11 @@ class TestCaseTesting(unittest.TestCase):
         self.assertEqual(self.test.is_successful(),
                          testcase.TestCase.EX_OK)
 
+    def test_check_result_skip(self):
+        self.test.is_skipped = True
+        self.assertEqual(self.test.is_successful(),
+                         testcase.TestCase.EX_TESTCASE_SKIPPED)
+
     def test_check_result_lt(self):
         self.test.result = 50
         self.assertEqual(self.test.is_successful(),
@@ -233,6 +249,10 @@ class TestCaseTesting(unittest.TestCase):
         self.test.stop_time = 180
         self.assertEqual(self.test.get_duration(), "02:59")
 
+    def test_get_duration_skipped_test(self):
+        self.test.is_skipped = True
+        self.assertEqual(self.test.get_duration(), "00:00")
+
     def test_str_project_name_ko(self):
         self.test.project_name = None
         self.assertIn("<xtesting.core.testcase.TestCase object at",
@@ -268,6 +288,14 @@ class TestCaseTesting(unittest.TestCase):
         self.assertIn(duration, message)
         self.assertIn('FAIL', message)
 
+    def test_str_skip(self):
+        self.test.is_skipped = True
+        message = str(self.test)
+        self.assertIn(self._project_name, message)
+        self.assertIn(self._case_name, message)
+        self.assertIn("00:00", message)
+        self.assertIn('SKIP', message)
+
     def test_clean(self):
         self.assertEqual(self.test.clean(), None)