author    Deepak Chandella <deepak.chandella@orange.com>  2019-07-05 22:31:36 +0530
committer Cédric Ollivier <cedric.ollivier@orange.com>    2019-07-11 11:35:54 +0200
commit    5dd0d0ffd46e7665fddde8fd2f4da1a9b58506bb (patch)
tree      9be57539cde78b3c82dd2442dfb108827d66ca69 /xtesting
parent    2d35738769a5118fdcf7a7092e4fd0fb7f15511d (diff)
Adding first patch for behave feature
Change-Id: Ic975c301103b49cdec2bd26746b708388f21e892
Signed-off-by: Deepak Chandella <deepak.chandella@orange.com>
Diffstat (limited to 'xtesting')
-rw-r--r--  xtesting/ci/testcases.yaml                          16
-rw-r--r--  xtesting/core/behaveframework.py                   120
-rw-r--r--  xtesting/core/robotframework.py                     12
-rw-r--r--  xtesting/samples/features/hello.feature              7
-rw-r--r--  xtesting/samples/features/steps/hello.py            16
-rw-r--r--  xtesting/tests/unit/core/test_behaveframework.py   162
-rw-r--r--  xtesting/tests/unit/core/test_robotframework.py     14
7 files changed, 332 insertions(+), 15 deletions(-)
diff --git a/xtesting/ci/testcases.yaml b/xtesting/ci/testcases.yaml
index d3df1efb..9de9c4bb 100644
--- a/xtesting/ci/testcases.yaml
+++ b/xtesting/ci/testcases.yaml
@@ -65,3 +65,19 @@ tiers:
variable:
- 'var01:foo'
- 'var02:bar'
+
+ -
+ case_name: sixth
+ project_name: xtesting
+ enabled: false
+ criteria: 100
+ blocking: false
+ clean_flag: false
+ description: ''
+ run:
+ name: 'behaveframework'
+ args:
+ suites:
+ - /usr/lib/python3.6/site-packages/xtesting/samples/features/
+ tags:
+ - foo
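
The new "sixth" entry above plugs the BehaveFramework runner introduced below
into the test catalog. As a minimal sketch (not part of this patch; it assumes
xtesting is installed and that /var/lib/xtesting/results is writable), the same
case can also be driven directly from Python:

    from xtesting.core import behaveframework

    test = behaveframework.BehaveFramework(
        case_name='sixth', project_name='xtesting')
    status = test.run(
        suites=['/usr/lib/python3.6/site-packages/xtesting/samples/features/'],
        tags=['foo'])
    if status == test.EX_OK:
        print(test.result)   # percentage of passed features
        print(test.details)  # counters plus the raw behave report
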
diff --git a/xtesting/core/behaveframework.py b/xtesting/core/behaveframework.py
new file mode 100644
index 00000000..d8a61ef3
--- /dev/null
+++ b/xtesting/core/behaveframework.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""Define classes required to run any Behave test suites."""
+
+from __future__ import division
+
+import json
+import logging
+import os
+import time
+
+from behave.__main__ import main as behave_main
+
+from xtesting.core import testcase
+
+__author__ = "Deepak Chandella <deepak.chandella@orange.com>"
+
+
+class BehaveFramework(testcase.TestCase):
+ """BehaveFramework runner."""
+ # pylint: disable=too-many-instance-attributes
+
+ __logger = logging.getLogger(__name__)
+ dir_results = "/var/lib/xtesting/results"
+
+ def __init__(self, **kwargs):
+ super(BehaveFramework, self).__init__(**kwargs)
+ self.res_dir = os.path.join(self.dir_results, self.case_name)
+ self.json_file = os.path.join(self.res_dir, 'output.json')
+ self.total_tests = 0
+ self.pass_tests = 0
+ self.fail_tests = 0
+ self.skip_tests = 0
+ self.response = None
+
+ def parse_results(self):
+ """Parse output.json and get the details in it."""
+
+ try:
+ with open(self.json_file) as stream_:
+ self.response = json.load(stream_)
+ except IOError:
+ self.__logger.error("Error reading the file %s", self.json_file)
+
+ try:
+ if self.response:
+ self.total_tests = len(self.response)
+ for item in self.response:
+ if item['status'] == 'passed':
+ self.pass_tests += 1
+ elif item['status'] == 'failed':
+ self.fail_tests += 1
+ elif item['status'] == 'skipped':
+ self.skip_tests += 1
+ except KeyError:
+ self.__logger.error("Error in json - %s", self.response)
+
+ try:
+ self.result = 100 * (
+ self.pass_tests / self.total_tests)
+ except ZeroDivisionError:
+ self.__logger.error("No test has been run")
+
+        self.details = {
+            'total_tests': self.total_tests,
+            'pass_tests': self.pass_tests,
+            'fail_tests': self.fail_tests,
+            'skip_tests': self.skip_tests,
+            'tests': self.response}
+
+ def run(self, **kwargs):
+        """Run the BehaveFramework feature files.
+
+        Here are the steps:
+        * create the output directories if required,
+        * run the behave features with the given parameters,
+        * get the results in output.json.
+
+ Args:
+ kwargs: Arbitrary keyword arguments.
+
+ Returns:
+ EX_OK if all suites ran well.
+ EX_RUN_ERROR otherwise.
+ """
+ try:
+ suites = kwargs["suites"]
+ tags = kwargs.get("tags", [])
+ except KeyError:
+ self.__logger.exception("Mandatory args were not passed")
+ return self.EX_RUN_ERROR
+ if not os.path.exists(self.res_dir):
+ try:
+ os.makedirs(self.res_dir)
+ except Exception: # pylint: disable=broad-except
+ self.__logger.exception("Cannot create %s", self.res_dir)
+ return self.EX_RUN_ERROR
+ config = ['--tags='+','.join(tags),
+ '--format=json',
+ '--outfile='+self.json_file]
+ for feature in suites:
+ config.append(feature)
+ self.start_time = time.time()
+ behave_main(config)
+ self.stop_time = time.time()
+
+ try:
+ self.parse_results()
+ self.__logger.info("Results were successfully parsed")
+ except Exception: # pylint: disable=broad-except
+ self.__logger.exception("Cannot parse results")
+ return self.EX_RUN_ERROR
+ return self.EX_OK
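
parse_results() above relies on behave's JSON formatter emitting one object per
feature, each carrying a top-level 'status' field. Under that assumption, the
scoring rule reduces to this sketch:

    # Each entry stands for one behave feature parsed from output.json;
    # only the top-level 'status' field is inspected.
    response = [
        {'status': 'passed'},
        {'status': 'failed'},
        {'status': 'skipped'},
    ]
    total_tests = len(response)                                       # 3
    pass_tests = sum(1 for f in response if f['status'] == 'passed')  # 1
    result = 100 * pass_tests / total_tests                           # 33.33...
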
diff --git a/xtesting/core/robotframework.py b/xtesting/core/robotframework.py
index 2791b559..3cb0ad31 100644
--- a/xtesting/core/robotframework.py
+++ b/xtesting/core/robotframework.py
@@ -11,7 +11,6 @@
from __future__ import division
-import errno
import logging
import os
@@ -110,15 +109,12 @@ class RobotFramework(testcase.TestCase):
except KeyError:
self.__logger.exception("Mandatory args were not passed")
return self.EX_RUN_ERROR
- try:
- os.makedirs(self.res_dir)
- except OSError as ex:
- if ex.errno != errno.EEXIST:
+ if not os.path.exists(self.res_dir):
+ try:
+ os.makedirs(self.res_dir)
+ except Exception: # pylint: disable=broad-except
self.__logger.exception("Cannot create %s", self.res_dir)
return self.EX_RUN_ERROR
- except Exception: # pylint: disable=broad-except
- self.__logger.exception("Cannot create %s", self.res_dir)
- return self.EX_RUN_ERROR
stream = StringIO()
robot.run(*suites, variable=variable, variablefile=variablefile,
include=include, output=self.xml_file, log='NONE',
diff --git a/xtesting/samples/features/hello.feature b/xtesting/samples/features/hello.feature
new file mode 100644
index 00000000..7975d28a
--- /dev/null
+++ b/xtesting/samples/features/hello.feature
@@ -0,0 +1,7 @@
+Feature: showing off behave
+
+ @foo
+ Scenario: run a simple test
+ Given we have behave installed
+ When we implement a test
+ Then behave will test it for us!
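
For this sample, run() builds the equivalent of the following behave call (a
sketch; the paths are illustrative and depend on where xtesting is installed
and on the case_name in use):

    from behave.__main__ import main as behave_main

    behave_main(['--tags=foo',
                 '--format=json',
                 '--outfile=/var/lib/xtesting/results/sixth/output.json',
                 '/usr/lib/python3.6/site-packages/xtesting/samples/features/'])

Only the scenario tagged @foo is selected, matching the tags list declared for
the "sixth" case in testcases.yaml.
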
diff --git a/xtesting/samples/features/steps/hello.py b/xtesting/samples/features/steps/hello.py
new file mode 100644
index 00000000..8d780168
--- /dev/null
+++ b/xtesting/samples/features/steps/hello.py
@@ -0,0 +1,16 @@
+from behave import step, then, when
+
+
+@step('we have behave installed')
+def step_impl_installation(context):
+    pass
+
+
+@when('we implement a test')
+def step_impl_test(context):
+    assert True is not False
+
+
+@then('behave will test it for us!')
+def step_impl_verify(context):
+    assert context.failed is False
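
behave matches these step implementations to the Given/When/Then lines of
hello.feature purely by the strings passed to the decorators; @step binds to
any step keyword, which is why it can serve the Given clause here.
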
diff --git a/xtesting/tests/unit/core/test_behaveframework.py b/xtesting/tests/unit/core/test_behaveframework.py
new file mode 100644
index 00000000..f18cac03
--- /dev/null
+++ b/xtesting/tests/unit/core/test_behaveframework.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2019 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""Define the classes required to fully cover behave."""
+
+import logging
+import unittest
+
+import mock
+from xtesting.core import behaveframework
+
+__author__ = "Deepak Chandella <deepak.chandella@orange.com>"
+
+
+class ParseResultTesting(unittest.TestCase):
+
+ """The class testing BehaveFramework.parse_results()."""
+ # pylint: disable=missing-docstring
+
+ _response = [{'status': 'passed'}]
+
+ def setUp(self):
+ self.test = behaveframework.BehaveFramework(
+ case_name='behave', project_name='xtesting')
+
+    def test_handles_exc_open(self):
+ self.test.json_file = 'dummy_file'
+ self.test.response = self._response
+ with mock.patch('six.moves.builtins.open',
+ mock.mock_open()) as mock_file:
+ mock_file.side_effect = IOError()
+            self.test.parse_results()  # the IOError is caught and logged
+ mock_file.assert_called_once_with('dummy_file')
+
+    def test_handles_exc_key(self):
+ with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+ mock.patch('json.load', return_value=[{'foo': 'bar'}]):
+            self.test.parse_results()  # the KeyError is caught and logged
+
+    def test_handles_exc_zerodivision(self):
+ with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+ mock.patch('json.load', mock.Mock(return_value=[])):
+            self.test.parse_results()  # ZeroDivisionError is caught and logged
+
+ def _test_result(self, response, result):
+ with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+ mock.patch('json.load', mock.Mock(return_value=response)):
+ self.test.parse_results()
+ self.assertEqual(self.test.result, result)
+
+ def test_null_passed(self):
+ data = [{'status': 'dummy'}]
+ self._test_result(data, 0)
+
+ def test_half_success(self):
+ data = [{'status': 'passed'}, {'status': 'failed'}]
+ self._test_result(data, 50)
+
+ def test_success(self):
+ data = [{'status': 'passed'}, {'status': 'passed'}]
+ self._test_result(data, 100)
+
+ def test_count(self):
+ self._response.extend([{'status': 'failed'}, {'status': 'skipped'}])
+ with mock.patch('six.moves.builtins.open', mock.mock_open()), \
+ mock.patch('json.load', mock.Mock(
+ return_value=self._response)):
+ self.test.parse_results()
+ self.assertEqual(self.test.details['pass_tests'], 1)
+ self.assertEqual(self.test.details['fail_tests'], 1)
+ self.assertEqual(self.test.details['skip_tests'], 1)
+ self.assertEqual(self.test.details['total_tests'], 3)
+
+
+class RunTesting(unittest.TestCase):
+
+ """The class testing BehaveFramework.run()."""
+ # pylint: disable=missing-docstring
+
+ suites = ["foo"]
+ tags = []
+
+ def setUp(self):
+ self.test = behaveframework.BehaveFramework(
+ case_name='behave', project_name='xtesting')
+
+ def test_exc_key_error(self):
+ self.assertEqual(self.test.run(), self.test.EX_RUN_ERROR)
+
+ @mock.patch('xtesting.core.behaveframework.behave_main')
+ def _test_makedirs_exc(self, *args):
+ with mock.patch.object(self.test, 'parse_results') as mock_method:
+ self.assertEqual(
+ self.test.run(
+ suites=self.suites, tags=self.tags),
+ self.test.EX_RUN_ERROR)
+ args[0].assert_not_called()
+            mock_method.assert_not_called()
+
+ @mock.patch('os.makedirs', side_effect=Exception)
+ @mock.patch('os.path.exists', return_value=False)
+ def test_makedirs_exc(self, *args):
+ self._test_makedirs_exc()
+ args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_called_once_with(self.test.res_dir)
+
+ @mock.patch('xtesting.core.behaveframework.behave_main')
+ def _test_makedirs(self, *args):
+ with mock.patch.object(self.test, 'parse_results') as mock_method:
+ self.assertEqual(
+ self.test.run(suites=self.suites, tags=self.tags),
+ self.test.EX_OK)
+ args[0].assert_called_once_with(
+ ['--tags=',
+ '--format=json',
+ '--outfile={}'.format(self.test.json_file),
+ 'foo'])
+ mock_method.assert_called_once_with()
+
+ @mock.patch('os.makedirs')
+ @mock.patch('os.path.exists', return_value=False)
+ def test_makedirs(self, *args):
+ self._test_makedirs()
+ args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_called_once_with(self.test.res_dir)
+
+ @mock.patch('os.makedirs')
+ @mock.patch('os.path.exists', return_value=True)
+ def test_makedirs_oserror17(self, *args):
+ self._test_makedirs()
+ args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_not_called()
+
+ @mock.patch('os.makedirs')
+ @mock.patch('xtesting.core.behaveframework.behave_main')
+ def _test_parse_results(self, status, *args):
+ self.assertEqual(
+ self.test.run(
+ suites=self.suites, tags=self.tags),
+ status)
+ args[0].assert_called_once_with(
+ ['--tags=',
+ '--format=json',
+ '--outfile={}'.format(self.test.json_file),
+ 'foo'])
+ args[1].assert_called_once_with(self.test.res_dir)
+
+ def test_parse_results_exc(self):
+ with mock.patch.object(self.test, 'parse_results',
+ side_effect=Exception) as mock_method:
+ self._test_parse_results(self.test.EX_RUN_ERROR)
+ mock_method.assert_called_once_with()
+
+if __name__ == "__main__":
+ logging.disable(logging.CRITICAL)
+ unittest.main(verbosity=2)
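
Thanks to the __main__ guard, the new unit tests can also be exercised
standalone, e.g. python xtesting/tests/unit/core/test_behaveframework.py from
the repository root.
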
diff --git a/xtesting/tests/unit/core/test_robotframework.py b/xtesting/tests/unit/core/test_robotframework.py
index 398cf87a..19c4e0f0 100644
--- a/xtesting/tests/unit/core/test_robotframework.py
+++ b/xtesting/tests/unit/core/test_robotframework.py
@@ -9,7 +9,6 @@
"""Define the classes required to fully cover robot."""
-import errno
import logging
import os
import unittest
@@ -194,14 +193,11 @@ class RunTesting(unittest.TestCase):
mmethod.assert_not_called()
@mock.patch('os.makedirs', side_effect=Exception)
+ @mock.patch('os.path.exists', return_value=False)
def test_makedirs_exc(self, *args):
self._test_makedirs_exc()
args[0].assert_called_once_with(self.test.res_dir)
-
- @mock.patch('os.makedirs', side_effect=OSError)
- def test_makedirs_oserror(self, *args):
- self._test_makedirs_exc()
- args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_called_once_with(self.test.res_dir)
@mock.patch('robot.run')
def _test_makedirs(self, *args):
@@ -218,15 +214,19 @@ class RunTesting(unittest.TestCase):
mock_method.assert_called_once_with()
mmethod.assert_called_once_with()
- @mock.patch('os.makedirs', side_effect=OSError(errno.EEXIST, ''))
+ @mock.patch('os.makedirs')
+ @mock.patch('os.path.exists', return_value=True)
def test_makedirs_oserror17(self, *args):
self._test_makedirs()
args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_not_called()
@mock.patch('os.makedirs')
+ @mock.patch('os.path.exists', return_value=False)
def test_makedirs(self, *args):
self._test_makedirs()
args[0].assert_called_once_with(self.test.res_dir)
+ args[1].assert_called_once_with(self.test.res_dir)
@mock.patch('os.makedirs')
@mock.patch('robot.run')