Diffstat (limited to 'xtesting')
-rw-r--r-- | xtesting/ci/testcases.yaml                        | 197
-rw-r--r-- | xtesting/core/ansible.py                          |  70
-rw-r--r-- | xtesting/samples/helloworld.yml                   |   6
-rw-r--r-- | xtesting/tests/unit/core/test_ansible.py          | 151
-rw-r--r-- | xtesting/tests/unit/core/test_behaveframework.py  |   2
-rw-r--r-- | xtesting/tests/unit/core/test_robotframework.py   |   8
6 files changed, 330 insertions, 104 deletions
diff --git a/xtesting/ci/testcases.yaml b/xtesting/ci/testcases.yaml
index f74d012e..16dd2632 100644
--- a/xtesting/ci/testcases.yaml
+++ b/xtesting/ci/testcases.yaml
@@ -1,102 +1,101 @@
 ---
 tiers:
-    -
-        name: samples
+  - name: samples
+    description: ''
+    testcases:
+      - case_name: first
+        project_name: xtesting
+        criteria: 100
+        blocking: true
+        clean_flag: false
         description: ''
-        testcases:
-            -
-                case_name: first
-                project_name: xtesting
-                criteria: 100
-                blocking: true
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'first'
-
-            -
-                case_name: second
-                project_name: xtesting
-                criteria: 100
-                blocking: true
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'second'
-
-            -
-                case_name: third
-                project_name: xtesting
-                criteria: 100
-                blocking: true
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'bashfeature'
-                    args:
-                        cmd: 'echo -n Hello World; exit 0'
-
-            -
-                case_name: fourth
-                project_name: xtesting
-                criteria: 100
-                blocking: true
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'unit'
-                    args:
-                        name: 'xtesting.samples.fourth'
-
-            -
-                case_name: fifth
-                project_name: xtesting
-                enabled: false
-                criteria: 100
-                blocking: false
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'robotframework'
-                    args:
-                        suites:
-                            - /usr/lib/python3.6/site-packages/xtesting/samples/HelloWorld.robot
-                        variable:
-                            - 'var01:foo'
-                            - 'var02:bar'
-
-            -
-                case_name: sixth
-                project_name: xtesting
-                enabled: false
-                criteria: 100
-                blocking: false
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'behaveframework'
-                    args:
-                        suites:
-                            - /usr/lib/python3.6/site-packages/xtesting/samples/features/
-                        tags:
-                            - foo
-
-            -
-                case_name: seventh
-                project_name: xtesting
-                enabled: true
-                criteria: 100
-                blocking: true
-                clean_flag: false
-                description: ''
-                run:
-                    name: 'mts'
-                    args:
-                        test_file: /opt/mts/bin/test/test.xml
-                        testcases:
-                            - Pause_5_sec
-                        max_duration: 2  # in seconds
-                        log_level: INFO
-                        store_method: FILE
-                        java_memory: 2048
-                        console: true
+        run:
+          name: first
+      - case_name: second
+        project_name: xtesting
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: second
+      - case_name: third
+        project_name: xtesting
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: bashfeature
+          args:
+            cmd: echo -n Hello World; exit 0
+      - case_name: fourth
+        project_name: xtesting
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: unit
+          args:
+            name: xtesting.samples.fourth
+      - case_name: fifth
+        project_name: xtesting
+        enabled: false
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: robotframework
+          args:
+            suites:
+              - >-
+                /usr/lib/python3.8/site-packages/xtesting/samples/HelloWorld.robot
+            variable:
+              - 'var01:foo'
+              - 'var02:bar'
+      - case_name: sixth
+        project_name: xtesting
+        enabled: false
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: behaveframework
+          args:
+            suites:
+              - /usr/lib/python3.8/site-packages/xtesting/samples/features
+            tags:
+              - foo
+      - case_name: seventh
+        project_name: xtesting
+        enabled: false
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: mts
+          args:
+            test_file: /opt/mts/bin/test/test.xml
+            testcases:
+              - Pause_5_sec
+            max_duration: 2
+            log_level: INFO
+            store_method: FILE
+            java_memory: 2048
+            console: true
+      - case_name: eighth
+        project_name: xtesting
+        enabled: false
+        criteria: 100
+        blocking: true
+        clean_flag: false
+        description: ''
+        run:
+          name: ansible
+          args:
+            private_data_dir: /usr/lib/python3.8/site-packages/xtesting/samples
+            playbook: helloworld.yml
diff --git a/xtesting/core/ansible.py b/xtesting/core/ansible.py
new file mode 100644
index 00000000..21148a18
--- /dev/null
+++ b/xtesting/core/ansible.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2021 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+"""Implement a Xtesting driver to run any Ansible playbook."""
+
+import logging
+import os
+import shutil
+import time
+
+import ansible_runner
+
+from xtesting.core import testcase
+
+
+class Ansible(testcase.TestCase):
+    """Class designed to run any Ansible playbook via ansible-runner."""
+
+    __logger = logging.getLogger(__name__)
+
+    def check_requirements(self):
+        """Check if ansible-playbook is in $PATH"""
+        self.is_skipped = not shutil.which("ansible-playbook")
+        if self.is_skipped:
+            self.__logger.warning("ansible-playbook is missing")
+
+    def run(self, **kwargs):
+        """ Wrap ansible_runner.interface.run()
+
+        It calls ansible_runner.interface.run() by converting the testcase
+        description data to kwargs. It only overrides quiet and artifact_dir to
+        implement the Xtesting behavior.
+
+        Following the playbook logic, criteria is considered as boolean
+        whatever the value set in testcases.yaml.
+
+        Args:
+            kwargs: Arbitrary keyword arguments.
+
+        Returns:
+            EX_OK if the playbook ran well.
+            EX_RUN_ERROR otherwise.
+        """
+        status = self.EX_RUN_ERROR
+        self.start_time = time.time()
+        if ("private_data_dir" in kwargs and
+                os.path.isdir(kwargs['private_data_dir'])):
+            try:
+                if not os.path.exists(self.res_dir):
+                    os.makedirs(self.res_dir)
+                kwargs["quiet"] = True
+                kwargs["artifact_dir"] = self.res_dir
+                runner = ansible_runner.run(**kwargs)
+                self.details = runner.stats
+                if runner.rc == 0:
+                    self.result = 100
+                    status = self.EX_OK
+            except Exception:  # pylint: disable=broad-except
+                self.__logger.exception("Cannot execute the playbook")
+        else:
+            self.__logger.error(
+                "Please set a relevant private_data_dir in testcases.yaml")
+        self.stop_time = time.time()
+        return status
diff --git a/xtesting/samples/helloworld.yml b/xtesting/samples/helloworld.yml
new file mode 100644
index 00000000..399f0c04
--- /dev/null
+++ b/xtesting/samples/helloworld.yml
@@ -0,0 +1,6 @@
+---
+- name: Hello World!
+  hosts: 127.0.0.1
+  tasks:
+    - name: Hello World!
+      shell: echo "Hello World!"
diff --git a/xtesting/tests/unit/core/test_ansible.py b/xtesting/tests/unit/core/test_ansible.py
new file mode 100644
index 00000000..22785e8f
--- /dev/null
+++ b/xtesting/tests/unit/core/test_ansible.py
@@ -0,0 +1,151 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2021 Orange and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# pylint: disable=missing-docstring
+
+import logging
+import unittest
+
+import mock
+import munch
+
+from xtesting.core import ansible
+
+
+class RunTesting(unittest.TestCase):
+
+    def setUp(self):
+        self.test = ansible.Ansible()
+
+    @mock.patch("shutil.which", return_value=None)
+    def test_check1(self, which):
+        self.test.check_requirements()
+        self.assertEqual(self.test.is_skipped, True)
+        which.assert_called_once_with("ansible-playbook")
+
+    @mock.patch("shutil.which", return_value='/usr/bin/ansible-playbook')
+    def test_check2(self, which):
+        self.test.check_requirements()
+        self.assertEqual(self.test.is_skipped, False)
+        which.assert_called_once_with("ansible-playbook")
+
+    @mock.patch("os.path.isdir", return_value=False)
+    def test_fail1(self, isdir):
+        self.assertEqual(self.test.run(), self.test.EX_RUN_ERROR)
+        isdir.assert_not_called()
+
+    @mock.patch("os.path.isdir", return_value=False)
+    def test_fail2(self, isdir):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir), self.test.EX_RUN_ERROR)
+        isdir.assert_called_once_with(private_data_dir)
+
+    @mock.patch("ansible_runner.run", side_effect=Exception)
+    @mock.patch("os.makedirs")
+    @mock.patch("os.path.exists", return_value=True)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_fail3(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir), self.test.EX_RUN_ERROR)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_not_called()
+        args[3].assert_called_with(
+            private_data_dir=private_data_dir, quiet=True,
+            artifact_dir=self.test.res_dir)
+
+    @mock.patch("ansible_runner.run", side_effect=Exception)
+    @mock.patch("os.makedirs")
+    @mock.patch("os.path.exists", return_value=False)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_fail4(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir), self.test.EX_RUN_ERROR)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_called_once_with(self.test.res_dir)
+        args[3].assert_called_with(
+            private_data_dir=private_data_dir, quiet=True,
+            artifact_dir=self.test.res_dir)
+
+    @mock.patch("ansible_runner.run")
+    @mock.patch("os.makedirs", side_effect=Exception)
+    @mock.patch("os.path.exists", return_value=False)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_fail5(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir), self.test.EX_RUN_ERROR)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_called_once_with(self.test.res_dir)
+        args[3].assert_not_called()
+
+    @mock.patch("ansible_runner.run", return_value={})
+    @mock.patch("os.makedirs")
+    @mock.patch("os.path.exists", return_value=False)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_fail6(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir, quiet=False,
+            artifact_dir="overridden"), self.test.EX_RUN_ERROR)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_called_once_with(self.test.res_dir)
+        args[3].assert_called_with(
+            private_data_dir=private_data_dir, quiet=True,
+            artifact_dir=self.test.res_dir)
+
+    @mock.patch("ansible_runner.run",
+                return_value=munch.Munch(rc=0, stats={"foo": "bar"}))
+    @mock.patch("os.makedirs")
+    @mock.patch("os.path.exists", return_value=False)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_res_ok(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir, quiet=False,
+            artifact_dir="overridden"), self.test.EX_OK)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_called_once_with(self.test.res_dir)
+        args[3].assert_called_with(
+            private_data_dir=private_data_dir, quiet=True,
+            artifact_dir=self.test.res_dir)
+        self.assertEqual(self.test.is_successful(), self.test.EX_OK)
+        self.assertEqual(self.test.details, {"foo": "bar"})
+
+    @mock.patch("ansible_runner.run",
+                return_value=munch.Munch(rc=1, stats={"foo": "bar"}))
+    @mock.patch("os.makedirs")
+    @mock.patch("os.path.exists", return_value=False)
+    @mock.patch("os.path.isdir", return_value=True)
+    def test_res_ko(self, *args):
+        private_data_dir = "titi"
+        self.assertEqual(self.test.run(
+            private_data_dir=private_data_dir, quiet=False,
+            artifact_dir="overridden"), self.test.EX_OK)
+        args[0].assert_called_once_with(private_data_dir)
+        args[1].assert_called_once_with(self.test.res_dir)
+        args[2].assert_called_once_with(self.test.res_dir)
+        args[3].assert_called_with(
+            private_data_dir=private_data_dir, quiet=True,
+            artifact_dir=self.test.res_dir)
+        self.assertEqual(self.test.is_successful(),
+                         self.test.EX_TESTCASE_FAILED)
+        self.assertEqual(self.test.details, {"foo": "bar"})
+
+
+if __name__ == "__main__":
+    logging.disable(logging.CRITICAL)
+    unittest.main(verbosity=2)
diff --git a/xtesting/tests/unit/core/test_behaveframework.py b/xtesting/tests/unit/core/test_behaveframework.py
index 414d96b5..864c77d5 100644
--- a/xtesting/tests/unit/core/test_behaveframework.py
+++ b/xtesting/tests/unit/core/test_behaveframework.py
@@ -102,7 +102,7 @@ class RunTesting(unittest.TestCase):
                 suites=self.suites, tags=self.tags),
             self.test.EX_RUN_ERROR)
         args[0].assert_not_called()
-        mock_method.asser_not_called()
+        mock_method.assert_not_called()
 
     @mock.patch('os.makedirs', side_effect=Exception)
     @mock.patch('os.path.exists', return_value=False)
diff --git a/xtesting/tests/unit/core/test_robotframework.py b/xtesting/tests/unit/core/test_robotframework.py
index 19c4e0f0..c24d33dd 100644
--- a/xtesting/tests/unit/core/test_robotframework.py
+++ b/xtesting/tests/unit/core/test_robotframework.py
@@ -189,8 +189,8 @@ class RunTesting(unittest.TestCase):
                 variablefile=self.variablefile, include=self.include),
             self.test.EX_RUN_ERROR)
         args[0].assert_not_called()
-        mock_method.asser_not_called()
-        mmethod.asser_not_called()
+        mock_method.assert_not_called()
+        mmethod.assert_not_called()
 
     @mock.patch('os.makedirs', side_effect=Exception)
     @mock.patch('os.path.exists', return_value=False)
@@ -248,7 +248,7 @@ class RunTesting(unittest.TestCase):
                 mock.patch.object(self.test, 'generate_report') as mmethod:
             self._test_parse_results(self.test.EX_RUN_ERROR)
             mock_method.assert_called_once_with()
-            mmethod.asser_not_called()
+            mmethod.assert_not_called()
 
     def test_parse_results_robot_error(self):
         with mock.patch.object(self.test, 'parse_results',
@@ -256,7 +256,7 @@ class RunTesting(unittest.TestCase):
                 mock.patch.object(self.test, 'generate_report') as mmethod:
             self._test_parse_results(self.test.EX_RUN_ERROR)
             mock_method.assert_called_once_with()
-            mmethod.asser_not_called()
+            mmethod.assert_not_called()
 
     @mock.patch('os.makedirs')
     @mock.patch('robot.run')
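Note: the new Ansible driver added above is essentially a thin wrapper around ansible_runner.run(). The sketch below is not part of the patch; it only illustrates the call flow that Ansible.run() performs, assuming ansible-runner is installed, reusing the private_data_dir/playbook values of the sample "eighth" testcase, and substituting a hypothetical artifact directory for the testcase's res_dir.

```python
# Illustrative sketch only (not from the patch): reproduce what
# xtesting.core.ansible.Ansible.run() does with the "eighth" testcase args.
import ansible_runner

runner = ansible_runner.run(
    # values mirror the sample testcase; adjust the paths for your install
    private_data_dir="/usr/lib/python3.8/site-packages/xtesting/samples",
    playbook="helloworld.yml",
    quiet=True,                        # the driver always forces quiet=True
    artifact_dir="/tmp/xtesting/res",  # hypothetical stand-in for res_dir
)
print(runner.rc)     # 0 means the playbook succeeded; the driver then sets result = 100
print(runner.stats)  # the driver stores this dict as TestCase.details
```

Once enabled in testcases.yaml, the case would presumably be launched like the other samples through the Xtesting runner (e.g. run_tests -t eighth); that invocation is an assumption based on the existing sample cases, not something stated in this patch.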