Diffstat (limited to 'functest')
23 files changed, 587 insertions, 115 deletions
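Three of the changes in the hunks below are easier to follow outside diff context, so short Python sketches are given here first. The first is the new dispatch in functest/ci/run_tests.py: run_test() now forwards an optional args mapping from testcases.yaml to the test case's run() method and falls back to a plain run() call when no args block exists. The helper name run_test_case() and the example dict are illustrative stand-ins, not Functest code, but the module path and suite paths are the ones added to testcases.yaml.

```python
# Sketch of the optional-kwargs dispatch added to run_tests.py. The helper
# name and the example dict are illustrative only; the paths come from the
# updated testcases.yaml entry for odl.
import importlib


def run_test_case(run_dict):
    """Instantiate the configured class and run it, forwarding optional args.

    run_dict mirrors the 'run' section of a testcases.yaml entry: it always
    carries 'module' and 'class', and may carry an 'args' mapping.
    """
    module = importlib.import_module(run_dict['module'])
    cls = getattr(module, run_dict['class'])
    test_case = cls()
    try:
        kwargs = run_dict['args']        # only present for some test cases
        return test_case.run(**kwargs)   # e.g. suites=[...] for ODLTests
    except KeyError:
        return test_case.run()           # default behaviour, no extra args


# The odl entry now carries an 'args' block with the two suites to run:
odl_run_dict = {
    'module': 'functest.opnfv_tests.sdn.odl.odl',
    'class': 'ODLTests',
    'args': {'suites': [
        '/home/opnfv/repos/odl_test/csit/suites/integration/basic',
        '/home/opnfv/repos/odl_test/csit/suites/openstack/neutron']},
}
```

Catching KeyError rather than pre-validating the dict keeps every existing testcases.yaml entry without an args block running exactly as before.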
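On the receiving side, ODLTests keeps its two historical robot suites as a class-level default and only overrides them when the caller passes suites, which is what the args block above feeds in. Below is a stripped-down sketch of that override: ODLTestsSketch is a hypothetical stand-in, robot.run() and the OpenStack/ODL variables are omitted, and the try/except KeyError in the real run() is collapsed into kwargs.pop().

```python
# Stripped-down sketch of the suite handling now in ODLTests (odl.py);
# ODLTestsSketch is a hypothetical stand-in and robot.run() is omitted.
class ODLTestsSketch(object):
    basic_suite_dir = "/home/opnfv/repos/odl_test/csit/suites/integration/basic"
    neutron_suite_dir = "/home/opnfv/repos/odl_test/csit/suites/openstack/neutron"
    default_suites = [basic_suite_dir, neutron_suite_dir]

    def main(self, suites=default_suites, **kwargs):
        # the real method builds robot variables and calls robot.run(*suites, ...)
        return suites

    def run(self, **kwargs):
        # fall back to the class-level default when no 'suites' key is passed
        suites = kwargs.pop("suites", self.default_suites)
        return self.main(suites, **kwargs)


assert ODLTestsSketch().run() == ODLTestsSketch.default_suites
assert ODLTestsSketch().run(suites=[ODLTestsSketch.basic_suite_dir]) == \
    [ODLTestsSketch.basic_suite_dir]
```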
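Finally, the environment fallbacks in functest/utils/functest_utils.py change behaviour as well as log level: a missing DEPLOY_SCENARIO now resolves to os-nosdn-nofeature-noha and a missing BUILD_TAG to "none", both reported at info level. A condensed sketch of the resulting helpers, using a plain logging logger as a stand-in for the module's own ft_logger-based one:

```python
# Condensed sketch of the new environment fallbacks in functest_utils;
# the logger below is a stand-in for the module's own logger.
import logging
import os

logger = logging.getLogger("functest_utils_sketch")


def get_scenario():
    try:
        return os.environ['DEPLOY_SCENARIO']
    except KeyError:
        logger.info("Impossible to retrieve the scenario."
                    "Use default os-nosdn-nofeature-noha")
        return "os-nosdn-nofeature-noha"


def get_build_tag():
    try:
        return os.environ['BUILD_TAG']
    except KeyError:
        logger.info("Impossible to retrieve the build tag")
        return "none"
```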
diff --git a/functest/ci/check_os.sh b/functest/ci/check_os.sh index e2471026..b875a173 100755 --- a/functest/ci/check_os.sh +++ b/functest/ci/check_os.sh @@ -57,11 +57,11 @@ echo " ...OK" echo "Checking OpenStack basic services:" -commands=('openstack endpoint list' 'nova list' 'neutron net-list' \ - 'glance image-list' 'cinder list') +commands=('openstack endpoint list' 'openstack server list' 'openstack network list' \ + 'openstack image list' 'openstack volume list') for cmd in "${commands[@]}" do - service=$(echo $cmd | awk '{print $1}') + service=$(echo $cmd | awk '{print $1, $2}') echo ">>Checking $service service..." $cmd &>/dev/null result=$? diff --git a/functest/ci/exec_test.sh b/functest/ci/exec_test.sh index b288fe36..6a2b55a2 100755 --- a/functest/ci/exec_test.sh +++ b/functest/ci/exec_test.sh @@ -54,6 +54,10 @@ function odl_tests(){ odl_ip=$SDN_CONTROLLER_IP odl_port=8081 odl_restport=8081 + elif [ "$INSTALLER_TYPE" == "netvirt" ]; then + odl_ip=$SDN_CONTROLLER_IP + odl_port=8081 + odl_restport=8081 elif [ "$INSTALLER_TYPE" == "joid" ]; then odl_ip=$SDN_CONTROLLER elif [ "$INSTALLER_TYPE" == "compass" ]; then diff --git a/functest/ci/logging.json b/functest/ci/logging.json index 3f454e8f..2a2399d3 100644 --- a/functest/ci/logging.json +++ b/functest/ci/logging.json @@ -1,29 +1,29 @@ -{
- "version": 1,
- "disable_existing_loggers": false,
- "formatters": {
- "standard": {
- "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
- }
- },
- "handlers": {
- "console": {
- "level": "INFO",
- "class": "logging.StreamHandler",
- "formatter": "standard"
- },
- "file": {
- "level": "DEBUG",
- "class": "logging.FileHandler",
- "formatter": "standard",
- "filename": "/home/opnfv/functest/results/functest.log"
- }
- },
- "loggers": {
- "": {
- "handlers": ["console", "file"],
- "level": "DEBUG",
- "propagate": "yes"
- }
- }
-}
+{ + "version": 1, + "disable_existing_loggers": false, + "formatters": { + "standard": { + "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + } + }, + "handlers": { + "console": { + "level": "INFO", + "class": "logging.StreamHandler", + "formatter": "standard" + }, + "file": { + "level": "DEBUG", + "class": "logging.FileHandler", + "formatter": "standard", + "filename": "/home/opnfv/functest/results/functest.log" + } + }, + "loggers": { + "": { + "handlers": ["console", "file"], + "level": "DEBUG", + "propagate": "yes" + } + } +} diff --git a/functest/ci/prepare_env.py b/functest/ci/prepare_env.py index 8bbdf18b..b3e59020 100755 --- a/functest/ci/prepare_env.py +++ b/functest/ci/prepare_env.py @@ -261,6 +261,14 @@ def install_tempest(): error_msg="Problem while installing Tempest.") +def create_flavor(): + os_utils.get_or_create_flavor('m1.tiny', + '512', + '1', + '1', + public=True) + + def check_environment(): msg_not_active = "The Functest environment is not installed." if not os.path.isfile(CONST.env_active): @@ -290,6 +298,7 @@ def main(**kwargs): verify_deployment() install_rally() install_tempest() + create_flavor() with open(CONST.env_active, "w") as env_file: env_file.write("1") diff --git a/functest/ci/run_tests.py b/functest/ci/run_tests.py index ef080016..320102dd 100755 --- a/functest/ci/run_tests.py +++ b/functest/ci/run_tests.py @@ -148,7 +148,11 @@ def run_test(test, tier_name, testcases=None): module = importlib.import_module(run_dict['module']) cls = getattr(module, run_dict['class']) test_case = cls() - result = test_case.run() + try: + kwargs = run_dict['args'] + result = test_case.run(**kwargs) + except KeyError: + result = test_case.run() if result == testcase_base.TestcaseBase.EX_OK: if GlobalVariables.REPORT_FLAG: test_case.publish_report() diff --git a/functest/ci/testcases.yaml b/functest/ci/testcases.yaml index 032202e5..6397f764 100755 --- a/functest/ci/testcases.yaml +++ b/functest/ci/testcases.yaml @@ -133,6 +133,10 @@ tiers: run: module: 'functest.opnfv_tests.sdn.odl.odl' class: 'ODLTests' + args: + suites: + - /home/opnfv/repos/odl_test/csit/suites/integration/basic + - /home/opnfv/repos/odl_test/csit/suites/openstack/neutron - name: onos diff --git a/functest/opnfv_tests/openstack/healthcheck/healthcheck.sh b/functest/opnfv_tests/openstack/healthcheck/healthcheck.sh index 57aa0c70..7fa957c0 100755 --- a/functest/opnfv_tests/openstack/healthcheck/healthcheck.sh +++ b/functest/opnfv_tests/openstack/healthcheck/healthcheck.sh @@ -23,17 +23,17 @@ echo "">$LOG_FILE exec 1<>$LOG_FILE info () { - echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healtcheck - INFO - " "$*" | tee -a $LOG_FILE 1>&2 + echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healthcheck - INFO - " "$*" | tee -a $LOG_FILE 1>&2 } debug () { if [[ "${CI_DEBUG,,}" == "true" ]]; then - echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healtcheck - DEBUG - " "$*" | tee -a $LOG_FILE 1>&2 + echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healthcheck - DEBUG - " "$*" | tee -a $LOG_FILE 1>&2 fi } error () { - echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healtcheck - ERROR - " "$*" | tee -a $LOG_FILE 1>&2 + echo -e "$(date '+%Y-%m-%d %H:%M:%S,%3N') - healthcheck - ERROR - " "$*" | tee -a $LOG_FILE 1>&2 exit 1 } @@ -125,16 +125,16 @@ kernel_img=$(cat ${YAML_FILE} | shyaml get-value healthcheck.kernel_image 2> /de ramdisk_img=$(cat ${YAML_FILE} | shyaml get-value healthcheck.ramdisk_image 2> /dev/null || true) extra_properties=$(cat ${YAML_FILE} | shyaml get-value healthcheck.extra_properties 2> /dev/null || 
true) -# Test if we need to create a 3part image +# Test if we need to create a 3party image if [ "X$kernel_img" != "X" ] then - img_id=$(glance image-create --name ${kernel_image} --disk-format aki \ + img_id=$(openstack image create ${kernel_image} --disk-format aki \ --container-format bare < ${kernel_img} | awk '$2 == "id" { print $4 }') extra_opts="--property kernel_id=${img_id}" if [ "X$ramdisk_img" != "X" ] then - img_id=$(glance image-create --name ${ramdisk_image} --disk-format ari \ + img_id=$(openstack image create ${ramdisk_image} --disk-format ari \ --container-format bare < ${ramdisk_img} | awk '$2 == "id" { print $4 }') extra_opts="$extra_opts --property ramdisk_id=${img_id}" fi @@ -152,10 +152,10 @@ fi debug "image extra_properties=${extra_properties}" -eval glance image-create --name ${image_1} --disk-format ${disk_format} --container-format bare \ +eval openstack image create ${image_1} --disk-format ${disk_format} --container-format bare \ ${extra_opts} < ${disk_img} debug "image '${image_1}' created." -eval glance image-create --name ${image_2} --disk-format ${disk_format} --container-format bare \ +eval openstack image create ${image_2} --disk-format ${disk_format} --container-format bare \ ${extra_opts} < ${disk_img} debug "image '${image_2}' created." info "... Glance OK!" diff --git a/functest/opnfv_tests/openstack/rally/rally.py b/functest/opnfv_tests/openstack/rally/rally.py index f984c368..16a872fc 100644 --- a/functest/opnfv_tests/openstack/rally/rally.py +++ b/functest/opnfv_tests/openstack/rally/rally.py @@ -66,6 +66,7 @@ class RallyBase(testcase_base.TestcaseBase): self.cinder_client = os_utils.get_cinder_client() self.network_dict = {} self.volume_type = None + self.smoke = None def _build_task_args(self, test_file_name): task_args = {'service_list': [test_file_name]} @@ -287,7 +288,7 @@ class RallyBase(testcase_base.TestcaseBase): cmd_line = ("rally task validate " "--task {0} " "--task-args \"{1}\"" - .format(task_file, self.__build_task_args(test_name))) + .format(task_file, self._build_task_args(test_name))) logger.debug('running command line: {}'.format(cmd_line)) p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) diff --git a/functest/opnfv_tests/openstack/tempest/conf_utils.py b/functest/opnfv_tests/openstack/tempest/conf_utils.py index 4c5e8663..91a5bb4b 100644 --- a/functest/opnfv_tests/openstack/tempest/conf_utils.py +++ b/functest/opnfv_tests/openstack/tempest/conf_utils.py @@ -172,6 +172,9 @@ def configure_tempest(deployment_dir, IMAGE_ID=None, FLAVOR_ID=None): config.write(config_file) # Copy tempest.conf to /home/opnfv/functest/results/tempest/ + if not os.path.exists(TEMPEST_RESULTS_DIR): + os.makedirs(TEMPEST_RESULTS_DIR) + shutil.copyfile(tempest_conf_file, os.path.join(TEMPEST_RESULTS_DIR, 'tempest.conf')) @@ -205,7 +208,7 @@ def configure_tempest_multisite(deployment_dir): # cmd = ("openstack endpoint show kingbird | grep publicurl |" # "awk '{print $4}' | awk -F '/' '{print $4}'") # kingbird_api_version = os.popen(cmd).read() - kingbird_api_version = os_utils.get_endpoint(service_type='kingbird') + kingbird_api_version = os_utils.get_endpoint(service_type='multisite') if CI_INSTALLER_TYPE == 'fuel': # For MOS based setup, the service is accessible diff --git a/functest/opnfv_tests/openstack/tempest/tempest.py b/functest/opnfv_tests/openstack/tempest/tempest.py index 9c19a147..e1a223a7 100644 --- a/functest/opnfv_tests/openstack/tempest/tempest.py +++ 
b/functest/opnfv_tests/openstack/tempest/tempest.py @@ -249,12 +249,16 @@ class TempestCommon(testcase_base.TestcaseBase): output = logfile.read() error_logs = "" - for match in re.findall('(.*?)[. ]*FAILED', output): + for match in re.findall('(.*?)[. ]*fail ', output): error_logs += match + skipped_testcase = "" + for match in re.findall('(.*?)[. ]*skip:', output): + skipped_testcase += match self.details = {"tests": int(num_tests), "failures": int(num_failures), - "errors": error_logs} + "errors": error_logs, + "skipped": skipped_testcase} except Exception: success_rate = 0 diff --git a/functest/opnfv_tests/sdn/odl/odl.py b/functest/opnfv_tests/sdn/odl/odl.py index 339c305e..25075957 100755 --- a/functest/opnfv_tests/sdn/odl/odl.py +++ b/functest/opnfv_tests/sdn/odl/odl.py @@ -54,6 +54,7 @@ class ODLTests(testcase_base.TestcaseBase): "csit/suites/openstack/neutron") basic_suite_dir = os.path.join(odl_test_repo, "csit/suites/integration/basic") + default_suites = [basic_suite_dir, neutron_suite_dir] res_dir = '/home/opnfv/functest/results/odl/' logger = ft_logger.Logger("opendaylight").getLogger() @@ -89,8 +90,7 @@ class ODLTests(testcase_base.TestcaseBase): self.details['description'] = result.suite.name self.details['tests'] = visitor.get_data() - def main(self, **kwargs): - dirs = [self.basic_suite_dir, self.neutron_suite_dir] + def main(self, suites=default_suites, **kwargs): try: odlusername = kwargs['odlusername'] odlpassword = kwargs['odlpassword'] @@ -117,7 +117,7 @@ class ODLTests(testcase_base.TestcaseBase): stdout_file = os.path.join(self.res_dir, 'stdout.txt') output_dir = os.path.join(self.res_dir, 'output.xml') with open(stdout_file, 'w+') as stdout: - robot.run(*dirs, variable=variables, + robot.run(*suites, variable=variables, output=output_dir, log='NONE', report='NONE', @@ -140,8 +140,13 @@ class ODLTests(testcase_base.TestcaseBase): else: return self.EX_RUN_ERROR - def run(self): + def run(self, **kwargs): try: + suites = self.default_suites + try: + suites = kwargs["suites"] + except KeyError: + pass keystone_url = op_utils.get_endpoint(service_type='identity') neutron_url = op_utils.get_endpoint(service_type='network') kwargs = {'keystoneip': urlparse.urlparse(keystone_url).hostname} @@ -159,7 +164,7 @@ class ODLTests(testcase_base.TestcaseBase): kwargs['ospassword'] = os.environ['OS_PASSWORD'] if installer_type == 'fuel': kwargs['odlwebport'] = '8282' - elif installer_type == 'apex': + elif installer_type == 'apex' or installer_type == 'netvirt': kwargs['odlip'] = os.environ['SDN_CONTROLLER_IP'] kwargs['odlwebport'] = '8081' kwargs['odlrestconfport'] = '8081' @@ -178,7 +183,7 @@ class ODLTests(testcase_base.TestcaseBase): self.logger.exception("Cannot run ODL testcases.") return self.EX_RUN_ERROR - return self.main(**kwargs) + return self.main(suites, **kwargs) class ODLParser(): @@ -228,7 +233,7 @@ if __name__ == '__main__': parser = ODLParser() args = parser.parse_args(sys.argv[1:]) try: - result = odl.main(**args) + result = odl.main(ODLTests.default_suites, **args) if result != testcase_base.TestcaseBase.EX_OK: sys.exit(result) if args['pushtodb']: diff --git a/functest/opnfv_tests/vnf/aaa/aaa.py b/functest/opnfv_tests/vnf/aaa/aaa.py index f1c265f4..f1c265f4 100644..100755 --- a/functest/opnfv_tests/vnf/aaa/aaa.py +++ b/functest/opnfv_tests/vnf/aaa/aaa.py diff --git a/functest/tests/unit/odl/test_odl.py b/functest/tests/unit/odl/test_odl.py index 568fdc82..8f2a5d7e 100644 --- a/functest/tests/unit/odl/test_odl.py +++ b/functest/tests/unit/odl/test_odl.py @@ 
-346,6 +346,28 @@ class ODLTesting(unittest.TestCase): self.test.main = mock.Mock(return_value=status) self.assertEqual(self.test.run(), status) self.test.main.assert_called_once_with( + odl.ODLTests.default_suites, + keystoneip=self._keystone_ip, neutronip=self._neutron_ip, + odlip=odlip, odlpassword=self._odl_password, + odlrestconfport=odlrestconfport, + odlusername=self._odl_username, odlwebport=odlwebport, + ospassword=self._os_password, ostenantname=self._os_tenantname, + osusername=self._os_username) + + def _test_run_defining_multiple_suites( + self, suites, + status=testcase_base.TestcaseBase.EX_OK, + exception=None, odlip="127.0.0.3", odlwebport="8080", + odlrestconfport="8181"): + with mock.patch('functest.utils.openstack_utils.get_endpoint', + side_effect=self._fake_url_for): + if exception: + self.test.main = mock.Mock(side_effect=exception) + else: + self.test.main = mock.Mock(return_value=status) + self.assertEqual(self.test.run(suites=suites), status) + self.test.main.assert_called_once_with( + suites, keystoneip=self._keystone_ip, neutronip=self._neutron_ip, odlip=odlip, odlpassword=self._odl_password, odlrestconfport=odlrestconfport, @@ -394,6 +416,14 @@ class ODLTesting(unittest.TestCase): odlip=self._sdn_controller_ip, odlwebport=self._odl_webport) + def test_run_redefining_suites(self): + os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip + self._test_run_defining_multiple_suites( + [odl.ODLTests.basic_suite_dir], + testcase_base.TestcaseBase.EX_OK, + odlip=self._sdn_controller_ip, + odlwebport=self._odl_webport) + def test_run_fuel(self): os.environ["INSTALLER_TYPE"] = "fuel" self._test_run(testcase_base.TestcaseBase.EX_OK, @@ -413,6 +443,20 @@ class ODLTesting(unittest.TestCase): odlip=self._sdn_controller_ip, odlwebport='8081', odlrestconfport='8081') + def test_run_netvirt_missing_sdn_controller_ip(self): + with mock.patch('functest.utils.openstack_utils.get_endpoint', + side_effect=self._fake_url_for): + os.environ["INSTALLER_TYPE"] = "netvirt" + self.assertEqual(self.test.run(), + testcase_base.TestcaseBase.EX_RUN_ERROR) + + def test_run_netvirt(self): + os.environ["SDN_CONTROLLER_IP"] = self._sdn_controller_ip + os.environ["INSTALLER_TYPE"] = "netvirt" + self._test_run(testcase_base.TestcaseBase.EX_OK, + odlip=self._sdn_controller_ip, odlwebport='8081', + odlrestconfport='8081') + def test_run_joid_missing_sdn_controller(self): with mock.patch('functest.utils.openstack_utils.get_endpoint', side_effect=self._fake_url_for): diff --git a/functest/tests/unit/opnfv_tests/__init__.py b/functest/tests/unit/opnfv_tests/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/functest/tests/unit/opnfv_tests/__init__.py diff --git a/functest/tests/unit/opnfv_tests/openstack/__init__.py b/functest/tests/unit/opnfv_tests/openstack/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/functest/tests/unit/opnfv_tests/openstack/__init__.py diff --git a/functest/tests/unit/opnfv_tests/openstack/rally/__init__.py b/functest/tests/unit/opnfv_tests/openstack/rally/__init__.py new file mode 100644 index 00000000..e69de29b --- /dev/null +++ b/functest/tests/unit/opnfv_tests/openstack/rally/__init__.py diff --git a/functest/tests/unit/opnfv_tests/openstack/rally/test_rally.py b/functest/tests/unit/opnfv_tests/openstack/rally/test_rally.py new file mode 100644 index 00000000..ad39be48 --- /dev/null +++ b/functest/tests/unit/opnfv_tests/openstack/rally/test_rally.py @@ -0,0 +1,391 @@ +#!/usr/bin/env python + +# All rights reserved. 
This program and the accompanying materials +# are made available under the terms of the Apache License, Version 2.0 +# which accompanies this distribution, and is available at +# http://www.apache.org/licenses/LICENSE-2.0 + +import json +import logging +import os +import unittest + +import mock + +from functest.core import testcase_base +from functest.opnfv_tests.openstack.rally import rally +from functest.utils.constants import CONST + + +class OSRallyTesting(unittest.TestCase): + + logging.disable(logging.CRITICAL) + + def setUp(self): + self.nova_client = mock.Mock() + self.neutron_client = mock.Mock() + self.cinder_client = mock.Mock() + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_nova_client', + return_value=self.nova_client), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_neutron_client', + return_value=self.neutron_client), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_cinder_client', + return_value=self.cinder_client): + self.rally_base = rally.RallyBase() + self.rally_base.network_dict['net_id'] = 'test_net_id' + self.polling_iter = 2 + + def test_build_task_args_missing_floating_network(self): + CONST.OS_AUTH_URL = None + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_external_net', + return_value=None): + task_args = self.rally_base._build_task_args('test_file_name') + self.assertEqual(task_args['floating_network'], '') + + def test_build_task_args_missing_net_id(self): + CONST.OS_AUTH_URL = None + self.rally_base.network_dict['net_id'] = '' + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_external_net', + return_value='test_floating_network'): + task_args = self.rally_base._build_task_args('test_file_name') + self.assertEqual(task_args['netid'], '') + + def test_build_task_args_missing_auth_url(self): + CONST.OS_AUTH_URL = None + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_external_net', + return_value='test_floating_network'): + task_args = self.rally_base._build_task_args('test_file_name') + self.assertEqual(task_args['request_url'], '') + + def check_scenario_file(self, value): + yaml_file = 'opnfv-{}.yaml'.format('test_file_name') + if yaml_file in value: + return False + return True + + def test_prepare_test_list_missing_scenario_file(self): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.path.exists', + side_effect=self.check_scenario_file), \ + self.assertRaises(Exception): + self.rally_base._prepare_test_list('test_file_name') + + def check_temp_dir(self, value): + yaml_file = 'opnfv-{}.yaml'.format('test_file_name') + if yaml_file in value: + return True + return False + + def test_prepare_test_list_missing_temp_dir(self): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.path.exists', + side_effect=self.check_temp_dir), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.makedirs') as mock_os_makedir, \ + mock.patch.object(self.rally_base, 'apply_blacklist', + return_value=mock.Mock()) as mock_method: + yaml_file = 'opnfv-{}.yaml'.format('test_file_name') + ret_val = os.path.join(self.rally_base.TEMP_DIR, yaml_file) + self.assertEqual(self.rally_base. 
+ _prepare_test_list('test_file_name'), + ret_val) + self.assertTrue(mock_method.called) + self.assertTrue(mock_os_makedir.called) + + def test_get_task_id_default(self): + cmd_raw = 'Task 1: started' + self.assertEqual(self.rally_base.get_task_id(cmd_raw), + '1') + + def test_get_task_id_missing_id(self): + cmd_raw = '' + self.assertEqual(self.rally_base.get_task_id(cmd_raw), + None) + + def test_task_succeed_fail(self): + json_raw = json.dumps([None]) + self.assertEqual(self.rally_base.task_succeed(json_raw), + False) + json_raw = json.dumps([{'result': [{'error': ['test_error']}]}]) + self.assertEqual(self.rally_base.task_succeed(json_raw), + False) + + def test_task_succeed_success(self): + json_raw = json.dumps('') + self.assertEqual(self.rally_base.task_succeed(json_raw), + True) + + def polling(self): + if self.polling_iter == 0: + return "something" + self.polling_iter -= 1 + return None + + def test_get_cmd_output(self): + proc = mock.Mock() + attrs = {'poll.side_effect': self.polling, + 'stdout.readline.return_value': 'line'} + proc.configure_mock(**attrs) + self.assertEqual(self.rally_base.get_cmd_output(proc), + 'lineline') + + def test_excl_scenario_default(self): + CONST.INSTALLER_TYPE = 'test_installer' + CONST.DEPLOY_SCENARIO = 'test_scenario' + dic = {'scenario': [{'scenarios': ['test_scenario'], + 'installers': ['test_installer'], + 'tests': ['test']}]} + with mock.patch('__builtin__.open', mock.mock_open()), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'yaml.safe_load', + return_value=dic): + self.assertEqual(self.rally_base.excl_scenario(), + ['test']) + + def test_excl_scenario_exception(self): + with mock.patch('__builtin__.open', side_effect=Exception): + self.assertEqual(self.rally_base.excl_scenario(), + []) + + def test_excl_func_default(self): + CONST.INSTALLER_TYPE = 'test_installer' + CONST.DEPLOY_SCENARIO = 'test_scenario' + dic = {'functionality': [{'functions': ['no_live_migration'], + 'tests': ['test']}]} + with mock.patch('__builtin__.open', mock.mock_open()), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'yaml.safe_load', + return_value=dic), \ + mock.patch.object(self.rally_base, 'live_migration_supported', + return_value=False): + self.assertEqual(self.rally_base.excl_func(), + ['test']) + + def test_excl_func_exception(self): + with mock.patch('__builtin__.open', side_effect=Exception): + self.assertEqual(self.rally_base.excl_func(), + []) + + def test_file_is_empty_default(self): + mock_obj = mock.Mock() + attrs = {'st_size': 10} + mock_obj.configure_mock(**attrs) + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.stat', + return_value=mock_obj): + self.assertEqual(self.rally_base.file_is_empty('test_file_name'), + False) + + def test_file_is_empty_exception(self): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.stat', + side_effect=Exception): + self.assertEqual(self.rally_base.file_is_empty('test_file_name'), + True) + + def test_run_task_missing_task_file(self): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.path.exists', + return_value=False), \ + self.assertRaises(Exception): + self.rally_base._run_task('test_name') + + @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.info') + def test_run_task_no_tests_for_scenario(self, mock_logger_info): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' 
+ 'os.path.exists', + return_value=True), \ + mock.patch.object(self.rally_base, '_prepare_test_list', + return_value='test_file_name'), \ + mock.patch.object(self.rally_base, 'file_is_empty', + return_value=True): + self.rally_base._run_task('test_name') + str = 'No tests for scenario "test_name"' + mock_logger_info.assert_any_call(str) + + @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.error') + def test_run_task_taskid_missing(self, mock_logger_error): + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.path.exists', + return_value=True), \ + mock.patch.object(self.rally_base, '_prepare_test_list', + return_value='test_file_name'), \ + mock.patch.object(self.rally_base, 'file_is_empty', + return_value=False), \ + mock.patch.object(self.rally_base, '_build_task_args', + return_value={}), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'subprocess.Popen'), \ + mock.patch.object(self.rally_base, '_get_output', + return_value=mock.Mock()), \ + mock.patch.object(self.rally_base, 'get_task_id', + return_value=None), \ + mock.patch.object(self.rally_base, 'get_cmd_output', + return_value=''): + self.rally_base._run_task('test_name') + str = 'Failed to retrieve task_id, validating task...' + mock_logger_error.assert_any_call(str) + + @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.info') + @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.error') + def test_run_task_default(self, mock_logger_error, + mock_logger_info): + popen = mock.Mock() + attrs = {'read.return_value': 'json_result'} + popen.configure_mock(**attrs) + + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.path.exists', + return_value=True), \ + mock.patch.object(self.rally_base, '_prepare_test_list', + return_value='test_file_name'), \ + mock.patch.object(self.rally_base, 'file_is_empty', + return_value=False), \ + mock.patch.object(self.rally_base, '_build_task_args', + return_value={}), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'subprocess.Popen'), \ + mock.patch.object(self.rally_base, '_get_output', + return_value=mock.Mock()), \ + mock.patch.object(self.rally_base, 'get_task_id', + return_value='1'), \ + mock.patch.object(self.rally_base, 'get_cmd_output', + return_value=''), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.makedirs'), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os.popen', + return_value=popen), \ + mock.patch('__builtin__.open', mock.mock_open()), \ + mock.patch.object(self.rally_base, 'task_succeed', + return_value=True): + self.rally_base._run_task('test_name') + str = 'Test scenario: "test_name" OK.\n' + mock_logger_info.assert_any_call(str) + + def test_prepare_env_testname_invalid(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'test' + with self.assertRaises(Exception): + self.rally_base._prepare_env() + + def test_prepare_env_volume_creation_failed(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'test1' + volume_type = None + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.list_volume_types', + return_value=None), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' 
+ 'os_utils.create_volume_type', + return_value=volume_type), \ + self.assertRaises(Exception): + self.rally_base._prepare_env() + + def test_prepare_env_image_missing(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'test1' + volume_type = mock.Mock() + image_id = None + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.list_volume_types', + return_value=None), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.create_volume_type', + return_value=volume_type), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_or_create_image', + return_value=(True, image_id)), \ + self.assertRaises(Exception): + self.rally_base._prepare_env() + + def test_prepare_env_image_shared_network_creation_failed(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'test1' + volume_type = mock.Mock() + image_id = 'image_id' + network_dict = None + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.list_volume_types', + return_value=None), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.create_volume_type', + return_value=volume_type), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.get_or_create_image', + return_value=(True, image_id)), \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.create_shared_network_full', + return_value=network_dict), \ + self.assertRaises(Exception): + self.rally_base._prepare_env() + + def test_run_tests_all(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'all' + with mock.patch.object(self.rally_base, '_run_task', + return_value=mock.Mock()): + self.rally_base._run_tests() + self.rally_base._run_task.assert_any_call('test1') + self.rally_base._run_task.assert_any_call('test2') + + def test_run_tests_default(self): + self.rally_base.TESTS = ['test1', 'test2'] + self.rally_base.test_name = 'test1' + with mock.patch.object(self.rally_base, '_run_task', + return_value=mock.Mock()): + self.rally_base._run_tests() + self.rally_base._run_task.assert_any_call('test1') + + @mock.patch('functest.opnfv_tests.openstack.rally.rally.logger.info') + def test_generate_report(self, mock_logger_info): + summary = [{'test_name': 'test_name', + 'overall_duration': 5, + 'nb_tests': 3, + 'success': 5}] + self.rally_base.summary = summary + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'ft_utils.check_success_rate', + return_value='criteria'): + self.rally_base._generate_report() + self.assertTrue(mock_logger_info.called) + + def test_clean_up_default(self): + self.rally_base.volume_type = mock.Mock() + self.rally_base.cinder_client = mock.Mock() + self.rally_base.image_exists = False + self.rally_base.image_id = 1 + self.rally_base.nova_client = mock.Mock() + with mock.patch('functest.opnfv_tests.openstack.rally.rally.' + 'os_utils.delete_volume_type') as mock_vol_method, \ + mock.patch('functest.opnfv_tests.openstack.rally.rally.' 
+ 'os_utils.delete_glance_image') as mock_glance_method: + self.rally_base._clean_up() + mock_vol_method.assert_any_call(self.rally_base.cinder_client, + self.rally_base.volume_type) + mock_glance_method.assert_any_call(self.rally_base.nova_client, + 1) + + def test_run_default(self): + with mock.patch.object(self.rally_base, '_prepare_env'), \ + mock.patch.object(self.rally_base, '_run_tests'), \ + mock.patch.object(self.rally_base, '_generate_report'), \ + mock.patch.object(self.rally_base, '_clean_up'): + self.assertEqual(self.rally_base.run(), + testcase_base.TestcaseBase.EX_OK) + + def test_run_exception(self): + with mock.patch.object(self.rally_base, '_prepare_env', + side_effect=Exception): + self.assertEqual(self.rally_base.run(), + testcase_base.TestcaseBase.EX_RUN_ERROR) + + +if __name__ == "__main__": + unittest.main(verbosity=2) diff --git a/functest/tests/unit/test_logging.ini b/functest/tests/unit/test_logging.ini index 3d5b947c..492767d1 100644 --- a/functest/tests/unit/test_logging.ini +++ b/functest/tests/unit/test_logging.ini @@ -1,27 +1,27 @@ -[loggers]
-keys=root,functest_logger
-
-[logger_root]
-level=DEBUG
-handlers=console
-
-[logger_functest_logger]
-level=DEBUG
-handlers=console
-qualname=functest.utils.functest_logger
-propagate=0
-
-[handlers]
-keys=console
-
-[handler_console]
-class=StreamHandler
-level=INFO
-formatter=standard
-args=(sys.stdout,)
-
-[formatters]
-keys=standard
-
-[formatter_standard]
+[loggers] +keys=root,functest_logger + +[logger_root] +level=DEBUG +handlers=console + +[logger_functest_logger] +level=DEBUG +handlers=console +qualname=functest.utils.functest_logger +propagate=0 + +[handlers] +keys=console + +[handler_console] +class=StreamHandler +level=INFO +formatter=standard +args=(sys.stdout,) + +[formatters] +keys=standard + +[formatter_standard] format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
\ No newline at end of file diff --git a/functest/tests/unit/utils/test_functest_utils.py b/functest/tests/unit/utils/test_functest_utils.py index ce9086a7..bb836011 100644 --- a/functest/tests/unit/utils/test_functest_utils.py +++ b/functest/tests/unit/utils/test_functest_utils.py @@ -131,15 +131,17 @@ class FunctestUtilsTesting(unittest.TestCase): self.assertEqual(functest_utils.get_installer_type(), self.installer) - @mock.patch('functest.utils.functest_utils.logger.error') - def test_get_scenario_failed(self, mock_logger_error): + @mock.patch('functest.utils.functest_utils.logger.info') + def test_get_scenario_failed(self, mock_logger_info): with mock.patch.dict(os.environ, {}, clear=True): self.assertEqual(functest_utils.get_scenario(), - "Unknown_scenario") - mock_logger_error.assert_called_once_with("Impossible to retrieve" - " the scenario") + "os-nosdn-nofeature-noha") + mock_logger_info.assert_called_once_with("Impossible to retrieve " + "the scenario.Use " + "default " + "os-nosdn-nofeature-noha") def test_get_scenario_default(self): with mock.patch.dict(os.environ, @@ -158,17 +160,17 @@ class FunctestUtilsTesting(unittest.TestCase): mock_get_build_tag.return_value = "unknown_build_tag" self.assertEqual(functest_utils.get_version(), "unknown") - @mock.patch('functest.utils.functest_utils.logger.error') - def test_get_pod_name_failed(self, mock_logger_error): + @mock.patch('functest.utils.functest_utils.logger.info') + def test_get_pod_name_failed(self, mock_logger_info): with mock.patch.dict(os.environ, {}, clear=True): self.assertEqual(functest_utils.get_pod_name(), "unknown-pod") - mock_logger_error.assert_called_once_with("Unable to retrieve " - "the POD name from " - "environment. Using " - "pod name 'unknown-pod'") + mock_logger_info.assert_called_once_with("Unable to retrieve " + "the POD name from " + "environment. 
Using " + "pod name 'unknown-pod'") def test_get_pod_name_default(self): with mock.patch.dict(os.environ, @@ -177,15 +179,15 @@ class FunctestUtilsTesting(unittest.TestCase): self.assertEqual(functest_utils.get_pod_name(), self.node_name) - @mock.patch('functest.utils.functest_utils.logger.error') - def test_get_build_tag_failed(self, mock_logger_error): + @mock.patch('functest.utils.functest_utils.logger.info') + def test_get_build_tag_failed(self, mock_logger_info): with mock.patch.dict(os.environ, {}, clear=True): self.assertEqual(functest_utils.get_build_tag(), - "unknown_build_tag") - mock_logger_error.assert_called_once_with("Impossible to retrieve" - " the build tag") + "none") + mock_logger_info.assert_called_once_with("Impossible to retrieve" + " the build tag") def test_get_build_tag_default(self): with mock.patch.dict(os.environ, diff --git a/functest/tests/unit/utils/test_openstack_tacker.py b/functest/tests/unit/utils/test_openstack_tacker.py index a8330c0e..dc717258 100644 --- a/functest/tests/unit/utils/test_openstack_tacker.py +++ b/functest/tests/unit/utils/test_openstack_tacker.py @@ -146,8 +146,7 @@ class OSTackerTesting(unittest.TestCase): tosca_file=None) self.assertEqual(resp, self.createvnfd) - @mock.patch('functest.utils.openstack_tacker.logger.error') - def test_create_vnfd_default(self, mock_logger_error): + def test_create_vnfd_default(self): with mock.patch.object(self.tacker_client, 'create_vnfd', return_value=self.createvnfd), \ mock.patch('__builtin__.open', mock.mock_open(read_data='1')) \ @@ -155,16 +154,15 @@ class OSTackerTesting(unittest.TestCase): resp = openstack_tacker.create_vnfd(self.tacker_client, tosca_file=self.tosca_file) m.assert_called_once_with(self.tosca_file) - mock_logger_error.assert_called_once_with('1') self.assertEqual(resp, self.createvnfd) - @mock.patch('functest.utils.openstack_tacker.logger.exception') - def test_create_vnfd_exception(self, mock_logger_excep): + @mock.patch('functest.utils.openstack_tacker.logger.error') + def test_create_vnfd_exception(self, mock_logger_error): with mock.patch.object(self.tacker_client, 'create_vnfd', side_effect=Exception): resp = openstack_tacker.create_vnfd(self.tacker_client, tosca_file=self.tosca_file) - mock_logger_excep.assert_called_once_with(test_utils. + mock_logger_error.assert_called_once_with(test_utils. SubstrMatch("Error" " [create" "_vnfd(" diff --git a/functest/utils/constants.py b/functest/utils/constants.py index 2e8eb3f4..2e8eb3f4 100644..100755 --- a/functest/utils/constants.py +++ b/functest/utils/constants.py diff --git a/functest/utils/functest_utils.py b/functest/utils/functest_utils.py index 2bf87a05..04055464 100644 --- a/functest/utils/functest_utils.py +++ b/functest/utils/functest_utils.py @@ -96,8 +96,9 @@ def get_scenario(): try: scenario = os.environ['DEPLOY_SCENARIO'] except KeyError: - logger.error("Impossible to retrieve the scenario") - scenario = "Unknown_scenario" + logger.info("Impossible to retrieve the scenario." + "Use default os-nosdn-nofeature-noha") + scenario = "os-nosdn-nofeature-noha" return scenario @@ -128,7 +129,7 @@ def get_pod_name(): try: return os.environ['NODE_NAME'] except KeyError: - logger.error( + logger.info( "Unable to retrieve the POD name from environment. 
" + "Using pod name 'unknown-pod'") return "unknown-pod" @@ -141,8 +142,8 @@ def get_build_tag(): try: build_tag = os.environ['BUILD_TAG'] except KeyError: - logger.error("Impossible to retrieve the build tag") - build_tag = "unknown_build_tag" + logger.info("Impossible to retrieve the build tag") + build_tag = "none" return build_tag diff --git a/functest/utils/openstack_tacker.py b/functest/utils/openstack_tacker.py index f17b421e..c7ac89af 100644 --- a/functest/utils/openstack_tacker.py +++ b/functest/utils/openstack_tacker.py @@ -20,9 +20,9 @@ import time logger = ft_logger.Logger("tacker_utils").getLogger() -def get_tacker_client(): - creds_tacker = os_utils.get_credentials() - return tackerclient.Client(**creds_tacker) +def get_tacker_client(other_creds={}): + sess = os_utils.get_session(other_creds) + return tackerclient.Client(session=sess) # ********************************************* @@ -74,12 +74,12 @@ def create_vnfd(tacker_client, tosca_file=None): if tosca_file is not None: with open(tosca_file) as tosca_fd: vnfd_body = tosca_fd.read() - logger.error(vnfd_body) + logger.info('VNFD template:\n{0}'.format(vnfd_body)) return tacker_client.create_vnfd( body={"vnfd": {"attributes": {"vnfd": vnfd_body}}}) except Exception, e: - logger.exception("Error [create_vnfd(tacker_client, '%s')]: %s" - % (tosca_file, e)) + logger.error("Error [create_vnfd(tacker_client, '%s')]: %s" + % (tosca_file, e)) return None @@ -124,7 +124,8 @@ def create_vnf(tacker_client, vnf_name, vnfd_id=None, vnfd_name=None): vnf_body['vnf']['vnfd_id'] = get_vnfd_id(tacker_client, vnfd_name) return tacker_client.create_vnf(body=vnf_body) except Exception, e: - logger.error("error [create_vnf(tacker_client, '%s', '%s', '%s')]: %s" + logger.error("error [create_vnf(tacker_client," + " '%s', '%s', '%s')]: %s" % (vnf_name, vnfd_id, vnfd_name, e)) return None @@ -206,7 +207,8 @@ def create_sfc(tacker_client, sfc_name, for name in chain_vnf_names] return tacker_client.create_sfc(body=sfc_body) except Exception, e: - logger.error("error [create_sfc(tacker_client, '%s', '%s', '%s')]: %s" + logger.error("error [create_sfc(tacker_client," + " '%s', '%s', '%s')]: %s" % (sfc_name, chain_vnf_ids, chain_vnf_names, e)) return None @@ -263,8 +265,8 @@ def create_sfc_classifier(tacker_client, sfc_clf_name, sfc_id=None, tacker_client, sfc_name) return tacker_client.create_sfc_classifier(body=sfc_clf_body) except Exception, e: - logger.error("error [create_sfc_classifier(tacker_client, '%s', '%s'," - " '%s', '%s')]: '%s'" + logger.error("error [create_sfc_classifier(tacker_client," + " '%s', '%s','%s', '%s')]: '%s'" % (sfc_clf_name, sfc_id, sfc_name, str(match), e)) return None |