Diffstat (limited to 'legacy/tests')
-rw-r--r--   legacy/tests/__init__.py                      0
-rw-r--r--   legacy/tests/ansible_api_test.py             22
-rw-r--r--   legacy/tests/api/__init__.py                  0
-rw-r--r--   legacy/tests/api/test_server.py             123
-rw-r--r--   legacy/tests/args_handler_test.py            36
-rw-r--r--   legacy/tests/cli_test.py                     44
-rw-r--r--   legacy/tests/create_zones_test.py           110
-rw-r--r--   legacy/tests/driver_test.py                  95
-rw-r--r--   legacy/tests/env_setup_test.py              120
-rw-r--r--   legacy/tests/functional/__init__.py           0
-rw-r--r--   legacy/tests/functional/yaml_schema_test.py  16
-rw-r--r--   legacy/tests/helper/perftest.yaml             5
-rw-r--r--   legacy/tests/helper/suite.yaml                6
-rw-r--r--   legacy/tests/helper/version.yaml             12
-rw-r--r--   legacy/tests/spawn_vm_test.py                56
15 files changed, 645 insertions, 0 deletions
diff --git a/legacy/tests/__init__.py b/legacy/tests/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/legacy/tests/__init__.py
diff --git a/legacy/tests/ansible_api_test.py b/legacy/tests/ansible_api_test.py
new file mode 100644
index 00000000..6f286fc3
--- /dev/null
+++ b/legacy/tests/ansible_api_test.py
@@ -0,0 +1,22 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from tests import BaseTest
+from qtip.utils.ansible_api import AnsibleApi
+
+
+class TestClass(BaseTest):
+
+    def test_call_ansible_api_success(self):
+        ansible_api = AnsibleApi()
+        ret = ansible_api.execute_playbook(self.abspath('hosts'),
+                                           self.abspath('test.yml'),
+                                           self.abspath('QtipKey'),
+                                           {'keys': 'test'})
+        assert ret == 3
diff --git a/legacy/tests/api/__init__.py b/legacy/tests/api/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/legacy/tests/api/__init__.py
diff --git a/legacy/tests/api/test_server.py b/legacy/tests/api/test_server.py
new file mode 100644
index 00000000..e9364d3d
--- /dev/null
+++ b/legacy/tests/api/test_server.py
@@ -0,0 +1,123 @@
+import json
+import time
+
+import mock
+import pytest
+
+import qtip.api.cmd.server as server
+
+
+def setup_module():
+    server.add_routers()
+
+
+@pytest.fixture
+def app():
+    return server.app
+
+
+@pytest.fixture
+def app_client(app):
+    client = app.test_client()
+    return client
+
+
+def side_effect_sleep(sleep_time):
+    time.sleep(sleep_time)
+
+
+def side_effect_pass():
+    pass
+
+
+class TestClass:
+    @pytest.mark.parametrize("body, expected", [
+        ({'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2'},
+         {'job_id': '',
+          'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'suite_name': 'compute',
+          'max_minutes': 60,
+          'type': 'BM',
+          'testdb_url': None,
+          'node_name': None,
+          'state': 'finished',
+          'state_detail': [{'state': 'finished', 'benchmark': 'dhrystone_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'whetstone_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'ramspeed_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'dpi_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'ssl_bm.yaml'}],
+          'result': 0}),
+        ({'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'max_minutes': 20,
+          'suite_name': 'compute',
+          'type': 'VM',
+          'benchmark_name': 'dhrystone_vm.yaml',
+          'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
+          'node_name': 'zte-pod2'},
+         {'job_id': '',
+          'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'suite_name': 'compute',
+          'max_minutes': 20,
+          'type': 'VM',
+          'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
+          'node_name': 'zte-pod2',
+          'state': 'finished',
+          'state_detail': [{u'state': u'finished', u'benchmark': u'dhrystone_vm.yaml'}],
+          'result': 0})
+    ])
+    @mock.patch('qtip.utils.args_handler.prepare_and_run_benchmark')
+    def test_post_get_delete_job_successful(self, mock_args_handler, app_client, body, expected):
+        mock_args_handler.return_value = {'result': 0,
+                                          'detail': {'host': [(u'10.20.6.14', {'unreachable': 0,
+                                                                               'skipped': 13,
+                                                                               'ok': 27,
+                                                                               'changed': 26,
+                                                                               'failures': 0}),
+                                                              ('localhost', {'unreachable': 0,
+                                                                             'skipped': 0,
+                                                                             'ok': 6,
+                                                                             'changed': 6,
+                                                                             'failures': 0}),
+                                                              (u'10.20.6.13', {'unreachable': 0,
+                                                                               'skipped': 13,
+                                                                               'ok': 27,
+                                                                               'changed': 26,
+                                                                               'failures': 0})]}}
+
+        reply = app_client.post("/api/v1.0/jobs", data=body)
+        print(reply.data)
+        id = json.loads(reply.data)['job_id']
+        expected['job_id'] = id
+        post_process = ''
+        while post_process != 'finished':
+            get_reply = app_client.get("/api/v1.0/jobs/%s" % id)
+            reply_data = json.loads(get_reply.data)
+            post_process = reply_data['state']
+            print(reply_data)
+        assert len(filter(lambda x: reply_data[x] == expected[x], expected.keys())) == len(expected)
+        delete_reply = app_client.delete("/api/v1.0/jobs/%s" % id)
+        assert "successful" in delete_reply.data
+
+    @pytest.mark.parametrize("body, expected", [
+        ([{'installer_type': 'fuel',
+           'installer_ip': '10.20.0.2'},
+          {'installer_type': 'compass',
+           'installer_ip': '192.168.20.50'}],
+         ['job_id',
+          'It already has one job running now!'])
+    ])
+    @mock.patch('qtip.utils.args_handler.prepare_and_run_benchmark',
+                side_effect=[side_effect_sleep(0.5), side_effect_pass])
+    def test_post_two_jobs_unsuccessful(self, mock_args_hanler, app_client, body, expected):
+        reply_1 = app_client.post("/api/v1.0/jobs", data=body[0])
+        reply_2 = app_client.post("/api/v1.0/jobs", data=body[1])
+        assert expected[0] in json.loads(reply_1.data).keys()
+        app_client.delete("/api/v1.0/jobs/%s" % json.loads(reply_1.data)['job_id'])
+        assert expected[1] in json.dumps(reply_2.data)
diff --git a/legacy/tests/args_handler_test.py b/legacy/tests/args_handler_test.py
new file mode 100644
index 00000000..dceca1f5
--- /dev/null
+++ b/legacy/tests/args_handler_test.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import pytest
+import mock
+import qtip.utils.args_handler
+
+
+@pytest.mark.xfail(reason="to be fixed")
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['fuel', '/home', 'benchmarks/testplan/default/network/iperf_bm.yaml'],
+         ['fuel', '/home', "iperf",
+          [('1-server', ['10.20.0.23']), ('2-host', ['10.20.0.24'])],
+          "iperf_bm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 10)],
+          [("10.20.0.24", [None]), ("10.20.0.23", [None])], {}])
+    ])
+    @mock.patch('qtip.utils.args_handler.Env_setup.call_ping_test')
+    @mock.patch('qtip.utils.args_handler.Env_setup.call_ssh_test')
+    @mock.patch('qtip.utils.args_handler.Env_setup.update_ansible')
+    @mock.patch('qtip.utils.args_handler.SpawnVM')
+    @mock.patch('qtip.utils.args_handler.Driver.drive_bench')
+    def test_prepare_and_run_benchmark_successful(self, mock_driver, mock_sqawn_vm, mock_env_setup_ping,
+                                                  mock_env_setup_ssh, mock_update_ansible, test_input, expected):
+        mock_ips = mock.Mock(return_value=["10.20.0.23", "10.20.0.24"])
+        qtip.utils.args_handler.Env_setup.fetch_compute_ips = mock_ips
+        qtip.utils.args_handler.prepare_and_run_benchmark(test_input[0], test_input[1], test_input[2])
+        call = mock_driver.call_args
+        call_args, call_kwargs = call
+        assert sorted(map(sorted, call_args)) == sorted(map(sorted, expected))
diff --git a/legacy/tests/cli_test.py b/legacy/tests/cli_test.py
new file mode 100644
index 00000000..0f3e4158
--- /dev/null
+++ b/legacy/tests/cli_test.py
@@ -0,0 +1,44 @@
+import pytest
+import mock
+import os
+from qtip.utils.cli import Cli
+from os.path import expanduser
+
+
+@pytest.mark.skip("TODO(yujunz) recover test after refactoring")
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['-l',
+          'zte',
+          '-f',
+          'compute'], "You have specified a lab that is not present under benchmarks/testplan"),
+        (['-l',
+          'default',
+          '-f',
+          'test'], "This suite file test doesn't exist under benchmarks/suite/")
+    ])
+    def test_cli_error(self, capfd, test_input, expected):
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'})
+        with pytest.raises(SystemExit):
+            k.start()
+            Cli(test_input)
+            k.stop()
+        with open(expanduser('~') + "/qtip/logs/cli.log", "r") as file:
+            data = file.read()
+        assert expected in data
+
+    @pytest.mark.parametrize("test_input, expected", [
+        (['-l',
+          'default',
+          '-f',
+          'storage'], [('fuel', '/home', 'benchmarks/testplan/default/storage/fio_bm.yaml'),
+                       ('fuel', '/home', 'benchmarks/testplan/default/storage/fio_vm.yaml')])
+    ])
+    @mock.patch('qtip.utils.cli.args_handler.prepare_and_run_benchmark')
+    def test_cli_successful(self, mock_args_handler, test_input, expected):
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'})
+        k.start()
+        Cli(test_input)
+        k.stop()
+        call_list = map(lambda x: mock_args_handler.call_args_list[x][0], range(len(expected)))
+        assert sorted(call_list) == sorted(expected)
diff --git a/legacy/tests/create_zones_test.py b/legacy/tests/create_zones_test.py
new file mode 100644
index 00000000..dcfff5ec
--- /dev/null
+++ b/legacy/tests/create_zones_test.py
@@ -0,0 +1,110 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.create_zones import AvailabilityZone
+
+return_list = []
+
+
+def get_agg_mock(host):
+    agg = Mock()
+    agg.name = host
+    agg.id = host
+    return agg
+
+
+class HyperMock(MagicMock):
+    def list(self):
+        mock_hypervisor = [Mock(service={'host': '10.20.0.4'}), Mock(service={'host': '10.20.0.5'})]
+        return mock_hypervisor
+
+
+class AggMock(MagicMock):
+    def get_details(self, agg_id):
+        print "get_details:{0}".format(agg_id)
+        return Mock(hosts=[])
+
+    def create(self, host, agg):
+        print "create:{0}:{1}".format(host, agg)
+        return agg
+
+    def list(self):
+        return return_list
+
+    def delete(self, agg_id):
+        print "delete:{0}".format(agg_id)
+        pass
+
+    def add_host(self, aggregate, host):
+        print "add_host:{0}:{1}".format(aggregate, host)
+        pass
+
+    def remove_host(self, agg_id, host):
+        print "remove_host:{0}:{1}".format(agg_id, host)
+        pass
+
+
+class NovaMock(MagicMock):
+    hypervisors = HyperMock()
+    aggregates = AggMock()
+
+
+@pytest.mark.xfail(reason="unstable result")
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['compute1', 'compute2'],
+         ['create:compute1:compute1',
+          'add_host:compute1:10.20.0.4',
+          'create:compute2:compute2',
+          'add_host:compute2:10.20.0.5']),
+        (['compute1'],
+         ['create:compute1:compute1',
+          'add_host:compute1:10.20.0.4']),
+    ])
+    @mock.patch('qtip.utils.create_zones.client', autospec=True)
+    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
+    @mock.patch('qtip.utils.create_zones.session')
+    def test_create_zones_success(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
+        nova_obj = NovaMock()
+        mock_nova_client.Client.return_value = nova_obj()
+        k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
+                                         'OS_USERNAME': 'admin',
+                                         'OS_PASSWORD': 'admin',
+                                         'OS_TENANT_NAME': 'admin'})
+        k.start()
+        azone = AvailabilityZone()
+        azone.create_aggs(test_input)
+        k.stop()
+        resout, reserr = capfd.readouterr()
+        for x in expected:
+            assert x in resout
+
+    @pytest.mark.parametrize("test_input, expected", [
+        ([get_agg_mock('10.20.0.4'), get_agg_mock('10.20.0.5')],
+         ['get_details:10.20.0.4',
+          'delete:10.20.0.4',
+          'get_details:10.20.0.5',
+          'delete:10.20.0.5']),
+        ([],
+         []),
+    ])
+    @mock.patch('qtip.utils.create_zones.client', autospec=True)
+    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
+    @mock.patch('qtip.utils.create_zones.session')
+    def test_clean_all_aggregates(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
+        global return_list
+        return_list = test_input
+        nova_obj = NovaMock()
+        mock_nova_client.Client.return_value = nova_obj()
+        k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
+                                         'OS_USERNAME': 'admin',
+                                         'OS_PASSWORD': 'admin',
+                                         'OS_TENANT_NAME': 'admin'})
+        k.start()
+        azone = AvailabilityZone()
+        azone.clean_all_aggregates()
+        k.stop()
+        resout, reserr = capfd.readouterr()
+        for x in expected:
+            assert x in resout
diff --git a/legacy/tests/driver_test.py b/legacy/tests/driver_test.py
new file mode 100644
index 00000000..432ce1ae
--- /dev/null
+++ b/legacy/tests/driver_test.py
@@ -0,0 +1,95 @@
+import pytest
+import mock
+from qtip.utils.driver import Driver
+from os.path import expanduser
+
+HOME_DIR = expanduser('~')
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['fuel',
+          '/home',
+          "iperf",
+          [('host', ['10.20.0.13', '10.20.0.15'])],
+          "iperf_bm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+          [("10.20.0.13", [None]), ("10.20.0.15", [None])],
+          {'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}],
+         [{'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '',
+           'ip2': '',
+           'installer': 'fuel',
+           'workingdir': '/home',
+           'fname': 'iperf_bm.yaml',
+           'username': 'root',
+           'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "host"}]),
+        (['joid',
+          '/home',
+          "iperf",
+          [('1-server', ['10.20.0.13']), ('2-host', ['10.20.0.15'])],
+          "iperf_vm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+          [('1-server', '10.10.17.4'), ('2-host', '10.10.17.5')],
+          {}],
+         [{'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '10.20.0.13',
+           'ip2': '',
+           'installer': 'joid',
+           'privateip1': '10.10.17.4',
+           'workingdir': '/home',
+           'fname': 'iperf_vm.yaml',
+           'username': 'ubuntu',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "1-server"},
+          {'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '10.20.0.13',
+           'ip2': '',
+           'installer': 'joid',
+           'privateip1': '10.10.17.4',
+           'workingdir': '/home',
+           'fname': 'iperf_vm.yaml',
+           'username': 'ubuntu',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "2-host"}])
+    ])
+    @mock.patch('qtip.utils.driver.AnsibleApi.execute_playbook')
+    @mock.patch('qtip.utils.driver.AnsibleApi.get_detail_playbook_stats')
+    def test_driver_success(self, mock_stats, mock_ansible, test_input, expected):
+        mock_ansible.return_value = True
+        mock_stats.return_value = [(u'10.20.6.14', {'unreachable': 0,
+                                                    'skipped': 13,
+                                                    'ok': 27,
+                                                    'changed': 26,
+                                                    'failures': 0}),
+                                   ('localhost', {'unreachable': 0,
+                                                  'skipped': 0,
+                                                  'ok': 6,
+                                                  'changed': 6,
+                                                  'failures': 0}),
+                                   (u'10.20.6.13', {'unreachable': 0,
+                                                    'skipped': 13,
+                                                    'ok': 27,
+                                                    'changed': 26,
+                                                    'failures': 0})]
+        dri = Driver()
+        result = dri.drive_bench(test_input[0], test_input[1], test_input[2], test_input[3],
+                                 test_input[4], test_input[5], test_input[6], test_input[7])
+        call_list = mock_ansible.call_args_list
+        for call in call_list:
+            call_args, call_kwargs = call
+            real_call = call_args[3]
+            assert real_call == expected[call_list.index(call)]
+        assert result['result'] == 0
diff --git a/legacy/tests/env_setup_test.py b/legacy/tests/env_setup_test.py
new file mode 100644
index 00000000..dea48190
--- /dev/null
+++ b/legacy/tests/env_setup_test.py
@@ -0,0 +1,120 @@
+##############################################################################
+# Copyright (c) 2016 ZTE and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import pytest
+import filecmp
+from qtip.utils.env_setup import Env_setup
+import mock
+
+
+DATA_DIR = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'data')
+
+
+def get_test_plan(name):
+    return os.path.join(DATA_DIR, 'testplan', name)
+
+
+def get_output(name):
+    return os.path.join(DATA_DIR, 'output', name)
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (get_test_plan("bm_with_proxy.yaml"),
+         ["dhrystone",
+          {},
+          [],
+          {'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}]),
+        (get_test_plan("bm_without_proxy.yaml"),
+         ["dhrystone",
+          {},
+          [],
+          {}]),
+        (get_test_plan("vm.yaml"),
+         ["iperf",
+          {'availability_zone': ['compute1', 'compute1'],
+           'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
+           'public_network': ['admin-floating_net', 'admin-floating_net'],
+           'flavor': ['m1.large', 'm1.large'],
+           'role': ['1-server', '2-host']},
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+          {'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}])])
+    def test_parse_success(self, test_input, expected):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        benchmark, vm_para, details, proxy = \
+            test_class.parse(test_input)
+        assert benchmark == expected[0]
+        assert vm_para == expected[1]
+        assert sorted(details) == sorted(expected[2])
+        assert proxy == expected[3]
+
+    def test_parse_vm_error(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        with pytest.raises(KeyError) as excinfo:
+            test_class.parse(get_test_plan("vm_error.yaml"))
+        assert "benchmark" in str(excinfo.value)
+
+    def test_update_ansible(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        test_class.parse(get_test_plan("bm_without_proxy.yaml"))
+        test_class.update_ansible()
+        result = filecmp.cmp(get_output("hosts"), "config/hosts")
+        assert result
+
+    @pytest.mark.skip("(yujunz) test hung")
+    def test_ping(self, capfd):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["127.0.0.1", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        test_class.parse(get_test_plan("bm_ping.yaml"))
+        test_class.call_ping_test()
+        resout, reserr = capfd.readouterr()
+        assert '127.0.0.1 is UP' in resout
+
+    def test_check_machine_ips_without_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "", "pw": "", "role": "host"}}
+        test_class.check_machine_ips(inputs)
+        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']
+
+    def test_check_machine_ips_with_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "10.20.0.28", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "10.20.0.29", "pw": "", "role": "host"}}
+        test_class.check_machine_ips(inputs)
+        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']
+
+    def test_check_machine_ips_with_invalid_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "10.20.0.3", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "10.20.0.4", "pw": "", "role": "host"}}
+        with pytest.raises(RuntimeError):
+            test_class.check_machine_ips(inputs)
diff --git a/legacy/tests/functional/__init__.py b/legacy/tests/functional/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/legacy/tests/functional/__init__.py
diff --git a/legacy/tests/functional/yaml_schema_test.py b/legacy/tests/functional/yaml_schema_test.py
new file mode 100644
index 00000000..a975dca6
--- /dev/null
+++ b/legacy/tests/functional/yaml_schema_test.py
@@ -0,0 +1,16 @@
+import os
+import os.path
+from pykwalify.core import Core
+
+
+class TestClass:
+    def test_schema_success(self):
+        for root, dirs, files in os.walk("test_cases"):
+            for name in files:
+                print root + "/" + name
+                if "_bm" in name:
+                    schema = "tests/schema/test_bm_schema.yaml"
+                if "_vm" in name:
+                    schema = "tests/schema/test_vm_schema.yaml"
+                c = Core(source_file=root + "/" + name, schema_files=[schema])
+                c.validate(raise_exception=True)
diff --git a/legacy/tests/helper/perftest.yaml b/legacy/tests/helper/perftest.yaml
new file mode 100644
index 00000000..26c58452
--- /dev/null
+++ b/legacy/tests/helper/perftest.yaml
@@ -0,0 +1,5 @@
+---
+
+  tests:
+    - command: ['perftest', 'run']
+      output: "Run a perftest\n"
diff --git a/legacy/tests/helper/suite.yaml b/legacy/tests/helper/suite.yaml
new file mode 100644
index 00000000..718ae440
--- /dev/null
+++ b/legacy/tests/helper/suite.yaml
@@ -0,0 +1,6 @@
+---
+
+  tests:
+    - command: ['suite', 'run']
+      output: "Run a suite\n"
+
diff --git a/legacy/tests/helper/version.yaml b/legacy/tests/helper/version.yaml
new file mode 100644
index 00000000..b23f16f2
--- /dev/null
+++ b/legacy/tests/helper/version.yaml
@@ -0,0 +1,12 @@
+---
+
+  tests:
+    - command: ['version', 'list']
+      output: "Lists all the different versions\n"
+
+    - command: ['version', 'install', 'Colorado']
+      output: "Install: Colorado\n"
+
+    - command: ['version', 'uninstall', 'Arno']
+      output: "Uninstall: Arno\n"
+
diff --git a/legacy/tests/spawn_vm_test.py b/legacy/tests/spawn_vm_test.py
new file mode 100644
index 00000000..ba237378
--- /dev/null
+++ b/legacy/tests/spawn_vm_test.py
@@ -0,0 +1,56 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.spawn_vm import SpawnVM
+
+
+class KeystoneMock(MagicMock):
+    auth_token = Mock()
+    v2_0 = Mock()
+
+
+class StackMock(MagicMock):
+    status = 'COMPLETE'
+    outputs = [{'output_key': 'availability_instance_1',
+                'output_value': 'output_value_1'},
+               {'output_key': 'instance_ip_1',
+                "output_value": "172.10.0.154"},
+               {"output_key": "instance_PIP_1",
+                "output_value": "10.10.17.5"}]
+
+
+class HeatMock(MagicMock):
+    def list(self):
+        return []
+
+    def get(self, stackname):
+        return StackMock()
+
+    def create(self, stack_name, template):
+        pass
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        ({'availability_zone': ['compute1', 'compute1'],
+          'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
+          'public_network': ['admin-floating_net', 'admin-floating_net'],
+          'flavor': ['m1.large', 'm1.large'],
+          'role': ['1-server', '2-host']},
+         [('172.10.0.154', '')]),
+    ])
+    @mock.patch('qtip.utils.spawn_vm.Env_setup')
+    @mock.patch('qtip.utils.spawn_vm.AvailabilityZone')
+    @mock.patch('qtip.utils.spawn_vm.keystoneclient.v2_0', autospec=True)
+    @mock.patch('qtip.utils.spawn_vm.heatclient.client', autospec=True)
+    def test_create_zones_success(self, mock_heat, mock_keystone,
+                                  mock_zone, mock_setup, test_input, expected):
+        open('./config/QtipKey.pub', 'a').close()
+        mock_heat.Client.return_value = Mock(stacks=HeatMock())
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'})
+        k.start()
+        SpawnVM(test_input)
+        k.stop()
+        os.remove('./config/QtipKey.pub')
+        mock_setup.ip_pw_list.append.assert_called_with(expected[0])
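A recurring idiom across these tests (cli_test.py, create_zones_test.py, spawn_vm_test.py) is patching `os.environ` with `mock.patch.dict` and bracketing the code under test with `start()`/`stop()`. For reference, here is a minimal sketch of that pattern; it is not part of the patch above, and the test name is hypothetical:

```python
import os

import mock  # the standalone mock package used above; unittest.mock on Python 3


def test_env_patch_pattern():
    # mock.patch.dict snapshots the original environment and restores it on
    # stop(), which is why the tests above can inject INSTALLER_TYPE,
    # OS_AUTH_URL, etc. without leaking state between test cases.
    patcher = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'})
    patcher.start()
    try:
        assert os.environ['INSTALLER_TYPE'] == 'fuel'
    finally:
        patcher.stop()  # restores os.environ even if the assertion fails
```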