aboutsummaryrefslogtreecommitdiffstats
path: root/tests/unit/utils
diff options
context:
space:
mode:
authorYujun Zhang <zhang.yujunz@zte.com.cn>2016-11-17 13:52:03 +0800
committerYujun Zhang <zhang.yujunz@zte.com.cn>2016-11-21 10:36:52 +0800
commit539405270b57a5ee7409a164a38b9fdb0b3624e7 (patch)
treec8f9a6fd5d61b8060802ec06bba5f9c94fe66bcb /tests/unit/utils
parentcaa171ac3796bbeacfdac0939713eedfad85e3c3 (diff)
Architecture evolution skeleton
- benchmarks will be driven by qtip.runner - qtip.runner is used by both qtip.cli and qtip.api - unit test for each module will be placed under tests/unit - functional tests will be moved to tests/functional - data as testing sample will be moved to tests/data NOTE: this patch moves files only, it may fail many tests. To be followed up in next step. JIRA: QTIP-148 Change-Id: I27e8169a74783970a1f7818456eb76a7311fb60c Signed-off-by: Yujun Zhang <zhang.yujunz@zte.com.cn>
Diffstat (limited to 'tests/unit/utils')
-rw-r--r--tests/unit/utils/__init__.py0
-rw-r--r--tests/unit/utils/ansible_api_test.py19
-rw-r--r--tests/unit/utils/args_handler_test.py35
-rw-r--r--tests/unit/utils/cli_test.py43
-rw-r--r--tests/unit/utils/create_zones_test.py109
-rw-r--r--tests/unit/utils/driver_test.py95
-rw-r--r--tests/unit/utils/env_setup_test.py106
-rw-r--r--tests/unit/utils/spawn_vm_test.py56
8 files changed, 463 insertions, 0 deletions
diff --git a/tests/unit/utils/__init__.py b/tests/unit/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/utils/__init__.py
diff --git a/tests/unit/utils/ansible_api_test.py b/tests/unit/utils/ansible_api_test.py
new file mode 100644
index 00000000..6b1afb44
--- /dev/null
+++ b/tests/unit/utils/ansible_api_test.py
@@ -0,0 +1,19 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from qtip.utils.ansible_api import AnsibleApi
+
+
class TestClass:
    """Unit tests for qtip.utils.ansible_api.AnsibleApi."""

    def test_call_ansible_api_success(self):
        """Running the sample playbook processes the expected host count."""
        api = AnsibleApi()
        result = api.execute_playbook(
            'tests/data/hosts',
            'tests/data/test.yml',
            'config/QtipKey',
            {'keys': 'test'})
        # Three hosts take part in the sample run.
        assert result == 3
diff --git a/tests/unit/utils/args_handler_test.py b/tests/unit/utils/args_handler_test.py
new file mode 100644
index 00000000..b67fd993
--- /dev/null
+++ b/tests/unit/utils/args_handler_test.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import pytest
+import mock
+import qtip.utils.args_handler
+
+
class TestClass:
    """Unit tests for qtip.utils.args_handler.prepare_and_run_benchmark."""

    @pytest.mark.parametrize("test_input, expected", [
        (['fuel', '/home', 'benchmarks/test_plan/default/network/iperf_bm.yaml'],
         ['fuel', '/home', "iperf",
          [('1-server', ['10.20.0.23']), ('2-host', ['10.20.0.24'])],
          "iperf_bm.yaml",
          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 10)],
          [("10.20.0.24", [None]), ("10.20.0.23", [None])], {}])
    ])
    # mock.patch decorators are applied bottom-up: the innermost patch
    # (Driver.drive_bench) maps to the FIRST mock argument, and so on.
    # The original parameter names mis-mapped the last three mocks
    # (ping/ssh/update_ansible were reversed) and misspelled "spawn".
    @mock.patch('qtip.utils.args_handler.Env_setup.call_ping_test')
    @mock.patch('qtip.utils.args_handler.Env_setup.call_ssh_test')
    @mock.patch('qtip.utils.args_handler.Env_setup.update_ansible')
    @mock.patch('qtip.utils.args_handler.SpawnVM')
    @mock.patch('qtip.utils.args_handler.Driver.drive_bench')
    def test_prepare_and_run_benchmark_successful(self, mock_driver, mock_spawn_vm,
                                                  mock_update_ansible, mock_ssh_test,
                                                  mock_ping_test, test_input, expected):
        """drive_bench must receive the arguments parsed from the test plan."""
        mock_ips = mock.Mock(return_value=["10.20.0.23", "10.20.0.24"])
        qtip.utils.args_handler.Env_setup.fetch_compute_ips = mock_ips
        qtip.utils.args_handler.prepare_and_run_benchmark(
            test_input[0], test_input[1], test_input[2])
        call_args, call_kwargs = mock_driver.call_args
        # Order of the parsed tuples is not guaranteed, so compare sorted.
        assert sorted(map(sorted, call_args)) == sorted(map(sorted, expected))
diff --git a/tests/unit/utils/cli_test.py b/tests/unit/utils/cli_test.py
new file mode 100644
index 00000000..86dd6b98
--- /dev/null
+++ b/tests/unit/utils/cli_test.py
@@ -0,0 +1,43 @@
+import pytest
+import mock
+import os
+from qtip.utils.cli import Cli
+from os.path import expanduser
+
+
class TestClass:
    """Unit tests for the qtip.utils.cli.Cli entry point."""

    @pytest.mark.parametrize("test_input, expected", [
        (['-l',
          'zte',
          '-f',
          'compute'], "You have specified a lab that is not present under benchmarks/test_plan"),
        (['-l',
          'default',
          '-f',
          'test'], "This suite file test doesn't exist under benchmarks/suite/")
    ])
    def test_cli_error(self, capfd, test_input, expected):
        """Invalid lab/suite options exit and log a helpful message."""
        # Use patch.dict as a context manager: Cli() raises SystemExit, so
        # the original start()/stop() pair never executed stop() and leaked
        # the patched environment into subsequent tests.
        with mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'}):
            with pytest.raises(SystemExit):
                Cli(test_input)
        # The error message is written to the CLI log file.
        with open(expanduser('~') + "/qtip/logs/cli.log", "r") as log_file:
            data = log_file.read()
        assert expected in data

    @pytest.mark.parametrize("test_input, expected", [
        (['-l',
          'default',
          '-f',
          'storage'], [('fuel', '/home', 'benchmarks/test_plan/default/storage/fio_bm.yaml'),
                       ('fuel', '/home', 'benchmarks/test_plan/default/storage/fio_vm.yaml')])
    ])
    @mock.patch('qtip.utils.cli.args_handler.prepare_and_run_benchmark')
    def test_cli_successful(self, mock_args_handler, test_input, expected):
        """Each benchmark in the suite is dispatched to args_handler."""
        with mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'}):
            Cli(test_input)
        call_list = [mock_args_handler.call_args_list[i][0]
                     for i in range(len(expected))]
        assert sorted(call_list) == sorted(expected)
diff --git a/tests/unit/utils/create_zones_test.py b/tests/unit/utils/create_zones_test.py
new file mode 100644
index 00000000..8b1e97cc
--- /dev/null
+++ b/tests/unit/utils/create_zones_test.py
@@ -0,0 +1,109 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.create_zones import AvailabilityZone
+
# Module-level fixture consumed by AggMock.list(); tests rebind it (via
# ``global``) before exercising clean_all_aggregates.
return_list = []


def get_agg_mock(host):
    """Build a mock aggregate whose ``name`` and ``id`` both equal *host*."""
    mock_agg = Mock()
    mock_agg.name = host
    mock_agg.id = host
    return mock_agg
+
+
class HyperMock(MagicMock):
    """Mock of the nova hypervisors manager: two fixed compute hosts."""

    def list(self):
        # Nova hypervisor objects expose their host via the ``service``
        # mapping; mimic two compute nodes.
        hosts = ('10.20.0.4', '10.20.0.5')
        return [Mock(service={'host': ip}) for ip in hosts]
+
+
class AggMock(MagicMock):
    """Mock of the nova aggregates manager.

    Each mutating call prints a trace line such as ``create:host:agg`` so
    tests can assert on captured stdout (capfd) instead of call lists.
    """

    # NOTE: converted from Python-2 ``print`` statements to the function
    # form, which behaves identically on Python 2 for a single argument
    # and is required on Python 3.
    def get_details(self, agg_id):
        print("get_details:{0}".format(agg_id))
        return Mock(hosts=[])

    def create(self, host, agg):
        print("create:{0}:{1}".format(host, agg))
        return agg

    def list(self):
        # Controlled by the module-level ``return_list`` fixture.
        return return_list

    def delete(self, agg_id):
        print("delete:{0}".format(agg_id))

    def add_host(self, aggregate, host):
        print("add_host:{0}:{1}".format(aggregate, host))

    def remove_host(self, agg_id, host):
        print("remove_host:{0}:{1}".format(agg_id, host))
+
+
class NovaMock(MagicMock):
    """Stand-in for a nova client: bundles the hypervisor/aggregate mocks."""
    # Child mocks of a MagicMock subclass share its class, so calling a
    # NovaMock instance yields another NovaMock with these managers.
    hypervisors = HyperMock()
    aggregates = AggMock()
+
+
class TestClass:
    """Unit tests for qtip.utils.create_zones.AvailabilityZone."""

    # OpenStack credentials AvailabilityZone reads when building a session.
    OS_ENV = {'OS_AUTH_URL': 'http://172.10.0.5:5000',
              'OS_USERNAME': 'admin',
              'OS_PASSWORD': 'admin',
              'OS_TENANT_NAME': 'admin'}

    @pytest.mark.parametrize("test_input, expected", [
        (['compute1', 'compute2'],
         ['create:compute1:compute1',
          'add_host:compute1:10.20.0.4',
          'create:compute2:compute2',
          'add_host:compute2:10.20.0.5']),
        (['compute1'],
         ['create:compute1:compute1',
          'add_host:compute1:10.20.0.4']),
    ])
    @mock.patch('qtip.utils.create_zones.client', autospec=True)
    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
    @mock.patch('qtip.utils.create_zones.session')
    def test_create_zones_success(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
        """Creating aggregates emits one create/add_host trace per zone."""
        nova_obj = NovaMock()
        mock_nova_client.Client.return_value = nova_obj()
        # patch.dict as a context manager restores os.environ even if the
        # code under test raises; the original start()/stop() pair leaked
        # the patch on failure.
        with mock.patch.dict(os.environ, self.OS_ENV):
            azone = AvailabilityZone()
            azone.create_aggs(test_input)
        resout, reserr = capfd.readouterr()
        for trace in expected:
            assert trace in resout

    @pytest.mark.parametrize("test_input, expected", [
        ([get_agg_mock('10.20.0.4'), get_agg_mock('10.20.0.5')],
         ['get_details:10.20.0.4',
          'delete:10.20.0.4',
          'get_details:10.20.0.5',
          'delete:10.20.0.5']),
        ([],
         []),
    ])
    @mock.patch('qtip.utils.create_zones.client', autospec=True)
    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
    @mock.patch('qtip.utils.create_zones.session')
    def test_clean_all_aggregates(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
        """Cleaning emits a get_details/delete trace for every aggregate."""
        # AggMock.list() reads the module-level return_list fixture.
        global return_list
        return_list = test_input
        nova_obj = NovaMock()
        mock_nova_client.Client.return_value = nova_obj()
        with mock.patch.dict(os.environ, self.OS_ENV):
            azone = AvailabilityZone()
            azone.clean_all_aggregates()
        resout, reserr = capfd.readouterr()
        for trace in expected:
            assert trace in resout
diff --git a/tests/unit/utils/driver_test.py b/tests/unit/utils/driver_test.py
new file mode 100644
index 00000000..432ce1ae
--- /dev/null
+++ b/tests/unit/utils/driver_test.py
@@ -0,0 +1,95 @@
+import pytest
+import mock
+from qtip.utils.driver import Driver
+from os.path import expanduser
+
HOME_DIR = expanduser('~')


class TestClass:
    """Unit tests for qtip.utils.driver.Driver.drive_bench."""

    @pytest.mark.parametrize("test_input, expected", [
        (['fuel',
          '/home',
          "iperf",
          [('host', ['10.20.0.13', '10.20.0.15'])],
          "iperf_bm.yaml",
          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
          [("10.20.0.13", [None]), ("10.20.0.15", [None])],
          {'http_proxy': 'http://10.20.0.1:8118',
           'https_proxy': 'http://10.20.0.1:8118',
           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}],
         [{'Dest_dir': HOME_DIR + '/qtip/results',
           'ip1': '',
           'ip2': '',
           'installer': 'fuel',
           'workingdir': '/home',
           'fname': 'iperf_bm.yaml',
           'username': 'root',
           'http_proxy': 'http://10.20.0.1:8118',
           'https_proxy': 'http://10.20.0.1:8118',
           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*',
           'duration': 20,
           'protocol': 'tcp',
           'bandwidthGbps': 0,
           "role": "host"}]),
        (['joid',
          '/home',
          "iperf",
          [('1-server', ['10.20.0.13']), ('2-host', ['10.20.0.15'])],
          "iperf_vm.yaml",
          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
          [('1-server', '10.10.17.4'), ('2-host', '10.10.17.5')],
          {}],
         [{'Dest_dir': HOME_DIR + '/qtip/results',
           'ip1': '10.20.0.13',
           'ip2': '',
           'installer': 'joid',
           'privateip1': '10.10.17.4',
           'workingdir': '/home',
           'fname': 'iperf_vm.yaml',
           'username': 'ubuntu',
           'duration': 20,
           'protocol': 'tcp',
           'bandwidthGbps': 0,
           "role": "1-server"},
          {'Dest_dir': HOME_DIR + '/qtip/results',
           'ip1': '10.20.0.13',
           'ip2': '',
           'installer': 'joid',
           'privateip1': '10.10.17.4',
           'workingdir': '/home',
           'fname': 'iperf_vm.yaml',
           'username': 'ubuntu',
           'duration': 20,
           'protocol': 'tcp',
           'bandwidthGbps': 0,
           "role": "2-host"}])
    ])
    @mock.patch('qtip.utils.driver.AnsibleApi.execute_playbook')
    @mock.patch('qtip.utils.driver.AnsibleApi.get_detail_playbook_stats')
    def test_driver_success(self, mock_stats, mock_ansible, test_input, expected):
        """Each playbook invocation receives its per-role variable dict."""
        mock_ansible.return_value = True
        # Playbook stats with zero failures, so drive_bench reports success.
        mock_stats.return_value = [(u'10.20.6.14', {'unreachable': 0,
                                                    'skipped': 13,
                                                    'ok': 27,
                                                    'changed': 26,
                                                    'failures': 0}),
                                   ('localhost', {'unreachable': 0,
                                                  'skipped': 0,
                                                  'ok': 6,
                                                  'changed': 6,
                                                  'failures': 0}),
                                   (u'10.20.6.13', {'unreachable': 0,
                                                    'skipped': 13,
                                                    'ok': 27,
                                                    'changed': 26,
                                                    'failures': 0})]
        dri = Driver()
        result = dri.drive_bench(test_input[0], test_input[1], test_input[2], test_input[3],
                                 test_input[4], test_input[5], test_input[6], test_input[7])
        # enumerate() pairs each recorded call with its expected dict
        # positionally; the original list.index(call) lookup was O(n^2)
        # and would map equal calls to the same (first) index.
        for idx, call in enumerate(mock_ansible.call_args_list):
            call_args, call_kwargs = call
            assert call_args[3] == expected[idx]
        assert result['result'] == 0
diff --git a/tests/unit/utils/env_setup_test.py b/tests/unit/utils/env_setup_test.py
new file mode 100644
index 00000000..e28c6a18
--- /dev/null
+++ b/tests/unit/utils/env_setup_test.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2016 ZTE and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import pytest
+import filecmp
+from qtip.utils.env_setup import Env_setup
+import mock
+
+
class TestClass:
    """Unit tests for qtip.utils.env_setup.Env_setup."""

    @staticmethod
    def _make_env_setup(ips=("10.20.0.28", "10.20.0.29")):
        """Return an Env_setup whose compute-IP discovery is stubbed to *ips*.

        Extracted because every test repeated the same three setup lines.
        """
        env = Env_setup()
        env.fetch_compute_ips = mock.Mock(return_value=list(ips))
        return env

    @pytest.mark.parametrize("test_input, expected", [
        ("tests/test_case/bm_with_proxy.yaml", ["dhrystone",
                                                {},
                                                [],
                                                {'http_proxy': 'http://10.20.0.1:8118',
                                                 'https_proxy': 'http://10.20.0.1:8118',
                                                 'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}]),
        ("tests/test_case/bm_without_proxy.yaml", ["dhrystone",
                                                   {},
                                                   [],
                                                   {}]),
        ("tests/test_case/vm.yaml", ["iperf",
                                     {'availability_zone': ['compute1', 'compute1'],
                                      'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
                                      'public_network': ['admin-floating_net', 'admin-floating_net'],
                                      'flavor': ['m1.large', 'm1.large'],
                                      'role': ['1-server', '2-host']},
                                     [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
                                     {'http_proxy': 'http://10.20.0.1:8118',
                                      'https_proxy': 'http://10.20.0.1:8118',
                                      'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}])
    ])
    def test_parse_success(self, test_input, expected):
        """parse() extracts benchmark name, VM spec, details and proxy."""
        test_class = self._make_env_setup()
        benchmark, vm_para, details, proxy = \
            test_class.parse(test_input)
        assert benchmark == expected[0]
        assert vm_para == expected[1]
        assert sorted(details) == sorted(expected[2])
        assert proxy == expected[3]

    def test_parse_vm_error(self):
        """A test case missing the benchmark key raises KeyError."""
        test_class = self._make_env_setup()
        with pytest.raises(KeyError) as excinfo:
            test_class.parse("tests/test_case/vm_error.yaml")
        assert "benchmark" in str(excinfo.value)

    def test_update_ansible(self):
        """update_ansible() writes the expected ansible hosts inventory."""
        test_class = self._make_env_setup()
        test_class.parse("tests/test_case/bm_without_proxy.yaml")
        test_class.update_ansible()
        assert filecmp.cmp('tests/output/hosts', 'config/hosts')

    def test_ping(self, capfd):
        """call_ping_test() reports a reachable machine as UP."""
        test_class = self._make_env_setup(["127.0.0.1", "10.20.0.29"])
        test_class.parse("tests/test_case/bm_ping.yaml")
        test_class.call_ping_test()
        resout, reserr = capfd.readouterr()
        assert '127.0.0.1 is UP' in resout

    def test_check_machine_ips_without_ip(self):
        """Blank IPs are filled from the discovered compute IPs, uniquely."""
        test_class = self._make_env_setup()
        inputs = {"machine_1": {"ip": "", "pw": "", "role": "host"},
                  "machine_2": {"ip": "", "pw": "", "role": "host"}}
        test_class.check_machine_ips(inputs)
        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']

    def test_check_machine_ips_with_ip(self):
        """IPs that match discovered compute IPs are accepted unchanged."""
        test_class = self._make_env_setup()
        inputs = {"machine_1": {"ip": "10.20.0.28", "pw": "", "role": "host"},
                  "machine_2": {"ip": "10.20.0.29", "pw": "", "role": "host"}}
        test_class.check_machine_ips(inputs)
        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']

    def test_check_machine_ips_with_invalid_ip(self):
        """IPs not present among the compute nodes raise RuntimeError."""
        test_class = self._make_env_setup()
        inputs = {"machine_1": {"ip": "10.20.0.3", "pw": "", "role": "host"},
                  "machine_2": {"ip": "10.20.0.4", "pw": "", "role": "host"}}
        with pytest.raises(RuntimeError):
            test_class.check_machine_ips(inputs)
diff --git a/tests/unit/utils/spawn_vm_test.py b/tests/unit/utils/spawn_vm_test.py
new file mode 100644
index 00000000..ba237378
--- /dev/null
+++ b/tests/unit/utils/spawn_vm_test.py
@@ -0,0 +1,56 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.spawn_vm import SpawnVM
+
+
class KeystoneMock(MagicMock):
    """Minimal keystone client mock exposing the attributes SpawnVM touches."""

    # Both attributes only need to exist; their values are never inspected.
    v2_0 = Mock()
    auth_token = Mock()
+
+
class StackMock(MagicMock):
    """Heat stack mock: a COMPLETE stack exposing one instance's outputs."""

    status = 'COMPLETE'
    # Availability zone, floating IP and private IP for instance 1, in the
    # heat-outputs shape SpawnVM parses.
    outputs = [
        {'output_key': 'availability_instance_1',
         'output_value': 'output_value_1'},
        {'output_key': 'instance_ip_1',
         'output_value': '172.10.0.154'},
        {'output_key': 'instance_PIP_1',
         'output_value': '10.10.17.5'},
    ]
+
+
class HeatMock(MagicMock):
    """Mock of the heat stacks manager used by SpawnVM."""

    def list(self):
        # No pre-existing stacks.
        return []

    def get(self, stackname):
        # Every lookup yields a freshly completed stack.
        return StackMock()

    def create(self, stack_name, template):
        # Stack creation is a no-op; ``get`` supplies the result.
        return None
+
+
class TestClass:
    """Unit tests for qtip.utils.spawn_vm.SpawnVM."""

    @pytest.mark.parametrize("test_input, expected", [
        ({'availability_zone': ['compute1', 'compute1'],
          'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
          'public_network': ['admin-floating_net', 'admin-floating_net'],
          'flavor': ['m1.large', 'm1.large'],
          'role': ['1-server', '2-host']},
         [('172.10.0.154', '')]),
    ])
    @mock.patch('qtip.utils.spawn_vm.Env_setup')
    @mock.patch('qtip.utils.spawn_vm.AvailabilityZone')
    @mock.patch('qtip.utils.spawn_vm.keystoneclient.v2_0', autospec=True)
    @mock.patch('qtip.utils.spawn_vm.heatclient.client', autospec=True)
    def test_create_zones_success(self, mock_heat, mock_keystone,
                                  mock_zone, mock_setup, test_input, expected):
        """SpawnVM records the floating IP of the spawned instance."""
        # SpawnVM reads the public key from disk, so provide an empty one.
        open('./config/QtipKey.pub', 'a').close()
        try:
            mock_heat.Client.return_value = Mock(stacks=HeatMock())
            # patch.dict as a context manager restores os.environ even if
            # SpawnVM raises; the original start()/stop() leaked the patch
            # and left the key file behind on failure.
            with mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'}):
                SpawnVM(test_input)
        finally:
            # Always remove the temporary key file, pass or fail.
            os.remove('./config/QtipKey.pub')
        mock_setup.ip_pw_list.append.assert_called_with(expected[0])