author     Yujun Zhang <zhang.yujunz@zte.com.cn>     2016-11-23 07:59:31 +0000
committer  Gerrit Code Review <gerrit@opnfv.org>     2016-11-23 07:59:31 +0000
commit     8bc628a0e6821984780ad05688eb6649e1733c65 (patch)
tree       b6f440a75e78866f8f2546deb163a2440a93ec4c /tests/unit
parent     07ee28b71118469f3946a009a126b7c21267173e (diff)
parent     539405270b57a5ee7409a164a38b9fdb0b3624e7 (diff)
Merge "Architecture evolution skeleton"
Diffstat (limited to 'tests/unit')
-rw-r--r--  tests/unit/__init__.py                  0
-rw-r--r--  tests/unit/api/__init__.py              0
-rw-r--r--  tests/unit/api/qtip_server_test.py    117
-rw-r--r--  tests/unit/cli/__init__.py              0
-rw-r--r--  tests/unit/runner/__init__.py           0
-rw-r--r--  tests/unit/runner/perftest_test.py     13
-rw-r--r--  tests/unit/runner/suite_test.py        13
-rw-r--r--  tests/unit/runner/test_plan_test.py    13
-rw-r--r--  tests/unit/utils/__init__.py            0
-rw-r--r--  tests/unit/utils/ansible_api_test.py   19
-rw-r--r--  tests/unit/utils/args_handler_test.py  35
-rw-r--r--  tests/unit/utils/cli_test.py           43
-rw-r--r--  tests/unit/utils/create_zones_test.py 109
-rw-r--r--  tests/unit/utils/driver_test.py        95
-rw-r--r--  tests/unit/utils/env_setup_test.py    106
-rw-r--r--  tests/unit/utils/spawn_vm_test.py      56
16 files changed, 619 insertions(+), 0 deletions(-)
diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/__init__.py
diff --git a/tests/unit/api/__init__.py b/tests/unit/api/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/api/__init__.py
diff --git a/tests/unit/api/qtip_server_test.py b/tests/unit/api/qtip_server_test.py
new file mode 100644
index 00000000..96544c95
--- /dev/null
+++ b/tests/unit/api/qtip_server_test.py
@@ -0,0 +1,117 @@
+import qtip.api.qtip_server as server
+import pytest
+import json
+import mock
+import time
+
+
+@pytest.fixture
+def app():
+    return server.app
+
+
+@pytest.fixture
+def app_client(app):
+    client = app.test_client()
+    return client
+
+
+def side_effect_sleep(sleep_time):
+    time.sleep(sleep_time)
+
+
+def side_effect_pass():
+    pass
+
+
+class TestClass:
+    @pytest.mark.parametrize("body, expected", [
+        ({'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2'},
+         {'job_id': '',
+          'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'suite_name': 'compute',
+          'max_minutes': 60,
+          'type': 'BM',
+          'testdb_url': None,
+          'node_name': None,
+          'state': 'finished',
+          'state_detail': [{'state': 'finished', 'benchmark': 'dhrystone_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'whetstone_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'ramspeed_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'dpi_bm.yaml'},
+                           {'state': 'finished', 'benchmark': 'ssl_bm.yaml'}],
+          'result': 0}),
+        ({'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'max_minutes': 20,
+          'suite_name': 'compute',
+          'type': 'VM',
+          'benchmark_name': 'dhrystone_vm.yaml',
+          'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
+          'node_name': 'zte-pod2'},
+         {'job_id': '',
+          'installer_type': 'fuel',
+          'installer_ip': '10.20.0.2',
+          'pod_name': 'default',
+          'suite_name': 'compute',
+          'max_minutes': 20,
+          'type': 'VM',
+          'testdb_url': 'http://testresults.opnfv.org/test/api/v1',
+          'node_name': 'zte-pod2',
+          'state': 'finished',
+          'state_detail': [{u'state': u'finished', u'benchmark': u'dhrystone_vm.yaml'}],
+          'result': 0})
+    ])
+    @mock.patch('qtip.api.qtip_server.args_handler.prepare_and_run_benchmark')
+    def test_post_get_delete_job_successful(self, mock_args_handler, app_client, body, expected):
+        mock_args_handler.return_value = {'result': 0,
+                                          'detail': {'host': [(u'10.20.6.14', {'unreachable': 0,
+                                                                               'skipped': 13,
+                                                                               'ok': 27,
+                                                                               'changed': 26,
+                                                                               'failures': 0}),
+                                                              ('localhost', {'unreachable': 0,
+                                                                             'skipped': 0,
+                                                                             'ok': 6,
+                                                                             'changed': 6,
+                                                                             'failures': 0}),
+                                                              (u'10.20.6.13', {'unreachable': 0,
+                                                                               'skipped': 13,
+                                                                               'ok': 27,
+                                                                               'changed': 26,
+                                                                               'failures': 0})]}}
+
+        reply = app_client.post("/api/v1.0/jobs", data=body)
+        print(reply.data)
+        id = json.loads(reply.data)['job_id']
+        expected['job_id'] = id
+        post_process = ''
+        while post_process != 'finished':
+            get_reply = app_client.get("/api/v1.0/jobs/%s" % id)
+            reply_data = json.loads(get_reply.data)
+            post_process = reply_data['state']
+            print(reply_data)
+        assert len(filter(lambda x: reply_data[x] == expected[x], expected.keys())) == len(expected)
+        delete_reply = app_client.delete("/api/v1.0/jobs/%s" % id)
+        assert "successful" in delete_reply.data
+
+    @pytest.mark.parametrize("body, expected", [
+        ([{'installer_type': 'fuel',
+           'installer_ip': '10.20.0.2'},
+          {'installer_type': 'compass',
+           'installer_ip': '192.168.20.50'}],
+         ['job_id',
+          'It already has one job running now!'])
+    ])
+    @mock.patch('qtip.api.qtip_server.args_handler.prepare_and_run_benchmark',
+                side_effect=[side_effect_sleep(0.5), side_effect_pass])
+    def test_post_two_jobs_unsuccessful(self, mock_args_handler, app_client, body, expected):
+        reply_1 = app_client.post("/api/v1.0/jobs", data=body[0])
+        reply_2 = app_client.post("/api/v1.0/jobs", data=body[1])
+        assert expected[0] in json.loads(reply_1.data).keys()
+        app_client.delete("/api/v1.0/jobs/%s" % json.loads(reply_1.data)['job_id'])
+        assert expected[1] in json.dumps(reply_2.data)
diff --git a/tests/unit/cli/__init__.py b/tests/unit/cli/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/cli/__init__.py
diff --git a/tests/unit/runner/__init__.py b/tests/unit/runner/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/runner/__init__.py
diff --git a/tests/unit/runner/perftest_test.py b/tests/unit/runner/perftest_test.py
new file mode 100644
index 00000000..798afadd
--- /dev/null
+++ b/tests/unit/runner/perftest_test.py
@@ -0,0 +1,13 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class TestPerfTest:
+    def test_list(self):
+        assert True
diff --git a/tests/unit/runner/suite_test.py b/tests/unit/runner/suite_test.py
new file mode 100644
index 00000000..a2023cf8
--- /dev/null
+++ b/tests/unit/runner/suite_test.py
@@ -0,0 +1,13 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class TestSuite:
+    def test_list(self):
+        assert True
diff --git a/tests/unit/runner/test_plan_test.py b/tests/unit/runner/test_plan_test.py
new file mode 100644
index 00000000..81f618c7
--- /dev/null
+++ b/tests/unit/runner/test_plan_test.py
@@ -0,0 +1,13 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class TestTestPlan:
+    def test_list(self):
+        assert True
diff --git a/tests/unit/utils/__init__.py b/tests/unit/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/utils/__init__.py
diff --git a/tests/unit/utils/ansible_api_test.py b/tests/unit/utils/ansible_api_test.py
new file mode 100644
index 00000000..6b1afb44
--- /dev/null
+++ b/tests/unit/utils/ansible_api_test.py
@@ -0,0 +1,19 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from qtip.utils.ansible_api import AnsibleApi
+
+
+class TestClass:
+    def test_call_ansible_api_success(self):
+        ansible_api = AnsibleApi()
+        ret = ansible_api.execute_playbook('tests/data/hosts',
+                                           'tests/data/test.yml',
+                                           'config/QtipKey',
+                                           {'keys': 'test'})
+        assert ret == 3
diff --git a/tests/unit/utils/args_handler_test.py b/tests/unit/utils/args_handler_test.py
new file mode 100644
index 00000000..b67fd993
--- /dev/null
+++ b/tests/unit/utils/args_handler_test.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2016 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import pytest
+import mock
+import qtip.utils.args_handler
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['fuel', '/home', 'benchmarks/test_plan/default/network/iperf_bm.yaml'],
+         ['fuel', '/home', "iperf",
+          [('1-server', ['10.20.0.23']), ('2-host', ['10.20.0.24'])],
+          "iperf_bm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 10)],
+          [("10.20.0.24", [None]), ("10.20.0.23", [None])], {}])
+    ])
+    @mock.patch('qtip.utils.args_handler.Env_setup.call_ping_test')
+    @mock.patch('qtip.utils.args_handler.Env_setup.call_ssh_test')
+    @mock.patch('qtip.utils.args_handler.Env_setup.update_ansible')
+    @mock.patch('qtip.utils.args_handler.SpawnVM')
+    @mock.patch('qtip.utils.args_handler.Driver.drive_bench')
+    def test_prepare_and_run_benchmark_successful(self, mock_driver, mock_spawn_vm, mock_env_setup_ping,
+                                                  mock_env_setup_ssh, mock_update_ansible, test_input, expected):
+        mock_ips = mock.Mock(return_value=["10.20.0.23", "10.20.0.24"])
+        qtip.utils.args_handler.Env_setup.fetch_compute_ips = mock_ips
+        qtip.utils.args_handler.prepare_and_run_benchmark(test_input[0], test_input[1], test_input[2])
+        call = mock_driver.call_args
+        call_args, call_kwargs = call
+        assert sorted(map(sorted, call_args)) == sorted(map(sorted, expected))
diff --git a/tests/unit/utils/cli_test.py b/tests/unit/utils/cli_test.py
new file mode 100644
index 00000000..86dd6b98
--- /dev/null
+++ b/tests/unit/utils/cli_test.py
@@ -0,0 +1,43 @@
+import pytest
+import mock
+import os
+from qtip.utils.cli import Cli
+from os.path import expanduser
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['-l',
+          'zte',
+          '-f',
+          'compute'], "You have specified a lab that is not present under benchmarks/test_plan"),
+        (['-l',
+          'default',
+          '-f',
+          'test'], "This suite file test doesn't exist under benchmarks/suite/")
+    ])
+    def test_cli_error(self, capfd, test_input, expected):
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'})
+        with pytest.raises(SystemExit):
+            k.start()
+            Cli(test_input)
+        k.stop()
+        with open(expanduser('~') + "/qtip/logs/cli.log", "r") as file:
+            data = file.read()
+        assert expected in data
+
+    @pytest.mark.parametrize("test_input, expected", [
+        (['-l',
+          'default',
+          '-f',
+          'storage'], [('fuel', '/home', 'benchmarks/test_plan/default/storage/fio_bm.yaml'),
+                       ('fuel', '/home', 'benchmarks/test_plan/default/storage/fio_vm.yaml')])
+    ])
+    @mock.patch('qtip.utils.cli.args_handler.prepare_and_run_benchmark')
+    def test_cli_successful(self, mock_args_handler, test_input, expected):
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel', 'PWD': '/home'})
+        k.start()
+        Cli(test_input)
+        k.stop()
+        call_list = map(lambda x: mock_args_handler.call_args_list[x][0], range(len(expected)))
+        assert sorted(call_list) == sorted(expected)
diff --git a/tests/unit/utils/create_zones_test.py b/tests/unit/utils/create_zones_test.py
new file mode 100644
index 00000000..8b1e97cc
--- /dev/null
+++ b/tests/unit/utils/create_zones_test.py
@@ -0,0 +1,109 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.create_zones import AvailabilityZone
+
+return_list = []
+
+
+def get_agg_mock(host):
+    agg = Mock()
+    agg.name = host
+    agg.id = host
+    return agg
+
+
+class HyperMock(MagicMock):
+    def list(self):
+        mock_hypervisor = [Mock(service={'host': '10.20.0.4'}), Mock(service={'host': '10.20.0.5'})]
+        return mock_hypervisor
+
+
+class AggMock(MagicMock):
+    def get_details(self, agg_id):
+        print "get_details:{0}".format(agg_id)
+        return Mock(hosts=[])
+
+    def create(self, host, agg):
+        print "create:{0}:{1}".format(host, agg)
+        return agg
+
+    def list(self):
+        return return_list
+
+    def delete(self, agg_id):
+        print "delete:{0}".format(agg_id)
+        pass
+
+    def add_host(self, aggregate, host):
+        print "add_host:{0}:{1}".format(aggregate, host)
+        pass
+
+    def remove_host(self, agg_id, host):
+        print "remove_host:{0}:{1}".format(agg_id, host)
+        pass
+
+
+class NovaMock(MagicMock):
+    hypervisors = HyperMock()
+    aggregates = AggMock()
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['compute1', 'compute2'],
+         ['create:compute1:compute1',
+          'add_host:compute1:10.20.0.4',
+          'create:compute2:compute2',
+          'add_host:compute2:10.20.0.5']),
+        (['compute1'],
+         ['create:compute1:compute1',
+          'add_host:compute1:10.20.0.4']),
+    ])
+    @mock.patch('qtip.utils.create_zones.client', autospec=True)
+    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
+    @mock.patch('qtip.utils.create_zones.session')
+    def test_create_zones_success(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
+        nova_obj = NovaMock()
+        mock_nova_client.Client.return_value = nova_obj()
+        k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
+                                         'OS_USERNAME': 'admin',
+                                         'OS_PASSWORD': 'admin',
+                                         'OS_TENANT_NAME': 'admin'})
+        k.start()
+        azone = AvailabilityZone()
+        azone.create_aggs(test_input)
+        k.stop()
+        resout, reserr = capfd.readouterr()
+        for x in expected:
+            assert x in resout
+
+    @pytest.mark.parametrize("test_input, expected", [
+        ([get_agg_mock('10.20.0.4'), get_agg_mock('10.20.0.5')],
+         ['get_details:10.20.0.4',
+          'delete:10.20.0.4',
+          'get_details:10.20.0.5',
+          'delete:10.20.0.5']),
+        ([],
+         []),
+    ])
+    @mock.patch('qtip.utils.create_zones.client', autospec=True)
+    @mock.patch('qtip.utils.create_zones.v2', autospec=True)
+    @mock.patch('qtip.utils.create_zones.session')
+    def test_clean_all_aggregates(self, mock_keystone_session, mock_keystone_v2, mock_nova_client, test_input, expected, capfd):
+        global return_list
+        return_list = test_input
+        nova_obj = NovaMock()
+        mock_nova_client.Client.return_value = nova_obj()
+        k = mock.patch.dict(os.environ, {'OS_AUTH_URL': 'http://172.10.0.5:5000',
+                                         'OS_USERNAME': 'admin',
+                                         'OS_PASSWORD': 'admin',
+                                         'OS_TENANT_NAME': 'admin'})
+        k.start()
+        azone = AvailabilityZone()
+        azone.clean_all_aggregates()
+        k.stop()
+        resout, reserr = capfd.readouterr()
+        for x in expected:
+            assert x in resout
diff --git a/tests/unit/utils/driver_test.py b/tests/unit/utils/driver_test.py
new file mode 100644
index 00000000..432ce1ae
--- /dev/null
+++ b/tests/unit/utils/driver_test.py
@@ -0,0 +1,95 @@
+import pytest
+import mock
+from qtip.utils.driver import Driver
+from os.path import expanduser
+
+HOME_DIR = expanduser('~')
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        (['fuel',
+          '/home',
+          "iperf",
+          [('host', ['10.20.0.13', '10.20.0.15'])],
+          "iperf_bm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+          [("10.20.0.13", [None]), ("10.20.0.15", [None])],
+          {'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}],
+         [{'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '',
+           'ip2': '',
+           'installer': 'fuel',
+           'workingdir': '/home',
+           'fname': 'iperf_bm.yaml',
+           'username': 'root',
+           'http_proxy': 'http://10.20.0.1:8118',
+           'https_proxy': 'http://10.20.0.1:8118',
+           'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "host"}]),
+        (['joid',
+          '/home',
+          "iperf",
+          [('1-server', ['10.20.0.13']), ('2-host', ['10.20.0.15'])],
+          "iperf_vm.yaml",
+          [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+          [('1-server', '10.10.17.4'), ('2-host', '10.10.17.5')],
+          {}],
+         [{'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '10.20.0.13',
+           'ip2': '',
+           'installer': 'joid',
+           'privateip1': '10.10.17.4',
+           'workingdir': '/home',
+           'fname': 'iperf_vm.yaml',
+           'username': 'ubuntu',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "1-server"},
+          {'Dest_dir': HOME_DIR + '/qtip/results',
+           'ip1': '10.20.0.13',
+           'ip2': '',
+           'installer': 'joid',
+           'privateip1': '10.10.17.4',
+           'workingdir': '/home',
+           'fname': 'iperf_vm.yaml',
+           'username': 'ubuntu',
+           'duration': 20,
+           'protocol': 'tcp',
+           'bandwidthGbps': 0,
+           "role": "2-host"}])
+    ])
+    @mock.patch('qtip.utils.driver.AnsibleApi.execute_playbook')
+    @mock.patch('qtip.utils.driver.AnsibleApi.get_detail_playbook_stats')
+    def test_driver_success(self, mock_stats, mock_ansible, test_input, expected):
+        mock_ansible.return_value = True
+        mock_stats.return_value = [(u'10.20.6.14', {'unreachable': 0,
+                                                    'skipped': 13,
+                                                    'ok': 27,
+                                                    'changed': 26,
+                                                    'failures': 0}),
+                                   ('localhost', {'unreachable': 0,
+                                                  'skipped': 0,
+                                                  'ok': 6,
+                                                  'changed': 6,
+                                                  'failures': 0}),
+                                   (u'10.20.6.13', {'unreachable': 0,
+                                                    'skipped': 13,
+                                                    'ok': 27,
+                                                    'changed': 26,
+                                                    'failures': 0})]
+        dri = Driver()
+        result = dri.drive_bench(test_input[0], test_input[1], test_input[2], test_input[3],
+                                 test_input[4], test_input[5], test_input[6], test_input[7])
+        call_list = mock_ansible.call_args_list
+        for call in call_list:
+            call_args, call_kwargs = call
+            real_call = call_args[3]
+            assert real_call == expected[call_list.index(call)]
+        assert result['result'] == 0
diff --git a/tests/unit/utils/env_setup_test.py b/tests/unit/utils/env_setup_test.py
new file mode 100644
index 00000000..e28c6a18
--- /dev/null
+++ b/tests/unit/utils/env_setup_test.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2016 ZTE and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import pytest
+import filecmp
+from qtip.utils.env_setup import Env_setup
+import mock
+
+
+class TestClass:
+
+    @pytest.mark.parametrize("test_input, expected", [
+        ("tests/test_case/bm_with_proxy.yaml", ["dhrystone",
+                                                {},
+                                                [],
+                                                {'http_proxy': 'http://10.20.0.1:8118',
+                                                 'https_proxy': 'http://10.20.0.1:8118',
+                                                 'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}]),
+        ("tests/test_case/bm_without_proxy.yaml", ["dhrystone",
+                                                   {},
+                                                   [],
+                                                   {}]),
+        ("tests/test_case/vm.yaml", ["iperf",
+                                     {'availability_zone': ['compute1', 'compute1'],
+                                      'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
+                                      'public_network': ['admin-floating_net', 'admin-floating_net'],
+                                      'flavor': ['m1.large', 'm1.large'],
+                                      'role': ['1-server', '2-host']},
+                                     [('duration', 20), ('protocol', 'tcp'), ('bandwidthGbps', 0)],
+                                     {'http_proxy': 'http://10.20.0.1:8118',
+                                      'https_proxy': 'http://10.20.0.1:8118',
+                                      'no_proxy': 'localhost,127.0.0.1,10.20.*,192.168.*'}])
+    ])
+    def test_parse_success(self, test_input, expected):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        benchmark, vm_para, details, proxy = \
+            test_class.parse(test_input)
+        assert benchmark == expected[0]
+        assert vm_para == expected[1]
+        assert sorted(details) == sorted(expected[2])
+        assert proxy == expected[3]
+
+    def test_parse_vm_error(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        with pytest.raises(KeyError) as excinfo:
+            test_class.parse("tests/test_case/vm_error.yaml")
+        assert "benchmark" in str(excinfo.value)
+
+    def test_update_ansible(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        test_class.parse("tests/test_case/bm_without_proxy.yaml")
+        test_class.update_ansible()
+        result = filecmp.cmp('tests/output/hosts', 'config/hosts')
+        assert result
+
+    def test_ping(self, capfd):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["127.0.0.1", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        test_class.parse("tests/test_case/bm_ping.yaml")
+        test_class.call_ping_test()
+        resout, reserr = capfd.readouterr()
+        assert '127.0.0.1 is UP' in resout
+
+    def test_check_machine_ips_without_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "", "pw": "", "role": "host"}}
+        test_class.check_machine_ips(inputs)
+        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']
+
+    def test_check_machine_ips_with_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "10.20.0.28", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "10.20.0.29", "pw": "", "role": "host"}}
+        test_class.check_machine_ips(inputs)
+        assert inputs["machine_1"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_2"]['ip'] in ["10.20.0.28", "10.20.0.29"]
+        assert inputs["machine_1"]['ip'] != inputs["machine_2"]['ip']
+
+    def test_check_machine_ips_with_invalid_ip(self):
+        test_class = Env_setup()
+        mock_ips = mock.Mock(return_value=["10.20.0.28", "10.20.0.29"])
+        test_class.fetch_compute_ips = mock_ips
+        inputs = {"machine_1": {"ip": "10.20.0.3", "pw": "", "role": "host"},
+                  "machine_2": {"ip": "10.20.0.4", "pw": "", "role": "host"}}
+        with pytest.raises(RuntimeError):
+            test_class.check_machine_ips(inputs)
diff --git a/tests/unit/utils/spawn_vm_test.py b/tests/unit/utils/spawn_vm_test.py
new file mode 100644
index 00000000..ba237378
--- /dev/null
+++ b/tests/unit/utils/spawn_vm_test.py
@@ -0,0 +1,56 @@
+import pytest
+import mock
+from mock import Mock, MagicMock
+import os
+from qtip.utils.spawn_vm import SpawnVM
+
+
+class KeystoneMock(MagicMock):
+    auth_token = Mock()
+    v2_0 = Mock()
+
+
+class StackMock(MagicMock):
+    status = 'COMPLETE'
+    outputs = [{'output_key': 'availability_instance_1',
+                'output_value': 'output_value_1'},
+               {'output_key': 'instance_ip_1',
+                "output_value": "172.10.0.154"},
+               {"output_key": "instance_PIP_1",
+                "output_value": "10.10.17.5"}]
+
+
+class HeatMock(MagicMock):
+    def list(self):
+        return []
+
+    def get(self, stackname):
+        return StackMock()
+
+    def create(self, stack_name, template):
+        pass
+
+
+class TestClass:
+    @pytest.mark.parametrize("test_input, expected", [
+        ({'availability_zone': ['compute1', 'compute1'],
+          'OS_image': ['QTIP_CentOS', 'QTIP_CentOS'],
+          'public_network': ['admin-floating_net', 'admin-floating_net'],
+          'flavor': ['m1.large', 'm1.large'],
+          'role': ['1-server', '2-host']},
+         [('172.10.0.154', '')]),
+    ])
+    @mock.patch('qtip.utils.spawn_vm.Env_setup')
+    @mock.patch('qtip.utils.spawn_vm.AvailabilityZone')
+    @mock.patch('qtip.utils.spawn_vm.keystoneclient.v2_0', autospec=True)
+    @mock.patch('qtip.utils.spawn_vm.heatclient.client', autospec=True)
+    def test_create_zones_success(self, mock_heat, mock_keystone,
+                                  mock_zone, mock_setup, test_input, expected):
+        open('./config/QtipKey.pub', 'a').close()
+        mock_heat.Client.return_value = Mock(stacks=HeatMock())
+        k = mock.patch.dict(os.environ, {'INSTALLER_TYPE': 'fuel'})
+        k.start()
+        SpawnVM(test_input)
+        k.stop()
+        os.remove('./config/QtipKey.pub')
+        mock_setup.ip_pw_list.append.assert_called_with(expected[0])