Diffstat (limited to 'tests')
-rw-r--r--  tests/data/daisy_conf/daisyrc_admin |   1
-rw-r--r--  tests/unit/daisyclient_stub.py      | 164
-rw-r--r--  tests/unit/test_daisy_server.py     |  24
-rw-r--r--  tests/unit/test_deploy.py           |  24
-rw-r--r--  tests/unit/test_environment.py      |  16
-rw-r--r--  tests/unit/test_tempest.py          | 324
6 files changed, 527 insertions(+), 26 deletions(-)
diff --git a/tests/data/daisy_conf/daisyrc_admin b/tests/data/daisy_conf/daisyrc_admin
new file mode 100644
index 00000000..7909a6a4
--- /dev/null
+++ b/tests/data/daisy_conf/daisyrc_admin
@@ -0,0 +1 @@
+export OS_ENDPOINT=http://10.20.11.2:19292
\ No newline at end of file
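
The one-line fixture above backs test_get_endpoint further down in this series, which expects get_endpoint() to hand back the exported URL. A minimal sketch of that kind of parsing, assuming get_endpoint() simply scans the daisyrc file for the OS_ENDPOINT export (read_endpoint below is illustrative, not the deploy.tempest implementation):

    def read_endpoint(daisyrc_path):
        # Look for a line such as "export OS_ENDPOINT=http://10.20.11.2:19292"
        # and return everything after the first '='.
        with open(daisyrc_path) as f:
            for line in f:
                line = line.strip()
                if line.startswith('export OS_ENDPOINT='):
                    return line.split('=', 1)[1]
        return None

    # read_endpoint('tests/data/daisy_conf/daisyrc_admin')
    # -> 'http://10.20.11.2:19292'
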
diff --git a/tests/unit/daisyclient_stub.py b/tests/unit/daisyclient_stub.py
new file mode 100644
index 00000000..59540daf
--- /dev/null
+++ b/tests/unit/daisyclient_stub.py
@@ -0,0 +1,164 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+
+class StubTestInstall():
+ def __init__(self):
+ pass
+
+ def install(self, **cluster_meta):
+ self.cluster_meta = cluster_meta
+
+
+class StubTestHost():
+ def __init__(self, id, name, cluster_id, interfaces):
+ self.id = id
+ self.name = name
+ self.cluster_id = cluster_id
+ self.interfaces = interfaces
+ self.metadata = None
+
+ def to_dict(self):
+ return {'id': self.id, 'name': self.name, 'cluster_id': self.cluster_id,
+ 'interfaces': self.interfaces}
+
+
+class StubTestHosts():
+ def __init__(self):
+ self.hosts = []
+
+ def add(self, host):
+ self.hosts.append(host)
+
+ def get(self, id):
+ for host in self.hosts:
+ if host.id == id:
+ return host
+ return None
+
+ def list(self):
+ return self.hosts
+
+ def update(self, host_id, **metadata):
+ for host in self.hosts:
+ if host.id == host_id:
+ host.metadata = metadata
+
+
+class StubTestCluster():
+ def __init__(self, id, name):
+ self.id = id
+ self.name = name
+
+
+class StubTestClusters():
+ def __init__(self):
+ self.clusters = []
+
+ def add(self, cluster):
+ self.clusters.append(cluster)
+
+ def get(self, id):
+ for cluster in self.clusters:
+ if cluster.id == id:
+ return cluster.name
+ return None
+
+ def list(self):
+ return self.clusters
+
+
+class StubTestNet():
+ def __init__(self, id, name, cluster_id, **metadata):
+ self.id = id
+ self.name = name
+ self.cluster_id = cluster_id
+ self.metadata = metadata
+
+
+class StubTestNetworks():
+ def __init__(self):
+ self.networks = []
+
+ def add(self, net):
+ self.networks.append(net)
+
+ def list(self, **filter):
+ networks = []
+ if filter:
+ filter_item = filter.get('filters')
+ for net in self.networks:
+ cluster_id_is_match = False
+ if filter_item.get('cluster_id'):
+ if filter_item.get('cluster_id') == net.cluster_id:
+ cluster_id_is_match = True
+ else:
+ cluster_id_is_match = True
+ if cluster_id_is_match is True:
+ networks.append(net)
+ return networks
+
+ def update(self, network_id, **network_meta):
+ for net in self.networks:
+ if net.id == network_id:
+ net.metadata = network_meta
+
+
+class StubTestRole():
+ def __init__(self, id, name, cluster_id):
+ self.id = id
+ self.name = name
+ self.cluster_id = cluster_id
+ self.metadata = None
+
+
+class StubTestRoles():
+ def __init__(self):
+ self.roles = []
+
+ def add(self, role):
+ self.roles.append(role)
+
+ def list(self, **filter):
+ roles = []
+ if filter:
+ filter_item = filter.get('filters')
+ for role in self.roles:
+ cluster_id_is_match = False
+ if filter_item.get('cluster_id'):
+ if filter_item.get('cluster_id') == role.cluster_id:
+ cluster_id_is_match = True
+ else:
+ cluster_id_is_match = True
+ if cluster_id_is_match is True:
+ roles.append(role)
+ return roles
+
+ def update(self, role_id, **meta):
+ for role in self.roles:
+ if role.id == role_id:
+ role.metadata = meta
+
+
+class StubTestDisks():
+ def __init__(self):
+ self.disks = []
+
+ def service_disk_add(self, **metadata):
+ self.disks.append(metadata)
+
+
+class StubTestClient():
+ def __init__(self):
+ self.install = StubTestInstall()
+ self.hosts = StubTestHosts()
+ self.networks = StubTestNetworks()
+ self.clusters = StubTestClusters()
+ self.roles = StubTestRoles()
+ self.disk_array = StubTestDisks()
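
Taken together, the classes above fake just enough of the python-daisyclient surface (hosts, clusters, networks, roles, disk_array) for the new tempest tests to drive deploy.tempest without a live Daisy server. A short usage sketch built only from the stubs defined above:

    from tests.unit.daisyclient_stub import StubTestClient, StubTestHost, StubTestNet

    client = StubTestClient()
    client.hosts.add(StubTestHost(0x1, 'controller01', 1,
                                  [{'name': 'ens3', 'mac': '11:22:33:44:55:66'}]))
    client.networks.add(StubTestNet(0x2, 'MANAGEMENT', 1, cidr='10.20.11.0/24'))

    # list() honours the daisyclient-style 'filters' keyword argument
    nets = client.networks.list(filters={'cluster_id': 1})
    assert nets[0].name == 'MANAGEMENT'

    # update() only records the metadata, so tests can assert on it later
    client.hosts.update(0x1, cluster=1, os_version='/some/path/test_os.iso')
    assert client.hosts.get(0x1).metadata['cluster'] == 1
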
diff --git a/tests/unit/test_daisy_server.py b/tests/unit/test_daisy_server.py
index 65282e82..ea9c495c 100644
--- a/tests/unit/test_daisy_server.py
+++ b/tests/unit/test_daisy_server.py
@@ -49,8 +49,8 @@ data3 = get_ssh_test_command_from_file(ssh_test_file_dir(), 'ssh_stream_data3.tx
(data3, res1, expected_ret1),
(data3, res2, expected_ret1)])
def test_log_from_stream(data, res, expected):
- def log_func(str):
- print str
+ def log_func(msg):
+ print(msg)
pre_val = daisy_server.BLOCK_SIZE
daisy_server.BLOCK_SIZE = 16
ret = log_from_stream(res, data, log_func)
@@ -448,8 +448,9 @@ def test_install_daisy_DaisyServer(mock_prepare_files, mock_ssh_run, tmpdir):
@pytest.mark.parametrize('adapter', [
('libvirt'), ('ipmi')])
+@mock.patch.object(daisy_server.DaisyServer, 'scp_put')
@mock.patch.object(daisy_server.DaisyServer, 'ssh_run')
-def test_prepare_configurations_DaisyServer(mock_ssh_run, adapter, tmpdir):
+def test_prepare_configurations_DaisyServer(mock_ssh_run, mock_scp_put, adapter, tmpdir):
bin_file = os.path.join(tmpdir.dirname, tmpdir.basename, bin_file_name)
DaisyServerInst = DaisyServer(daisy_server_info['name'],
daisy_server_info['address'],
@@ -461,25 +462,24 @@ def test_prepare_configurations_DaisyServer(mock_ssh_run, adapter, tmpdir):
deploy_file_name,
net_file_name)
mock_ssh_run.return_value = 0
+ deploy_file = os.path.join(tmpdir.dirname, tmpdir.basename, deploy_file_name)
+ net_file = os.path.join(tmpdir.dirname, tmpdir.basename, net_file_name)
cmd = 'export PYTHONPATH={python_path}; python {script} -nw {net_file} -b {is_bare}'.format(
python_path=remote_dir,
script=os.path.join(remote_dir, 'deploy/prepare/execute.py'),
net_file=os.path.join(remote_dir, net_file_name),
is_bare=1 if adapter == 'ipmi' else 0)
- DaisyServerInst.prepare_configurations()
+ DaisyServerInst.prepare_configurations(deploy_file, net_file)
if adapter == 'libvirt':
DaisyServerInst.ssh_run.assert_called_once_with(cmd)
else:
DaisyServerInst.ssh_run.assert_not_called()
+ assert DaisyServerInst.scp_put.call_count == 2
tmpdir.remove()
-@mock.patch.object(daisy_server.DaisyServer, 'scp_put')
-@mock.patch.object(daisy_server.DaisyServer, 'prepare_configurations')
@mock.patch.object(daisy_server.DaisyServer, 'ssh_run')
-def test_prepare_cluster_DaisyServer(mock_scp_put,
- mock_prepare_configurations,
- mock_ssh_run,
+def test_prepare_cluster_DaisyServer(mock_ssh_run,
tmpdir):
bin_file = os.path.join(tmpdir.dirname, tmpdir.basename, bin_file_name)
DaisyServerInst = DaisyServer(daisy_server_info['name'],
@@ -496,12 +496,8 @@ def test_prepare_cluster_DaisyServer(mock_scp_put,
script=os.path.join(remote_dir, 'deploy/tempest.py'),
deploy_file=os.path.join(remote_dir, deploy_file_name),
net_file=os.path.join(remote_dir, net_file_name))
- deploy_file = os.path.join(tmpdir.dirname, tmpdir.basename, deploy_file_name)
- net_file = os.path.join(tmpdir.dirname, tmpdir.basename, net_file_name)
- DaisyServerInst.prepare_cluster(deploy_file, net_file)
+ DaisyServerInst.prepare_cluster()
DaisyServerInst.ssh_run.assert_called_once_with(cmd, check=True)
- DaisyServerInst.prepare_configurations.assert_called_once_with()
- assert DaisyServerInst.scp_put.call_count == 2
tmpdir.remove()
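
The decorator reshuffle above leans on mock's stacking rule: patch decorators are applied bottom-up, so the mock created by the decorator closest to the function arrives as the first positional argument (hence mock_ssh_run before mock_scp_put in the new signature). A standalone illustration of the rule, not tied to DaisyServer:

    import mock

    class Server(object):
        def ssh_run(self):
            pass

        def scp_put(self):
            pass

    @mock.patch.object(Server, 'scp_put')   # outermost decorator -> second argument
    @mock.patch.object(Server, 'ssh_run')   # innermost decorator -> first argument
    def check(mock_ssh_run, mock_scp_put):
        Server().ssh_run()
        mock_ssh_run.assert_called_once_with()
        mock_scp_put.assert_not_called()

    check()
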
diff --git a/tests/unit/test_deploy.py b/tests/unit/test_deploy.py
index db887a01..4b68316a 100644
--- a/tests/unit/test_deploy.py
+++ b/tests/unit/test_deploy.py
@@ -195,6 +195,7 @@ def test__construct_final_deploy_conf_in_DaisyDeployment(mock__use_pod_descripto
'src_deploy_file': 'deploy_baremetal.yml',
'net_file': 'network_baremetal.yml',
'bin_file': 'opnfv.bin',
+ 'skip_daisy': False,
'daisy_only': False,
'cleanup_only': False,
'remote_dir': '/home/daisy',
@@ -212,6 +213,7 @@ def test__construct_final_deploy_conf_in_DaisyDeployment(mock__use_pod_descripto
'src_deploy_file': 'deploy_baremetal.yml',
'net_file': 'network_baremetal.yml',
'bin_file': 'opnfv.bin',
+ 'skip_daisy': False,
'daisy_only': False,
'cleanup_only': True,
'remote_dir': '/home/daisy',
@@ -229,6 +231,7 @@ def test__construct_final_deploy_conf_in_DaisyDeployment(mock__use_pod_descripto
'src_deploy_file': 'deploy_baremetal.yml',
'net_file': 'network_baremetal.yml',
'bin_file': 'opnfv.bin',
+ 'skip_daisy': False,
'daisy_only': True,
'cleanup_only': False,
'remote_dir': '/home/daisy',
@@ -242,8 +245,9 @@ def test__construct_final_deploy_conf_in_DaisyDeployment(mock__use_pod_descripto
@mock.patch.object(environment.BareMetalEnvironment, 'delete_old_environment')
@mock.patch.object(environment.BareMetalEnvironment, 'create_daisy_server')
@mock.patch.object(environment.BareMetalEnvironment, 'install_daisy')
+@mock.patch.object(environment.BareMetalEnvironment, 'connect_daisy_server')
@mock.patch.object(environment.BareMetalEnvironment, 'deploy')
-def test_run_in_DaisyDeployment(mock_deploy, mock_install_daisy,
+def test_run_in_DaisyDeployment(mock_deploy, mock_connect_daisy_server, mock_install_daisy,
mock_create_daisy_server, mock_delete_old_environment,
conf_file_dir, tmpdir, kwargs):
kwargs['src_deploy_file'] = os.path.join(conf_file_dir, kwargs['src_deploy_file'])
@@ -261,12 +265,16 @@ def test_run_in_DaisyDeployment(mock_deploy, mock_install_daisy,
if daisy_deploy.cleanup_only is False:
mock_create_daisy_server.assert_called_once_with()
if daisy_deploy.daisy_only is False:
- mock_deploy.assert_called_once_with(daisy_deploy.deploy_file, daisy_deploy.net_file)
- mock_install_daisy.assert_called_once_with(daisy_deploy.remote_dir, daisy_deploy.bin_file,
- daisy_deploy.deploy_file_name, daisy_deploy.net_file_name)
+ mock_deploy.assert_called_once_with(daisy_deploy.deploy_file,
+ daisy_deploy.net_file,
+ skip_preparation=False)
+ mock_connect_daisy_server.assert_called_once_with(daisy_deploy.remote_dir,
+ daisy_deploy.bin_file,
+ daisy_deploy.deploy_file_name,
+ daisy_deploy.net_file_name)
+ mock_install_daisy.assert_called_once_with()
else:
mock_deploy.assert_not_called()
- mock_install_daisy.assert_not_called()
else:
mock_create_daisy_server.assert_not_called()
tmpdir.remove()
@@ -286,13 +294,14 @@ def test_parse_arguments(mock_confirm_dir_exists, mock_make_file_executable,
mock_save_log_to_file, mock_check_sudo_privilege,
mock_parse_args, cleanup_only, tmpdir):
class MockArg():
- def __init__(self, labs_dir, lab_name, pod_name, bin_file, daisy_only,
+ def __init__(self, labs_dir, lab_name, pod_name, bin_file, skip_daisy, daisy_only,
cleanup_only, remote_dir, work_dir, storage_dir, pxe_bridge,
deploy_log, scenario):
self.labs_dir = labs_dir
self.lab_name = lab_name
self.pod_name = pod_name
self.bin_file = bin_file
+ self.skip_daisy = skip_daisy
self.daisy_only = daisy_only
self.cleanup_only = cleanup_only
self.remote_dir = remote_dir
@@ -315,6 +324,7 @@ def test_parse_arguments(mock_confirm_dir_exists, mock_make_file_executable,
'src_deploy_file': deploy_file,
'net_file': net_file,
'bin_file': bin_file_path,
+ 'skip_daisy': False,
'daisy_only': False,
'cleanup_only': cleanup_only,
'remote_dir': '/home/daisy',
@@ -324,7 +334,7 @@ def test_parse_arguments(mock_confirm_dir_exists, mock_make_file_executable,
'deploy_log': deploy_log_path,
'scenario': 'os-nosdn-nofeature-noha'
}
- mockarg = MockArg('/var/tmp/securedlab', 'zte', 'pod2', bin_file_path, False, cleanup_only, '/home/daisy', '/tmp/workdir',
+ mockarg = MockArg('/var/tmp/securedlab', 'zte', 'pod2', bin_file_path, False, False, cleanup_only, '/home/daisy', '/tmp/workdir',
'/home/qemu/vms', 'pxebr', deploy_log_path, 'os-nosdn-nofeature-noha')
mock_parse_args.return_value = mockarg
ret = parse_arguments()
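
For readers tracking the reworked assertions, the flow they pin down in DaisyDeployment.run() is roughly the sketch below; this is a paraphrase of what test_run_in_DaisyDeployment expects (attribute names are illustrative), not the deploy.py source:

    def run(self):
        self.environment.delete_old_environment()
        if self.cleanup_only:
            return
        self.environment.create_daisy_server()
        # Daisy server bring-up is now split into connect + install ...
        self.environment.connect_daisy_server(self.remote_dir, self.bin_file,
                                              self.deploy_file_name,
                                              self.net_file_name)
        self.environment.install_daisy()
        if self.daisy_only:
            return
        # ... and deploy() gains an explicit skip_preparation flag.
        self.environment.deploy(self.deploy_file, self.net_file,
                                skip_preparation=False)
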
diff --git a/tests/unit/test_environment.py b/tests/unit/test_environment.py
index aed2c73c..f7cf5985 100644
--- a/tests/unit/test_environment.py
+++ b/tests/unit/test_environment.py
@@ -136,7 +136,8 @@ def test_install_daisy_DaisyEnvironmentBase(mock_install_daisy, mock_connect, tm
DaisyEnvBaseInst = DaisyEnvironmentBase(
deploy_struct, net_struct, adapter, pxe_bridge,
daisy_server, work_dir, storage_dir, scenario)
- DaisyEnvBaseInst.install_daisy(remote_dir, bin_file, deploy_file_name, net_file_name)
+ DaisyEnvBaseInst.connect_daisy_server(remote_dir, bin_file, deploy_file_name, net_file_name)
+ DaisyEnvBaseInst.install_daisy()
mock_install_daisy.assert_called_once_with()
mock_connect.assert_called_once_with()
tmpdir.remove()
@@ -246,6 +247,7 @@ def test_create_daisy_server_BareMetalEnvironment(mock_create_daisy_server_vm, m
@mock.patch('deploy.environment.time.sleep')
@mock.patch.object(daisy_server.DaisyServer, 'prepare_cluster')
+@mock.patch.object(daisy_server.DaisyServer, 'prepare_configurations')
@mock.patch.object(environment.BareMetalEnvironment, 'reboot_nodes')
@mock.patch.object(daisy_server.DaisyServer, 'prepare_host_and_pxe')
@mock.patch.object(daisy_server.DaisyServer, 'check_os_installation')
@@ -253,7 +255,8 @@ def test_create_daisy_server_BareMetalEnvironment(mock_create_daisy_server_vm, m
@mock.patch.object(daisy_server.DaisyServer, 'post_deploy')
def test_deploy_BareMetalEnvironment(mock_post_deploy, mock_check_openstack_installation,
mock_check_os_installation, mock_prepare_host_and_pxe,
- mock_reboot_nodes, mock_prepare_cluster,
+ mock_reboot_nodes, mock_prepare_configurations,
+ mock_prepare_cluster,
mock_sleep,
tmpdir):
work_dir = os.path.join(tmpdir.dirname, tmpdir.basename, work_dir_name)
@@ -280,7 +283,8 @@ def test_deploy_BareMetalEnvironment(mock_post_deploy, mock_check_openstack_inst
deploy_file_name,
net_file_name)
BareMetalEnvironmentInst.deploy(deploy_file, net_file)
- mock_prepare_cluster.assert_called_once_with(deploy_file, net_file)
+ mock_prepare_configurations.assert_called_once_with(deploy_file, net_file)
+ mock_prepare_cluster.assert_called_once_with()
mock_reboot_nodes.assert_called_once_with(boot_dev='pxe')
mock_prepare_host_and_pxe.assert_called_once_with()
mock_check_os_installation.assert_called_once_with(len(BareMetalEnvironmentInst.deploy_struct['hosts']))
@@ -537,7 +541,7 @@ def test_delete_old_environment_VirtualEnvironment(mock_delete_daisy_server,
daisy_server, work_dir, storage_dir, scenario)
VirtualEnvironmentInst.delete_old_environment()
VirtualEnvironmentInst.delete_daisy_server.assert_called_once_with()
- VirtualEnvironmentInst.delete_networks.assert_called_once_with()
+ VirtualEnvironmentInst.delete_networks.assert_called_once_with(skip_daisy=False)
VirtualEnvironmentInst.delete_nodes.assert_called_once_with()
tmpdir.remove()
@@ -550,11 +554,12 @@ def test_delete_old_environment_VirtualEnvironment(mock_delete_daisy_server,
@mock.patch.object(environment.DaisyServer, 'prepare_host_and_pxe')
@mock.patch.object(environment.DaisyServer, 'copy_new_deploy_config')
@mock.patch.object(environment.DaisyServer, 'prepare_cluster')
+@mock.patch.object(environment.DaisyServer, 'prepare_configurations')
@mock.patch.object(environment.VirtualEnvironment, '_post_deploy')
@mock.patch.object(environment.VirtualEnvironment, 'reboot_nodes')
@mock.patch.object(environment.VirtualEnvironment, 'create_nodes')
def test_deploy_VirtualEnvironment(mock_create_nodes, mock_reboot_nodes,
- mock__post_deploy, mock_prepare_cluster,
+ mock__post_deploy, mock_prepare_configurations, mock_prepare_cluster,
mock_copy_new_deploy_config, mock_prepare_host_and_pxe,
mock_install_virtual_nodes, mock_check_os_installation,
mock_check_openstack_installation, mock_post_deploy,
@@ -587,6 +592,7 @@ def test_deploy_VirtualEnvironment(mock_create_nodes, mock_reboot_nodes,
mock_create_nodes.assert_called_once()
assert mock_reboot_nodes.call_count == 2
mock__post_deploy.assert_called_once()
+ mock_prepare_configurations.assert_called_once()
mock_prepare_cluster.assert_called_once()
mock_copy_new_deploy_config.assert_called_once()
mock_prepare_host_and_pxe.assert_called_once()
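
The updated assertions in test_deploy_BareMetalEnvironment spell out the new ordering on the bare-metal path; roughly (a sketch of what the test expects from BareMetalEnvironment.deploy(), with illustrative attribute names, not the environment.py source):

    def deploy(self, deploy_file, net_file):
        # Configuration files are pushed and rendered first ...
        self.server.prepare_configurations(deploy_file, net_file)
        # ... so prepare_cluster() no longer takes the file arguments.
        self.server.prepare_cluster()
        self.reboot_nodes(boot_dev='pxe')
        self.server.prepare_host_and_pxe()
        self.server.check_os_installation(len(self.deploy_struct['hosts']))
        # followed by the OpenStack installation check and post-deploy steps
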
diff --git a/tests/unit/test_tempest.py b/tests/unit/test_tempest.py
new file mode 100644
index 00000000..34ab4073
--- /dev/null
+++ b/tests/unit/test_tempest.py
@@ -0,0 +1,324 @@
+##############################################################################
+# Copyright (c) 2017 ZTE Corp and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import os
+import sys
+import pytest
+from oslo_config import cfg
+from tests.unit.daisyclient_stub import (
+ StubTestHost,
+ StubTestCluster,
+ StubTestNet,
+ StubTestRole,
+ StubTestClient
+)
+
+import mock
+sys.modules['daisyclient'] = mock.Mock()
+sys.modules['daisyclient.v1'] = mock.Mock()
+import deploy.tempest # noqa: ignore=E402
+from deploy.tempest import (
+ parse,
+ get_configure_from_daisyconf,
+ get_endpoint,
+ build_pxe_for_discover,
+ install_os_for_vm_step1,
+ install_os_for_bm_oneshot,
+ install_os_for_vm_step2,
+ discover_host,
+ update_network,
+ get_hosts,
+ get_cluster,
+ add_hosts_interface,
+ add_host_role,
+ enable_cinder_backend,
+ enable_opendaylight
+) # noqa: ignore=E402
+
+
+def get_val_index_in_list(key, list):
+ return list.index(key) + 1
+
+
+@pytest.mark.parametrize('argv', [
+ (['--dha', '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--network',
+ '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--cluster', 'yes', '--host',
+ 'yes', '--install', 'yes', '--isbare', '1', '--scenario', 'os-nosdn-nofeature-noha']),
+ (['--dha', '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--network',
+ '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--cluster', 'yes']),
+ (['--dha', '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--network',
+ '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--install', 'yes']),
+ (['--dha', '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--network',
+ '/home/zte/dairy/tests/data/deploy_virtual1.ymal', '--cluster', 'yes', '--host',
+ 'yes', '--install', 'yes', '--isbare', '1', '--scenario', 'os-nosdn-nofeature-noha'])])
+def test_parser(argv):
+ options_keys = ['dha', 'network', 'cluster', 'host', 'install', 'isbare', 'scenario']
+
+ conf = cfg.ConfigOpts()
+ parse(conf, argv)
+ for option in options_keys:
+ if conf[option]:
+ if option == 'isbare':
+ argv[argv.index('--' + option) + 1] = int(argv[argv.index('--' + option) + 1])
+ assert conf[option] == argv[argv.index('--' + option) + 1]
+
+
+@pytest.fixture(scope="module")
+def conf_file_dir(data_root):
+ return os.path.join(data_root, 'daisy_conf')
+
+
+@pytest.mark.parametrize('section, key, exp', [
+ ("PXE", "eth_name", 'ens3'),
+ ("PXE", "build_pxe", 'no')])
+def test_get_configure_from_daisyconf(section, key, exp, conf_file_dir):
+ res_old_val = deploy.tempest.daisy_conf_path
+ deploy.tempest.daisy_conf_path = os.path.join(conf_file_dir, 'daisy.conf')
+ ret = get_configure_from_daisyconf(section, key)
+ deploy.tempest.daisy_conf_path = res_old_val
+ assert ret == exp
+
+
+def test_get_endpoint(conf_file_dir):
+ daisyrc_file_path = os.path.join(conf_file_dir, 'daisyrc_admin')
+ exp = 'http://10.20.11.2:19292'
+ ret = get_endpoint(daisyrc_file_path)
+ assert ret == exp
+
+
+def test_build_pxe_for_discover():
+ client = StubTestClient()
+ cluster_id = 0x123456
+ deployment_interface = 'eth3'
+ build_pxe_for_discover(cluster_id, client, deployment_interface)
+
+
+def test_install_os_for_vm_step1():
+ client = StubTestClient()
+ cluster_id = 0x123456
+ install_os_for_vm_step1(cluster_id, client)
+
+
+def test_install_os_for_bm_oneshot():
+ client = StubTestClient()
+ cluster_id = 0x123456
+ install_os_for_bm_oneshot(cluster_id, client)
+
+
+def test_install_os_for_vm_step2():
+ client = StubTestClient()
+ cluster_id = 0x123456
+ install_os_for_vm_step2(cluster_id, client)
+
+
+@mock.patch('time.sleep')
+@mock.patch('deploy.tempest.get_hosts')
+def test_discover_host(mock_get_hosts, mock_sleep):
+ hosts_name = ['computer01', 'computer02', 'controller01', 'controller02', 'controller03']
+ mock_get_hosts.return_value = hosts_name
+ discover_host(hosts_name)
+ mock_sleep.assert_not_called()
+
+
+def test_update_network():
+ client = StubTestClient()
+ cluster_id = 1
+ network_map = {'MANAGEMENT': {'cidr': '10.20.11.0/24', 'gateway': '10.20.11.1',
+ 'ip_ranges': [{'start': '10.20.11.3', 'end': '10.20.11.10'}]},
+ 'STORAGE': {'cidr': '10.20.11.0/24', 'gateway': '10.20.11.1',
+ 'ip_ranges': [{'start': '10.20.11.3', 'end': '10.20.11.10'}]}}
+ metadata_net1 = {'cidr': '10.20.11.0/24', 'gateway': '10.20.11.1',
+ 'ip_ranges': [{'start': '10.20.11.3', 'end': '10.20.11.10'}]}
+ net1 = StubTestNet(0x1234, 'MANAGEMENT', 1, **metadata_net1)
+ client.networks.add(net1)
+ metadata_net2 = {'cidr': '10.20.11.0/24', 'gateway': '10.20.11.1',
+ 'ip_ranges': [{'start': '10.20.11.3', 'end': '10.20.11.10'}]}
+ net2 = StubTestNet(0x2345, 'STORAGE', 1, **metadata_net2)
+ client.networks.add(net2)
+ exp_nets_data = [metadata_net1, metadata_net2]
+ update_network(cluster_id, network_map, client)
+ for i in range(len(exp_nets_data)):
+ assert client.networks.networks[i].metadata == exp_nets_data[i]
+
+
+def test_get_hosts():
+ client = StubTestClient()
+ host1 = StubTestHost(0x1234, 'test_host_1', 1,
+ [{'name': 'ens8', 'mac': '11:11:11:11:11:11'}])
+ client.hosts.add(host1)
+ host2 = StubTestHost(0x2345, 'test_host_2', 1,
+ [{'name': 'ens3', 'mac': '22:22:22:22:22:22'}])
+ client.hosts.add(host2)
+ exp = [host1, host2]
+ ret = get_hosts(client)
+ assert ret == exp
+
+
+def test_get_cluster():
+ client = StubTestClient()
+ cluster1 = StubTestCluster(1, 'test_cluster_1')
+ client.clusters.add(cluster1)
+ cluster2 = StubTestCluster(2, 'test_cluster_2')
+ client.clusters.add(cluster2)
+ exp = 'test_cluster_2'
+ ret = get_cluster(client)
+ assert ret == exp
+
+
+@pytest.mark.parametrize('isbare', [
+ (False), (True)])
+def test_add_hosts_interface(isbare, tmpdir):
+ res_old_val = deploy.tempest.iso_path
+ deploy.tempest.iso_path = os.path.join(tmpdir.dirname, tmpdir.basename) + '/'
+ iso_file_path = os.path.join(deploy.tempest.iso_path, 'test_os.iso')
+ with open(iso_file_path, 'a') as f:
+ f.write('test_data')
+ client = StubTestClient()
+ cluster_id = 1
+ host_id1 = 0x1234
+ host_id2 = 0x2345
+ host_id3 = 0x3456
+ host1 = StubTestHost(host_id1, 'controller01', cluster_id, [{'name': 'ens8', 'mac': '11:11:11:11:11:11'}])
+ client.hosts.add(host1)
+ host2 = StubTestHost(host_id2, 'controller02', cluster_id, [{'name': 'ens3', 'mac': '22:22:22:22:22:22'}])
+ client.hosts.add(host2)
+ host3 = StubTestHost(host_id3, 'computer01', cluster_id, [{'name': 'ens9', 'mac': '33:33:33:33:33:33'}])
+ client.hosts.add(host3)
+ hosts_info = [host1, host2, host3]
+ role1 = StubTestRole(0xaaaa, 'CONTROLLER_LB', cluster_id)
+ client.roles.add(role1)
+ role2 = StubTestRole(0xbbbb, 'COMPUTER', cluster_id)
+ client.roles.add(role2)
+ mac_address_map = {
+ 'controller01': ['11:11:11:11:11:11'], 'controller02': ['22:22:22:22:22:22'], 'controller03': [],
+ 'computer01': ['33:33:33:33:33:33'], 'computer02': []}
+ host_interface_map = {
+ 'ens8': [{'ip': '', 'name': 'EXTERNAL'}],
+ 'ens3': [{'ip': '', 'name': 'MANAGEMENT'},
+ {'ip': '', 'name': 'PUBLICAPI'},
+ {'ip': '', 'name': 'STORAGE'},
+ {'ip': '', 'name': 'physnet1'}],
+ 'ens9': [{'ip': '', 'name': 'HEARTBEAT'}]}
+ vip = '10.20.11.11'
+ add_hosts_interface(1, hosts_info, mac_address_map,
+ host_interface_map,
+ vip, isbare, client)
+ deploy.tempest.iso_path = res_old_val
+ if isbare:
+ assert client.hosts.get(host_id1).metadata == {
+ 'id': host_id1, 'name': 'controller01', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'ipmi_user': 'zteroot', 'ipmi_passwd': 'superuser',
+ 'interfaces': [{'name': 'ens8', 'mac': '11:11:11:11:11:11',
+ 'assigned_networks': [{'ip': '', 'name': 'EXTERNAL'}]}],
+ }
+ assert client.hosts.get(host_id2).metadata == {
+ 'id': host_id2, 'name': 'controller02', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'ipmi_user': 'zteroot', 'ipmi_passwd': 'superuser',
+ 'interfaces': [{'name': 'ens3', 'mac': '22:22:22:22:22:22',
+ 'assigned_networks': [
+ {'ip': '', 'name': 'MANAGEMENT'},
+ {'ip': '', 'name': 'PUBLICAPI'},
+ {'ip': '', 'name': 'STORAGE'},
+ {'ip': '', 'name': 'physnet1'}]}],
+ }
+ assert client.hosts.get(host_id3).metadata == {
+ 'id': host_id3, 'name': 'computer01', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'ipmi_user': 'zteroot', 'ipmi_passwd': 'superuser',
+ 'interfaces': [{'name': 'ens9', 'mac': '33:33:33:33:33:33',
+ 'assigned_networks': [{'ip': '', 'name': 'HEARTBEAT'}]}],
+ }
+ else:
+ assert client.hosts.get(host_id1).metadata == {
+ 'id': host_id1, 'name': 'controller01', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'interfaces': [{'name': 'ens8', 'mac': '11:11:11:11:11:11',
+ 'assigned_networks': [{'ip': '', 'name': 'EXTERNAL'}]}],
+ }
+ assert client.hosts.get(host_id2).metadata == {
+ 'id': host_id2, 'name': 'controller02', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'interfaces': [{'name': 'ens3', 'mac': '22:22:22:22:22:22',
+ 'assigned_networks': [
+ {'ip': '', 'name': 'MANAGEMENT'},
+ {'ip': '', 'name': 'PUBLICAPI'},
+ {'ip': '', 'name': 'STORAGE'},
+ {'ip': '', 'name': 'physnet1'}]}],
+ }
+ assert client.hosts.get(host_id3).metadata == {
+ 'id': host_id3, 'name': 'computer01', 'cluster_id': cluster_id,
+ 'cluster': cluster_id, 'os_version': iso_file_path,
+ 'interfaces': [{'name': 'ens9', 'mac': '33:33:33:33:33:33',
+ 'assigned_networks': [{'ip': '', 'name': 'HEARTBEAT'}]}],
+ }
+ tmpdir.remove()
+
+
+@pytest.mark.parametrize('dha_host_name, cluster_id, host_id, vip, exp', [
+ ('controller01', 1, 0x1234, '10.20.11.11', {'nodes': [0x1234], 'cluster_id': 1, 'vip': '10.20.11.11'}),
+ ('computer01', 1, 0x2345, '10.20.11.11', {'nodes': [0x2345], 'cluster_id': 1}),
+ ('all_in_one', 1, 0x1234, '10.20.11.11',
+ [{'nodes': [0x1234], 'cluster_id': 1, 'vip': '10.20.11.11'},
+ {'nodes': [0x1234], 'cluster_id': 1}])])
+def test_add_host_role(dha_host_name, cluster_id, host_id, vip, exp):
+ client = StubTestClient()
+ role1 = StubTestRole(0x1234, 'CONTROLLER_LB', 1)
+ client.roles.add(role1)
+ role2 = StubTestRole(0x2345, 'COMPUTER', 1)
+ client.roles.add(role2)
+ add_host_role(cluster_id, host_id, dha_host_name, vip, client)
+ if dha_host_name == 'controller01':
+ assert client.roles.roles[0].metadata == exp
+ if dha_host_name == 'computer01':
+ assert client.roles.roles[1].metadata == exp
+ if dha_host_name == 'all_in_one':
+ assert client.roles.roles[0].metadata == exp[0]
+ assert client.roles.roles[1].metadata == exp[1]
+
+
+def test_enable_cinder_backend():
+ client = StubTestClient()
+ role1 = StubTestRole(0x1234, 'CONTROLLER_LB', 1)
+ client.roles.add(role1)
+ service_name = 'ceph'
+ disk_name = '/dev/sdb'
+ protocol_type = 'RAW'
+ exp_disk_meta = {'service': service_name,
+ 'disk_location': 'local',
+ 'partition': disk_name,
+ 'protocol_type': protocol_type,
+ 'role_id': 0x1234}
+ enable_cinder_backend(1, service_name, disk_name, protocol_type, client)
+ assert client.disk_array.disks[0] == exp_disk_meta
+
+
+@pytest.mark.parametrize('layer, exp', [
+ ('odl_l3', {
+ 'neutron_backends_array': [{'zenic_ip': '',
+ 'sdn_controller_type': 'opendaylight',
+ 'zenic_port': '',
+ 'zenic_user_password': '',
+ 'neutron_agent_type': '',
+ 'zenic_user_name': '',
+ 'enable_l2_or_l3': 'l3'}]}),
+ ('odl_l2', {
+ 'neutron_backends_array': [{'zenic_ip': '',
+ 'sdn_controller_type': 'opendaylight',
+ 'zenic_port': '',
+ 'zenic_user_password': '',
+ 'neutron_agent_type': '',
+ 'zenic_user_name': '',
+ 'enable_l2_or_l3': 'l2'}]})])
+def test_enable_opendaylight(layer, exp):
+ client = StubTestClient()
+ role1 = StubTestRole(0x1234, 'CONTROLLER_LB', 1)
+ client.roles.add(role1)
+ enable_opendaylight(1, layer, client)
+ assert client.roles.roles[0].metadata == exp
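
A closing note on the import shim at the top of test_tempest.py: registering fake modules in sys.modules before importing deploy.tempest is what lets the suite run on machines without python-daisyclient installed. The same pattern in isolation:

    import sys
    import mock

    # Register fake modules before the code under test is imported, so any
    # daisyclient import inside deploy.tempest resolves to a mock instead.
    sys.modules['daisyclient'] = mock.Mock()
    sys.modules['daisyclient.v1'] = mock.Mock()

    import daisyclient                       # resolves to the mock object
    assert isinstance(daisyclient, mock.Mock)
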