Diffstat (limited to 'yardstick/tests/unit/benchmark')
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_model.py | 207
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py | 97
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py | 94
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_base.py | 139
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_dummy.py | 16
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_heat.py | 198
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_kubernetes.py | 222
-rw-r--r--  yardstick/tests/unit/benchmark/contexts/test_node.py | 60
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_plugin.py | 10
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_report.py | 608
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_task.py | 96
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_testcase.py | 14
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_arithmetic.py | 446
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_base.py | 63
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_duration.py | 315
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_iteration.py | 45
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_proxduration.py | 286
-rw-r--r--  yardstick/tests/unit/benchmark/runner/test_search.py | 50
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py | 77
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py | 36
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py | 18
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py | 17
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py | 16
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py | 51
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py | 75
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py | 7
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py | 61
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py | 7
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py | 5
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py | 5
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/__init__.py | 0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt | 14
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt | 300
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py | 182
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py | 52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py | 64
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py | 62
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py | 56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py | 67
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py | 63
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py | 120
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py | 51
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py | 48
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py | 23
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py | 51
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py | 49
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py | 53
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py | 52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py | 69
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py | 27
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py | 3
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py | 3
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py | 17
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py | 3
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py | 770
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py | 182
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py | 3
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py | 313
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py | 190
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py | 162
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml | 22
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py | 5
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py | 340
63 files changed, 5182 insertions, 1575 deletions
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py
index b1dcee209..e76a3ca27 100644
--- a/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_model.py
@@ -13,11 +13,12 @@
# limitations under the License.
import copy
-import mock
import os
-import unittest
import uuid
+import mock
+import netaddr
+import unittest
from xml.etree import ElementTree
from yardstick import ssh
@@ -46,6 +47,16 @@ XML_SAMPLE_INTERFACE = """<?xml version="1.0"?>
class ModelLibvirtTestCase(unittest.TestCase):
+ XML_STR = model.VM_TEMPLATE.format(
+ vm_name="vm_name",
+ random_uuid=uuid.uuid4(),
+ mac_addr="00:01:02:03:04:05",
+ memory=2048, vcpu=2, cpu=2,
+ numa_cpus='0-10',
+ socket=1, threads=1,
+ vm_image="/var/lib/libvirt/images/yardstick-nsb-image.img",
+ cpuset='2-10', cputune='', machine='pc')
+
def setUp(self):
self.pci_address_str = '0001:04:03.2'
self.pci_address = utils.PciAddress(self.pci_address_str)
@@ -66,34 +77,34 @@ class ModelLibvirtTestCase(unittest.TestCase):
ssh_mock.execute = mock.Mock(return_value=(0, "a", ""))
ssh.return_value = ssh_mock
# NOTE(ralonsoh): this test doesn't cover function execution.
- model.Libvirt.check_if_vm_exists_and_delete("vm_0", ssh_mock)
+ model.Libvirt.check_if_vm_exists_and_delete('vm-0', ssh_mock)
def test_virsh_create_vm(self):
self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
- model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm_0')
- self.mock_ssh.execute.assert_called_once_with('virsh create vm_0')
+ model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm-0')
+ self.mock_ssh.execute.assert_called_once_with('virsh create vm-0')
def test_virsh_create_vm_error(self):
self.mock_ssh.execute = mock.Mock(return_value=(1, 0, 'error_create'))
with self.assertRaises(exceptions.LibvirtCreateError) as exc:
- model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm_0')
+ model.Libvirt.virsh_create_vm(self.mock_ssh, 'vm-0')
self.assertEqual('Error creating the virtual machine. Error: '
'error_create.', str(exc.exception))
- self.mock_ssh.execute.assert_called_once_with('virsh create vm_0')
+ self.mock_ssh.execute.assert_called_once_with('virsh create vm-0')
def test_virsh_destroy_vm(self):
self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
- model.Libvirt.virsh_destroy_vm('vm_0', self.mock_ssh)
- self.mock_ssh.execute.assert_called_once_with('virsh destroy vm_0')
+ model.Libvirt.virsh_destroy_vm('vm-0', self.mock_ssh)
+ self.mock_ssh.execute.assert_called_once_with('virsh destroy vm-0')
@mock.patch.object(model, 'LOG')
def test_virsh_destroy_vm_error(self, mock_logger):
self.mock_ssh.execute = mock.Mock(return_value=(1, 0, 'error_destroy'))
mock_logger.warning = mock.Mock()
- model.Libvirt.virsh_destroy_vm('vm_0', self.mock_ssh)
+ model.Libvirt.virsh_destroy_vm('vm-0', self.mock_ssh)
mock_logger.warning.assert_called_once_with(
- 'Error destroying VM %s. Error: %s', 'vm_0', 'error_destroy')
- self.mock_ssh.execute.assert_called_once_with('virsh destroy vm_0')
+ 'Error destroying VM %s. Error: %s', 'vm-0', 'error_destroy')
+ self.mock_ssh.execute.assert_called_once_with('virsh destroy vm-0')
def test_add_interface_address(self):
xml = ElementTree.ElementTree(
@@ -113,7 +124,7 @@ class ModelLibvirtTestCase(unittest.TestCase):
def test_add_ovs_interfaces(self):
xml_input = copy.deepcopy(XML_SAMPLE)
xml_output = model.Libvirt.add_ovs_interface(
- '/usr/local', 0, self.pci_address_str, self.mac, xml_input)
+ '/usr/local', 0, self.pci_address_str, self.mac, xml_input, 4)
root = ElementTree.fromstring(xml_output)
et_out = ElementTree.ElementTree(element=root)
@@ -171,15 +182,148 @@ class ModelLibvirtTestCase(unittest.TestCase):
self.assertEqual('0x' + vm_pci.split(':')[2].split('.')[1],
interface_address.get('function'))
+ def test_add_cdrom(self):
+ xml_input = copy.deepcopy(XML_SAMPLE)
+ xml_output = model.Libvirt.add_cdrom('/var/lib/libvirt/images/data.img', xml_input)
+
+ root = ElementTree.fromstring(xml_output)
+ et_out = ElementTree.ElementTree(element=root)
+ disk = et_out.find('devices').find('disk')
+ self.assertEqual('file', disk.get('type'))
+ self.assertEqual('cdrom', disk.get('device'))
+ driver = disk.find('driver')
+ self.assertEqual('qemu', driver.get('name'))
+ self.assertEqual('raw', driver.get('type'))
+ source = disk.find('source')
+ self.assertEqual('/var/lib/libvirt/images/data.img', source.get('file'))
+ target = disk.find('target')
+ self.assertEqual('hdb', target.get('dev'))
+ self.assertIsNotNone(disk.find('readonly'))
+
+ def test_gen_cdrom_image(self):
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ root = ElementTree.fromstring(self.XML_STR)
+ hostname = root.find('name').text
+ meta_data = "/tmp/meta-data"
+ user_data = "/tmp/user-data"
+ network_data = "/tmp/network-config"
+ file_path = "/tmp/cdrom-0.img"
+ key_filename = "id_rsa"
+ pub_key_str = "KEY"
+ user = 'root'
+ mac = "00:11:22:33:44:55"
+ ip = "1.1.1.7/24"
+ user_config = [" - name: {user_name}",
+ " ssh_authorized_keys:",
+ " - {pub_key_str}"]
+
+ user_conf = os.linesep.join(user_config).format(pub_key_str=pub_key_str, user_name=user)
+ with mock.patch('six.moves.builtins.open', mock.mock_open(read_data=pub_key_str),
+ create=True) as mock_file:
+ with open(key_filename, "r") as h:
+ result = h.read()
+ model.Libvirt.gen_cdrom_image(self.mock_ssh, file_path, hostname, user, key_filename,
+ mac, ip)
+ mock_file.assert_called_with(".".join([key_filename, "pub"]), "r")
+ self.assertEqual(result, pub_key_str)
+
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call("touch %s" % meta_data),
+ mock.call(model.USER_DATA_TEMPLATE.format(user_file=user_data, host=hostname,
+ user_config=user_conf)),
+ mock.call(model.NETWORK_DATA_TEMPLATE.format(network_file=network_data,
+ mac_address=mac, ip_address=ip)),
+ mock.call("genisoimage -output {0} -volid cidata"
+ " -joliet -r {1} {2} {3}".format(file_path, meta_data, user_data,
+ network_data)),
+ mock.call("rm {0} {1} {2}".format(meta_data, user_data, network_data))
+ ])
+
def test_create_snapshot_qemu(self):
- result = "/var/lib/libvirt/images/0.qcow2"
- with mock.patch("yardstick.ssh.SSH") as ssh:
- ssh_mock = mock.Mock(autospec=ssh.SSH)
- ssh_mock.execute = \
- mock.Mock(return_value=(0, "a", ""))
- ssh.return_value = ssh_mock
- image = model.Libvirt.create_snapshot_qemu(ssh_mock, "0", "ubuntu.img")
- self.assertEqual(image, result)
+ self.mock_ssh.execute = mock.Mock(return_value=(0, 0, 0))
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index, base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
+
+ @mock.patch.object(os.path, 'basename', return_value='base_image')
+ @mock.patch.object(os.path, 'normpath')
+ @mock.patch.object(os, 'access', return_value=True)
+ def test_create_snapshot_qemu_no_image_remote(self,
+ mock_os_access, mock_normpath, mock_basename):
+ self.mock_ssh.execute = mock.Mock(
+ side_effect=[(0, 0, 0), (1, 0, 0), (0, 0, 0), (0, 0, 0)])
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+ mock_normpath.return_value = base_image
+
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index, base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('mv -- "/tmp/%s" "%s"' % ('base_image', base_image)),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
+ mock_os_access.assert_called_once_with(base_image, os.R_OK)
+ mock_normpath.assert_called_once_with(base_image)
+ mock_basename.assert_has_calls([mock.call(base_image)])
+ self.mock_ssh.put_file.assert_called_once_with(base_image,
+ '/tmp/base_image')
+
+ @mock.patch.object(model.Libvirt, 'gen_cdrom_image')
+ def test_check_update_key(self, mock_gen_cdrom_image):
+ node = {
+ 'user': 'defuser',
+ 'key_filename': '/home/ubuntu/id_rsa',
+ 'ip': '1.1.1.7',
+ 'netmask': '255.255.255.0'}
+ cdrom_img = "/var/lib/libvirt/images/data.img"
+ id_name = 'fake_name'
+ key_filename = node.get('key_filename')
+ root = ElementTree.fromstring(self.XML_STR)
+ hostname = root.find('name').text
+ mac = "00:11:22:33:44:55"
+ ip = "{0}/{1}".format(node.get('ip'), node.get('netmask'))
+ ip = "{0}/{1}".format(node.get('ip'), netaddr.IPNetwork(ip).prefixlen)
+ model.StandaloneContextHelper.check_update_key(self.mock_ssh, node, hostname, id_name,
+ cdrom_img, mac)
+ mock_gen_cdrom_image.assert_called_once_with(self.mock_ssh, cdrom_img, hostname,
+ node.get('user'), key_filename, mac, ip)
+
+ @mock.patch.object(os, 'access', return_value=False)
+ def test_create_snapshot_qemu_no_image_local(self, mock_os_access):
+ self.mock_ssh.execute = mock.Mock(side_effect=[(0, 0, 0), (1, 0, 0)])
+ base_image = '/tmp/base_image'
+
+ with self.assertRaises(exceptions.LibvirtQemuImageBaseImageNotPresent):
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, 3, base_image)
+ mock_os_access.assert_called_once_with(base_image, os.R_OK)
+
+ def test_create_snapshot_qemu_error_qemuimg_command(self):
+ self.mock_ssh.execute = mock.Mock(
+ side_effect=[(0, 0, 0), (0, 0, 0), (1, 0, 0)])
+ index = 1
+ vm_image = '/var/lib/libvirt/images/%s.qcow2' % index
+ base_image = '/tmp/base_image'
+
+ with self.assertRaises(exceptions.LibvirtQemuImageCreateError):
+ model.Libvirt.create_snapshot_qemu(self.mock_ssh, index,
+ base_image)
+ self.mock_ssh.execute.assert_has_calls([
+ mock.call('rm -- "%s"' % vm_image),
+ mock.call('test -r %s' % base_image),
+ mock.call('qemu-img create -f qcow2 -o backing_file=%s %s' %
+ (base_image, vm_image))
+ ])
@mock.patch.object(model.Libvirt, 'pin_vcpu_for_perf', return_value='4,5')
@mock.patch.object(model.Libvirt, 'create_snapshot_qemu',
@@ -197,18 +341,21 @@ class ModelLibvirtTestCase(unittest.TestCase):
mac = model.StandaloneContextHelper.get_mac_address(0x00)
_uuid = uuid.uuid4()
connection = mock.Mock()
+ cdrom_img = '/tmp/cdrom-0.img'
with mock.patch.object(model.StandaloneContextHelper,
'get_mac_address', return_value=mac) as \
mock_get_mac_address, \
mock.patch.object(uuid, 'uuid4', return_value=_uuid):
xml_out, mac = model.Libvirt.build_vm_xml(
- connection, flavor, 'vm_name', 100)
+ connection, flavor, 'vm_name', 100, cdrom_img)
xml_ref = model.VM_TEMPLATE.format(vm_name='vm_name',
random_uuid=_uuid, mac_addr=mac, memory='1024', vcpu='8', cpu='4',
numa_cpus='0-7', socket='3', threads='2',
- vm_image='qemu_image', cpuset='4,5', cputune='cool')
- self.assertEqual(xml_ref, xml_out)
+ vm_image='qemu_image', cpuset='4,5', cputune='cool',
+ machine='pc-i440fx-xenial')
+ xml_ref = model.Libvirt.add_cdrom(cdrom_img, xml_ref)
+ self.assertEqual(xml_out, xml_ref)
mock_get_mac_address.assert_called_once_with(0x00)
mock_create_snapshot_qemu.assert_called_once_with(
connection, 100, 'images')
@@ -240,6 +387,7 @@ class ModelLibvirtTestCase(unittest.TestCase):
status = model.Libvirt.pin_vcpu_for_perf(ssh_mock, 4)
self.assertIsNotNone(status)
+
class StandaloneContextHelperTestCase(unittest.TestCase):
NODE_SAMPLE = "nodes_sample.yaml"
@@ -315,11 +463,6 @@ class StandaloneContextHelperTestCase(unittest.TestCase):
file_path = os.path.join(curr_path, filename)
return file_path
- def test_read_config_file(self):
- self.helper.file_path = self._get_file_abspath(self.NODE_SAMPLE)
- status = self.helper.read_config_file()
- self.assertIsNotNone(status)
-
def test_parse_pod_file(self):
self.helper.file_path = self._get_file_abspath("dummy")
self.assertRaises(IOError, self.helper.parse_pod_file,
@@ -412,7 +555,7 @@ class ServerTestCase(unittest.TestCase):
}
}
status = self.server.generate_vnf_instance(
- {}, self.NETWORKS, '1.1.1.1/24', 'vm_0', vnf, '00:00:00:00:00:01')
+ {}, self.NETWORKS, '1.1.1.1/24', 'vm-0', vnf, '00:00:00:00:00:01')
self.assertIsNotNone(status)
@@ -422,7 +565,7 @@ class OvsDeployTestCase(unittest.TestCase):
def setUp(self):
self._mock_ssh = mock.patch.object(ssh, 'SSH')
- self.mock_ssh = self._mock_ssh .start()
+ self.mock_ssh = self._mock_ssh.start()
self.ovs_deploy = model.OvsDeploy(self.mock_ssh,
'/tmp/dpdk-devbind.py',
self.OVS_DETAILS)
@@ -494,4 +637,4 @@ class OvsDeployTestCase(unittest.TestCase):
'dpdk_version': dpdk_version,
'proxy': 'test_proxy'})
mock_execute.assert_called_once_with(cmd)
- mock_env_get.assert_called_once_with('http_proxy', '')
+ mock_env_get.assert_has_calls([mock.call('http_proxy', '')])
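
The gen_cdrom_image tests above assert a cloud-init NoCloud flow: a meta-data file is touched, user-data and network-config files are rendered from templates, and genisoimage packs them into an ISO whose volume id is "cidata", the label cloud-init's NoCloud datasource looks for. A minimal local sketch of that flow follows. The exact user-data and network-config bodies are assumptions (USER_DATA_TEMPLATE and NETWORK_DATA_TEMPLATE are not shown in this patch), and it runs on the local host rather than over the SSH connection model.Libvirt uses.

    # Hedged sketch: build a NoCloud config-drive image locally. Only the
    # command shape matches the calls asserted in test_gen_cdrom_image;
    # the file contents are illustrative. Requires genisoimage installed.
    import os
    import subprocess

    def gen_cdrom_image(file_path, hostname, user, pub_key, mac, ip,
                        workdir='/tmp'):
        meta_data = os.path.join(workdir, 'meta-data')
        user_data = os.path.join(workdir, 'user-data')
        network_data = os.path.join(workdir, 'network-config')

        open(meta_data, 'w').close()  # the test only expects "touch meta-data"

        with open(user_data, 'w') as f:  # assumed cloud-config shape
            f.write('#cloud-config\nhostname: %s\nusers:\n'
                    ' - name: %s\n   ssh_authorized_keys:\n   - %s\n'
                    % (hostname, user, pub_key))

        with open(network_data, 'w') as f:  # assumed network-config v2 shape
            f.write('version: 2\nethernets:\n  eth0:\n'
                    "    match: {macaddress: '%s'}\n    addresses: [%s]\n"
                    % (mac, ip))

        # volid must be "cidata" so cloud-init's NoCloud datasource finds it
        subprocess.check_call(['genisoimage', '-output', file_path,
                               '-volid', 'cidata', '-joliet', '-r',
                               meta_data, user_data, network_data])
        for path in (meta_data, user_data, network_data):
            os.remove(path)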
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
index bc3bb73cd..413bb68b7 100644
--- a/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_ovs_dpdk.py
@@ -19,9 +19,12 @@ import mock
import six
import unittest
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts.standalone import model
from yardstick.benchmark.contexts.standalone import ovs_dpdk
from yardstick.common import exceptions
+from yardstick.common import utils as common_utils
from yardstick.network_services import utils
@@ -57,11 +60,19 @@ class OvsDpdkContextTestCase(unittest.TestCase):
'file': self._get_file_abspath(self.NODES_ovs_dpdk_SAMPLE)
}
self.ovs_dpdk = ovs_dpdk.OvsDpdkContext()
+ self._mock_log = mock.patch.object(ovs_dpdk, 'LOG')
+ self.mock_log = self._mock_log.start()
self.addCleanup(self._remove_contexts)
+ self.addCleanup(self._stop_mocks)
- def _remove_contexts(self):
- if self.ovs_dpdk in self.ovs_dpdk.list:
- self.ovs_dpdk._delete_context()
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
+ def _stop_mocks(self):
+ self._mock_log.stop()
@mock.patch('yardstick.benchmark.contexts.standalone.model.Server')
@mock.patch('yardstick.benchmark.contexts.standalone.model.StandaloneContextHelper')
@@ -73,7 +84,7 @@ class OvsDpdkContextTestCase(unittest.TestCase):
def test_init(self):
ATTRS = {
- 'name': 'StandaloneOvsDpdk',
+ 'name': contexts.CONTEXT_STANDALONEOVSDPDK,
'task_id': '1234567890',
'file': 'pod',
'flavor': {},
@@ -149,6 +160,13 @@ class OvsDpdkContextTestCase(unittest.TestCase):
}
self.ovs_dpdk.wait_for_vswitchd = 0
self.assertIsNone(self.ovs_dpdk.setup_ovs_bridge_add_flows())
+ self.ovs_dpdk.ovs_properties.update(
+ {'dpdk_pmd-rxq-affinity': {'0': "0:1"}})
+ self.ovs_dpdk.ovs_properties.update(
+ {'vhost_pmd-rxq-affinity': {'0': "0:1"}})
+ self.NETWORKS['private_0'].update({'port_num': '0'})
+ self.NETWORKS['public_0'].update({'port_num': '1'})
+ self.ovs_dpdk.setup_ovs_bridge_add_flows()
@mock.patch("yardstick.ssh.SSH")
def test_cleanup_ovs_dpdk_env(self, mock_ssh):
@@ -161,11 +179,9 @@ class OvsDpdkContextTestCase(unittest.TestCase):
self.ovs_dpdk.wait_for_vswitchd = 0
self.assertIsNone(self.ovs_dpdk.cleanup_ovs_dpdk_env())
- @mock.patch.object(ovs_dpdk.OvsDpdkContext, '_check_hugepages')
@mock.patch.object(utils, 'get_nsb_option')
@mock.patch.object(model.OvsDeploy, 'ovs_deploy')
- def test_check_ovs_dpdk_env(self, mock_ovs_deploy, mock_get_nsb_option,
- mock_check_hugepages):
+ def test_check_ovs_dpdk_env(self, mock_ovs_deploy, mock_get_nsb_option):
self.ovs_dpdk.connection = mock.Mock()
self.ovs_dpdk.connection.execute = mock.Mock(
return_value=(1, 0, 0))
@@ -179,11 +195,9 @@ class OvsDpdkContextTestCase(unittest.TestCase):
self.ovs_dpdk.check_ovs_dpdk_env()
mock_ovs_deploy.assert_called_once()
- mock_check_hugepages.assert_called_once()
mock_get_nsb_option.assert_called_once_with('bin_path')
- @mock.patch.object(ovs_dpdk.OvsDpdkContext, '_check_hugepages')
- def test_check_ovs_dpdk_env_wrong_version(self, mock_check_hugepages):
+ def test_check_ovs_dpdk_env_wrong_version(self):
self.ovs_dpdk.connection = mock.Mock()
self.ovs_dpdk.connection.execute = mock.Mock(
return_value=(1, 0, 0))
@@ -196,7 +210,6 @@ class OvsDpdkContextTestCase(unittest.TestCase):
with self.assertRaises(exceptions.OVSUnsupportedVersion):
self.ovs_dpdk.check_ovs_dpdk_env()
- mock_check_hugepages.assert_called_once()
@mock.patch('yardstick.ssh.SSH')
def test_deploy(self, *args):
@@ -221,8 +234,8 @@ class OvsDpdkContextTestCase(unittest.TestCase):
def test_undeploy(self, mock_libvirt):
self.ovs_dpdk.vm_deploy = True
self.ovs_dpdk.connection = mock.Mock()
- self.ovs_dpdk.vm_names = ['vm_0', 'vm_1']
- self.ovs_dpdk.drivers = ['vm_0', 'vm_1']
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
+ self.ovs_dpdk.drivers = ['vm-0', 'vm-1']
self.ovs_dpdk.cleanup_ovs_dpdk_env = mock.Mock()
self.ovs_dpdk.networks = self.NETWORKS
self.ovs_dpdk.undeploy()
@@ -287,6 +300,22 @@ class OvsDpdkContextTestCase(unittest.TestCase):
self.assertEqual(result['user'], 'root')
self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+ def test__get_physical_node_for_server(self):
+ attrs = self.attrs
+ attrs.update({'servers': {'server1': {}}})
+ self.ovs_dpdk.init(attrs)
+
+ # When server is not from this context
+ result = self.ovs_dpdk._get_physical_node_for_server('server1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.ovs_dpdk._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.ovs_dpdk._get_physical_node_for_server('server1.foo-12345678')
+ self.assertEqual(result, 'node5.foo')
+
# TODO(elfoley): Split this test for networks that exist and networks that
# don't
def test__get_network(self):
@@ -344,7 +373,7 @@ class OvsDpdkContextTestCase(unittest.TestCase):
ssh.return_value = ssh_mock
self.ovs_dpdk.vm_deploy = True
self.ovs_dpdk.connection = ssh_mock
- self.ovs_dpdk.vm_names = ['vm_0', 'vm_1']
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
self.ovs_dpdk.drivers = []
self.ovs_dpdk.networks = self.NETWORKS
self.ovs_dpdk.helper.get_mac_address = mock.Mock(return_value="")
@@ -355,7 +384,7 @@ class OvsDpdkContextTestCase(unittest.TestCase):
def test__enable_interfaces(self, mock_add_ovs_interface):
self.ovs_dpdk.vm_deploy = True
self.ovs_dpdk.connection = mock.Mock()
- self.ovs_dpdk.vm_names = ['vm_0', 'vm_1']
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
self.ovs_dpdk.drivers = []
self.ovs_dpdk.networks = self.NETWORKS
self.ovs_dpdk.ovs_properties = {'vpath': 'fake_path'}
@@ -363,17 +392,23 @@ class OvsDpdkContextTestCase(unittest.TestCase):
self.ovs_dpdk._enable_interfaces(0, ["private_0"], 'test')
mock_add_ovs_interface.assert_called_once_with(
'fake_path', 0, self.NETWORKS['private_0']['vpci'],
- self.NETWORKS['private_0']['mac'], 'test')
+ self.NETWORKS['private_0']['mac'], 'test', 1)
+ @mock.patch.object(ovs_dpdk.OvsDpdkContext, '_check_hugepages')
+ @mock.patch.object(common_utils, 'setup_hugepages')
+ @mock.patch.object(model.StandaloneContextHelper, 'check_update_key')
@mock.patch.object(model.Libvirt, 'write_file')
@mock.patch.object(model.Libvirt, 'build_vm_xml')
@mock.patch.object(model.Libvirt, 'check_if_vm_exists_and_delete')
@mock.patch.object(model.Libvirt, 'virsh_create_vm')
def test_setup_ovs_dpdk_context(self, mock_create_vm, mock_check_if_exists,
- mock_build_xml, mock_write_file):
+ mock_build_xml, mock_write_file,
+ mock_check_update_key,
+ mock_setup_hugepages,
+ mock__check_hugepages):
self.ovs_dpdk.vm_deploy = True
self.ovs_dpdk.connection = mock.Mock()
- self.ovs_dpdk.vm_names = ['vm_0', 'vm_1']
+ self.ovs_dpdk.vm_names = ['vm-0', 'vm-1']
self.ovs_dpdk.drivers = []
self.ovs_dpdk.servers = {
'vnf_0': {
@@ -386,24 +421,38 @@ class OvsDpdkContextTestCase(unittest.TestCase):
}
self.ovs_dpdk.networks = self.NETWORKS
self.ovs_dpdk.host_mgmt = {}
- self.ovs_dpdk.flavor = {}
+ self.ovs_dpdk.vm_flavor = {'ram': '1024'}
+ self.ovs_dpdk.file_path = '/var/lib/libvirt/images/cdrom-0.img'
self.ovs_dpdk.configure_nics_for_ovs_dpdk = mock.Mock(return_value="")
- xml_str = mock.Mock()
- mock_build_xml.return_value = (xml_str, '00:00:00:00:00:01')
+ self.ovs_dpdk._name_task_id = 'fake_name'
+ xml_str = 'vm-0'
+ self.ovs_dpdk.mac = '00:00:00:00:00:01'
+ mock_build_xml.return_value = (xml_str, self.ovs_dpdk.mac)
self.ovs_dpdk._enable_interfaces = mock.Mock(return_value=xml_str)
vnf_instance = mock.Mock()
+ vnf_instance_2 = mock.Mock()
+ mock_check_update_key.return_value = vnf_instance_2
self.ovs_dpdk.vnf_node.generate_vnf_instance = mock.Mock(
return_value=vnf_instance)
- self.assertEqual([vnf_instance],
+ self.assertEqual([vnf_instance_2],
self.ovs_dpdk.setup_ovs_dpdk_context())
+ mock_setup_hugepages.assert_called_once_with(self.ovs_dpdk.connection,
+ (1024 + 4096) * 1024) # ram + dpdk_socket0_mem + dpdk_socket1_mem
+ mock__check_hugepages.assert_called_once()
mock_create_vm.assert_called_once_with(
self.ovs_dpdk.connection, '/tmp/vm_ovs_0.xml')
mock_check_if_exists.assert_called_once_with(
- 'vm_0', self.ovs_dpdk.connection)
+ 'vm-0', self.ovs_dpdk.connection)
mock_build_xml.assert_called_once_with(
- self.ovs_dpdk.connection, self.ovs_dpdk.vm_flavor, 'vm_0', 0)
+ self.ovs_dpdk.connection, self.ovs_dpdk.vm_flavor, 'vm-0', 0, self.ovs_dpdk.file_path)
mock_write_file.assert_called_once_with('/tmp/vm_ovs_0.xml', xml_str)
+ mock_check_update_key.assert_called_once_with(self.ovs_dpdk.connection,
+ vnf_instance,
+ xml_str,
+ self.ovs_dpdk._name_task_id,
+ self.ovs_dpdk.file_path,
+ self.ovs_dpdk.mac)
@mock.patch.object(io, 'BytesIO')
def test__check_hugepages(self, mock_bytesio):
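
One detail worth noting in test_setup_ovs_dpdk_context above: setup_hugepages is expected to receive (1024 + 4096) * 1024 KB, i.e. the VM flavor RAM plus the OVS-DPDK socket memory, converted from MB to KB. A small sketch of that arithmetic; the 2048 MB per-socket defaults are inferred from the asserted total, not values visible in this patch, and required_hugepages_kb is a hypothetical helper name.

    def required_hugepages_kb(vm_flavor, ovs_properties):
        ram_mb = int(vm_flavor.get('ram', 1024))
        socket0_mb = int(ovs_properties.get('dpdk_socket0_mem', 2048))  # assumed default
        socket1_mb = int(ovs_properties.get('dpdk_socket1_mem', 2048))  # assumed default
        return (ram_mb + socket0_mb + socket1_mb) * 1024  # MB -> KB

    # Matches the expectation in test_setup_ovs_dpdk_context:
    assert required_hugepages_kb({'ram': '1024'}, {}) == (1024 + 4096) * 1024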
diff --git a/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py b/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
index e70ab0ae8..0809a983a 100644
--- a/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
+++ b/yardstick/tests/unit/benchmark/contexts/standalone/test_sriov.py
@@ -18,8 +18,11 @@ import mock
import unittest
from yardstick import ssh
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts.standalone import model
from yardstick.benchmark.contexts.standalone import sriov
+from yardstick.common import utils
class SriovContextTestCase(unittest.TestCase):
@@ -29,7 +32,7 @@ class SriovContextTestCase(unittest.TestCase):
NODES_DUPLICATE_SAMPLE = "nodes_duplicate_sample.yaml"
ATTRS = {
- 'name': 'StandaloneSriov',
+ 'name': contexts.CONTEXT_STANDALONESRIOV,
'task_id': '1234567890',
'file': 'pod',
'flavor': {},
@@ -61,14 +64,16 @@ class SriovContextTestCase(unittest.TestCase):
self.attrs = {
'name': 'foo',
'task_id': '1234567890',
- 'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE)
+ 'file': self._get_file_abspath(self.NODES_SRIOV_SAMPLE),
}
self.sriov = sriov.SriovContext()
self.addCleanup(self._remove_contexts)
- def _remove_contexts(self):
- if self.sriov in self.sriov.list:
- self.sriov._delete_context()
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
@mock.patch.object(model, 'StandaloneContextHelper')
@mock.patch.object(model, 'Libvirt')
@@ -109,8 +114,8 @@ class SriovContextTestCase(unittest.TestCase):
self.sriov.vm_deploy = True
self.sriov.connection = mock_ssh
- self.sriov.vm_names = ['vm_0', 'vm_1']
- self.sriov.drivers = ['vm_0', 'vm_1']
+ self.sriov.vm_names = ['vm-0', 'vm-1']
+ self.sriov.drivers = ['vm-0', 'vm-1']
self.assertIsNone(self.sriov.undeploy())
def _get_file_abspath(self, filename):
@@ -168,6 +173,22 @@ class SriovContextTestCase(unittest.TestCase):
self.assertEqual(result['user'], 'root')
self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+ def test__get_physical_node_for_server(self):
+ attrs = self.attrs
+ attrs.update({'servers': {'server1': {}}})
+ self.sriov.init(attrs)
+
+ # When server is not from this context
+ result = self.sriov._get_physical_node_for_server('server1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.sriov._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.sriov._get_physical_node_for_server('server1.foo-12345678')
+ self.assertEqual(result, 'node5.foo')
+
def test__get_server_no_task_id(self):
self.attrs['flags'] = {'no_setup': True}
self.sriov.init(self.attrs)
@@ -234,7 +255,7 @@ class SriovContextTestCase(unittest.TestCase):
ssh.return_value = ssh_mock
self.sriov.vm_deploy = True
self.sriov.connection = ssh_mock
- self.sriov.vm_names = ['vm_0', 'vm_1']
+ self.sriov.vm_names = ['vm-0', 'vm-1']
self.sriov.drivers = []
self.sriov.networks = self.NETWORKS
self.sriov.helper.get_mac_address = mock.Mock(return_value="")
@@ -242,25 +263,29 @@ class SriovContextTestCase(unittest.TestCase):
self.assertIsNone(self.sriov.configure_nics_for_sriov())
@mock.patch.object(ssh, 'SSH', return_value=(0, "a", ""))
- @mock.patch.object(model, 'Libvirt')
- def test__enable_interfaces(self, mock_libvirt, mock_ssh):
- # pylint: disable=unused-argument
- # NOTE(ralonsoh): the pylint exception should be removed.
+ @mock.patch.object(model.Libvirt, 'add_sriov_interfaces',
+ return_value='out_xml')
+ def test__enable_interfaces(self, mock_add_sriov, mock_ssh):
self.sriov.vm_deploy = True
self.sriov.connection = mock_ssh
- self.sriov.vm_names = ['vm_0', 'vm_1']
+ self.sriov.vm_names = ['vm-0', 'vm-1']
self.sriov.drivers = []
self.sriov.networks = self.NETWORKS
- self.sriov.get_vf_data = mock.Mock(return_value="")
- self.assertIsNone(self.sriov._enable_interfaces(
- 0, 0, ["private_0"], 'test'))
-
+ self.assertEqual(
+ 'out_xml',
+ self.sriov._enable_interfaces(0, 0, ['private_0'], 'test'))
+ mock_add_sriov.assert_called_once_with(
+ '0000:00:0a.0', 0, self.NETWORKS['private_0']['mac'], 'test')
+
+ @mock.patch.object(utils, 'setup_hugepages')
+ @mock.patch.object(model.StandaloneContextHelper, 'check_update_key')
@mock.patch.object(model.Libvirt, 'build_vm_xml')
@mock.patch.object(model.Libvirt, 'check_if_vm_exists_and_delete')
@mock.patch.object(model.Libvirt, 'write_file')
@mock.patch.object(model.Libvirt, 'virsh_create_vm')
def test_setup_sriov_context(self, mock_create_vm, mock_write_file,
- mock_check, mock_build_vm_xml):
+ mock_check, mock_build_vm_xml,
+ mock_check_update_key, mock_setup_hugepages):
self.sriov.servers = {
'vnf_0': {
'network_ports': {
@@ -273,28 +298,41 @@ class SriovContextTestCase(unittest.TestCase):
connection = mock.Mock()
self.sriov.connection = connection
self.sriov.host_mgmt = {'ip': '1.2.3.4'}
- self.sriov.vm_flavor = 'flavor'
+ self.sriov.vm_flavor = {'ram': '1024'}
self.sriov.networks = 'networks'
self.sriov.configure_nics_for_sriov = mock.Mock()
+ self.sriov._name_task_id = 'fake_name'
cfg = '/tmp/vm_sriov_0.xml'
- vm_name = 'vm_0'
+ vm_name = 'vm-0'
+ mac = '00:00:00:00:00:01'
xml_out = mock.Mock()
- mock_build_vm_xml.return_value = (xml_out, '00:00:00:00:00:01')
+ mock_build_vm_xml.return_value = (xml_out, mac)
+ mock_check_update_key.return_value = 'node_2'
+ cdrom_img = '/var/lib/libvirt/images/cdrom-0.img'
with mock.patch.object(self.sriov, 'vnf_node') as mock_vnf_node, \
- mock.patch.object(self.sriov, '_enable_interfaces'):
+ mock.patch.object(self.sriov, '_enable_interfaces') as \
+ mock_enable_interfaces:
+ mock_enable_interfaces.return_value = 'out_xml'
mock_vnf_node.generate_vnf_instance = mock.Mock(
- return_value='node')
+ return_value='node_1')
nodes_out = self.sriov.setup_sriov_context()
- self.assertEqual(['node'], nodes_out)
+ mock_setup_hugepages.assert_called_once_with(connection, 1024*1024)
+ mock_check_update_key.assert_called_once_with(connection, 'node_1', vm_name,
+ self.sriov._name_task_id, cdrom_img,
+ mac)
+ self.assertEqual(['node_2'], nodes_out)
mock_vnf_node.generate_vnf_instance.assert_called_once_with(
- 'flavor', 'networks', '1.2.3.4', 'vnf_0',
+ self.sriov.vm_flavor, 'networks', '1.2.3.4', 'vnf_0',
self.sriov.servers['vnf_0'], '00:00:00:00:00:01')
mock_build_vm_xml.assert_called_once_with(
- connection, 'flavor', vm_name, 0)
+ connection, self.sriov.vm_flavor, vm_name, 0, cdrom_img)
mock_create_vm.assert_called_once_with(connection, cfg)
mock_check.assert_called_once_with(vm_name, connection)
- mock_write_file.assert_called_once_with(cfg, xml_out)
+ mock_write_file.assert_called_once_with(cfg, 'out_xml')
+ mock_enable_interfaces.assert_has_calls([
+ mock.call(0, mock.ANY, ['private_0'], mock.ANY),
+ mock.call(0, mock.ANY, ['public_0'], mock.ANY)], any_order=True)
def test__get_vf_data(self):
with mock.patch("yardstick.ssh.SSH") as ssh:
@@ -306,7 +344,7 @@ class SriovContextTestCase(unittest.TestCase):
ssh.return_value = ssh_mock
self.sriov.vm_deploy = True
self.sriov.connection = ssh_mock
- self.sriov.vm_names = ['vm_0', 'vm_1']
+ self.sriov.vm_names = ['vm-0', 'vm-1']
self.sriov.drivers = []
self.sriov.servers = {
'vnf_0': {
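
The final assertion in test_setup_sriov_context relies on two mock features that allow order-insensitive, partially specified checks: mock.ANY compares equal to any value, and assert_has_calls(..., any_order=True) only verifies that each expected call occurred, not the sequence. A self-contained illustration:

    import mock  # third-party mock, as imported throughout these tests

    m = mock.Mock()
    m(0, '<xml-a/>', ['private_0'], '/tmp/vm_sriov_0.xml')
    m(0, '<xml-b/>', ['public_0'], '/tmp/vm_sriov_0.xml')

    # Passes although the expected calls are listed in the opposite order
    # and two of the four arguments are left unconstrained.
    m.assert_has_calls([
        mock.call(0, mock.ANY, ['public_0'], mock.ANY),
        mock.call(0, mock.ANY, ['private_0'], mock.ANY)],
        any_order=True)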
diff --git a/yardstick/tests/unit/benchmark/contexts/test_base.py b/yardstick/tests/unit/benchmark/contexts/test_base.py
index 153c6a527..5fd7352f5 100644
--- a/yardstick/tests/unit/benchmark/contexts/test_base.py
+++ b/yardstick/tests/unit/benchmark/contexts/test_base.py
@@ -12,12 +12,50 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import unittest
+import os
+import errno
+
+import mock
from yardstick.benchmark.contexts import base
+from yardstick.benchmark.contexts.base import Context
+from yardstick.common import yaml_loader
+from yardstick.tests.unit import base as ut_base
+from yardstick.common.constants import YARDSTICK_ROOT_PATH
+
+
+class DummyContextClass(Context):
+
+ __context_type__ = "Dummy"
+
+ def __init__(self, host_name_separator='.'):
+ super(DummyContextClass, self).__init__(
+ host_name_separator=host_name_separator)
+ self.nodes = []
+ self.controllers = []
+ self.computes = []
+ self.baremetals = []
+
+ def _get_network(self, *args):
+ pass
+
+ def _get_server(self, *args):
+ pass
+
+ def deploy(self):
+ pass
+ def undeploy(self):
+ pass
-class FlagsTestCase(unittest.TestCase):
+ def _get_physical_nodes(self):
+ pass
+
+ def _get_physical_node_for_server(self, server_name):
+ pass
+
+
+class FlagsTestCase(ut_base.BaseUnitTestCase):
def setUp(self):
self.flags = base.Flags()
@@ -25,6 +63,7 @@ class FlagsTestCase(unittest.TestCase):
def test___init__(self):
self.assertFalse(self.flags.no_setup)
self.assertFalse(self.flags.no_teardown)
+ self.assertEqual({'verify': False}, self.flags.os_cloud_config)
def test___init__with_flags(self):
flags = base.Flags(no_setup=True)
@@ -32,12 +71,104 @@ class FlagsTestCase(unittest.TestCase):
self.assertFalse(flags.no_teardown)
def test_parse(self):
- self.flags.parse(no_setup=True, no_teardown="False")
+ self.flags.parse(no_setup=True, no_teardown='False',
+ os_cloud_config={'verify': True})
self.assertTrue(self.flags.no_setup)
- self.assertEqual(self.flags.no_teardown, "False")
+ self.assertEqual('False', self.flags.no_teardown)
+ self.assertEqual({'verify': True}, self.flags.os_cloud_config)
def test_parse_forbidden_flags(self):
self.flags.parse(foo=42)
with self.assertRaises(AttributeError):
_ = self.flags.foo
+
+
+class ContextTestCase(ut_base.BaseUnitTestCase):
+
+ @staticmethod
+ def _remove_ctx(ctx_obj):
+ if ctx_obj in base.Context.list:
+ base.Context.list.remove(ctx_obj)
+
+ def test_split_host_name(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name.ctx_name'
+ self.assertEqual(('host_name', 'ctx_name'),
+ ctx_obj.split_host_name(config_name))
+
+ def test_split_host_name_wrong_separator(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name-ctx_name'
+ self.assertEqual((None, None),
+ ctx_obj.split_host_name(config_name))
+
+ def test_split_host_name_other_separator(self):
+ ctx_obj = DummyContextClass(host_name_separator='-')
+ self.addCleanup(self._remove_ctx, ctx_obj)
+ config_name = 'host_name-ctx_name'
+ self.assertEqual(('host_name', 'ctx_name'),
+ ctx_obj.split_host_name(config_name))
+
+ def test_get_physical_nodes(self):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+
+ result = Context.get_physical_nodes()
+
+ self.assertEqual(result, {None: None})
+
+ @mock.patch.object(Context, 'get_context_from_server')
+ def test_get_physical_node_from_server(self, mock_get_ctx):
+ ctx_obj = DummyContextClass()
+ self.addCleanup(self._remove_ctx, ctx_obj)
+
+ mock_get_ctx.return_value = ctx_obj
+
+ result = Context.get_physical_node_from_server("mock_server")
+
+ mock_get_ctx.assert_called_once()
+ self.assertIsNone(result)
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ def test_read_pod_file(self, mock_read_yaml_file):
+ attrs = {'name': 'foo',
+ 'task_id': '12345678',
+ 'file': 'pod.yaml'
+ }
+
+ ctx_obj = DummyContextClass()
+ cfg = {"nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ },
+ {
+ "name": "node2",
+ "role": "Compute",
+ "ip": "10.229.47.139",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ }
+ ]
+ }
+
+ mock_read_yaml_file.return_value = cfg
+ result = ctx_obj.read_pod_file(attrs)
+ self.assertEqual(result, cfg)
+
+ mock_read_yaml_file.side_effect = IOError(errno.EPERM, '')
+ with self.assertRaises(IOError):
+ ctx_obj.read_pod_file(attrs)
+
+ mock_read_yaml_file.side_effect = IOError(errno.ENOENT, '')
+ with self.assertRaises(IOError):
+ ctx_obj.read_pod_file(attrs)
+
+ file_path = os.path.join(YARDSTICK_ROOT_PATH, 'pod.yaml')
+ self.assertEqual(ctx_obj.file_path, file_path)
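
test_read_pod_file drives read_yaml_file to fail with two different errno values and then checks that file_path was re-rooted under YARDSTICK_ROOT_PATH. A sketch of the fallback logic those three checks appear to exercise; the real method lives in yardstick.benchmark.contexts.base and is not shown in this patch.

    import errno
    import os

    YARDSTICK_ROOT_PATH = '/opt/yardstick/'  # stand-in for the real constant

    def read_pod_file(read_yaml_file, file_path):
        try:
            return read_yaml_file(file_path)
        except IOError as io_error:
            if io_error.errno != errno.ENOENT:
                raise  # EPERM and friends propagate, as the test expects
        # Not found: retry relative to the yardstick tree. If the file is
        # missing there too, the second IOError escapes, as the test expects.
        return read_yaml_file(os.path.join(YARDSTICK_ROOT_PATH, file_path))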
diff --git a/yardstick/tests/unit/benchmark/contexts/test_dummy.py b/yardstick/tests/unit/benchmark/contexts/test_dummy.py
index e393001a1..33832375f 100644
--- a/yardstick/tests/unit/benchmark/contexts/test_dummy.py
+++ b/yardstick/tests/unit/benchmark/contexts/test_dummy.py
@@ -9,6 +9,7 @@
import unittest
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts import dummy
@@ -20,7 +21,12 @@ class DummyContextTestCase(unittest.TestCase):
'task_id': '1234567890',
}
self.test_context = dummy.DummyContext()
- self.addCleanup(self.test_context._delete_context)
+ self.addCleanup(self._delete_contexts)
+
+ @staticmethod
+ def _delete_contexts():
+ for context in base.Context.list:
+ context._delete_context()
def test___init__(self):
self.assertFalse(self.test_context._flags.no_setup)
@@ -70,3 +76,11 @@ class DummyContextTestCase(unittest.TestCase):
self.assertEqual(result, None)
self.test_context.undeploy()
+
+ def test__get_physical_nodes(self):
+ result = self.test_context._get_physical_nodes()
+ self.assertIsNone(result)
+
+ def test__get_physical_node_for_server(self):
+ result = self.test_context._get_physical_node_for_server("fake")
+ self.assertIsNone(result)
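
The _delete_contexts cleanup added here is the same pattern adopted by the standalone and kubernetes test cases in this patch: Context objects register themselves on a class-level list, so each test drains and resets that list to keep contexts from leaking between test methods. A toy model of the registry (the real base.Context also carries names, flags and separators):

    # Toy registry, for illustration only.
    class Context(object):
        list = []

        def __init__(self):
            Context.list.append(self)  # every new context self-registers

        def _delete_context(self):
            pass  # real contexts tear down VMs, Heat stacks, pods, ...

    def _remove_contexts():
        for context in Context.list:
            context._delete_context()
        Context.list = []  # reset so the next test starts clean

    Context()
    Context()
    _remove_contexts()
    assert Context.list == []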
diff --git a/yardstick/tests/unit/benchmark/contexts/test_heat.py b/yardstick/tests/unit/benchmark/contexts/test_heat.py
index 625f97bf4..96946cded 100644
--- a/yardstick/tests/unit/benchmark/contexts/test_heat.py
+++ b/yardstick/tests/unit/benchmark/contexts/test_heat.py
@@ -8,18 +8,20 @@
##############################################################################
from collections import OrderedDict
-from itertools import count
import logging
import os
import mock
import unittest
+import collections
from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts import heat
from yardstick.benchmark.contexts import model
from yardstick.common import constants as consts
from yardstick.common import exceptions as y_exc
+from yardstick.common import openstack_utils
+from yardstick.common import yaml_loader
from yardstick import ssh
@@ -28,9 +30,28 @@ LOG = logging.getLogger(__name__)
class HeatContextTestCase(unittest.TestCase):
+ HEAT_POD_SAMPLE = {
+ "nodes": [
+ {
+ "name": "node1",
+ "role": "Controller",
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ },
+ {
+ "name": "node2",
+ "role": "Compute",
+ "ip": "10.229.47.139",
+ "user": "root",
+ "key_filename": "/root/.yardstick_key"
+ }
+ ]
+ }
+
def __init__(self, *args, **kwargs):
+
super(HeatContextTestCase, self).__init__(*args, **kwargs)
- self.name_iter = ('vnf{:03}'.format(x) for x in count(0, step=3))
def setUp(self):
self.test_context = heat.HeatContext()
@@ -53,6 +74,7 @@ class HeatContextTestCase(unittest.TestCase):
self.assertEqual(self.test_context.server_groups, [])
self.assertIsNone(self.test_context.keypair_name)
self.assertIsNone(self.test_context.secgroup_name)
+ self.assertIsNone(self.test_context.security_group)
self.assertEqual(self.test_context._server_map, {})
self.assertIsNone(self.test_context._image)
self.assertIsNone(self.test_context._flavor)
@@ -60,25 +82,32 @@ class HeatContextTestCase(unittest.TestCase):
self.assertIsNone(self.test_context.template_file)
self.assertIsNone(self.test_context.heat_parameters)
self.assertIsNone(self.test_context.key_filename)
+ self.assertTrue(self.test_context.yardstick_gen_key_file)
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
@mock.patch('yardstick.benchmark.contexts.heat.PlacementGroup')
@mock.patch('yardstick.benchmark.contexts.heat.ServerGroup')
@mock.patch('yardstick.benchmark.contexts.heat.Network')
@mock.patch('yardstick.benchmark.contexts.heat.Server')
- def test_init(self, mock_server, mock_network, mock_sg, mock_pg):
+ def test_init(self, mock_server, mock_network, mock_sg, mock_pg,
+ mock_read_yaml):
+ mock_read_yaml.return_value = self.HEAT_POD_SAMPLE
pgs = {'pgrp1': {'policy': 'availability'}}
sgs = {'servergroup1': {'policy': 'affinity'}}
networks = {'bar': {'cidr': '10.0.1.0/24'}}
servers = {'baz': {'floating_ip': True, 'placement': 'pgrp1'}}
attrs = {'name': 'foo',
+ 'file': 'pod.yaml',
'task_id': '1234567890',
'placement_groups': pgs,
'server_groups': sgs,
'networks': networks,
'servers': servers}
- self.test_context.init(attrs)
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
self.assertFalse(self.test_context._flags.no_setup)
self.assertFalse(self.test_context._flags.no_teardown)
@@ -132,16 +161,37 @@ class HeatContextTestCase(unittest.TestCase):
'server_groups': {},
'networks': {},
'servers': {},
+ 'file': "pod.yaml",
'flags': {
'no_setup': True,
'no_teardown': True,
},
}
- self.test_context.init(attrs)
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
+
self.assertTrue(self.test_context._flags.no_setup)
self.assertTrue(self.test_context._flags.no_teardown)
+ def test_init_key_filename(self):
+ attrs = {'name': 'foo',
+ 'file': 'pod.yaml',
+ 'task_id': '1234567890',
+ 'server_groups': {},
+ 'networks': {},
+ 'servers': {},
+ 'heat_template': "/root/clearwater.yaml",
+ 'key_filename': '/etc/yardstick/yardstick.pem'}
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ self.test_context.init(attrs)
+
+ self.assertIsNotNone(self.test_context.key_filename)
+ self.assertFalse(self.test_context.yardstick_gen_key_file)
+
@mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
def test__add_resources_to_template_no_servers(self, mock_template):
self.test_context._name = 'ctx'
@@ -162,7 +212,7 @@ class HeatContextTestCase(unittest.TestCase):
mock_template.add_keypair.assert_called_with(
"ctx-key",
"ctx-12345678")
- mock_template.add_security_group.assert_called_with("ctx-secgroup")
+ mock_template.add_security_group.assert_called_with("ctx-secgroup", None)
mock_template.add_network.assert_called_with(
"ctx-12345678-mynet", 'physnet1', None, None, None, None)
mock_template.add_router.assert_called_with(
@@ -229,12 +279,12 @@ class HeatContextTestCase(unittest.TestCase):
self.assertRaises(y_exc.HeatTemplateError,
self.test_context.deploy)
- mock_path_exists.assert_called_once()
+ mock_path_exists.assert_called()
mock_resources_template.assert_called_once()
@mock.patch.object(os.path, 'exists', return_value=False)
@mock.patch.object(ssh.SSH, 'gen_keys')
- @mock.patch('yardstick.benchmark.contexts.heat.HeatTemplate')
+ @mock.patch.object(heat, 'HeatTemplate')
def test_deploy(self, mock_template, mock_genkeys, mock_path_exists):
self.test_context._name = 'foo'
self.test_context._task_id = '1234567890'
@@ -245,16 +295,17 @@ class HeatContextTestCase(unittest.TestCase):
self.test_context.get_neutron_info = mock.MagicMock()
self.test_context.deploy()
- mock_template.assert_called_with('foo-12345678',
- '/bar/baz/some-heat-file',
- {'image': 'cirros'})
+ mock_template.assert_called_with(
+ 'foo-12345678', template_file='/bar/baz/some-heat-file',
+ heat_parameters={'image': 'cirros'},
+ os_cloud_config=self.test_context._flags.os_cloud_config)
self.assertIsNotNone(self.test_context.stack)
key_filename = ''.join(
[consts.YARDSTICK_ROOT_PATH,
'yardstick/resources/files/yardstick_key-',
self.test_context._name_task_id])
mock_genkeys.assert_called_once_with(key_filename)
- mock_path_exists.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
@mock.patch.object(heat, 'HeatTemplate')
@mock.patch.object(os.path, 'exists', return_value=False)
@@ -280,7 +331,7 @@ class HeatContextTestCase(unittest.TestCase):
'yardstick/resources/files/yardstick_key-',
self.test_context._name])
mock_genkeys.assert_called_once_with(key_filename)
- mock_path_exists.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
@mock.patch.object(heat, 'HeatTemplate')
@mock.patch.object(os.path, 'exists', return_value=False)
@@ -296,7 +347,6 @@ class HeatContextTestCase(unittest.TestCase):
self.test_context._flags.no_setup = True
self.test_context.template_file = '/bar/baz/some-heat-file'
self.test_context.get_neutron_info = mock.MagicMock()
-
self.test_context.deploy()
mock_retrieve_stack.assert_called_once_with(self.test_context._name)
@@ -306,7 +356,7 @@ class HeatContextTestCase(unittest.TestCase):
'yardstick/resources/files/yardstick_key-',
self.test_context._name])
mock_genkeys.assert_called_once_with(key_filename)
- mock_path_exists.assert_called_once_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
@mock.patch.object(heat, 'HeatTemplate', return_value='heat_template')
@mock.patch.object(heat.HeatContext, '_add_resources_to_template')
@@ -334,7 +384,7 @@ class HeatContextTestCase(unittest.TestCase):
'yardstick/resources/files/yardstick_key-',
self.test_context._name_task_id])
mock_genkeys.assert_called_once_with(key_filename)
- mock_path_exists.assert_called_with(key_filename)
+ mock_path_exists.assert_any_call(key_filename)
mock_call_gen_keys = mock.call.gen_keys(key_filename)
mock_call_add_resources = (
@@ -342,6 +392,25 @@ class HeatContextTestCase(unittest.TestCase):
self.assertTrue(mock_manager.mock_calls.index(mock_call_gen_keys) <
mock_manager.mock_calls.index(mock_call_add_resources))
+ @mock.patch.object(heat, 'HeatTemplate')
+ @mock.patch.object(ssh.SSH, 'gen_keys')
+ @mock.patch.object(heat.HeatContext, '_create_new_stack')
+ def test_deploy_with_key_filename_provided(self, mock_create_new_stack,
+ mock_gen_keys, *args):
+ self.test_context._name = 'foo'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context.template_file = '/bar/baz/some-heat-file'
+ self.test_context.heat_parameters = {'image': 'cirros'}
+ self.test_context.yardstick_gen_key_file = False
+ self.test_context.key_filename = '/etc/yardstick/yardstick.pem'
+ self.test_context.get_neutron_info = mock.MagicMock()
+ self.test_context.deploy()
+
+ mock_create_new_stack.assert_called()
+ mock_gen_keys.assert_not_called()
+
def test_check_for_context(self):
pass
# check that the context exists
@@ -674,6 +743,50 @@ class HeatContextTestCase(unittest.TestCase):
result = self.test_context._get_server(attr_name)
self.assertIsNone(result)
+ @mock.patch("yardstick.benchmark.contexts.heat.pkg_resources")
+ def test__get_server_found_dict_found_interfaces_dict(self, *args):
+ """
+ Use HeatContext._get_server to get a server that matches
+ based on a dictionary input.
+ """
+ self.test_context._name = 'bar'
+ self.test_context._task_id = '1234567890'
+ self.test_context._name_task_id = '{}-{}'.format(
+ self.test_context._name, self.test_context._task_id[:8])
+ self.test_context._user = 'bot'
+ self.test_context.stack = mock.Mock()
+ self.test_context.stack.outputs = {
+ 'private_ip': '10.0.0.1',
+ 'public_ip': '127.0.0.1',
+ 'local_mac_addr': '64:00:6a:18:0f:d6',
+ 'private_netmask': '255.255.255.0',
+ 'private_net_name': 'private_network',
+ 'private_net_gateway': '127.0.0.254'
+ }
+
+ attr_name = {
+ 'name': 'foo.bar-12345678',
+ 'private_ip_attr': 'private_ip',
+ 'public_ip_attr': 'public_ip',
+ 'interfaces': {
+ 'data_net': {
+ 'local_ip': 'private_ip',
+ 'local_mac': 'local_mac_addr',
+ 'netmask': 'private_netmask',
+ 'network': 'private_net_name',
+ 'gateway_ip': 'private_net_gateway'
+ }
+ }
+ }
+ self.test_context.key_uuid = 'foo-42'
+ result = self.test_context._get_server(attr_name)
+ self.assertIsInstance(result['interfaces'], collections.Mapping)
+ for key in attr_name.get("interfaces").keys():
+ self.assertEqual(result['interfaces'][key]['local_ip'], '10.0.0.1')
+ self.assertEqual(result['interfaces'][key]['local_mac'], '64:00:6a:18:0f:d6')
+ self.assertEqual(result['interfaces'][key]['netmask'], '255.255.255.0')
+ self.assertEqual(result['interfaces'][key]['gateway_ip'], '127.0.0.254')
+
# TODO: Split this into more granular tests
def test__get_network(self):
network1 = mock.MagicMock()
@@ -725,3 +838,56 @@ class HeatContextTestCase(unittest.TestCase):
}
result = self.test_context._get_network(attr_name)
self.assertDictEqual(result, expected)
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test__get_physical_nodes(self):
+ self.test_context.nodes = {}
+ nodes = self.test_context._get_physical_nodes()
+ self.assertEqual(nodes, {})
+
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
+ def test__get_physical_node_for_server(self, mock_read_yaml):
+ attrs = {'name': 'foo',
+ 'task_id': '12345678',
+ 'file': "pod.yaml",
+ 'servers': {'vnf': {}},
+ 'networks': {'mgmt': {'cidr': '10.0.1.0/24'}}
+ }
+
+ with mock.patch.object(openstack_utils, 'get_shade_client'), \
+ mock.patch.object(openstack_utils, 'get_shade_operator_client'):
+ mock_read_yaml.return_value = self.HEAT_POD_SAMPLE
+ self.test_context.init(attrs)
+
+ with mock.patch('yardstick.common.openstack_utils.get_server') as mock_get_server:
+ mock_get_server.return_value = {'vnf': {}}
+
+ # When server is not from this context
+ result = self.test_context._get_physical_node_for_server('node1.foo-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.test_context._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ mock_munch = mock.Mock()
+ mock_munch.toDict = mock.Mock(return_value={
+ 'OS-EXT-SRV-ATTR:hypervisor_hostname': 'hypervisor_hostname'
+ })
+ mock_get_server.return_value = mock_munch
+
+ hypervisor = mock.Mock()
+ hypervisor.hypervisor_hostname = 'hypervisor_hostname'
+ hypervisor.host_ip = '10.229.47.137'
+
+ self.test_context.operator_client.list_hypervisors = mock.Mock(
+ return_value=[hypervisor])
+
+ mock_get_server.return_value = mock_munch
+
+ result = self.test_context._get_physical_node_for_server('vnf.foo-12345678')
+ self.assertEqual(result, 'node1.foo')
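
test__get_physical_node_for_server above stubs out the whole lookup chain: openstack_utils.get_server returns a Munch-like object whose OS-EXT-SRV-ATTR:hypervisor_hostname is matched against operator_client.list_hypervisors(), and the winning hypervisor's host_ip is matched against the pod-file nodes. A hedged sketch of that chain, with names mirroring the mocks in the test rather than the actual HeatContext implementation:

    def physical_node_for_server(server, hypervisors, pod_nodes, ctx_name):
        # server: Munch-like object with toDict(); hypervisors: objects with
        # hypervisor_hostname/host_ip; pod_nodes: dicts from the pod file.
        hyp_name = server.toDict().get('OS-EXT-SRV-ATTR:hypervisor_hostname')
        host_ip = next((h.host_ip for h in hypervisors
                        if h.hypervisor_hostname == hyp_name), None)
        node = next((n for n in pod_nodes if n.get('ip') == host_ip), None)
        if node is not None:
            return '{}.{}'.format(node['name'], ctx_name)  # e.g. 'node1.foo'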
diff --git a/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py b/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
index 4dd9d40d1..b526e7cc7 100644
--- a/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
+++ b/yardstick/tests/unit/benchmark/contexts/test_kubernetes.py
@@ -7,33 +7,74 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+import collections
+import time
+
import mock
import unittest
+from yardstick.benchmark import contexts
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts import kubernetes
+from yardstick.common import constants
+from yardstick.common import exceptions
+from yardstick.common import kubernetes_utils as k8s_utils
+from yardstick.orchestrator import kubernetes as orchestrator_kubernetes
-context_cfg = {
- 'type': 'Kubernetes',
+CONTEXT_CFG = {
+ 'type': contexts.CONTEXT_KUBERNETES,
'name': 'k8s',
'task_id': '1234567890',
'servers': {
'host': {
'image': 'openretriever/yardstick',
'command': '/bin/bash',
- 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; \
-service ssh restart;while true ; do sleep 10000; done']
+ 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; '
+ 'service ssh restart;while true ; do sleep 10000; done']
},
'target': {
'image': 'openretriever/yardstick',
'command': '/bin/bash',
- 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; \
-service ssh restart;while true ; do sleep 10000; done']
+ 'args': ['-c', 'chmod 700 ~/.ssh; chmod 600 ~/.ssh/*; '
+ 'service ssh restart;while true ; do sleep 10000; done']
}
+ },
+ 'networks': {
+ 'flannel': {
+ 'args': 'flannel_args',
+ 'plugin': 'flannel'
+ },
+ 'sriov01': {
+ 'args': 'sriov_args',
+ 'plugin': 'sriov'
+ },
}
}
-prefix = 'yardstick.benchmark.contexts.kubernetes'
+
+class NodePort(object):
+ def __init__(self):
+ self.node_port = 30000
+ self.port = constants.SSH_PORT
+ self.name = 'port_name'
+ self.protocol = 'TCP'
+ self.target_port = constants.SSH_PORT
+
+
+class Service(object):
+ def __init__(self):
+ self.ports = [NodePort()]
+
+
+class Status(object):
+ def __init__(self):
+ self.pod_ip = '172.16.10.131'
+
+
+class Pod(object):
+ def __init__(self):
+ self.status = Status()
class KubernetesTestCase(unittest.TestCase):
@@ -41,54 +82,60 @@ class KubernetesTestCase(unittest.TestCase):
def setUp(self):
self.k8s_context = kubernetes.KubernetesContext()
self.addCleanup(self._remove_contexts)
- self.k8s_context.init(context_cfg)
+ self.k8s_context.init(CONTEXT_CFG)
- def _remove_contexts(self):
- if self.k8s_context in self.k8s_context.list:
- self.k8s_context._delete_context()
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
@mock.patch.object(kubernetes.KubernetesContext, '_delete_services')
@mock.patch.object(kubernetes.KubernetesContext, '_delete_ssh_key')
@mock.patch.object(kubernetes.KubernetesContext, '_delete_rcs')
@mock.patch.object(kubernetes.KubernetesContext, '_delete_pods')
- def test_undeploy(self,
- mock_delete_pods,
- mock_delete_rcs,
- mock_delete_ssh,
- mock_delete_services):
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_networks')
+ @mock.patch.object(kubernetes.KubernetesContext, '_delete_crd')
+ def test_undeploy(self, mock_delete_crd, mock_delete_networks,
+ mock_delete_pods, mock_delete_rcs,
+ mock_delete_ssh, mock_delete_services):
self.k8s_context.undeploy()
mock_delete_ssh.assert_called_once()
mock_delete_rcs.assert_called_once()
mock_delete_pods.assert_called_once()
mock_delete_services.assert_called_once()
+ mock_delete_networks.assert_called_once()
+ mock_delete_crd.assert_called_once()
@mock.patch.object(kubernetes.KubernetesContext, '_create_services')
@mock.patch.object(kubernetes.KubernetesContext, '_wait_until_running')
- @mock.patch.object(kubernetes.KubernetesTemplate, 'get_rc_pods')
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_pods')
@mock.patch.object(kubernetes.KubernetesContext, '_create_rcs')
@mock.patch.object(kubernetes.KubernetesContext, '_set_ssh_key')
- def test_deploy(self,
- mock_set_ssh_key,
- mock_create_rcs,
- mock_get_rc_pods,
- mock_wait_until_running,
- mock_create_services):
-
- with mock.patch("yardstick.benchmark.contexts.kubernetes.time"):
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_networks')
+ @mock.patch.object(kubernetes.KubernetesContext, '_create_crd')
+ def test_deploy(self, mock_set_ssh_key, mock_create_rcs, mock_get_rc_pods,
+ mock_wait_until_running, mock_create_services,
+ mock_create_networks, mock_create_crd):
+
+ with mock.patch.object(time, 'sleep'):
self.k8s_context.deploy()
mock_set_ssh_key.assert_called_once()
mock_create_rcs.assert_called_once()
mock_create_services.assert_called_once()
mock_get_rc_pods.assert_called_once()
mock_wait_until_running.assert_called_once()
+ mock_create_networks.assert_called_once()
+ mock_create_crd.assert_called_once()
@mock.patch.object(kubernetes, 'paramiko', **{"resource_filename.return_value": ""})
@mock.patch.object(kubernetes, 'pkg_resources', **{"resource_filename.return_value": ""})
@mock.patch.object(kubernetes, 'utils')
@mock.patch.object(kubernetes, 'open', create=True)
- @mock.patch.object(kubernetes.k8s_utils, 'delete_config_map')
- @mock.patch.object(kubernetes.k8s_utils, 'create_config_map')
+ @mock.patch.object(k8s_utils, 'delete_config_map')
+ @mock.patch.object(k8s_utils, 'create_config_map')
def test_ssh_key(self, mock_create, mock_delete, *args):
self.k8s_context._set_ssh_key()
self.k8s_context._delete_ssh_key()
@@ -96,49 +143,32 @@ class KubernetesTestCase(unittest.TestCase):
mock_create.assert_called_once()
mock_delete.assert_called_once()
- @mock.patch.object(kubernetes.k8s_utils, 'read_pod_status')
+ @mock.patch.object(k8s_utils, 'read_pod_status')
def test_wait_until_running(self, mock_read_pod_status):
self.k8s_context.template.pods = ['server']
mock_read_pod_status.return_value = 'Running'
self.k8s_context._wait_until_running()
- @mock.patch.object(kubernetes.k8s_utils, 'get_pod_by_name')
+ @mock.patch.object(k8s_utils, 'get_pod_by_name')
@mock.patch.object(kubernetes.KubernetesContext, '_get_node_ip')
- @mock.patch.object(kubernetes.k8s_utils, 'get_service_by_name')
- def test_get_server(self,
- mock_get_service_by_name,
- mock_get_node_ip,
- mock_get_pod_by_name):
- class Service(object):
- def __init__(self):
- self.name = 'yardstick'
- self.node_port = 30000
-
- class Services(object):
- def __init__(self):
- self.ports = [Service()]
-
- class Status(object):
- def __init__(self):
- self.pod_ip = '172.16.10.131'
-
- class Pod(object):
- def __init__(self):
- self.status = Status()
-
- mock_get_service_by_name.return_value = Services()
+ def test_get_server(self, mock_get_node_ip, mock_get_pod_by_name):
mock_get_pod_by_name.return_value = Pod()
mock_get_node_ip.return_value = '172.16.10.131'
-
- self.assertIsNotNone(self.k8s_context._get_server('server'))
+ with mock.patch.object(self.k8s_context, '_get_service_ports') as \
+ mock_get_sports:
+ mock_get_sports.return_value = [
+ {'port': constants.SSH_PORT, 'node_port': 30000}]
+ server = self.k8s_context._get_server('server_name')
+ self.assertEqual('server_name', server['name'])
+ self.assertEqual(30000, server['ssh_port'])
@mock.patch.object(kubernetes.KubernetesContext, '_create_rc')
def test_create_rcs(self, mock_create_rc):
self.k8s_context._create_rcs()
mock_create_rc.assert_called()
- @mock.patch.object(kubernetes.k8s_utils, 'create_replication_controller')
+ @mock.patch.object(k8s_utils, 'create_replication_controller')
def test_create_rc(self, mock_create_replication_controller):
self.k8s_context._create_rc({})
mock_create_replication_controller.assert_called_once()
@@ -148,22 +178,96 @@ class KubernetesTestCase(unittest.TestCase):
self.k8s_context._delete_rcs()
mock_delete_rc.assert_called()
- @mock.patch.object(kubernetes.k8s_utils, 'delete_replication_controller')
+ @mock.patch.object(k8s_utils, 'delete_replication_controller')
def test_delete_rc(self, mock_delete_replication_controller):
self.k8s_context._delete_rc({})
mock_delete_replication_controller.assert_called_once()
- @mock.patch.object(kubernetes.k8s_utils, 'get_node_list')
+ @mock.patch.object(k8s_utils, 'get_node_list')
def test_get_node_ip(self, mock_get_node_list):
self.k8s_context._get_node_ip()
mock_get_node_list.assert_called_once()
- @mock.patch('yardstick.orchestrator.kubernetes.ServiceObject.create')
+ @mock.patch.object(orchestrator_kubernetes.ServiceNodePortObject, 'create')
def test_create_services(self, mock_create):
self.k8s_context._create_services()
mock_create.assert_called()
- @mock.patch('yardstick.orchestrator.kubernetes.ServiceObject.delete')
+ @mock.patch.object(orchestrator_kubernetes.ServiceNodePortObject, 'delete')
def test_delete_services(self, mock_delete):
self.k8s_context._delete_services()
mock_delete.assert_called()
+
+ def test_init(self):
+ self.k8s_context._delete_context()
+ with mock.patch.object(orchestrator_kubernetes, 'KubernetesTemplate',
+ return_value='fake_template') as mock_k8stemplate:
+ self.k8s_context = kubernetes.KubernetesContext()
+ self.k8s_context.init(CONTEXT_CFG)
+ mock_k8stemplate.assert_called_once_with(self.k8s_context.name,
+ CONTEXT_CFG)
+ self.assertEqual('fake_template', self.k8s_context.template)
+ self.assertEqual(2, len(self.k8s_context._networks))
+ self.assertIn('flannel', self.k8s_context._networks.keys())
+ self.assertIn('sriov01', self.k8s_context._networks.keys())
+
+ def test__get_physical_nodes(self):
+ result = self.k8s_context._get_physical_nodes()
+ self.assertIsNone(result)
+
+ def test__get_physical_node_for_server(self):
+ result = self.k8s_context._get_physical_node_for_server("fake")
+ self.assertIsNone(result)
+
+ def test__get_network(self):
+ networks = collections.OrderedDict([('n1', 'data1'), ('n2', 'data2')])
+ self.k8s_context._networks = networks
+ self.assertEqual({'name': 'n1'}, self.k8s_context._get_network('n1'))
+ self.assertEqual({'name': 'n2'}, self.k8s_context._get_network('n2'))
+ self.assertIsNone(self.k8s_context._get_network('n3'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name')
+ def test__get_interfaces(self, mock_get_rc):
+ rc = orchestrator_kubernetes.ReplicationControllerObject('rc_name')
+ rc._networks = ['net1', 'net2']
+ mock_get_rc.return_value = rc
+ expected = {'net1': {'network_name': 'net1',
+ 'local_mac': None,
+ 'local_ip': None},
+ 'net2': {'network_name': 'net2',
+ 'local_mac': None,
+ 'local_ip': None}}
+ self.assertEqual(expected, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name')
+ def test__get_interfaces_no_networks(self, mock_get_rc):
+ rc = orchestrator_kubernetes.ReplicationControllerObject('rc_name')
+ mock_get_rc.return_value = rc
+ self.assertEqual({}, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(orchestrator_kubernetes.KubernetesTemplate,
+ 'get_rc_by_name', return_value=None)
+ def test__get_interfaces_no_rc(self, *args):
+ self.assertEqual({}, self.k8s_context._get_interfaces('rc_name'))
+
+ @mock.patch.object(k8s_utils, 'get_service_by_name',
+ return_value=Service())
+ def test__get_service_ports(self, mock_get_service_by_name):
+ name = 'rc_name'
+ service_ports = self.k8s_context._get_service_ports(name)
+ mock_get_service_by_name.assert_called_once_with(name + '-service')
+ expected = {'node_port': 30000,
+ 'port': constants.SSH_PORT,
+ 'name': 'port_name',
+ 'protocol': 'TCP',
+ 'target_port': constants.SSH_PORT}
+ self.assertEqual(expected, service_ports[0])
+
+ @mock.patch.object(k8s_utils, 'get_service_by_name',
+ return_value=None)
+ def test__get_service_ports_exception(self, *args):
+ name = 'rc_name'
+ with self.assertRaises(exceptions.KubernetesServiceObjectNotDefined):
+ self.k8s_context._get_service_ports(name)
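
The NodePort, Service and Pod fakes defined at the top of this file are enough because the context code only ever reads attributes from whatever the Kubernetes client hands back. A minimal, self-contained sketch of the same stubbing pattern; FakePort, FakeService and ssh_node_port are illustrative names, not part of Yardstick:

    import mock
    import unittest


    class FakePort(object):
        def __init__(self):
            self.node_port = 30000
            self.port = 22


    class FakeService(object):
        def __init__(self):
            self.ports = [FakePort()]


    def ssh_node_port(client):
        # Only attribute access happens here, so plain stub objects can
        # stand in for the real Kubernetes client return values.
        service = client.get_service_by_name('host-service')
        return service.ports[0].node_port


    class FakeObjectTestCase(unittest.TestCase):
        def test_ssh_node_port(self):
            client = mock.Mock()
            client.get_service_by_name.return_value = FakeService()
            self.assertEqual(30000, ssh_node_port(client))
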
diff --git a/yardstick/tests/unit/benchmark/contexts/test_node.py b/yardstick/tests/unit/benchmark/contexts/test_node.py
index 8b232481b..da16074d9 100644
--- a/yardstick/tests/unit/benchmark/contexts/test_node.py
+++ b/yardstick/tests/unit/benchmark/contexts/test_node.py
@@ -8,12 +8,16 @@
##############################################################################
import os
-import unittest
import errno
+
import mock
+import unittest
-from yardstick.common import constants as consts
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts import node
+from yardstick.common import constants as consts
+from yardstick.common import exceptions
+from yardstick.common import yaml_loader
class NodeContextTestCase(unittest.TestCase):
@@ -33,9 +37,11 @@ class NodeContextTestCase(unittest.TestCase):
'file': self._get_file_abspath(self.NODES_SAMPLE)
}
- def _remove_contexts(self):
- if self.test_context in self.test_context.list:
- self.test_context._delete_context()
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
def _get_file_abspath(self, filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
@@ -52,8 +58,9 @@ class NodeContextTestCase(unittest.TestCase):
self.assertEqual(self.test_context.env, {})
self.assertEqual(self.test_context.attrs, {})
+ @mock.patch.object(yaml_loader, 'read_yaml_file')
@mock.patch('{}.os.path.join'.format(PREFIX))
- def test_init_negative(self, mock_path_join):
+ def test_init_negative(self, mock_path_join, read_mock):
special_path = '/foo/bar/error_file'
error_path = self._get_file_abspath("error_file")
@@ -65,7 +72,6 @@ class NodeContextTestCase(unittest.TestCase):
# we can't count mock_path_join calls because
# it can catch join calls for .pyc files.
mock_path_join.side_effect = path_join
- self.test_context.read_config_file = read_mock = mock.Mock()
read_calls = 0
with self.assertRaises(KeyError):
@@ -83,7 +89,7 @@ class NodeContextTestCase(unittest.TestCase):
self.test_context.init(attrs)
read_calls += 1
- self.assertEqual(read_mock.called, read_calls)
+ self.assertEqual(read_mock.call_count, read_calls)
self.assertIn(attrs['file'], self.test_context.file_path)
self.assertEqual(raised.exception.errno, errno.EBUSY)
self.assertEqual(str(raised.exception), str(read_mock.side_effect))
@@ -98,11 +104,6 @@ class NodeContextTestCase(unittest.TestCase):
self.assertEqual(raised.exception.errno, errno.ENOENT)
self.assertEqual(str(raised.exception), str(read_mock.side_effect))
- def test_read_config_file(self):
- self.test_context.init(self.attrs)
-
- self.assertIsNotNone(self.test_context.read_config_file())
-
def test__dispatch_script(self):
self.test_context.init(self.attrs)
@@ -168,6 +169,39 @@ class NodeContextTestCase(unittest.TestCase):
self.assertEqual(result['user'], 'root')
self.assertEqual(result['key_filename'], '/root/.yardstick_key')
+ def test__get_physical_nodes(self):
+ self.test_context.init(self.attrs)
+ nodes = self.test_context._get_physical_nodes()
+ self.assertEqual(nodes, self.test_context.nodes)
+
+ def test__get_physical_node_for_server(self):
+ self.test_context.init(self.attrs)
+
+ # When server is not from this context
+ result = self.test_context._get_physical_node_for_server('node1.another-context')
+ self.assertIsNone(result)
+
+ # When node_name is not from this context
+ result = self.test_context._get_physical_node_for_server('fake.foo-12345678')
+ self.assertIsNone(result)
+
+ result = self.test_context._get_physical_node_for_server('node1.foo-12345678')
+ self.assertEqual(result, 'node1.foo')
+
+ def test_update_collectd_options_for_node(self):
+ self.test_context.init(self.attrs)
+ options = {'collectd': {'interval': 5}}
+
+ with self.assertRaises(exceptions.ContextUpdateCollectdForNodeError):
+ self.test_context.update_collectd_options_for_node(options, 'fake.foo-12345678')
+
+ self.test_context.update_collectd_options_for_node(options, 'node1.foo-12345678')
+
+ node_collectd_options = [node for node in self.test_context.nodes
+ if node['name'] == 'node1'][0]['collectd']
+
+ self.assertEqual(node_collectd_options, options)
+
@mock.patch('{}.NodeContext._dispatch_script'.format(PREFIX))
def test_deploy(self, dispatch_script_mock):
obj = node.NodeContext()
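
The reworked _remove_contexts cleanup above reflects that base.Context.list is a class-level registry shared across every context instance, so leftovers from one test would leak into the next. A toy sketch of that registry semantics, assuming (as the tests imply) that contexts register themselves and expose _delete_context:

    class Context(object):
        list = []  # class attribute: one registry shared by all instances

        def __init__(self):
            Context.list.append(self)

        def _delete_context(self):
            pass


    def remove_contexts():
        # Drain and reset the shared registry so contexts created by one
        # test cannot bleed into the next.
        for context in Context.list:
            context._delete_context()
        Context.list = []


    Context()
    Context()
    remove_contexts()
    assert Context.list == []
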
diff --git a/yardstick/tests/unit/benchmark/core/test_plugin.py b/yardstick/tests/unit/benchmark/core/test_plugin.py
index 0d14e4e86..53621316b 100644
--- a/yardstick/tests/unit/benchmark/core/test_plugin.py
+++ b/yardstick/tests/unit/benchmark/core/test_plugin.py
@@ -12,6 +12,7 @@ import os
import pkg_resources
import mock
+import six
import testtools
from yardstick import ssh
@@ -48,13 +49,17 @@ deployment:
self.mock_ssh_from_node.return_value = self.mock_ssh_obj
self.mock_ssh_obj.wait = mock.Mock()
self.mock_ssh_obj._put_file_shell = mock.Mock()
+ self._mock_log_info = mock.patch.object(plugin.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
self.addCleanup(self._cleanup)
def _cleanup(self):
self._mock_ssh_from_node.stop()
+ self._mock_log_info.stop()
- def test_install(self):
+ @mock.patch.object(six.moves.builtins, 'print')
+ def test_install(self, *args):
args = mock.Mock()
args.input_file = [mock.Mock()]
with mock.patch.object(self.plugin, '_install_setup') as \
@@ -65,7 +70,8 @@ deployment:
PluginTestCase.DEPLOYMENT)
mock_run.assert_called_once_with(PluginTestCase.NAME)
- def test_remove(self):
+ @mock.patch.object(six.moves.builtins, 'print')
+ def test_remove(self, *args):
args = mock.Mock()
args.input_file = [mock.Mock()]
with mock.patch.object(self.plugin, '_remove_setup') as \
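
The setUp change above starts the plugin.LOG.info patcher manually and pairs it with a cleanup, so the log stays mocked for every test method and is restored even when a test fails. A standalone sketch of the pattern, with QuietLogTestCase as an illustrative name:

    import logging
    import unittest

    import mock

    LOG = logging.getLogger(__name__)


    class QuietLogTestCase(unittest.TestCase):

        def setUp(self):
            # Start the patcher by hand so it spans the whole test, and
            # register stop() so the patch is undone even on failure.
            self._mock_log_info = mock.patch.object(LOG, 'info')
            self.mock_log_info = self._mock_log_info.start()
            self.addCleanup(self._mock_log_info.stop)

        def test_log_is_mocked(self):
            LOG.info('silenced')
            self.mock_log_info.assert_called_once_with('silenced')
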
diff --git a/yardstick/tests/unit/benchmark/core/test_report.py b/yardstick/tests/unit/benchmark/core/test_report.py
index a684ad750..89fb1e90a 100644
--- a/yardstick/tests/unit/benchmark/core/test_report.py
+++ b/yardstick/tests/unit/benchmark/core/test_report.py
@@ -1,5 +1,6 @@
##############################################################################
# Copyright (c) 2017 Rajesh Kudaka.
+# Copyright (c) 2018-2019 Intel Corporation.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
@@ -7,30 +8,155 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for yardstick.benchmark.core.report
-
-from __future__ import print_function
-
-from __future__ import absolute_import
-
+import mock
+import six
import unittest
import uuid
-try:
- from unittest import mock
-except ImportError:
- import mock
-
+from api.utils import influx
from yardstick.benchmark.core import report
from yardstick.cmd.commands import change_osloobj_to_paras
-FAKE_YAML_NAME = 'fake_name'
-FAKE_TASK_ID = str(uuid.uuid4())
-FAKE_DB_FIELDKEYS = [{'fieldKey': 'fake_key'}]
-FAKE_TIME = '0000-00-00T00:00:00.000000Z'
-FAKE_DB_TASK = [{'fake_key': 0.000, 'time': FAKE_TIME}]
-FAKE_TIMESTAMP = ['fake_time']
-DUMMY_TASK_ID = 'aaaaaa-aaaaaaaa-aaaaaaaaaa-aaaaaa'
+GOOD_YAML_NAME = 'fake_name'
+GOOD_TASK_ID = str(uuid.uuid4())
+GOOD_DB_FIELDKEYS = [{'fieldKey': 'fake_key'}]
+GOOD_DB_METRICS = [{
+ 'fake_key': 1.234,
+ 'time': '0000-00-00T12:34:56.789012Z',
+ }]
+GOOD_TIMESTAMP = ['12:34:56.789012']
+BAD_YAML_NAME = 'F@KE_NAME'
+BAD_TASK_ID = 'aaaaaa-aaaaaaaa-aaaaaaaaaa-aaaaaa'
+MORE_DB_FIELDKEYS = [
+ {'fieldKey': 'fake_key'},
+ {'fieldKey': 'str_str'},
+ {'fieldKey': u'str_unicode'},
+ {u'fieldKey': 'unicode_str'},
+ {u'fieldKey': u'unicode_unicode'},
+ ]
+MORE_DB_METRICS = [{
+ 'fake_key': None,
+ 'time': '0000-00-00T00:00:00.000000Z',
+ }, {
+ 'fake_key': 123,
+ 'time': '0000-00-00T00:00:01.000000Z',
+ }, {
+ 'fake_key': 4.56,
+ 'time': '0000-00-00T00:00:02.000000Z',
+ }, {
+ 'fake_key': 9876543210987654321,
+ 'time': '0000-00-00T00:00:03.000000Z',
+ }, {
+ 'fake_key': 'str_str value',
+ 'time': '0000-00-00T00:00:04.000000Z',
+ }, {
+ 'fake_key': u'str_unicode value',
+ 'time': '0000-00-00T00:00:05.000000Z',
+ }, {
+ u'fake_key': 'unicode_str value',
+ 'time': '0000-00-00T00:00:06.000000Z',
+ }, {
+ u'fake_key': u'unicode_unicode value',
+ 'time': '0000-00-00T00:00:07.000000Z',
+ }, {
+ 'fake_key': '7.89',
+ 'time': '0000-00-00T00:00:08.000000Z',
+ }, {
+ 'fake_key': '1011',
+ 'time': '0000-00-00T00:00:09.000000Z',
+ }, {
+ 'fake_key': '9876543210123456789',
+ 'time': '0000-00-00T00:00:10.000000Z',
+ }]
+MORE_TIMESTAMP = ['00:00:%02d.000000' % n for n in range(len(MORE_DB_METRICS))]
+MORE_EMPTY_DATA = [None] * len(MORE_DB_METRICS)
+MORE_EXPECTED_TABLE_VALS = {
+ 'Timestamp': MORE_TIMESTAMP,
+ 'fake_key': [
+ None,
+ 123,
+ 4.56,
+ 9876543210987654321 if six.PY3 else 9.876543210987655e+18,
+ None,
+ None,
+ None,
+ None,
+ 7.89,
+ 1011,
+ 9876543210123456789 if six.PY3 else 9.876543210123457e+18,
+ ],
+ 'str_str': MORE_EMPTY_DATA,
+ 'str_unicode': MORE_EMPTY_DATA,
+ 'unicode_str': MORE_EMPTY_DATA,
+ 'unicode_unicode': MORE_EMPTY_DATA,
+ }
+MORE_EXPECTED_DATASETS = [{
+ 'label': key,
+ 'data': MORE_EXPECTED_TABLE_VALS[key],
+ }
+ for key in map(str, [field['fieldKey'] for field in MORE_DB_FIELDKEYS])
+ ]
+
+
+class JSTreeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.jstree = report.JSTree()
+
+ def test__create_node(self):
+ _id = "tg__0.DropPackets"
+
+ expected_data = [
+ {"id": "tg__0", "text": "tg__0", "parent": "#"},
+ {"id": "tg__0.DropPackets", "text": "DropPackets", "parent": "tg__0"}
+ ]
+ self.jstree._create_node(_id)
+
+ self.assertEqual(self.jstree._created_nodes, ['#', 'tg__0', 'tg__0.DropPackets'])
+ self.assertEqual(self.jstree.jstree_data, expected_data)
+
+ def test_format_for_jstree(self):
+ data = [
+ 'tg__0.DropPackets',
+ 'tg__0.LatencyAvg.5', 'tg__0.LatencyAvg.6',
+ 'tg__0.LatencyMax.5', 'tg__0.LatencyMax.6',
+ 'tg__0.RxThroughput', 'tg__0.TxThroughput',
+ 'tg__1.DropPackets',
+ 'tg__1.LatencyAvg.5', 'tg__1.LatencyAvg.6',
+ 'tg__1.LatencyMax.5', 'tg__1.LatencyMax.6',
+ 'tg__1.RxThroughput', 'tg__1.TxThroughput',
+ 'vnf__0.curr_packets_in', 'vnf__0.packets_dropped', 'vnf__0.packets_fwd',
+ ]
+
+ expected_output = [
+ {"id": "tg__0", "text": "tg__0", "parent": "#"},
+ {"id": "tg__0.DropPackets", "text": "DropPackets", "parent": "tg__0"},
+ {"id": "tg__0.LatencyAvg", "text": "LatencyAvg", "parent": "tg__0"},
+ {"id": "tg__0.LatencyAvg.5", "text": "5", "parent": "tg__0.LatencyAvg"},
+ {"id": "tg__0.LatencyAvg.6", "text": "6", "parent": "tg__0.LatencyAvg"},
+ {"id": "tg__0.LatencyMax", "text": "LatencyMax", "parent": "tg__0"},
+ {"id": "tg__0.LatencyMax.5", "text": "5", "parent": "tg__0.LatencyMax"},
+ {"id": "tg__0.LatencyMax.6", "text": "6", "parent": "tg__0.LatencyMax"},
+ {"id": "tg__0.RxThroughput", "text": "RxThroughput", "parent": "tg__0"},
+ {"id": "tg__0.TxThroughput", "text": "TxThroughput", "parent": "tg__0"},
+ {"id": "tg__1", "text": "tg__1", "parent": "#"},
+ {"id": "tg__1.DropPackets", "text": "DropPackets", "parent": "tg__1"},
+ {"id": "tg__1.LatencyAvg", "text": "LatencyAvg", "parent": "tg__1"},
+ {"id": "tg__1.LatencyAvg.5", "text": "5", "parent": "tg__1.LatencyAvg"},
+ {"id": "tg__1.LatencyAvg.6", "text": "6", "parent": "tg__1.LatencyAvg"},
+ {"id": "tg__1.LatencyMax", "text": "LatencyMax", "parent": "tg__1"},
+ {"id": "tg__1.LatencyMax.5", "text": "5", "parent": "tg__1.LatencyMax"},
+ {"id": "tg__1.LatencyMax.6", "text": "6", "parent": "tg__1.LatencyMax"},
+ {"id": "tg__1.RxThroughput", "text": "RxThroughput", "parent": "tg__1"},
+ {"id": "tg__1.TxThroughput", "text": "TxThroughput", "parent": "tg__1"},
+ {"id": "vnf__0", "text": "vnf__0", "parent": "#"},
+ {"id": "vnf__0.curr_packets_in", "text": "curr_packets_in", "parent": "vnf__0"},
+ {"id": "vnf__0.packets_dropped", "text": "packets_dropped", "parent": "vnf__0"},
+ {"id": "vnf__0.packets_fwd", "text": "packets_fwd", "parent": "vnf__0"},
+ ]
+
+ result = self.jstree.format_for_jstree(data)
+ self.assertEqual(expected_output, result)
class ReportTestCase(unittest.TestCase):
@@ -38,37 +164,421 @@ class ReportTestCase(unittest.TestCase):
def setUp(self):
super(ReportTestCase, self).setUp()
self.param = change_osloobj_to_paras({})
- self.param.yaml_name = [FAKE_YAML_NAME]
- self.param.task_id = [FAKE_TASK_ID]
+ self.param.yaml_name = [GOOD_YAML_NAME]
+ self.param.task_id = [GOOD_TASK_ID]
self.rep = report.Report()
- @mock.patch('yardstick.benchmark.core.report.Report._get_tasks')
- @mock.patch('yardstick.benchmark.core.report.Report._get_fieldkeys')
- @mock.patch('yardstick.benchmark.core.report.Report._validate')
- def test_generate_success(self, mock_valid, mock_keys, mock_tasks):
- mock_tasks.return_value = FAKE_DB_TASK
- mock_keys.return_value = FAKE_DB_FIELDKEYS
- self.rep.generate(self.param)
- mock_valid.assert_called_once_with(FAKE_YAML_NAME, FAKE_TASK_ID)
- self.assertEqual(1, mock_tasks.call_count)
- self.assertEqual(1, mock_keys.call_count)
-
- # pylint: disable=deprecated-method
- def test_invalid_yaml_name(self):
- self.assertRaisesRegexp(ValueError, "yaml*", self.rep._validate,
- 'F@KE_NAME', FAKE_TASK_ID)
-
- # pylint: disable=deprecated-method
- def test_invalid_task_id(self):
- self.assertRaisesRegexp(ValueError, "task*", self.rep._validate,
- FAKE_YAML_NAME, DUMMY_TASK_ID)
-
- @mock.patch('api.utils.influx.query')
- def test_task_not_found(self, mock_query):
+ def test___init__(self):
+ self.assertEqual([], self.rep.Timestamp)
+ self.assertEqual("", self.rep.yaml_name)
+ self.assertEqual("", self.rep.task_id)
+
+ def test__validate(self):
+ self.rep._validate(GOOD_YAML_NAME, GOOD_TASK_ID)
+ self.assertEqual(GOOD_YAML_NAME, self.rep.yaml_name)
+ self.assertEqual(GOOD_TASK_ID, str(self.rep.task_id))
+
+ def test__validate_invalid_yaml_name(self):
+ with six.assertRaisesRegex(self, ValueError, "yaml*"):
+ self.rep._validate(BAD_YAML_NAME, GOOD_TASK_ID)
+
+ def test__validate_invalid_task_id(self):
+ with six.assertRaisesRegex(self, ValueError, "task*"):
+ self.rep._validate(GOOD_YAML_NAME, BAD_TASK_ID)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys(self, mock_query):
+ mock_query.return_value = GOOD_DB_FIELDKEYS
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.assertEqual(GOOD_DB_FIELDKEYS, self.rep._get_fieldkeys())
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys_nodbclient(self, mock_query):
+ mock_query.side_effect = RuntimeError
+ self.assertRaises(RuntimeError, self.rep._get_fieldkeys)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_fieldkeys_testcase_not_found(self, mock_query):
mock_query.return_value = []
- self.rep.yaml_name = FAKE_YAML_NAME
- self.rep.task_id = FAKE_TASK_ID
- # pylint: disable=deprecated-method
- self.assertRaisesRegexp(KeyError, "Task ID", self.rep._get_fieldkeys)
- self.assertRaisesRegexp(KeyError, "Task ID", self.rep._get_tasks)
- # pylint: enable=deprecated-method
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ six.assertRaisesRegex(self, KeyError, "Test case", self.rep._get_fieldkeys)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_metrics(self, mock_query):
+ mock_query.return_value = GOOD_DB_METRICS
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.assertEqual(GOOD_DB_METRICS, self.rep._get_metrics())
+
+ @mock.patch.object(influx, 'query')
+ def test__get_metrics_task_not_found(self, mock_query):
+ mock_query.return_value = []
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ six.assertRaisesRegex(self, KeyError, "Task ID", self.rep._get_metrics)
+
+ @mock.patch.object(influx, 'query')
+ def test__get_task_start_time(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ mock_query.return_value = [{
+ u'free.memory0.used': u'9789088',
+ u'free.memory0.available': u'22192984',
+ u'free.memory0.shared': u'219152',
+ u'time': u'2019-01-22T16:20:14.568075776Z',
+ }]
+ expected = "2019-01-22T16:20:14.568075776Z"
+
+ self.assertEqual(
+ expected,
+ self.rep._get_task_start_time()
+ )
+
+ def test__get_task_start_time_task_not_found(self):
+ pass
+
+ @mock.patch.object(influx, 'query')
+ def test__get_task_end_time(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ # TODO(elfoley): write this test!
+ mock_query.return_value = [{
+
+ }]
+
+ @mock.patch.object(influx, 'query')
+ def test__get_baro_metrics(self, mock_query):
+ self.rep.yaml_name = GOOD_YAML_NAME
+ self.rep.task_id = GOOD_TASK_ID
+ self.rep._get_task_start_time = mock.Mock(return_value=0)
+ self.rep._get_task_end_time = mock.Mock(return_value=0)
+
+ influx_return_values = ([{
+ u'value': 324050, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383698038Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193798, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383712594Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }, {
+ u'value': 324051, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383696624Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193800, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383713481Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }, {
+ u'value': 324054, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.3836966789Z',
+ u'type_instance': u'user', u'type': u'cpu',
+ }, {
+ u'value': 193801, u'instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.383716296Z',
+ u'type_instance': u'system', u'type': u'cpu',
+ }],
+ [{
+ u'value': 3598453000, u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:25.383698038Z',
+ u'type_instance': u'0', u'type': u'cpufreq',
+ }, {
+ u'value': 3530250000, u'type_instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:35.383712594Z', u'type': u'cpufreq',
+ }, {
+ u'value': 3600281000, u'type_instance': u'0', u'host': u'myhostname',
+ u'time': u'2018-12-19T14:11:45.383696624Z', u'type': u'cpufreq',
+ }],
+ )
+
+ def ret_vals(vals):
+ for x in vals:
+ yield x
+ while True:
+ yield []
+
+ mock_query.side_effect = ret_vals(influx_return_values)
+
+ BARO_EXPECTED_METRICS = {
+ 'Timestamp': [
+ '14:11:25.3836', '14:11:25.3837',
+ '14:11:35.3836', '14:11:35.3837',
+ '14:11:45.3836', '14:11:45.3837'],
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.3836': 324050,
+ '14:11:35.3836': 324051,
+ '14:11:45.3836': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.3837': 193798,
+ '14:11:35.3837': 193800,
+ '14:11:45.3837': 193801,
+ },
+ 'myhostname.cpufreq_value.cpufreq.0': {
+ '14:11:25.3836': 3598453000,
+ '14:11:35.3837': 3530250000,
+ '14:11:45.3836': 3600281000,
+ }
+ }
+ self.assertEqual(
+ BARO_EXPECTED_METRICS,
+ self.rep._get_baro_metrics()
+ )
+
+ def test__get_timestamps(self):
+
+ metrics = MORE_DB_METRICS
+ self.assertEqual(
+ MORE_TIMESTAMP,
+ self.rep._get_timestamps(metrics)
+ )
+
+ def test__format_datasets(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory1.free': u'1958664',
+ u'free.memory0.used': u'9789560',
+ }, {
+ u'free.memory1.free': u'1958228',
+ u'free.memory0.used': u'9789790',
+ }, {
+ u'free.memory1.free': u'1956156',
+ u'free.memory0.used': u'9791092',
+ }, {
+ u'free.memory1.free': u'1956280',
+ u'free.memory0.used': u'9790796',
+ }]
+ self.assertEqual(
+            [9789560, 9789790, 9791092, 9790796],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__format_datasets_val_none(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory1.free': u'1958664',
+ u'free.memory0.used': 9876543109876543210,
+ }, {
+ u'free.memory1.free': u'1958228',
+ }, {
+ u'free.memory1.free': u'1956156',
+ u'free.memory0.used': u'9791092',
+ }, {
+ u'free.memory1.free': u'1956280',
+ u'free.memory0.used': u'9790796',
+ }]
+
+ exp0 = 9876543109876543210 if six.PY3 else 9.876543109876543e+18
+ self.assertEqual(
+ [exp0, None, 9791092, 9790796],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__format_datasets_val_incompatible(self):
+ metric_name = "free.memory0.used"
+ metrics = [{
+ u'free.memory0.used': "some incompatible value",
+ }, {
+ }]
+ self.assertEqual(
+ [None, None],
+ self.rep._format_datasets(metric_name, metrics)
+ )
+
+ def test__combine_times(self):
+ yard_times = [
+ '00:00:00.000000',
+ '00:00:01.000000',
+ '00:00:02.000000',
+ '00:00:06.000000',
+ '00:00:08.000000',
+ '00:00:09.000000',
+ ]
+ baro_times = [
+ '00:00:01.000000',
+ '00:00:03.000000',
+ '00:00:04.000000',
+ '00:00:05.000000',
+ '00:00:07.000000',
+ '00:00:10.000000',
+ ]
+ expected_combo = [
+ '00:00:00.000000',
+ '00:00:01.000000',
+ '00:00:02.000000',
+ '00:00:03.000000',
+ '00:00:04.000000',
+ '00:00:05.000000',
+ '00:00:06.000000',
+ '00:00:07.000000',
+ '00:00:08.000000',
+ '00:00:09.000000',
+ '00:00:10.000000',
+ ]
+
+ actual_combo = self.rep._combine_times(yard_times, baro_times)
+ self.assertEqual(len(expected_combo), len(actual_combo))
+
+ self.assertEqual(
+ expected_combo,
+ actual_combo,
+ )
+
+ def test__combine_times_2(self):
+ time1 = ['14:11:25.383698', '14:11:25.383712', '14:11:35.383696',]
+ time2 = [
+ '16:20:14.568075', '16:20:24.575083',
+ '16:20:34.580989', '16:20:44.586801', ]
+ time_exp = [
+ '14:11:25.383698', '14:11:25.383712', '14:11:35.383696',
+ '16:20:14.568075', '16:20:24.575083', '16:20:34.580989',
+ '16:20:44.586801',
+ ]
+ self.assertEqual(time_exp, self.rep._combine_times(time1, time2))
+
+ def test__combine_metrics(self):
+ BARO_METRICS = {
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.3836': 324050, '14:11:35.3836': 324051,
+ '14:11:45.3836': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.3837': 193798, '14:11:35.3837': 193800,
+ '14:11:45.3837': 193801,
+ }
+ }
+ BARO_TIMES = [
+ '14:11:25.3836', '14:11:25.3837', '14:11:35.3836',
+ '14:11:35.3837', '14:11:45.3836', '14:11:45.3837',
+ ]
+ YARD_METRICS = {
+ 'free.memory9.free': {
+ '16:20:14.5680': 1958244, '16:20:24.5750': 1955964,
+ '16:20:34.5809': 1956040, '16:20:44.5868': 1956428,
+ },
+ 'free.memory7.used': {
+ '16:20:14.5680': 9789068, '16:20:24.5750': 9791284,
+ '16:20:34.5809': 9791228, '16:20:44.5868': 9790692,
+ },
+            'free.memory2.total': {
+ '16:20:14.5680': 32671288, '16:20:24.5750': 32671288,
+ '16:20:34.5809': 32671288, '16:20:44.5868': 32671288,
+ },
+ 'free.memory7.free': {
+ '16:20:14.5680': 1958368, '16:20:24.5750': 1956104,
+ '16:20:34.5809': 1956040, '16:20:44.5868': 1956552,
+ },
+ 'free.memory1.used': {
+ '16:20:14.5680': 9788872, '16:20:24.5750': 9789212,
+ '16:20:34.5809': 9791168, '16:20:44.5868': 9790996,
+ },
+ }
+ YARD_TIMES = [
+ '16:20:14.5680', '16:20:24.5750',
+ '16:20:34.5809', '16:20:44.5868',
+ ]
+
+ expected_output = {
+ 'myhostname.cpu_value.cpu.user.0': [{
+ 'x': '14:11:25.3836', 'y': 324050, }, {
+ 'x': '14:11:35.3836', 'y': 324051, }, {
+ 'x': '14:11:45.3836', 'y': 324054, }],
+            'myhostname.cpu_value.cpu.system.0': [{
+ 'x': '14:11:25.3837', 'y': 193798, }, {
+ 'x': '14:11:35.3837', 'y': 193800, }, {
+ 'x': '14:11:45.3837', 'y': 193801, }],
+ 'free.memory9.free': [{
+ 'x': '16:20:14.5680', 'y': 1958244, }, {
+ 'x': '16:20:24.5750', 'y': 1955964, }, {
+ 'x': '16:20:34.5809', 'y': 1956040, }, {
+ 'x': '16:20:44.5868', 'y': 1956428, }],
+ 'free.memory7.used': [{
+ 'x': '16:20:14.5680', 'y': 9789068, }, {
+ 'x': '16:20:24.5750', 'y': 9791284, }, {
+ 'x': '16:20:34.5809', 'y': 9791228, }, {
+ 'x': '16:20:44.5868', 'y': 9790692, }],
+ 'free.memory2.total': [{
+ 'x': '16:20:14.5680', 'y': 32671288, }, {
+ 'x': '16:20:24.5750', 'y': 32671288, }, {
+ 'x': '16:20:34.5809', 'y': 32671288, }, {
+ 'x': '16:20:44.5868', 'y': 32671288, }],
+ 'free.memory7.free': [{
+ 'x': '16:20:14.5680', 'y': 1958368, }, {
+ 'x': '16:20:24.5750', 'y': 1956104, }, {
+ 'x': '16:20:34.5809', 'y': 1956040, }, {
+ 'x': '16:20:44.5868', 'y': 1956552, }],
+ 'free.memory1.used': [{
+ 'x': '16:20:14.5680', 'y': 9788872, }, {
+ 'x': '16:20:24.5750', 'y': 9789212, }, {
+ 'x': '16:20:34.5809', 'y': 9791168, }, {
+ 'x': '16:20:44.5868', 'y': 9790996, }],
+ }
+
+ actual_output, _, _ = self.rep._combine_metrics(
+ BARO_METRICS, BARO_TIMES, YARD_METRICS, YARD_TIMES
+ )
+        self.assertEqual(
+ sorted(expected_output.keys()),
+ sorted(actual_output.keys())
+ )
+
+        self.assertEqual(
+ expected_output,
+ actual_output,
+ )
+
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ def test__generate_common(self, mock_keys, mock_metrics):
+ mock_metrics.return_value = MORE_DB_METRICS
+ mock_keys.return_value = MORE_DB_FIELDKEYS
+ datasets, table_vals = self.rep._generate_common(self.param)
+ self.assertEqual(MORE_EXPECTED_DATASETS, datasets)
+ self.assertEqual(MORE_EXPECTED_TABLE_VALS, table_vals)
+
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ @mock.patch.object(report.Report, '_validate')
+ def test_generate(self, mock_valid, mock_keys, mock_metrics):
+ mock_metrics.return_value = GOOD_DB_METRICS
+ mock_keys.return_value = GOOD_DB_FIELDKEYS
+ self.rep.generate(self.param)
+ mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
+ mock_metrics.assert_called_once_with()
+ mock_keys.assert_called_once_with()
+ self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
+
+ @mock.patch.object(report.Report, '_get_baro_metrics')
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ @mock.patch.object(report.Report, '_validate')
+ def test_generate_nsb(
+ self, mock_valid, mock_keys, mock_metrics, mock_baro_metrics):
+
+ mock_metrics.return_value = GOOD_DB_METRICS
+ mock_keys.return_value = GOOD_DB_FIELDKEYS
+ BARO_METRICS = {
+ # TODO: is timestamp needed here?
+ 'Timestamp': [
+ '14:11:25.383698', '14:11:25.383712', '14:11:35.383696',
+ '14:11:35.383713', '14:11:45.383700', '14:11:45.383716'],
+ 'myhostname.cpu_value.cpu.user.0': {
+ '14:11:25.383698': 324050,
+ '14:11:35.383696': 324051,
+ '14:11:45.383700': 324054,
+ },
+ 'myhostname.cpu_value.cpu.system.0': {
+ '14:11:25.383712': 193798,
+ '14:11:35.383713': 193800,
+ '14:11:45.383716': 193801,
+ }
+ }
+ mock_baro_metrics.return_value = BARO_METRICS
+
+ self.rep.generate_nsb(self.param)
+ mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
+ mock_metrics.assert_called_once_with()
+ mock_keys.assert_called_once_with()
+ self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
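
The two _combine_times tests pin down the merge semantics: the result is the sorted union of the yardstick and barometer timestamp lists, with entries common to both appearing only once. A sketch reproducing just that observable behaviour (combine_times is an illustrative stand-in; the real Report._combine_times may be implemented differently):

    def combine_times(yard_times, baro_times):
        # Sorted, de-duplicated union of the two timestamp lists.
        return sorted(set(yard_times) | set(baro_times))


    assert combine_times(['00:00:02', '00:00:00'],
                         ['00:00:01', '00:00:02']) == [
        '00:00:00', '00:00:01', '00:00:02']
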
diff --git a/yardstick/tests/unit/benchmark/core/test_task.py b/yardstick/tests/unit/benchmark/core/test_task.py
index 9e8e4e9f7..0f09b3e59 100644
--- a/yardstick/tests/unit/benchmark/core/test_task.py
+++ b/yardstick/tests/unit/benchmark/core/test_task.py
@@ -9,14 +9,18 @@
import copy
import io
+import logging
import os
import sys
import mock
import six
+from six.moves import builtins
import unittest
import uuid
+import collections
+from yardstick.benchmark.contexts import base
from yardstick.benchmark.contexts import dummy
from yardstick.benchmark.core import task
from yardstick.common import constants as consts
@@ -27,7 +31,15 @@ from yardstick.common import utils
class TaskTestCase(unittest.TestCase):
- @mock.patch.object(task, 'Context')
+ def setUp(self):
+ self._mock_log = mock.patch.object(task, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_log.stop()
+
+ @mock.patch.object(base, 'Context')
def test_parse_nodes_with_context_same_context(self, mock_context):
scenario_cfg = {
"nodes": {
@@ -68,7 +80,7 @@ class TaskTestCase(unittest.TestCase):
dispatcher2])
self.assertIsNone(t._do_output(output_config, {}))
- @mock.patch.object(task, 'Context')
+ @mock.patch.object(base, 'Context')
def test_parse_networks_from_nodes(self, mock_context):
nodes = {
'node1': {
@@ -132,7 +144,7 @@ class TaskTestCase(unittest.TestCase):
self.assertEqual(mock_context.get_network.call_count, expected_get_network_calls)
self.assertDictEqual(networks, expected)
- @mock.patch.object(task, 'Context')
+ @mock.patch.object(base, 'Context')
@mock.patch.object(task, 'base_runner')
def test_run(self, mock_base_runner, *args):
scenario = {
@@ -155,6 +167,31 @@ class TaskTestCase(unittest.TestCase):
t._run([scenario], False, "yardstick.out")
runner.run.assert_called_once()
+ @mock.patch.object(base, 'Context')
+ @mock.patch.object(task, 'base_runner')
+ def test_run_ProxDuration(self, mock_base_runner, *args):
+ scenario = {
+ 'host': 'athena.demo',
+ 'target': 'ares.demo',
+ 'runner': {
+ 'duration': 60,
+ 'interval': 1,
+ 'sampled': 'yes',
+ 'confirmation': 1,
+ 'type': 'ProxDuration'
+ },
+ 'type': 'Ping'
+ }
+
+ t = task.Task()
+ runner = mock.Mock()
+ runner.join.return_value = 0
+ runner.get_output.return_value = {}
+ runner.get_result.return_value = []
+ mock_base_runner.Runner.get.return_value = runner
+ t._run([scenario], False, "yardstick.out")
+ runner.run.assert_called_once()
+
@mock.patch.object(os, 'environ')
def test_check_precondition(self, mock_os_environ):
cfg = {
@@ -296,9 +333,9 @@ class TaskTestCase(unittest.TestCase):
actual_result = t._parse_options(options)
self.assertEqual(expected_result, actual_result)
- @mock.patch('six.moves.builtins.open', side_effect=mock.mock_open())
+ @mock.patch.object(builtins, 'open', side_effect=mock.mock_open())
@mock.patch.object(task, 'utils')
- @mock.patch('logging.root')
+ @mock.patch.object(logging, 'root')
def test_set_log(self, mock_logging_root, *args):
task_obj = task.Task()
task_obj.task_id = 'task_id'
@@ -357,6 +394,12 @@ key2:
}
}
+ @staticmethod
+ def _remove_contexts():
+ for context in base.Context.list:
+ context._delete_context()
+ base.Context.list = []
+
def test__change_node_names(self):
ctx_attrs = {
@@ -371,6 +414,7 @@ key2:
}
my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
my_context.init(ctx_attrs)
expected_scenario = {
@@ -413,6 +457,7 @@ key2:
}
my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
my_context.init(ctx_attrs)
scenario = copy.deepcopy(self.scenario)
@@ -428,6 +473,7 @@ key2:
}
my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
my_context.init(ctx_attrs)
scenario = copy.deepcopy(self.scenario)
scenario['options'] = None
@@ -442,6 +488,7 @@ key2:
}
my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
my_context.init(ctx_attrs)
scenario = copy.deepcopy(self.scenario)
scenario['options']['server_name'] = None
@@ -449,6 +496,42 @@ key2:
self.parser._change_node_names(scenario, [my_context])
self.assertIsNone(scenario['options']['server_name'])
+ def test__change_node_names_target_map(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ scenario['nodes'] = {
+ 'tg__0': {
+ 'name': 'tg__0.demo',
+ 'public_ip_attr': "1.1.1.1",
+ },
+ 'vnf__0': {
+ 'name': 'vnf__0.demo',
+ 'public_ip_attr': "2.2.2.2",
+ }
+ }
+ self.parser._change_node_names(scenario, [my_context])
+ for target in scenario['nodes'].values():
+ self.assertIsInstance(target, collections.Mapping)
+
+ def test__change_node_names_not_target_map(self):
+ ctx_attrs = {
+ 'name': 'demo',
+ 'task_id': '1234567890'
+ }
+ my_context = dummy.DummyContext()
+ self.addCleanup(self._remove_contexts)
+ my_context.init(ctx_attrs)
+ scenario = copy.deepcopy(self.scenario)
+ self.parser._change_node_names(scenario, [my_context])
+ for target in scenario['nodes'].values():
+ self.assertNotIsInstance(target, collections.Mapping)
+
def test__parse_tasks(self):
task_obj = task.Task()
_uuid = uuid.uuid4()
@@ -525,7 +608,8 @@ key2:
mock_open.assert_has_calls([mock.call('args_file'),
mock.call('task_file')])
- def test__render_task_error_arguments(self):
+ @mock.patch.object(builtins, 'print')
+ def test__render_task_error_arguments(self, *args):
with self.assertRaises(exceptions.TaskRenderArgumentError):
task.TaskParser('task_file')._render_task('value1="var3"', None)
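
test__change_node_names_target_map and its counterpart assert that _change_node_names keeps dict-valued nodes as mappings while plain node-name strings stay strings. A small sketch of the distinction being checked; note the test file uses the older py2/py3 alias collections.Mapping, which moved to collections.abc.Mapping on Python 3 (and the bare alias is gone in 3.10+):

    import collections.abc

    nodes = {
        'tg__0': {'name': 'tg__0.demo', 'public_ip_attr': '1.1.1.1'},
        'vnf__0': 'vnf__0.demo',
    }

    for name, target in sorted(nodes.items()):
        if isinstance(target, collections.abc.Mapping):
            print(name, 'is a mapping, resolved name:', target['name'])
        else:
            print(name, 'is a plain node name string')
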
diff --git a/yardstick/tests/unit/benchmark/core/test_testcase.py b/yardstick/tests/unit/benchmark/core/test_testcase.py
index 119465887..077848d77 100644
--- a/yardstick/tests/unit/benchmark/core/test_testcase.py
+++ b/yardstick/tests/unit/benchmark/core/test_testcase.py
@@ -7,28 +7,28 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for yardstick.cmd.commands.testcase
-
-from __future__ import absolute_import
-import unittest
+import mock
+from six.moves import builtins
from yardstick.benchmark.core import testcase
+from yardstick.tests.unit import base as ut_base
class Arg(object):
def __init__(self):
- self.casename = ('opnfv_yardstick_tc001',)
+ self.casename = ('opnfv_yardstick_tc001', )
-class TestcaseUT(unittest.TestCase):
+class TestcaseTestCase(ut_base.BaseUnitTestCase):
def test_list_all(self):
t = testcase.Testcase()
result = t.list_all("")
self.assertIsInstance(result, list)
- def test_show(self):
+ @mock.patch.object(builtins, 'print')
+ def test_show(self, *args):
t = testcase.Testcase()
casename = Arg()
result = t.show(casename)
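
Patching print through six.moves.builtins, as test_show does above, keeps test output quiet without touching the code under test, and the decorator form hands the mock to the test for assertions. A minimal sketch (show_quietly is an illustrative name):

    import mock
    from six.moves import builtins


    @mock.patch.object(builtins, 'print')
    def show_quietly(mock_print):
        # print() resolves to the patched builtin, so nothing reaches
        # stdout and the call can still be asserted on.
        print('hidden from stdout')
        mock_print.assert_called_once_with('hidden from stdout')


    show_quietly()
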
diff --git a/yardstick/tests/unit/benchmark/runner/test_arithmetic.py b/yardstick/tests/unit/benchmark/runner/test_arithmetic.py
new file mode 100644
index 000000000..35d935cd5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_arithmetic.py
@@ -0,0 +1,448 @@
+##############################################################################
+# Copyright (c) 2018 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import multiprocessing
+import os
+import time
+
+import mock
+import six
+import unittest
+
+from yardstick.benchmark.runners import arithmetic
+from yardstick.common import exceptions as y_exc
+
+
+class ArithmeticRunnerTest(unittest.TestCase):
+ class MyMethod(object):
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {
+ 'interval': 0,
+ 'iter_type': 'nested_for_loops',
+ 'iterators': [
+ {
+ 'name': 'stride',
+ 'start': 64,
+ 'stop': 128,
+ 'step': 64
+ },
+ {
+ 'name': 'size',
+ 'start': 500,
+ 'stop': 2000,
+ 'step': 500
+ }
+ ]
+ },
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_process_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = arithmetic.ArithmeticRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='Arithmetic-some_type-101',
+ target=arithmetic._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(self.scenario_cfg['runner']['runner_id'], 101)
+
+ @mock.patch.object(time, 'sleep')
+ def test__worker_process_calls_nested_for_loops(self, mock_time_sleep):
+ self.scenario_cfg['runner']['interval'] = 99
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_has_calls([mock.call({})] * 8)
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ mock_time_sleep.assert_has_calls([mock.call(99)] * 8)
+ self.assertEqual(mock_time_sleep.call_count, 8)
+
+ @mock.patch.object(time, 'sleep')
+ def test__worker_process_calls_tuple_loops(self, mock_time_sleep):
+ self.scenario_cfg['runner']['interval'] = 99
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_has_calls([mock.call({})] * 2)
+ self.assertEqual(self.benchmark.my_method.call_count, 2)
+ mock_time_sleep.assert_has_calls([mock.call(99)] * 2)
+ self.assertEqual(mock_time_sleep.call_count, 2)
+
+ def test__worker_process_stored_options_nested_for_loops(self):
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_stored_options_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 1000})
+
+ def test__worker_process_aborted_set_early(self):
+ aborted = multiprocessing.Event()
+ aborted.set()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ aborted, mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.scenario_cfg['options'], {})
+ self.benchmark.my_method.assert_not_called()
+
+ def test__worker_process_output_queue_nested_for_loops(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ result = []
+ while not output_queue.empty():
+ result.append(output_queue.get())
+ self.assertListEqual(result, [102, 103, 104, 105, 106, 107, 108, 109])
+
+ def test__worker_process_output_queue_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 103)
+ result = []
+ while not output_queue.empty():
+ result.append(output_queue.get())
+ self.assertListEqual(result, [102, 103])
+
+ def test__worker_process_queue_nested_for_loops(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+
+ def test__worker_process_queue_tuple_loops(self):
+ self.scenario_cfg['runner']['iter_type'] = 'tuple_loops'
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 103)
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_on_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+ self.assertEqual(count, 8)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertEqual(result['errors'],
+ ('My Case SLA validation failed. '
+ 'Error: my error message',))
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+ self.assertGreater(result['timestamp'], timestamp)
+ timestamp = result['timestamp']
+ self.assertEqual(count, 8)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.my_method.assert_called_once()
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_not_called()
+
+ def test__worker_process_output_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+        with six.assertRaisesRegex(
+                self, y_exc.SLAValidationError,
+                'My Case SLA validation failed. Error: my error message'):
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.assertEqual(self.benchmark.my_method.count, 102)
+ self.benchmark.teardown.assert_not_called()
+ self.assertTrue(queue.empty())
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_broad_exception_no_sla_cfg_early_exit(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.benchmark.my_method.assert_called_once()
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 64, 'size': 500})
+
+ def test__worker_process_output_on_broad_exception_no_sla_cfg(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 102)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 64, 'size': 500})
+ self.assertEqual(queue.qsize(), 1)
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['data'], {'my_key': 102})
+        six.assertRegex(
+            self, result['errors'],
+            'YardstickException: An unknown exception occurred.')
+ self.assertEqual(result['sequence'], 1)
+ self.assertTrue(output_queue.empty())
+
+ def test__worker_process_broad_exception_sla_cfg_not_none(self):
+ self.scenario_cfg['sla'] = {'action': 'some action'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+
+ def test__worker_process_output_on_broad_exception_sla_cfg_not_none(self):
+ self.scenario_cfg['sla'] = {'action': 'some action'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ output_queue = multiprocessing.Queue()
+ timestamp = time.time()
+ arithmetic._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.01)
+
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.count, 109)
+ self.assertDictEqual(self.scenario_cfg['options'],
+ {'stride': 128, 'size': 2000})
+ self.assertTrue(output_queue.empty())
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertRegex(
+ result['errors'],
+ 'YardstickException: An unknown exception occurred.')
+ self.assertEqual(result['sequence'], count)
+
+ def test__worker_process_benchmark_teardown_on_broad_exception(self):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(SystemExit) as raised:
+ arithmetic._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.assertEqual(raised.exception.code, 1)
+ self._assert_defaults__worker_process_run_setup_and_teardown()
+ self.assertEqual(self.benchmark.my_method.call_count, 8)
diff --git a/yardstick/tests/unit/benchmark/runner/test_base.py b/yardstick/tests/unit/benchmark/runner/test_base.py
index 727207f5a..07d6f1843 100644
--- a/yardstick/tests/unit/benchmark/runner/test_base.py
+++ b/yardstick/tests/unit/benchmark/runner/test_base.py
@@ -10,36 +10,63 @@
import time
import mock
-import unittest
-from subprocess import CalledProcessError
+import subprocess
-
-from yardstick.benchmark.runners import base
+from yardstick.benchmark.runners import base as runner_base
from yardstick.benchmark.runners import iteration
+from yardstick.tests.unit import base as ut_base
-class ActionTestCase(unittest.TestCase):
+class ActionTestCase(ut_base.BaseUnitTestCase):
- @mock.patch("yardstick.benchmark.runners.base.subprocess")
- def test__execute_shell_command(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
+ def setUp(self):
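+ # Patching runner_base.log.error keeps expected error output out of the
+ # test run (and lets tests assert on it if needed).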
+ self._mock_log = mock.patch.object(runner_base.log, 'error')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._stop_mocks)
- self.assertEqual(base._execute_shell_command("")[0], -1)
+ def _stop_mocks(self):
+ self._mock_log.stop()
- @mock.patch("yardstick.benchmark.runners.base.subprocess")
- def test__single_action(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
+ @mock.patch.object(subprocess, 'check_output')
+ def test__execute_shell_command(self, mock_subprocess):
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ self.assertEqual(runner_base._execute_shell_command("")[0], -1)
- base._single_action(0, "echo", mock.MagicMock())
+ @mock.patch.object(subprocess, 'check_output')
+ def test__single_action(self, mock_subprocess):
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ runner_base._single_action(0, 'echo', mock.Mock())
- @mock.patch("yardstick.benchmark.runners.base.subprocess")
+ @mock.patch.object(subprocess, 'check_output')
def test__periodic_action(self, mock_subprocess):
- mock_subprocess.check_output.side_effect = CalledProcessError(-1, '')
+ mock_subprocess.side_effect = subprocess.CalledProcessError(-1, '')
+ runner_base._periodic_action(0, 'echo', mock.Mock())
+
+
+class ScenarioOutputTestCase(ut_base.BaseUnitTestCase):
+
+ def setUp(self):
+ self.output_queue = mock.Mock()
+ self.scenario_output = runner_base.ScenarioOutput(self.output_queue,
+ sequence=1)
+
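+ # ScenarioOutput.push() is expected to wrap the data with the current
+ # timestamp and the runner sequence number, then put the record on the
+ # output queue with block=True and a 10 second timeout.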
+ @mock.patch.object(time, 'time')
+ def test_push(self, mock_time):
+ mock_time.return_value = 2
+ data = {"value1": 1}
+ self.scenario_output.push(data)
+ self.output_queue.put.assert_called_once_with({'timestamp': 2,
+ 'sequence': 1,
+ 'data': data}, True, 10)
- base._periodic_action(0, "echo", mock.MagicMock())
+ def test_push_no_timestamp(self):
+ self.scenario_output["value1"] = 1
+ self.scenario_output.push(None, False)
+ self.output_queue.put.assert_called_once_with({'sequence': 1,
+ 'value1': 1}, True, 10)
-class RunnerTestCase(unittest.TestCase):
+class RunnerTestCase(ut_base.BaseUnitTestCase):
def setUp(self):
config = {
@@ -86,7 +113,7 @@ class RunnerTestCase(unittest.TestCase):
self.assertEqual(idle_result, actual_result)
def test__run_benchmark(self):
- runner = base.Runner(mock.Mock())
+ runner = runner_base.Runner(mock.Mock())
with self.assertRaises(NotImplementedError):
runner._run_benchmark(mock.Mock(), mock.Mock(), mock.Mock(), mock.Mock())
diff --git a/yardstick/tests/unit/benchmark/runner/test_duration.py b/yardstick/tests/unit/benchmark/runner/test_duration.py
new file mode 100644
index 000000000..fa47e96bf
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_duration.py
@@ -0,0 +1,315 @@
+##############################################################################
+# Copyright (c) 2018 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import multiprocessing
+import os
+import time
+
+from yardstick.benchmark.runners import duration
+from yardstick.common import exceptions as y_exc
+
+
+class DurationRunnerTest(unittest.TestCase):
+ class MyMethod(object):
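+ """Stand-in benchmark method.
+
+ Counts invocations starting at 101 and can raise an SLA validation
+ error or a broad YardstickException on every call.
+ """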
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ def _assert_defaults__worker_run_one_iteration(self):
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
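+ # _run_benchmark should spawn a single worker process named after the
+ # runner type, the scenario type and the current PID.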
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = duration.DurationRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='Duration-some_type-101',
+ target=duration._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(self.scenario_cfg['runner']['runner_id'], 101)
+
+ def test__worker_process_called_with_cfg(self):
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_called_with_cfg_loop(self):
+ self.scenario_cfg['runner']['duration'] = 0.01
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.call_count, 0)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
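+ # With an empty runner config the worker falls back to the default
+ # 1 second interval; the pre-set aborted event stops it after a single
+ # iteration.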
+ def test__worker_process_called_without_cfg(self):
+ scenario_cfg = {'runner': {}}
+ aborted = multiprocessing.Event()
+ aborted.set()
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ scenario_cfg, {}, aborted, mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(1)
+ self.benchmark.my_method.assert_called_once_with({})
+ self.benchmark.post_run_wait_time.assert_called_once_with(1)
+ self.benchmark.teardown.assert_called_once()
+
+ def test__worker_process_output_queue(self):
+ self.benchmark.my_method = mock.Mock(return_value='my_result')
+
+ output_queue = multiprocessing.Queue()
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+ self.assertEqual(output_queue.get(), 'my_result')
+
+ def test__worker_process_output_queue_multiple_iterations(self):
+ self.scenario_cfg['runner']['duration'] = 0.01
+ self.benchmark.my_method = self.MyMethod()
+
+ output_queue = multiprocessing.Queue()
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), output_queue)
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.count, 1)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
+ count = 101
+ while not output_queue.empty():
+ count += 1
+ self.assertEqual(output_queue.get(), count)
+
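+ # Records put on the result queue carry 'timestamp', 'sequence', 'data'
+ # and a (possibly empty) 'errors' field.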
+ def test__worker_process_queue(self):
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_queue_multiple_iterations(self):
+ self.scenario_cfg['runner']['duration'] = 0.5
+ self.benchmark.my_method = self.MyMethod()
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.pre_run_wait_time.call_count, 0)
+ self.assertGreater(self.benchmark.my_method.count, 1)
+ self.assertGreater(self.benchmark.post_run_wait_time.call_count, 0)
+
+ count = 0
+ while not queue.empty():
+ count += 1
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': count + 101})
+ self.assertEqual(result['sequence'], count)
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_default(self):
+ self.scenario_cfg['sla'] = {}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_queue_on_sla_validation_error_monitor(self):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertEqual(result['errors'], ('My Case SLA validation failed. '
+ 'Error: my error message',))
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_broad_exception(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ duration._worker_process(mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
+
+ def test__worker_process_queue_on_broad_exception(self):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+
+ queue = multiprocessing.Queue()
+ timestamp = time.time()
+ duration._worker_process(queue, self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ time.sleep(0.1)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.post_run_wait_time.assert_called_once_with(0)
+
+ result = queue.get()
+ self.assertGreater(result['timestamp'], timestamp)
+ self.assertNotEqual(result['errors'], '')
+ self.assertEqual(result['data'], {'my_key': 102})
+ self.assertEqual(result['sequence'], 1)
+
+ def test__worker_process_benchmark_teardown_on_broad_exception(self):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(SystemExit) as raised:
+ duration._worker_process(mock.Mock(), self.benchmark_cls,
+ 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+ self.assertEqual(raised.exception.code, 1)
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self._assert_defaults__worker_run_one_iteration()
diff --git a/yardstick/tests/unit/benchmark/runner/test_iteration.py b/yardstick/tests/unit/benchmark/runner/test_iteration.py
new file mode 100644
index 000000000..783b236f5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_iteration.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import multiprocessing
+from yardstick.benchmark.runners import iteration
+from yardstick.common import exceptions as y_exc
+
+
+class IterationRunnerTest(unittest.TestCase):
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+
+ def _assert_defaults__worker_run_one_iteration(self):
+ self.benchmark.pre_run_wait_time.assert_called_once_with(0)
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_broad_exception(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+
+ with self.assertRaises(Exception):
+ iteration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_one_iteration()
+ self._assert_defaults__worker_run_setup_and_teardown()
diff --git a/yardstick/tests/unit/benchmark/runner/test_proxduration.py b/yardstick/tests/unit/benchmark/runner/test_proxduration.py
new file mode 100644
index 000000000..056195fd3
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/runner/test_proxduration.py
@@ -0,0 +1,286 @@
+# Copyright (c) 2018 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import unittest
+import multiprocessing
+import os
+
+from yardstick.benchmark.runners import proxduration
+from yardstick.common import constants
+from yardstick.common import exceptions as y_exc
+
+
+class ProxDurationRunnerTest(unittest.TestCase):
+
+ class MyMethod(object):
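+ """Fake benchmark method tracking an invocation counter.
+
+ The counter starts at 101; the side effect selects an SLA validation
+ error or a broad YardstickException raised on each call.
+ """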
+ SLA_VALIDATION_ERROR_SIDE_EFFECT = 1
+ BROAD_EXCEPTION_SIDE_EFFECT = 2
+
+ def __init__(self, side_effect=0):
+ self.count = 101
+ self.side_effect = side_effect
+
+ def __call__(self, data):
+ self.count += 1
+ data['my_key'] = self.count
+ if self.side_effect == self.SLA_VALIDATION_ERROR_SIDE_EFFECT:
+ raise y_exc.SLAValidationError(case_name='My Case',
+ error_msg='my error message')
+ elif self.side_effect == self.BROAD_EXCEPTION_SIDE_EFFECT:
+ raise y_exc.YardstickException
+ return self.count
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'runner': {'interval': 0, "duration": 0},
+ 'type': 'some_type'
+ }
+
+ self.benchmark = mock.Mock()
+ self.benchmark_cls = mock.Mock(return_value=self.benchmark)
+
+ def _assert_defaults__worker_run_setup_and_teardown(self):
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ @mock.patch.object(os, 'getpid')
+ @mock.patch.object(multiprocessing, 'Process')
+ def test__run_benchmark_called_with(self, mock_multiprocessing_process,
+ mock_os_getpid):
+ mock_os_getpid.return_value = 101
+
+ runner = proxduration.ProxDurationRunner({})
+ benchmark_cls = mock.Mock()
+ runner._run_benchmark(benchmark_cls, 'my_method', self.scenario_cfg,
+ {})
+ mock_multiprocessing_process.assert_called_once_with(
+ name='ProxDuration-some_type-101',
+ target=proxduration._worker_process,
+ args=(runner.result_queue, benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, runner.aborted, runner.output_queue))
+
+ @mock.patch.object(os, 'getpid')
+ def test__worker_process_runner_id(self, mock_os_getpid):
+ mock_os_getpid.return_value = 101
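+ # "sampled" presumably switches the ProxDuration worker to periodic
+ # result sampling; the short duration keeps each test loop brief.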
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self.assertEqual(101, self.scenario_cfg['runner']['runner_id'])
+
+ def test__worker_process_called_with_cfg(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ def test__worker_process_called_with_cfg_loop(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ self.assertGreater(self.benchmark.my_method.call_count, 0)
+
+ def test__worker_process_called_without_cfg(self):
+ scenario_cfg = {'runner': {}}
+ aborted = multiprocessing.Event()
+ aborted.set()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', scenario_cfg, {},
+ aborted, mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.teardown.assert_called_once()
+
+ def test__worker_process_output_queue(self):
+ self.benchmark.my_method = mock.Mock(return_value='my_result')
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ output_queue = mock.Mock()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), output_queue)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ output_queue.put.assert_has_calls(
+ [mock.call('my_result', True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_output_queue_multiple_iterations(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod()
+ output_queue = mock.Mock()
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), output_queue)
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ for idx in range(102, 102 + len(output_queue.method_calls)):
+ output_queue.put.assert_has_calls(
+ [mock.call(idx, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_queue(self):
+ self.benchmark.my_method = self.MyMethod()
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
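+ # mock.ANY matches the wall-clock timestamp recorded by the worker.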
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': ''}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_queue_multiple_iterations(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod()
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ for idx in range(102, 102 + len(queue.method_calls)):
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': idx - 101,
+ 'data': {'my_key': idx},
+ 'errors': ''}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True,
+ constants.QUEUE_PUT_TIMEOUT)])
+
+ def test__worker_process_except_sla_validation_error_no_sla_cfg(self):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ @mock.patch.object(proxduration.LOG, 'warning')
+ def test__worker_process_except_sla_validation_error_sla_cfg_monitor(
+ self, *args):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method', self.scenario_cfg,
+ {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_default(self):
+ self.scenario_cfg['sla'] = {}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+ with self.assertRaises(y_exc.SLAValidationError):
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.my_method.assert_called_once_with({})
+
+ def test__worker_process_raise_sla_validation_error_sla_cfg_assert(self):
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.scenario_cfg['sla'] = {'action': 'assert'}
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.SLAValidationError)
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self.benchmark_cls.assert_called_once_with(self.scenario_cfg, {})
+ self.benchmark.setup.assert_called_once()
+ self.benchmark.my_method.assert_called_once_with({})
+
+ @mock.patch.object(proxduration.LOG, 'warning')
+ def test__worker_process_queue_on_sla_validation_error_monitor(
+ self, *args):
+ self.scenario_cfg['sla'] = {'action': 'monitor'}
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.SLA_VALIDATION_ERROR_SIDE_EFFECT)
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': ('My Case SLA validation failed. '
+ 'Error: my error message', )}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_broad_exception(self, *args):
+ self.benchmark.my_method = mock.Mock(
+ side_effect=y_exc.YardstickException)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+
+ self._assert_defaults__worker_run_setup_and_teardown()
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_queue_on_broad_exception(self, *args):
+ self.benchmark.my_method = self.MyMethod(
+ side_effect=self.MyMethod.BROAD_EXCEPTION_SIDE_EFFECT)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+ queue = mock.Mock()
+ proxduration._worker_process(
+ queue, self.benchmark_cls, 'my_method', self.scenario_cfg, {},
+ multiprocessing.Event(), mock.Mock())
+
+ benchmark_output = {'timestamp': mock.ANY,
+ 'sequence': 1,
+ 'data': {'my_key': 102},
+ 'errors': mock.ANY}
+ queue.put.assert_has_calls(
+ [mock.call(benchmark_output, True, constants.QUEUE_PUT_TIMEOUT)])
+
+ @mock.patch.object(proxduration.LOG, 'exception')
+ def test__worker_process_benchmark_teardown_on_broad_exception(
+ self, *args):
+ self.benchmark.teardown = mock.Mock(
+ side_effect=y_exc.YardstickException)
+ self.scenario_cfg["runner"] = {"sampled": True, "duration": 0.1}
+
+ with self.assertRaises(SystemExit) as raised:
+ proxduration._worker_process(
+ mock.Mock(), self.benchmark_cls, 'my_method',
+ self.scenario_cfg, {}, multiprocessing.Event(), mock.Mock())
+ self.assertEqual(1, raised.exception.code)
+ self._assert_defaults__worker_run_setup_and_teardown()
diff --git a/yardstick/tests/unit/benchmark/runner/test_search.py b/yardstick/tests/unit/benchmark/runner/test_search.py
index 4e5b4fe77..d5d1b8ded 100644
--- a/yardstick/tests/unit/benchmark/runner/test_search.py
+++ b/yardstick/tests/unit/benchmark/runner/test_search.py
@@ -19,36 +19,33 @@ import unittest
from yardstick.benchmark.runners.search import SearchRunner
from yardstick.benchmark.runners.search import SearchRunnerHelper
+from yardstick.common import exceptions as y_exc
class TestSearchRunnerHelper(unittest.TestCase):
def test___call__(self):
- cls = mock.MagicMock()
- aborted = mock.MagicMock()
scenario_cfg = {
'runner': {},
}
- benchmark = cls()
- method = getattr(benchmark, 'my_method')
+ benchmark = mock.Mock()
+ method = getattr(benchmark(), 'my_method')
helper = SearchRunnerHelper(
- cls, 'my_method', scenario_cfg, {}, aborted)
+ benchmark, 'my_method', scenario_cfg, {}, mock.Mock())
with helper.get_benchmark_instance():
helper()
- self.assertEqual(method.call_count, 1)
+ method.assert_called_once()
def test___call___error(self):
- cls = mock.MagicMock()
- aborted = mock.MagicMock()
scenario_cfg = {
'runner': {},
}
helper = SearchRunnerHelper(
- cls, 'my_method', scenario_cfg, {}, aborted)
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
with self.assertRaises(RuntimeError):
helper()
@@ -56,8 +53,6 @@ class TestSearchRunnerHelper(unittest.TestCase):
@mock.patch.object(time, 'sleep')
@mock.patch.object(time, 'time')
def test_is_not_done(self, mock_time, *args):
- cls = mock.MagicMock()
- aborted = mock.MagicMock()
scenario_cfg = {
'runner': {},
}
@@ -65,7 +60,7 @@ class TestSearchRunnerHelper(unittest.TestCase):
mock_time.side_effect = range(1000)
helper = SearchRunnerHelper(
- cls, 'my_method', scenario_cfg, {}, aborted)
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
index = -1
for index in helper.is_not_done():
@@ -76,8 +71,6 @@ class TestSearchRunnerHelper(unittest.TestCase):
@mock.patch.object(time, 'sleep')
def test_is_not_done_immediate_stop(self, *args):
- cls = mock.MagicMock()
- aborted = mock.MagicMock()
scenario_cfg = {
'runner': {
'run_step': '',
@@ -85,7 +78,7 @@ class TestSearchRunnerHelper(unittest.TestCase):
}
helper = SearchRunnerHelper(
- cls, 'my_method', scenario_cfg, {}, aborted)
+ mock.Mock(), 'my_method', scenario_cfg, {}, mock.Mock())
index = -1
for index in helper.is_not_done():
@@ -112,7 +105,7 @@ class TestSearchRunner(unittest.TestCase):
}
runner = SearchRunner({})
- runner.worker_helper = mock.MagicMock(side_effect=update)
+ runner.worker_helper = mock.Mock(side_effect=update)
self.assertFalse(runner._worker_run_once('sequence 1'))
@@ -136,51 +129,49 @@ class TestSearchRunner(unittest.TestCase):
}
runner = SearchRunner({})
- runner.worker_helper = mock.MagicMock(side_effect=update)
+ runner.worker_helper = mock.Mock(side_effect=update)
self.assertTrue(runner._worker_run_once('sequence 1'))
def test__worker_run_once_assertion_error_assert(self):
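+ # SLA breaches now surface as y_exc.SLAValidationError rather than a
+ # bare AssertionError.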
runner = SearchRunner({})
runner.sla_action = 'assert'
- runner.worker_helper = mock.MagicMock(side_effect=AssertionError)
+ runner.worker_helper = mock.Mock(side_effect=y_exc.SLAValidationError)
- with self.assertRaises(AssertionError):
+ with self.assertRaises(y_exc.SLAValidationError):
runner._worker_run_once('sequence 1')
def test__worker_run_once_assertion_error_monitor(self):
runner = SearchRunner({})
runner.sla_action = 'monitor'
- runner.worker_helper = mock.MagicMock(side_effect=AssertionError)
+ runner.worker_helper = mock.Mock(side_effect=y_exc.SLAValidationError)
self.assertFalse(runner._worker_run_once('sequence 1'))
def test__worker_run_once_non_assertion_error_none(self):
runner = SearchRunner({})
- runner.worker_helper = mock.MagicMock(side_effect=RuntimeError)
+ runner.worker_helper = mock.Mock(side_effect=RuntimeError)
self.assertTrue(runner._worker_run_once('sequence 1'))
def test__worker_run_once_non_assertion_error(self):
runner = SearchRunner({})
runner.sla_action = 'monitor'
- runner.worker_helper = mock.MagicMock(side_effect=RuntimeError)
+ runner.worker_helper = mock.Mock(side_effect=RuntimeError)
self.assertFalse(runner._worker_run_once('sequence 1'))
def test__worker_run(self):
- cls = mock.MagicMock()
scenario_cfg = {
'runner': {'interval': 0, 'timeout': 1},
}
runner = SearchRunner({})
- runner._worker_run_once = mock.MagicMock(side_effect=[0, 0, 1])
+ runner._worker_run_once = mock.Mock(side_effect=[0, 0, 1])
- runner._worker_run(cls, 'my_method', scenario_cfg, {})
+ runner._worker_run(mock.Mock(), 'my_method', scenario_cfg, {})
def test__worker_run_immediate_stop(self):
- cls = mock.MagicMock()
scenario_cfg = {
'runner': {
'run_step': '',
@@ -188,15 +179,14 @@ class TestSearchRunner(unittest.TestCase):
}
runner = SearchRunner({})
- runner._worker_run(cls, 'my_method', scenario_cfg, {})
+ runner._worker_run(mock.Mock(), 'my_method', scenario_cfg, {})
@mock.patch('yardstick.benchmark.runners.search.multiprocessing')
def test__run_benchmark(self, mock_multi_process):
- cls = mock.MagicMock()
scenario_cfg = {
'runner': {},
}
runner = SearchRunner({})
- runner._run_benchmark(cls, 'my_method', scenario_cfg, {})
- self.assertEqual(mock_multi_process.Process.call_count, 1)
+ runner._run_benchmark(mock.Mock(), 'my_method', scenario_cfg, {})
+ mock_multi_process.Process.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
index d5c95a086..35455a49c 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
@@ -7,10 +7,6 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for
-# yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
-
-from __future__ import absolute_import
import mock
import unittest
@@ -18,37 +14,48 @@ from yardstick.benchmark.scenarios.availability.attacker import \
attacker_baremetal
-# pylint: disable=unused-argument
-# disable this for now because I keep forgetting mock patch arg ordering
+class ExecuteShellTestCase(unittest.TestCase):
+ def setUp(self):
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.subprocess')
-class ExecuteShellTestCase(unittest.TestCase):
+ self.addCleanup(self._stop_mocks)
- def test__fun_execute_shell_command_successful(self, mock_subprocess):
- cmd = "env"
- mock_subprocess.check_output.return_value = (0, 'unittest')
- exitcode, _ = attacker_baremetal._execute_shell_command(cmd)
+ def _stop_mocks(self):
+ self._mock_subprocess.stop()
+
+ def test__execute_shell_command_successful(self):
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
self.assertEqual(exitcode, 0)
- @mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.LOG')
- def test__fun_execute_shell_command_fail_cmd_exception(self, mock_log, mock_subprocess):
- cmd = "env"
- mock_subprocess.check_output.side_effect = RuntimeError
- exitcode, _ = attacker_baremetal._execute_shell_command(cmd)
+ @mock.patch.object(attacker_baremetal, 'LOG')
+ def test__execute_shell_command_fail_cmd_exception(self, mock_log):
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
self.assertEqual(exitcode, -1)
mock_log.error.assert_called_once()
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.subprocess')
-@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.ssh')
class AttackerBaremetalTestCase(unittest.TestCase):
def setUp(self):
+ self._mock_ssh = mock.patch.object(attacker_baremetal, 'ssh')
+ self.mock_ssh = self._mock_ssh.start()
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mocks)
+
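+ # Default to a healthy node: every mocked SSH execute reports "running".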
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "running", '')
+
host = {
"ipmi_ip": "10.20.0.5",
"ipmi_user": "root",
- "ipmi_pwd": "123456",
+ "ipmi_password": "123456",
"ip": "10.20.0.5",
"user": "root",
"key_filename": "/root/.ssh/id_rsa"
@@ -59,26 +66,26 @@ class AttackerBaremetalTestCase(unittest.TestCase):
'host': 'node1',
}
- def test__attacker_baremetal_all_successful(self, mock_ssh, mock_subprocess):
- mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
- ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
- self.context)
+ self.ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+ self.context)
- ins.setup()
- ins.inject_fault()
- ins.recover()
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+ self._mock_subprocess.stop()
- def test__attacker_baremetal_check_failuer(self, mock_ssh, mock_subprocess):
- mock_ssh.SSH.from_node().execute.return_value = (0, "error check", '')
- ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
- self.context)
- ins.setup()
+ def test__attacker_baremetal_all_successful(self):
+ self.ins.setup()
+ self.ins.inject_fault()
+ self.ins.recover()
- def test__attacker_baremetal_recover_successful(self, mock_ssh, mock_subprocess):
+ def test__attacker_baremetal_check_failure(self):
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "error check", '')
+ self.ins.setup()
+ def test__attacker_baremetal_recover_successful(self):
self.attacker_cfg["jump_host"] = 'node1'
- self.context["node1"]["pwd"] = "123456"
- mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ self.context["node1"]["password"] = "123456"
ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
self.context)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
new file mode 100644
index 000000000..74f86983b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+class BaseAttackerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.attacker_cfg = {
+ 'fault_type': 'test-attacker',
+ 'action_parameter': {'process_name': 'nova_api'},
+ 'rollback_parameter': {'process_name': 'nova_api'},
+ 'key': 'stop-service',
+ 'attack_key': 'stop-service',
+ 'host': 'node1',
+ }
+ self.base_attacker = baseattacker.BaseAttacker({}, {})
+
+ def test__init__(self):
+ self.assertEqual(self.base_attacker.data, {})
+ self.assertFalse(self.base_attacker.mandatory)
+ self.assertEqual(self.base_attacker.intermediate_variables, {})
+
+ def test_get_attacker_cls(self):
+ with self.assertRaises(RuntimeError):
+ baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
index ce972779d..8d042c406 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
@@ -7,6 +7,8 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+import time
+
import mock
import unittest
@@ -86,13 +88,19 @@ class BaseMonitorTestCase(unittest.TestCase):
'sla': {'max_outage_time': 5}
}
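+ # Give the monitor a moment to finish, then close its queue so the
+ # queue's feeder thread cannot block test shutdown.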
+ def _close_queue(self, instance):
+ time.sleep(0.1)
+ instance._queue.close()
+
def test__basemonitor_start_wait_successful(self):
ins = basemonitor.BaseMonitor(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
ins.start_monitor()
ins.wait_monitor()
def test__basemonitor_all_successful(self):
ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
ins.setup()
ins.run()
ins.verify_SLA()
@@ -100,16 +108,12 @@ class BaseMonitorTestCase(unittest.TestCase):
@mock.patch.object(basemonitor, 'multiprocessing')
def test__basemonitor_func_false(self, mock_multiprocess):
ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
ins.setup()
mock_multiprocess.Event().is_set.return_value = False
ins.run()
ins.verify_SLA()
- # TODO(elfoley): fix this test to not throw an error
def test__basemonitor_getmonitorcls_successful(self):
- cls = None
- try:
- cls = basemonitor.BaseMonitor.get_monitor_cls(self.monitor_cfg)
- except Exception: # pylint: disable=broad-except
- pass
- self.assertIsNone(cls)
+ with self.assertRaises(RuntimeError):
+ basemonitor.BaseMonitor.get_monitor_cls(self.monitor_cfg)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
index e9c680257..dc3a4b99a 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
@@ -63,3 +63,20 @@ class MultiMonitorServiceTestCase(unittest.TestCase):
ins.start_monitor()
ins.wait_monitor()
ins.verify_SLA()
+
+ def test__monitor_multi_no_sla(self, mock_open, mock_ssh):
+ monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'}
+ }
+ ins = monitor_multi.MultiMonitor(
+ monitor_cfg, self.context, {"nova-api": 10})
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.start_monitor()
+ ins.wait_monitor()
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
index a6d2ca398..8c73bf221 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
@@ -55,3 +55,19 @@ class MonitorProcessTestCase(unittest.TestCase):
ins.monitor_func()
ins._result = {"outage_time": 10}
ins.verify_SLA()
+
+ def test__monitor_process_no_sla(self, mock_ssh):
+ monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ }
+ ins = monitor_process.MonitorProcess(monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
index 45840d569..dbf3d83b2 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
@@ -11,10 +11,13 @@ import mock
import unittest
from yardstick.benchmark.scenarios.availability import scenario_general
+from yardstick.common import exceptions as y_exc
+
class ScenarioGeneralTestCase(unittest.TestCase):
- def setUp(self):
+ @mock.patch.object(scenario_general, 'Director')
+ def setUp(self, *args):
self.scenario_cfg = {
'type': "general_scenario",
'options': {
@@ -35,33 +38,39 @@ class ScenarioGeneralTestCase(unittest.TestCase):
'index': 2}]
}
}
- self.instance = scenario_general.ScenarioGeneral(self.scenario_cfg, None)
-
- self._mock_director = mock.patch.object(scenario_general, 'Director')
- self.mock_director = self._mock_director.start()
- self.addCleanup(self._stop_mock)
-
- def _stop_mock(self):
- self._mock_director.stop()
+ self.instance = scenario_general.ScenarioGeneral(self.scenario_cfg,
+ None)
+ self.instance.setup()
+ self.instance.director.verify.return_value = True
def test_scenario_general_all_successful(self):
- self.instance.setup()
- self.instance.run({})
+ ret = {}
+ self.instance.run(ret)
self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
- def test_scenario_general_exception(self):
- mock_obj = mock.Mock()
- mock_obj.createActionPlayer.side_effect = KeyError('Wrong')
- self.instance.director = mock_obj
+ @mock.patch.object(scenario_general.LOG, 'exception')
+ def test_scenario_general_exception(self, *args):
+ self.instance.director.createActionPlayer.side_effect = (
+ KeyError('Wrong'))
self.instance.director.data = {}
- self.instance.run({})
+ ret = {}
+ self.instance.run(ret)
self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
def test_scenario_general_case_fail(self):
- mock_obj = mock.Mock()
- mock_obj.verify.return_value = False
- self.instance.director = mock_obj
+ self.instance.director.verify.return_value = False
self.instance.director.data = {}
- self.instance.run({})
- self.instance.pass_flag = True
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
+
+ def test_scenario_general_case_service_not_found_fail(self):
+ self.instance.director.verify.return_value = True
+ self.instance.director.data = {"general-attacker": 0}
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
index 6bb3ec63b..d61fa67c7 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
@@ -11,6 +11,7 @@ import mock
import unittest
from yardstick.benchmark.scenarios.availability import serviceha
+from yardstick.common import exceptions as y_exc
class ServicehaTestCase(unittest.TestCase):
@@ -42,6 +43,13 @@ class ServicehaTestCase(unittest.TestCase):
}
sla = {"outage_time": 5}
self.args = {"options": options, "sla": sla}
+ self.test__serviceha = serviceha.ServiceHA(self.args, self.ctx)
+
+ def test___init__(self):
+ self.assertEqual(self.test__serviceha.data, {})
+ self.assertFalse(self.test__serviceha.setup_done)
+ self.assertFalse(self.test__serviceha.sla_pass)
# NOTE(elfoley): This should be split into test_setup and test_run
# NOTE(elfoley): This should explicitly test outcomes and states
@@ -60,15 +68,64 @@ class ServicehaTestCase(unittest.TestCase):
p.setup()
self.assertTrue(p.setup_done)
- # def test__serviceha_run_sla_error(self, mock_attacker, mock_monitor):
- # p = serviceha.ServiceHA(self.args, self.ctx)
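+ # A monitor-reported SLA breach should raise SLAValidationError and
+ # record sla_pass = 0 in the result dict.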
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_sla_error(self, mock_monitor, *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = False
+
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_service_not_found_sla_error(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+ p.data["kill-process"] = 0
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
- # p.setup()
- # self.assertEqual(p.setup_done, True)
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
- # result = {}
- # result["outage_time"] = 10
- # mock_monitor.Monitor().get_result.return_value = result
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_no_teardown_when_sla_pass(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = False
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_not_called()
- # ret = {}
- # self.assertRaises(AssertionError, p.run, ret)
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_teardown_when_mandatory(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = True
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
index f24ec24ec..4fadde4dc 100644
--- a/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
@@ -17,6 +17,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.compute import cyclictest
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.compute.cyclictest.ssh')
@@ -122,7 +123,7 @@ class CyclictestTestCase(unittest.TestCase):
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, result)
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
@@ -136,7 +137,7 @@ class CyclictestTestCase(unittest.TestCase):
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, result)
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
@@ -150,7 +151,7 @@ class CyclictestTestCase(unittest.TestCase):
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, result)
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
index 9640ce000..ba63e5f9e 100644
--- a/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
@@ -6,24 +6,16 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-
-# Unittest for yardstick.benchmark.scenarios.compute.lmbench.Lmbench
-
-from __future__ import absolute_import
-
import unittest
import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.compute import lmbench
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
-# pylint: disable=unused-argument
-# disable this for now because I keep forgetting mock patch arg ordering
-
-
-@mock.patch('yardstick.benchmark.scenarios.compute.lmbench.ssh')
class LmbenchTestCase(unittest.TestCase):
def setUp(self):
@@ -37,16 +29,23 @@ class LmbenchTestCase(unittest.TestCase):
self.result = {}
- def test_successful_setup(self, mock_ssh):
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+
+ def test_successful_setup(self):
l = lmbench.Lmbench({}, self.ctx)
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
l.setup()
self.assertIsNotNone(l.client)
self.assertTrue(l.setup_done)
- def test_unsuccessful_unknown_type_run(self, mock_ssh):
+ def test_unsuccessful_unknown_type_run(self):
options = {
"test_type": "foo"
@@ -57,7 +56,7 @@ class LmbenchTestCase(unittest.TestCase):
self.assertRaises(RuntimeError, l.run, self.result)
- def test_successful_latency_run_no_sla(self, mock_ssh):
+ def test_successful_latency_run_no_sla(self):
options = {
"test_type": "latency",
@@ -68,12 +67,12 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '[{"latency": 4.944, "size": 0.00049}]'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
l.run(self.result)
expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
self.assertEqual(self.result, expected_result)
- def test_successful_bandwidth_run_no_sla(self, mock_ssh):
+ def test_successful_bandwidth_run_no_sla(self):
options = {
"test_type": "bandwidth",
@@ -85,12 +84,12 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
l.run(self.result)
expected_result = jsonutils.loads(sample_output)
self.assertEqual(self.result, expected_result)
- def test_successful_latency_run_sla(self, mock_ssh):
+ def test_successful_latency_run_sla(self):
options = {
"test_type": "latency",
@@ -104,12 +103,12 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '[{"latency": 4.944, "size": 0.00049}]'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
l.run(self.result)
expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
self.assertEqual(self.result, expected_result)
- def test_successful_bandwidth_run_sla(self, mock_ssh):
+ def test_successful_bandwidth_run_sla(self):
options = {
"test_type": "bandwidth",
@@ -124,12 +123,12 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
l.run(self.result)
expected_result = jsonutils.loads(sample_output)
self.assertEqual(self.result, expected_result)
- def test_unsuccessful_latency_run_sla(self, mock_ssh):
+ def test_unsuccessful_latency_run_sla(self):
options = {
"test_type": "latency",
@@ -143,10 +142,10 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '[{"latency": 37.5, "size": 0.00049}]'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, l.run, self.result)
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
- def test_unsuccessful_bandwidth_run_sla(self, mock_ssh):
+ def test_unsuccessful_bandwidth_run_sla(self):
options = {
"test_type": "bandwidth",
@@ -161,10 +160,10 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 9925.5}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, l.run, self.result)
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
- def test_successful_latency_for_cache_run_sla(self, mock_ssh):
+ def test_successful_latency_for_cache_run_sla(self):
options = {
"test_type": "latency_for_cache",
@@ -178,16 +177,16 @@ class LmbenchTestCase(unittest.TestCase):
l = lmbench.Lmbench(args, self.ctx)
sample_output = "{\"L1cache\": 1.6}"
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
l.run(self.result)
expected_result = jsonutils.loads(sample_output)
self.assertEqual(self.result, expected_result)
- def test_unsuccessful_script_error(self, mock_ssh):
+ def test_unsuccessful_script_error(self):
options = {"test_type": "bandwidth"}
args = {"options": options}
l = lmbench.Lmbench(args, self.ctx)
- mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.mock_ssh.from_node().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, l.run, self.result)
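
The change above is the template for the rest of this series: the class-level @mock.patch decorator, whose injected arguments are easy to mis-order, is replaced by mock.patch.object() started in setUp() and undone via addCleanup(). A minimal, self-contained sketch of the pattern (class name is illustrative and not part of this patch; it assumes the yardstick tree is importable):

import unittest
import mock

from yardstick import ssh


class PatchInSetUpExample(unittest.TestCase):
    def setUp(self):
        # Start the patch for every test; addCleanup guarantees it is
        # stopped even when setUp() or the test body fails early.
        self._mock_ssh = mock.patch.object(ssh, 'SSH')
        self.mock_ssh = self._mock_ssh.start()
        self.addCleanup(self._mock_ssh.stop)

    def test_execute(self):
        # No decorator-injected argument: the test reads self.mock_ssh.
        self.mock_ssh.from_node().execute.return_value = (0, 'ok', '')
        status, stdout, _stderr = self.mock_ssh.from_node().execute('ls')
        self.assertEqual(0, status)
        self.assertEqual('ok', stdout)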
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
index 03003d01f..02040ca01 100644
--- a/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
@@ -17,6 +17,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.compute import qemu_migrate
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.compute.qemu_migrate.ssh')
@@ -116,7 +117,7 @@ class QemuMigrateTestCase(unittest.TestCase):
sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, q.run, result)
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
def test_qemu_migrate_unsuccessful_sla_downtime(self, mock_ssh):
@@ -129,7 +130,7 @@ class QemuMigrateTestCase(unittest.TestCase):
sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, q.run, result)
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
def test_qemu_migrate_unsuccessful_sla_setuptime(self, mock_ssh):
@@ -142,7 +143,7 @@ class QemuMigrateTestCase(unittest.TestCase):
sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, q.run, result)
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
def test_qemu_migrate_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
index dcc0e810d..9e055befe 100644
--- a/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
@@ -18,6 +18,7 @@ from oslo_serialization import jsonutils
from yardstick.common import utils
from yardstick.benchmark.scenarios.compute import ramspeed
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.compute.ramspeed.ssh')
@@ -146,7 +147,7 @@ class RamspeedTestCase(unittest.TestCase):
"Block_size(kb)": 16384, "Bandwidth(MBps)": 14128.94}, {"Test_type":\
"INTEGER & WRITING", "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, r.run, self.result)
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
def test_ramspeed_unsuccessful_script_error(self, mock_ssh):
options = {
@@ -219,7 +220,7 @@ class RamspeedTestCase(unittest.TestCase):
"Bandwidth(MBps)": 1300.27}, {"Test_type": "INTEGER AVERAGE:",\
"Bandwidth(MBps)": 2401.58}]}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, r.run, self.result)
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
def test_ramspeed_unsuccessful_unknown_type_run(self, mock_ssh):
options = {
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
index 6339a2dcd..e4a8d6e26 100644
--- a/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
@@ -17,6 +17,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.compute import unixbench
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.compute.unixbench.ssh')
@@ -122,7 +123,7 @@ class UnixbenchTestCase(unittest.TestCase):
sample_output = '{"single_score":"200.7","parallel_score":"4395.9"}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, u.run, result)
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
def test_unixbench_unsuccessful_sla_parallel_score(self, mock_ssh):
@@ -137,7 +138,7 @@ class UnixbenchTestCase(unittest.TestCase):
sample_output = '{"signle_score":"2251.7","parallel_score":"3395.9"}'
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, u.run, result)
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
def test_unixbench_unsuccessful_script_error(self, mock_ssh):
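
The qemu_migrate, ramspeed and unixbench hunks above all make the same change: SLA breaches now surface as yardstick.common.exceptions.SLAValidationError rather than a bare AssertionError, so they cannot be silenced by python -O and are distinguishable from genuine test-bug assertions. A rough sketch of the scenario-side check these tests imply (the exception class below is a stand-in for the real one in yardstick.common.exceptions, and the helper name is illustrative):

class SLAValidationError(Exception):
    """Stand-in for yardstick.common.exceptions.SLAValidationError."""


def validate_latency_sla(measured_usec, sla_max_usec):
    # Raise a dedicated exception instead of using `assert`, so the
    # check survives optimized mode and carries a readable message.
    if measured_usec > sla_max_usec:
        raise SLAValidationError(
            'latency %.3f usec exceeds SLA maximum %.3f usec'
            % (measured_usec, sla_max_usec))


# An out-of-SLA sample such as the 37.5 usec latency used in
# test_unsuccessful_latency_run_sla would raise SLAValidationError here.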
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
new file mode 100644
index 000000000..9b3afd1fb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
@@ -0,0 +1,14 @@
+{
+ "@odata.id": "/redfish/v1/Chassis",
+ "Name": "ChassisCollection",
+ "@odata.context": "/redfish/v1/$metadata#ChassisCollection.ChassisCollection",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.type": "#ChassisCollection.ChassisCollection",
+ "@odata.etag": "\"af5a94479815eb5f87fe91ea08fde0ac\"",
+ "Members@odata.count": 1,
+ "Description": "A collection of Chassis resource instances."
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
new file mode 100644
index 000000000..343ed3667
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
@@ -0,0 +1,300 @@
+{
+ "PowerControl@odata.count": 1,
+ "@odata.id": "/redfish/v1/Chassis/1/Power",
+ "Redundancy@odata.count": 1,
+ "@odata.context": "/redfish/v1/$metadata#Power.Power",
+ "Voltages": [
+ {
+ "MaxReadingRange": 14.28,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/0",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 140,
+ "Name": "SysBrd 12V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 10.81,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "0",
+ "MinReadingRange": null,
+ "ReadingVolts": 12.15,
+ "UpperThresholdCritical": 13.22
+ },
+ {
+ "MaxReadingRange": 3.95,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/1",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 141,
+ "Name": "SysBrd 3.3V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.98,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "1",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 3.63,
+ "ReadingVolts": 3.36
+ },
+ {
+ "MaxReadingRange": 5.97,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/2",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 142,
+ "Name": "SysBrd 5V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 4.49,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "2",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 5.5,
+ "ReadingVolts": 5.03
+ },
+ {
+ "MaxReadingRange": 3.32,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/3",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 3,
+ "Name": "CMOS Battery",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.25,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "3",
+ "MinReadingRange": null,
+ "LowerThresholdNonCritical": 2.39,
+ "ReadingVolts": 3.12
+ }
+ ],
+ "Voltages@odata.count": 4,
+ "Redundancy": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Redundancy/0",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "Name": "PSU Redundancy",
+ "MinNumNeeded": 2,
+ "Oem": {
+ "Lenovo": {
+ "NonRedundantAvailablePower": 1100,
+ "@odata.type": "#LenovoRedundancy.v1_0_0.LenovoRedundancyProperties",
+ "PowerRedundancySettings": {
+ "EstimatedUsage": "58.55%",
+ "MaxPowerLimitWatts": 1100,
+ "PowerFailureLimit": 0,
+ "PowerRedundancyPolicy": "RedundantWithThrottling"
+ }
+ }
+ },
+ "RedundancyEnabled": true,
+ "RedundancySet": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1"
+ }
+ ],
+ "RedundancySet@odata.count": 2,
+ "MaxNumSupported": 2,
+ "Mode": "N+m",
+ "MemberId": "0"
+ }
+ ],
+ "Description": "Power Consumption and Power Limiting",
+ "Name": "Power",
+ "PowerSupplies@odata.count": 2,
+ "Oem": {
+ "Lenovo": {
+ "@odata.type": "#LenovoPower.v1_0_0.Capabilities",
+ "LocalPowerControlEnabled": true,
+ "PowerOnPermissionEnabled": true,
+ "PowerRestorePolicy": "Restore",
+ "WakeOnLANEnabled": true
+ }
+ },
+ "@odata.type": "#Power.v1_5_1.Power",
+ "Id": "Power",
+ "@odata.etag": "\"ad85a1403e07a433386e9907d00565cc\"",
+ "PowerControl": [
+ {
+ "PowerAllocatedWatts": 1100,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerControl/0",
+ "Status": {
+ "HealthRollup": "Warning",
+ "State": "Enabled"
+ },
+ "PowerLimit": {
+ "LimitException": "NoAction",
+ "LimitInWatts": null
+ },
+ "Name": "Server Power Control",
+ "Oem": {
+ "Lenovo": {
+ "PowerUtilization": {
+ "MaxLimitInWatts": 1100,
+ "EnablePowerCapping": false,
+ "LimitMode": "AC",
+ "EnablePowerCapping@Redfish.Deprecated": "The property is deprecated. Please use LimitInWatts instead.",
+ "CapacityMinAC": 617,
+ "MinLimitInWatts": 0,
+ "GuaranteedInWatts": 617,
+ "CapacityMinDC": 578,
+ "CapacityMaxDC": 749,
+ "CapacityMaxAC": 802
+ },
+ "HistoryPowerMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerControl/0/Oem/Lenovo/HistoryPowerMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerControl"
+ }
+ },
+ "PowerAvailableWatts": 0,
+ "PowerMetrics": {
+ "IntervalInMin": 60,
+ "AverageConsumedWatts": 314.716675,
+ "MinConsumedWatts": 311,
+ "MaxConsumedWatts": 318
+ },
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PowerRequestedWatts": 802,
+ "PowerConsumedWatts": 344,
+ "PowerCapacityWatts": 1100
+ }
+ ],
+ "PowerSupplies": [
+ {
+ "SerialNumber": "A4DB8BP11WJ",
+ "InputRanges": [
+ {
+ "InputType": null,
+ "OutputWattage": null,
+ "MinimumVoltage": null,
+ "MaximumVoltage": null
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "Warning"
+ },
+ "LineInputVoltage": null,
+ "Name": "PSU1",
+ "PowerSupplyType": "Unknown",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 1"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/0/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": null,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "Unknown",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ },
+ {
+ "SerialNumber": "A4DB8BP12J7",
+ "InputRanges": [
+ {
+ "InputType": "AC",
+ "OutputWattage": 1100,
+ "MinimumVoltage": 200,
+ "MaximumVoltage": 240
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "1",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "LineInputVoltage": 220,
+ "Name": "PSU2",
+ "PowerSupplyType": "AC",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 2"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/1/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": 1100,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "ACMidLine",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ }
+ ]
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
new file mode 100644
index 000000000..98daefeb7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
@@ -0,0 +1,182 @@
+##############################################################################
+# Copyright (c) 2019 Lenovo Group Limited Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.energy.energy.Energy
+
+from __future__ import absolute_import
+import unittest
+import mock
+import os
+from yardstick.benchmark.scenarios.energy import energy
+
+
+class EnergyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'target': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'password': 'passw0rd',
+ 'redfish_ip': '10.229.17.105',
+ 'redfish_user': 'USERID',
+ 'redfish_pwd': "PASSW0RD",
+ }
+ }
+ self.result = {}
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ p.setup()
+ self.assertTrue(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_failed(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ p.setup()
+ self.assertFalse(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_list_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_chassis_output.txt")
+ expect_result = str(expect_result)
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, ["/redfish/v1/Chassis/1"])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_power_metrics.txt")
+ expect_result = str(expect_result)
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, 344)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ power = mock.Mock(return_value=344)
+ p.get_power = power
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": 344})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_no_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_wrong_chassis(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ chassis_list = mock.Mock(return_value=[])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
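
The two fixtures above are trimmed Redfish payloads: a ChassisCollection whose Members list yields "/redfish/v1/Chassis/1", and a Power resource whose PowerControl[0].PowerConsumedWatts (344 W in the sample) is the value the scenario reports, with -1 reserved for any error. A minimal sketch of the parsing these tests pin down (function names are illustrative; the real methods are Energy.load_chassis_list and Energy.get_power):

import json


def parse_chassis_list(response_text):
    # A missing, non-string or malformed payload yields an empty list,
    # matching the wrongtype and improper-key cases above.
    try:
        body = json.loads(response_text)
        return [member['@odata.id'] for member in body['Members']]
    except (TypeError, ValueError, KeyError):
        return []


def parse_power_consumed(response_text):
    # -1 is the sentinel the tests expect for every failure mode.
    try:
        body = json.loads(response_text)
        return body['PowerControl'][0]['PowerConsumedWatts']
    except (TypeError, ValueError, KeyError, IndexError):
        return -1


# parse_chassis_list(chassis_fixture) == ['/redfish/v1/Chassis/1']
# parse_power_consumed(power_fixture) == 344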
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
index 2964ecc14..bb7fa4536 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
@@ -6,21 +6,51 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.attach_volume import AttachVolume
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import attach_volume
class AttachVolumeTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.attach_server_volume')
- def test_attach_volume(self, mock_attach_server_volume):
- options = {
- 'volume_id': '123-456-000',
- 'server_id': '000-123-456'
- }
- args = {"options": options}
- obj = AttachVolume(args, {})
- obj.run({})
- mock_attach_server_volume.assert_called_once()
+ def setUp(self):
+
+ self._mock_attach_volume_to_server = mock.patch.object(
+ openstack_utils, 'attach_volume_to_server')
+ self.mock_attach_volume_to_server = (
+ self._mock_attach_volume_to_server.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(attach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+ self.addCleanup(self._stop_mock)
+ self.attachvol_obj = attach_volume.AttachVolume(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_attach_volume_to_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_attach_volume_to_server.return_value = True
+ self.assertIsNone(self.attachvol_obj.run(self.result))
+ self.assertEqual({'attach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Attach volume to server successful!')
+
+ def test_run_fail(self):
+ self.mock_attach_volume_to_server.return_value = False
+ with self.assertRaises(exceptions.ScenarioAttachVolumeError):
+ self.attachvol_obj.run(self.result)
+ self.assertEqual({'attach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Attach volume to server failed!')
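
This rewrite also tightens the contract under test: run() now records a 1/0 counter in the result dict and raises a scenario-specific exception on failure instead of returning a boolean. A sketch of the run() shape these assertions imply (the exception class is a stand-in for yardstick.common.exceptions.ScenarioAttachVolumeError, and attach_fn stands in for openstack_utils.attach_volume_to_server):

import logging

LOG = logging.getLogger(__name__)


class ScenarioAttachVolumeError(Exception):
    """Stand-in for the exception raised by the real scenario."""


def run_attach_volume(attach_fn, result):
    # attach_fn is assumed to return True on success, False on failure.
    if not attach_fn():
        result.update({'attach_volume': 0})
        LOG.error('Attach volume to server failed!')
        raise ScenarioAttachVolumeError
    result.update({'attach_volume': 1})
    LOG.info('Attach volume to server successful!')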
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
index 7a2324b3d..b0488bacd 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
@@ -8,28 +8,56 @@
##############################################################################
import unittest
-from yardstick.benchmark.scenarios.lib.check_value import CheckValue
+from yardstick.benchmark.scenarios.lib import check_value
+from yardstick.common import exceptions as y_exc
+
class CheckValueTestCase(unittest.TestCase):
- def setUp(self):
- self.result = {}
+ def test_eq_pass(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_ne_pass(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_result(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1},
+ 'output': 'foo'}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertDictEqual(result, {'foo': 'PASS'})
- def test_check_value_eq(self):
- scenario_cfg = {'options': {'operator': 'eq', 'value1': 1, 'value2': 2}}
- obj = CheckValue(scenario_cfg, {})
- self.assertRaises(AssertionError, obj.run, self.result)
- self.assertEqual({}, self.result)
+ def test_eq(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
- def test_check_value_eq_pass(self):
- scenario_cfg = {'options': {'operator': 'eq', 'value1': 1, 'value2': 1}}
- obj = CheckValue(scenario_cfg, {})
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
- obj.run(self.result)
- self.assertEqual({}, self.result)
+ def test_ne(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
- def test_check_value_ne(self):
- scenario_cfg = {'options': {'operator': 'ne', 'value1': 1, 'value2': 1}}
- obj = CheckValue(scenario_cfg, {})
- self.assertRaises(AssertionError, obj.run, self.result)
- self.assertEqual({}, self.result)
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
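
Taken together these cases fix CheckValue's contract: a passing comparison returns the optional named output ({'foo': 'PASS'} above), while a failing one raises y_exc.ValueCheckError instead of a bare AssertionError. A compact sketch of that dispatch using the stdlib operator module (illustrative only; ValueCheckError here is a stand-in for the yardstick exception):

import operator


class ValueCheckError(Exception):
    """Stand-in for yardstick.common.exceptions.ValueCheckError."""


_OPERATORS = {'eq': operator.eq, 'ne': operator.ne}


def check_value(op_name, value1, value2, output_name=None):
    # Unknown operator names raise KeyError loudly rather than passing.
    if not _OPERATORS[op_name](value1, value2):
        raise ValueCheckError(
            'Check failed: %s %s %s' % (value1, op_name, value2))
    return {output_name: 'PASS'} if output_name else {}


# check_value('eq', 1, 1, output_name='foo') -> {'foo': 'PASS'}
# check_value('ne', 1, 1) raises ValueCheckError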
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
index 639cf2906..aebd1dfe8 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
@@ -6,30 +6,50 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import unittest
+
import mock
+from oslo_utils import uuidutils
+import unittest
-from yardstick.benchmark.scenarios.lib import create_image
from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_image
+
-# NOTE(elfoley): There should be more tests here.
class CreateImageTestCase(unittest.TestCase):
- @mock.patch.object(openstack_utils, 'create_image')
- @mock.patch.object(openstack_utils, 'get_glance_client')
- def test_create_image(self, mock_get_glance_client, mock_create_image):
- options = {
- 'image_name': 'yardstick_test_image_01',
- 'disk_format': 'qcow2',
- 'container_format': 'bare',
- 'min_disk': '1',
- 'min_ram': '512',
- 'protected': 'False',
- 'tags': '["yardstick automatic test image"]',
- 'file_path': '/home/opnfv/images/cirros-0.3.5-x86_64-disk.img'
- }
- args = {"options": options}
- obj = create_image.CreateImage(args, {})
- obj.run({})
- mock_create_image.assert_called_once()
- mock_get_glance_client.assert_called_once()
+ def setUp(self):
+ self._mock_create_image = mock.patch.object(
+ openstack_utils, 'create_image')
+ self.mock_create_image = (
+ self._mock_create_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'image_name': 'yardstick_image'}}
+ self.result = {}
+ self.cimage_obj = create_image.CreateImage(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cimage_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_image.return_value = _uuid
+ output = self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create image successful!')
+
+ def test_run_fail(self):
+ self.mock_create_image.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateImageError):
+ self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
index 1c3d6cebc..a7b683f47 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
@@ -6,22 +6,52 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-
-import mock
+from oslo_utils import uuidutils
import unittest
+import mock
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
from yardstick.benchmark.scenarios.lib import create_keypair
class CreateKeypairTestCase(unittest.TestCase):
- @mock.patch.object(create_keypair, 'paramiko')
- @mock.patch.object(create_keypair, 'op_utils')
- def test_create_keypair(self, mock_op_utils, *args):
- options = {
- 'key_name': 'yardstick_key',
- 'key_path': '/tmp/yardstick_key'
- }
- args = {"options": options}
- obj = create_keypair.CreateKeypair(args, {})
- obj.run({})
- mock_op_utils.create_keypair.assert_called_once()
+
+ def setUp(self):
+
+ self._mock_create_keypair = mock.patch.object(
+ openstack_utils, 'create_keypair')
+ self.mock_create_keypair = (
+ self._mock_create_keypair.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+
+ self.ckeypair_obj = create_keypair.CreateKeypair(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.ckeypair_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_keypair.return_value = {
+ 'name': 'key-name', 'type': 'ssh', 'id': _uuid}
+ output = self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_create_keypair.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateKeypairError):
+ self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create keypair failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
index 21158ab17..0477a49d4 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
@@ -6,25 +6,54 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.create_sec_group import CreateSecgroup
-
-
-class CreateSecGroupTestCase(unittest.TestCase):
-
- @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
- @mock.patch('yardstick.common.openstack_utils.create_security_group_full')
- def test_create_sec_group(self, mock_get_neutron_client, mock_create_security_group_full):
- options = {
- 'openstack_paras': {
- 'sg_name': 'yardstick_sec_group',
- 'description': 'security group for yardstick manual VM'
- }
- }
- args = {"options": options}
- obj = CreateSecgroup(args, {})
- obj.run({})
- mock_get_neutron_client.assert_called_once()
- mock_create_security_group_full.assert_called_once()
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_sec_group
+
+
+class CreateSecurityGroupTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_security_group_full = mock.patch.object(
+ openstack_utils, 'create_security_group_full')
+ self.mock_create_security_group_full = (
+ self._mock_create_security_group_full.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_sec_group, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'sg_name': 'yardstick_sg'}}
+ self.result = {}
+
+ self.csecgp_obj = create_sec_group.CreateSecgroup(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_security_group_full.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.csecgp_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_security_group_full.return_value = _uuid
+ output = self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with(
+ 'Create security group successful!')
+
+ def test_run_fail(self):
+ self.mock_create_security_group_full.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateSecurityGroupError):
+ self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Create security group failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
index 9d6d8cb1b..b58785112 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
@@ -6,29 +6,54 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.create_server import CreateServer
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_server
class CreateServerTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.create_instance_and_wait_for_active')
- @mock.patch('yardstick.common.openstack_utils.get_nova_client')
- @mock.patch('yardstick.common.openstack_utils.get_glance_client')
- @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
- def test_create_server(self, mock_get_nova_client, mock_get_neutron_client,
- mock_get_glance_client, mock_create_instance_and_wait_for_active):
- scenario_cfg = {
- 'options': {
- 'openstack_paras': 'example'
- },
- 'output': 'server'
- }
- obj = CreateServer(scenario_cfg, {})
- obj.run({})
- mock_get_nova_client.assert_called_once()
- mock_get_glance_client.assert_called_once()
- mock_get_neutron_client.assert_called_once()
- mock_create_instance_and_wait_for_active.assert_called_once()
+ def setUp(self):
+
+ self._mock_create_instance_and_wait_for_active = mock.patch.object(
+ openstack_utils, 'create_instance_and_wait_for_active')
+ self.mock_create_instance_and_wait_for_active = (
+ self._mock_create_instance_and_wait_for_active.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {
+ 'options': {'name': 'server-name', 'image': 'image-name',
+ 'flavor': 'flavor-name'}}
+ self.result = {}
+
+ self.addCleanup(self._stop_mock)
+ self.cserver_obj = create_server.CreateServer(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_create_instance_and_wait_for_active.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cserver_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_instance_and_wait_for_active.return_value = (
+ {'name': 'server-name', 'flavor': 'flavor-name', 'id': _uuid})
+ output = self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create server successful!')
+
+ def test_run_fail(self):
+ self.mock_create_instance_and_wait_for_active.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateServerError):
+ self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
index 30333dda8..f91d2c3f4 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
@@ -6,95 +6,53 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-import mock
+from oslo_utils import uuidutils
import unittest
+import mock
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
from yardstick.benchmark.scenarios.lib import create_volume
class CreateVolumeTestCase(unittest.TestCase):
def setUp(self):
- self._mock_cinder_client = mock.patch(
- 'yardstick.common.openstack_utils.get_cinder_client')
- self.mock_cinder_client = self._mock_cinder_client.start()
- self._mock_glance_client = mock.patch(
- 'yardstick.common.openstack_utils.get_glance_client')
- self.mock_glance_client = self._mock_glance_client.start()
- self.addCleanup(self._stop_mock)
-
- self.scenario_cfg = {
- "options" :
- {
- 'volume_name': 'yardstick_test_volume_01',
- 'size': '256',
- 'image': 'cirros-0.3.5'
- }
- }
- self.scenario = create_volume.CreateVolume(
- scenario_cfg=self.scenario_cfg,
- context_cfg={})
+ self._mock_create_volume = mock.patch.object(
+ openstack_utils, 'create_volume')
+ self.mock_create_volume = (
+ self._mock_create_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'size_gb': 1}}
+ self.result = {}
+
+ self.cvolume_obj = create_volume.CreateVolume(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
def _stop_mock(self):
- self._mock_cinder_client.stop()
- self._mock_glance_client.stop()
-
- def test_init(self):
- self.mock_cinder_client.return_value = "All volumes are equal"
- self.mock_glance_client.return_value = "Images are more equal"
-
- expected_vol_name = self.scenario_cfg["options"]["volume_name"]
- expected_vol_size = self.scenario_cfg["options"]["size"]
- expected_im_name = self.scenario_cfg["options"]["image"]
- expected_im_id = None
-
- scenario = create_volume.CreateVolume(
- scenario_cfg=self.scenario_cfg,
- context_cfg={})
-
- self.assertEqual(expected_vol_name, scenario.volume_name)
- self.assertEqual(expected_vol_size, scenario.volume_size)
- self.assertEqual(expected_im_name, scenario.image_name)
- self.assertEqual(expected_im_id, scenario.image_id)
- self.assertEqual("All volumes are equal", scenario.cinder_client)
- self.assertEqual("Images are more equal", scenario.glance_client)
-
- def test_setup(self):
- self.assertFalse(self.scenario.setup_done)
- self.scenario.setup()
- self.assertTrue(self.scenario.setup_done)
-
- @mock.patch('yardstick.common.openstack_utils.create_volume')
- @mock.patch('yardstick.common.openstack_utils.get_image_id')
- def test_run(self, mock_image_id, mock_create_volume):
- self.scenario.run()
-
- mock_image_id.assert_called_once()
- mock_create_volume.assert_called_once()
-
- @mock.patch.object(create_volume.CreateVolume, 'setup')
- def test_run_no_setup(self, scenario_setup):
- self.scenario.setup_done = False
- self.scenario.run()
- scenario_setup.assert_called_once()
-
- @mock.patch('yardstick.common.openstack_utils.create_volume')
- @mock.patch('yardstick.common.openstack_utils.get_image_id')
- @mock.patch('yardstick.common.openstack_utils.get_cinder_client')
- @mock.patch('yardstick.common.openstack_utils.get_glance_client')
- def test_create_volume(self, mock_get_glance_client,
- mock_get_cinder_client, mock_image_id,
- mock_create_volume):
- options = {
- 'volume_name': 'yardstick_test_volume_01',
- 'size': '256',
- 'image': 'cirros-0.3.5'
- }
- args = {"options": options}
- scenario = create_volume.CreateVolume(args, {})
- scenario.run()
- mock_create_volume.assert_called_once()
- mock_image_id.assert_called_once()
- mock_get_glance_client.assert_called_once()
- mock_get_cinder_client.assert_called_once()
+ self._mock_create_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cvolume_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_volume.return_value = {'name': 'yardstick_volume',
+ 'id': _uuid,
+ 'status': 'available'}
+ output = self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create volume successful!')
+
+ def test_run_fail(self):
+ self.mock_create_volume.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateVolumeError):
+ self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
index e382d46fa..8a1d6d695 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
@@ -9,21 +9,44 @@
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.delete_image import DeleteImage
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_image
class DeleteImageTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.delete_image')
- @mock.patch('yardstick.common.openstack_utils.get_image_id')
- @mock.patch('yardstick.common.openstack_utils.get_glance_client')
- def test_delete_image(self, mock_get_glance_client, mock_image_id, mock_delete_image):
- options = {
- 'image_name': 'yardstick_test_image_01'
- }
- args = {"options": options}
- obj = DeleteImage(args, {})
- obj.run({})
- mock_delete_image.assert_called_once()
- mock_image_id.assert_called_once()
- mock_get_glance_client.assert_called_once()
+ def setUp(self):
+ self._mock_delete_image = mock.patch.object(
+ openstack_utils, 'delete_image')
+ self.mock_delete_image = (
+ self._mock_delete_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_image'}}
+ self.result = {}
+
+ self.delimg_obj = delete_image.DeleteImage(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_image.return_value = True
+ self.assertIsNone(self.delimg_obj.run(self.result))
+ self.assertEqual({'delete_image': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete image successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_image.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteImageError):
+ self.delimg_obj.run(self.result)
+ self.assertEqual({'delete_image': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
index 6e790ba90..c7940251e 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
@@ -9,19 +9,43 @@
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.delete_keypair import DeleteKeypair
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_keypair
class DeleteKeypairTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.get_nova_client')
- @mock.patch('yardstick.common.openstack_utils.delete_keypair')
- def test_detach_volume(self, mock_get_nova_client, mock_delete_keypair):
- options = {
- 'key_name': 'yardstick_key'
- }
- args = {"options": options}
- obj = DeleteKeypair(args, {})
- obj.run({})
- mock_get_nova_client.assert_called_once()
- mock_delete_keypair.assert_called_once()
+ def setUp(self):
+ self._mock_delete_keypair = mock.patch.object(
+ openstack_utils, 'delete_keypair')
+ self.mock_delete_keypair = self._mock_delete_keypair.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+ self.delkey_obj = delete_keypair.DeleteKeypair(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_keypair.return_value = True
+ self.assertIsNone(self.delkey_obj.run(self.result))
+ self.assertEqual({'delete_keypair': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Delete keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_keypair.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteKeypairError):
+ self.delkey_obj.run(self.result)
+ self.assertEqual({'delete_keypair': 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete keypair failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
index aef99ee94..b6dbf4791 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
@@ -11,7 +11,8 @@ from oslo_utils import uuidutils
import unittest
import mock
-import yardstick.common.openstack_utils as op_utils
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
from yardstick.benchmark.scenarios.lib import delete_network
@@ -19,16 +20,17 @@ class DeleteNetworkTestCase(unittest.TestCase):
def setUp(self):
self._mock_delete_neutron_net = mock.patch.object(
- op_utils, 'delete_neutron_net')
+ openstack_utils, "delete_neutron_net")
self.mock_delete_neutron_net = self._mock_delete_neutron_net.start()
self._mock_get_shade_client = mock.patch.object(
- op_utils, 'get_shade_client')
+ openstack_utils, "get_shade_client")
self.mock_get_shade_client = self._mock_get_shade_client.start()
- self._mock_log = mock.patch.object(delete_network, 'LOG')
+ self._mock_log = mock.patch.object(delete_network, "LOG")
self.mock_log = self._mock_log.start()
- _uuid = uuidutils.generate_uuid()
- self.args = {'options': {'network_id': _uuid}}
- self._del_obj = delete_network.DeleteNetwork(self.args, mock.ANY)
+ self.args = {"options": {"network_name_or_id": (
+ uuidutils.generate_uuid())}}
+ self.result = {}
+ self.del_obj = delete_network.DeleteNetwork(self.args, mock.ANY)
self.addCleanup(self._stop_mock)
@@ -39,11 +41,14 @@ class DeleteNetworkTestCase(unittest.TestCase):
def test_run(self):
self.mock_delete_neutron_net.return_value = True
- self.assertTrue(self._del_obj.run({}))
+ self.assertIsNone(self.del_obj.run(self.result))
+ self.assertEqual({"delete_network": 1}, self.result)
self.mock_log.info.assert_called_once_with(
"Delete network successful!")
def test_run_fail(self):
self.mock_delete_neutron_net.return_value = False
- self.assertFalse(self._del_obj.run({}))
+ with self.assertRaises(exceptions.ScenarioDeleteNetworkError):
+ self.del_obj.run(self.result)
+ self.assertEqual({"delete_network": 0}, self.result)
self.mock_log.error.assert_called_once_with("Delete network failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
index eee565de7..55fe53df8 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
@@ -6,22 +6,49 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.delete_server import DeleteServer
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_server
class DeleteServerTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.delete_instance')
- @mock.patch('yardstick.common.openstack_utils.get_nova_client')
- def test_delete_server(self, mock_get_nova_client, mock_delete_instance):
- options = {
- 'server_id': '1234-4567-0000'
- }
- args = {"options": options}
- obj = DeleteServer(args, {})
- obj.run({})
- mock_get_nova_client.assert_called_once()
- mock_delete_instance.assert_called_once()
+ def setUp(self):
+ self._mock_delete_instance = mock.patch.object(
+ openstack_utils, 'delete_instance')
+ self.mock_delete_instance = (
+ self._mock_delete_instance.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': uuidutils.generate_uuid()
+ }}
+ self.result = {}
+
+ self.delserver_obj = delete_server.DeleteServer(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_instance.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_instance.return_value = True
+ self.assertIsNone(self.delserver_obj.run(self.result))
+ self.assertEqual({'delete_server': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete server successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_instance.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteServerError):
+ self.delserver_obj.run(self.result)
+ self.assertEqual({'delete_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
index 93f76e819..0db16f396 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
@@ -9,19 +9,44 @@
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.delete_volume import DeleteVolume
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_volume
class DeleteVolumeTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.get_cinder_client')
- @mock.patch('yardstick.common.openstack_utils.delete_volume')
- def test_delete_volume(self, mock_get_cinder_client, mock_delete_volume):
- options = {
- 'volume_id': '123-123-123'
- }
- args = {"options": options}
- obj = DeleteVolume(args, {})
- obj.run({})
- mock_get_cinder_client.assert_called_once()
- mock_delete_volume.assert_called_once()
+ def setUp(self):
+ self._mock_delete_volume = mock.patch.object(
+ openstack_utils, 'delete_volume')
+ self.mock_delete_volume = (
+ self._mock_delete_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_volume'}}
+ self.result = {}
+
+ self.delvol_obj = delete_volume.DeleteVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_volume.return_value = True
+ self.assertIsNone(self.delvol_obj.run(self.result))
+ self.assertEqual({'delete_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete volume successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteVolumeError):
+ self.delvol_obj.run(self.result)
+ self.assertEqual({'delete_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
index 9794d2129..2bc57f495 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
@@ -6,21 +6,52 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.detach_volume import DetachVolume
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import detach_volume
class DetachVolumeTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.detach_volume')
- def test_detach_volume(self, mock_detach_volume):
- options = {
- 'server_id': '321-321-321',
- 'volume_id': '123-123-123'
- }
- args = {"options": options}
- obj = DetachVolume(args, {})
- obj.run({})
- mock_detach_volume.assert_called_once()
+ def setUp(self):
+ self._mock_detach_volume = mock.patch.object(
+ openstack_utils, 'detach_volume')
+ self.mock_detach_volume = (
+ self._mock_detach_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(detach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+
+ self.detachvol_obj = detach_volume.DetachVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_detach_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_detach_volume.return_value = True
+ self.assertIsNone(self.detachvol_obj.run(self.result))
+ self.assertEqual({'detach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Detach volume from server successful!')
+
+ def test_run_fail(self):
+ self.mock_detach_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDetachVolumeError):
+ self.detachvol_obj.run(self.result)
+ self.assertEqual({'detach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Detach volume from server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
index 15a6f7c8f..1c1364348 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
@@ -6,20 +6,52 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.get_flavor import GetFlavor
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_flavor
class GetFlavorTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.get_flavor_by_name')
- def test_get_flavor(self, mock_get_flavor_by_name):
- options = {
- 'flavor_name': 'yardstick_test_flavor'
- }
- args = {"options": options}
- obj = GetFlavor(args, {})
- obj.run({})
- mock_get_flavor_by_name.assert_called_once()
+ def setUp(self):
+
+ self._mock_get_flavor = mock.patch.object(
+ openstack_utils, 'get_flavor')
+ self.mock_get_flavor = self._mock_get_flavor.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_flavor, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_flavor'}}
+ self.result = {}
+
+ self.getflavor_obj = get_flavor.GetFlavor(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_flavor.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getflavor_obj.scenario_cfg = {'output': 'flavor'}
+ self.mock_get_flavor.return_value = (
+ {'name': 'flavor-name', 'id': _uuid})
+ output = self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 1}, self.result)
+ self.assertDictEqual({'flavor': {'name': 'flavor-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get flavor successful!')
+
+ def test_run_fail(self):
+ self.mock_get_flavor.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetFlavorError):
+ self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get flavor failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
index 83ec903bc..5b5329cb0 100644
--- a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
@@ -6,37 +6,52 @@
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
+from oslo_utils import uuidutils
import unittest
import mock
-from yardstick.benchmark.scenarios.lib.get_server import GetServer
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_server
class GetServerTestCase(unittest.TestCase):
- @mock.patch('yardstick.common.openstack_utils.get_server_by_name')
- @mock.patch('yardstick.common.openstack_utils.get_nova_client')
- def test_get_server_with_name(self, mock_get_nova_client, mock_get_server_by_name):
- scenario_cfg = {
- 'options': {
- 'server_name': 'yardstick_server'
- },
- 'output': 'status server'
- }
- obj = GetServer(scenario_cfg, {})
- obj.run({})
- mock_get_nova_client.assert_called_once()
- mock_get_server_by_name.assert_called_once()
-
- @mock.patch('yardstick.common.openstack_utils.get_nova_client')
- def test_get_server_with_id(self, mock_get_nova_client):
- scenario_cfg = {
- 'options': {
- 'server_id': '1'
- },
- 'output': 'status server'
- }
- mock_get_nova_client().servers.get.return_value = None
- obj = GetServer(scenario_cfg, {})
- obj.run({})
- mock_get_nova_client.assert_called()
+ def setUp(self):
+
+ self._mock_get_server = mock.patch.object(
+ openstack_utils, 'get_server')
+ self.mock_get_server = self._mock_get_server.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_server'}}
+ self.result = {}
+
+ self.getserver_obj = get_server.GetServer(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getserver_obj.scenario_cfg = {'output': 'server'}
+ self.mock_get_server.return_value = (
+ {'name': 'server-name', 'id': _uuid})
+ output = self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 1}, self.result)
+ self.assertDictEqual({'server': {'name': 'server-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get Server successful!')
+
+ def test_run_fail(self):
+ self.mock_get_server.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetServerError):
+ self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get Server failed!')
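
Unlike the delete tests, the get_flavor/get_server tests also assert on run()'s return value, which is keyed by scenario_cfg['output']. A sketch of that success path, assuming the base Scenario's _push_to_outputs() helper; everything else is illustrative:

```python
# Illustrative reduction of the paths asserted above. GetServerSketch
# is a hypothetical class, not the scenario shipped in yardstick.
from yardstick.benchmark.scenarios import base
from yardstick.common import exceptions
from yardstick.common import openstack_utils


class GetServerSketch(base.Scenario):
    __scenario_type__ = 'GetServerSketch'

    def __init__(self, scenario_cfg, context_cfg):
        self.scenario_cfg = scenario_cfg
        self.context_cfg = context_cfg
        self.shade_client = openstack_utils.get_shade_client()

    def run(self, result):
        server = openstack_utils.get_server(
            self.shade_client,
            name_or_id=self.scenario_cfg['options']['name_or_id'])
        if not server:
            result.update({'get_server': 0})
            raise exceptions.ScenarioGetServerError()
        result.update({'get_server': 1})
        keys = self.scenario_cfg.get('output', '').split()
        # With scenario_cfg = {'output': 'server'} this returns
        # {'server': {...}}, matching the assertDictEqual above.
        return self._push_to_outputs(keys, [server])
```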
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
index 74144afd5..5f342df7d 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -7,10 +7,6 @@
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
-# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
-
-from __future__ import absolute_import
-
import os
import unittest
@@ -19,9 +15,10 @@ from oslo_serialization import jsonutils
from yardstick.common import utils
from yardstick.benchmark.scenarios.networking import iperf3
+from yardstick.common import exceptions as y_exc
-@mock.patch('yardstick.benchmark.scenarios.networking.iperf3.ssh')
+@mock.patch.object(iperf3, 'ssh')
class IperfTestCase(unittest.TestCase):
output_name_tcp = 'iperf3_sample_output.json'
output_name_udp = 'iperf3_sample_output_udp.json'
@@ -40,9 +37,14 @@ class IperfTestCase(unittest.TestCase):
'ipaddr': '172.16.0.138',
}
}
+ self._mock_log_info = mock.patch.object(iperf3.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+ self.addCleanup(self._stop_mocks)
- def test_iperf_successful_setup(self, mock_ssh):
+ def _stop_mocks(self):
+ self._mock_log_info.stop()
+ def test_iperf_successful_setup(self, mock_ssh):
p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
@@ -52,13 +54,11 @@ class IperfTestCase(unittest.TestCase):
mock_ssh.SSH.from_node().execute.assert_called_with("iperf3 -s -D")
def test_iperf_unsuccessful_setup(self, mock_ssh):
-
p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.setup)
def test_iperf_successful_teardown(self, mock_ssh):
-
p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH.from_node()
@@ -69,7 +69,6 @@ class IperfTestCase(unittest.TestCase):
mock_ssh.SSH.from_node().execute.assert_called_with("pkill iperf3")
def test_iperf_successful_no_sla(self, mock_ssh):
-
options = {}
args = {'options': options}
result = {}
@@ -85,7 +84,6 @@ class IperfTestCase(unittest.TestCase):
self.assertEqual(result, expected_result)
def test_iperf_successful_sla(self, mock_ssh):
-
options = {}
args = {
'options': options,
@@ -104,7 +102,6 @@ class IperfTestCase(unittest.TestCase):
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla(self, mock_ssh):
-
options = {}
args = {
'options': options,
@@ -118,7 +115,7 @@ class IperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_iperf_successful_sla_jitter(self, mock_ssh):
options = {"protocol": "udp", "bandwidth": "20m"}
@@ -152,7 +149,7 @@ class IperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_iperf_successful_tcp_protocal(self, mock_ssh):
options = {"protocol": "tcp", "nodelay": "yes"}
@@ -173,7 +170,6 @@ class IperfTestCase(unittest.TestCase):
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_script_error(self, mock_ssh):
-
options = {}
args = {'options': options}
result = {}
@@ -185,7 +181,8 @@ class IperfTestCase(unittest.TestCase):
mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, result)
- def _read_sample_output(self, filename):
+ @staticmethod
+ def _read_sample_output(filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
output = os.path.join(curr_path, filename)
with open(output) as f:
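
From this point the networking hunks mostly replace bare assert statements (AssertionError) with y_exc.SLAValidationError, so runners can tell an SLA miss apart from a broken test. A sketch of the check style the new assertions expect; the constructor kwargs assume the project's oslo-style exception messages:

```python
# Sketch of an SLA check raising the dedicated exception; thresholds
# and the case name are illustrative.
from yardstick.common import exceptions as y_exc


def check_sla_sketch(bytes_per_second, sla_bytes_per_second):
    if bytes_per_second < sla_bytes_per_second:
        raise y_exc.SLAValidationError(
            case_name='iperf3',
            error_msg='bandwidth %d is below the SLA %d'
                      % (bytes_per_second, sla_bytes_per_second))
```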
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
index 5907562c2..a7abcd98a 100755
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -18,6 +18,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import netperf
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.networking.netperf.ssh')
@@ -98,7 +99,7 @@ class NetperfTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_netperf_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
index 956a9c078..a577dba59 100755
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
@@ -19,6 +19,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.networking import netperf_node
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.networking.netperf_node.ssh')
@@ -98,7 +99,7 @@ class NetperfNodeTestCase(unittest.TestCase):
sample_output = self._read_sample_output()
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_netperf_node_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
index 4adfab120..944202658 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
@@ -14,6 +14,7 @@ import mock
import unittest
from yardstick.benchmark.scenarios.networking import ping
+from yardstick.common import exceptions as y_exc
class PingTestCase(unittest.TestCase):
@@ -74,7 +75,7 @@ class PingTestCase(unittest.TestCase):
p = ping.Ping(args, self.ctx)
mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_unsuccessful_script_error(self, mock_ssh):
@@ -90,3 +91,17 @@ class PingTestCase(unittest.TestCase):
mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_no_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
index 4662c8537..ad5217a14 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
@@ -14,6 +14,7 @@ import mock
import unittest
from yardstick.benchmark.scenarios.networking import ping6
+from yardstick.common import exceptions as y_exc
class PingTestCase(unittest.TestCase):
@@ -98,7 +99,7 @@ class PingTestCase(unittest.TestCase):
p = ping6.Ping6(args, self.ctx)
p.client = mock_ssh.SSH.from_node()
mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
@mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
def test_ping_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
index 6aea03aee..5761e2403 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
@@ -9,17 +9,22 @@
import mock
import unittest
+import logging
from oslo_serialization import jsonutils
+from yardstick import ssh
from yardstick.benchmark.scenarios.networking import pktgen
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
-@mock.patch('yardstick.benchmark.scenarios.networking.pktgen.ssh')
class PktgenTestCase(unittest.TestCase):
def setUp(self):
- self.ctx = {
+ self.context_cfg = {
'host': {
'ip': '172.16.0.137',
'user': 'root',
@@ -32,635 +37,416 @@ class PktgenTestCase(unittest.TestCase):
'ipaddr': '172.16.0.138'
}
}
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
+ }
- def test_pktgen_successful_setup(self, mock_ssh):
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.setup()
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_SSH.from_node().run.return_value = 0
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.assertIsNotNone(p.server)
- self.assertIsNotNone(p.client)
- self.assertTrue(p.setup_done)
+ self.addCleanup(self._stop_mock)
- def test_pktgen_successful_iptables_setup(self, mock_ssh):
+ self.scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ self.scenario.setup()
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.number_of_ports = args['options']['number_of_ports']
+ def _stop_mock(self):
+ self._mock_SSH.stop()
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ def test_setup_successful(self):
+ self.assertIsNotNone(self.scenario.server)
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
- p._iptables_setup()
+ def test_iptables_setup_successful(self):
+ self.scenario.number_of_ports = 10
+ self.scenario._iptables_setup()
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"sudo iptables -F; "
"sudo iptables -A INPUT -p udp --dport 1000:%s -j DROP"
% 1010, timeout=60)
- def test_pktgen_unsuccessful_iptables_setup(self, mock_ssh):
-
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- }
-
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.number_of_ports = args['options']['number_of_ports']
+ def test_iptables_setup_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p._iptables_setup)
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_setup()
- def test_pktgen_successful_iptables_get_result(self, mock_ssh):
+ def test_iptables_get_result_successful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.return_value = (0, '150000', '')
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- }
-
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.number_of_ports = args['options']['number_of_ports']
+ result = self.scenario._iptables_get_result()
- mock_ssh.SSH.from_node().execute.return_value = (0, '150000', '')
- p._iptables_get_result()
-
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.assertEqual(result, 150000)
+ self.mock_SSH.from_node().execute.assert_called_with(
"sudo iptables -L INPUT -vnx |"
"awk '/dpts:1000:%s/ {{printf \"%%s\", $1}}'"
- % 1010)
-
- def test_pktgen_unsuccessful_iptables_get_result(self, mock_ssh):
+ % 1010, raise_on_error=True)
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- }
-
- p = pktgen.Pktgen(args, self.ctx)
+ def test_iptables_get_result_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- p.server = mock_ssh.SSH.from_node()
- p.number_of_ports = args['options']['number_of_ports']
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_get_result()
- mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p._iptables_get_result)
+ def test_run_successful_no_sla(self):
+ self.scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- def test_pktgen_successful_no_sla(self, mock_ssh):
-
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- }
result = {}
+ self.scenario.run(result)
- p = pktgen.Pktgen(args, self.ctx)
-
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- p._iptables_get_result = mock.Mock(return_value=149300)
-
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149776, "packetsize": 60, "flows": 110, "ppm": 3179}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
-
- p.run(result)
expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
expected_result["packetsize"] = 60
self.assertEqual(result, expected_result)
- def test_pktgen_successful_sla(self, mock_ssh):
+ def test_run_successful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 10000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- 'sla': {'max_ppm': 10000}
- }
result = {}
+ scenario.run(result)
- p = pktgen.Pktgen(args, self.ctx)
-
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- p._iptables_get_result = mock.Mock(return_value=149300)
-
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149776, "packetsize": 60, "flows": 110, "ppm": 3179}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
-
- p.run(result)
expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
expected_result["packetsize"] = 60
self.assertEqual(result, expected_result)
- def test_pktgen_unsuccessful_sla(self, mock_ssh):
-
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- 'sla': {'max_ppm': 1000}
- }
- result = {}
+ def test_run_unsuccessful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 1000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- p = pktgen.Pktgen(args, self.ctx)
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ def test_run_ssh_error_not_caught(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- p._iptables_get_result = mock.Mock(return_value=149300)
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149776, "packetsize": 60, "flows": 110}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ def test_get_vnic_driver_name(self):
+ self.mock_SSH.from_node().execute.return_value = (0, 'ixgbevf', '')
+ vnic_driver_name = self.scenario._get_vnic_driver_name()
- def test_pktgen_unsuccessful_script_error(self, mock_ssh):
-
- args = {
- 'options': {'packetsize': 60, 'number_of_ports': 10},
- 'sla': {'max_ppm': 1000}
- }
- result = {}
-
- p = pktgen.Pktgen(args, self.ctx)
-
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, result)
-
- def test_pktgen_get_vnic_driver_name(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, 'ixgbevf', '')
-
- vnic_driver_name = p._get_vnic_driver_name()
self.assertEqual(vnic_driver_name, 'ixgbevf')
- def test_pktgen_unsuccessful_get_vnic_driver_name(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
+ def test_get_vnic_driver_name_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
-
- self.assertRaises(RuntimeError, p._get_vnic_driver_name)
-
- def test_pktgen_get_sriov_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '2', '')
-
- p.queue_number = p._get_sriov_queue_number()
- self.assertEqual(p.queue_number, 2)
-
- def test_pktgen_unsuccessful_get_sriov_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_vnic_driver_name()
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ def test_get_sriov_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
- self.assertRaises(RuntimeError, p._get_sriov_queue_number)
+ self.scenario.queue_number = self.scenario._get_sriov_queue_number()
+ self.assertEqual(self.scenario.queue_number, 2)
- def test_pktgen_get_available_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
+ def test_get_sriov_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (0, '4', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_sriov_queue_number()
- p._get_available_queue_number()
+ def test_get_available_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.assertEqual(self.scenario._get_available_queue_number(), 4)
+ self.mock_SSH.from_node().execute.assert_called_with(
"sudo ethtool -l eth0 | grep Combined | head -1 |"
- "awk '{printf $2}'")
-
- def test_pktgen_unsuccessful_get_available_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ "awk '{printf $2}'", raise_on_error=True)
- self.assertRaises(RuntimeError, p._get_available_queue_number)
+ def test_get_available_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- def test_pktgen_get_usable_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '1', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_available_queue_number()
- p._get_usable_queue_number()
+ def test_get_usable_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.assertEqual(self.scenario._get_usable_queue_number(), 1)
+ self.mock_SSH.from_node().execute.assert_called_with(
"sudo ethtool -l eth0 | grep Combined | tail -1 |"
- "awk '{printf $2}'")
+ "awk '{printf $2}'", raise_on_error=True)
- def test_pktgen_unsuccessful_get_usable_queue_number(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
+ def test_get_usable_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_usable_queue_number()
- self.assertRaises(RuntimeError, p._get_usable_queue_number)
+ def test_enable_ovs_multiqueue(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
- def test_pktgen_enable_ovs_multiqueue(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.mock_SSH.from_node().run.assert_has_calls(
+ (mock.call("sudo ethtool -L eth0 combined 4"),
+ mock.call("sudo ethtool -L eth0 combined 4")))
- mock_ssh.SSH.from_node().execute.return_value = (0, '4', '')
+ def test_enable_ovs_multiqueue_1q(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=4)
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.mock_SSH.from_node().run.assert_not_called()
- p.queue_number = p._enable_ovs_multiqueue()
- self.assertEqual(p.queue_number, 4)
-
- def test_pktgen_enable_ovs_multiqueue_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '1', '')
-
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=1)
-
- p.queue_number = p._enable_ovs_multiqueue()
- self.assertEqual(p.queue_number, 1)
-
- def test_pktgen_unsuccessful_enable_ovs_multiqueue(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ def test_enable_ovs_multiqueue_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._enable_ovs_multiqueue()
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=4)
+ def test_setup_irqmapping_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(4)
- self.assertRaises(RuntimeError, p._enable_ovs_multiqueue)
-
- def test_pktgen_setup_irqmapping_ovs(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '10', '')
-
- p._setup_irqmapping_ovs(4)
-
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"echo 8 | sudo tee /proc/irq/10/smp_affinity")
- def test_pktgen_setup_irqmapping_ovs_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '10', '')
-
- p._setup_irqmapping_ovs(1)
+ def test_setup_irqmapping_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(1)
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"echo 1 | sudo tee /proc/irq/10/smp_affinity")
- def test_pktgen_unsuccessful_setup_irqmapping_ovs(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ def test_setup_irqmapping_ovs_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- self.assertRaises(RuntimeError, p._setup_irqmapping_ovs, 4)
-
- def test_pktgen_unsuccessful_setup_irqmapping_ovs_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(4)
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
-
- self.assertRaises(RuntimeError, p._setup_irqmapping_ovs, 1)
-
- def test_pktgen_setup_irqmapping_sriov(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ def test_setup_irqmapping_ovs_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (0, '10', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(1)
- p._setup_irqmapping_sriov(2)
+ def test_setup_irqmapping_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(2)
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"echo 2 | sudo tee /proc/irq/10/smp_affinity")
- def test_pktgen_setup_irqmapping_sriov_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '10', '')
+ def test_setup_irqmapping_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(1)
- p._setup_irqmapping_sriov(1)
-
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"echo 1 | sudo tee /proc/irq/10/smp_affinity")
- def test_pktgen_unsuccessful_setup_irqmapping_sriov(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
-
- self.assertRaises(RuntimeError, p._setup_irqmapping_sriov, 2)
+ def test_setup_irqmapping_sriov_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- def test_pktgen_unsuccessful_setup_irqmapping_sriov_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(2)
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ def test_setup_irqmapping_sriov_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- self.assertRaises(RuntimeError, p._setup_irqmapping_sriov, 1)
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(1)
- def test_pktgen_is_irqbalance_disabled(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
+ def test_is_irqbalance_disabled(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertFalse(self.scenario._is_irqbalance_disabled())
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "grep ENABLED /etc/default/irqbalance", raise_on_error=True)
- p._is_irqbalance_disabled()
-
- mock_ssh.SSH.from_node().execute.assert_called_with(
- "grep ENABLED /etc/default/irqbalance")
-
- def test_pktgen_unsuccessful_is_irqbalance_disabled(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
-
- self.assertRaises(RuntimeError, p._is_irqbalance_disabled)
-
- def test_pktgen_disable_irqbalance(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ def test_is_irqbalance_disabled_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._is_irqbalance_disabled()
- p._disable_irqbalance()
+ def test_disable_irqbalance(self):
+ self.scenario._disable_irqbalance()
- mock_ssh.SSH.from_node().execute.assert_called_with(
+ self.mock_SSH.from_node().run.assert_called_with(
"sudo service irqbalance disable")
- def test_pktgen_unsuccessful_disable_irqbalance(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
-
- self.assertRaises(RuntimeError, p._disable_irqbalance)
-
- def test_pktgen_multiqueue_setup_ovs(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60, 'multiqueue': True},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '4', '')
+ def test_disable_irqbalance_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
- p._is_irqbalance_disabled = mock.Mock(return_value=False)
- p._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=4)
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._disable_irqbalance()
- p.multiqueue_setup()
+ def test_multiqueue_setup_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
- self.assertEqual(p.queue_number, 4)
+ self.scenario.multiqueue_setup()
- def test_pktgen_multiqueue_setup_ovs_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60, 'multiqueue': True},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
- mock_ssh.SSH.from_node().execute.return_value = (0, '1', '')
+ def test_multiqueue_setup_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
- p._is_irqbalance_disabled = mock.Mock(return_value=False)
- p._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=1)
+ self.scenario.multiqueue_setup()
- p.multiqueue_setup()
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
- self.assertEqual(p.queue_number, 1)
+ def test_multiqueue_setup_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
- def test_pktgen_multiqueue_setup_sriov(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60, 'multiqueue': True},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ self.scenario.multiqueue_setup()
- mock_ssh.SSH.from_node().execute.return_value = (0, '2', '')
+ self.assertEqual(self.scenario.queue_number, 2)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
- p._is_irqbalance_disabled = mock.Mock(return_value=False)
- p._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+ def test_multiqueue_setup_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
- p.multiqueue_setup()
+ self.scenario.multiqueue_setup()
- self.assertEqual(p.queue_number, 2)
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
- def test_pktgen_multiqueue_setup_sriov_1q(self, mock_ssh):
- args = {
- 'options': {'packetsize': 60, 'multiqueue': True},
- }
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- mock_ssh.SSH.from_node().execute.return_value = (0, '1', '')
-
- p._is_irqbalance_disabled = mock.Mock(return_value=False)
- p._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
-
- p.multiqueue_setup()
-
- self.assertEqual(p.queue_number, 1)
-
- def test_pktgen_run_with_setup_done(self, mock_ssh):
- args = {
+ def test_run_with_setup_done(self):
+ scenario_cfg = {
'options': {
'packetsize': 60,
'number_of_ports': 10,
'duration': 20,
'multiqueue': True},
'sla': {
- 'max_ppm': 1}}
- result = {}
- p = pktgen.Pktgen(args, self.ctx)
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
+ 'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.server = self.mock_SSH.from_node()
+ scenario.client = self.mock_SSH.from_node()
+ scenario.setup_done = True
+ scenario.multiqueue_setup_done = True
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- p.setup_done = True
- p.multiqueue_setup_done = True
-
- mock_iptables_result = mock.Mock()
- mock_iptables_result.return_value = 149300
- p._iptables_get_result = mock_iptables_result
-
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149300, "flows": 110, "ppm": 0}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ result = {}
+ scenario.run(result)
- p.run(result)
expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
expected_result["packetsize"] = 60
self.assertEqual(result, expected_result)
- def test_pktgen_run_with_ovs_multiqueque(self, mock_ssh):
- args = {
+ def test_run_with_ovs_multiqueue(self):
+ scenario_cfg = {
'options': {
'packetsize': 60,
'number_of_ports': 10,
'duration': 20,
'multiqueue': True},
- 'sla': {
- 'max_ppm': 1}}
- result = {}
-
- p = pktgen.Pktgen(args, self.ctx)
-
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- p._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
- p._get_usable_queue_number = mock.Mock(return_value=1)
- p._get_available_queue_number = mock.Mock(return_value=4)
- p._enable_ovs_multiqueue = mock.Mock(return_value=4)
- p._setup_irqmapping_ovs = mock.Mock()
- p._iptables_get_result = mock.Mock(return_value=149300)
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
+ scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ scenario._get_available_queue_number = mock.Mock(return_value=4)
+ scenario._enable_ovs_multiqueue = mock.Mock(return_value=4)
+ scenario._setup_irqmapping_ovs = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149300, "flows": 110, "ppm": 0}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ result = {}
+ scenario.run(result)
- p.run(result)
expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
expected_result["packetsize"] = 60
self.assertEqual(result, expected_result)
- def test_pktgen_run_with_sriov_multiqueque(self, mock_ssh):
- args = {
+ def test_run_with_sriov_multiqueue(self):
+ scenario_cfg = {
'options': {
'packetsize': 60,
'number_of_ports': 10,
'duration': 20,
'multiqueue': True},
- 'sla': {
- 'max_ppm': 1}}
- result = {}
-
- p = pktgen.Pktgen(args, self.ctx)
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+ scenario._get_sriov_queue_number = mock.Mock(return_value=2)
+ scenario._setup_irqmapping_sriov = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- p.server = mock_ssh.SSH.from_node()
- p.client = mock_ssh.SSH.from_node()
-
- p._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
- p._get_sriov_queue_number = mock.Mock(return_value=2)
- p._setup_irqmapping_sriov = mock.Mock()
- p._iptables_get_result = mock.Mock(return_value=149300)
-
- sample_output = '{"packets_per_second": 9753, "errors": 0, \
- "packets_sent": 149300, "flows": 110, "ppm": 0}'
- mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ result = {}
+ scenario.run(result)
- p.run(result)
expected_result = jsonutils.loads(sample_output)
expected_result["packets_received"] = 149300
expected_result["packetsize"] = 60
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
index 976087148..70cd8ad40 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
@@ -9,15 +9,22 @@
import mock
import unittest
+import time
+import logging
import yardstick.common.utils as utils
+from yardstick import ssh
from yardstick.benchmark.scenarios.networking import pktgen_dpdk
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
class PktgenDPDKLatencyTestCase(unittest.TestCase):
def setUp(self):
- self.ctx = {
+ self.context_cfg = {
'host': {
'ip': '172.16.0.137',
'user': 'root',
@@ -30,149 +37,100 @@ class PktgenDPDKLatencyTestCase(unittest.TestCase):
'ipaddr': '172.16.0.138'
}
}
-
- self._mock_ssh = mock.patch(
- 'yardstick.benchmark.scenarios.networking.pktgen_dpdk.ssh')
- self.mock_ssh = self._mock_ssh.start()
- self._mock_time = mock.patch(
- 'yardstick.benchmark.scenarios.networking.pktgen_dpdk.time')
- self.mock_time = self._mock_time.start()
-
- self.addCleanup(self._stop_mock)
-
- def _stop_mock(self):
- self._mock_ssh.stop()
- self._mock_time.stop()
-
- def test_pktgen_dpdk_successful_setup(self):
-
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
- p.setup()
-
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.assertIsNotNone(p.server)
- self.assertIsNotNone(p.client)
- self.assertTrue(p.setup_done)
-
- def test_pktgen_dpdk_successful_get_port_ip(self):
-
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
- p.server = self.mock_ssh.SSH.from_node()
-
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
-
- utils.get_port_ip(p.server, "eth1")
-
- self.mock_ssh.SSH.from_node().execute.assert_called_with(
- "ifconfig eth1 |grep 'inet addr' |awk '{print $2}' |cut -d ':' -f2 ")
-
- def test_pktgen_dpdk_unsuccessful_get_port_ip(self):
-
- args = {
- 'options': {'packetsize': 60},
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
}
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
- p.server = self.mock_ssh.SSH.from_node()
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, utils.get_port_ip, p.server, "eth1")
+ self._mock_time_sleep = mock.patch.object(time, 'sleep')
+ self.mock_time_sleep = self._mock_time_sleep.start()
- def test_pktgen_dpdk_successful_get_port_mac(self):
+ self._mock_utils_get_port_ip = mock.patch.object(utils, 'get_port_ip')
+ self.mock_utils_get_port_ip = self._mock_utils_get_port_ip.start()
- args = {
- 'options': {'packetsize': 60},
- }
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
- p.server = self.mock_ssh.SSH.from_node()
+ self._mock_utils_get_port_mac = mock.patch.object(utils,
+ 'get_port_mac')
+ self.mock_utils_get_port_mac = self._mock_utils_get_port_mac.start()
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
- utils.get_port_mac(p.server, "eth1")
+ self.addCleanup(self._stop_mock)
- self.mock_ssh.SSH.from_node().execute.assert_called_with(
- "ifconfig |grep HWaddr |grep eth1 |awk '{print $5}' ")
+ self.scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ self.scenario.server = self.mock_SSH.from_node()
+ self.scenario.client = self.mock_SSH.from_node()
- def test_pktgen_dpdk_unsuccessful_get_port_mac(self):
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_time_sleep.stop()
+ self._mock_utils_get_port_ip.stop()
+ self._mock_utils_get_port_mac.stop()
- args = {
- 'options': {'packetsize': 60},
- }
+ def test_setup(self):
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ scenario.setup()
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
- p.server = self.mock_ssh.SSH.from_node()
+ self.assertIsNotNone(scenario.server)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, utils.get_port_mac, p.server, "eth1")
+ def test_run_get_port_ip_command(self):
+ self.scenario.run({})
- def test_pktgen_dpdk_successful_no_sla(self):
+ self.mock_utils_get_port_ip.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
- args = {
- 'options': {'packetsize': 60},
- }
+ def test_get_port_mac_command(self):
+ self.scenario.run({})
- result = {}
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
+ self.mock_utils_get_port_mac.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens5'),
+ mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
+ def test_run_no_sla(self):
sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
- self.mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- p.run(result)
+ result = {}
+ self.scenario.run(result)
# with python 3 we get float, might be due python division changes
# AssertionError: {'avg_latency': 132.33333333333334} != {
# 'avg_latency': 132}
delta = result['avg_latency'] - 132
self.assertLessEqual(delta, 1)
- def test_pktgen_dpdk_successful_sla(self):
-
- args = {
- 'options': {'packetsize': 60},
- 'sla': {'max_latency': 100}
- }
- result = {}
-
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
+ def test_run_sla(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
sample_output = '100\n100\n100\n100\n100\n100\n100\n100\n100\n100\n'
- self.mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- p.run(result)
-
- self.assertEqual(result, {"avg_latency": 100})
-
- def test_pktgen_dpdk_unsuccessful_sla(self):
-
- args = {
- 'options': {'packetsize': 60},
- 'sla': {'max_latency': 100}
- }
result = {}
+ scenario.run(result)
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
+ self.assertEqual(result, {"avg_latency": 100})
- p.server = self.mock_ssh.SSH.from_node()
- p.client = self.mock_ssh.SSH.from_node()
+ def test_run_sla_error(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
- self.mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
-
- def test_pktgen_dpdk_unsuccessful_script_error(self):
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
- args = {
- 'options': {'packetsize': 60},
- 'sla': {'max_latency': 100}
- }
- result = {}
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
- p = pktgen_dpdk.PktgenDPDKLatency(args, self.ctx)
+ def test_run_last_command_raise_on_error(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, result)
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
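
The pktgen_dpdk setUp() patches every slow collaborator at object level (SSH, time.sleep, the utils port helpers), which is what lets run() be exercised with no real delays or SSH round-trips. A reduced sketch of that arrangement:

```python
# Illustrative reduction of the setUp() above; only the patching shape
# is the point, the test class itself is hypothetical.
import time
import unittest

import mock

import yardstick.common.utils as utils
from yardstick import ssh


class FastLatencyTestCase(unittest.TestCase):
    def setUp(self):
        for target, attr in ((ssh, 'SSH'), (time, 'sleep'),
                             (utils, 'get_port_ip'),
                             (utils, 'get_port_mac')):
            patcher = mock.patch.object(target, attr)
            patcher.start()
            # Registering one cleanup per patcher keeps start()/stop()
            # symmetrical even if a later start() raises.
            self.addCleanup(patcher.stop)
```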
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
index e90fb07c7..39392e4bb 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
@@ -16,6 +16,7 @@ from oslo_serialization import jsonutils
import mock
from yardstick.benchmark.scenarios.networking import pktgen_dpdk_throughput
+from yardstick.common import exceptions as y_exc
# pylint: disable=unused-argument
@@ -131,7 +132,7 @@ class PktgenDPDKTestCase(unittest.TestCase):
sample_output = '{"packets_per_second": 9753, "errors": 0, \
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_pktgen_dpdk_throughput_unsuccessful_script_error(
self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
index 9bfbf0752..cf9a26a76 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,11 @@ import mock
import unittest
from yardstick import tests
+from yardstick.common import exceptions
from yardstick.common import utils
from yardstick.network_services.collector.subscriber import Collector
from yardstick.network_services.traffic_profile import base
from yardstick.network_services.vnf_generic import vnfdgen
-from yardstick.error import IncorrectConfig
from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
@@ -159,7 +159,7 @@ TRAFFIC_PROFILE = {
class TestNetworkServiceTestCase(unittest.TestCase):
def setUp(self):
- self.tg__1 = {
+ self.tg__0 = {
'name': 'trafficgen_1.yardstick',
'ip': '10.10.10.11',
'role': 'TrafficGen',
@@ -185,7 +185,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
},
}
- self.vnf__1 = {
+ self.vnf__0 = {
'name': 'vnf.yardstick',
'ip': '10.10.10.12',
'host': '10.223.197.164',
@@ -242,8 +242,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.context_cfg = {
'nodes': {
- 'tg__1': self.tg__1,
- 'vnf__1': self.vnf__1,
+ 'tg__0': self.tg__0,
+ 'vnf__0': self.vnf__0,
},
'networks': {
GenericVNF.UPLINK: {
@@ -270,7 +270,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
],
'type': 'ELAN',
'id': GenericVNF.UPLINK,
- 'name': 'tg__1 to vnf__1 link 1'
+ 'name': 'tg__0 to vnf__0 link 1'
}
self.vld1 = {
@@ -288,7 +288,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
],
'type': 'ELAN',
'id': GenericVNF.DOWNLINK,
- 'name': 'vnf__1 to tg__1 link 2'
+ 'name': 'vnf__0 to tg__0 link 2'
}
self.topology = {
@@ -300,12 +300,12 @@ class TestNetworkServiceTestCase(unittest.TestCase):
{
'member-vnf-index': '1',
'VNF model': 'tg_trex_tpl.yaml',
- 'vnfd-id-ref': 'tg__1',
+ 'vnfd-id-ref': 'tg__0',
},
{
'member-vnf-index': '2',
'VNF model': 'tg_trex_tpl.yaml',
- 'vnfd-id-ref': 'vnf__1',
+ 'vnfd-id-ref': 'vnf__0',
},
],
'vld': [self.vld0, self.vld1],
@@ -325,6 +325,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
},
},
'options': {
+ 'simulated_users': {'uplink': [1, 2]},
+ 'page_object': {'uplink': [1, 2]},
'framesize': {'64B': 100}
},
'runner': {
@@ -341,8 +343,8 @@ class TestNetworkServiceTestCase(unittest.TestCase):
},
'nodes': {
'tg__2': 'trafficgen_2.yardstick',
- 'tg__1': 'trafficgen_1.yardstick',
- 'vnf__1': 'vnf.yardstick',
+ 'tg__0': 'trafficgen_1.yardstick',
+ 'vnf__0': 'vnf.yardstick',
},
}
@@ -358,60 +360,72 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.assertIsNotNone(self.topology)
def test__get_ip_flow_range_string(self):
- self.scenario_cfg["traffic_options"]["flow"] = \
- self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
result = '152.16.100.2-152.16.100.254'
self.assertEqual(result, self.s._get_ip_flow_range(
'152.16.100.2-152.16.100.254'))
- def test__get_ip_flow_range(self):
- self.scenario_cfg["traffic_options"]["flow"] = \
- self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
- result = '152.16.100.2-152.16.100.254'
- self.assertEqual(result, self.s._get_ip_flow_range({"tg__1": 'xe0'}))
+ def test__get_ip_flow_range_no_nodes(self):
+ self.assertEqual('0.0.0.0', self.s._get_ip_flow_range({}))
- @mock.patch('yardstick.benchmark.scenarios.networking.vnf_generic.ipaddress')
- def test__get_ip_flow_range_no_node_data(self, mock_ipaddress):
- scenario_cfg = deepcopy(self.scenario_cfg)
- scenario_cfg["traffic_options"]["flow"] = \
- self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
+ def test__get_ip_flow_range_no_node_data(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {}
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('0.0.0.2-0.0.0.254', result)
- mock_ipaddress.ip_network.return_value = ipaddr = mock.Mock()
- ipaddr.hosts.return_value = []
+ def test__get_ip_flow_range_ipv4(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15',
+ 'netmask': '255.255.255.128'}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.2-192.168.1.126', result)
- expected = '0.0.0.0'
- result = self.s._get_ip_flow_range({"tg__2": 'xe0'})
- self.assertEqual(result, expected)
+ def test__get_ip_flow_range_ipv4_mask_30(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15', 'netmask': 30}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.15', result)
- def test__get_ip_flow_range_no_nodes(self):
- expected = '0.0.0.0'
- result = self.s._get_ip_flow_range({})
- self.assertEqual(result, expected)
+ def test__get_ip_flow_range_ipv6(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '2001::11', 'netmask': 64}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('2001::2-2001::ffff:ffff:ffff:fffe', result)
def test___get_traffic_flow(self):
self.scenario_cfg["traffic_options"]["flow"] = \
self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
- self.scenario_cfg["options"] = {}
self.scenario_cfg['options'] = {
'flow': {
'src_ip': [
{
- 'tg__1': 'xe0',
+ 'tg__0': 'xe0',
},
],
'dst_ip': [
{
- 'tg__1': 'xe1',
+ 'tg__0': 'xe1',
},
],
'public_ip': ['1.1.1.1'],
},
}
- # NOTE(ralonsoh): check the expected output. This test could be
- # incorrect
- # result = {'flow': {'dst_ip0': '152.16.40.2-152.16.40.254',
- # 'src_ip0': '152.16.100.2-152.16.100.254'}}
- self.assertEqual({'flow': {}}, self.s._get_traffic_flow())
+ expected_flow = {'flow': {'dst_ip_0': '152.16.40.2-152.16.40.254',
+ 'public_ip_0': '1.1.1.1',
+ 'src_ip_0': '152.16.100.2-152.16.100.254'}}
+ self.assertEqual(expected_flow, self.s._get_traffic_flow())
def test___get_traffic_flow_error(self):
self.scenario_cfg["traffic_options"]["flow"] = \
@@ -423,7 +437,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
with mock.patch.dict(sys.modules, tests.STL_MOCKS):
self.assertIsNotNone(self.s.get_vnf_impl(vnfd))
- with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
self.s.get_vnf_impl('NonExistentClass')
exc_str = str(raised.exception)
@@ -432,9 +446,9 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.assertIn('found in', exc_str)
def test_load_vnf_models_invalid(self):
- self.context_cfg["nodes"]['tg__1']['VNF model'] = \
+ self.context_cfg["nodes"]['tg__0']['VNF model'] = \
self._get_file_abspath("tg_trex_tpl.yaml")
- self.context_cfg["nodes"]['vnf__1']['VNF model'] = \
+ self.context_cfg["nodes"]['vnf__0']['VNF model'] = \
self._get_file_abspath("tg_trex_tpl.yaml")
vnf = mock.Mock(autospec=GenericVNF)
@@ -455,17 +469,17 @@ class TestNetworkServiceTestCase(unittest.TestCase):
nodes = self.context_cfg["nodes"]
self.assertEqual('../../vnf_descriptors/tg_rfc2544_tpl.yaml',
- nodes['tg__1']['VNF model'])
+ nodes['tg__0']['VNF model'])
self.assertEqual('../../vnf_descriptors/vpe_vnf.yaml',
- nodes['vnf__1']['VNF model'])
+ nodes['vnf__0']['VNF model'])
def test_map_topology_to_infrastructure_insufficient_nodes(self):
cfg = deepcopy(self.context_cfg)
- del cfg['nodes']['vnf__1']
+ del cfg['nodes']['vnf__0']
cfg_patch = mock.patch.object(self.s, 'context_cfg', cfg)
with cfg_patch:
- with self.assertRaises(IncorrectConfig):
+ with self.assertRaises(exceptions.IncorrectConfig):
self.s.map_topology_to_infrastructure()
def test_map_topology_to_infrastructure_config_invalid(self):
@@ -475,14 +489,14 @@ class TestNetworkServiceTestCase(unittest.TestCase):
cfg = deepcopy(self.s.context_cfg)
# delete all, we don't know which will come first
- del cfg['nodes']['vnf__1']['interfaces']['xe0']['local_mac']
- del cfg['nodes']['vnf__1']['interfaces']['xe1']['local_mac']
- del cfg['nodes']['tg__1']['interfaces']['xe0']['local_mac']
- del cfg['nodes']['tg__1']['interfaces']['xe1']['local_mac']
+ del cfg['nodes']['vnf__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['vnf__0']['interfaces']['xe1']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe1']['local_mac']
config_patch = mock.patch.object(self.s, 'context_cfg', cfg)
with config_patch:
- with self.assertRaises(IncorrectConfig):
+ with self.assertRaises(exceptions.IncorrectConfig):
self.s.map_topology_to_infrastructure()
def test__resolve_topology_invalid_config(self):
@@ -493,23 +507,23 @@ class TestNetworkServiceTestCase(unittest.TestCase):
ssh.from_node.return_value = ssh_mock
# purge an important key from the data structure
- for interface in self.tg__1['interfaces'].values():
+ for interface in self.tg__0['interfaces'].values():
del interface['local_mac']
- with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
self.s._resolve_topology()
self.assertIn('not found', str(raised.exception))
# restore local_mac
- for index, interface in enumerate(self.tg__1['interfaces'].values()):
+ for index, interface in enumerate(self.tg__0['interfaces'].values()):
interface['local_mac'] = '00:00:00:00:00:{:2x}'.format(index)
# make a connection point ref with 3 points
self.s.topology["vld"][0]['vnfd-connection-point-ref'].append(
self.s.topology["vld"][0]['vnfd-connection-point-ref'][0])
- with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
self.s._resolve_topology()
self.assertIn('wrong endpoint count', str(raised.exception))
@@ -518,7 +532,7 @@ class TestNetworkServiceTestCase(unittest.TestCase):
self.s.topology["vld"][0]['vnfd-connection-point-ref'] = \
self.s.topology["vld"][0]['vnfd-connection-point-ref'][:1]
- with self.assertRaises(vnf_generic.IncorrectConfig) as raised:
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
self.s._resolve_topology()
self.assertIn('wrong endpoint count', str(raised.exception))
@@ -607,16 +621,38 @@ class TestNetworkServiceTestCase(unittest.TestCase):
with self.assertRaises(IOError):
self.s._get_traffic_profile()
+ def test__key_list_to_dict(self):
+ result = self.s._key_list_to_dict("uplink", {"uplink": [1, 2]})
+ self.assertEqual({"uplink_0": 1, "uplink_1": 2}, result)
+
+ def test__get_simulated_users(self):
+ result = self.s._get_simulated_users()
+ self.assertEqual({'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
+ def test__get_page_object(self):
+ result = self.s._get_page_object()
+ self.assertEqual({'page_object': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
def test___get_traffic_imix_exception(self):
with mock.patch.dict(self.scenario_cfg["traffic_options"], {'imix': ''}):
self.assertEqual({'imix': {'64B': 100}},
self.s._get_traffic_imix())
+ def test__get_ip_priority(self):
+ with mock.patch.dict(self.scenario_cfg["options"],
+ {'priority': {'raw': '0x01'}}):
+ self.assertEqual({'raw': '0x01'}, self.s._get_ip_priority())
+
+ def test__get_ip_priority_exception(self):
+ self.assertEqual({}, self.s._get_ip_priority())
+
@mock.patch.object(base.TrafficProfile, 'get')
@mock.patch.object(vnfdgen, 'generate_vnfd')
def test__fill_traffic_profile(self, mock_generate, mock_tprofile_get):
fake_tprofile = mock.Mock()
- fake_vnfd = mock.Mock()
+ fake_vnfd = mock.MagicMock()
with mock.patch.object(self.s, '_get_traffic_profile',
return_value=fake_tprofile) as mock_get_tp:
mock_generate.return_value = fake_vnfd
@@ -628,10 +664,32 @@ class TestNetworkServiceTestCase(unittest.TestCase):
'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
'flow': {'flow': {}},
'imix': {'imix': {'64B': 100}},
- 'uplink': {}}
+ 'priority': {},
+ 'uplink': {},
+ 'duration': 30,
+ 'simulated_users': {
+ 'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ 'page_object': {
+ 'page_object': {'uplink_0': 1, 'uplink_1': 2}}}
)
mock_tprofile_get.assert_called_once_with(fake_vnfd)
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile2(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = {}
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+
+ self.s.scenario_cfg["options"] = {"traffic_config": {"duration": 99899}}
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ self.assertIn("traffic_profile", fake_vnfd)
+ self.assertIn("duration", fake_vnfd["traffic_profile"])
+ self.assertEqual(99899, fake_vnfd["traffic_profile"]["duration"])
+
@mock.patch.object(utils, 'open_relative_file')
def test__get_topology(self, mock_open_path):
self.s.scenario_cfg['topology'] = 'fake_topology'
@@ -678,3 +736,138 @@ class TestNetworkServiceTestCase(unittest.TestCase):
mock.Mock(return_value=True)
with self.assertRaises(RuntimeError):
self.s.teardown()
+
+
+class TestNetworkServiceRFC2544TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC2544TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC2544(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ tgen.wait_on_trafic.return_value = 'COMPLETE'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock(
+ return_value={tgen.name: verified_dict})
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
+
+ def test_setup_exception(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.instantiate.side_effect = RuntimeError(
+ "error during instantiate")
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ with self.assertRaises(RuntimeError):
+ self.s.setup()
+
+
+class TestNetworkServiceRFC3511TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC3511TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC3511(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock()
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
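
The rewritten _get_ip_flow_range tests replace a mocked-ipaddress check with concrete expected values, which pins down the address arithmetic. A sketch consistent with all of those expectations, using only the standard ipaddress module (an illustrative reconstruction, not vnf_generic.py verbatim):

    import ipaddress

    def ip_flow_range(local_ip, netmask):
        # '192.168.1.15' + '255.255.255.128' -> '192.168.1.2-192.168.1.126'
        # '2001::11' + 64 -> '2001::2-2001::ffff:ffff:ffff:fffe'
        net = ipaddress.ip_network(
            u'{}/{}'.format(local_ip, netmask), strict=False)
        if net.prefixlen + 2 < net.max_prefixlen:
            # Skip the network address and the first host (conventionally
            # the gateway); stop short of the broadcast address.
            return '{}-{}'.format(net[2], net[-2])
        # Networks with no room for a range (the mask-30 case) fall back
        # to the interface address itself.
        return local_ip

    def key_list_to_dict(key, data):
        # {'uplink': [1, 2]} -> {'uplink_0': 1, 'uplink_1': 2}, the shape
        # test__key_list_to_dict and the simulated_users/page_object
        # tests assert on.
        return {'{}_{}'.format(key, i): v
                for i, v in enumerate(data.get(key, []))}
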
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
index 419605b26..a1c27f5fb 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
@@ -12,31 +12,26 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Unittest for yardstick.benchmark.scenarios.networking.vsperf.Vsperf
-
-from __future__ import absolute_import
-try:
- from unittest import mock
-except ImportError:
- import mock
+import mock
import unittest
+import subprocess
+import yardstick.ssh as ssh
from yardstick.benchmark.scenarios.networking import vsperf
+from yardstick import exceptions as y_exc
-@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.subprocess')
-@mock.patch('yardstick.benchmark.scenarios.networking.vsperf.ssh')
class VsperfTestCase(unittest.TestCase):
def setUp(self):
- self.ctx = {
+ self.context_cfg = {
"host": {
"ip": "10.229.47.137",
"user": "ubuntu",
"password": "ubuntu",
},
}
- self.args = {
+ self.scenario_cfg = {
'options': {
'testname': 'p2p_rfc2544_continuous',
'traffic_type': 'continuous',
@@ -57,70 +52,145 @@ class VsperfTestCase(unittest.TestCase):
}
}
- def test_vsperf_setup(self, mock_ssh, mock_subprocess):
- p = vsperf.Vsperf(self.args, self.ctx)
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_subprocess.call().execute.return_value = None
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
- p.setup()
- self.assertIsNotNone(p.client)
- self.assertTrue(p.setup_done)
+ self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
+ self.mock_subprocess_call = self._mock_subprocess_call.start()
+ self.mock_subprocess_call.return_value = None
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_subprocess_call.stop()
+
+ def test_setup(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_setup_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh setup', shell=True)
+ self.assertIsNone(scenario.tg_port1)
+ self.assertIsNone(scenario.tg_port2)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_setup_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertIsNone(scenario.setup_script)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_run_ok(self):
+ self.scenario.setup()
- def test_vsperf_teardown(self, mock_ssh, mock_subprocess):
- p = vsperf.Vsperf(self.args, self.ctx)
+ result = {}
+ self.scenario.run(result)
- # setup() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_subprocess.call().execute.return_value = None
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
- p.setup()
- self.assertIsNotNone(p.client)
- self.assertTrue(p.setup_done)
+ def test_run_ok_setup_not_done(self):
+ result = {}
+ self.scenario.run(result)
- p.teardown()
- self.assertFalse(p.setup_done)
+ self.assertTrue(self.scenario.setup_done)
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
- def test_vsperf_run_ok(self, mock_ssh, mock_subprocess):
- p = vsperf.Vsperf(self.args, self.ctx)
+ def test_run_ssh_command_call_counts(self):
+ self.scenario.run({})
- # setup() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_subprocess.call().execute.return_value = None
+ self.assertEqual(self.mock_SSH.from_node().execute.call_count, 2)
+ self.mock_SSH.from_node().run.assert_called_once()
- # run() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_ssh.SSH.from_node().execute.return_value = (
- 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+ def test_run_sla_fail(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
- result = {}
- p.run(result)
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
- self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+ self.assertTrue('VSPERF_throughput_rx_fps(123456.000000) < '
+ 'SLA_throughput_rx_fps(500000.000000)'
+ in str(raised.exception))
- def test_vsperf_run_falied_vsperf_execution(self, mock_ssh,
- mock_subprocess):
- p = vsperf.Vsperf(self.args, self.ctx)
+ def test_run_sla_fail_metric_not_collected(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n14797660.000\r\n', '')
- # setup() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_subprocess.call().execute.return_value = None
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
- # run() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ self.assertTrue('throughput_rx_fps was not collected by VSPERF'
+ in str(raised.exception))
- result = {}
- self.assertRaises(RuntimeError, p.run, result)
+ def test_run_faulty_result_csv(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
- def test_vsperf_run_falied_csv_report(self, mock_ssh, mock_subprocess):
- p = vsperf.Vsperf(self.args, self.ctx)
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
- # setup() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_subprocess.call().execute.return_value = None
+ self.assertTrue('throughput_rx_fps was not collected by VSPERF'
+ in str(raised.exception))
- # run() specific mocks
- mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ def test_run_sla_fail_metric_not_defined_in_sla(self):
+ del self.scenario_cfg['sla']['throughput_rx_fps']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
- result = {}
- self.assertRaises(RuntimeError, p.run, result)
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ scenario.run({})
+ self.assertTrue('throughput_rx_fps is not defined in SLA'
+ in str(raised.exception))
+
+ def test_teardown(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ self.scenario.teardown()
+ self.assertFalse(self.scenario.setup_done)
+
+ def test_teardown_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh teardown', shell=True)
+ self.assertFalse(scenario.setup_done)
+
+ def test_teardown_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertFalse(scenario.setup_done)
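
Every run() assertion in the rewritten VSPERF tests hinges on the same two-line CSV convention: a header row of metric names followed by one row of values, joined by \r\n. A hypothetical parser reproducing the behaviour the tests pin down, including the 'faulty output not csv' case collapsing to an empty result (a sketch, not the scenario's actual parsing code):

    def parse_vsperf_csv(stdout):
        # 'throughput_rx_fps\r\n14797660.000\r\n'
        #     -> {'throughput_rx_fps': '14797660.000'}
        # 'faulty output not csv' -> {}, so the SLA check reports the
        # metric as not collected by VSPERF.
        lines = stdout.rstrip().splitlines()
        if len(lines) < 2:
            return {}
        return dict(zip(lines[0].split(','), lines[1].split(',')))
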
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
index 1d2278e21..8bbe6911e 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
@@ -18,7 +18,9 @@ import time
import mock
import unittest
from yardstick.benchmark.scenarios.networking import vsperf_dpdk
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
class VsperfDPDKTestCase(unittest.TestCase):
@@ -55,79 +58,51 @@ class VsperfDPDKTestCase(unittest.TestCase):
'action': 'monitor',
}
}
-
- self.scenario = vsperf_dpdk.VsperfDPDK(self.args, self.ctx)
-
- self._mock_ssh = mock.patch(
- 'yardstick.benchmark.scenarios.networking.vsperf_dpdk.ssh')
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
self.mock_ssh = self._mock_ssh.start()
self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
self.mock_subprocess_call = self._mock_subprocess_call.start()
+ mock_call_obj = mock.Mock()
+ mock_call_obj.execute.return_value = None
+ self.mock_subprocess_call.return_value = mock_call_obj
+
+ self._mock_log_info = mock.patch.object(vsperf_dpdk.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
self.addCleanup(self._cleanup)
+ self.scenario = vsperf_dpdk.VsperfDPDK(self.args, self.ctx)
+ self.scenario.setup()
+
def _cleanup(self):
self._mock_ssh.stop()
self._mock_subprocess_call.stop()
+ self._mock_log_info.stop()
def test_setup(self):
- # setup() specific mocks
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
self.assertIsNotNone(self.scenario.client)
self.assertTrue(self.scenario.setup_done)
def test_teardown(self):
- # setup() specific mocks
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
-
self.scenario.teardown()
self.assertFalse(self.scenario.setup_done)
def test_is_dpdk_setup_no(self):
- # setup() specific mocks
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
-
# is_dpdk_setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
- result = self.scenario._is_dpdk_setup()
- self.assertFalse(result)
+ self.assertFalse(self.scenario._is_dpdk_setup())
def test_is_dpdk_setup_yes(self):
- # setup() specific mocks
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
-
# is_dpdk_setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
- result = self.scenario._is_dpdk_setup()
- self.assertTrue(result)
+ self.assertTrue(self.scenario._is_dpdk_setup())
@mock.patch.object(time, 'sleep')
def test_dpdk_setup_first(self, *args):
- # setup() specific mocks
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
-
# is_dpdk_setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
self.scenario.dpdk_setup()
self.assertFalse(self.scenario._is_dpdk_setup())
@@ -135,79 +110,72 @@ class VsperfDPDKTestCase(unittest.TestCase):
@mock.patch.object(time, 'sleep')
def test_dpdk_setup_next(self, *args):
- # setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
self.scenario.dpdk_setup()
self.assertTrue(self.scenario._is_dpdk_setup())
self.assertTrue(self.scenario.dpdk_setup_done)
- @mock.patch.object(time, 'sleep')
- def test_dpdk_setup_runtime_error(self, *args):
-
- # setup specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
- self.assertTrue(self.scenario.setup_done)
-
- self.assertRaises(RuntimeError, self.scenario.dpdk_setup)
-
@mock.patch.object(subprocess, 'check_output')
- @mock.patch('time.sleep')
def test_run_ok(self, *args):
- # setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.mock_subprocess_call().execute.return_value = None
-
- self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
-
# run() specific mocks
- self.mock_subprocess_call().execute.return_value = None
- self.mock_ssh.SSH.from_node().execute.return_value = (
+ self.mock_ssh.from_node().execute.return_value = (
0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
result = {}
self.scenario.run(result)
-
self.assertEqual(result['throughput_rx_fps'], '14797660.000')
- def test_run_failed_vsperf_execution(self):
- # setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.mock_subprocess_call().execute.return_value = None
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+ self.assertIn('VSPERF_throughput_rx_fps(123456.000000) < '
+ 'SLA_throughput_rx_fps(500000.000000)',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected_faulty_csv(self, *args):
self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
- result = {}
- self.assertRaises(RuntimeError, self.scenario.run, result)
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
- def test_run_falied_csv_report(self):
- # setup() specific mocks
- self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
- self.mock_subprocess_call().execute.return_value = None
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_sla_not_defined(self, *args):
+ del self.scenario.scenario_cfg['sla']['throughput_rx_fps']
self.scenario.setup()
- self.assertIsNotNone(self.scenario.client)
- self.assertTrue(self.scenario.setup_done)
- # run() specific mocks
- self.mock_subprocess_call().execute.return_value = None
- self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
- result = {}
- self.assertRaises(RuntimeError, self.scenario.run, result)
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps is not defined in SLA',
+ str(raised.exception))
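
Both VSPERF test files converge on the same fixture style: patch module attributes with mock.patch.object rather than dotted string paths, and register the stops with addCleanup. A condensed sketch of that pattern, reusing the imports these tests already make (illustrative only):

    import subprocess
    import unittest

    import mock

    from yardstick import ssh

    class PatchObjectStyleTestCase(unittest.TestCase):
        def setUp(self):
            # patch.object references the imported module directly, so
            # the target is resolved by the import system instead of
            # being parsed from a string at patch time.
            self._ssh_patch = mock.patch.object(ssh, 'SSH')
            self.mock_ssh = self._ssh_patch.start()
            self._call_patch = mock.patch.object(subprocess, 'call')
            self.mock_call = self._call_patch.start()
            # addCleanup still runs for patches already started when
            # setUp fails partway, which plain tearDown does not.
            self.addCleanup(self._ssh_patch.stop)
            self.addCleanup(self._call_patch.stop)
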
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
index 1ac6c1f89..aaf84bb5e 100644
--- a/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,31 +20,31 @@ nsd:nsd-catalog:
description: scenario with VPE,L3fwd and VNF
constituent-vnfd:
- member-vnf-index: '1'
- vnfd-id-ref: tg__1
- VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml #tg_vpe_upstream.yaml #VPE VNF
+ vnfd-id-ref: tg__0
+ VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml #tg_trex_tpl.yaml #TREX
- member-vnf-index: '2'
- vnfd-id-ref: vnf__1
- VNF model: ../../vnf_descriptors/vpe_vnf.yaml #tg_l3fwd.yaml #tg_trex_tpl.yaml #TREX
+ vnfd-id-ref: vnf__0
+ VNF model: ../../vnf_descriptors/vpe_vnf.yaml #VPE VNF
vld:
- id: uplink
- name: tg__1 to vnf__1 link 1
+ name: tg__0 to vnf__0 link 1
type: ELAN
vnfd-connection-point-ref:
- member-vnf-index-ref: '1'
vnfd-connection-point-ref: xe0
- vnfd-id-ref: tg__1 #TREX
+ vnfd-id-ref: tg__0
- member-vnf-index-ref: '2'
vnfd-connection-point-ref: xe0
- vnfd-id-ref: vnf__1 #VNF
+ vnfd-id-ref: vnf__0
- id: downlink
- name: vnf__1 to tg__1 link 2
+ name: vnf__0 to tg__0 link 2
type: ELAN
vnfd-connection-point-ref:
- member-vnf-index-ref: '2'
vnfd-connection-point-ref: xe1
- vnfd-id-ref: vnf__1 #L3fwd
+ vnfd-id-ref: vnf__0
- member-vnf-index-ref: '1'
vnfd-connection-point-ref: xe1
- vnfd-id-ref: tg__1 #VPE VNF
+ vnfd-id-ref: tg__0
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
index f149cee69..6e69ddc6d 100644
--- a/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
@@ -18,6 +18,7 @@ import mock
from oslo_serialization import jsonutils
from yardstick.benchmark.scenarios.storage import fio
+from yardstick.common import exceptions as y_exc
@mock.patch('yardstick.benchmark.scenarios.storage.fio.ssh')
@@ -203,7 +204,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_fio_successful_bw_iops_sla(self, mock_ssh):
@@ -252,7 +253,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, result)
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
def test_fio_unsuccessful_script_error(self, mock_ssh):
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
index 5844746ab..2ba53cb93 100644
--- a/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
@@ -11,18 +11,18 @@
from __future__ import absolute_import
+import json
import unittest
import mock
from oslo_serialization import jsonutils
+import requests
from yardstick.benchmark.scenarios.storage import storperf
# pylint: disable=unused-argument
# disable this for now because I keep forgetting mock patch arg ordering
-
-
def mocked_requests_config_post(*args, **kwargs):
class MockResponseConfigPost(object):
@@ -32,10 +32,24 @@ def mocked_requests_config_post(*args, **kwargs):
return MockResponseConfigPost(
'{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
- '"stack_created": "false"}',
+ '"stack_created": false}',
200)
+def mocked_requests_config_post_fail(*args, **kwargs):
+ class MockResponseConfigPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigPost(
+ '{"message": "ERROR: Parameter \'public_network\' is invalid: ' +
+ 'Error validating value \'foo\': Unable to find network with ' +
+ 'name or id \'foo\'"}',
+ 400)
+
+
def mocked_requests_config_get(*args, **kwargs):
class MockResponseConfigGet(object):
@@ -45,10 +59,47 @@ def mocked_requests_config_get(*args, **kwargs):
return MockResponseConfigGet(
'{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
- '"stack_created": "true"}',
+ '"stack_created": true}',
200)
+def mocked_requests_config_get_not_created(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{"stack_id": "",'
+ '"stack_created": false}',
+ 200)
+
+
+def mocked_requests_config_get_no_payload(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{}',
+ 200)
+
+
+def mocked_requests_initialize_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
def mocked_requests_job_get(*args, **kwargs):
class MockResponseJobGet(object):
@@ -73,6 +124,18 @@ def mocked_requests_job_post(*args, **kwargs):
"d46bfb8c-36f4-4a40-813b-c4b4a437f728"}', 200)
+def mocked_requests_job_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
def mocked_requests_job_delete(*args, **kwargs):
class MockResponseJobDelete(object):
@@ -100,10 +163,7 @@ def mocked_requests_delete_failed(*args, **kwargs):
self.json_data = json_data
self.status_code = status_code
- if args[0] == "http://172.16.0.137:5000/api/v1.0/configurations":
- return MockResponseDeleteFailed('{"message": "Teardown failed"}', 400)
-
- return MockResponseDeleteFailed('{}', 404)
+ return MockResponseDeleteFailed('{"message": "Teardown failed"}', 400)
class StorPerfTestCase(unittest.TestCase):
@@ -119,11 +179,14 @@ class StorPerfTestCase(unittest.TestCase):
self.result = {}
- @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.post',
- side_effect=mocked_requests_config_post)
- @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.get',
- side_effect=mocked_requests_config_get)
- def test_successful_setup(self, mock_post, mock_get):
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup(self, mock_get, mock_post):
+ mock_post.side_effect = [mocked_requests_config_post(),
+ mocked_requests_job_post()]
+ mock_get.side_effect = [mocked_requests_config_get(),
+ mocked_requests_job_get()]
+
options = {
"agent_count": 8,
"public_network": 'ext-net',
@@ -146,14 +209,47 @@ class StorPerfTestCase(unittest.TestCase):
self.assertTrue(s.setup_done)
- @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.post',
- side_effect=mocked_requests_job_post)
- @mock.patch('yardstick.benchmark.scenarios.storage.storperf.requests.get',
- side_effect=mocked_requests_job_get)
- @mock.patch(
- 'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
- side_effect=mocked_requests_job_delete)
- def test_successful_run(self, mock_post, mock_get, mock_delete):
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_unsuccessful(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_not_created
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_no_payload(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_no_payload
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup_config_post_failed(self, mock_get, mock_post):
+ mock_post.side_effect = mocked_requests_config_post_fail
+
+ args = {
+ "options": {
+ "public_network": "foo"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.setup)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v1_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
options = {
"agent_count": 8,
"public_network": 'ext-net',
@@ -165,6 +261,74 @@ class StorPerfTestCase(unittest.TestCase):
"query_interval": 0,
"timeout": 60
}
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ "workload": "rs",
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ sample_output = '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+
+ expected_result = jsonutils.loads(sample_output)
+
+ s.run(self.result)
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/jobs',
+ json=jsonutils.loads(json.dumps(expected_post)))
+
+ self.assertEqual(self.result, expected_result)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v2_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
args = {
"options": options
@@ -179,13 +343,126 @@ class StorPerfTestCase(unittest.TestCase):
expected_result = jsonutils.loads(sample_output)
s.run(self.result)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
self.assertEqual(self.result, expected_result)
- @mock.patch(
- 'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
- side_effect=mocked_requests_delete)
- def test_successful_teardown(self, mock_delete):
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ self.assertRaises(RuntimeError, s.run, self.ctx)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(storperf.StorPerf, 'setup')
+ def test_run_calls_setup(self, mock_setup, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ 'timeout': 60,
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.run(self.result)
+
+ mock_setup.assert_called_once()
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.initialize_disks()
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks_post_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_initialize_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.initialize_disks)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch.object(requests, 'delete')
+ def test_teardown(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_job_delete
options = {
"agent_count": 8,
"public_network": 'ext-net',
@@ -207,11 +484,12 @@ class StorPerfTestCase(unittest.TestCase):
s.teardown()
self.assertFalse(s.setup_done)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
- @mock.patch(
- 'yardstick.benchmark.scenarios.storage.storperf.requests.delete',
- side_effect=mocked_requests_delete_failed)
- def test_failed_teardown(self, mock_delete):
+ @mock.patch.object(requests, 'delete')
+ def test_teardown_request_delete_failed(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_delete_failed
options = {
"agent_count": 8,
"public_network": 'ext-net',
@@ -230,4 +508,6 @@ class StorPerfTestCase(unittest.TestCase):
s = storperf.StorPerf(args, self.ctx)
- self.assertRaises(AssertionError, s.teardown(), self.result)
+ self.assertRaises(RuntimeError, s.teardown)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
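
The paired v1/v2 job tests also fix the endpoint-selection rule in place: options carrying a 'workloads' mapping post to /api/v2.0/jobs, while a flat scalar workload posts to /api/v1.0/jobs. A hypothetical helper capturing the rule these assertions imply (names are illustrative):

    def job_url(storperf_ip, options):
        # test_run_v2_successful: 'workloads' mapping -> v2.0 endpoint;
        # test_run_v1_successful: scalar 'workload'   -> v1.0 endpoint.
        version = '2.0' if 'workloads' in options else '1.0'
        return 'http://{}:5000/api/v{}/jobs'.format(storperf_ip, version)
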