Diffstat (limited to 'yardstick/tests/unit/benchmark/scenarios')
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py  93
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py  53
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py  36
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py  119
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py  79
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py  89
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_director.py  106
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py  95
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py  82
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py  82
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py  73
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py  74
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py  118
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py  131
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_util.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt  5
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt  9
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt  2
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt  3
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py  95
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py  64
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py  262
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py  167
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py  192
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py  109
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py  60
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py  158
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py  236
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py  75
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py  163
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py  32
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt  14
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt  300
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py  182
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py  78
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py  76
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py  63
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py  29
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py  55
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py  58
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py  58
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py  55
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py  52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py  51
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py  25
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py  27
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py  56
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py  54
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py  52
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py  43
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py  103
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py  57
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py  33
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml  41
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml  18
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml  101
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt  9
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt  13
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py  190
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py  353
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py  122
-rwxr-xr-x  yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py  122
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py  225
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py  59
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py  105
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py  107
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py  117
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py  453
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py  136
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py  194
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py  68
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py  873
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py  196
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py  181
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml  75
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml  50
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/parser/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py  70
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/__init__.py  0
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json  1
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py  63
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py  280
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py  99
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py  513
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/test_base.py  135
114 files changed, 10356 insertions, 0 deletions
diff --git a/yardstick/tests/unit/benchmark/scenarios/__init__.py b/yardstick/tests/unit/benchmark/scenarios/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py b/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
new file mode 100644
index 000000000..35455a49c
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import \
+ attacker_baremetal
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_subprocess.stop()
+
+ def test__execute_shell_command_successful(self):
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
+ self.assertEqual(exitcode, 0)
+
+ @mock.patch.object(attacker_baremetal, 'LOG')
+ def test__execute_shell_command_fail_cmd_exception(self, mock_log):
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = attacker_baremetal._execute_shell_command("env")
+ self.assertEqual(exitcode, -1)
+ mock_log.error.assert_called_once()
+
+
+class AttackerBaremetalTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_ssh = mock.patch.object(attacker_baremetal, 'ssh')
+ self.mock_ssh = self._mock_ssh.start()
+ self._mock_subprocess = mock.patch.object(attacker_baremetal,
+ 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mocks)
+
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "running", '')
+
+ host = {
+ "ipmi_ip": "10.20.0.5",
+ "ipmi_user": "root",
+ "ipmi_password": "123456",
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'bear-metal-down',
+ 'host': 'node1',
+ }
+
+ self.ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+ self.context)
+
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+ self._mock_subprocess.stop()
+
+ def test__attacker_baremetal_all_successful(self):
+ self.ins.setup()
+ self.ins.inject_fault()
+ self.ins.recover()
+
+ def test__attacker_baremetal_check_failure(self):
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, "error check", '')
+ self.ins.setup()
+
+ def test__attacker_baremetal_recover_successful(self):
+ self.attacker_cfg["jump_host"] = 'node1'
+ self.context["node1"]["password"] = "123456"
+ ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg,
+ self.context)
+
+ ins.setup()
+ ins.recover()
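
The ExecuteShellTestCase above pins down two behaviours of the module-level helper it mocks around: exit code 0 plus the command output on success, and exit code -1 with a single logged error when subprocess.check_output raises. A minimal sketch of a helper with that contract (illustrative only, assuming it simply wraps subprocess.check_output; the real attacker_baremetal code may differ in detail):

    import logging
    import subprocess

    LOG = logging.getLogger(__name__)


    def _execute_shell_command(command):
        """Run a local shell command and return (exitcode, output)."""
        exitcode = 0
        output = []
        try:
            output = subprocess.check_output(command, shell=True)
        except Exception:  # any failure: log once and report exitcode -1
            exitcode = -1
            LOG.error("exec command '%s' error", command, exc_info=True)
        return exitcode, output
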
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py
new file mode 100644
index 000000000..c1b3c0d72
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_general.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2016 Juan Qiu and others
+# juan_qiu@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.attacker
+# .attacker_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.attacker.'
+ 'attacker_general.ssh')
+class GeneralAttackerServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'general-attacker',
+ 'action_parameter': {'process_name': 'nova_api'},
+ 'rollback_parameter': {'process_name': 'nova_api'},
+ 'key': 'stop-service',
+ 'attack_key': 'stop-service',
+ 'host': 'node1',
+ }
+
+ def test__attacker_service_all_successful(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.setup()
+ ins.inject_fault()
+ ins.recover()
+
+    def test__attacker_service_check_failure(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "error check", '')
+ ins.setup()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py
new file mode 100644
index 000000000..2e9f1c6bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_attacker_process.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.availability.attacker.attacker_process
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.attacker.attacker_process.ssh')
+class AttackerServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'kill-process',
+ 'process_name': 'nova-api',
+ 'host': 'node1',
+ }
+
+ def test__attacker_service_all_successful(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "10", '')
+ ins.setup()
+ ins.inject_fault()
+ ins.recover()
+
+    def test__attacker_service_check_failure(self, mock_ssh):
+
+ cls = baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
+ ins = cls(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, None, '')
+ ins.setup()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
new file mode 100644
index 000000000..74f86983b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseattacker.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2018 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+
+
+class BaseAttackerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.attacker_cfg = {
+ 'fault_type': 'test-attacker',
+ 'action_parameter': {'process_name': 'nova_api'},
+ 'rollback_parameter': {'process_name': 'nova_api'},
+ 'key': 'stop-service',
+ 'attack_key': 'stop-service',
+ 'host': 'node1',
+ }
+ self.base_attacker = baseattacker.BaseAttacker({}, {})
+
+ def test__init__(self):
+ self.assertEqual(self.base_attacker.data, {})
+ self.assertFalse(self.base_attacker.mandatory)
+ self.assertEqual(self.base_attacker.intermediate_variables, {})
+ self.assertFalse(self.base_attacker.mandatory)
+
+ def test_get_attacker_cls(self):
+ with self.assertRaises(RuntimeError):
+ baseattacker.BaseAttacker.get_attacker_cls(self.attacker_cfg)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
new file mode 100644
index 000000000..8d042c406
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
@@ -0,0 +1,119 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
+
+
+class MonitorMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.monitor_configs = [
+ {
+ "monitor_type": "openstack-cmd",
+ "command_name": "openstack router list",
+ "monitor_time": 10,
+ "monitor_number": 3,
+ "sla": {
+ "max_outage_time": 5
+ }
+ },
+ {
+ "monitor_type": "process",
+ "process_name": "neutron-server",
+ "host": "node1",
+ "monitor_time": 20,
+ "monitor_number": 3,
+ "sla": {
+ "max_recover_time": 20
+ }
+ }
+ ]
+ self.MonitorMgr = basemonitor.MonitorMgr([])
+ self.MonitorMgr.init_monitors(self.monitor_configs, None)
+ self.monitor_list = self.MonitorMgr._monitor_list
+ for mo in self.monitor_list:
+ mo._result = {"outage_time": 10}
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test__MonitorMgr_setup_successful(self, *args):
+ instance = basemonitor.MonitorMgr({"nova-api": 10})
+ instance.init_monitors(self.monitor_configs, None)
+ instance.start_monitors()
+ instance.wait_monitors()
+
+ # TODO(elfoley): Check the return value
+ ret = instance.verify_SLA() # pylint: disable=unused-variable
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test_MonitorMgr_getitem(self, *args):
+ monitorMgr = basemonitor.MonitorMgr({"nova-api": 10})
+ monitorMgr.init_monitors(self.monitor_configs, None)
+
+ @mock.patch.object(basemonitor, 'BaseMonitor')
+ def test_store_result(self, *args):
+ expect = {'process_neutron-server_outage_time': 10,
+ 'openstack-router-list_outage_time': 10}
+ result = {}
+ self.MonitorMgr.store_result(result)
+ self.assertDictEqual(result, expect)
+
+
+class BaseMonitorTestCase(unittest.TestCase):
+
+ class MonitorSimple(basemonitor.BaseMonitor):
+ __monitor_type__ = "MonitorForTest"
+
+ def setup(self):
+ self.monitor_result = False
+
+ def monitor_func(self):
+ return self.monitor_result
+
+ def setUp(self):
+ self.monitor_cfg = {
+ 'monitor_type': 'MonitorForTest',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 0.01,
+ 'sla': {'max_outage_time': 5}
+ }
+
+    def _close_queue(self, instance):
+        time.sleep(0.1)
+        instance._queue.close()
+
+ def test__basemonitor_start_wait_successful(self):
+ ins = basemonitor.BaseMonitor(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.start_monitor()
+ ins.wait_monitor()
+
+ def test__basemonitor_all_successful(self):
+ ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.setup()
+ ins.run()
+ ins.verify_SLA()
+
+ @mock.patch.object(basemonitor, 'multiprocessing')
+ def test__basemonitor_func_false(self, mock_multiprocess):
+ ins = self.MonitorSimple(self.monitor_cfg, None, {"nova-api": 10})
+ self.addCleanup(self._close_queue, ins)
+ ins.setup()
+ mock_multiprocess.Event().is_set.return_value = False
+ ins.run()
+ ins.verify_SLA()
+
+    def test__basemonitor_getmonitorcls_successful(self):
+ with self.assertRaises(RuntimeError):
+ basemonitor.BaseMonitor.get_monitor_cls(self.monitor_cfg)
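
The test_store_result case above fixes the naming scheme for flattened monitor results: a process monitor contributes process_<process_name>_outage_time, while an openstack-cmd monitor contributes its command name with spaces replaced by dashes. A rough sketch of that mapping (hypothetical helper names and signature, not the yardstick implementation):

    def _monitor_result_key(monitor_cfg):
        """Derive the per-monitor prefix used in the aggregated result dict."""
        if monitor_cfg["monitor_type"] == "process":
            return "process_%s" % monitor_cfg["process_name"]
        return monitor_cfg["command_name"].replace(" ", "-")


    def store_result(monitors, result):
        """Copy each monitor's outage time into the shared result dict."""
        for cfg, monitor in monitors:
            key = "%s_outage_time" % _monitor_result_key(cfg)
            result[key] = monitor._result["outage_time"]
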
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
new file mode 100644
index 000000000..b7c9f62ff
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseoperation.py
@@ -0,0 +1,79 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.operation import baseoperation
+
+
+class OperationMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'
+ }
+
+ self.operation_configs = []
+ self.operation_configs.append(config)
+
+ @mock.patch.object(baseoperation, 'BaseOperation')
+ def test_all_successful(self, *args):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ _ = mgr_ins["service-status"]
+ mgr_ins.rollback()
+
+ @mock.patch.object(baseoperation, 'BaseOperation')
+ def test_getitem_fail(self, *args):
+ mgr_ins = baseoperation.OperationMgr()
+ mgr_ins.init_operations(self.operation_configs, None)
+ with self.assertRaises(KeyError):
+ _ = mgr_ins["operation-not-exist"]
+
+
+class TestOperation(baseoperation.BaseOperation):
+ __operation__type__ = "test-operation"
+
+ def setup(self):
+ pass
+
+ def run(self):
+ pass
+
+ def rollback(self):
+ pass
+
+
+class BaseOperationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.config = {
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'
+ }
+ self.base_ins = baseoperation.BaseOperation(self.config, None)
+
+ def test_all_successful(self):
+ self.base_ins.setup()
+ self.base_ins.run()
+ self.base_ins.rollback()
+
+ def test_get_script_fullpath(self):
+ self.base_ins.get_script_fullpath("ha_tools/test.bash")
+
+ # TODO(elfoley): Fix test to check on expected outputs
+ # pylint: disable=unused-variable
+ def test_get_operation_cls_successful(self):
+ operation_ins = self.base_ins.get_operation_cls("test-operation")
+
+ def test_get_operation_cls_fail(self):
+ with self.assertRaises(RuntimeError):
+ self.base_ins.get_operation_cls("operation-not-exist")
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
new file mode 100644
index 000000000..d4df02819
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_baseresultchecker.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.result_checker import \
+ baseresultchecker
+
+
+class ResultCheckerMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker'
+ }
+
+ self.checker_configs = []
+ self.checker_configs.append(config)
+
+ self.mgr_ins = baseresultchecker.ResultCheckerMgr()
+
+ self._mock_basechecker = mock.patch.object(baseresultchecker,
+ 'BaseResultChecker')
+ self.mock_basechecker = self._mock_basechecker.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_basechecker.stop()
+
+ def test_ResultCheckerMgr_setup_successful(self):
+ self.mgr_ins.verify()
+
+    def test_getitem_successful(self):
+ self.mgr_ins.init_ResultChecker(self.checker_configs, None)
+ _ = self.mgr_ins["process-checker"]
+
+ def test_getitem_fail(self):
+ self.mgr_ins.init_ResultChecker(self.checker_configs, None)
+ with self.assertRaises(KeyError):
+ _ = self.mgr_ins["checker-not-exist"]
+
+
+class BaseResultCheckerTestCase(unittest.TestCase):
+
+ class ResultCheckeSimple(baseresultchecker.BaseResultChecker):
+ __result_checker__type__ = "ResultCheckeForTest"
+
+ def setup(self):
+ self.success = False
+
+ def verify(self):
+ return self.success
+
+ def setUp(self):
+ self.checker_cfg = {
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker'
+ }
+ self.ins = baseresultchecker.BaseResultChecker(self.checker_cfg, None)
+
+ def test_baseresultchecker_setup_verify_successful(self):
+ self.ins.setup()
+ self.ins.verify()
+
+    def test_baseresultchecker_verify_pass(self):
+ self.ins.setup()
+ self.ins.actualResult = True
+ self.ins.expectedResult = True
+ self.ins.verify()
+
+ def test_get_script_fullpath(self):
+ self.ins.get_script_fullpath("test.bash")
+
+ def test_get_resultchecker_cls_successful(self):
+ baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+ "ResultCheckeForTest")
+
+ def test_get_resultchecker_cls_fail(self):
+ with self.assertRaises(RuntimeError):
+ baseresultchecker.BaseResultChecker.get_resultchecker_cls(
+ "ResultCheckeNotExist")
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py
new file mode 100644
index 000000000..e49544e1c
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_director.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.director
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.director import Director
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.director.basemonitor')
+@mock.patch('yardstick.benchmark.scenarios.availability.director.baseattacker')
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.director.baseoperation')
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.director.baseresultchecker')
+class DirectorTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ 'type': "general_scenario",
+ 'options': {
+ 'attackers': [{
+ 'fault_type': "general-attacker",
+ 'key': "kill-process"}],
+ 'monitors': [{
+ 'monitor_type': "general-monitor",
+ 'key': "service-status"}],
+ 'operations': [{
+ 'operation_type': 'general-operation',
+ 'key': 'service-status'}],
+ 'resultCheckers': [{
+ 'checker_type': 'general-result-checker',
+ 'key': 'process-checker', }],
+ 'steps': [
+ {
+ 'actionKey': "service-status",
+ 'actionType': "operation",
+ 'index': 1},
+ {
+ 'actionKey': "kill-process",
+ 'actionType': "attacker",
+ 'index': 2},
+ {
+ 'actionKey': "process-checker",
+ 'actionType': "resultchecker",
+ 'index': 3},
+ {
+ 'actionKey': "service-status",
+ 'actionType': "monitor",
+ 'index': 4},
+ ]
+ }
+ }
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.ctx = {"nodes": {"node1": host}}
+
+    def test_director_all_successful(self, mock_checker, mock_operation,
+ mock_attacker, mock_monitor):
+ ins = Director(self.scenario_cfg, self.ctx)
+        operation_action = ins.createActionPlayer("operation", "service-status")
+ attacker_action = ins.createActionPlayer("attacker", "kill-process")
+ checker_action = ins.createActionPlayer("resultchecker",
+ "process-checker")
+ monitor_action = ins.createActionPlayer("monitor", "service-status")
+
+        operation_rollback = ins.createActionRollbacker("operation",
+                                                        "service-status")
+ attacker_rollback = ins.createActionRollbacker("attacker",
+ "kill-process")
+        ins.executionSteps.append(operation_rollback)
+ ins.executionSteps.append(attacker_rollback)
+
+        operation_action.action()
+ attacker_action.action()
+ checker_action.action()
+ monitor_action.action()
+
+ attacker_rollback.rollback()
+        operation_rollback.rollback()
+
+ ins.stopMonitors()
+ ins.verify()
+ ins.knockoff()
+
+    def test_director_get_wrong_item(self, mock_checker, mock_operation,
+ mock_attacker, mock_monitor):
+ ins = Director(self.scenario_cfg, self.ctx)
+ ins.createActionPlayer("wrong_type", "wrong_key")
+ ins.createActionRollbacker("wrong_type", "wrong_key")
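
The pylint note in this file about mock patch argument ordering refers to a standard mock rule: stacked @mock.patch decorators are applied bottom-up, so the patch nearest the class or function supplies the first mock argument. That is why mock_checker (bottom decorator, baseresultchecker) arrives first and mock_monitor (top decorator, basemonitor) arrives last. A standalone illustration with throwaway names:

    import os
    import unittest

    import mock


    @mock.patch('os.remove')         # top decorator -> last mock argument
    @mock.patch('os.path.exists')    # bottom decorator -> first mock argument
    class PatchOrderExampleTestCase(unittest.TestCase):

        def test_argument_order(self, mock_exists, mock_remove):
            mock_exists.return_value = False
            self.assertFalse(os.path.exists('/no/such/path'))
            mock_remove.assert_not_called()
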
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
new file mode 100644
index 000000000..1aebcc85b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_command
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_subprocess = mock.patch.object(monitor_command, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test__fun_execute_shell_command_successful(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _t = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ @mock.patch.object(monitor_command, 'LOG')
+ def test__fun_execute_shell_command_fail_cmd_exception(self, mock_log):
+ cmd = "env"
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
+ mock_log.error.assert_called_once()
+
+
+class MonitorOpenstackCmdTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.config = {
+ 'monitor_type': 'openstack-api',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 1,
+ 'sla': {'max_outage_time': 5}
+ }
+ self._mock_subprocess = mock.patch.object(monitor_command, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test__monitor_command_monitor_func_successful(self):
+
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None, {"nova-api": 10})
+ instance.setup()
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ ret = instance.monitor_func()
+ self.assertTrue(ret)
+ instance._result = {"outage_time": 0}
+ instance.verify_SLA()
+
+ @mock.patch.object(monitor_command, 'LOG')
+ def test__monitor_command_monitor_func_failure(self, mock_log):
+ self.mock_subprocess.check_output.return_value = (1, 'unittest')
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None, {"nova-api": 10})
+ instance.setup()
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ ret = instance.monitor_func()
+ self.assertFalse(ret)
+ mock_log.error.assert_called_once()
+ instance._result = {"outage_time": 10}
+ instance.verify_SLA()
+
+ @mock.patch.object(monitor_command, 'ssh')
+ def test__monitor_command_ssh_monitor_successful(self, mock_ssh):
+
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ self.config["host"] = "node1"
+ instance = monitor_command.MonitorOpenstackCmd(
+ self.config, self.context, {"nova-api": 10})
+ instance.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ret = instance.monitor_func()
+ self.assertTrue(ret)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py
new file mode 100644
index 000000000..5907c8b6a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_general.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor
+# .monitor_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.monitor import monitor_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.open')
+class GeneralMonitorServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 3,
+ 'parameter': {'serviceName': 'haproxy'},
+ 'sla': {'max_outage_time': 1}
+ }
+ self.monitor_cfg_noparam = {
+ 'monitor_type': 'general-monitor',
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 3,
+ 'sla': {'max_outage_time': 1}
+ }
+
+ def test__monitor_general_all_successful(self, mock_open, mock_ssh):
+ ins = monitor_general.GeneralMonitor(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.monitor_func()
+ ins._result = {'outage_time': 0}
+ ins.verify_SLA()
+
+ def test__monitor_general_all_successful_noparam(self, mock_open,
+ mock_ssh):
+ ins = monitor_general.GeneralMonitor(
+ self.monitor_cfg_noparam, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.monitor_func()
+ ins._result = {'outage_time': 0}
+ ins.verify_SLA()
+
+ def test__monitor_general_failure(self, mock_open, mock_ssh):
+ ins = monitor_general.GeneralMonitor(
+ self.monitor_cfg_noparam, self.context, {"nova-api": 10})
+
+ ins.setup()
+ mock_ssh.SSH.from_node().execute.return_value = (1, "error", 'error')
+ ins.monitor_func()
+ ins._result = {'outage_time': 2}
+ ins.verify_SLA()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
new file mode 100644
index 000000000..dc3a4b99a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor
+# .monitor_multi
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.monitor import monitor_multi
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.'
+ 'monitor_general.open')
+class MultiMonitorServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'},
+ 'sla': {'max_outage_time': 1}
+ }
+
+ def test__monitor_multi_all_successful(self, mock_open, mock_ssh):
+ ins = monitor_multi.MultiMonitor(
+ self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+
+ ins.start_monitor()
+ ins.wait_monitor()
+ ins.verify_SLA()
+
+ def test__monitor_multi_all_fail(self, mock_open, mock_ssh):
+ ins = monitor_multi.MultiMonitor(
+ self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+
+ ins.start_monitor()
+ ins.wait_monitor()
+ ins.verify_SLA()
+
+ def test__monitor_multi_no_sla(self, mock_open, mock_ssh):
+ monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'}
+ }
+ ins = monitor_multi.MultiMonitor(
+ monitor_cfg, self.context, {"nova-api": 10})
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.start_monitor()
+ ins.wait_monitor()
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
new file mode 100644
index 000000000..8c73bf221
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.availability.monitor.monitor_process
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_process
+
+
+@mock.patch(
+ 'yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
+class MonitorProcessTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ 'sla': {'max_recover_time': 5}
+ }
+
+ def test__monitor_process_all_successful(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 0}
+ ins.verify_SLA()
+
+    def test__monitor_process_down_failure(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ ins.verify_SLA()
+
+ def test__monitor_process_no_sla(self, mock_ssh):
+
+ monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ }
+ ins = monitor_process.MonitorProcess(monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py
new file mode 100644
index 000000000..2b09c0385
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_operation_general.py
@@ -0,0 +1,74 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.operation
+# .operation_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+from yardstick.benchmark.scenarios.availability.operation import \
+ operation_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.operation.'
+ 'operation_general.open')
+class GeneralOperaionTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.operation_cfg = {
+ 'operation_type': 'general-operation',
+ 'action_parameter': {'ins_cup': 2},
+ 'rollback_parameter': {'ins_id': 'id123456'},
+ 'key': 'nova-create-instance',
+ 'operation_key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+ self.operation_cfg_noparam = {
+ 'operation_type': 'general-operation',
+ 'key': 'nova-create-instance',
+ 'operation_key': 'nova-create-instance',
+ 'host': 'node1',
+ }
+
+ def test__operation_successful(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_successful_noparam(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg_noparam,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "success", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
+
+ def test__operation_fail(self, mock_open, mock_ssh):
+ ins = operation_general.GeneralOperaion(self.operation_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (1, "failed", '')
+ ins.setup()
+ ins.run()
+ ins.rollback()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
new file mode 100644
index 000000000..324a5bda2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_result_checker_general.py
@@ -0,0 +1,118 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.result_checker
+# .result_checker_general
+
+from __future__ import absolute_import
+import mock
+import unittest
+import copy
+
+from yardstick.benchmark.scenarios.availability.result_checker import \
+ result_checker_general
+
+
+# pylint: disable=unused-argument
+# disable this for now because I keep forgetting mock patch arg ordering
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+ 'result_checker_general.ssh')
+@mock.patch('yardstick.benchmark.scenarios.availability.result_checker.'
+ 'result_checker_general.open')
+class GeneralResultCheckerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.checker_cfg = {
+ 'parameter': {'processname': 'process'},
+ 'checker_type': 'general-result-checker',
+ 'condition': 'eq',
+ 'expectedValue': 1,
+ 'key': 'process-checker',
+ 'checker_key': 'process-checker',
+ 'host': 'node1'
+ }
+
+ def test__result_checker_eq(self, mock_open, mock_ssh):
+ ins = result_checker_general.GeneralResultChecker(self.checker_cfg,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_gt(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'gt'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "2", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_gt_eq(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'gt_eq'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_lt(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'lt'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_lt_eq(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'lt_eq'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_in(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'in'
+ config['expectedValue'] = "value"
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "value return", '')
+ ins.setup()
+ self.assertTrue(ins.verify())
+
+ def test__result_checker_wrong(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config['condition'] = 'wrong'
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (0, "1", '')
+ ins.setup()
+ self.assertFalse(ins.verify())
+
+ def test__result_checker_fail(self, mock_open, mock_ssh):
+ config = copy.deepcopy(self.checker_cfg)
+ config.pop('parameter')
+ ins = result_checker_general.GeneralResultChecker(config,
+ self.context)
+ mock_ssh.SSH.from_node().execute.return_value = (1, "fail", '')
+ ins.setup()
+ ins.verify()
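
The condition cases exercised above (eq, gt, gt_eq, lt, lt_eq, in, plus an unknown condition that must fail) imply a simple operator mapping. A sketch of that mapping (illustrative only, assuming the compared values have already been coerced to a common type; the real GeneralResultChecker also handles parameters and remote scripts):

    import operator

    CONDITION_OPERATORS = {
        'eq': operator.eq,
        'gt': operator.gt,
        'gt_eq': operator.ge,
        'lt': operator.lt,
        'lt_eq': operator.le,
        'in': lambda actual, expected: expected in actual,
    }


    def check_condition(condition, actual, expected):
        """Return True when 'actual' satisfies 'condition' against 'expected'."""
        try:
            return CONDITION_OPERATORS[condition](actual, expected)
        except KeyError:
            return False  # unknown condition, as in test__result_checker_wrong
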
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
new file mode 100644
index 000000000..dbf3d83b2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_scenario_general.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2016 Huan Li and others
+# lihuansse@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import scenario_general
+from yardstick.common import exceptions as y_exc
+
+
+class ScenarioGeneralTestCase(unittest.TestCase):
+
+ @mock.patch.object(scenario_general, 'Director')
+ def setUp(self, *args):
+ self.scenario_cfg = {
+ 'type': "general_scenario",
+ 'options': {
+ 'attackers': [{
+ 'fault_type': "general-attacker",
+ 'key': "kill-process"}],
+ 'monitors': [{
+ 'monitor_type': "general-monitor",
+ 'key': "service-status"}],
+ 'steps': [
+ {
+ 'actionKey': "kill-process",
+ 'actionType': "attacker",
+ 'index': 1},
+ {
+ 'actionKey': "service-status",
+ 'actionType': "monitor",
+ 'index': 2}]
+ }
+ }
+ self.instance = scenario_general.ScenarioGeneral(self.scenario_cfg,
+ None)
+ self.instance.setup()
+ self.instance.director.verify.return_value = True
+
+ def test_scenario_general_all_successful(self):
+ ret = {}
+ self.instance.run(ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
+
+ @mock.patch.object(scenario_general.LOG, 'exception')
+ def test_scenario_general_exception(self, *args):
+ self.instance.director.createActionPlayer.side_effect = (
+ KeyError('Wrong'))
+ self.instance.director.data = {}
+ ret = {}
+ self.instance.run(ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 1)
+
+ def test_scenario_general_case_fail(self):
+ self.instance.director.verify.return_value = False
+ self.instance.director.data = {}
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
+
+ def test_scenario_general_case_service_not_found_fail(self):
+ self.instance.director.verify.return_value = True
+ self.instance.director.data = {"general-attacker": 0}
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, self.instance.run, ret)
+ self.instance.teardown()
+ self.assertEqual(ret['sla_pass'], 0)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
new file mode 100644
index 000000000..d61fa67c7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_serviceha.py
@@ -0,0 +1,131 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import serviceha
+from yardstick.common import exceptions as y_exc
+
+
+class ServicehaTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.ctx = {"nodes": {"node1": host}}
+ attacker_cfg = {
+ "fault_type": "kill-process",
+ "process_name": "nova-api",
+ "host": "node1"
+ }
+ attacker_cfgs = []
+ attacker_cfgs.append(attacker_cfg)
+ monitor_cfg = {
+ "monitor_cmd": "nova image-list",
+ "monitor_time": 0.1
+ }
+ monitor_cfgs = []
+ monitor_cfgs.append(monitor_cfg)
+
+ options = {
+ "attackers": attacker_cfgs,
+ "monitors": monitor_cfgs
+ }
+ sla = {"outage_time": 5}
+ self.args = {"options": options, "sla": sla}
+ self.test__serviceha = serviceha.ServiceHA(self.args, self.ctx)
+
+ def test___init__(self):
+
+ self.assertEqual(self.test__serviceha.data, {})
+ self.assertFalse(self.test__serviceha.setup_done)
+ self.assertFalse(self.test__serviceha.sla_pass)
+
+ # NOTE(elfoley): This should be split into test_setup and test_run
+ # NOTE(elfoley): This should explicitly test outcomes and states
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_setup_run_successful(self, mock_monitor, *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ p.teardown()
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_sla_error(self, mock_monitor, *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = False
+
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_run_service_not_found_sla_error(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+
+ p.setup()
+ self.assertTrue(p.setup_done)
+ p.data["kill-process"] = 0
+
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+
+ ret = {}
+ self.assertRaises(y_exc.SLAValidationError, p.run, ret)
+ self.assertEqual(ret['sla_pass'], 0)
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_no_teardown_when_sla_pass(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = False
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_not_called()
+
+ @mock.patch.object(serviceha, 'baseattacker')
+ @mock.patch.object(serviceha, 'basemonitor')
+ def test__serviceha_teardown_when_mandatory(self, mock_monitor,
+ *args):
+ p = serviceha.ServiceHA(self.args, self.ctx)
+ p.setup()
+ self.assertTrue(p.setup_done)
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
+ ret = {}
+ p.run(ret)
+ attacker = mock.Mock()
+ attacker.mandatory = True
+ p.attackers = [attacker]
+ p.teardown()
+ attacker.recover.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py
new file mode 100644
index 000000000..4d97585d4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_util.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2016 Kanglin Yin and others
+# 14_ykl@tongji.edu.cn
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.util
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability import util
+
+
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.param_config = {'serviceName': '@serviceName', 'value': 1}
+ self.intermediate_variables = {'@serviceName': 'nova-api'}
+ self.std_output = '| id | 1 |'
+ self.cmd_config = {'cmd': 'ls', 'param': '-a'}
+
+ self._mock_subprocess = mock.patch.object(util, 'subprocess')
+ self.mock_subprocess = self._mock_subprocess.start()
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_subprocess.stop()
+
+ def test_util_build_command_shell(self):
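+ # '@serviceName' in param_config should be substituted from
+ # intermediate_variables, so the built command contains 'nova-api'.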
+ result = util.build_shell_command(self.param_config, True,
+ self.intermediate_variables)
+ self.assertIn("nova-api", result)
+
+ def test_read_stdout_item(self):
+ result = util.read_stdout_item(self.std_output, 'id')
+ self.assertEqual('1', result)
+
+ def test_buildshellparams(self):
+ result = util.buildshellparams(self.cmd_config, True)
+ self.assertEqual('/bin/bash -s {0} {1}', result)
+
+ def test__fun_execute_shell_command_successful(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, _ = util.execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ def test__fun_execute_shell_command_fail_cmd_exception(self):
+ cmd = "env"
+ self.mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, _ = util.execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py b/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt
new file mode 100644
index 000000000..e2c79a9b1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cachestat_sample_output.txt
@@ -0,0 +1,5 @@
+Counting cache functions... Output every 1 seconds.
+ HITS MISSES DIRTIES RATIO BUFFERS_MB CACHE_MB
+ 6462 0 29 100.0% 1157 66782
+
+Ending tracing...
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt
new file mode 100644
index 000000000..723e64bcb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output1.txt
@@ -0,0 +1,9 @@
+Linux 3.13.0-68-generic (elxg482ls42) 11/30/2015 _x86_64_ (1 CPU)
+
+04:34:26 PM CPU %usr %nice %sys %iowait %irq %soft %steal %guest %gnice %idle
+04:34:26 PM all 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+04:34:26 PM 0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+
+Average: CPU %usr %nice %sys %iowait %irq %soft %steal %guest %gnice %idle
+Average: all 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
+Average: 0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00 100.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt
new file mode 100644
index 000000000..c66520a27
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/cpuload_sample_output2.txt
@@ -0,0 +1,2 @@
+cpu 245813227 366650 17338727 1195600354 2652765 178 177114 0 80439531 0
+cpu0 32334587 35782 1659040 87008833 401178 60 73571 0 8030817 0
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt b/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt
new file mode 100644
index 000000000..1793e2f10
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/memload_sample_output.txt
@@ -0,0 +1,3 @@
+ total used free shared buff/cache available
+Mem: 263753976 76737332 187016644 2844 853528 67252400
+Swap: 268029948 0 268029948
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py
new file mode 100644
index 000000000..6f66c30f9
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cachestat.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cachestat.CACHEstat
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import cachestat
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cachestat.ssh')
+class CACHEstatTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_cachestat_successful_setup(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
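+ # The mocked SSH client mimics ssh.SSH.execute(), which yields a
+ # (status, stdout, stderr) tuple; status 0 simulates success.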
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = c._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ c = cachestat.CACHEstat({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (
+ 127, '', 'Failed executing command')
+ self.assertRaises(RuntimeError, c._execute_command,
+ "cat /proc/meminfo")
+
+ def test_get_cache_usage_successful(self, mock_ssh):
+ options = {
+ "interval": 1,
+ }
+ args = {"options": options}
+ c = cachestat.CACHEstat(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ output = self._read_file("cachestat_sample_output.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = c._get_cache_usage()
+ expected_result = {"cachestat": {"cache0": {"HITS": "6462",
+ "DIRTIES": "29",
+ "RATIO": "100.0%",
+ "MISSES": "0",
+ "BUFFERS_MB": "1157",
+ "CACHE_MB": "66782"}},
+ "average": {"HITS": 6462, "DIRTIES": 29,
+ "RATIO": "100.0%",
+ "MISSES": 0, "BUFFERS_MB": 1157,
+ "CACHE_MB": 66782},
+ "max": {"HITS": 6462,
+ "DIRTIES": 29, "RATIO": 100.0, "MISSES": 0,
+ "BUFFERS_MB": 1157, "CACHE_MB": 66782}}
+
+ self.assertEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py
new file mode 100644
index 000000000..4bef589f4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_computecapacity.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.compute.computecapacity.ComputeCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import computecapacity
+
+SAMPLE_OUTPUT = '{"Cpu_number": "2", "Core_number": "24",\
+ "Memory_size": "263753976 kB", "Thread_number": "48",\
+ "Cache_size": "30720 KB", "HT_Open": "0"}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.computecapacity.ssh')
+class ComputeCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ }
+ }
+
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_successful(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = computecapacity.ComputeCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py
new file mode 100644
index 000000000..da6e6a22e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cpuload.py
@@ -0,0 +1,262 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cpuload.CPULoad
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import cpuload
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cpuload.ssh')
+class CPULoadTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_setup_mpstat_installed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+ self.assertTrue(l.has_mpstat)
+
+ def test_setup_mpstat_not_installed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+ self.assertFalse(l.has_mpstat)
+
+ def test_execute_command_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = l._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'abcdefg')
+ self.assertRaises(RuntimeError, l._execute_command,
+ "cat /proc/loadavg")
+
+ def test_get_loadavg(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = \
+ (0, '1.50 1.45 1.51 3/813 14322', '')
+ result = l._get_loadavg()
+ expected_result = \
+ {'loadavg': ['1.50', '1.45', '1.51', '3/813', '14322']}
+ self.assertEqual(result, expected_result)
+
+ def test_get_cpu_usage_mpstat(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ l.interval = 1
+ l.count = 1
+ mpstat_output = self._read_file("cpuload_sample_output1.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = l._get_cpu_usage_mpstat()
+
+ expected_result = \
+ {"mpstat_minimum":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}},
+ "mpstat_average":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}},
+ "mpstat_maximun":
+ {"cpu": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"},
+ "cpu0": {"%steal": "0.00", "%usr": "0.00", "%gnice": "0.00",
+ "%idle": "100.00", "%guest": "0.00",
+ "%iowait": "0.00", "%sys": "0.00", "%soft": "0.00",
+ "%irq": "0.00", "%nice": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_get_cpu_usage(self, mock_ssh):
+ options = {
+ "interval": 0,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ l.setup()
+
+ l.interval = 0
+ output = self._read_file("cpuload_sample_output2.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = l._get_cpu_usage()
+
+ expected_result = \
+ {'mpstat':
+ {'cpu':
+ {'%steal': '0.00',
+ '%usr': '11.31',
+ '%gnice': '0.00',
+ '%idle': '81.78',
+ '%iowait': '0.18',
+ '%guest': '5.50',
+ '%sys': '1.19',
+ '%soft': '0.01',
+ '%irq': '0.00',
+ '%nice': '0.03'},
+ 'cpu0':
+ {'%steal': '0.00',
+ '%usr': '20.00',
+ '%gnice': '0.00',
+ '%idle': '71.60',
+ '%iowait': '0.33',
+ '%guest': '6.61',
+ '%sys': '1.37',
+ '%soft': '0.06',
+ '%irq': '0.00',
+ '%nice': '0.03'}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_run_proc_stat(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+
+ args = {'options': options}
+
+ l = cpuload.CPULoad(args, self.ctx)
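+ # A non-zero status during setup() marks mpstat as unavailable, so run()
+ # is expected to fall back to reading /proc/loadavg and /proc/stat.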
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+ l.setup()
+
+ l.interval = 0
+ stat_output = self._read_file("cpuload_sample_output2.txt")
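+ # side_effect supplies consecutive return values: the loadavg read first,
+ # then the /proc/stat sample.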
+ mock_ssh.SSH.from_node().execute.side_effect = \
+ [(0, '1.50 1.45 1.51 3/813 14322', ''), (0, stat_output, '')]
+
+ l.run(self.result)
+ expected_result = {
+ 'loadavg': ['1.50', '1.45', '1.51', '3/813', '14322'],
+ 'mpstat':
+ {'cpu':
+ {'%steal': '0.00',
+ '%usr': '11.31',
+ '%gnice': '0.00',
+ '%idle': '81.78',
+ '%iowait': '0.18',
+ '%guest': '5.50',
+ '%sys': '1.19',
+ '%soft': '0.01',
+ '%irq': '0.00',
+ '%nice': '0.03'},
+ 'cpu0':
+ {'%steal': '0.00',
+ '%usr': '20.00',
+ '%gnice': '0.00',
+ '%idle': '71.60',
+ '%iowait': '0.33',
+ '%guest': '6.61',
+ '%sys': '1.37',
+ '%soft': '0.06',
+ '%irq': '0.00',
+ '%nice': '0.03'}}}
+
+ self.assertDictEqual(self.result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
new file mode 100644
index 000000000..4fadde4dc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
@@ -0,0 +1,167 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.cyclictest.Cyclictest
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import cyclictest
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.cyclictest.ssh')
+class CyclictestTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ "host": "kvm.LF",
+ "setup_options": {
+ "rpm_dir": "/opt/rpm",
+ "host_setup_seqs": [
+ "host-setup0.sh",
+ "host-setup1.sh",
+ "host-run-qemu.sh"
+ ],
+ "script_dir": "/opt/scripts",
+ "image_dir": "/opt/image",
+ "guest_setup_seqs": [
+ "guest-setup0.sh",
+ "guest-setup1.sh"
+ ]
+ },
+ "sla": {
+ "action": "monitor",
+ "max_min_latency": 50,
+ "max_avg_latency": 100,
+ "max_max_latency": 1000
+ },
+ "options": {
+ "priority": 99,
+ "threads": 1,
+ "loops": 1000,
+ "affinity": 1,
+ "interval": 1000,
+ "histogram": 90
+ }
+ }
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.43.154",
+ "key_filename": "/yardstick/resources/files/yardstick_key",
+ "role": "BareMetal",
+ "name": "kvm.LF",
+ "user": "root"
+ }
+ }
+
+ def test_cyclictest_successful_setup(self, mock_ssh):
+
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ c.setup()
+ self.assertIsNotNone(c.guest)
+ self.assertIsNotNone(c.host)
+ self.assertTrue(c.setup_done)
+
+ def test_cyclictest_successful_no_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.pop("sla", None)
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ c.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_cyclictest_successful_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.update({"sla": {
+ "action": "monitor",
+ "max_min_latency": 100,
+ "max_avg_latency": 500,
+ "max_max_latency": 1000
+ }
+ })
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ c.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_min_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_avg_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+ sample_output = '{"min": 100, "avg": 500, "max": 1000}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, c.run, result)
+
+ def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+
+ c.guest = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
new file mode 100644
index 000000000..ba63e5f9e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_lmbench.py
@@ -0,0 +1,192 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import lmbench
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
+
+
+class LmbenchTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_ssh.stop()
+
+ def test_successful_setup(self):
+
+ l = lmbench.Lmbench({}, self.ctx)
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ l.setup()
+ self.assertIsNotNone(l.client)
+ self.assertTrue(l.setup_done)
+
+ def test_unsuccessful_unknown_type_run(self):
+
+ options = {
+ "test_type": "foo"
+ }
+ args = {'options': options}
+
+ l = lmbench.Lmbench(args, self.ctx)
+
+ self.assertRaises(RuntimeError, l.run, self.result)
+
+ def test_successful_latency_run_no_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {'options': options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 4.944, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_bandwidth_run_no_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {"options": options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_latency_run_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 4.944, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = {"latencies0.latency": 4.944, "latencies0.size": 0.00049}
+ self.assertEqual(self.result, expected_result)
+
+ def test_successful_bandwidth_run_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"min_bandwidth": 10000}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 11025.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_unsuccessful_latency_run_sla(self):
+
+ options = {
+ "test_type": "latency",
+ "stride": 64,
+ "stop_size": 16
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '[{"latency": 37.5, "size": 0.00049}]'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
+
+ def test_unsuccessful_bandwidth_run_sla(self):
+
+ options = {
+ "test_type": "bandwidth",
+ "size": 500,
+ "benchmark": "rd",
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"min_bandwidth": 10000}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = '{"size(MB)": 0.262144, "bandwidth(MBps)": 9925.5}'
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, l.run, self.result)
+
+ def test_successful_latency_for_cache_run_sla(self):
+
+ options = {
+ "test_type": "latency_for_cache",
+ "repetition": 1,
+ "warmup": 0
+ }
+ args = {
+ "options": options,
+ "sla": {"max_latency": 35}
+ }
+ l = lmbench.Lmbench(args, self.ctx)
+
+ sample_output = "{\"L1cache\": 1.6}"
+ self.mock_ssh.from_node().execute.return_value = (0, sample_output, '')
+ l.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_unsuccessful_script_error(self):
+
+ options = {"test_type": "bandwidth"}
+ args = {"options": options}
+ l = lmbench.Lmbench(args, self.ctx)
+
+ self.mock_ssh.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, l.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py
new file mode 100644
index 000000000..8213d4490
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_memload.py
@@ -0,0 +1,109 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.memload.MEMLoad
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.compute import memload
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.memload.ssh')
+class MEMLoadTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_memload_successful_setup(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ m.setup()
+ self.assertIsNotNone(m.client)
+ self.assertTrue(m.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = m._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ m = memload.MEMLoad({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (
+ 127, '', 'Failed executing command')
+ self.assertRaises(RuntimeError, m._execute_command,
+ "cat /proc/meminfo")
+
+ def test_get_mem_usage_successful(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {"options": options}
+ m = memload.MEMLoad(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ m.setup()
+
+ output = self._read_file("memload_sample_output.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, output, '')
+ result = m._get_mem_usage()
+ expected_result = {
+ "max": {
+ 'shared': 2844,
+ 'buff/cache': 853528,
+ 'total': 263753976,
+ 'free': 187016644,
+ 'used': 76737332
+ },
+ "average": {
+ 'shared': 2844,
+ 'buff/cache': 853528,
+ 'total': 263753976,
+ 'free': 187016644,
+ 'used': 76737332
+ },
+ "free": {
+ "memory0": {
+ "used": "76737332",
+ "buff/cache": "853528",
+ "free": "187016644",
+ "shared": "2844",
+ "total": "263753976",
+ "available": "67252400"
+ }
+ }
+ }
+
+ self.assertEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py
new file mode 100644
index 000000000..875301729
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_plugintest.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.plugintest.PluginTest
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import plugintest
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.plugintest.ssh')
+class PluginTestTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host1': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ }
+ }
+
+ self.result = {}
+
+ def test_sample_successful_setup(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_sample_successful(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+
+ sample_output = '{"Test Output": "Hello world!"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ s.run(self.result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(self.result, expected_result)
+
+ def test_sample_unsuccessful_script_error(self, mock_ssh):
+ s = plugintest.PluginTest({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
new file mode 100644
index 000000000..02040ca01
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_qemumigrate.py
@@ -0,0 +1,158 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.qemu_migrate.QemuMigrate
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import qemu_migrate
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.qemu_migrate.ssh')
+class QemuMigrateTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scenario_cfg = {
+ "host": "kvm.LF",
+ "setup_options": {
+ "rpm_dir": "/opt/rpm",
+ "script_dir": "/opt/scripts",
+ "image_dir": "/opt/image",
+ "host_setup_seqs": [
+ "host-setup0.sh",
+ "host-setup1.sh",
+ "setup-ovsdpdk.sh",
+ "host-install-qemu.sh",
+ "host-run-qemu4lm.sh"
+ ]
+ },
+ "sla": {
+ "action": "monitor",
+ "max_totaltime": 10,
+ "max_downtime": 0.10,
+ "max_setuptime": 0.50
+ },
+ "options": {
+ "smp": 99,
+ "migrate_to_port": 4444,
+ "incoming_ip": 0,
+ "qmp_src_path": "/tmp/qmp-sock-src",
+ "qmp_dst_path": "/tmp/qmp-sock-dst",
+ "max_down_time": "0.10"
+ }
+ }
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.43.154",
+ "key_filename": "/yardstick/resources/files/yardstick_key",
+ "role": "BareMetal",
+ "name": "kvm.LF",
+ "user": "root"
+ }
+ }
+
+ def test_qemu_migrate_successful_setup(self, mock_ssh):
+
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ q.setup()
+ self.assertIsNotNone(q.host)
+ self.assertTrue(q.setup_done)
+
+ def test_qemu_migrate_successful_no_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.pop("sla", None)
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ q.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_qemu_migrate_successful_sla(self, mock_ssh):
+ result = {}
+ self.scenario_cfg.update({"sla": {
+ "action": "monitor",
+ "max_totaltime": 15,
+ "max_downtime": 2,
+ "max_setuptime": 1
+ }
+ })
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ q.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_qemu_migrate_unsuccessful_sla_totaltime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_totaltime": 10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_sla_downtime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_downtime": 0.10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_sla_setuptime(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_setuptime": 0.50}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ sample_output = '{"totaltime": 15, "downtime": 2, "setuptime": 1}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, q.run, result)
+
+ def test_qemu_migrate_unsuccessful_script_error(self, mock_ssh):
+
+ result = {}
+ self.scenario_cfg.update({"sla": {"max_totaltime": 10}})
+ q = qemu_migrate.QemuMigrate(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ q.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, q.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
new file mode 100644
index 000000000..9e055befe
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_ramspeed.py
@@ -0,0 +1,236 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.ramspeed.Ramspeed
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.common import utils
+from yardstick.benchmark.scenarios.compute import ramspeed
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.ramspeed.ssh')
+class RamspeedTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_ramspeed_successful_setup(self, mock_ssh):
+
+ r = ramspeed.Ramspeed({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ r.setup()
+ self.assertIsNotNone(r.client)
+ self.assertTrue(r.setup_done)
+
+ def test_ramspeed_successful__run_no_sla(self, mock_ssh):
+
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 19909.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 19873.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4, "Bandwidth(MBps)":\
+ 19907.56}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8,\
+ "Bandwidth(MBps)": 19906.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32, "Bandwidth(MBps)": 19395.65},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 64, "Bandwidth(MBps)":\
+ 17623.14}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 128,\
+ "Bandwidth(MBps)": 17677.36}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 256, "Bandwidth(MBps)": 16113.49}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 512, "Bandwidth(MBps)": 14659.19},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 1024, "Bandwidth(MBps)":\
+ 14680.75}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 2048,\
+ "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
+ "Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
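+ # The scenario is expected to flatten the nested JSON result, so the
+ # expectation is flattened with the same helper.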
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_successful_run_sla(self, mock_ssh):
+
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 6000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 19909.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 19873.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4, "Bandwidth(MBps)":\
+ 19907.56}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8,\
+ "Bandwidth(MBps)": 19906.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32, "Bandwidth(MBps)": 19395.65},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 64, "Bandwidth(MBps)":\
+ 17623.14}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 128,\
+ "Bandwidth(MBps)": 17677.36}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 256, "Bandwidth(MBps)": 16113.49}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 512, "Bandwidth(MBps)": 14659.19},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 1024, "Bandwidth(MBps)":\
+ 14680.75}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 2048,\
+ "Bandwidth(MBps)": 14756.45}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 4096, "Bandwidth(MBps)": 14604.44}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 8192, "Bandwidth(MBps)": 14159.86},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 16384,\
+ "Bandwidth(MBps)": 14128.94}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_unsuccessful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 1,
+ "load": 8,
+ "block_size": 64
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 100000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1, "Bandwidth(MBps)": 5000.18}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2, "Bandwidth(MBps)": 5000.89},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4,\
+ "Bandwidth(MBps)": 5000.56}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 8, "Bandwidth(MBps)": 19906.94}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 16, "Bandwidth(MBps)": 19881.74},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 32,\
+ "Bandwidth(MBps)": 19395.65}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 64, "Bandwidth(MBps)": 17623.14}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 128, "Bandwidth(MBps)": 17677.36},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 256, "Bandwidth(MBps)":\
+ 16113.49}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 512,\
+ "Bandwidth(MBps)": 14659.19}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 1024, "Bandwidth(MBps)": 14680.75}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 2048, "Bandwidth(MBps)": 14756.45},\
+ {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 4096, "Bandwidth(MBps)":\
+ 14604.44}, {"Test_type": "INTEGER & WRITING", "Block_size(kb)": 8192,\
+ "Bandwidth(MBps)": 14159.86}, {"Test_type": "INTEGER & WRITING",\
+ "Block_size(kb)": 16384, "Bandwidth(MBps)": 14128.94}, {"Test_type":\
+ "INTEGER & WRITING", "Block_size(kb)": 32768, "Bandwidth(MBps)": 8340.85}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
+
+ def test_ramspeed_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "test_id": 1,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, r.run, self.result)
+
+ def test_ramspeed_mem_successful_run_no_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 8353.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 9078.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 10057.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 10116.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 9401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_mem_successful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 6000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 8353.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 9078.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 10057.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 10116.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 9401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ r.run(self.result)
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ self.assertEqual(self.result, expected_result)
+
+ def test_ramspeed_mem_unsuccessful_run_sla(self, mock_ssh):
+ options = {
+ "test_id": 3,
+ "load": 16,
+ "block_size": 32,
+ "iteration": 1
+ }
+ args = {"options": options, "sla": {"min_bandwidth": 86000}}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ sample_output = '{"Result": [{"Test_type": "INTEGER Copy:",\
+ "Bandwidth(MBps)": 4000.97}, {"Test_type": "INTEGER Scale:",\
+ "Bandwidth(MBps)": 4400.59}, {"Test_type": "INTEGER Add:",\
+ "Bandwidth(MBps)": 4300.48}, {"Test_type": "INTEGER Triad:",\
+ "Bandwidth(MBps)": 1300.27}, {"Test_type": "INTEGER AVERAGE:",\
+ "Bandwidth(MBps)": 2401.58}]}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, r.run, self.result)
+
+ def test_ramspeed_unsuccessful_unknown_type_run(self, mock_ssh):
+ options = {
+ "test_id": 30,
+ "load": 16,
+ "block_size": 32
+ }
+ args = {'options': options}
+ r = ramspeed.Ramspeed(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (
+ 1, '', 'No such type_id: 30 for Ramspeed scenario')
+ self.assertRaises(RuntimeError, r.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py
new file mode 100644
index 000000000..643e1eae2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.spec_cpu.SpecCPU
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.compute import spec_cpu
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.spec_cpu.ssh')
+class SpecCPUTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_spec_cpu_successful_setup(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "output_format": "all",
+ "runspec_iterations": "1",
+ "runspec_tune": "base",
+ "runspec_size": "test"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_spec_cpu_successful__run_no_sla(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ s.run(self.result)
+ expected_result = {}
+ self.assertEqual(self.result, expected_result)
+
+ def test_spec_cpu_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "benchmark_subset": "int"
+ }
+ args = {"options": options}
+ s = spec_cpu.SpecCPU(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py
new file mode 100644
index 000000000..74ef576b6
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_spec_cpu_for_vm.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.spec_cpu_for_vm.SpecCPUforVM
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.compute import spec_cpu_for_vm
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.spec_cpu_for_vm.ssh')
+class SpecCPUforVMTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_spec_cpu_successful_setup(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all",
+ "runspec_iterations": "1",
+ "runspec_size": "test"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ s.setup()
+ self.assertIsNotNone(s.client)
+ self.assertTrue(s.setup_done)
+
+ def test_spec_cpu_successful__run_no_sla(self, mock_ssh):
+
+ options = {
+ "SPECint_benchmark": "perlbench",
+ "runspec_tune": "all",
+ "output_format": "all"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_ssh.SSH.from_node().get.return_value = (0, '', '')
+ s.run(self.result)
+ expected_result = {'SPEC_CPU_result': ''}
+ self.assertEqual(self.result, expected_result)
+
+ def test_spec_cpu_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "benchmark_subset": "int"
+ }
+ args = {"options": options}
+ s = spec_cpu_for_vm.SpecCPUforVM(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, s.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
new file mode 100644
index 000000000..e4a8d6e26
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/compute/test_unixbench.py
@@ -0,0 +1,163 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.compute.unixbench.Unixbench
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.compute import unixbench
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.compute.unixbench.ssh')
+class UnixbenchTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
+ }
+
+ def test_unixbench_successful_setup(self, mock_ssh):
+
+ u = unixbench.Unixbench({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ u.setup()
+ self.assertIsNotNone(u.client)
+ self.assertTrue(u.setup_done)
+
+ def test_unixbench_successful_no_sla(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ args = {
+ "options": options,
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"Score":"4425.4"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_successful_in_quiet_mode(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'quiet',
+ "copies": 1
+ }
+ args = {
+ "options": options,
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"Score":"4425.4"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_successful_sla(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ sla = {
+ "single_score": '100',
+ "parallel_score": '500'
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ sample_output = '{"signle_score":"2251.7","parallel_score":"4395.9"}'
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ u.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ self.assertEqual(result, expected_result)
+
+ def test_unixbench_unsuccessful_sla_single_score(self, mock_ssh):
+
+ args = {
+ "options": {},
+ "sla": {"single_score": "500"}
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+ sample_output = '{"single_score":"200.7","parallel_score":"4395.9"}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
+
+ def test_unixbench_unsuccessful_sla_parallel_score(self, mock_ssh):
+
+ args = {
+ "options": {},
+ "sla": {"parallel_score": "4000"}
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+ sample_output = '{"signle_score":"2251.7","parallel_score":"3395.9"}'
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, u.run, result)
+
+ def test_unixbench_unsuccessful_script_error(self, mock_ssh):
+
+ options = {
+ "test_type": 'dhry2reg',
+ "run_mode": 'verbose'
+ }
+ sla = {
+ "single_score": '100',
+ "parallel_score": '500'
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ u = unixbench.Unixbench(args, self.ctx)
+ result = {}
+
+ u.server = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, u.run, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py b/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/dummy/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py b/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py
new file mode 100644
index 000000000..875302da8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/dummy/test_dummy.py
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.dummy.dummy
+
+from __future__ import absolute_import
+import unittest
+
+from yardstick.benchmark.scenarios.dummy import dummy
+
+
+class DummyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.test_context = dummy.Dummy(None, None)
+
+ self.assertIsNone(self.test_context.scenario_cfg)
+ self.assertIsNone(self.test_context.context_cfg)
+ self.assertFalse(self.test_context.setup_done)
+
+ def test_run(self):
+ result = {}
+ self.test_context.run(result)
+
+ self.assertEqual(result["hello"], "yardstick")
+ self.assertTrue(self.test_context.setup_done)
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
new file mode 100644
index 000000000..9b3afd1fb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_chassis_output.txt
@@ -0,0 +1,14 @@
+{
+ "@odata.id": "/redfish/v1/Chassis",
+ "Name": "ChassisCollection",
+ "@odata.context": "/redfish/v1/$metadata#ChassisCollection.ChassisCollection",
+ "Members": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.type": "#ChassisCollection.ChassisCollection",
+ "@odata.etag": "\"af5a94479815eb5f87fe91ea08fde0ac\"",
+ "Members@odata.count": 1,
+ "Description": "A collection of Chassis resource instances."
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
new file mode 100644
index 000000000..343ed3667
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/energy_sample_power_metrics.txt
@@ -0,0 +1,300 @@
+{
+ "PowerControl@odata.count": 1,
+ "@odata.id": "/redfish/v1/Chassis/1/Power",
+ "Redundancy@odata.count": 1,
+ "@odata.context": "/redfish/v1/$metadata#Power.Power",
+ "Voltages": [
+ {
+ "MaxReadingRange": 14.28,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/0",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 140,
+ "Name": "SysBrd 12V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 10.81,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "0",
+ "MinReadingRange": null,
+ "ReadingVolts": 12.15,
+ "UpperThresholdCritical": 13.22
+ },
+ {
+ "MaxReadingRange": 3.95,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/1",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 141,
+ "Name": "SysBrd 3.3V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.98,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "1",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 3.63,
+ "ReadingVolts": 3.36
+ },
+ {
+ "MaxReadingRange": 5.97,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/2",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 142,
+ "Name": "SysBrd 5V",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 4.49,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "2",
+ "MinReadingRange": null,
+ "UpperThresholdCritical": 5.5,
+ "ReadingVolts": 5.03
+ },
+ {
+ "MaxReadingRange": 3.32,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Systems/1"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Voltages/3",
+ "Status": {
+ "State": "Enabled"
+ },
+ "SensorNumber": 3,
+ "Name": "CMOS Battery",
+ "PhysicalContext": "VoltageRegulator",
+ "LowerThresholdCritical": 2.25,
+ "RelatedItem@odata.count": 2,
+ "MemberId": "3",
+ "MinReadingRange": null,
+ "LowerThresholdNonCritical": 2.39,
+ "ReadingVolts": 3.12
+ }
+ ],
+ "Voltages@odata.count": 4,
+ "Redundancy": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/Redundancy/0",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "Name": "PSU Redundancy",
+ "MinNumNeeded": 2,
+ "Oem": {
+ "Lenovo": {
+ "NonRedundantAvailablePower": 1100,
+ "@odata.type": "#LenovoRedundancy.v1_0_0.LenovoRedundancyProperties",
+ "PowerRedundancySettings": {
+ "EstimatedUsage": "58.55%",
+ "MaxPowerLimitWatts": 1100,
+ "PowerFailureLimit": 0,
+ "PowerRedundancyPolicy": "RedundantWithThrottling"
+ }
+ }
+ },
+ "RedundancyEnabled": true,
+ "RedundancySet": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0"
+ },
+ {
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1"
+ }
+ ],
+ "RedundancySet@odata.count": 2,
+ "MaxNumSupported": 2,
+ "Mode": "N+m",
+ "MemberId": "0"
+ }
+ ],
+ "Description": "Power Consumption and Power Limiting",
+ "Name": "Power",
+ "PowerSupplies@odata.count": 2,
+ "Oem": {
+ "Lenovo": {
+ "@odata.type": "#LenovoPower.v1_0_0.Capabilities",
+ "LocalPowerControlEnabled": true,
+ "PowerOnPermissionEnabled": true,
+ "PowerRestorePolicy": "Restore",
+ "WakeOnLANEnabled": true
+ }
+ },
+ "@odata.type": "#Power.v1_5_1.Power",
+ "Id": "Power",
+ "@odata.etag": "\"ad85a1403e07a433386e9907d00565cc\"",
+ "PowerControl": [
+ {
+ "PowerAllocatedWatts": 1100,
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerControl/0",
+ "Status": {
+ "HealthRollup": "Warning",
+ "State": "Enabled"
+ },
+ "PowerLimit": {
+ "LimitException": "NoAction",
+ "LimitInWatts": null
+ },
+ "Name": "Server Power Control",
+ "Oem": {
+ "Lenovo": {
+ "PowerUtilization": {
+ "MaxLimitInWatts": 1100,
+ "EnablePowerCapping": false,
+ "LimitMode": "AC",
+ "EnablePowerCapping@Redfish.Deprecated": "The property is deprecated. Please use LimitInWatts instead.",
+ "CapacityMinAC": 617,
+ "MinLimitInWatts": 0,
+ "GuaranteedInWatts": 617,
+ "CapacityMinDC": 578,
+ "CapacityMaxDC": 749,
+ "CapacityMaxAC": 802
+ },
+ "HistoryPowerMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerControl/0/Oem/Lenovo/HistoryPowerMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerControl"
+ }
+ },
+ "PowerAvailableWatts": 0,
+ "PowerMetrics": {
+ "IntervalInMin": 60,
+ "AverageConsumedWatts": 314.716675,
+ "MinConsumedWatts": 311,
+ "MaxConsumedWatts": 318
+ },
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PowerRequestedWatts": 802,
+ "PowerConsumedWatts": 344,
+ "PowerCapacityWatts": 1100
+ }
+ ],
+ "PowerSupplies": [
+ {
+ "SerialNumber": "A4DB8BP11WJ",
+ "InputRanges": [
+ {
+ "InputType": null,
+ "OutputWattage": null,
+ "MinimumVoltage": null,
+ "MaximumVoltage": null
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/0",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "0",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "Warning"
+ },
+ "LineInputVoltage": null,
+ "Name": "PSU1",
+ "PowerSupplyType": "Unknown",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 1"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/0/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": null,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "Unknown",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ },
+ {
+ "SerialNumber": "A4DB8BP12J7",
+ "InputRanges": [
+ {
+ "InputType": "AC",
+ "OutputWattage": 1100,
+ "MinimumVoltage": 200,
+ "MaximumVoltage": 240
+ }
+ ],
+ "@odata.id": "/redfish/v1/Chassis/1/Power#/PowerSupplies/1",
+ "RelatedItem@odata.count": 1,
+ "MemberId": "1",
+ "PartNumber": "SP57A02023",
+ "FirmwareVersion": "4.52",
+ "Status": {
+ "State": "Enabled",
+ "Health": "OK"
+ },
+ "LineInputVoltage": 220,
+ "Name": "PSU2",
+ "PowerSupplyType": "AC",
+ "LastPowerOutputWatts": 316,
+ "Oem": {
+ "Lenovo": {
+ "Location": {
+ "InfoFormat": "Slot X",
+ "Info": "Slot 2"
+ },
+ "HistoryPowerSupplyMetric": {
+ "@odata.id": "/redfish/v1/Chassis/1/Power/PowerSupplies/1/Oem/Lenovo/HistoryPowerSupplyMetric"
+ },
+ "@odata.type": "#LenovoPower.v1_0_0.PowerSupply"
+ }
+ },
+ "PowerCapacityWatts": 1100,
+ "Manufacturer": "ACBE",
+ "LineInputVoltageType": "ACMidLine",
+ "Model": "LENOVO-SP57A02023",
+ "RelatedItem": [
+ {
+ "@odata.id": "/redfish/v1/Chassis/1"
+ }
+ ]
+ }
+ ]
+}
diff --git a/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
new file mode 100644
index 000000000..98daefeb7
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/energy/test_energy.py
@@ -0,0 +1,182 @@
+##############################################################################
+# Copyright (c) 2019 Lenovo Group Limited Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.energy.energy.Energy
+
+from __future__ import absolute_import
+import unittest
+import mock
+import os
+from yardstick.benchmark.scenarios.energy import energy
+
+
+class EnergyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'target': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'password': 'passw0rd',
+ 'redfish_ip': '10.229.17.105',
+ 'redfish_user': 'USERID',
+ 'redfish_pwd': "PASSW0RD",
+ }
+ }
+ self.result = {}
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ p.setup()
+ self.assertTrue(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_setup_response_failed(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ p.setup()
+ self.assertFalse(p.get_response)
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_list_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_chassis_output.txt")
+ expect_result = str(expect_result)
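+ # The fixture file is already double-quoted JSON; the replace below is
+ # only a safeguard in case single quotes ever sneak into the sample.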
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, ["/redfish/v1/Chassis/1"])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_load_chassis_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.load_chassis_list()
+ self.assertEqual(self.result, [])
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ expect_result = self._read_file("energy_sample_power_metrics.txt")
+ expect_result = str(expect_result)
+ expect_result = expect_result.replace("'", '"')
+ mock_send_request.return_value.status_code = 200
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, 344)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_response_fail(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_wrongtype_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = {}
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_energy_getpower_improper_key(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ expect_result = '{"some_key": "some_value"}'
+ mock_send_request.return_value.text = expect_result
+ self.result = p.get_power("/redfish/v1/Chassis/1")
+ self.assertEqual(self.result, -1)
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_success(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
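+ # Stub chassis discovery and power readout so run() only exercises the
+ # result aggregation.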
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ power = mock.Mock(return_value=344)
+ p.get_power = power
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": 344})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_no_response(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 404
+ chassis_list = mock.Mock(return_value=["/redfish/v1/Chassis/1"])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ @mock.patch('yardstick.benchmark.scenarios.'
+ 'energy.energy.Energy._send_request')
+ def test_run_wrong_chassis(self, mock_send_request):
+ args = {}
+ p = energy.Energy(args, self.ctx)
+ mock_send_request.return_value.status_code = 200
+ chassis_list = mock.Mock(return_value=[])
+ p.load_chassis_list = chassis_list
+ p.run(self.result)
+ self.assertEqual(self.result, {"power": -1})
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py b/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py
new file mode 100644
index 000000000..af4f0c8ab
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_add_memory_load.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.add_memory_load import AddMemoryLoad
+
+
+class AddMemoryLoadTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_with_load(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'memory_load': 0.5
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
+ mock_from_node().execute.return_value = (0, '0 2048 512', '')
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_without_load(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'memory_load': 0
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called_once()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ def test_add_memory_load_without_args(self, mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ }
+ }
+ context_cfg = {
+ 'host': {}
+ }
+ obj = AddMemoryLoad(scenario_cfg, context_cfg)
+ obj.run({})
+ mock_from_node.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
new file mode 100644
index 000000000..bb7fa4536
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_attach_volume.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import attach_volume
+
+
+class AttachVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_attach_volume_to_server = mock.patch.object(
+ openstack_utils, 'attach_volume_to_server')
+ self.mock_attach_volume_to_server = (
+ self._mock_attach_volume_to_server.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(attach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+ self.addCleanup(self._stop_mock)
+ self.attachvol_obj = attach_volume.AttachVolume(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_attach_volume_to_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_attach_volume_to_server.return_value = True
+ self.assertIsNone(self.attachvol_obj.run(self.result))
+ self.assertEqual({'attach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Attach volume to server successful!')
+
+ def test_run_fail(self):
+ self.mock_attach_volume_to_server.return_value = False
+ with self.assertRaises(exceptions.ScenarioAttachVolumeError):
+ self.attachvol_obj.run(self.result)
+ self.assertEqual({'attach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Attach volume to server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py
new file mode 100644
index 000000000..a48353a4f
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_connectivity.py
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.lib.check_connectivity.CheckConnectivity
+
+from __future__ import absolute_import
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.lib import check_connectivity
+
+
+class CheckConnectivityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ssh_port': '22'
+ },
+ 'target': {
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+
+ @mock.patch('yardstick.benchmark.scenarios.lib.check_connectivity.ssh')
+ def test_check_connectivity(self, mock_ssh):
+
+ args = {
+ 'options': {'src_ip_addr': '192.168.23.2',
+ 'dest_ip_addr': '192.168.23.10',
+ 'ssh_user': 'root',
+ 'ssh_passwd': 'root',
+ 'ssh_port': '22',
+ 'ssh_timeout': 600,
+ 'ping_parameter': "-s 2048"
+ },
+ 'sla': {'status': 'True',
+ 'action': 'assert'}
+ }
+
+ # TODO(elfoley): Properly check the outputs
+ result = {} # pylint: disable=unused-variable
+
+ obj = check_connectivity.CheckConnectivity(args, {})
+ obj.setup()
+ mock_ssh.SSH.execute.return_value = (0, '100', '')
+
+ @mock.patch('yardstick.benchmark.scenarios.lib.check_connectivity.ssh')
+ def test_check_connectivity_key(self, mock_ssh):
+
+ args = {
+ 'options': {'ssh_user': 'root',
+ 'ssh_key': '/root/.ssh/id_rsa',
+ 'ssh_port': '22',
+ 'ssh_timeout': 600,
+ 'ping_parameter': "-s 2048"
+ },
+ 'sla': {'status': 'True',
+ 'action': 'assert'}
+ }
+
+ # TODO(elfoley): Properly check the outputs
+ result = {} # pylint: disable=unused-variable
+
+ obj = check_connectivity.CheckConnectivity(args, self.ctx)
+ obj.setup()
+
+ mock_ssh.SSH.execute.return_value = (0, '100', '')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py
new file mode 100644
index 000000000..270c9d3c9
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_numa_info.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.lib.check_numa_info import CheckNumaInfo
+
+
+class CheckNumaInfoTestCase(unittest.TestCase):
+
+ @mock.patch.object(CheckNumaInfo, '_check_vm2_status')
+ def test_run(self, mock_check_vm2):
+ scenario_cfg = {'info1': {}, 'info2': {}}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ obj.run({})
+ mock_check_vm2.assert_called_once()
+
+ def test_check_vm2_status_length_eq_1(self):
+ info1 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ info2 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertTrue(status)
+
+ def test_check_vm2_status_length_gt_1(self):
+ info1 = {
+ 'pinning': [0, 1],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ info2 = {
+ 'pinning': [0, 1],
+ 'vcpupin': [{
+ 'cpuset': '1,2'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertFalse(status)
+
+ def test_check_vm2_status_length_not_in_set(self):
+ info1 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,7'
+ }]
+ }
+ info2 = {
+ 'pinning': [0],
+ 'vcpupin': [{
+ 'cpuset': '1,7'
+ }]
+ }
+ scenario_cfg = {'info1': info1, 'info2': info2}
+ obj = CheckNumaInfo(scenario_cfg, {})
+ status = obj._check_vm2_status(info1, info2)
+ self.assertFalse(status)
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
new file mode 100644
index 000000000..b0488bacd
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_check_value.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+from yardstick.benchmark.scenarios.lib import check_value
+from yardstick.common import exceptions as y_exc
+
+
+class CheckValueTestCase(unittest.TestCase):
+
+ def test_eq_pass(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_ne_pass(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertEqual({}, result)
+
+ def test_result(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 1},
+ 'output': 'foo'}
+ obj = check_value.CheckValue(scenario_cfg, {})
+ result = obj.run({})
+
+ self.assertDictEqual(result, {'foo': 'PASS'})
+
+ def test_eq(self):
+ scenario_cfg = {'options': {'operator': 'eq',
+ 'value1': 1,
+ 'value2': 2}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
+
+ def test_ne(self):
+ scenario_cfg = {'options': {'operator': 'ne',
+ 'value1': 1,
+ 'value2': 1}}
+ obj = check_value.CheckValue(scenario_cfg, {})
+
+ with self.assertRaises(y_exc.ValueCheckError):
+ result = obj.run({})
+ self.assertEqual({}, result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py
new file mode 100644
index 000000000..0b175fae8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_flavor.py
@@ -0,0 +1,29 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.create_flavor import CreateFlavor
+
+
+class CreateFlavorTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.create_flavor')
+ def test_create_flavor(self, mock_create_flavor):
+ options = {
+ 'flavor_name': 'yardstick_test_flavor',
+ 'vcpus': '2',
+ 'ram': '1024',
+ 'disk': '100',
+ 'is_public': 'True'
+ }
+ args = {"options": options}
+ obj = CreateFlavor(args, {})
+ obj.run({})
+ mock_create_flavor.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py
new file mode 100644
index 000000000..894cc1c2a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_floating_ip.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib import create_floating_ip
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+
+
+class CreateFloatingIpTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_create_floating_ip = mock.patch.object(
+ openstack_utils, 'create_floating_ip')
+ self.mock_create_floating_ip = self._mock_create_floating_ip.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_floating_ip, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name_or_id': 'yardstick_net'}}
+ self.result = {}
+
+ self.fip_obj = create_floating_ip.CreateFloatingIp(self.args, mock.ANY)
+ self.fip_obj.scenario_cfg = {'output': 'key1\nkey2'}
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_floating_ip.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_create_floating_ip.return_value = {'fip_id': 'value1',
+ 'fip_addr': 'value2'}
+ output = self.fip_obj.run(self.result)
+ self.assertEqual({'floating_ip_create': 1}, self.result)
+ self.assertEqual({'key1': 'value1', 'key2': 'value2'}, output)
+ self.mock_log.info.assert_called_once_with(
+ 'Creating floating ip successful!')
+
+ def test_run_no_fip(self):
+ self.mock_create_floating_ip.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateFloatingIPError):
+ self.fip_obj.run(self.result)
+ self.assertEqual({'floating_ip_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Creating floating ip failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
new file mode 100644
index 000000000..aebd1dfe8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_image.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+from oslo_utils import uuidutils
+import unittest
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_image
+
+
+class CreateImageTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_create_image = mock.patch.object(
+ openstack_utils, 'create_image')
+ self.mock_create_image = (
+ self._mock_create_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'image_name': 'yardstick_image'}}
+ self.result = {}
+ self.cimage_obj = create_image.CreateImage(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cimage_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_image.return_value = _uuid
+ output = self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create image successful!')
+
+ def test_run_fail(self):
+ self.mock_create_image.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateImageError):
+ self.cimage_obj.run(self.result)
+ self.assertEqual({'image_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
new file mode 100644
index 000000000..a7b683f47
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_keypair.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_keypair
+
+
+class CreateKeypairTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_keypair = mock.patch.object(
+ openstack_utils, 'create_keypair')
+ self.mock_create_keypair = (
+ self._mock_create_keypair.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+
+ self.ckeypair_obj = create_keypair.CreateKeypair(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.ckeypair_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_keypair.return_value = {
+ 'name': 'key-name', 'type': 'ssh', 'id': _uuid}
+ output = self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_create_keypair.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateKeypairError):
+ self.ckeypair_obj.run(self.result)
+ self.assertDictEqual({'keypair_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create keypair failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py
new file mode 100644
index 000000000..17a4ef2e1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_network.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_network
+
+
+class CreateNetworkTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_net = mock.patch.object(
+ openstack_utils, 'create_neutron_net')
+ self.mock_create_neutron_net = self._mock_create_neutron_net.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_network, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name': 'yardstick_net'}}
+ self.result = {}
+
+ self._cnet_obj = create_network.CreateNetwork(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_net.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self._cnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_net.return_value = _uuid
+ output = self._cnet_obj.run(self.result)
+ self.assertEqual({"network_create": 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create network successful!')
+
+ def test_run_fail_exception(self):
+ self.mock_create_neutron_net.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateNetworkError):
+ self._cnet_obj.run(self.result)
+ self.assertEqual({"network_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Create network failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py
new file mode 100644
index 000000000..bea02a630
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_port.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.create_port import CreatePort
+
+
+class CreatePortTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ def test_create_port(self, mock_get_neutron_client):
+ options = {
+ 'openstack_paras': {
+ 'name': 'yardstick_port'
+ }
+ }
+ args = {"options": options}
+ obj = CreatePort(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py
new file mode 100644
index 000000000..8d6f119ab
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_router.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_router
+
+
+class CreateRouterTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_router = mock.patch.object(
+ openstack_utils, 'create_neutron_router')
+ self.mock_create_neutron_router = (
+ self._mock_create_neutron_router.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_router, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name': 'yardstick_net'}}
+ self.result = {}
+
+ self.crouter_obj = create_router.CreateRouter(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_router.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.crouter_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_router.return_value = _uuid
+ output = self.crouter_obj.run(self.result)
+ self.assertEqual({"router_create": 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create router successful!')
+
+ def test_run_fail(self):
+ self.mock_create_neutron_router.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateRouterError):
+ self.crouter_obj.run(self.result)
+ self.assertEqual({"router_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create router failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
new file mode 100644
index 000000000..0477a49d4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_sec_group.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_sec_group
+
+
+class CreateSecurityGroupTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_security_group_full = mock.patch.object(
+ openstack_utils, 'create_security_group_full')
+ self.mock_create_security_group_full = (
+ self._mock_create_security_group_full.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_sec_group, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'sg_name': 'yardstick_sg'}}
+ self.result = {}
+
+ self.csecgp_obj = create_sec_group.CreateSecgroup(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_security_group_full.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.csecgp_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_security_group_full.return_value = _uuid
+ output = self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with(
+ 'Create security group successful!')
+
+ def test_run_fail(self):
+ self.mock_create_security_group_full.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateSecurityGroupError):
+ self.csecgp_obj.run(self.result)
+ self.assertEqual({'sg_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Create security group failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
new file mode 100644
index 000000000..b58785112
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_server.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_server
+
+
+class CreateServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_instance_and_wait_for_active = mock.patch.object(
+ openstack_utils, 'create_instance_and_wait_for_active')
+ self.mock_create_instance_and_wait_for_active = (
+ self._mock_create_instance_and_wait_for_active.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {
+ 'options': {'name': 'server-name', 'image': 'image-name',
+ 'flavor': 'flavor-name'}}
+ self.result = {}
+
+ self.addCleanup(self._stop_mock)
+ self.cserver_obj = create_server.CreateServer(self.args, mock.ANY)
+
+ def _stop_mock(self):
+ self._mock_create_instance_and_wait_for_active.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cserver_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_instance_and_wait_for_active.return_value = (
+ {'name': 'server-name', 'flavor': 'flavor-name', 'id': _uuid})
+ output = self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 1}, self.result)
+ self.assertEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create server successful!')
+
+ def test_run_fail(self):
+ self.mock_create_instance_and_wait_for_active.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateServerError):
+ self.cserver_obj.run(self.result)
+ self.assertEqual({'instance_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py
new file mode 100644
index 000000000..856e985c4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_subnet.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_subnet
+
+
+class CreateSubnetTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_neutron_subnet = mock.patch.object(
+ openstack_utils, 'create_neutron_subnet')
+ self.mock_create_neutron_subnet = (
+ self._mock_create_neutron_subnet.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_subnet, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'network_name_or_id': 'yardstick_net'}}
+ self.result = {"subnet_create": 0}
+
+ self._csubnet_obj = create_subnet.CreateSubnet(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_neutron_subnet.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self._csubnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_subnet.return_value = _uuid
+ output = self._csubnet_obj.run(self.result)
+ self.assertDictEqual({"subnet_create": 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create subnet successful!')
+
+ def test_run_fail(self):
+ self._csubnet_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_neutron_subnet.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateSubnetError):
+ self._csubnet_obj.run(self.result)
+ self.assertDictEqual({"subnet_create": 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create subnet failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
new file mode 100644
index 000000000..f91d2c3f4
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_create_volume.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import create_volume
+
+
+class CreateVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_create_volume = mock.patch.object(
+ openstack_utils, 'create_volume')
+ self.mock_create_volume = (
+ self._mock_create_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(create_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'size_gb': 1}}
+ self.result = {}
+
+ self.cvolume_obj = create_volume.CreateVolume(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_create_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.cvolume_obj.scenario_cfg = {'output': 'id'}
+ self.mock_create_volume.return_value = {'name': 'yardstick_volume',
+ 'id': _uuid,
+ 'status': 'available'}
+ output = self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 1}, self.result)
+ self.assertDictEqual({'id': _uuid}, output)
+ self.mock_log.info.assert_called_once_with('Create volume successful!')
+
+ def test_run_fail(self):
+ self.mock_create_volume.return_value = None
+ with self.assertRaises(exceptions.ScenarioCreateVolumeError):
+ self.cvolume_obj.run(self.result)
+ self.assertDictEqual({'volume_create': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Create volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py
new file mode 100644
index 000000000..24dbf8a16
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_flavor.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_flavor import DeleteFlavor
+
+
+class DeleteFlavorTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.delete_flavor')
+ @mock.patch('yardstick.common.openstack_utils.get_nova_client')
+ def test_delete_flavor(self, mock_get_nova_client, mock_delete_flavor):
+ options = {
+ 'flavor_name': 'yardstick_test_flavor'
+ }
+ args = {"options": options}
+ obj = DeleteFlavor(args, {})
+ obj.run({})
+ mock_get_nova_client.assert_called_once()
+ mock_delete_flavor.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py
new file mode 100644
index 000000000..45a39eba2
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_floating_ip.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_floating_ip
+
+
+class DeleteFloatingIpTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_floating_ip = mock.patch.object(
+ openstack_utils, 'delete_floating_ip')
+ self.mock_delete_floating_ip = self._mock_delete_floating_ip.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_floating_ip, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'floating_ip_id': uuidutils.generate_uuid()}}
+ self.result = {}
+
+ self.del_obj = delete_floating_ip.DeleteFloatingIp(
+ self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_floating_ip.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_floating_ip.return_value = True
+ self.assertIsNone(self.del_obj.run(self.result))
+ self.assertEqual({"delete_floating_ip": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete floating ip successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_floating_ip.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteFloatingIPError):
+ self.del_obj.run(self.result)
+ self.assertEqual({"delete_floating_ip": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ "Delete floating ip failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
new file mode 100644
index 000000000..8a1d6d695
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_image.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_image
+
+
+class DeleteImageTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_image = mock.patch.object(
+ openstack_utils, 'delete_image')
+ self.mock_delete_image = (
+ self._mock_delete_image.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_image, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_image'}}
+ self.result = {}
+
+ self.delimg_obj = delete_image.DeleteImage(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_image.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_image.return_value = True
+ self.assertIsNone(self.delimg_obj.run(self.result))
+ self.assertEqual({'delete_image': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete image successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_image.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteImageError):
+ self.delimg_obj.run(self.result)
+ self.assertEqual({'delete_image': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete image failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
new file mode 100644
index 000000000..c7940251e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_keypair.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_keypair
+
+
+class DeleteKeypairTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_keypair = mock.patch.object(
+ openstack_utils, 'delete_keypair')
+ self.mock_delete_keypair = self._mock_delete_keypair.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_keypair, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'key_name': 'yardstick_key'}}
+ self.result = {}
+ self.delkey_obj = delete_keypair.DeleteKeypair(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_keypair.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_keypair.return_value = True
+ self.assertIsNone(self.delkey_obj.run(self.result))
+ self.assertEqual({'delete_keypair': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Delete keypair successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_keypair.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteKeypairError):
+ self.delkey_obj.run(self.result)
+ self.assertEqual({'delete_keypair': 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete keypair failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
new file mode 100644
index 000000000..b6dbf4791
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_network.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_network
+
+
+class DeleteNetworkTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_neutron_net = mock.patch.object(
+ openstack_utils, "delete_neutron_net")
+ self.mock_delete_neutron_net = self._mock_delete_neutron_net.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, "get_shade_client")
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_network, "LOG")
+ self.mock_log = self._mock_log.start()
+ self.args = {"options": {"network_name_or_id": (
+ uuidutils.generate_uuid())}}
+ self.result = {}
+ self.del_obj = delete_network.DeleteNetwork(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_neutron_net.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_neutron_net.return_value = True
+ self.assertIsNone(self.del_obj.run(self.result))
+ self.assertEqual({"delete_network": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete network successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_neutron_net.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteNetworkError):
+ self.del_obj.run(self.result)
+ self.assertEqual({"delete_network": 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete network failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py
new file mode 100644
index 000000000..9fd318580
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_port.py
@@ -0,0 +1,25 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_port import DeletePort
+
+
+class DeletePortTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ def test_delete_port(self, mock_get_neutron_client):
+ options = {
+ 'port_id': '123-123-123'
+ }
+ args = {"options": options}
+ obj = DeletePort(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py
new file mode 100644
index 000000000..b76100f19
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_router
+
+
+class DeleteRouterTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_neutron_router = mock.patch.object(
+ openstack_utils, 'delete_neutron_router')
+ self.mock_delete_neutron_router = (
+ self._mock_delete_neutron_router.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_router, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'router_id': uuidutils.generate_uuid()}}
+ self.result = {"delete_router": 0}
+
+ self._del_obj = delete_router.DeleteRouter(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_neutron_router.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_neutron_router.return_value = True
+ self.assertIsNone(self._del_obj.run(self.result))
+ self.assertEqual({"delete_router": 1}, self.result)
+ self.mock_log.info.assert_called_once_with("Delete router successful!")
+
+ def test_run_fail(self):
+ self.mock_delete_neutron_router.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteRouterError):
+ self._del_obj.run(self.result)
+ self.assertEqual({"delete_router": 0}, self.result)
+ self.mock_log.error.assert_called_once_with("Delete router failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py
new file mode 100644
index 000000000..0c9cf7c17
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_gateway.py
@@ -0,0 +1,27 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.delete_router_gateway import DeleteRouterGateway
+
+
+class DeleteRouterGatewayTestCase(unittest.TestCase):
+
+ @mock.patch('yardstick.common.openstack_utils.get_neutron_client')
+ @mock.patch('yardstick.common.openstack_utils.remove_gateway_router')
+ def test_delete_router_gateway(self, mock_remove_gateway_router, mock_get_neutron_client):
+ options = {
+ 'router_id': '123-123-123'
+ }
+ args = {"options": options}
+ obj = DeleteRouterGateway(args, {})
+ obj.run({})
+ mock_get_neutron_client.assert_called_once()
+ mock_remove_gateway_router.assert_called_once()
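
In test_delete_router_gateway, the stacked @mock.patch decorators bind bottom-up: the patch closest to the function signature supplies the first mock argument, which is why remove_gateway_router maps to the first parameter. A small standalone sketch of that ordering, patching os functions purely for illustration:

```python
# Stacked @mock.patch decorators are applied bottom-up, so the decorator
# closest to the function provides the first mock argument.
import os
import unittest
import mock


class PatchOrderingTestCase(unittest.TestCase):

    @mock.patch('os.getcwd')      # second argument
    @mock.patch('os.listdir')     # first argument (closest to the function)
    def test_ordering(self, mock_listdir, mock_getcwd):
        mock_listdir.return_value = ['a.txt']
        mock_getcwd.return_value = '/tmp'
        self.assertEqual(['a.txt'], os.listdir('.'))
        self.assertEqual('/tmp', os.getcwd())
```
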
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py
new file mode 100644
index 000000000..823cb951a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_router_interface.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib import delete_router_interface
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+
+
+class DeleteRouterInterfaceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_remove_router_interface = mock.patch.object(
+ openstack_utils, 'remove_router_interface')
+ self.mock_remove_router_interface = (
+ self._mock_remove_router_interface.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_router_interface, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'router': uuidutils.generate_uuid()}}
+ self.result = {}
+ self.delrout_obj = delete_router_interface.DeleteRouterInterface(
+ self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_remove_router_interface.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_remove_router_interface.return_value = True
+ self.assertIsNone(self.delrout_obj.run(self.result))
+ self.assertEqual({"delete_router_interface": 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ "Delete router interface successful!")
+
+ def test_run_fail(self):
+ self.mock_remove_router_interface.return_value = False
+ with self.assertRaises(exceptions.ScenarioRemoveRouterIntError):
+ self.delrout_obj.run(self.result)
+ self.assertEqual({"delete_router_interface": 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ "Delete router interface failed!")
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
new file mode 100644
index 000000000..55fe53df8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_server.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_server
+
+
+class DeleteServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_instance = mock.patch.object(
+ openstack_utils, 'delete_instance')
+ self.mock_delete_instance = (
+ self._mock_delete_instance.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': uuidutils.generate_uuid()
+ }}
+ self.result = {}
+
+ self.delserver_obj = delete_server.DeleteServer(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_instance.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_instance.return_value = True
+ self.assertIsNone(self.delserver_obj.run(self.result))
+ self.assertEqual({'delete_server': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete server successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_instance.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteServerError):
+ self.delserver_obj.run(self.result)
+ self.assertEqual({'delete_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
new file mode 100644
index 000000000..0db16f396
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_delete_volume.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import delete_volume
+
+
+class DeleteVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_delete_volume = mock.patch.object(
+ openstack_utils, 'delete_volume')
+ self.mock_delete_volume = (
+ self._mock_delete_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(delete_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_volume'}}
+ self.result = {}
+
+ self.delvol_obj = delete_volume.DeleteVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_delete_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_delete_volume.return_value = True
+ self.assertIsNone(self.delvol_obj.run(self.result))
+ self.assertEqual({'delete_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with('Delete volume successful!')
+
+ def test_run_fail(self):
+ self.mock_delete_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDeleteVolumeError):
+ self.delvol_obj.run(self.result)
+ self.assertEqual({'delete_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Delete volume failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
new file mode 100644
index 000000000..2bc57f495
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_detach_volume.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import detach_volume
+
+
+class DetachVolumeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self._mock_detach_volume = mock.patch.object(
+ openstack_utils, 'detach_volume')
+ self.mock_detach_volume = (
+ self._mock_detach_volume.start())
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(detach_volume, 'LOG')
+ self.mock_log = self._mock_log.start()
+ _uuid = uuidutils.generate_uuid()
+ self.args = {'options': {'server_name_or_id': _uuid,
+ 'volume_name_or_id': _uuid}}
+ self.result = {}
+
+ self.detachvol_obj = detach_volume.DetachVolume(self.args, mock.ANY)
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_detach_volume.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ self.mock_detach_volume.return_value = True
+ self.assertIsNone(self.detachvol_obj.run(self.result))
+ self.assertEqual({'detach_volume': 1}, self.result)
+ self.mock_log.info.assert_called_once_with(
+ 'Detach volume from server successful!')
+
+ def test_run_fail(self):
+ self.mock_detach_volume.return_value = False
+ with self.assertRaises(exceptions.ScenarioDetachVolumeError):
+ self.detachvol_obj.run(self.result)
+ self.assertEqual({'detach_volume': 0}, self.result)
+ self.mock_log.error.assert_called_once_with(
+ 'Detach volume from server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
new file mode 100644
index 000000000..1c1364348
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_flavor.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_flavor
+
+
+class GetFlavorTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_get_flavor = mock.patch.object(
+ openstack_utils, 'get_flavor')
+ self.mock_get_flavor = self._mock_get_flavor.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_flavor, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_flavor'}}
+ self.result = {}
+
+ self.getflavor_obj = get_flavor.GetFlavor(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_flavor.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getflavor_obj.scenario_cfg = {'output': 'flavor'}
+ self.mock_get_flavor.return_value = (
+ {'name': 'flavor-name', 'id': _uuid})
+ output = self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 1}, self.result)
+ self.assertDictEqual({'flavor': {'name': 'flavor-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get flavor successful!')
+
+ def test_run_fail(self):
+ self.mock_get_flavor.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetFlavorError):
+ self.getflavor_obj.run(self.result)
+ self.assertDictEqual({'get_flavor': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get flavor failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py
new file mode 100644
index 000000000..879b2b988
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_migrate_target_host.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.get_migrate_target_host import GetMigrateTargetHost
+
+BASE = 'yardstick.benchmark.scenarios.lib.get_migrate_target_host'
+
+
+class GetMigrateTargetHostTestCase(unittest.TestCase):
+
+ @mock.patch('{}.openstack_utils.get_nova_client'.format(BASE))
+ @mock.patch('{}.GetMigrateTargetHost._get_migrate_host'.format(BASE))
+ @mock.patch('{}.GetMigrateTargetHost._get_current_host_name'.format(BASE))
+ def test_get_migrate_target_host(self,
+ mock_get_current_host_name,
+ mock_get_migrate_host,
+ mock_get_nova_client):
+ obj = GetMigrateTargetHost({}, {})
+ obj.run({})
+ mock_get_nova_client.assert_called_once()
+ mock_get_current_host_name.assert_called_once()
+ mock_get_migrate_host.assert_called_once()
+
+ @mock.patch('{}.openstack_utils.get_nova_client'.format(BASE))
+ def test_get_migrate_host(self, mock_get_nova_client):
+ class A(object):
+ def __init__(self, service):
+ self.service = service
+ self.host = 'host4'
+
+ mock_get_nova_client().hosts.list_all.return_value = [A('compute')]
+ obj = GetMigrateTargetHost({}, {})
+ host = obj._get_migrate_host('host5')
+ mock_get_nova_client.assert_called()
+ self.assertEqual(host, 'host4')
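
test_get_migrate_host asserts that, given a current host 'host5' and a single compute service on 'host4', the scenario picks 'host4'. A hedged sketch of that selection logic, assuming it simply returns any other compute host; this is an illustration, not the real GetMigrateTargetHost code:

```python
# Hedged sketch of the host-selection logic the test exercises: pick a
# compute host other than the one the server currently runs on.
import collections

Host = collections.namedtuple('Host', ['host', 'service'])


def pick_migrate_host(hosts, current_host):
    for entry in hosts:
        if entry.service == 'compute' and entry.host != current_host:
            return entry.host
    return None


hosts = [Host(host='host4', service='compute')]
assert pick_migrate_host(hosts, 'host5') == 'host4'
assert pick_migrate_host(hosts, 'host4') is None
```
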
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py
new file mode 100644
index 000000000..bea978b8a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_numa_info.py
@@ -0,0 +1,103 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+import mock
+
+from yardstick.benchmark.scenarios.lib.get_numa_info import GetNumaInfo
+
+
+# pylint: disable=unused-argument
+# some mock arguments injected by the patch decorators are intentionally unused
+
+
+BASE = 'yardstick.benchmark.scenarios.lib.get_numa_info'
+
+
+class GetNumaInfoTestCase(unittest.TestCase):
+
+ @mock.patch('{}.GetNumaInfo._check_numa_node'.format(BASE))
+ @mock.patch('{}.GetNumaInfo._get_current_host_name'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_get_numa_info(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_current_host_name,
+ mock_check_numa_node):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_safe_load.return_value = {
+ 'nodes': []
+ }
+ obj = GetNumaInfo(scenario_cfg, {})
+ obj.run({})
+ mock_get_current_host_name.assert_called_once()
+ mock_check_numa_node.assert_called_once()
+
+ @mock.patch('yardstick.ssh.SSH.from_node')
+ @mock.patch('{}.GetNumaInfo._get_current_host_name'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_check_numa_node(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_current_host_name,
+ mock_from_node):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_safe_load.return_value = {
+ 'nodes': []
+ }
+ data = """
+ <data>
+ </data>
+ """
+ mock_from_node().execute.return_value = (0, data, '')
+ obj = GetNumaInfo(scenario_cfg, {})
+ result = obj._check_numa_node('1', 'host4')
+ self.assertEqual(result, {'pinning': [], 'vcpupin': []})
+
+ @mock.patch('{}.change_obj_to_dict'.format(BASE))
+ @mock.patch('{}.get_nova_client'.format(BASE))
+ @mock.patch('yardstick.benchmark.scenarios.lib.get_numa_info.yaml_load')
+ @mock.patch('yardstick.common.task_template.TaskTemplate.render')
+ def test_get_current_host_name(self,
+ mock_render,
+ mock_safe_load,
+ mock_get_nova_client,
+ mock_change_obj_to_dict):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'id': '1'
+ },
+ 'file': 'yardstick/ssh.py'
+ },
+ 'output': 'numa_info'
+ }
+ mock_get_nova_client().servers.get.return_value = ''
+ mock_change_obj_to_dict.return_value = {'OS-EXT-SRV-ATTR:host': 'host5'}
+
+ obj = GetNumaInfo(scenario_cfg, {})
+ result = obj._get_current_host_name('1')
+ self.assertEqual(result, 'host5')
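
test_check_numa_node shows that _check_numa_node returns empty 'pinning' and 'vcpupin' lists for a bare <data></data> document, which suggests it walks a libvirt-style XML dump for pinning elements. A rough sketch of such parsing with xml.etree; the element names here are assumptions for illustration only, not yardstick's real parser:

```python
# Illustrative XML walk: collect pinning-related elements from a
# libvirt-style dump; element names are guesses for the example.
from xml.etree import ElementTree


def parse_vcpu_pinning(xml_text):
    root = ElementTree.fromstring(xml_text)
    return {
        'vcpupin': [dict(e.attrib) for e in root.iter('vcpupin')],
        'pinning': [dict(e.attrib) for e in root.iter('numatune')],
    }


print(parse_vcpu_pinning('<data></data>'))  # {'vcpupin': [], 'pinning': []}
```
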
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
new file mode 100644
index 000000000..5b5329cb0
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+from oslo_utils import uuidutils
+import unittest
+import mock
+
+from yardstick.common import openstack_utils
+from yardstick.common import exceptions
+from yardstick.benchmark.scenarios.lib import get_server
+
+
+class GetServerTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self._mock_get_server = mock.patch.object(
+ openstack_utils, 'get_server')
+ self.mock_get_server = self._mock_get_server.start()
+ self._mock_get_shade_client = mock.patch.object(
+ openstack_utils, 'get_shade_client')
+ self.mock_get_shade_client = self._mock_get_shade_client.start()
+ self._mock_log = mock.patch.object(get_server, 'LOG')
+ self.mock_log = self._mock_log.start()
+ self.args = {'options': {'name_or_id': 'yardstick_key'}}
+ self.result = {}
+
+ self.getserver_obj = get_server.GetServer(self.args, mock.ANY)
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_get_server.stop()
+ self._mock_get_shade_client.stop()
+ self._mock_log.stop()
+
+ def test_run(self):
+ _uuid = uuidutils.generate_uuid()
+ self.getserver_obj.scenario_cfg = {'output': 'server'}
+ self.mock_get_server.return_value = (
+ {'name': 'server-name', 'id': _uuid})
+ output = self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 1}, self.result)
+ self.assertDictEqual({'server': {'name': 'server-name', 'id': _uuid}},
+ output)
+ self.mock_log.info.assert_called_once_with('Get Server successful!')
+
+ def test_run_fail(self):
+ self.mock_get_server.return_value = None
+ with self.assertRaises(exceptions.ScenarioGetServerError):
+ self.getserver_obj.run(self.result)
+ self.assertDictEqual({'get_server': 0}, self.result)
+ self.mock_log.error.assert_called_once_with('Get Server failed!')
diff --git a/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py
new file mode 100644
index 000000000..04fca16aa
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/lib/test_get_server_ip.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import unittest
+
+from yardstick.benchmark.scenarios.lib.get_server_ip import GetServerIp
+
+
+class GetServerIpTestCase(unittest.TestCase):
+ def test_get_server_ip(self):
+ scenario_cfg = {
+ 'options': {
+ 'server': {
+ 'addresses': {
+ 'net1': [
+ {
+ 'OS-EXT-IPS:type': 'floating',
+ 'addr': '127.0.0.1'
+ }
+ ]
+ }
+ }
+ },
+ 'output': 'ip'
+ }
+ obj = GetServerIp(scenario_cfg, {})
+ result = obj.run({})
+ self.assertEqual(result, {'ip': '127.0.0.1'})
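
GetServerIp, as exercised here, reduces the server's nested 'addresses' mapping to its first floating address. A hedged sketch of that traversal, not the actual yardstick implementation:

```python
# Walk the server's 'addresses' mapping and return the first floating
# address found; illustrative only.
def first_floating_ip(server):
    for addresses in server.get('addresses', {}).values():
        for entry in addresses:
            if entry.get('OS-EXT-IPS:type') == 'floating':
                return entry['addr']
    return None


server = {'addresses': {'net1': [
    {'OS-EXT-IPS:type': 'floating', 'addr': '127.0.0.1'}]}}
assert first_floating_ip(server) == '127.0.0.1'
```
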
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py b/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml
new file mode 100644
index 000000000..b8f8e5358
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/imix_voice.yaml
@@ -0,0 +1,41 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# imix definition for voice traffic
+#
+# it is a typical case for testing synthetic VNF performance.
+#
+# the percentage of packets can be less than 100%
+# the traffic in the downstream and upstream directions can differ
+
+schema: "nsb:imix:0.1"
+
+imix:
+ private:
+ imix_small: 100 #ipv4 case - 72B should be 0 ipv6 case - 84B
+ imix_128B: 0
+ imix_256B: 0
+ imix_373B: 0
+ imix_570B: 0
+ imix_1400B: 0
+ imix_1500B: 0
+
+ public:
+ imix_small: 100 #ipv4 case - 72B ipv6 - 84B
+ imix_128B: 0
+ imix_256B: 0
+ imix_373B: 0
+ imix_570B: 0
+ imix_1400B: 0
+ imix_1500B: 0
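
The imix profile above splits traffic per direction into packet-size buckets whose percentages should not exceed 100. A small illustrative check of that invariant, assuming PyYAML is available; this helper is not part of yardstick:

```python
# Illustrative sanity check that an imix definition assigns at most 100%
# of packets per direction. The inline YAML mirrors imix_voice.yaml.
import yaml

IMIX_YAML = """
imix:
  private:
    imix_small: 100
    imix_128B: 0
  public:
    imix_small: 100
    imix_128B: 0
"""

profile = yaml.safe_load(IMIX_YAML)
for direction, buckets in profile['imix'].items():
    total = sum(buckets.values())
    assert total <= 100, '%s imix exceeds 100%%: %d' % (direction, total)
```
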
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json
new file mode 100644
index 000000000..b56009ba1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output.json
@@ -0,0 +1 @@
+{"start": {"connecting_to": {"host": "172.16.0.252", "port": 5201}, "timestamp": {"timesecs": 1436254758, "time": "Tue, 07 Jul 2015 07:39:18 GMT"}, "test_start": {"protocol": "TCP", "num_streams": 1, "omit": 0, "bytes": 0, "blksize": 131072, "duration": 10, "blocks": 0, "reverse": 0}, "system_info": "Linux client 3.13.0-55-generic #94-Ubuntu SMP Thu Jun 18 00:27:10 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "version": "iperf 3.0.7", "connected": [{"local_host": "10.0.1.2", "local_port": 37633, "remote_host": "172.16.0.252", "socket": 4, "remote_port": 5201}], "cookie": "client.1436254758.606879.1fb328dc230", "tcp_mss_default": 1448}, "intervals": [{"sum": {"end": 1.00068, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false}, "streams": [{"end": 1.00068, "socket": 4, "seconds": 1.00068, "bytes": 16996624, "bits_per_second": 135881000.0, "start": 0, "retransmits": 0, "omitted": false, "snd_cwnd": 451776}]}, {"sum": {"end": 2.00048, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false}, "streams": [{"end": 2.00048, "socket": 4, "seconds": 0.999804, "bytes": 20010192, "bits_per_second": 160113000.0, "start": 1.00068, "retransmits": 0, "omitted": false, "snd_cwnd": 713864}]}, {"sum": {"end": 3.00083, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false}, "streams": [{"end": 3.00083, "socket": 4, "seconds": 1.00035, "bytes": 18330464, "bits_per_second": 146592000.0, "start": 2.00048, "retransmits": 0, "omitted": false, "snd_cwnd": 768888}]}, {"sum": {"end": 4.00707, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false}, "streams": [{"end": 4.00707, "socket": 4, "seconds": 1.00624, "bytes": 19658376, "bits_per_second": 156292000.0, "start": 3.00083, "retransmits": 0, "omitted": false, "snd_cwnd": 812328}]}, {"sum": {"end": 5.00104, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false}, "streams": [{"end": 5.00104, "socket": 4, "seconds": 0.993972, "bytes": 15709072, "bits_per_second": 126435000.0, "start": 4.00707, "retransmits": 0, "omitted": false, "snd_cwnd": 849976}]}, {"sum": {"end": 6.00049, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false}, "streams": [{"end": 6.00049, "socket": 4, "seconds": 0.999443, "bytes": 19616288, "bits_per_second": 157018000.0, "start": 5.00104, "retransmits": 53, "omitted": false, "snd_cwnd": 641464}]}, {"sum": {"end": 7.00085, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false}, "streams": [{"end": 7.00085, "socket": 4, "seconds": 1.00036, "bytes": 22250480, "bits_per_second": 177939000.0, "start": 6.00049, "retransmits": 0, "omitted": false, "snd_cwnd": 706624}]}, {"sum": {"end": 8.00476, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false}, "streams": [{"end": 8.00476, "socket": 4, "seconds": 1.00391, "bytes": 22282240, "bits_per_second": 177564000.0, "start": 7.00085, "retransmits": 0, "omitted": false, "snd_cwnd": 761648}]}, {"sum": {"end": 9.0016, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false}, "streams": [{"end": 
9.0016, "socket": 4, "seconds": 0.996847, "bytes": 19657680, "bits_per_second": 157759000.0, "start": 8.00476, "retransmits": 28, "omitted": false, "snd_cwnd": 570512}]}, {"sum": {"end": 10.0112, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false}, "streams": [{"end": 10.0112, "socket": 4, "seconds": 1.00955, "bytes": 20932520, "bits_per_second": 165876000.0, "start": 9.0016, "retransmits": 0, "omitted": false, "snd_cwnd": 615400}]}], "end": {"sum_received": {"seconds": 10.0112, "start": 0, "end": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0}, "streams": [{"sender": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "receiver": {"end": 10.0112, "socket": 4, "seconds": 10.0112, "bytes": 193366712, "bits_per_second": 154521000.0, "start": 0}}], "sum_sent": {"end": 10.0112, "seconds": 10.0112, "bytes": 195443936, "bits_per_second": 156181000.0, "start": 0, "retransmits": 81}, "cpu_utilization_percent": {"remote_user": 1.10295, "remote_system": 40.0403, "host_user": 2.41785, "remote_total": 41.1438, "host_system": 5.09548, "host_total": 7.51411}}} \ No newline at end of file
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
new file mode 100644
index 000000000..8173c8f64
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
@@ -0,0 +1 @@
+{"start":{"connected":[{"socket":4, "local_host":"10.0.1.2", "local_port":46384, "remote_host":"172.16.9.195", "remote_port":5201}], "version":"iperf 3.0.7", "system_info":"Linux zeus 3.13.0-61-generic #100-Ubuntu SMP Wed Jul 29 11:21:34 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "timestamp":{"time":"Tue, 29 Sep 2015 01:48:23 GMT", "timesecs":1443491303}, "connecting_to":{"host":"172.16.9.195", "port":5201}, "cookie":"zeus.1443491303.539703.3479129b58a5b", "test_start":{"protocol":"UDP", "num_streams":1, "blksize":8192, "omit":0, "duration":10, "bytes":0, "blocks":0, "reverse":0}}, "intervals":[{"streams":[{"socket":4, "start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}], "sum":{"start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}}, {"streams":[{"socket":4, "start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}], "sum":{"start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}], "sum":{"start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, "bits_per_second":1.99889e+07, "packets":305, "omitted":false}], "sum":{"start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, "bits_per_second":1.99889e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}], "sum":{"start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}], "sum":{"start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}], "sum":{"start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}], "sum":{"start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}}], 
"end":{"streams":[{"udp":{"socket":4, "start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}}], "sum":{"start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}, "cpu_utilization_percent":{"host_total":0.647561, "host_user":0.146468, "host_system":0.501083, "remote_total":0.31751, "remote_user":0, "remote_system":0.31751}}}
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml
new file mode 100644
index 000000000..f3046f463
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_1flow_Packets_vpe.yaml
@@ -0,0 +1,18 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+flow:
+ srcip4_range: '152.16.0.20'
+ dstip4_range: '152.40.0.20'
+ count: 1
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml
new file mode 100644
index 000000000..2123e4705
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/ipv4_throughput_vpe.yaml
@@ -0,0 +1,101 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# flow definition for ACL tests - 1K flows - ipv4 only
+#
+# the number of flows defines the widest range of parameters
+# for example, if srcip_range=1.0.0.1-1.0.0.255 and dst_ip_range=10.0.0.1-10.0.1.255,
+# it should still define only 16 flows
+#
+# the assumption is that the generated packets carry a random sequence of the
+# following address pairs:
+# 1. src=1.x.x.x (x.x.x random from 1..255) dst=10.x.x.x (random from 1..512)
+# 2. src=1.x.x.x (x.x.x random from 1..255) dst=10.x.x.x (random from 1..512)
+# ...
+# 512. src=1.x.x.x (x.x.x random from 1..255) dst=10.x.x.x (random from 1..512)
+#
+# not all combinations need to be filled
+# any other field with a random range will be added to the flow definition
+#
+# the example.yaml provides all possibilities for traffic generation
+#
+# the profile defines a public and a private side to allow limited traffic
+# correlation between them, in the same way as the IXIA solution does.
+#
+schema: "isb:traffic_profile:0.1"
+
+# This file is a template; it is filled with values from tc.yaml before being passed to the traffic generator
+
+name: rfc2544
+description: Traffic profile to run RFC2544 latency
+traffic_profile:
+ traffic_type : RFC2544Profile # defines traffic behavior - constant or look for highest possible throughput
+ frame_rate : 100 # percentage of line rate
+ # that specifies a range (e.g. ipv4 address, port)
+
+
+private:
+ ipv4:
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.uplink.imix_small', '0') }}"
+ 128B: "{{ get(imix, 'imix.uplink.imix_128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.uplink.imix_256B', '0') }}"
+ 373b: "{{ get(imix, 'imix.uplink.imix_373B', '0') }}"
+ 570B: "{{get(imix, 'imix.uplink.imix_570B', '0') }}"
+ 1400B: "{{get(imix, 'imix.uplink.imix_1400B', '0') }}"
+ 1518B: "{{get(imix, 'imix.uplink.imix_1500B', '0') }}"
+
+ QinQ:
+ S-VLAN:
+ id: 128
+ priority: 0
+ cfi: 0
+ C-VLAN:
+ id: 512
+ priority: 0
+ cfi: 0
+
+ outer_l3v4:
+ proto: "tcp"
+ srcip4: "{{get(flow, 'flow.srcip4_range', '192.168.0.0-192.168.255.255') }}"
+ dstip4: "{{get(flow, 'flow.dstip4_range', '192.16.0.0-192.16.0.31') }}"
+ ttl: 32
+ dscp: 32
+
+ outer_l4:
+ srcport: "{{get(flow, 'flow.srcport_range', '0') }}"
+ dstport: "{{get(flow, 'flow.dstport_range', '0') }}"
+public:
+ ipv4:
+ outer_l2:
+ framesize:
+ 64B: "{{ get(imix, 'imix.uplink.imix_small', '0') }}"
+ 128B: "{{ get(imix, 'imix.uplink.imix_128B', '0') }}"
+ 256B: "{{ get(imix, 'imix.uplink.imix_256B', '0') }}"
+ 373b: "{{ get(imix, 'imix.uplink.imix_373B', '0') }}"
+ 570B: "{{get(imix, 'imix.uplink.imix_570B', '0') }}"
+ 1400B: "{{get(imix, 'imix.uplink.imix_1400B', '0') }}"
+ 1518B: "{{get(imix, 'imix.uplink.imix_1500B', '0') }}"
+
+ outer_l3v4:
+ proto: "tcp"
+ srcip4: "{{get(flow, 'flow.dstip4_range', '192.16.0.0-192.16.0.31') }}"
+ dstip4: "{{get(flow, 'flow.srcip4_range', '192.168.0.0-192.168.255.255') }}"
+ ttl: 32
+ dscp: 32
+
+ outer_l4:
+ srcport: "{{get(flow, 'flow.dstport_range', '0') }}"
+ dstport: "{{get(flow, 'flow.srcport_range', '0') }}"
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
new file mode 100755
index 000000000..bba76cfa5
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
@@ -0,0 +1 @@
+{"mean_latency":"9.49","troughput":"823.77","troughput_unit":"10^6bits/s"} \ No newline at end of file
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt
new file mode 100644
index 000000000..f90457cb3
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output1.txt
@@ -0,0 +1,9 @@
+Linux 3.19.0-25-generic (huawei-pod4) 07/19/2016 _x86_64_ (1 CPU)
+
+02:01:50 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:51 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:51 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+Average: IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+Average: eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+Average: lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt
new file mode 100644
index 000000000..417613ec1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/netutilization_sample_output2.txt
@@ -0,0 +1,13 @@
+Linux 3.19.0-25-generic (huawei-pod4) 07/19/2016 _x86_64_ (1 CPU)
+
+02:01:50 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:51 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:51 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+02:01:52 PM IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+02:01:53 PM eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+02:01:53 PM lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+
+Average: IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
+Average: eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
+Average: lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
new file mode 100644
index 000000000..5f342df7d
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -0,0 +1,190 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.common import utils
+from yardstick.benchmark.scenarios.networking import iperf3
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch.object(iperf3, 'ssh')
+class IperfTestCase(unittest.TestCase):
+ output_name_tcp = 'iperf3_sample_output.json'
+ output_name_udp = 'iperf3_sample_output_udp.json'
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138',
+ }
+ }
+ self._mock_log_info = mock.patch.object(iperf3.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self._mock_log_info.stop()
+
+ def test_iperf_successful_setup(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.target)
+ self.assertIsNotNone(p.host)
+ mock_ssh.SSH.from_node().execute.assert_called_with("iperf3 -s -D")
+
+ def test_iperf_unsuccessful_setup(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.setup)
+
+ def test_iperf_successful_teardown(self, mock_ssh):
+ p = iperf3.Iperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+ p.target = mock_ssh.SSH.from_node()
+
+ p.teardown()
+ mock_ssh.SSH.from_node().close.assert_called()
+ mock_ssh.SSH.from_node().execute.assert_called_with("pkill iperf3")
+
+ def test_iperf_successful_no_sla(self, mock_ssh):
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_successful_sla(self, mock_ssh):
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 15000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_sla(self, mock_ssh):
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 25000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_iperf_successful_sla_jitter(self, mock_ssh):
+ options = {"protocol": "udp", "bandwidth": "20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 10}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_udp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
+ options = {"protocol": "udp", "bandwidth": "20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 0.0001}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_udp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+    def test_iperf_successful_tcp_protocol(self, mock_ssh):
+ options = {"protocol": "tcp", "nodelay": "yes"}
+ args = {
+ 'options': options,
+ 'sla': {'bytes_per_second': 15000000}
+ }
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = utils.flatten_dict_key(jsonutils.loads(sample_output))
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_script_error(self, mock_ssh):
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ @staticmethod
+ def _read_sample_output(filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
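
The class-level @mock.patch.object(iperf3, 'ssh') decorator is what hands mock_ssh to every test method, and because mock attribute access and call results are memoized, each mock_ssh.SSH.from_node() call returns the same child mock; that is why configuring execute.return_value once is enough even after p.host = mock_ssh.SSH.from_node(). A minimal standalone sketch of that pattern (the FakeScenario class is illustrative, not part of yardstick):

import unittest
import unittest.mock as mock

class FakeScenario(object):
    """Stand-in for a scenario that shells out over a patched ssh module."""
    def __init__(self, ssh_module):
        self.client = ssh_module.SSH.from_node()

    def run(self):
        status, stdout, _ = self.client.execute("iperf3 -c host -J")
        if status:
            raise RuntimeError(stdout)
        return stdout

class FakeScenarioTest(unittest.TestCase):
    def test_run(self):
        ssh_module = mock.MagicMock()
        # The same child mock is returned on every from_node() call, so the
        # return value configured here is what FakeScenario.run() sees.
        ssh_module.SSH.from_node().execute.return_value = (0, '{"ok": 1}', '')
        self.assertEqual(FakeScenario(ssh_module).run(), '{"ok": 1}')
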
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py
new file mode 100644
index 000000000..620155c7e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_moongen_testpmd.py
@@ -0,0 +1,353 @@
+#!/usr/bin/env python
+
+# Copyright 2017 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Unittest for yardstick.benchmark.scenarios.networking.moongen_testpmd.MoongenTestPMD
+
+from __future__ import absolute_import
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import moongen_testpmd
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.moongen_testpmd.subprocess')
+class MoongenTestPMDTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.TestPMDargs = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'multistream': 1,
+ 'frame_size': 1024,
+ 'testpmd_queue': 2,
+ 'trafficgen_port1': 'ens5',
+ 'trafficgen_port2': 'ens6',
+ 'moongen_host_user': 'root',
+ 'moongen_host_passwd': 'root',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_dir': '/home/lua-trafficgen',
+ 'moongen_runBidirec': 'true',
+ 'Package_Loss': 0,
+ 'SearchRuntime': 60,
+ 'moongen_port1_mac': '88:cf:98:2f:4d:ed',
+ 'moongen_port2_mac': '88:cf:98:2f:4d:ee',
+ 'forward_type': 'testpmd',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_mpps',
+ 'throughput_rx_mpps': 0.5,
+ 'action': 'monitor',
+ }
+ }
+ self.L2fwdargs = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'multistream': 1,
+ 'frame_size': 1024,
+ 'testpmd_queue': 2,
+ 'trafficgen_port1': 'ens5',
+ 'trafficgen_port2': 'ens6',
+ 'moongen_host_user': 'root',
+ 'moongen_host_passwd': 'root',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_dir': '/home/lua-trafficgen',
+ 'moongen_runBidirec': 'true',
+ 'Package_Loss': 0,
+ 'SearchRuntime': 60,
+ 'moongen_port1_mac': '88:cf:98:2f:4d:ed',
+ 'moongen_port2_mac': '88:cf:98:2f:4d:ee',
+ 'forward_type': 'l2fwd',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_mpps',
+ 'throughput_rx_mpps': 0.5,
+ 'action': 'monitor',
+ }
+ }
+
+ self._mock_ssh = mock.patch(
+ 'yardstick.benchmark.scenarios.networking.moongen_testpmd.ssh')
+ self.mock_ssh = self._mock_ssh.start()
+
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_ssh.stop()
+
+ def test_MoongenTestPMD_setup(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_MoongenTestPMD_teardown(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.teardown()
+ self.assertFalse(p.setup_done)
+
+ def test_MoongenTestPMD_l2fwd_is_forward_setup_no(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ result = p._is_forward_setup()
+ self.assertFalse(result)
+
+ def test_MoongenTestPMD_l2fwd_is_forward_setup_yes(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ result = p._is_forward_setup()
+ self.assertTrue(result)
+
+ def test_MoongenTestPMD_testpmd_is_forward_setup_no(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ result = p._is_forward_setup()
+ self.assertFalse(result)
+
+ def test_MoongenTestPMD_testpmd_is_forward_setup_yes(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ result = p._is_forward_setup()
+ self.assertTrue(result)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_testpmd_forward_setup_first(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+
+ p.forward_setup()
+ self.assertFalse(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_testpmd_dpdk_setup_next(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.forward_setup()
+ self.assertTrue(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_l2fwd_forward_setup_first(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+        # _is_forward_setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.forward_setup()
+ self.assertFalse(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ @mock.patch('time.sleep')
+ def test_MoongenTestPMD_l2fwd_dpdk_setup_next(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.L2fwdargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, 'dummy', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ p.forward_setup()
+ self.assertTrue(p._is_forward_setup())
+ self.assertTrue(p.forward_setup_done)
+
+ def test_moongen_testpmd_generate_config_file(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ mock_subprocess.call().execute.return_value = None
+
+ p.generate_config_file(frame_size=1, multistream=1,
+ runBidirec="True", tg_port1_vlan=1,
+ tg_port2_vlan=2, SearchRuntime=1,
+ Package_Loss=0)
+ self.assertTrue(p.CONFIG_FILE)
+
+ def test_moongen_testpmd_result_to_data_match(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ mock_subprocess.call().execute.return_value = None
+ result = ("[REPORT]Device 1->0: Tx frames: 420161490 Rx Frames: 420161490"
+ " frame loss: 0, 0.000000% Rx Mpps: 7.002708\n[REPORT] "
+ "total: Tx frames: 840321216 Rx Frames: 840321216 frame loss: "
+ "0, 0.000000% Tx Mpps: 14.005388 Rx Mpps: 14.005388\n'")
+ p.result_to_data(result=result)
+ self.assertTrue(p.TO_DATA)
+
+ def test_moongen_testpmd_result_to_data_not_match(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ mock_subprocess.call().execute.return_value = None
+ result = ("")
+ p.result_to_data(result=result)
+ self.assertTrue(p.TO_DATA)
+
+ @mock.patch('time.sleep')
+ def test_moongen_testpmd_run_ok(self, _, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+ p.setup_done = True
+ p.forward_setup_done = True
+ p.setup()
+
+ # run() specific mocks
+ p.server = self.mock_ssh.SSH.from_node()
+ mock_subprocess.call().execute.return_value = None
+ mock_subprocess.call().execute.return_value = None
+ result = ("[REPORT]Device 1->0: Tx frames: 420161490 Rx Frames: 420161490"
+ " frame loss: 0, 0.000000% Rx Mpps: 7.002708\n[REPORT] "
+ "total: Tx frames: 840321216 Rx Frames: 840321216 frame loss: "
+ "0, 0.000000% Tx Mpps: 14.005388 Rx Mpps: 14.005388\n'")
+ self.mock_ssh.SSH.from_node().execute.return_value = (
+ 0, result, '')
+
+ test_result = {}
+ p.run(test_result)
+
+ self.assertEqual(test_result['rx_mpps'], 14.005388)
+
+    def test_moongen_testpmd_run_failed_vsperf_execution(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # run() specific mocks
+ mock_subprocess.call().execute.return_value = None
+ mock_subprocess.call().execute.return_value = None
+ self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+
+ result = {}
+ self.assertRaises(RuntimeError, p.run, result)
+
+    def test_moongen_testpmd_run_failed_csv_report(self, mock_subprocess):
+ p = moongen_testpmd.MoongenTestPMD(self.TestPMDargs, self.ctx)
+
+ # setup() specific mocks
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ mock_subprocess.call().execute.return_value = None
+
+ p.setup()
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ # run() specific mocks
+ mock_subprocess.call().execute.return_value = None
+ mock_subprocess.call().execute.return_value = None
+ self.mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_ssh.SSH.from_node().execute.return_value = (1, '', '')
+
+ result = {}
+ self.assertRaises(RuntimeError, p.run, result)
+
+def main():
+ unittest.main()
+
+
+if __name__ == '__main__':
+ main()
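
result_to_data() receives the raw MoonGen "[REPORT]" lines, and test_moongen_testpmd_run_ok shows the aggregate Rx rate ending up in the result dict as rx_mpps (14.005388 for this fixture). A hedged sketch of how such a line can be reduced to that number; the regex below is illustrative and is not copied from moongen_testpmd.py:

import re

REPORT = ("[REPORT]Device 1->0: Tx frames: 420161490 Rx Frames: 420161490"
          " frame loss: 0, 0.000000% Rx Mpps: 7.002708\n[REPORT] "
          "total: Tx frames: 840321216 Rx Frames: 840321216 frame loss: "
          "0, 0.000000% Tx Mpps: 14.005388 Rx Mpps: 14.005388\n")

def parse_rx_mpps(report):
    # Take the aggregate "total:" line and pull the Rx Mpps figure out of it.
    match = re.search(r"total:.*Rx Mpps:\s*([\d.]+)", report)
    return float(match.group(1)) if match else None

assert parse_rx_mpps(REPORT) == 14.005388
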
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
new file mode 100755
index 000000000..a7abcd98a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -0,0 +1,122 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import netperf
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf.ssh')
+class NetperfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+
+ def test_netperf_successful_setup(self, mock_ssh):
+
+ p = netperf.Netperf({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_netperf_successful_no_sla(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_successful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 100}
+ }
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_unsuccessful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 5}
+ }
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_netperf_unsuccessful_script_error(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, 'netperf_sample_output.json')
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
new file mode 100755
index 000000000..a577dba59
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netperf_node.py
@@ -0,0 +1,122 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netperf_node.NetperfNode
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import netperf_node
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf_node.ssh')
+class NetperfNodeTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '192.168.10.10',
+ 'user': 'root',
+ 'password': 'root'
+ },
+ 'target': {
+ 'ip': '192.168.10.11',
+ 'user': 'root',
+ 'password': 'root'
+ }
+ }
+
+ def test_netperf_node_successful_setup(self, mock_ssh):
+
+ p = netperf_node.NetperfNode({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_netperf_node_successful_no_sla(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_node_successful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 100}
+ }
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ expected_result = jsonutils.loads(sample_output)
+ p.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_node_unsuccessful_sla(self, mock_ssh):
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 5}
+ }
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_netperf_node_unsuccessful_script_error(self, mock_ssh):
+
+ options = {}
+ args = {'options': options}
+ result = {}
+
+ p = netperf_node.NetperfNode(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, 'netperf_sample_output.json')
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py
new file mode 100644
index 000000000..4cdfde6b1
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_netutilization.py
@@ -0,0 +1,225 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.netutilization.NetUtilization
+
+from __future__ import absolute_import
+import mock
+import unittest
+import os
+
+from yardstick.benchmark.scenarios.networking import netutilization
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netutilization.ssh')
+class NetUtilizationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ def test_setup_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ n.setup()
+ self.assertIsNotNone(n.client)
+ self.assertTrue(n.setup_done)
+
+ def test_execute_command_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ expected_result = 'abcdefg'
+ mock_ssh.SSH.from_node().execute.return_value = (0, expected_result, '')
+ result = n._execute_command("foo")
+ self.assertEqual(result, expected_result)
+
+ def test_execute_command_failed(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (127, '', 'abcdefg')
+ self.assertRaises(RuntimeError, n._execute_command,
+ "failed")
+
+ def test_get_network_utilization_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 1
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mpstat_output = self._read_file("netutilization_sample_output1.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = n._get_network_utilization()
+
+ expected_result = \
+ {"network_utilization_maximun": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_average": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_minimum": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def test_get_network_utilization_2_success(self, mock_ssh):
+ options = {
+ "interval": 1,
+ "count": 2
+ }
+ args = {'options': options}
+
+ n = netutilization.NetUtilization(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ n.setup()
+
+ mpstat_output = self._read_file("netutilization_sample_output2.txt")
+ mock_ssh.SSH.from_node().execute.return_value = (0, mpstat_output, '')
+ result = n._get_network_utilization()
+
+ expected_result = \
+ {"network_utilization_maximun": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_average": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}},
+ "network_utilization_minimum": {
+ "lo": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"},
+ "eth0": {"rxcmp/s": "0.00",
+ "%ifutil": "0.00",
+ "txcmp/s": "0.00",
+ "txkB/s": "0.00",
+ "rxkB/s": "0.00",
+ "rxpck/s": "0.00",
+ "txpck/s": "0.00",
+ "rxmcst/s": "0.00"}}}
+
+ self.assertDictEqual(result, expected_result)
+
+ def _read_file(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, filename)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
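
The expected dicts above are simply the "Average:" rows of the sar-style fixtures re-keyed per interface; since every sample value is 0.00, the maximum, minimum and average blocks come out identical. The "network_utilization_maximun" spelling mirrors whatever the scenario emits and is compared verbatim by assertDictEqual, so it stays as-is. A rough sketch of turning one "Average:" block into that nested mapping; the parsing below is illustrative, not the NetUtilization implementation:

FIELDS = ["rxpck/s", "txpck/s", "rxkB/s", "txkB/s",
          "rxcmp/s", "txcmp/s", "rxmcst/s", "%ifutil"]

def parse_average_block(text):
    # Collect the per-interface rows that follow the "Average: IFACE" header.
    result = {}
    for line in text.splitlines():
        parts = line.split()
        if len(parts) == 10 and parts[0] == "Average:" and parts[1] != "IFACE":
            result[parts[1]] = dict(zip(FIELDS, parts[2:]))
    return result

sample = """Average: IFACE rxpck/s txpck/s rxkB/s txkB/s rxcmp/s txcmp/s rxmcst/s %ifutil
Average: eth0 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00
Average: lo 0.00 0.00 0.00 0.00 0.00 0.00 0.00 0.00"""

print(parse_average_block(sample)["eth0"]["%ifutil"])  # "0.00"
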
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
new file mode 100644
index 000000000..36e8c8a77
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_networkcapacity.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.networkcapacity.NetworkCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.networking import networkcapacity
+
+SAMPLE_OUTPUT = \
+ '{"Number of connections":"308","Number of frames received": "166503"}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.networkcapacity.ssh')
+class NetworkCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'password': "root"
+ },
+ }
+
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_successful(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = networkcapacity.NetworkCapacity({}, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py
new file mode 100644
index 000000000..b02d58437
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_nstat.py
@@ -0,0 +1,105 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import nstat
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.nstat.ssh')
+class NstatTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
+ }
+
+ def test_nstat_successful_setup(self, mock_ssh):
+
+ n = nstat.Nstat({}, self.ctx)
+ n.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(n.client)
+ self.assertTrue(n.setup_done)
+
+ def test_nstat_successful_no_sla(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ args = {
+ "options": options,
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ sample_output = '#kernel\nIpInReceives 1837 0.0\nIpInHdrErrors 0 0.0\nIpInAddrErrors 2 0.0\nIcmpInMsgs 319 0.0\nIcmpInErrors 0 0.0\nTcpInSegs 36 0.0\nTcpInErrs 0 0.0\nUdpInDatagrams 1318 0.0\nUdpInErrors 0 0.0\n' # pylint: disable=line-too-long
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ n.run(result)
+ expected_result = {"TcpInErrs": 0, "UdpInDatagrams": 1318,
+ "Tcp_segment_error_rate": 0.0, "IpInAddrErrors": 2,
+ "IpInHdrErrors": 0, "IcmpInErrors": 0, "IpErrors": 2,
+ "TcpInSegs": 36, "IpInReceives": 1837, "IcmpInMsgs": 319,
+ "IP_datagram_error_rate": 0.001, "Udp_datagram_error_rate": 0.0,
+ "Icmp_message_error_rate": 0.0, "UdpInErrors": 0}
+ self.assertEqual(result, expected_result)
+
+ def test_nstat_successful_sla(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ sla = {
+ "IP_datagram_error_rate": 0.1
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ sample_output = '#kernel\nIpInReceives 1837 0.0\nIpInHdrErrors 0 0.0\nIpInAddrErrors 2 0.0\nIcmpInMsgs 319 0.0\nIcmpInErrors 0 0.0\nTcpInSegs 36 0.0\nTcpInErrs 0 0.0\nUdpInDatagrams 1318 0.0\nUdpInErrors 0 0.0\n' # pylint: disable=line-too-long
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ n.run(result)
+ expected_result = {"TcpInErrs": 0, "UdpInDatagrams": 1318,
+ "Tcp_segment_error_rate": 0.0, "IpInAddrErrors": 2,
+ "IpInHdrErrors": 0, "IcmpInErrors": 0, "IpErrors": 2,
+ "TcpInSegs": 36, "IpInReceives": 1837, "IcmpInMsgs": 319,
+ "IP_datagram_error_rate": 0.001, "Udp_datagram_error_rate": 0.0,
+ "Icmp_message_error_rate": 0.0, "UdpInErrors": 0}
+ self.assertEqual(result, expected_result)
+
+ def test_nstat_unsuccessful_cmd_error(self, mock_ssh):
+
+ options = {
+ "duration": 0
+ }
+ sla = {
+ "IP_datagram_error_rate": 0.1
+ }
+ args = {
+ "options": options,
+ "sla": sla
+ }
+ n = nstat.Nstat(args, self.ctx)
+ result = {}
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, n.run, result)
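
The expected dict implies the derived metrics are a plain errors-over-received ratio, rounded: with IpInHdrErrors 0 and IpInAddrErrors 2 against IpInReceives 1837, the IP error rate lands on 0.001, while the TCP, UDP and ICMP counters carry zero errors. A worked check of that arithmetic; the rounding to three decimals is inferred from the expected values, not read from nstat.py:

counters = {"IpInReceives": 1837, "IpInHdrErrors": 0, "IpInAddrErrors": 2,
            "IcmpInMsgs": 319, "IcmpInErrors": 0,
            "TcpInSegs": 36, "TcpInErrs": 0,
            "UdpInDatagrams": 1318, "UdpInErrors": 0}

ip_errors = counters["IpInHdrErrors"] + counters["IpInAddrErrors"]      # 2
ip_rate = round(float(ip_errors) / counters["IpInReceives"], 3)         # 0.001
icmp_rate = round(float(counters["IcmpInErrors"]) / counters["IcmpInMsgs"], 3)
tcp_rate = round(float(counters["TcpInErrs"]) / counters["TcpInSegs"], 3)
udp_rate = round(float(counters["UdpInErrors"]) / counters["UdpInDatagrams"], 3)

assert (ip_rate, icmp_rate, tcp_rate, udp_rate) == (0.001, 0.0, 0.0, 0.0)
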
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
new file mode 100644
index 000000000..944202658
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping.py
@@ -0,0 +1,107 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.ping.Ping
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import ping
+from yardstick.common import exceptions as y_exc
+
+
+class PingTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
+ }
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_successful_no_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt.ares': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_successful_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 150},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt.ares': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_script_error(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
+ def test_ping_unsuccessful_no_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'target': 'ares.demo'
+ }
+ result = {}
+
+ p = ping.Ping(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
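
The asserted key rtt.ares is derived from the scenario's target name 'ares.demo': the short host label is paired with the rtt metric and the stdout string '100' is coerced to a float. A tiny sketch of that key and value shaping, under the assumption (consistent with these assertions, not confirmed against ping.py) that only the first label of the target name is used:

def rtt_result(target_name, stdout):
    # 'ares.demo' -> key 'rtt.ares'; ping output '100' -> 100.0
    key = "rtt.%s" % target_name.split('.')[0]
    return {key: float(stdout)}

assert rtt_result('ares.demo', '100') == {'rtt.ares': 100.0}
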
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
new file mode 100644
index 000000000..ad5217a14
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_ping6.py
@@ -0,0 +1,117 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.ping6.Ping6
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import ping6
+from yardstick.common import exceptions as y_exc
+
+
+class Ping6TestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'nodes': {
+ 'host1': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'role': "Controller",
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ 'host2': {
+ "ip": "172.16.0.138",
+ "key_filename": "/root/.ssh/id_rsa",
+ "role": "Compute",
+ "name": "node3.IPV6",
+ "user": "root"
+ },
+ }
+ }
+
+ def test_get_controller_node(self):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ p = ping6.Ping6(args, self.ctx)
+ controller_node = p._get_controller_node(['host1', 'host2'])
+ self.assertEqual(controller_node, 'host1')
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_setup(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ p = ping6.Ping6(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '0', '')
+ p.setup()
+
+ self.assertTrue(p.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_no_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 150}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_sla(self, mock_ssh):
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 50}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [(0, 'host1', ''), (0, 100, '')]
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_script_error(self, mock_ssh):
+
+ args = {
+ 'options': {'host': 'host1', 'packetsize': 200, 'ping_count': 5},
+ 'sla': {'max_rtt': 150}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH.from_node()
+ mock_ssh.SSH.from_node().execute.side_effect = [
+ (0, 'host1', ''), (1, '', 'FOOBAR')]
+ self.assertRaises(RuntimeError, p.run, result)
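
Unlike test_ping.py, these tests configure execute.side_effect with a list, because Ping6.run() apparently shells out twice: once to find the controller node (returning 'host1') and once for the ping itself. With side_effect, each call consumes the next tuple in order, which is standard mock behaviour; a minimal reminder of that semantics:

import unittest.mock as mock

execute = mock.Mock(side_effect=[(0, 'host1', ''), (0, 100, '')])

assert execute('find controller') == (0, 'host1', '')   # first call
assert execute('run ping6') == (0, 100, '')             # second call
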
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
new file mode 100644
index 000000000..5761e2403
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen.py
@@ -0,0 +1,453 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import logging
+
+from oslo_serialization import jsonutils
+
+from yardstick import ssh
+from yardstick.benchmark.scenarios.networking import pktgen
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
+
+
+class PktgenTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
+ }
+
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
+
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+ self.mock_SSH.from_node().run.return_value = 0
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ self.scenario.setup()
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+
+ def test_setup_successful(self):
+ self.assertIsNotNone(self.scenario.server)
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_iptables_setup_successful(self):
+ self.scenario.number_of_ports = 10
+ self.scenario._iptables_setup()
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "sudo iptables -F; "
+ "sudo iptables -A INPUT -p udp --dport 1000:%s -j DROP"
+ % 1010, timeout=60)
+
+ def test_iptables_setup_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_setup()
+
+ def test_iptables_get_result_successful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.return_value = (0, '150000', '')
+
+ result = self.scenario._iptables_get_result()
+
+ self.assertEqual(result, 150000)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo iptables -L INPUT -vnx |"
+ "awk '/dpts:1000:%s/ {{printf \"%%s\", $1}}'"
+ % 1010, raise_on_error=True)
+
+ def test_iptables_get_result_unsuccessful(self):
+ self.scenario.number_of_ports = 10
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._iptables_get_result()
+
+ def test_run_successful_no_sla(self):
+ self.scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ self.scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_successful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 10000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110,
+ "ppm": 3179})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_run_unsuccessful_sla(self):
+ self.scenario_cfg['sla'] = {'max_ppm': 1000}
+ scenario = pktgen.Pktgen(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149776,
+ "packetsize": 60,
+ "flows": 110})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
+
+ def test_run_ssh_error_not_caught(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
+
+ def test_get_vnic_driver_name(self):
+ self.mock_SSH.from_node().execute.return_value = (0, 'ixgbevf', '')
+ vnic_driver_name = self.scenario._get_vnic_driver_name()
+
+ self.assertEqual(vnic_driver_name, 'ixgbevf')
+
+ def test_get_vnic_driver_name_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_vnic_driver_name()
+
+ def test_get_sriov_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
+
+ self.scenario.queue_number = self.scenario._get_sriov_queue_number()
+ self.assertEqual(self.scenario.queue_number, 2)
+
+ def test_get_sriov_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_sriov_queue_number()
+
+ def test_get_available_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
+
+ self.assertEqual(self.scenario._get_available_queue_number(), 4)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo ethtool -l eth0 | grep Combined | head -1 |"
+ "awk '{printf $2}'", raise_on_error=True)
+
+ def test_get_available_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_available_queue_number()
+
+ def test_get_usable_queue_number(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+
+ self.assertEqual(self.scenario._get_usable_queue_number(), 1)
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "sudo ethtool -l eth0 | grep Combined | tail -1 |"
+ "awk '{printf $2}'", raise_on_error=True)
+
+ def test_get_usable_queue_number_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._get_usable_queue_number()
+
+ def test_enable_ovs_multiqueue(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
+
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.mock_SSH.from_node().run.assert_has_calls(
+ (mock.call("sudo ethtool -L eth0 combined 4"),
+ mock.call("sudo ethtool -L eth0 combined 4")))
+
+ def test_enable_ovs_multiqueue_1q(self):
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
+ self.scenario.queue_number = self.scenario._enable_ovs_multiqueue()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.mock_SSH.from_node().run.assert_not_called()
+
+ def test_enable_ovs_multiqueue_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._enable_ovs_multiqueue()
+
+ def test_setup_irqmapping_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(4)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 8 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_ovs(1)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 1 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_ovs_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(4)
+
+ def test_setup_irqmapping_ovs_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_ovs(1)
+
+ def test_setup_irqmapping_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(2)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 2 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '10', '')
+ self.scenario._setup_irqmapping_sriov(1)
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "echo 1 | sudo tee /proc/irq/10/smp_affinity")
+
+ def test_setup_irqmapping_sriov_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(2)
+
+ def test_setup_irqmapping_sriov_1q_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._setup_irqmapping_sriov(1)
+
+ def test_is_irqbalance_disabled(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+
+ self.assertFalse(self.scenario._is_irqbalance_disabled())
+ self.mock_SSH.from_node().execute.assert_called_with(
+ "grep ENABLED /etc/default/irqbalance", raise_on_error=True)
+
+ def test_is_irqbalance_disabled_unsuccessful(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._is_irqbalance_disabled()
+
+ def test_disable_irqbalance(self):
+ self.scenario._disable_irqbalance()
+
+ self.mock_SSH.from_node().run.assert_called_with(
+ "sudo service irqbalance disable")
+
+ def test_disable_irqbalance_unsuccessful(self):
+ self.mock_SSH.from_node().run.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario._disable_irqbalance()
+
+ def test_multiqueue_setup_ovs(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '4', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=4)
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 4)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_ovs_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(
+ return_value="virtio_net")
+ self.scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ self.scenario._get_available_queue_number = mock.Mock(return_value=1)
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_sriov(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '2', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 2)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_multiqueue_setup_sriov_1q(self):
+ self.mock_SSH.from_node().execute.return_value = (0, '1', '')
+ self.scenario._is_irqbalance_disabled = mock.Mock(return_value=False)
+ self.scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+
+ self.scenario.multiqueue_setup()
+
+ self.assertEqual(self.scenario.queue_number, 1)
+ self.assertTrue(self.scenario.multiqueue_setup_done)
+
+ def test_run_with_setup_done(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {
+ 'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.server = self.mock_SSH.from_node()
+ scenario.client = self.mock_SSH.from_node()
+ scenario.setup_done = True
+ scenario.multiqueue_setup_done = True
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+    def test_run_with_ovs_multiqueue(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
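+        # Stub the queue helpers so run() follows the OVS multiqueue path,
+        # scaling from one usable queue to four available queues.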
+ scenario._get_vnic_driver_name = mock.Mock(return_value="virtio_net")
+ scenario._get_usable_queue_number = mock.Mock(return_value=1)
+ scenario._get_available_queue_number = mock.Mock(return_value=4)
+ scenario._enable_ovs_multiqueue = mock.Mock(return_value=4)
+ scenario._setup_irqmapping_ovs = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+    def test_run_with_sriov_multiqueue(self):
+ scenario_cfg = {
+ 'options': {
+ 'packetsize': 60,
+ 'number_of_ports': 10,
+ 'duration': 20,
+ 'multiqueue': True},
+ 'sla': {'max_ppm': 1}
+ }
+ scenario = pktgen.Pktgen(scenario_cfg, self.context_cfg)
+ scenario.setup()
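+        # A VF driver name ("ixgbevf") steers run() onto the SR-IOV
+        # multiqueue path with two queues.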
+ scenario._get_vnic_driver_name = mock.Mock(return_value="ixgbevf")
+ scenario._get_sriov_queue_number = mock.Mock(return_value=2)
+ scenario._setup_irqmapping_sriov = mock.Mock()
+ scenario._iptables_get_result = mock.Mock(return_value=149300)
+
+ sample_output = jsonutils.dumps({"packets_per_second": 9753,
+ "errors": 0,
+ "packets_sent": 149300,
+ "flows": 110,
+ "ppm": 0})
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
new file mode 100644
index 000000000..70cd8ad40
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk.py
@@ -0,0 +1,136 @@
+##############################################################################
+# Copyright (c) 2015 ZTE and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import mock
+import unittest
+import time
+import logging
+
+import yardstick.common.utils as utils
+from yardstick import ssh
+from yardstick.benchmark.scenarios.networking import pktgen_dpdk
+from yardstick.common import exceptions as y_exc
+
+
+logging.disable(logging.CRITICAL)
+
+
+class PktgenDPDKLatencyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
+ }
+ self.scenario_cfg = {
+ 'options': {'packetsize': 60}
+ }
+
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
+
+ self._mock_time_sleep = mock.patch.object(time, 'sleep')
+ self.mock_time_sleep = self._mock_time_sleep.start()
+
+ self._mock_utils_get_port_ip = mock.patch.object(utils, 'get_port_ip')
+ self.mock_utils_get_port_ip = self._mock_utils_get_port_ip.start()
+
+ self._mock_utils_get_port_mac = mock.patch.object(utils,
+ 'get_port_mac')
+ self.mock_utils_get_port_mac = self._mock_utils_get_port_mac.start()
+
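+        # Default: every SSH command succeeds and returns empty output.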
+ self.mock_SSH.from_node().execute.return_value = (0, '', '')
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ self.scenario.server = self.mock_SSH.from_node()
+ self.scenario.client = self.mock_SSH.from_node()
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_time_sleep.stop()
+ self._mock_utils_get_port_ip.stop()
+ self._mock_utils_get_port_mac.stop()
+
+ def test_setup(self):
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+ scenario.setup()
+
+ self.assertIsNotNone(scenario.server)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_run_get_port_ip_command(self):
+ self.scenario.run({})
+
+ self.mock_utils_get_port_ip.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
+
+ def test_get_port_mac_command(self):
+ self.scenario.run({})
+
+ self.mock_utils_get_port_mac.assert_has_calls(
+ [mock.call(self.scenario.server, 'ens5'),
+ mock.call(self.scenario.server, 'ens4'),
+ mock.call(self.scenario.server, 'ens5')])
+
+ def test_run_no_sla(self):
+ sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ self.scenario.run(result)
+        # Under Python 3 the division yields a float, e.g.
+        # {'avg_latency': 132.33333333333334} instead of the integer 132,
+        # so compare with a tolerance rather than exact equality.
+ delta = result['avg_latency'] - 132
+ self.assertLessEqual(delta, 1)
+
+ def test_run_sla(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+
+ sample_output = '100\n100\n100\n100\n100\n100\n100\n100\n100\n100\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ result = {}
+ scenario.run(result)
+
+ self.assertEqual(result, {"avg_latency": 100})
+
+ def test_run_sla_error(self):
+ self.scenario_cfg['sla'] = {'max_latency': 100}
+ scenario = pktgen_dpdk.PktgenDPDKLatency(self.scenario_cfg,
+ self.context_cfg)
+
+ sample_output = '100\n110\n112\n130\n149\n150\n90\n150\n200\n162\n'
+ self.mock_SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ with self.assertRaises(y_exc.SLAValidationError):
+ scenario.run({})
+
+ def test_run_last_command_raise_on_error(self):
+ self.mock_SSH.from_node().execute.side_effect = y_exc.SSHError
+
+ with self.assertRaises(y_exc.SSHError):
+ self.scenario.run({})
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
new file mode 100644
index 000000000..39392e4bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_pktgen_dpdk_throughput.py
@@ -0,0 +1,194 @@
+##############################################################################
+# Copyright (c) 2017 Nokia and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.networking.pktgen_dpdk_throughput.PktgenDPDK
+
+from __future__ import absolute_import
+import unittest
+
+from oslo_serialization import jsonutils
+import mock
+
+from yardstick.benchmark.scenarios.networking import pktgen_dpdk_throughput
+from yardstick.common import exceptions as y_exc
+
+
+# pylint: disable=unused-argument
+# The class-level mock.patch decorator injects a mock_ssh argument into
+# every test method, and not all of them use it.
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.pktgen_dpdk_throughput.ssh')
+class PktgenDPDKTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ }
+ }
+
+ self._mock_time = mock.patch(
+ 'yardstick.benchmark.scenarios.networking.pktgen_dpdk_throughput.time')
+ self.mock_time = self._mock_time.start()
+
+ self.addCleanup(self._cleanup)
+
+ def _cleanup(self):
+ self._mock_time.stop()
+
+ def test_pktgen_dpdk_throughput_successful_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.setup()
+
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_pktgen_dpdk_throughput_successful_no_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ }
+
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
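+        # The mocked DPDK counter reports 149300 received packets.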
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_pktgen_dpdk_throughput_successful_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 10000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+ expected_result = jsonutils.loads(sample_output)
+ expected_result["packets_received"] = 149300
+ expected_result["packetsize"] = 60
+ self.assertEqual(result, expected_result)
+
+ def test_pktgen_dpdk_throughput_unsuccessful_sla(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 1000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_dpdk_result = mock.Mock()
+ mock_dpdk_result.return_value = 149300
+ p._dpdk_get_result = mock_dpdk_result
+
+ sample_output = '{"packets_per_second": 9753, "errors": 0, \
+ "packets_sent": 149776, "flows": 110}'
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_pktgen_dpdk_throughput_unsuccessful_script_error(
+ self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60, 'number_of_ports': 10},
+ 'sla': {'max_ppm': 1000}
+ }
+ result = {}
+
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def test_pktgen_dpdk_throughput_is_dpdk_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+
+ p._is_dpdk_setup("server")
+
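+        # The probe greps for a kernel-visible eth1 on the server node.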
+ mock_ssh.SSH().execute.assert_called_with(
+ "ip a | grep eth1 2>/dev/null")
+
+ def test_pktgen_dpdk_throughput_dpdk_setup(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+
+ p.dpdk_setup()
+
+ self.assertTrue(p.dpdk_setup_done)
+
+ def test_pktgen_dpdk_throughput_dpdk_get_result(self, mock_ssh):
+ args = {
+ 'options': {'packetsize': 60},
+ }
+ p = pktgen_dpdk_throughput.PktgenDPDK(args, self.ctx)
+ p.server = mock_ssh.SSH()
+ p.client = mock_ssh.SSH()
+
+ mock_ssh.SSH().execute.return_value = (0, '10000', '')
+
+ p._dpdk_get_result()
+
+ mock_ssh.SSH().execute.assert_called_with(
+ "sudo /dpdk/destdir/bin/dpdk-procinfo -- --stats-reset > /dev/null 2>&1")
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py
new file mode 100644
index 000000000..a5e5e39dc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_sfc.py
@@ -0,0 +1,68 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.sfc
+
+from __future__ import absolute_import
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import sfc
+
+
+class SfcTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario_cfg = {}
+ context_cfg = {
+ # Used in Sfc.setup()
+ 'target': {
+ 'user': 'root',
+ 'password': 'opnfv',
+ 'ip': '127.0.0.1',
+ },
+
+ # Used in Sfc.run()
+ 'host': {
+ 'user': 'root',
+ 'password': 'opnfv',
+ 'ip': None,
+ }
+ }
+
+ self.sfc = sfc.Sfc(scenario_cfg=scenario_cfg, context_cfg=context_cfg)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.ssh')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.sfc_openstack')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
+ def test_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
+        # Mock a successful SSH in Sfc.setup() and Sfc.run()
+ mock_ssh.SSH.from_node().execute.return_value = (0, '100', '')
+ mock_openstack.get_an_IP.return_value = "127.0.0.1"
+ mock_subprocess.call.return_value = 'mocked!'
+
+ result = {}
+ self.sfc.setup()
+ self.sfc.run(result)
+ self.sfc.teardown()
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.ssh')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.sfc_openstack')
+ @mock.patch('yardstick.benchmark.scenarios.networking.sfc.subprocess')
+ def test2_run_for_success(self, mock_subprocess, mock_openstack, mock_ssh):
+        # Mock a successful SSH in Sfc.setup() and Sfc.run()
+ mock_ssh.SSH.from_node().execute.return_value = (
+ 0, 'vxlan_tool.py', 'succeeded timed out')
+ mock_openstack.get_an_IP.return_value = "127.0.0.1"
+ mock_subprocess.call.return_value = 'mocked!'
+
+ result = {}
+ self.sfc.setup()
+ self.sfc.run(result)
+ self.sfc.teardown()
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
new file mode 100644
index 000000000..cf9a26a76
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vnf_generic.py
@@ -0,0 +1,873 @@
+# Copyright (c) 2016-2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from copy import deepcopy
+import os
+import sys
+
+import mock
+import unittest
+
+from yardstick import tests
+from yardstick.common import exceptions
+from yardstick.common import utils
+from yardstick.network_services.collector.subscriber import Collector
+from yardstick.network_services.traffic_profile import base
+from yardstick.network_services.vnf_generic import vnfdgen
+from yardstick.network_services.vnf_generic.vnf.base import GenericTrafficGen
+from yardstick.network_services.vnf_generic.vnf.base import GenericVNF
+
+
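+# Start the STL mocks before importing vnf_generic so these unit tests do
+# not require a real TRex client installation.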
+stl_patch = mock.patch.dict(sys.modules, tests.STL_MOCKS)
+stl_patch.start()
+
+if stl_patch:
+ from yardstick.benchmark.scenarios.networking import vnf_generic
+
+# pylint: disable=unused-argument
+# Some mock.patch decorators inject arguments that are not used directly.
+
+
+COMPLETE_TREX_VNFD = {
+ 'vnfd:vnfd-catalog': {
+ 'vnfd': [
+ {
+ 'benchmark': {
+ 'kpi': [
+ 'rx_throughput_fps',
+ 'tx_throughput_fps',
+ 'tx_throughput_mbps',
+ 'rx_throughput_mbps',
+ 'tx_throughput_pc_linerate',
+ 'rx_throughput_pc_linerate',
+ 'min_latency',
+ 'max_latency',
+ 'avg_latency',
+ ],
+ },
+ 'connection-point': [
+ {
+ 'name': 'xe0',
+ 'type': 'VPORT',
+ },
+ {
+ 'name': 'xe1',
+ 'type': 'VPORT',
+ },
+ ],
+ 'description': 'TRex stateless traffic generator for RFC2544',
+ 'id': 'TrexTrafficGen',
+ 'mgmt-interface': {
+ 'ip': '1.1.1.1',
+ 'password': 'berta',
+ 'user': 'berta',
+ 'vdu-id': 'trexgen-baremetal',
+ },
+ 'name': 'trexgen',
+ 'short-name': 'trexgen',
+ 'class-name': 'TrexTrafficGen',
+ 'vdu': [
+ {
+ 'description': 'TRex stateless traffic generator for RFC2544',
+ 'external-interface': [
+ {
+ 'name': 'xe0',
+ 'virtual-interface': {
+ 'bandwidth': '10 Gbps',
+ 'dst_ip': '1.1.1.1',
+ 'dst_mac': '00:01:02:03:04:05',
+ 'local_ip': '1.1.1.2',
+ 'local_mac': '00:01:02:03:05:05',
+ 'type': 'PCI-PASSTHROUGH',
+ 'netmask': "255.255.255.0",
+ 'driver': 'i40',
+ 'vpci': '0000:00:10.2',
+ },
+ 'vnfd-connection-point-ref': 'xe0',
+ },
+ {
+ 'name': 'xe1',
+ 'virtual-interface': {
+ 'bandwidth': '10 Gbps',
+ 'dst_ip': '2.1.1.1',
+ 'dst_mac': '00:01:02:03:04:06',
+ 'local_ip': '2.1.1.2',
+ 'local_mac': '00:01:02:03:05:06',
+ 'type': 'PCI-PASSTHROUGH',
+ 'netmask': "255.255.255.0",
+ 'driver': 'i40',
+ 'vpci': '0000:00:10.1',
+ },
+ 'vnfd-connection-point-ref': 'xe1',
+ },
+ ],
+ 'id': 'trexgen-baremetal',
+ 'name': 'trexgen-baremetal',
+ },
+ ],
+ },
+ ],
+ },
+}
+
+IP_ADDR_SHOW = """
+28: eth1: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP \
+group default qlen 1000
+ link/ether 90:e2:ba:a7:6a:c8 brd ff:ff:ff:ff:ff:ff
+ inet 1.1.1.1/8 brd 1.255.255.255 scope global eth1
+ inet6 fe80::92e2:baff:fea7:6ac8/64 scope link
+ valid_lft forever preferred_lft forever
+29: eth5: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc mq state UP \
+group default qlen 1000
+ link/ether 90:e2:ba:a7:6a:c9 brd ff:ff:ff:ff:ff:ff
+ inet 2.1.1.1/8 brd 2.255.255.255 scope global eth5
+ inet6 fe80::92e2:baff:fea7:6ac9/64 scope link tentative
+ valid_lft forever preferred_lft forever
+"""
+
+SYS_CLASS_NET = """
+lrwxrwxrwx 1 root root 0 sie 10 14:16 eth1 -> \
+../../devices/pci0000:80/0000:80:02.2/0000:84:00.1/net/eth1
+lrwxrwxrwx 1 root root 0 sie 3 10:37 eth2 -> \
+../../devices/pci0000:00/0000:00:01.1/0000:84:00.2/net/eth5
+"""
+
+TRAFFIC_PROFILE = {
+ "schema": "isb:traffic_profile:0.1",
+ "name": "fixed",
+ "description": "Fixed traffic profile to run UDP traffic",
+ "traffic_profile": {
+ "traffic_type": "FixedTraffic",
+ "frame_rate": 100, # pps
+ "flow_number": 10,
+ "frame_size": 64,
+ },
+}
+
+
+class TestNetworkServiceTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.tg__0 = {
+ 'name': 'trafficgen_1.yardstick',
+ 'ip': '10.10.10.11',
+ 'role': 'TrafficGen',
+ 'user': 'root',
+ 'password': 'r00t',
+ 'interfaces': {
+ 'xe0': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.100.20',
+ 'local_mac': '00:00:00:00:00:01',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.0',
+ 'dpdk_port_num': 0,
+ },
+ 'xe1': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.40.20',
+ 'local_mac': '00:00:00:00:00:02',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.1',
+ 'dpdk_port_num': 1,
+ },
+ },
+ }
+
+ self.vnf__0 = {
+ 'name': 'vnf.yardstick',
+ 'ip': '10.10.10.12',
+ 'host': '10.223.197.164',
+ 'role': 'vnf',
+ 'user': 'root',
+ 'password': 'r00t',
+ 'interfaces': {
+ 'xe0': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.100.19',
+ 'local_mac': '00:00:00:00:00:03',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.0',
+ 'dpdk_port_num': 0,
+ },
+ 'xe1': {
+ 'netmask': '255.255.255.0',
+ 'local_ip': '152.16.40.19',
+ 'local_mac': '00:00:00:00:00:04',
+ 'driver': 'i40e',
+ 'vpci': '0000:07:00.1',
+ 'dpdk_port_num': 1,
+ },
+ },
+ 'routing_table': [
+ {
+ 'netmask': '255.255.255.0',
+ 'gateway': '152.16.100.20',
+ 'network': '152.16.100.20',
+ 'if': 'xe0',
+ },
+ {
+ 'netmask': '255.255.255.0',
+ 'gateway': '152.16.40.20',
+ 'network': '152.16.40.20',
+ 'if': 'xe1',
+ },
+ ],
+ 'nd_route_tbl': [
+ {
+ 'netmask': '112',
+ 'gateway': '0064:ff9b:0:0:0:0:9810:6414',
+ 'network': '0064:ff9b:0:0:0:0:9810:6414',
+ 'if': 'xe0',
+ },
+ {
+ 'netmask': '112',
+ 'gateway': '0064:ff9b:0:0:0:0:9810:2814',
+ 'network': '0064:ff9b:0:0:0:0:9810:2814',
+ 'if': 'xe1',
+ },
+ ],
+ }
+
+ self.context_cfg = {
+ 'nodes': {
+ 'tg__0': self.tg__0,
+ 'vnf__0': self.vnf__0,
+ },
+ 'networks': {
+ GenericVNF.UPLINK: {
+ 'vld_id': GenericVNF.UPLINK,
+ },
+ GenericVNF.DOWNLINK: {
+ 'vld_id': GenericVNF.DOWNLINK,
+ },
+ },
+ }
+
+ self.vld0 = {
+ 'vnfd-connection-point-ref': [
+ {
+ 'vnfd-connection-point-ref': 'xe0',
+ 'member-vnf-index-ref': '1',
+ 'vnfd-id-ref': 'trexgen'
+ },
+ {
+ 'vnfd-connection-point-ref': 'xe0',
+ 'member-vnf-index-ref': '2',
+ 'vnfd-id-ref': 'trexgen'
+ }
+ ],
+ 'type': 'ELAN',
+ 'id': GenericVNF.UPLINK,
+ 'name': 'tg__0 to vnf__0 link 1'
+ }
+
+ self.vld1 = {
+ 'vnfd-connection-point-ref': [
+ {
+ 'vnfd-connection-point-ref': 'xe1',
+ 'member-vnf-index-ref': '1',
+ 'vnfd-id-ref': 'trexgen'
+ },
+ {
+ 'vnfd-connection-point-ref': 'xe1',
+ 'member-vnf-index-ref': '2',
+ 'vnfd-id-ref': 'trexgen'
+ }
+ ],
+ 'type': 'ELAN',
+ 'id': GenericVNF.DOWNLINK,
+ 'name': 'vnf__0 to tg__0 link 2'
+ }
+
+ self.topology = {
+ 'id': 'trex-tg-topology',
+ 'short-name': 'trex-tg-topology',
+ 'name': 'trex-tg-topology',
+ 'description': 'trex-tg-topology',
+ 'constituent-vnfd': [
+ {
+ 'member-vnf-index': '1',
+ 'VNF model': 'tg_trex_tpl.yaml',
+ 'vnfd-id-ref': 'tg__0',
+ },
+ {
+ 'member-vnf-index': '2',
+ 'VNF model': 'tg_trex_tpl.yaml',
+ 'vnfd-id-ref': 'vnf__0',
+ },
+ ],
+ 'vld': [self.vld0, self.vld1],
+ }
+
+ self.scenario_cfg = {
+ 'task_path': "",
+ "topology": self._get_file_abspath("vpe_vnf_topology.yaml"),
+ 'task_id': 'a70bdf4a-8e67-47a3-9dc1-273c14506eb7',
+ 'tc': 'tc_ipv4_1Mflow_64B_packetsize',
+ 'traffic_profile': 'ipv4_throughput_vpe.yaml',
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+ 'type': 'ISB',
+ 'tc_options': {
+ 'rfc2544': {
+ 'allowed_drop_rate': '0.8 - 1',
+ },
+ },
+ 'options': {
+ 'simulated_users': {'uplink': [1, 2]},
+ 'page_object': {'uplink': [1, 2]},
+ 'framesize': {'64B': 100}
+ },
+ 'runner': {
+ 'object': 'NetworkServiceTestCase',
+ 'interval': 35,
+ 'output_filename': 'yardstick.out',
+ 'runner_id': 74476,
+ 'duration': 400,
+ 'type': 'Duration',
+ },
+ 'traffic_options': {
+ 'flow': 'ipv4_1flow_Packets_vpe.yaml',
+ 'imix': 'imix_voice.yaml'
+ },
+ 'nodes': {
+ 'tg__2': 'trafficgen_2.yardstick',
+ 'tg__0': 'trafficgen_1.yardstick',
+ 'vnf__0': 'vnf.yardstick',
+ },
+ }
+
+ self.s = vnf_generic.NetworkServiceTestCase(self.scenario_cfg,
+ self.context_cfg)
+
+ def _get_file_abspath(self, filename):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ file_path = os.path.join(curr_path, filename)
+ return file_path
+
+ def test___init__(self):
+ self.assertIsNotNone(self.topology)
+
+ def test__get_ip_flow_range_string(self):
+ result = '152.16.100.2-152.16.100.254'
+ self.assertEqual(result, self.s._get_ip_flow_range(
+ '152.16.100.2-152.16.100.254'))
+
+ def test__get_ip_flow_range_no_nodes(self):
+ self.assertEqual('0.0.0.0', self.s._get_ip_flow_range({}))
+
+ def test__get_ip_flow_range_no_node_data(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {}
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('0.0.0.2-0.0.0.254', result)
+
+    def test__get_ip_flow_range_ipv4(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15',
+ 'netmask': '255.255.255.128'}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.2-192.168.1.126', result)
+
+ def test__get_ip_flow_range_ipv4_mask_30(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '192.168.1.15', 'netmask': 30}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('192.168.1.15', result)
+
+ def test__get_ip_flow_range_ipv6(self):
+ node_data = {'tg__0': 'xe0'}
+ self.s.context_cfg['nodes']['tg__0'] = {
+ 'interfaces': {
+ 'xe0': {'local_ip': '2001::11', 'netmask': 64}
+ }
+ }
+ result = self.s._get_ip_flow_range(node_data)
+ self.assertEqual('2001::2-2001::ffff:ffff:ffff:fffe', result)
+
+ def test___get_traffic_flow(self):
+ self.scenario_cfg["traffic_options"]["flow"] = \
+ self._get_file_abspath("ipv4_1flow_Packets_vpe.yaml")
+ self.scenario_cfg['options'] = {
+ 'flow': {
+ 'src_ip': [
+ {
+ 'tg__0': 'xe0',
+ },
+ ],
+ 'dst_ip': [
+ {
+ 'tg__0': 'xe1',
+ },
+ ],
+ 'public_ip': ['1.1.1.1'],
+ },
+ }
+ expected_flow = {'flow': {'dst_ip_0': '152.16.40.2-152.16.40.254',
+ 'public_ip_0': '1.1.1.1',
+ 'src_ip_0': '152.16.100.2-152.16.100.254'}}
+ self.assertEqual(expected_flow, self.s._get_traffic_flow())
+
+ def test___get_traffic_flow_error(self):
+ self.scenario_cfg["traffic_options"]["flow"] = \
+ "ipv4_1flow_Packets_vpe.yaml1"
+ self.assertEqual({'flow': {}}, self.s._get_traffic_flow())
+
+    def test_get_vnf_impl(self):
+ vnfd = COMPLETE_TREX_VNFD['vnfd:vnfd-catalog']['vnfd'][0]['class-name']
+ with mock.patch.dict(sys.modules, tests.STL_MOCKS):
+ self.assertIsNotNone(self.s.get_vnf_impl(vnfd))
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s.get_vnf_impl('NonExistentClass')
+
+ exc_str = str(raised.exception)
+ self.assertIn('No implementation', exc_str)
+ self.assertIn('found in', exc_str)
+
+ def test_load_vnf_models_invalid(self):
+ self.context_cfg["nodes"]['tg__0']['VNF model'] = \
+ self._get_file_abspath("tg_trex_tpl.yaml")
+ self.context_cfg["nodes"]['vnf__0']['VNF model'] = \
+ self._get_file_abspath("tg_trex_tpl.yaml")
+
+ vnf = mock.Mock(autospec=GenericVNF)
+ self.s.get_vnf_impl = mock.Mock(return_value=vnf)
+
+ self.assertIsNotNone(
+ self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
+
+ def test_load_vnf_models_no_model(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ self.s.get_vnf_impl = mock.Mock(return_value=vnf)
+
+ self.assertIsNotNone(
+ self.s.load_vnf_models(self.scenario_cfg, self.context_cfg))
+
+ def test_map_topology_to_infrastructure(self):
+ self.s.map_topology_to_infrastructure()
+
+ nodes = self.context_cfg["nodes"]
+ self.assertEqual('../../vnf_descriptors/tg_rfc2544_tpl.yaml',
+ nodes['tg__0']['VNF model'])
+ self.assertEqual('../../vnf_descriptors/vpe_vnf.yaml',
+ nodes['vnf__0']['VNF model'])
+
+ def test_map_topology_to_infrastructure_insufficient_nodes(self):
+ cfg = deepcopy(self.context_cfg)
+ del cfg['nodes']['vnf__0']
+
+ cfg_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with cfg_patch:
+ with self.assertRaises(exceptions.IncorrectConfig):
+ self.s.map_topology_to_infrastructure()
+
+ def test_map_topology_to_infrastructure_config_invalid(self):
+ ssh_mock = mock.Mock()
+ ssh_mock.execute.return_value = 0, SYS_CLASS_NET + IP_ADDR_SHOW, ""
+
+ cfg = deepcopy(self.s.context_cfg)
+
+ # delete all, we don't know which will come first
+ del cfg['nodes']['vnf__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['vnf__0']['interfaces']['xe1']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe0']['local_mac']
+ del cfg['nodes']['tg__0']['interfaces']['xe1']['local_mac']
+
+ config_patch = mock.patch.object(self.s, 'context_cfg', cfg)
+ with config_patch:
+ with self.assertRaises(exceptions.IncorrectConfig):
+ self.s.map_topology_to_infrastructure()
+
+ def test__resolve_topology_invalid_config(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ # purge an important key from the data structure
+ for interface in self.tg__0['interfaces'].values():
+ del interface['local_mac']
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('not found', str(raised.exception))
+
+ # restore local_mac
+ for index, interface in enumerate(self.tg__0['interfaces'].values()):
+            interface['local_mac'] = '00:00:00:00:00:{:02x}'.format(index)
+
+ # make a connection point ref with 3 points
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'].append(
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'][0])
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('wrong endpoint count', str(raised.exception))
+
+ # make a connection point ref with 1 point
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'] = \
+ self.s.topology["vld"][0]['vnfd-connection-point-ref'][:1]
+
+ with self.assertRaises(exceptions.IncorrectConfig) as raised:
+ self.s._resolve_topology()
+
+ self.assertIn('wrong endpoint count', str(raised.exception))
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ result = {}
+ self.s.run(result)
+ self.assertDictEqual(result, {tgen.name: verified_dict})
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ self.assertIsNone(self.s.setup())
+
+ def test_setup_exception(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.instantiate.side_effect = RuntimeError(
+ "error during instantiate")
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ with self.assertRaises(RuntimeError):
+ self.s.setup()
+
+ def test__get_traffic_profile(self):
+ self.scenario_cfg["traffic_profile"] = \
+ self._get_file_abspath("ipv4_throughput_vpe.yaml")
+ self.assertIsNotNone(self.s._get_traffic_profile())
+
+ def test__get_traffic_profile_exception(self):
+ with mock.patch.dict(self.scenario_cfg, {'traffic_profile': ''}):
+ with self.assertRaises(IOError):
+ self.s._get_traffic_profile()
+
+ def test__key_list_to_dict(self):
+ result = self.s._key_list_to_dict("uplink", {"uplink": [1, 2]})
+ self.assertEqual({"uplink_0": 1, "uplink_1": 2}, result)
+
+ def test__get_simulated_users(self):
+ result = self.s._get_simulated_users()
+ self.assertEqual({'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
+ def test__get_page_object(self):
+ result = self.s._get_page_object()
+ self.assertEqual({'page_object': {'uplink_0': 1, 'uplink_1': 2}},
+ result)
+
+ def test___get_traffic_imix_exception(self):
+ with mock.patch.dict(self.scenario_cfg["traffic_options"], {'imix': ''}):
+ self.assertEqual({'imix': {'64B': 100}},
+ self.s._get_traffic_imix())
+
+ def test__get_ip_priority(self):
+ with mock.patch.dict(self.scenario_cfg["options"],
+ {'priority': {'raw': '0x01'}}):
+ self.assertEqual({'raw': '0x01'}, self.s._get_ip_priority())
+
+ def test__get_ip_priority_exception(self):
+ self.assertEqual({}, self.s._get_ip_priority())
+
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = mock.MagicMock()
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ mock_generate.assert_called_once_with(
+ fake_tprofile,
+ {'downlink': {},
+ 'extra_args': {'arg1': 'value1', 'arg2': 'value2'},
+ 'flow': {'flow': {}},
+ 'imix': {'imix': {'64B': 100}},
+ 'priority': {},
+ 'uplink': {},
+ 'duration': 30,
+ 'simulated_users': {
+ 'simulated_users': {'uplink_0': 1, 'uplink_1': 2}},
+ 'page_object': {
+                 'page_object': {'uplink_0': 1, 'uplink_1': 2}}}
+ )
+ mock_tprofile_get.assert_called_once_with(fake_vnfd)
+
+ @mock.patch.object(base.TrafficProfile, 'get')
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__fill_traffic_profile2(self, mock_generate, mock_tprofile_get):
+ fake_tprofile = mock.Mock()
+ fake_vnfd = {}
+ with mock.patch.object(self.s, '_get_traffic_profile',
+ return_value=fake_tprofile) as mock_get_tp:
+ mock_generate.return_value = fake_vnfd
+
+ self.s.scenario_cfg["options"] = {"traffic_config": {"duration": 99899}}
+ self.s._fill_traffic_profile()
+ mock_get_tp.assert_called_once()
+ self.assertIn("traffic_profile", fake_vnfd)
+ self.assertIn("duration", fake_vnfd["traffic_profile"])
+ self.assertEqual(99899, fake_vnfd["traffic_profile"]["duration"])
+
+ @mock.patch.object(utils, 'open_relative_file')
+ def test__get_topology(self, mock_open_path):
+ self.s.scenario_cfg['topology'] = 'fake_topology'
+ self.s.scenario_cfg['task_path'] = 'fake_path'
+ mock_open_path.side_effect = mock.mock_open(read_data='fake_data')
+ self.assertEqual('fake_data', self.s._get_topology())
+ mock_open_path.assert_called_once_with('fake_topology', 'fake_path')
+
+ @mock.patch.object(vnfdgen, 'generate_vnfd')
+ def test__render_topology(self, mock_generate):
+ fake_topology = 'fake_topology'
+ mock_generate.return_value = {'nsd:nsd-catalog': {'nsd': ['fake_nsd']}}
+ with mock.patch.object(self.s, '_get_topology',
+ return_value=fake_topology) as mock_get_topology:
+ self.s._render_topology()
+ mock_get_topology.assert_called_once()
+
+ mock_generate.assert_called_once_with(
+ fake_topology,
+ {'extra_args': {'arg1': 'value1', 'arg2': 'value2'}}
+ )
+ self.assertEqual(self.s.topology, 'fake_nsd')
+
+ def test_teardown(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.terminate = mock.Mock(return_value=True)
+ vnf.name = str(vnf)
+ self.s.vnfs = [vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.stop = \
+ mock.Mock(return_value=True)
+ self.assertIsNone(self.s.teardown())
+
+ def test_teardown_exception(self):
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.terminate = mock.Mock(
+            side_effect=RuntimeError("error during terminate"))
+ vnf.name = str(vnf)
+ self.s.vnfs = [vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.stop = \
+ mock.Mock(return_value=True)
+ with self.assertRaises(RuntimeError):
+ self.s.teardown()
+
+
+class TestNetworkServiceRFC2544TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC2544TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC2544(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ tgen.wait_on_trafic.return_value = 'COMPLETE'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock(
+ return_value={tgen.name: verified_dict})
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
+
+ def test_setup_exception(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.instantiate.side_effect = RuntimeError(
+ "error during instantiate")
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s._fill_traffic_profile = \
+ mock.Mock(return_value=TRAFFIC_PROFILE)
+ with self.assertRaises(RuntimeError):
+ self.s.setup()
+
+
+class TestNetworkServiceRFC3511TestCase(TestNetworkServiceTestCase):
+
+ def setUp(self):
+ super(TestNetworkServiceRFC3511TestCase, self).setUp()
+ self.s = vnf_generic.NetworkServiceRFC3511(self.scenario_cfg,
+ self.context_cfg)
+
+ def test_run(self):
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.name = "tgen__1"
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s._fill_traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = mock.Mock()
+ result = mock.Mock()
+ self.s.run(result)
+ self.s._fill_traffic_profile.assert_called_once()
+ result.push.assert_called_once()
+
+ def test_setup(self):
+ with mock.patch("yardstick.ssh.SSH") as ssh:
+ ssh_mock = mock.Mock(autospec=ssh.SSH)
+ ssh_mock.execute = \
+ mock.Mock(return_value=(0, SYS_CLASS_NET + IP_ADDR_SHOW, ""))
+ ssh.from_node.return_value = ssh_mock
+
+ tgen = mock.Mock(autospec=GenericTrafficGen)
+ tgen.traffic_finished = True
+ verified_dict = {"verified": True}
+ tgen.verify_traffic = lambda x: verified_dict
+ tgen.terminate = mock.Mock(return_value=True)
+ tgen.name = "tgen__1"
+ tgen.run_traffic.return_value = 'tg_id'
+ vnf = mock.Mock(autospec=GenericVNF)
+ vnf.runs_traffic = False
+ vnf.terminate = mock.Mock(return_value=True)
+ self.s.vnfs = [tgen, vnf]
+ self.s.traffic_profile = mock.Mock()
+ self.s.collector = mock.Mock(autospec=Collector)
+ self.s.collector.get_kpi = \
+ mock.Mock(return_value={tgen.name: verified_dict})
+ self.s.map_topology_to_infrastructure = mock.Mock(return_value=0)
+ self.s.load_vnf_models = mock.Mock(return_value=self.s.vnfs)
+ self.s.setup()
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
new file mode 100644
index 000000000..a1c27f5fb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf.py
@@ -0,0 +1,196 @@
+# Copyright 2016 Intel Corporation.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import unittest
+import subprocess
+import yardstick.ssh as ssh
+
+from yardstick.benchmark.scenarios.networking import vsperf
+from yardstick import exceptions as y_exc
+
+
+class VsperfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.context_cfg = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.scenario_cfg = {
+ 'options': {
+ 'testname': 'p2p_rfc2544_continuous',
+ 'traffic_type': 'continuous',
+ 'frame_size': '64',
+ 'bidirectional': 'True',
+ 'iload': 100,
+ 'trafficgen_port1': 'eth1',
+ 'trafficgen_port2': 'eth3',
+ 'external_bridge': 'br-ex',
+ 'conf_file': 'vsperf-yardstick.conf',
+ 'setup_script': 'setup_yardstick.sh',
+ 'test_params': 'TRAFFICGEN_DURATION=30;',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_fps',
+ 'throughput_rx_fps': 500000,
+ 'action': 'monitor',
+ }
+ }
+
+ self._mock_SSH = mock.patch.object(ssh, 'SSH')
+ self.mock_SSH = self._mock_SSH.start()
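+        # Default vsperf CSV output: a single throughput_rx_fps sample well
+        # above the 500000 fps SLA configured above.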
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
+ self.mock_subprocess_call = self._mock_subprocess_call.start()
+ self.mock_subprocess_call.return_value = None
+
+ self.addCleanup(self._stop_mock)
+
+ self.scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+
+ def _stop_mock(self):
+ self._mock_SSH.stop()
+ self._mock_subprocess_call.stop()
+
+ def test_setup(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_setup_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh setup', shell=True)
+ self.assertIsNone(scenario.tg_port1)
+ self.assertIsNone(scenario.tg_port2)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_setup_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl add-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertIsNone(scenario.setup_script)
+ self.assertIsNotNone(scenario.client)
+ self.assertTrue(scenario.setup_done)
+
+ def test_run_ok(self):
+ self.scenario.setup()
+
+ result = {}
+ self.scenario.run(result)
+
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ def test_run_ok_setup_not_done(self):
+ result = {}
+ self.scenario.run(result)
+
+ self.assertTrue(self.scenario.setup_done)
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ def test_run_ssh_command_call_counts(self):
+ self.scenario.run({})
+
+ self.assertEqual(self.mock_SSH.from_node().execute.call_count, 2)
+ self.mock_SSH.from_node().run.assert_called_once()
+
+ def test_run_sla_fail(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+        self.assertIn('VSPERF_throughput_rx_fps(123456.000000) < '
+                      'SLA_throughput_rx_fps(500000.000000)',
+                      str(raised.exception))
+
+ def test_run_sla_fail_metric_not_collected(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n14797660.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+        self.assertIn('throughput_rx_fps was not collected by VSPERF',
+                      str(raised.exception))
+
+ def test_run_faulty_result_csv(self):
+ self.mock_SSH.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+        self.assertIn('throughput_rx_fps was not collected by VSPERF',
+                      str(raised.exception))
+
+ def test_run_sla_fail_metric_not_defined_in_sla(self):
+ del self.scenario_cfg['sla']['throughput_rx_fps']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.setup()
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ scenario.run({})
+
+        self.assertIn('throughput_rx_fps is not defined in SLA',
+                      str(raised.exception))
+
+ def test_teardown(self):
+ self.scenario.setup()
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ self.scenario.teardown()
+ self.assertFalse(self.scenario.setup_done)
+
+ def test_teardown_tg_port_not_set(self):
+ del self.scenario_cfg['options']['trafficgen_port1']
+ del self.scenario_cfg['options']['trafficgen_port2']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_called_once_with(
+ 'setup_yardstick.sh teardown', shell=True)
+ self.assertFalse(scenario.setup_done)
+
+ def test_teardown_no_setup_script(self):
+ del self.scenario_cfg['options']['setup_script']
+ scenario = vsperf.Vsperf(self.scenario_cfg, self.context_cfg)
+ scenario.teardown()
+
+ self.mock_subprocess_call.assert_has_calls(
+ (mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth1"',
+ shell=True),
+ mock.call('sudo bash -c "ovs-vsctl del-port br-ex eth3"',
+ shell=True)))
+ self.assertEqual(2, self.mock_subprocess_call.call_count)
+ self.assertFalse(scenario.setup_done)
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
new file mode 100644
index 000000000..8bbe6911e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/test_vsperf_dpdk.py
@@ -0,0 +1,181 @@
+# Copyright 2017 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import subprocess
+import time
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vsperf_dpdk
+from yardstick.common import exceptions as y_exc
+from yardstick import ssh
+
+
+class VsperfDPDKTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "ubuntu",
+ "password": "ubuntu",
+ },
+ }
+ self.args = {
+ 'task_id': "1234-5678",
+ 'options': {
+ 'testname': 'pvp_tput',
+ 'traffic_type': 'rfc2544_throughput',
+ 'frame_size': '64',
+ 'test_params': 'TRAFFICGEN_DURATION=30;',
+ 'trafficgen_port1': 'ens4',
+ 'trafficgen_port2': 'ens5',
+ 'conf_file': 'vsperf-yardstick.conf',
+ 'setup_script': 'setup_yardstick.sh',
+ 'moongen_helper_file': '~/moongen.py',
+ 'moongen_host_ip': '10.5.201.151',
+ 'moongen_port1_mac': '8c:dc:d4:ae:7c:5c',
+ 'moongen_port2_mac': '8c:dc:d4:ae:7c:5d',
+ 'trafficgen_port1_nw': 'test2',
+ 'trafficgen_port2_nw': 'test3',
+ },
+ 'sla': {
+ 'metrics': 'throughput_rx_fps',
+ 'throughput_rx_fps': 500000,
+ 'action': 'monitor',
+ }
+ }
+ self._mock_ssh = mock.patch.object(ssh, 'SSH')
+ self.mock_ssh = self._mock_ssh.start()
+ self._mock_subprocess_call = mock.patch.object(subprocess, 'call')
+ self.mock_subprocess_call = self._mock_subprocess_call.start()
+ mock_call_obj = mock.Mock()
+ mock_call_obj.execute.return_value = None
+ self.mock_subprocess_call.return_value = mock_call_obj
+
+ self._mock_log_info = mock.patch.object(vsperf_dpdk.LOG, 'info')
+ self.mock_log_info = self._mock_log_info.start()
+
+ self.addCleanup(self._cleanup)
+
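+        # Run setup() up front so every test starts with setup_done == True.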
+ self.scenario = vsperf_dpdk.VsperfDPDK(self.args, self.ctx)
+ self.scenario.setup()
+
+ def _cleanup(self):
+ self._mock_ssh.stop()
+ self._mock_subprocess_call.stop()
+ self._mock_log_info.stop()
+
+ def test_setup(self):
+ self.assertIsNotNone(self.scenario.client)
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_teardown(self):
+ self.scenario.teardown()
+ self.assertFalse(self.scenario.setup_done)
+
+ def test_is_dpdk_setup_no(self):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
+
+ self.assertFalse(self.scenario._is_dpdk_setup())
+
+ def test_is_dpdk_setup_yes(self):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ self.assertTrue(self.scenario._is_dpdk_setup())
+
+ @mock.patch.object(time, 'sleep')
+ def test_dpdk_setup_first(self, *args):
+ # is_dpdk_setup() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (0, 'dummy', '')
+
+ self.scenario.dpdk_setup()
+ self.assertFalse(self.scenario._is_dpdk_setup())
+ self.assertTrue(self.scenario.dpdk_setup_done)
+
+ @mock.patch.object(time, 'sleep')
+ def test_dpdk_setup_next(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (0, '', '')
+
+ self.scenario.dpdk_setup()
+ self.assertTrue(self.scenario._is_dpdk_setup())
+ self.assertTrue(self.scenario.dpdk_setup_done)
+
+ @mock.patch.object(subprocess, 'check_output')
+ def test_run_ok(self, *args):
+ # run() specific mocks
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ result = {}
+ self.scenario.run(result)
+ self.assertEqual(result['throughput_rx_fps'], '14797660.000')
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('VSPERF_throughput_rx_fps(123456.000000) < '
+ 'SLA_throughput_rx_fps(500000.000000)',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected(self, *args):
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'nonexisting_metric\r\n123456.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_metric_not_collected_faulty_csv(self, *args):
+ self.scenario.setup()
+
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'faulty output not csv', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps was not collected by VSPERF',
+ str(raised.exception))
+
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(subprocess, 'check_output')
+ def test_vsperf_run_sla_fail_sla_not_defined(self, *args):
+ del self.scenario.scenario_cfg['sla']['throughput_rx_fps']
+ self.scenario.setup()
+
+ self.mock_ssh.from_node().execute.return_value = (
+ 0, 'throughput_rx_fps\r\n14797660.000\r\n', '')
+
+ with self.assertRaises(y_exc.SLAValidationError) as raised:
+ self.scenario.run({})
+
+ self.assertIn('throughput_rx_fps is not defined in SLA',
+ str(raised.exception))
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml
new file mode 100644
index 000000000..b1641836b
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/tg_trex_tpl.yaml
@@ -0,0 +1,75 @@
+# Copyright (c) 2016-2017 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+vnfd:vnfd-catalog:
+ vnfd:
+ - id: TrexTrafficGen # NSPerf class mapping
+ name: trexgen
+ short-name: trexgen
+ description: TRex stateless traffic generator for tests
+ vm-flavor:
+ vcpu-count: '4'
+ memory-mb: '4096'
+ mgmt-interface:
+ vdu-id: trexgen-baremetal
+ user: '{{user}}' # Value filled by vnfdgen
+ password: '{{password}}' # Value filled by vnfdgen
+ ip: '{{ip}}' # Value filled by vnfdgen
+ connection-point:
+ - name: xe0
+ type: VPORT
+ - name: xe1
+ type: VPORT
+ vdu:
+ - id: trexgen-baremetal
+ name: trexgen-baremetal
+ description: TRex stateless traffic generator for tests
+ external-interface:
+ - name: xe0
+ virtual-interface:
+ type: PCI-PASSTHROUGH
+            # Substitution variables MUST be quoted, otherwise Python can
+            # misinterpret them.
+ vpci: '{{ interfaces.xe0.vpci }}' # Value filled by vnfdgen
+ local_ip: '{{ interfaces.xe0.local_ip }}' # Value filled by vnfdgen
+ driver: '{{ interfaces.xe0.driver}}' # Value filled by vnfdgen
+ dst_ip: '{{ interfaces.xe0.dst_ip }}' # Value filled by vnfdgen
+ local_mac: '{{ interfaces.xe0.local_mac }}' # Value filled by vnfdgen
+ dst_mac: '{{ interfaces.xe0.dst_mac }}' # Value filled by vnfdgen
+ vld_id: '{{ interfaces.xe0.vld_id }}' # Value filled by vnfdgen
+ bandwidth: 10 Gbps
+ vnfd-connection-point-ref: xe0
+ - name: xe1
+ virtual-interface:
+ type: PCI-PASSTHROUGH
+ vpci: '{{ interfaces.xe1.vpci }}' # Value filled by vnfdgen
+ local_ip: '{{ interfaces.xe1.local_ip }}' # Value filled by vnfdgen
+ driver: '{{ interfaces.xe1.driver}}' # Value filled by vnfdgen
+ dst_ip: '{{ interfaces.xe1.dst_ip }}' # Value filled by vnfdgen
+ local_mac: '{{ interfaces.xe1.local_mac }}' # Value filled by vnfdgen
+ dst_mac: '{{ interfaces.xe1.dst_mac }}' # Value filled by vnfdgen
+ vld_id: '{{ interfaces.xe1.vld_id }}' # Value filled by vnfdgen
+ bandwidth: 10 Gbps
+ vnfd-connection-point-ref: xe1
+
+ benchmark:
+ kpi:
+ - rx_throughput_fps
+ - tx_throughput_fps
+ - tx_throughput_mbps
+ - rx_throughput_mbps
+ - tx_throughput_pc_linerate
+ - rx_throughput_pc_linerate
+ - min_latency
+ - max_latency
+ - avg_latency
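The quoting rule called out in the comment above is easy to demonstrate: with PyYAML (assumed here as the loader), an unquoted '{{ ... }}' placeholder is read as a YAML flow mapping rather than a string, so the template fails to load before any substitution runs.

import yaml

# Quoted placeholder survives as a plain string.
print(yaml.safe_load("vpci: '{{ interfaces.xe0.vpci }}'"))
# -> {'vpci': '{{ interfaces.xe0.vpci }}'}

# Unquoted placeholder is parsed as nested flow mappings and fails to load.
try:
    yaml.safe_load("vpci: {{ interfaces.xe0.vpci }}")
except yaml.YAMLError as exc:
    print("misparsed:", exc)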
diff --git a/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
new file mode 100644
index 000000000..aaf84bb5e
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/networking/vpe_vnf_topology.yaml
@@ -0,0 +1,50 @@
+# Copyright (c) 2016-2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+nsd:nsd-catalog:
+ nsd:
+ - id: VPE
+ name: VPE
+ short-name: VPE
+ description: scenario with VPE,L3fwd and VNF
+ constituent-vnfd:
+ - member-vnf-index: '1'
+ vnfd-id-ref: tg__0
+ VNF model: ../../vnf_descriptors/tg_rfc2544_tpl.yaml #tg_trex_tpl.yaml #TREX
+ - member-vnf-index: '2'
+ vnfd-id-ref: vnf__0
+ VNF model: ../../vnf_descriptors/vpe_vnf.yaml #VPE VNF
+
+ vld:
+ - id: uplink
+ name: tg__0 to vnf__0 link 1
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe0
+ vnfd-id-ref: tg__0
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe0
+ vnfd-id-ref: vnf__0
+
+ - id: downlink
+ name: vnf__0 to tg__0 link 2
+ type: ELAN
+ vnfd-connection-point-ref:
+ - member-vnf-index-ref: '2'
+ vnfd-connection-point-ref: xe1
+ vnfd-id-ref: vnf__0
+ - member-vnf-index-ref: '1'
+ vnfd-connection-point-ref: xe1
+ vnfd-id-ref: tg__0
diff --git a/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py b/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/parser/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py b/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py
new file mode 100644
index 000000000..9fd5cce38
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/parser/test_parser.py
@@ -0,0 +1,70 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import subprocess
+
+import unittest
+import mock
+
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.parser import parser
+
+
+class ParserTestCase(unittest.TestCase):
+
+ def setUp(self):
+ args = {
+ 'options': {'yangfile': '/root/yardstick/samples/yang.yaml',
+ 'toscafile': '/root/yardstick/samples/tosca.yaml'},
+ }
+ self.scenario = parser.Parser(scenario_cfg=args, context_cfg={})
+
+ self._mock_popen = mock.patch.object(subprocess, 'Popen')
+ self.mock_popen = self._mock_popen.start()
+ self._mock_call = mock.patch.object(subprocess, 'call')
+ self.mock_call = self._mock_call.start()
+
+ self.addCleanup(self._stop_mock)
+
+ def _stop_mock(self):
+ self._mock_popen.stop()
+ self._mock_call.stop()
+
+ def test_setup_successful(self):
+
+ self.mock_call.return_value = 0
+ self.scenario.setup()
+ self.assertTrue(self.scenario.setup_done)
+
+ def test_run_successful(self):
+
+ result = {}
+
+ self.mock_popen().returncode = 0
+
+ expected_result = jsonutils.loads('{"yangtotosca": "success"}')
+
+ self.scenario.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_run_fail(self):
+ result = {}
+
+ self.mock_popen().returncode = 1
+ expected_result = jsonutils.loads('{"yangtotosca": "fail"}')
+
+ self.scenario.run(result)
+ self.assertEqual(result, expected_result)
+
+ def test_teardown_successful(self):
+
+ self.mock_call.return_value = 0
+ self.scenario.teardown()
+ self.assertTrue(self.scenario.teardown_done)
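The contract these four tests encode is small: subprocess.call drives setup and teardown, and the Popen return code of the conversion decides the reported status. A sketch of a run() consistent with that contract (illustrative names only, not the Parser scenario's actual implementation):

import subprocess

def run_conversion(cmd, result):
    # Launch the yang-to-tosca conversion and wait for it to finish.
    proc = subprocess.Popen(cmd, shell=True)
    proc.communicate()
    # Mirror the assertions above: returncode 0 -> "success", otherwise "fail".
    result['yangtotosca'] = 'success' if proc.returncode == 0 else 'fail'
    return result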
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py b/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/__init__.py
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
new file mode 100644
index 000000000..e9f642aba
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 2166860,"bw": 36113,"iops": 9028,"runtime": 60001,"slat": {"min": 7,"max": 1807,"mean": 10.49,"stddev": 3.00},"clat": {"min": 1,"max": 16902,"mean": 97.84,"stddev": 78.16,"percentile": {"1.000000": 84,"5.000000": 86,"10.000000": 87,"20.000000": 88,"30.000000": 89,"40.000000": 90,"50.000000": 91,"60.000000": 93,"70.000000": 98,"80.000000": 103,"90.000000": 111,"95.000000": 127,"99.000000": 161,"99.500000": 177,"99.900000": 215,"99.950000": 266,"99.990000": 4128,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 86,"max": 16912,"mean": 108.70,"stddev": 78.29},"bw_min": 0,"bw_max": 38128,"bw_agg": 35816.54,"bw_mean": 35816.54,"bw_dev": 3579.16},"write": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 4.86,"sys_cpu": 19.38,"ctx": 632024,"majf": 0,"minf": 30,"iodepth_level": {"1": 116.58,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 72.60,"250": 27.34,"500": 0.04,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.01,"50": 0.00,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 631084,"write_ios": 212,"read_merges": 0,"write_merges": 232,"read_ticks": 57300,"write_ticks": 324,"in_queue": 57400,"util": 81.55}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
new file mode 100644
index 000000000..4c7501818
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
@@ -0,0 +1 @@
+{"jobs": [{"trim": {"io_bytes": 0, "slat": {"max": 0, "mean": 0.0, "stddev": 0.0, "min": 0}, "bw_max": 0, "bw_mean": 0.0, "iops": 0, "bw": 0, "lat": {"max": 0, "mean": 0.0, "stddev": 0.0, "min": 0}, "bw_agg": 0.0, "clat": {"max": 0, "mean": 0.0, "percentile": {"70.000000": 0, "5.000000": 0, "50.000000": 0, "99.990000": 0, "30.000000": 0, "10.000000": 0, "99.000000": 0, "0.00": 0, "90.000000": 0, "95.000000": 0, "60.000000": 0, "40.000000": 0, "20.000000": 0, "99.900000": 0, "99.950000": 0, "1.000000": 0, "99.500000": 0, "80.000000": 0}, "stddev": 0.0, "min": 0}, "runtime": 0, "bw_min": 0, "bw_dev": 0.0}, "latency_us": {"10": 0.01, "750": 0.03, "20": 0.0, "50": 0.02, "2": 0.01, "4": 0.01, "100": 0.75, "250": 88.37, "500": 10.72, "1000": 0.01}, "latency_ms": {"10": 0.01, "750": 0.0, "20": 0.01, ">=2000": 0.0, "50": 0.01, "2000": 0.0, "2": 0.07, "4": 0.01, "100": 0.0, "250": 0.01, "500": 0.0, "1000": 0.01}, "read": {"io_bytes": 839056, "slat": {"max": 1990, "mean": 18.14, "stddev": 15.4, "min": 0}, "bw_max": 10328, "bw_mean": 8721.27, "iops": 20972, "bw": 83888, "lat": {"max": 776676, "mean": 236.8, "stddev": 4668.12, "min": 45}, "bw_agg": 8721.27, "clat": {"max": 776663, "mean": 217.79, "percentile": {"70.000000": 199, "5.000000": 119, "50.000000": 175, "99.990000": 15168, "30.000000": 155, "10.000000": 131, "99.000000": 342, "0.00": 0, "90.000000": 247, "95.000000": 278, "60.000000": 185, "40.000000": 165, "20.000000": 145, "99.900000": 820, "99.950000": 1272, "1.000000": 96, "99.500000": 370, "80.000000": 217}, "stddev": 4667.79, "min": 0}, "runtime": 10002, "bw_min": 4, "bw_dev": 2178.08}, "majf": 0, "ctx": 490590, "minf": 87, "jobname": "yardstick-fio", "write": {"io_bytes": 841992, "slat": {"max": 2594, "mean": 19.78, "stddev": 16.25, "min": 0}, "bw_max": 10472, "bw_mean": 8464.0, "iops": 21045, "bw": 84182, "lat": {"max": 776709, "mean": 233.55, "stddev": 3115.46, "min": 64}, "bw_agg": 8464.0, "clat": {"max": 776685, "mean": 212.87, "percentile": {"70.000000": 211, "5.000000": 135, "50.000000": 187, "99.990000": 3536, "30.000000": 169, "10.000000": 145, "99.000000": 358, "0.00": 0, "90.000000": 258, "95.000000": 290, "60.000000": 197, "40.000000": 177, "20.000000": 159, "99.900000": 756, "99.950000": 1288, "1.000000": 114, "99.500000": 382, "80.000000": 229}, "stddev": 3115.23, "min": 0}, "runtime": 10002, "bw_min": 4, "bw_dev": 2584.23}, "iodepth_level": {"16": 0.0, "32": 0.0, "1": 111.92, "2": 0.0, "4": 0.0, ">=64": 0.0, "8": 0.0}, "usr_cp": 2.87, "error": 0, "sys_cp": 12.37, "groupid": 0}], "fio version": "fio-2.1.3", "disk_util": [{"aggr_write_ticks": 42020, "read_merges": 0, "name": "dm-0", "write_ios": 233547, "aggr_write_ios": 235129, "aggr_read_ticks": 42576, "read_ios": 233492, "util": 97.22, "read_ticks": 42096, "aggr_write_merge": 0, "write_merges": 0, "aggr_in_queue": 84524, "aggr_read_ios": 235224, "aggr_util": 96.96, "aggr_read_merges": 0, "in_queue": 83732, "write_ticks": 41468}, {"read_merges": 0, "name": "vda", "write_ios": 235129, "read_ios": 235224, "util": 96.96, "read_ticks": 42576, "write_merges": 0, "in_queue": 84524, "write_ticks": 42020}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json b/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
new file mode 100644
index 000000000..7c760e8bc
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"write": {"io_bytes": 2106508,"bw": 35107,"iops": 8776,"runtime": 60001,"slat": {"min": 8,"max": 5166,"mean": 11.83,"stddev": 7.05},"clat": {"min": 1,"max": 23472,"mean": 99.54,"stddev": 44.23,"percentile": {"1.000000": 85,"5.000000": 87,"10.000000": 88,"20.000000": 89,"30.000000": 90,"40.000000": 91,"50.000000": 93,"60.000000": 99,"70.000000": 104,"80.000000": 107,"90.000000": 113,"95.000000": 127,"99.000000": 161,"99.500000": 179,"99.900000": 231,"99.950000": 286,"99.990000": 628,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 87,"max": 23486,"mean": 111.74,"stddev": 45.61},"bw_min": 0,"bw_max": 37288,"bw_agg": 34839.53,"bw_mean": 34839.53,"bw_dev": 3387.37},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 5.25,"sys_cpu": 19.72,"ctx": 616160,"majf": 0,"minf": 27,"iodepth_level": {"1": 116.90,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 60.74,"250": 39.18,"500": 0.06,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.00,"50": 0.01,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 0,"write_ios": 615418,"read_merges": 0,"write_merges": 231,"read_ticks": 0,"write_ticks": 58284,"in_queue": 58024,"util": 82.45}]}
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py
new file mode 100644
index 000000000..d78506584
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_bonnie.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.storage import bonnie
+
+
+class BonnieTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ @mock.patch('yardstick.benchmark.scenarios.storage.bonnie.ssh')
+ def test_bonnie_successful_setup(self, mock_ssh):
+
+ options = {
+ "file_size": "1024",
+ "ram_size": "512",
+ "test_dir": "/tmp",
+ "concurrency": "1",
+ "test_user": "root"
+ }
+ args = {"options": options}
+ b = bonnie.Bonnie(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+
+ b.setup()
+ self.assertIsNotNone(b.client)
+        self.assertTrue(b.setup_done)
+
+ @mock.patch('yardstick.benchmark.scenarios.storage.bonnie.ssh')
+ def test_bonnie_unsuccessful_script_error(self, mock_ssh):
+ options = {
+ "file_size": "1024",
+ "ram_size": "512",
+ "test_dir": "/tmp",
+ "concurrency": "1",
+ "test_user": "root"
+ }
+ args = {"options": options}
+ b = bonnie.Bonnie(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, b.run, self.result)
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
new file mode 100644
index 000000000..6e69ddc6d
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_fio.py
@@ -0,0 +1,280 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.storage.fio.Fio
+
+from __future__ import absolute_import
+
+import os
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.storage import fio
+from yardstick.common import exceptions as y_exc
+
+
+@mock.patch('yardstick.benchmark.scenarios.storage.fio.ssh')
+class FioTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ }
+ }
+ self.sample_output = {
+ 'read': 'fio_read_sample_output.json',
+ 'write': 'fio_write_sample_output.json',
+ 'rw': 'fio_rw_sample_output.json'
+ }
+
+ def test_fio_successful_setup(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ p.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_fio_job_file_successful_setup(self, mock_ssh):
+
+ options = {
+ 'job_file': 'job_file.ini',
+ 'directory': '/FIO_Test'
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '/dev/vdb', '')
+ p.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+    def test_fio_job_file_no_disk_setup(self, mock_ssh):
+
+ options = {
+ 'job_file': 'job_file.ini',
+ 'directory': '/FIO_Test'
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ p.setup()
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ self.assertIsNotNone(p.client)
+ self.assertTrue(p.setup_done)
+
+ def test_fio_successful_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_read_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': "read",
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['read'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
+ '"read_lat": 108.7}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_write_no_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'write',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['write'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
+ '"write_lat": 111.74}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_lat_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 300.1}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_unsuccessful_lat_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 200.1}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_fio_successful_bw_iops_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 20000}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+
+ p.run(result)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = jsonutils.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 30000}
+ }
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH.from_node().execute.return_value = (0, sample_output, '')
+ self.assertRaises(y_exc.SLAValidationError, p.run, result)
+
+ def test_fio_unsuccessful_script_error(self, mock_ssh):
+
+ options = {
+ 'filename': '/home/ubuntu/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p = fio.Fio(args, self.ctx)
+ result = {}
+
+ p.client = mock_ssh.SSH.from_node()
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+ def _read_sample_output(self, file_name):
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, file_name)
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
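The expected_result literals in these tests follow directly from the sample JSON fixtures: bandwidth and IOPS come from jobs[0] per direction, and the reported latency is the mean of the combined 'lat' block. A small, hypothetical extraction that reproduces those numbers from the fixtures (field names taken from the sample files, not from fio.py):

from oslo_serialization import jsonutils

def summarize_fio(raw_output, rw_type):
    job = jsonutils.loads(raw_output)['jobs'][0]
    directions = ['read', 'write'] if rw_type == 'rw' else [rw_type]
    result = {}
    for direction in directions:
        stats = job[direction]
        result['%s_bw' % direction] = stats['bw']            # KiB/s as reported by fio
        result['%s_iops' % direction] = stats['iops']
        result['%s_lat' % direction] = stats['lat']['mean']  # latency mean in usec
    return result

# summarize_fio(read_fixture, 'read') -> {'read_bw': 36113, 'read_iops': 9028, 'read_lat': 108.7}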
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
new file mode 100644
index 000000000..c1c731b0a
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_storagecapacity.py
@@ -0,0 +1,99 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for
+# yardstick.benchmark.scenarios.storage.storagecapacity.StorageCapacity
+
+from __future__ import absolute_import
+
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+
+from yardstick.benchmark.scenarios.storage import storagecapacity
+
+DISK_SIZE_SAMPLE_OUTPUT = \
+ '{"Numberf of devides": "2", "Total disk size in bytes": "1024000000"}'
+BLOCK_SIZE_SAMPLE_OUTPUT = '{"/dev/sda": 1024, "/dev/sdb": 4096}'
+DISK_UTIL_RAW_OUTPUT = "vda 10.00\nvda 0.00"
+DISK_UTIL_SAMPLE_OUTPUT = \
+ '{"vda": {"avg_util": 5.0, "max_util": 10.0, "min_util": 0.0}}'
+
+
+@mock.patch('yardstick.benchmark.scenarios.storage.storagecapacity.ssh')
+class StorageCapacityTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.scn = {
+ "options": {
+ 'test_type': 'disk_size'
+ }
+ }
+ self.ctx = {
+ "host": {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'password': "root"
+ }
+ }
+ self.result = {}
+
+ def test_capacity_successful_setup(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, '', '')
+ c.setup()
+ self.assertIsNotNone(c.client)
+ self.assertTrue(c.setup_done)
+
+ def test_capacity_disk_size_successful(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, DISK_SIZE_SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ DISK_SIZE_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_block_size_successful(self, mock_ssh):
+ args = {
+ "options": {
+ 'test_type': 'block_size'
+ }
+ }
+ c = storagecapacity.StorageCapacity(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, BLOCK_SIZE_SAMPLE_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ BLOCK_SIZE_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_disk_utilization_successful(self, mock_ssh):
+ args = {
+ "options": {
+ 'test_type': 'disk_utilization',
+ 'interval': 1,
+ 'count': 2
+ }
+ }
+ c = storagecapacity.StorageCapacity(args, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, DISK_UTIL_RAW_OUTPUT, '')
+ c.run(self.result)
+ expected_result = jsonutils.loads(
+ DISK_UTIL_SAMPLE_OUTPUT)
+ self.assertEqual(self.result, expected_result)
+
+ def test_capacity_unsuccessful_script_error(self, mock_ssh):
+ c = storagecapacity.StorageCapacity(self.scn, self.ctx)
+
+ mock_ssh.SSH.from_node().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, c.run, self.result)
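DISK_UTIL_RAW_OUTPUT and DISK_UTIL_SAMPLE_OUTPUT document the aggregation the disk_utilization test relies on: per-device samples folded into min/max/avg. A compact sketch of that fold (hypothetical helper, shown only to make the fixture pair readable):

def aggregate_disk_util(raw_output):
    samples = {}
    # Each line is "<device> <util>", e.g. "vda 10.00".
    for line in raw_output.splitlines():
        device, util = line.split()
        samples.setdefault(device, []).append(float(util))
    return {device: {'min_util': min(vals),
                     'max_util': max(vals),
                     'avg_util': sum(vals) / len(vals)}
            for device, vals in samples.items()}

# aggregate_disk_util("vda 10.00\nvda 0.00")
# -> {'vda': {'avg_util': 5.0, 'max_util': 10.0, 'min_util': 0.0}}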
diff --git a/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
new file mode 100644
index 000000000..2ba53cb93
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/storage/test_storperf.py
@@ -0,0 +1,513 @@
+##############################################################################
+# Copyright (c) 2016 Huawei Technologies Co.,Ltd.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.storage.storperf.StorPerf
+
+from __future__ import absolute_import
+
+import json
+import unittest
+
+import mock
+from oslo_serialization import jsonutils
+import requests
+
+from yardstick.benchmark.scenarios.storage import storperf
+
+
+# pylint: disable=unused-argument
+# disabled because the mock.patch decorators supply mock arguments that
+# several tests and helper functions do not use
+def mocked_requests_config_post(*args, **kwargs):
+ class MockResponseConfigPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigPost(
+ '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+ '"stack_created": false}',
+ 200)
+
+
+def mocked_requests_config_post_fail(*args, **kwargs):
+ class MockResponseConfigPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigPost(
+ '{"message": "ERROR: Parameter \'public_network\' is invalid: ' +
+ 'Error validating value \'foo\': Unable to find network with ' +
+ 'name or id \'foo\'"}',
+ 400)
+
+
+def mocked_requests_config_get(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{"stack_id": "dac27db1-3502-4300-b301-91c64e6a1622",'
+ '"stack_created": true}',
+ 200)
+
+
+def mocked_requests_config_get_not_created(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{"stack_id": "",'
+ '"stack_created": false}',
+ 200)
+
+
+def mocked_requests_config_get_no_payload(*args, **kwargs):
+ class MockResponseConfigGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseConfigGet(
+ '{}',
+ 200)
+
+
+def mocked_requests_initialize_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
+def mocked_requests_job_get(*args, **kwargs):
+ class MockResponseJobGet(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobGet(
+ '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}',
+ 200)
+
+
+def mocked_requests_job_post(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost('{"job_id": \
+ "d46bfb8c-36f4-4a40-813b-c4b4a437f728"}', 200)
+
+
+def mocked_requests_job_post_fail(*args, **kwargs):
+ class MockResponseJobPost(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobPost(
+ '{"message": "ERROR: Stack StorPerfAgentGroup does not exist"}',
+ 400)
+
+
+def mocked_requests_job_delete(*args, **kwargs):
+ class MockResponseJobDelete(object):
+
+ def __init__(self, json_data, status_code):
+ self.content = json_data
+ self.status_code = status_code
+
+ return MockResponseJobDelete('{}', 200)
+
+
+def mocked_requests_delete(*args, **kwargs):
+ class MockResponseDelete(object):
+
+ def __init__(self, json_data, status_code):
+ self.json_data = json_data
+ self.status_code = status_code
+
+ return MockResponseDelete('{}', 200)
+
+
+def mocked_requests_delete_failed(*args, **kwargs):
+ class MockResponseDeleteFailed(object):
+
+ def __init__(self, json_data, status_code):
+ self.json_data = json_data
+ self.status_code = status_code
+
+ return MockResponseDeleteFailed('{"message": "Teardown failed"}', 400)
+
+
+class StorPerfTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+ }
+
+ self.result = {}
+
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup(self, mock_get, mock_post):
+ mock_post.side_effect = [mocked_requests_config_post(),
+ mocked_requests_job_post()]
+ mock_get.side_effect = [mocked_requests_config_get(),
+ mocked_requests_job_get()]
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.setup()
+
+ self.assertTrue(s.setup_done)
+
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_unsuccessful(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_not_created
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'get')
+ def test_query_setup_state_no_payload(self, mock_get):
+ mock_get.side_effect = mocked_requests_config_get_no_payload
+ args = {
+ "options": {}
+ }
+ s = storperf.StorPerf(args, self.ctx)
+ result = s._query_setup_state()
+ self.assertFalse(result)
+
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(requests, 'get')
+ def test_setup_config_post_failed(self, mock_get, mock_post):
+ mock_post.side_effect = mocked_requests_config_post_fail
+
+ args = {
+ "options": {
+ "public_network": "foo"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.setup)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v1_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ "workload": "rs",
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ sample_output = '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+
+ expected_result = jsonutils.loads(sample_output)
+
+ s.run(self.result)
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/jobs',
+ json=jsonutils.loads(json.dumps(expected_post)))
+
+ self.assertEqual(self.result, expected_result)
+
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_v2_successful(self, mock_post, mock_get):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ sample_output = '{"Status": "Completed",\
+ "_ssd_preconditioning.queue-depth.8.block-size.16384.duration": 6}'
+
+ expected_result = jsonutils.loads(sample_output)
+
+ s.run(self.result)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
+
+ self.assertEqual(self.result, expected_result)
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_run_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workloads": {
+ "read_sequential": {
+ "rw": "rs"
+ }
+ },
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 0,
+ "timeout": 60
+ }
+ expected_post = {
+ 'metadata': {
+ 'build_tag': 'latest',
+ 'test_case': 'opnfv_yardstick_tc074'
+ },
+ 'deadline': 60,
+ 'block_sizes': 4096,
+ 'queue_depths': 4,
+ 'workloads': {
+ 'read_sequential': {
+ 'rw': 'rs'
+ }
+ },
+ 'agent_count': 8
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+ s.setup_done = True
+
+ self.assertRaises(RuntimeError, s.run, self.ctx)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v2.0/jobs',
+ json=expected_post)
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ @mock.patch.object(storperf.StorPerf, 'setup')
+ def test_run_calls_setup(self, mock_setup, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ 'timeout': 60,
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.run(self.result)
+
+ mock_setup.assert_called_once()
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_job_post
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.initialize_disks()
+
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch('time.sleep')
+ @mock.patch.object(requests, 'get')
+ @mock.patch.object(requests, 'post')
+ def test_initialize_disks_post_failed(self, mock_post, mock_get, _):
+ mock_post.side_effect = mocked_requests_initialize_post_fail
+ mock_get.side_effect = mocked_requests_job_get
+
+ args = {
+ "options": {
+ "StorPerf_ip": "192.168.23.2"
+ }
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.initialize_disks)
+ mock_post.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/initializations',
+ json={})
+
+ @mock.patch.object(requests, 'delete')
+ def test_teardown(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_job_delete
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 10,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ s.teardown()
+
+ self.assertFalse(s.setup_done)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
+
+ @mock.patch.object(requests, 'delete')
+ def test_teardown_request_delete_failed(self, mock_delete):
+ mock_delete.side_effect = mocked_requests_delete_failed
+ options = {
+ "agent_count": 8,
+ "public_network": 'ext-net',
+ "volume_size": 10,
+ "block_sizes": 4096,
+ "queue_depths": 4,
+ "workload": "rs",
+ "StorPerf_ip": "192.168.23.2",
+ "query_interval": 10,
+ "timeout": 60
+ }
+
+ args = {
+ "options": options
+ }
+
+ s = storperf.StorPerf(args, self.ctx)
+
+ self.assertRaises(RuntimeError, s.teardown)
+ mock_delete.assert_called_once_with(
+ 'http://192.168.23.2:5000/api/v1.0/configurations')
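A detail worth noting in the run tests: the flat 'workload' option posts to /api/v1.0/jobs, while the nested 'workloads' dict switches to /api/v2.0/jobs. A tiny sketch of that selection rule as the tests pin it down (the scenario itself may decide the API version differently):

def job_url(storperf_ip, options):
    # Nested 'workloads' implies the v2 API; the flat 'workload' string keeps v1.
    version = 'v2.0' if 'workloads' in options else 'v1.0'
    return 'http://%s:5000/api/%s/jobs' % (storperf_ip, version)

# job_url('192.168.23.2', {'workload': 'rs'})
# -> 'http://192.168.23.2:5000/api/v1.0/jobs'
# job_url('192.168.23.2', {'workloads': {'read_sequential': {'rw': 'rs'}}})
# -> 'http://192.168.23.2:5000/api/v2.0/jobs'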
diff --git a/yardstick/tests/unit/benchmark/scenarios/test_base.py b/yardstick/tests/unit/benchmark/scenarios/test_base.py
new file mode 100644
index 000000000..284a71cc8
--- /dev/null
+++ b/yardstick/tests/unit/benchmark/scenarios/test_base.py
@@ -0,0 +1,135 @@
+# Copyright 2017: Intel Ltd.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import time
+
+import mock
+
+from yardstick.benchmark.scenarios import base
+from yardstick.tests.unit import base as ut_base
+
+
+class _TestScenario(base.Scenario):
+ __scenario_type__ = 'Test Scenario'
+
+ def run(self):
+ pass
+
+
+class ScenarioTestCase(ut_base.BaseUnitTestCase):
+
+ def test_get_scenario_type(self):
+ scenario_type = 'dummy scenario'
+
+ class DummyScenario(base.Scenario):
+ __scenario_type__ = scenario_type
+
+ self.assertEqual(scenario_type, DummyScenario.get_scenario_type())
+
+ def test_get_scenario_type_not_defined(self):
+ class DummyScenario(base.Scenario):
+ pass
+
+ self.assertEqual(str(None), DummyScenario.get_scenario_type())
+
+ def test_get_description(self):
+ docstring = """First line
+ Second line
+ Third line
+ """
+
+ class DummyScenario(base.Scenario):
+ __doc__ = docstring
+
+ self.assertEqual(docstring.splitlines()[0],
+ DummyScenario.get_description())
+
+ def test_get_description_empty(self):
+ class DummyScenario(base.Scenario):
+ pass
+
+ self.assertEqual(str(None), DummyScenario.get_description())
+
+ def test_get_types(self):
+ scenario_names = set(
+ scenario.__scenario_type__ for scenario in
+ base.Scenario.get_types() if hasattr(scenario,
+ '__scenario_type__'))
+ existing_scenario_class_names = {
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser'}
+ self.assertTrue(existing_scenario_class_names.issubset(scenario_names))
+
+ def test_get_cls_existing_scenario(self):
+ scenario_name = 'NSPerf'
+ scenario = base.Scenario.get_cls(scenario_name)
+ self.assertEqual(scenario_name, scenario.__scenario_type__)
+
+ def test_get_cls_non_existing_scenario(self):
+ wrong_scenario_name = 'Non-existing-scenario'
+ with self.assertRaises(RuntimeError) as exc:
+ base.Scenario.get_cls(wrong_scenario_name)
+ self.assertEqual('No such scenario type %s' % wrong_scenario_name,
+ str(exc.exception))
+
+ def test_get_existing_scenario(self):
+ scenario_name = 'NSPerf'
+ scenario_module = ('yardstick.benchmark.scenarios.networking.'
+ 'vnf_generic.NetworkServiceTestCase')
+ self.assertEqual(scenario_module, base.Scenario.get(scenario_name))
+
+ def test_get_non_existing_scenario(self):
+ wrong_scenario_name = 'Non-existing-scenario'
+ with self.assertRaises(RuntimeError) as exc:
+ base.Scenario.get(wrong_scenario_name)
+ self.assertEqual('No such scenario type %s' % wrong_scenario_name,
+ str(exc.exception))
+
+ def test_scenario_abstract_class(self):
+ # pylint: disable=abstract-class-instantiated
+ with self.assertRaises(TypeError):
+ base.Scenario()
+
+ @mock.patch.object(time, 'sleep')
+ def test_pre_run_wait_time(self, mock_sleep):
+ """Ensure default behaviour (backwards compatibility): no wait time"""
+ test_scenario = _TestScenario()
+ test_scenario.pre_run_wait_time(mock.ANY)
+ mock_sleep.assert_not_called()
+
+ @mock.patch.object(time, 'sleep')
+ def test_post_run_wait_time(self, mock_sleep):
+ """Ensure default behaviour (backwards compatibility): wait time"""
+ test_scenario = _TestScenario()
+ test_scenario.post_run_wait_time(100)
+ mock_sleep.assert_called_once_with(100)
+
+
+class IterScenarioClassesTestCase(ut_base.BaseUnitTestCase):
+
+ def test_no_scenario_type_defined(self):
+ some_existing_scenario_class_names = [
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser']
+ scenario_types = [scenario.__scenario_type__ for scenario
+ in base._iter_scenario_classes()]
+ for class_name in some_existing_scenario_class_names:
+ self.assertIn(class_name, scenario_types)
+
+ def test_scenario_type_defined(self):
+ some_existing_scenario_class_names = [
+ 'Iperf3', 'CACHEstat', 'SpecCPU2006', 'Dummy', 'NSPerf', 'Parser']
+ for class_name in some_existing_scenario_class_names:
+ scenario_class = next(base._iter_scenario_classes(
+ scenario_type=class_name))
+ self.assertEqual(class_name, scenario_class.__scenario_type__)
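For completeness, the wait-time defaults pinned down at the end of ScenarioTestCase amount to: pre_run_wait_time is a no-op and post_run_wait_time sleeps for the given interval. A sketch of base-class defaults consistent with those assertions (the real Scenario base class may differ in detail):

import time

class SketchScenario(object):
    # Defaults matching the two wait-time tests above.
    def pre_run_wait_time(self, time_seconds):
        """Time waited before executing the run method: none by default."""

    def post_run_wait_time(self, time_seconds):
        """Time waited after executing the run method."""
        time.sleep(time_seconds)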