Diffstat (limited to 'tests/unit/benchmark')
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py | 77
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_basemonitor.py | 84
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_monitor.py | 83
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_monitor_command.py | 79
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_monitor_process.py | 56
-rw-r--r--  tests/unit/benchmark/scenarios/availability/test_serviceha.py | 11
-rw-r--r--  tests/unit/benchmark/scenarios/compute/test_cyclictest.py | 159
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_ping6.py | 99
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py | 48
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py | 51
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py | 48
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py | 51
-rw-r--r--  tests/unit/benchmark/scenarios/parser/__init__.py | 0
-rw-r--r--  tests/unit/benchmark/scenarios/parser/test_parser.py | 58
14 files changed, 733 insertions, 171 deletions
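
Most of the new test files below follow the same mock-driven pattern: the scenario or monitor module's ssh (or subprocess) dependency is patched out with mock.patch, the mocked SSH().execute call is primed with an (exit_code, stdout, stderr) tuple, and the scenario methods are then driven directly. The minimal sketch below is not part of the patch itself; it only illustrates that pattern with a hypothetical FakeProcessMonitor class, and it uses unittest.mock where the patch imports the external 'mock' package.

import unittest
from unittest import mock  # the tests in this patch import the external 'mock' package instead


class FakeProcessMonitor(object):
    """Illustrative stand-in for a monitor that checks a remote process over SSH."""

    def __init__(self, ssh_client):
        self.client = ssh_client

    def monitor_func(self):
        # The real monitors run a shell command remotely and parse stdout.
        exit_code, stdout, _ = self.client.execute("ps -ef | grep -c nova-api")
        return exit_code == 0 and int(stdout) > 0


class FakeProcessMonitorTestCase(unittest.TestCase):

    def test_process_running(self):
        ssh_client = mock.Mock()
        # Prime the mocked SSH call with an (exit_code, stdout, stderr) tuple,
        # mirroring mock_ssh.SSH().execute.return_value in the tests below.
        ssh_client.execute.return_value = (0, "1", '')
        self.assertTrue(FakeProcessMonitor(ssh_client).monitor_func())

    def test_process_down(self):
        ssh_client = mock.Mock()
        ssh_client.execute.return_value = (0, "0", '')
        self.assertFalse(FakeProcessMonitor(ssh_client).monitor_func())


if __name__ == '__main__':
    unittest.main()
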
diff --git a/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py b/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
new file mode 100644
index 000000000..340f94cb0
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_attacker_baremetal.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.attacker import baseattacker
+from yardstick.benchmark.scenarios.availability.attacker import attacker_baremetal
+
+@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.subprocess')
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def test__fun_execute_shell_command_successful(self, mock_subprocess):
+ cmd = "env"
+ mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, output = attacker_baremetal._execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+ cmd = "env"
+ mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, output = attacker_baremetal._execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.attacker.attacker_baremetal.ssh')
+class AttackerBaremetalTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ipmi_ip": "10.20.0.5",
+ "ipmi_user": "root",
+ "ipmi_pwd": "123456",
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.attacker_cfg = {
+ 'fault_type': 'bear-metal-down',
+ 'host': 'node1',
+ }
+
+ def test__attacker_baremetal_all_successful(self, mock_ssh):
+
+ ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH().execute.return_value = (0, "running", '')
+ ins.setup()
+ ins.inject_fault()
+ ins.recover()
+
+ def test__attacker_baremetal_check_failure(self, mock_ssh):
+
+ ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+ mock_ssh.SSH().execute.return_value = (0, "error check", '')
+ ins.setup()
+
+ def test__attacker_baremetal_recover_successful(self, mock_ssh):
+
+ self.attacker_cfg["jump_host"] = 'node1'
+ self.context["node1"]["pwd"] = "123456"
+ ins = attacker_baremetal.BaremetalAttacker(self.attacker_cfg, self.context)
+
+ mock_ssh.SSH().execute.return_value = (0, "running", '')
+ ins.setup()
+ ins.recover()
diff --git a/tests/unit/benchmark/scenarios/availability/test_basemonitor.py b/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
new file mode 100644
index 000000000..13295273b
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_basemonitor.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor.basemonitor
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import basemonitor
+
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.BaseMonitor')
+class MonitorMgrTestCase(unittest.TestCase):
+
+ def setUp(self):
+ config = {
+ 'monitor_type': 'openstack-api',
+ }
+
+ self.monitor_configs = []
+ self.monitor_configs.append(config)
+
+ def test__MonitorMgr_setup_successful(self, mock_monitor):
+ instance = basemonitor.MonitorMgr()
+ instance.init_monitors(self.monitor_configs, None)
+ instance.start_monitors()
+ instance.wait_monitors()
+
+ ret = instance.verify_SLA()
+
+class BaseMonitorTestCase(unittest.TestCase):
+
+ class MonitorSimple(basemonitor.BaseMonitor):
+ __monitor_type__ = "MonitorForTest"
+ def setup(self):
+ self.monitor_result = False
+
+ def monitor_func(self):
+ return self.monitor_result
+
+ def setUp(self):
+ self.monitor_cfg = {
+ 'monitor_type': 'MonitorForTest',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 0.01,
+ 'sla': {'max_outage_time': 5}
+ }
+
+ def test__basemonitor_start_wait_successful(self):
+ ins = basemonitor.BaseMonitor(self.monitor_cfg, None)
+ ins.start_monitor()
+ ins.wait_monitor()
+
+
+ def test__basemonitor_all_successful(self):
+ ins = self.MonitorSimple(self.monitor_cfg, None)
+ ins.setup()
+ ins.run()
+ ins.verify_SLA()
+
+ @mock.patch('yardstick.benchmark.scenarios.availability.monitor.basemonitor.multiprocessing')
+ def test__basemonitor_func_false(self, mock_multiprocess):
+ ins = self.MonitorSimple(self.monitor_cfg, None)
+ ins.setup()
+ mock_multiprocess.Event().is_set.return_value = False
+ ins.run()
+ ins.verify_SLA()
+
+ def test__basemonitor_getmonitorcls_successful(self):
+ cls = None
+ try:
+ cls = basemonitor.BaseMonitor.get_monitor_cls(self.monitor_cfg)
+ except Exception:
+ pass
+ self.assertIsNone(cls)
+
diff --git a/tests/unit/benchmark/scenarios/availability/test_monitor.py b/tests/unit/benchmark/scenarios/availability/test_monitor.py
deleted file mode 100644
index 793871ca3..000000000
--- a/tests/unit/benchmark/scenarios/availability/test_monitor.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python
-
-##############################################################################
-# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
-#
-# All rights reserved. This program and the accompanying materials
-# are made available under the terms of the Apache License, Version 2.0
-# which accompanies this distribution, and is available at
-# http://www.apache.org/licenses/LICENSE-2.0
-##############################################################################
-
-# Unittest for yardstick.benchmark.scenarios.availability.monitor
-
-import mock
-import unittest
-
-from yardstick.benchmark.scenarios.availability import monitor
-
-@mock.patch('yardstick.benchmark.scenarios.availability.monitor.subprocess')
-class MonitorTestCase(unittest.TestCase):
-
- def test__fun_execute_shell_command_successful(self, mock_subprocess):
- cmd = "env"
- mock_subprocess.check_output.return_value = (0, 'unittest')
- exitcode, output = monitor._execute_shell_command(cmd)
- self.assertEqual(exitcode, 0)
-
- def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
- cmd = "env"
- mock_subprocess.check_output.side_effect = RuntimeError
- exitcode, output = monitor._execute_shell_command(cmd)
- self.assertEqual(exitcode, -1)
-
- def test__fun_monitor_process_successful(self, mock_subprocess):
- config = {
- 'monitor_cmd':'env',
- 'duration':0
- }
- mock_queue = mock.Mock()
- mock_event = mock.Mock()
-
- mock_subprocess.check_output.return_value = (0, 'unittest')
- monitor._monitor_process(config, mock_queue, mock_event)
-
- def test__fun_monitor_process_fail_cmd_execute_error(self, mock_subprocess):
- config = {
- 'monitor_cmd':'env',
- 'duration':0
- }
- mock_queue = mock.Mock()
- mock_event = mock.Mock()
-
- mock_subprocess.check_output.side_effect = RuntimeError
- monitor._monitor_process(config, mock_queue, mock_event)
-
- def test__fun_monitor_process_fail_no_monitor_cmd(self, mock_subprocess):
- config = {
- 'duration':0
- }
- mock_queue = mock.Mock()
- mock_event = mock.Mock()
-
- mock_subprocess.check_output.return_value = (-1, 'unittest')
- monitor._monitor_process(config, mock_queue, mock_event)
-
- @mock.patch('yardstick.benchmark.scenarios.availability.monitor.multiprocessing')
- def test_monitor_all_successful(self, mock_multip, mock_subprocess):
- config = {
- 'monitor_cmd':'env',
- 'duration':0
- }
- p = monitor.Monitor()
- p.setup(config)
- mock_multip.Queue().get.return_value = 'started'
- p.start()
-
- result = "monitor unitest"
- mock_multip.Queue().get.return_value = result
- p.stop()
-
- ret = p.get_result()
-
- self.assertEqual(result, ret)
diff --git a/tests/unit/benchmark/scenarios/availability/test_monitor_command.py b/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
new file mode 100644
index 000000000..c8cda7dc7
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_monitor_command.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_command
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_command
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+class ExecuteShellTestCase(unittest.TestCase):
+
+ def test__fun_execute_shell_command_successful(self, mock_subprocess):
+ cmd = "env"
+ mock_subprocess.check_output.return_value = (0, 'unittest')
+ exitcode, output = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, 0)
+
+ def test__fun_execute_shell_command_fail_cmd_exception(self, mock_subprocess):
+ cmd = "env"
+ mock_subprocess.check_output.side_effect = RuntimeError
+ exitcode, output = monitor_command._execute_shell_command(cmd)
+ self.assertEqual(exitcode, -1)
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.subprocess')
+class MonitorOpenstackCmdTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.config = {
+ 'monitor_type': 'openstack-api',
+ 'command_name': 'nova image-list',
+ 'monitor_time': 1,
+ 'sla': {'max_outage_time': 5}
+ }
+
+
+ def test__monitor_command_monitor_func_successful(self, mock_subprocess):
+
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None)
+ instance.setup()
+ mock_subprocess.check_output.return_value = (0, 'unittest')
+ ret = instance.monitor_func()
+ self.assertEqual(ret, True)
+ instance._result = {"outage_time": 0}
+ instance.verify_SLA()
+
+ def test__monitor_command_monitor_func_failure(self, mock_subprocess):
+ mock_subprocess.check_output.return_value = (1, 'unittest')
+ instance = monitor_command.MonitorOpenstackCmd(self.config, None)
+ instance.setup()
+ mock_subprocess.check_output.side_effect = RuntimeError
+ ret = instance.monitor_func()
+ self.assertEqual(ret, False)
+ instance._result = {"outage_time": 10}
+ instance.verify_SLA()
+
+ @mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_command.ssh')
+ def test__monitor_command_ssh_monitor_successful(self, mock_ssh, mock_subprocess):
+
+ self.config["host"] = "node1"
+ instance = monitor_command.MonitorOpenstackCmd(self.config, self.context)
+ instance.setup()
+ mock_ssh.SSH().execute.return_value = (0, "0", '')
+ ret = instance.monitor_func()
diff --git a/tests/unit/benchmark/scenarios/availability/test_monitor_process.py b/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
new file mode 100644
index 000000000..dda104b4e
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.availability.monitor.monitor_process
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.availability.monitor import monitor_process
+
+@mock.patch('yardstick.benchmark.scenarios.availability.monitor.monitor_process.ssh')
+class MonitorProcessTestCase(unittest.TestCase):
+
+ def setUp(self):
+ host = {
+ "ip": "10.20.0.5",
+ "user": "root",
+ "key_filename": "/root/.ssh/id_rsa"
+ }
+ self.context = {"node1": host}
+ self.monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ 'sla': {'max_recover_time': 5}
+ }
+
+ def test__monitor_process_all_successful(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context)
+
+ mock_ssh.SSH().execute.return_value = (0, "1", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 0}
+ ins.verify_SLA()
+
+ def test__monitor_process_down_failure(self, mock_ssh):
+
+ ins = monitor_process.MonitorProcess(self.monitor_cfg, self.context)
+
+ mock_ssh.SSH().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ ins.verify_SLA()
+
diff --git a/tests/unit/benchmark/scenarios/availability/test_serviceha.py b/tests/unit/benchmark/scenarios/availability/test_serviceha.py
index 32adf3208..6e58b6e7a 100644
--- a/tests/unit/benchmark/scenarios/availability/test_serviceha.py
+++ b/tests/unit/benchmark/scenarios/availability/test_serviceha.py
@@ -16,7 +16,7 @@ import unittest
from yardstick.benchmark.scenarios.availability import serviceha
-@mock.patch('yardstick.benchmark.scenarios.availability.serviceha.monitor')
+@mock.patch('yardstick.benchmark.scenarios.availability.serviceha.basemonitor')
@mock.patch('yardstick.benchmark.scenarios.availability.serviceha.baseattacker')
class ServicehaTestCase(unittest.TestCase):
@@ -53,15 +53,11 @@ class ServicehaTestCase(unittest.TestCase):
p.setup()
self.assertEqual(p.setup_done, True)
-
- result = {}
- result["outage_time"] = 0
- mock_monitor.Monitor().get_result.return_value = result
+ mock_monitor.MonitorMgr().verify_SLA.return_value = True
ret = {}
p.run(ret)
- self.assertEqual(ret, result)
p.teardown()
-
+"""
def test__serviceha_run_sla_error(self, mock_attacker, mock_monitor):
p = serviceha.ServiceHA(self.args, self.ctx)
@@ -74,3 +70,4 @@ class ServicehaTestCase(unittest.TestCase):
ret = {}
self.assertRaises(AssertionError, p.run, ret)
+"""
diff --git a/tests/unit/benchmark/scenarios/compute/test_cyclictest.py b/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
index a87b39142..807429025 100644
--- a/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
+++ b/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
@@ -22,41 +22,65 @@ from yardstick.benchmark.scenarios.compute import cyclictest
class CyclictestTestCase(unittest.TestCase):
def setUp(self):
- self.ctx = {
+ self.scenario_cfg = {
+ "host": "kvm.LF",
+ "setup_options": {
+ "rpm_dir": "/opt/rpm",
+ "host_setup_seqs": [
+ "host-setup0.sh",
+ "host-setup1.sh",
+ "host-run-qemu.sh"
+ ],
+ "script_dir": "/opt/scripts",
+ "image_dir": "/opt/image",
+ "guest_setup_seqs": [
+ "guest-setup0.sh",
+ "guest-setup1.sh"
+ ]
+ },
+ "sla": {
+ "action": "monitor",
+ "max_min_latency": 50,
+ "max_avg_latency": 100,
+ "max_max_latency": 1000
+ },
+ "options": {
+ "priority": 99,
+ "threads": 1,
+ "loops": 1000,
+ "affinity": 1,
+ "interval": 1000,
+ "histogram": 90
+ }
+ }
+ self.context_cfg = {
"host": {
- "ip": "192.168.50.28",
- "user": "root",
- "key_filename": "mykey.key"
+ "ip": "10.229.43.154",
+ "key_filename": "/yardstick/resources/files/yardstick_key",
+ "role": "BareMetal",
+ "name": "kvm.LF",
+ "user": "root"
}
}
def test_cyclictest_successful_setup(self, mock_ssh):
- c = cyclictest.Cyclictest({}, self.ctx)
- c.setup()
-
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
mock_ssh.SSH().execute.return_value = (0, '', '')
- self.assertIsNotNone(c.client)
+
+ c.setup()
+ self.assertIsNotNone(c.guest)
+ self.assertIsNotNone(c.host)
self.assertEqual(c.setup_done, True)
def test_cyclictest_successful_no_sla(self, mock_ssh):
-
- options = {
- "affinity": 2,
- "interval": 100,
- "priority": 88,
- "loops": 10000,
- "threads": 2,
- "histogram": 80
- }
- args = {
- "options": options,
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.pop("sla", None)
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
-
+ c.guest = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
@@ -65,29 +89,19 @@ class CyclictestTestCase(unittest.TestCase):
self.assertEqual(result, expected_result)
def test_cyclictest_successful_sla(self, mock_ssh):
-
- options = {
- "affinity": 2,
- "interval": 100,
- "priority": 88,
- "loops": 10000,
- "threads": 2,
- "histogram": 80
- }
- sla = {
- "max_min_latency": 100,
- "max_avg_latency": 500,
- "max_max_latency": 1000,
- }
- args = {
- "options": options,
- "sla": sla
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.update({"sla": {
+ "action": "monitor",
+ "max_min_latency": 100,
+ "max_avg_latency": 500,
+ "max_max_latency": 1000
+ }
+ })
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
-
+ c.guest = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
@@ -97,14 +111,13 @@ class CyclictestTestCase(unittest.TestCase):
def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
- args = {
- "options": {},
- "sla": {"max_min_latency": 10}
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.update({"sla": {"max_min_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
+ c.guest = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
@@ -112,14 +125,13 @@ class CyclictestTestCase(unittest.TestCase):
def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
- args = {
- "options": {},
- "sla": {"max_avg_latency": 10}
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.update({"sla": {"max_avg_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
+ c.guest = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
@@ -127,14 +139,13 @@ class CyclictestTestCase(unittest.TestCase):
def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
- args = {
- "options": {},
- "sla": {"max_max_latency": 10}
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
+ c.guest = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
@@ -142,27 +153,13 @@ class CyclictestTestCase(unittest.TestCase):
def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
- options = {
- "affinity": 2,
- "interval": 100,
- "priority": 88,
- "loops": 10000,
- "threads": 2,
- "histogram": 80
- }
- sla = {
- "max_min_latency": 100,
- "max_avg_latency": 500,
- "max_max_latency": 1000,
- }
- args = {
- "options": options,
- "sla": sla
- }
- c = cyclictest.Cyclictest(args, self.ctx)
result = {}
+ self.scenario_cfg.update({"sla": {"max_max_latency": 10}})
+ c = cyclictest.Cyclictest(self.scenario_cfg, self.context_cfg)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ c.setup()
- c.server = mock_ssh.SSH()
+ c.guest = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, c.run, result)
diff --git a/tests/unit/benchmark/scenarios/networking/test_ping6.py b/tests/unit/benchmark/scenarios/networking/test_ping6.py
new file mode 100644
index 000000000..662b85c30
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_ping6.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.ping6.Ping6
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import ping6
+
+
+class PingTestCase(unittest.TestCase):
+
+ def setUp(self):
+ self.ctx = {
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key",
+ 'password': "root"
+ },
+ }
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_setup(self, mock_ssh):
+
+ p = ping6.Ping6({}, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '0', '')
+ p.setup()
+
+ self.assertEqual(p.setup_done, True)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_no_sla(self, mock_ssh):
+
+ result = {}
+
+ p = ping6.Ping6({}, self.ctx)
+ p.client = mock_ssh.SSH()
+ mock_ssh.SSH().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_successful_sla(self, mock_ssh):
+
+ args = {
+ 'sla': {'max_rtt': 150}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH()
+ mock_ssh.SSH().execute.return_value = (0, '100', '')
+ p.run(result)
+ self.assertEqual(result, {'rtt': 100.0})
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_sla(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH()
+ mock_ssh.SSH().execute.return_value = (0, '100', '')
+ self.assertRaises(AssertionError, p.run, result)
+
+ @mock.patch('yardstick.benchmark.scenarios.networking.ping6.ssh')
+ def test_ping_unsuccessful_script_error(self, mock_ssh):
+
+ args = {
+ 'options': {'packetsize': 200},
+ 'sla': {'max_rtt': 50}
+ }
+ result = {}
+
+ p = ping6.Ping6(args, self.ctx)
+ p.client = mock_ssh.SSH()
+ mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, result)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
new file mode 100644
index 000000000..418dd39e6
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.vtc_instantiation_validation
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation
+
+
+class VtcInstantiationValidationTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario = dict()
+ scenario['options'] = dict()
+ scenario['options']['default_net_name'] = ''
+ scenario['options']['default_subnet_name'] = ''
+ scenario['options']['vlan_net_1_name'] = ''
+ scenario['options']['vlan_subnet_1_name'] = ''
+ scenario['options']['vlan_net_2_name'] = ''
+ scenario['options']['vlan_subnet_2_name'] = ''
+ scenario['options']['vnic_type'] = ''
+ scenario['options']['vtc_flavor'] = ''
+ scenario['options']['packet_size'] = ''
+ scenario['options']['vlan_sender'] = ''
+ scenario['options']['vlan_receiver'] = ''
+
+ self.vt = vtc_instantiation_validation.VtcInstantiationValidation(scenario, '')
+
+ def test_run_for_success(self):
+ result = {}
+ self.vt.run(result)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
new file mode 100644
index 000000000..e0a46241c
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_instantiation_validation_noisy.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.vtc_instantiation_validation_noisy
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation_noisy
+
+
+class VtcInstantiationValidationNoisyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario = dict()
+ scenario['options'] = dict()
+ scenario['options']['default_net_name'] = ''
+ scenario['options']['default_subnet_name'] = ''
+ scenario['options']['vlan_net_1_name'] = ''
+ scenario['options']['vlan_subnet_1_name'] = ''
+ scenario['options']['vlan_net_2_name'] = ''
+ scenario['options']['vlan_subnet_2_name'] = ''
+ scenario['options']['vnic_type'] = ''
+ scenario['options']['vtc_flavor'] = ''
+ scenario['options']['packet_size'] = ''
+ scenario['options']['vlan_sender'] = ''
+ scenario['options']['vlan_receiver'] = ''
+ scenario['options']['num_of_neighbours'] = '1'
+ scenario['options']['amount_of_ram'] = '1G'
+ scenario['options']['number_of_cores'] = '1'
+
+ self.vt = vtc_instantiation_validation_noisy.VtcInstantiationValidationNoisy(scenario, '')
+
+ def test_run_for_success(self):
+ result = {}
+ self.vt.run(result)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
new file mode 100644
index 000000000..ecdf555d2
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.vtc_throughput
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vtc_throughput
+
+
+class VtcThroughputTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario = dict()
+ scenario['options'] = dict()
+ scenario['options']['default_net_name'] = ''
+ scenario['options']['default_subnet_name'] = ''
+ scenario['options']['vlan_net_1_name'] = ''
+ scenario['options']['vlan_subnet_1_name'] = ''
+ scenario['options']['vlan_net_2_name'] = ''
+ scenario['options']['vlan_subnet_2_name'] = ''
+ scenario['options']['vnic_type'] = ''
+ scenario['options']['vtc_flavor'] = ''
+ scenario['options']['packet_size'] = ''
+ scenario['options']['vlan_sender'] = ''
+ scenario['options']['vlan_receiver'] = ''
+
+ self.vt = vtc_throughput.VtcThroughput(scenario, '')
+
+ def test_run_for_success(self):
+ result = {}
+ self.vt.run(result)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
new file mode 100644
index 000000000..98957b1de
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_vtc_throughput_noisy_test.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.vtc_throughput_noisy
+
+import mock
+import unittest
+
+from yardstick.benchmark.scenarios.networking import vtc_throughput_noisy
+
+
+class VtcThroughputNoisyTestCase(unittest.TestCase):
+
+ def setUp(self):
+ scenario = dict()
+ scenario['options'] = dict()
+ scenario['options']['default_net_name'] = ''
+ scenario['options']['default_subnet_name'] = ''
+ scenario['options']['vlan_net_1_name'] = ''
+ scenario['options']['vlan_subnet_1_name'] = ''
+ scenario['options']['vlan_net_2_name'] = ''
+ scenario['options']['vlan_subnet_2_name'] = ''
+ scenario['options']['vnic_type'] = ''
+ scenario['options']['vtc_flavor'] = ''
+ scenario['options']['packet_size'] = ''
+ scenario['options']['vlan_sender'] = ''
+ scenario['options']['vlan_receiver'] = ''
+ scenario['options']['num_of_neighbours'] = '1'
+ scenario['options']['amount_of_ram'] = '1G'
+ scenario['options']['number_of_cores'] = '1'
+
+ self.vt = vtc_throughput_noisy.VtcThroughputNoisy(scenario, '')
+
+ def test_run_for_success(self):
+ result = {}
+ self.vt.run(result)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()
diff --git a/tests/unit/benchmark/scenarios/parser/__init__.py b/tests/unit/benchmark/scenarios/parser/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/parser/__init__.py
diff --git a/tests/unit/benchmark/scenarios/parser/test_parser.py b/tests/unit/benchmark/scenarios/parser/test_parser.py
new file mode 100644
index 000000000..d11a6d5c8
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/parser/test_parser.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.parser.Parser
+
+import mock
+import unittest
+import json
+
+from yardstick.benchmark.scenarios.parser import parser
+
+@mock.patch('yardstick.benchmark.scenarios.parser.parser.subprocess')
+class ParserTestCase(unittest.TestCase):
+
+ def setUp(self):
+ pass
+
+ def test_parser_successful_setup(self, mock_subprocess):
+
+ p = parser.Parser({}, {})
+ mock_subprocess.call.return_value = 0
+ p.setup()
+ self.assertEqual(p.setup_done, True)
+
+ def test_parser_successful(self, mock_subprocess):
+ args = {
+ 'options': {'yangfile':'/root/yardstick/samples/yang.yaml',
+ 'toscafile':'/root/yardstick/samples/tosca.yaml'},
+ }
+ p = parser.Parser(args, {})
+ result = {}
+ mock_subprocess.call.return_value = 0
+ sample_output = '{"yangtotosca": "success"}'
+
+ p.run(result)
+ expected_result = json.loads(sample_output)
+
+ def test_parser_teardown_successful(self, mock_subprocess):
+
+ p = parser.Parser({}, {})
+ mock_subprocess.call.return_value = 0
+ p.teardown()
+ self.assertEqual(p.teardown_done, True)
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()