Diffstat (limited to 'tests/unit/benchmark/scenarios')
-rwxr-xr-x  tests/unit/benchmark/scenarios/networking/netperf_sample_output.json    1
-rwxr-xr-x  tests/unit/benchmark/scenarios/networking/test_netperf.py             127
2 files changed, 128 insertions, 0 deletions
diff --git a/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json b/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
new file mode 100755
index 000000000..bba76cfa5
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
@@ -0,0 +1 @@
+{"mean_latency":"9.49","troughput":"823.77","troughput_unit":"10^6bits/s"} \ No newline at end of file
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf.py b/tests/unit/benchmark/scenarios/networking/test_netperf.py
new file mode 100755
index 000000000..d5c19918b
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
+
+import json
+import os
+import unittest
+
+import mock
+
+from yardstick.benchmark.scenarios.networking import netperf
+
+
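+# ssh is patched for the whole class, so every test method receives the
+# mocked module as mock_ssh and no real SSH connections are made.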
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf.ssh')
+class NetperfTestCase(unittest.TestCase):
+
+ def setUp(self):
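+        # Minimal scenario context: host/target addresses and SSH credentials.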
+ self.ctx = {
+ 'host': '172.16.0.137',
+ 'target': '172.16.0.138',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ }
+
+ def test_netperf_successful_setup(self, mock_ssh):
+
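+        # Exit status 0 simulates successful remote setup commands.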
+ p = netperf.Netperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+
+ p.setup()
+ self.assertIsNotNone(p.server)
+ self.assertIsNotNone(p.client)
+        self.assertTrue(p.setup_done)
+
+ def test_netperf_successful_no_sla(self, mock_ssh):
+
+ p = netperf.Netperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
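+        # Give the scenario a mocked SSH handle for the host side.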
+ p.host = mock_ssh.SSH()
+
+ options = {}
+ args = {'options': options}
+
+ sample_output = self._read_sample_output()
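+        # Return the canned netperf JSON as the remote command's stdout.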
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ expected_result = json.loads(sample_output)
+ result = p.run(args)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_successful_sla(self, mock_ssh):
+
+ p = netperf.Netperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 100}
+ }
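+        # Sample mean_latency is 9.49, well within the SLA limit of 100.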
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ expected_result = json.loads(sample_output)
+ result = p.run(args)
+ self.assertEqual(result, expected_result)
+
+ def test_netperf_unsuccessful_sla(self, mock_ssh):
+
+ p = netperf.Netperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
+ options = {}
+ args = {
+ 'options': options,
+ 'sla': {'mean_latency': 5}
+ }
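+        # mean_latency 9.49 exceeds the SLA limit of 5; expect AssertionError.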
+
+ sample_output = self._read_sample_output()
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(AssertionError, p.run, args)
+
+ def test_netperf_unsuccessful_script_error(self, mock_ssh):
+
+ p = netperf.Netperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
+ options = {}
+ args = {'options': options}
+
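+        # A non-zero exit status from the remote command must raise RuntimeError.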
+ mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+ self.assertRaises(RuntimeError, p.run, args)
+
+ def _read_sample_output(self):
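+        # Load the canned netperf output stored alongside this test module.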
+ curr_path = os.path.dirname(os.path.abspath(__file__))
+ output = os.path.join(curr_path, 'netperf_sample_output.json')
+ with open(output) as f:
+ sample_output = f.read()
+ return sample_output
+
+
+def main():
+ unittest.main()
+
+if __name__ == '__main__':
+ main()