author     QiLiang <liangqi1@huawei.com>    2015-10-21 12:29:53 +0000
committer  QiLiang <liangqi1@huawei.com>    2015-10-27 03:34:28 +0000
commit     2e1094d4aee93180126d3ce86db3cc7df2e87bc5 (patch)
tree       221e98fd325ff6fcb4fbbb3e656a3789f3a77342
parent     884926d05f435217c7dac038b3bfbd7e9d05826b (diff)
Heat context code refactor part 2
Heat context code refactor to cater for the evolution of the Yardstick
framework.

Refactor the runner_cfg host/target info handling, as specified at
https://etherpad.opnfv.org/p/yardstick_framework step 4: get general
Context info (use Context.get). Before this refactor the host and target
VMs had to use the same user name and SSH key, which is not general
enough for later extension.

test_case.yaml does NOT need to change.

JIRA: YARDSTICK-168

Change-Id: I5cfe868f3c6f633214ef550bc9676fe1de0709db
Signed-off-by: QiLiang <liangqi1@huawei.com>
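[Editor's illustration, not part of the patch: the refactor splits the old flat context dict into scenario_cfg (options/sla/runner) and context_cfg (per-server connection info), and scenario run() no longer takes args. Values below are made up for the sketch.]

    # Sketch only -- hypothetical values, mirroring the new interface.
    scenario_cfg = {
        "options": {"packetsize": 200},        # scenario options
        "sla": {"max_rtt": 150},               # optional SLA block
    }
    context_cfg = {
        "host":   {"ip": "172.16.0.137", "user": "cirros",
                   "key_filename": "mykey.key"},
        "target": {"ipaddr": "172.16.0.138"},  # target may carry only ipaddr
    }
    result = {}
    # from yardstick.benchmark.scenarios.networking import ping
    # p = ping.Ping(scenario_cfg, context_cfg)   # was: ping.Ping(context)
    # p.run(result)                              # was: p.run(args, result)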
-rw-r--r--  tests/unit/benchmark/contexts/test_heat.py                |  4
-rw-r--r--  tests/unit/benchmark/scenarios/compute/test_cyclictest.py | 34
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_iperf3.py  | 81
-rwxr-xr-x  tests/unit/benchmark/scenarios/networking/test_netperf.py | 57
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_ping.py    | 43
-rw-r--r--  tests/unit/benchmark/scenarios/networking/test_pktgen.py  | 59
-rw-r--r--  tests/unit/benchmark/scenarios/storage/test_fio.py        | 42
-rw-r--r--  yardstick/benchmark/contexts/base.py                      |  8
-rw-r--r--  yardstick/benchmark/contexts/heat.py                      | 23
-rwxr-xr-x  yardstick/benchmark/runners/arithmetic.py                 | 13
-rwxr-xr-x  yardstick/benchmark/runners/base.py                       |  6
-rw-r--r--  yardstick/benchmark/runners/duration.py                   | 13
-rwxr-xr-x  yardstick/benchmark/runners/iteration.py                  | 13
-rw-r--r--  yardstick/benchmark/runners/sequence.py                   | 13
-rw-r--r--  yardstick/benchmark/scenarios/compute/cyclictest.py       | 41
-rw-r--r--  yardstick/benchmark/scenarios/compute/lmbench.py          | 46
-rw-r--r--  yardstick/benchmark/scenarios/compute/perf.py             | 49
-rw-r--r--  yardstick/benchmark/scenarios/networking/iperf3.py        | 95
-rwxr-xr-x  yardstick/benchmark/scenarios/networking/netperf.py       | 88
-rw-r--r--  yardstick/benchmark/scenarios/networking/ping.py          | 58
-rw-r--r--  yardstick/benchmark/scenarios/networking/pktgen.py        | 69
-rw-r--r--  yardstick/benchmark/scenarios/storage/fio.py              | 45
-rwxr-xr-x  yardstick/cmd/commands/task.py                            | 66
23 files changed, 563 insertions, 403 deletions
diff --git a/tests/unit/benchmark/contexts/test_heat.py b/tests/unit/benchmark/contexts/test_heat.py
index bf1174e27..f891b0a5f 100644
--- a/tests/unit/benchmark/contexts/test_heat.py
+++ b/tests/unit/benchmark/contexts/test_heat.py
@@ -110,5 +110,5 @@ class HeatContextTestCase(unittest.TestCase):
'private_ip_attr': 'private_ip'}
result = heat.HeatContext._get_server(self.mock_context, attr_name)
- self.assertEqual(result.public_ip, '127.0.0.1')
- self.assertEqual(result.private_ip, '10.0.0.1')
+ self.assertEqual(result['ip'], '127.0.0.1')
+ self.assertEqual(result['private_ip'], '10.0.0.1')
diff --git a/tests/unit/benchmark/scenarios/compute/test_cyclictest.py b/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
index 28dc4d6b3..a87b39142 100644
--- a/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
+++ b/tests/unit/benchmark/scenarios/compute/test_cyclictest.py
@@ -23,14 +23,16 @@ class CyclictestTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- "host": "192.168.50.28",
- "user": "root",
- "key_filename": "mykey.key"
+ "host": {
+ "ip": "192.168.50.28",
+ "user": "root",
+ "key_filename": "mykey.key"
+ }
}
def test_cyclictest_successful_setup(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
+ c = cyclictest.Cyclictest({}, self.ctx)
c.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
@@ -39,7 +41,6 @@ class CyclictestTestCase(unittest.TestCase):
def test_cyclictest_successful_no_sla(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
@@ -51,6 +52,7 @@ class CyclictestTestCase(unittest.TestCase):
args = {
"options": options,
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
@@ -58,13 +60,12 @@ class CyclictestTestCase(unittest.TestCase):
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- c.run(args, result)
+ c.run(result)
expected_result = json.loads(sample_output)
self.assertEqual(result, expected_result)
def test_cyclictest_successful_sla(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
@@ -82,6 +83,7 @@ class CyclictestTestCase(unittest.TestCase):
"options": options,
"sla": sla
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
@@ -89,58 +91,57 @@ class CyclictestTestCase(unittest.TestCase):
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- c.run(args, result)
+ c.run(result)
expected_result = json.loads(sample_output)
self.assertEqual(result, expected_result)
def test_cyclictest_unsuccessful_sla_min_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_min_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_sla_avg_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_avg_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_sla_max_latency(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
args = {
"options": {},
"sla": {"max_max_latency": 10}
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
sample_output = '{"min": 100, "avg": 500, "max": 1000}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, c.run, args, result)
+ self.assertRaises(AssertionError, c.run, result)
def test_cyclictest_unsuccessful_script_error(self, mock_ssh):
- c = cyclictest.Cyclictest(self.ctx)
options = {
"affinity": 2,
"interval": 100,
@@ -158,12 +159,13 @@ class CyclictestTestCase(unittest.TestCase):
"options": options,
"sla": sla
}
+ c = cyclictest.Cyclictest(args, self.ctx)
result = {}
c.server = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, c.run, args, result)
+ self.assertRaises(RuntimeError, c.run, result)
def main():
diff --git a/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
index 2ec73ebd2..91f800b60 100644
--- a/tests/unit/benchmark/scenarios/networking/test_iperf3.py
+++ b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -26,15 +26,22 @@ class IperfTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138',
+ }
}
def test_iperf_successful_setup(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.setup()
@@ -44,13 +51,13 @@ class IperfTestCase(unittest.TestCase):
def test_iperf_unsuccessful_setup(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.setup)
def test_iperf_successful_teardown(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
+ p = iperf3.Iperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.host = mock_ssh.SSH()
p.target = mock_ssh.SSH()
@@ -61,26 +68,22 @@ class IperfTestCase(unittest.TestCase):
def test_iperf_successful_no_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_successful_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
@@ -88,18 +91,18 @@ class IperfTestCase(unittest.TestCase):
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
@@ -107,16 +110,16 @@ class IperfTestCase(unittest.TestCase):
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_iperf_successful_sla_jitter(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {"udp":"udp","bandwidth":"20m"}
args = {
'options': options,
@@ -124,18 +127,18 @@ class IperfTestCase(unittest.TestCase):
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {"udp":"udp","bandwidth":"20m"}
args = {
'options': options,
@@ -143,22 +146,26 @@ class IperfTestCase(unittest.TestCase):
}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_iperf_unsuccessful_script_error(self, mock_ssh):
- p = iperf3.Iperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = iperf3.Iperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self,filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf.py b/tests/unit/benchmark/scenarios/networking/test_netperf.py
index 4bb5983c3..3f224733c 100755
--- a/tests/unit/benchmark/scenarios/networking/test_netperf.py
+++ b/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -24,15 +24,22 @@ class NetperfTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
}
def test_netperf_successful_setup(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
+ p = netperf.Netperf({}, self.ctx)
mock_ssh.SSH().execute.return_value = (0, '', '')
p.setup()
@@ -42,26 +49,22 @@ class NetperfTestCase(unittest.TestCase):
def test_netperf_successful_no_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_successful_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
@@ -69,18 +72,18 @@ class NetperfTestCase(unittest.TestCase):
}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, expected_result)
def test_netperf_unsuccessful_sla(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {
'options': options,
@@ -88,22 +91,26 @@ class NetperfTestCase(unittest.TestCase):
}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
sample_output = self._read_sample_output()
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_netperf_unsuccessful_script_error(self, mock_ssh):
- p = netperf.Netperf(self.ctx)
- mock_ssh.SSH().execute.return_value = (0, '', '')
- p.host = mock_ssh.SSH()
-
options = {}
args = {'options': options}
result = {}
+ p = netperf.Netperf(args, self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self):
curr_path = os.path.dirname(os.path.abspath(__file__))
diff --git a/tests/unit/benchmark/scenarios/networking/test_ping.py b/tests/unit/benchmark/scenarios/networking/test_ping.py
index b2c5b9859..3a897d0f8 100644
--- a/tests/unit/benchmark/scenarios/networking/test_ping.py
+++ b/tests/unit/benchmark/scenarios/networking/test_ping.py
@@ -21,71 +21,72 @@ class PingTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': "mykey.key"
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
}
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_successful_no_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138'
}
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_successful_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 150}
}
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- p.run(args, result)
+ p.run(result)
self.assertEqual(result, {'rtt': 100.0})
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_unsuccessful_sla(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 50}
- }
+ }
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (0, '100', '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
@mock.patch('yardstick.benchmark.scenarios.networking.ping.ssh')
def test_ping_unsuccessful_script_error(self, mock_ssh):
- p = ping.Ping(self.ctx)
-
args = {
'options': {'packetsize': 200},
- 'ipaddr': '172.16.0.138',
'sla': {'max_rtt': 50}
- }
+ }
result = {}
+ p = ping.Ping(args, self.ctx)
+
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def main():
diff --git a/tests/unit/benchmark/scenarios/networking/test_pktgen.py b/tests/unit/benchmark/scenarios/networking/test_pktgen.py
index ae4481f0e..13a4c1bd4 100644
--- a/tests/unit/benchmark/scenarios/networking/test_pktgen.py
+++ b/tests/unit/benchmark/scenarios/networking/test_pktgen.py
@@ -23,19 +23,25 @@ class PktgenTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'root',
+ 'key_filename': 'mykey.key'
+ },
+ 'target': {
+ 'ip': '172.16.0.138',
+ 'user': 'root',
+ 'key_filename': 'mykey.key',
+ 'ipaddr': '172.16.0.138'
+ }
}
def test_pktgen_successful_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60},
- 'ipaddr': '172.16.0.139'
}
+ p = pktgen.Pktgen(args, self.ctx)
p.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
@@ -45,11 +51,10 @@ class PktgenTestCase(unittest.TestCase):
def test_pktgen_successful_iptables_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
@@ -64,11 +69,11 @@ class PktgenTestCase(unittest.TestCase):
def test_pktgen_unsuccessful_iptables_setup(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
@@ -77,11 +82,11 @@ class PktgenTestCase(unittest.TestCase):
def test_pktgen_successful_iptables_get_result(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
@@ -95,11 +100,12 @@ class PktgenTestCase(unittest.TestCase):
def test_pktgen_unsuccessful_iptables_get_result(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
+
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.number_of_ports = args['options']['number_of_ports']
@@ -108,13 +114,13 @@ class PktgenTestCase(unittest.TestCase):
def test_pktgen_successful_no_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139'
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
@@ -126,20 +132,21 @@ class PktgenTestCase(unittest.TestCase):
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = json.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
def test_pktgen_successful_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 10000}
}
result = {}
+
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
@@ -151,21 +158,21 @@ class PktgenTestCase(unittest.TestCase):
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = json.loads(sample_output)
expected_result["packets_received"] = 149300
self.assertEqual(result, expected_result)
def test_pktgen_unsuccessful_sla(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 1000}
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
@@ -176,23 +183,23 @@ class PktgenTestCase(unittest.TestCase):
sample_output = '{"packets_per_second": 9753, "errors": 0, \
"packets_sent": 149776, "flows": 110}'
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_pktgen_unsuccessful_script_error(self, mock_ssh):
- p = pktgen.Pktgen(self.ctx)
args = {
'options': {'packetsize': 60, 'number_of_ports': 10},
- 'ipaddr': '172.16.0.139',
'sla': {'max_ppm': 1000}
}
result = {}
+ p = pktgen.Pktgen(args, self.ctx)
+
p.server = mock_ssh.SSH()
p.client = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def main():
diff --git a/tests/unit/benchmark/scenarios/storage/test_fio.py b/tests/unit/benchmark/scenarios/storage/test_fio.py
index b47aed968..ac8aa0684 100644
--- a/tests/unit/benchmark/scenarios/storage/test_fio.py
+++ b/tests/unit/benchmark/scenarios/storage/test_fio.py
@@ -24,9 +24,11 @@ class FioTestCase(unittest.TestCase):
def setUp(self):
self.ctx = {
- 'host': '172.16.0.137',
- 'user': 'cirros',
- 'key_filename': 'mykey.key'
+ 'host': {
+ 'ip': '172.16.0.137',
+ 'user': 'cirros',
+ 'key_filename': 'mykey.key'
+ }
}
self.sample_output = {
'read': 'fio_read_sample_output.json',
@@ -36,7 +38,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_successful_setup(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -44,6 +45,7 @@ class FioTestCase(unittest.TestCase):
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
p.setup()
mock_ssh.SSH().execute.return_value = (0, '', '')
@@ -52,7 +54,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_successful_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -60,6 +61,7 @@ class FioTestCase(unittest.TestCase):
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
@@ -67,7 +69,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
@@ -77,7 +79,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_successful_read_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -85,6 +86,7 @@ class FioTestCase(unittest.TestCase):
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
@@ -92,7 +94,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['read'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
'"read_lat": 108.7}'
@@ -101,7 +103,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_successful_write_no_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -109,6 +110,7 @@ class FioTestCase(unittest.TestCase):
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
@@ -116,7 +118,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['write'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
'"write_lat": 111.74}'
@@ -125,7 +127,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_successful_lat_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -136,6 +137,7 @@ class FioTestCase(unittest.TestCase):
'options': options,
'sla': {'write_lat': 300.1}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
@@ -143,7 +145,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
@@ -154,7 +156,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_unsuccessful_lat_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -165,17 +166,17 @@ class FioTestCase(unittest.TestCase):
'options': options,
'sla': {'write_lat': 200.1}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_fio_successful_bw_iops_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -186,6 +187,7 @@ class FioTestCase(unittest.TestCase):
'options': options,
'sla': {'read_iops': 20000}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
@@ -193,7 +195,7 @@ class FioTestCase(unittest.TestCase):
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- p.run(args, result)
+ p.run(result)
expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
'"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
@@ -203,7 +205,6 @@ class FioTestCase(unittest.TestCase):
def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -214,17 +215,17 @@ class FioTestCase(unittest.TestCase):
'options': options,
'sla': {'read_iops': 30000}
}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
- self.assertRaises(AssertionError, p.run, args, result)
+ self.assertRaises(AssertionError, p.run, result)
def test_fio_unsuccessful_script_error(self, mock_ssh):
- p = fio.Fio(self.ctx)
options = {
'filename': '/home/ec2-user/data.raw',
'bs': '4k',
@@ -232,12 +233,13 @@ class FioTestCase(unittest.TestCase):
'ramp_time': 10
}
args = {'options': options}
+ p = fio.Fio(args, self.ctx)
result = {}
p.client = mock_ssh.SSH()
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
- self.assertRaises(RuntimeError, p.run, args, result)
+ self.assertRaises(RuntimeError, p.run, result)
def _read_sample_output(self, file_name):
curr_path = os.path.dirname(os.path.abspath(__file__))
diff --git a/yardstick/benchmark/contexts/base.py b/yardstick/benchmark/contexts/base.py
index ae860accd..76a828811 100644
--- a/yardstick/benchmark/contexts/base.py
+++ b/yardstick/benchmark/contexts/base.py
@@ -48,12 +48,12 @@ class Context(object):
@abc.abstractmethod
def _get_server(self, attr_name):
- '''get server object by name from context
+ '''get server info by name from context
'''
@staticmethod
def get_server(attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
@@ -64,7 +64,7 @@ class Context(object):
break
if server is None:
- raise ValueError("context not found for server '%s'" %
- attr_name["name"])
+ raise ValueError("context not found for server '%r'" %
+ attr_name)
return server
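[Editor's note: a hedged sketch of what callers get back from Context.get_server after this change -- a plain info dict instead of a Server object; values are hypothetical.]

    # Hedged sketch: the dict shape returned by Context.get_server
    # after this patch (hypothetical values).
    server_info = {
        "user": "ubuntu",
        "key_filename": "/path/to/files/yardstick_key",
        "private_ip": "10.0.0.1",
        "ip": "127.0.0.1",  # present only when the server has a public IP
    }
    # Scenarios read connection details directly from the dict:
    user = server_info.get("user", "ubuntu")
    ip = server_info.get("ip", server_info.get("private_ip"))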
diff --git a/yardstick/benchmark/contexts/heat.py b/yardstick/benchmark/contexts/heat.py
index 9cf29981b..7bd430bc5 100644
--- a/yardstick/benchmark/contexts/heat.py
+++ b/yardstick/benchmark/contexts/heat.py
@@ -8,6 +8,7 @@
##############################################################################
import sys
+import pkg_resources
from yardstick.benchmark.contexts.base import Context
from yardstick.benchmark.contexts.model import Server
@@ -195,10 +196,13 @@ class HeatContext(Context):
print "Context '%s' undeployed" % self.name
def _get_server(self, attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
+ key_filename = pkg_resources.resource_filename(
+ 'yardstick.resources', 'files/yardstick_key')
+
if type(attr_name) is dict:
cname = attr_name["name"].split(".")[1]
if cname != self.name:
@@ -216,8 +220,21 @@ class HeatContext(Context):
server = Server(attr_name["name"].split(".")[0], self, {})
server.public_ip = public_ip
server.private_ip = private_ip
- return server
else:
if attr_name not in self._server_map:
return None
- return self._server_map[attr_name]
+ server = self._server_map[attr_name]
+
+ if server is None:
+ return None
+
+ result = {
+ "user": server.context.user,
+ "key_filename": key_filename,
+ "private_ip": server.private_ip
+ }
+ # Target server may only have private_ip
+ if server.public_ip:
+ result["ip"] = server.public_ip
+
+ return result
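[Editor's note on the pkg_resources call introduced above: it resolves a file shipped inside the yardstick.resources package to an absolute filesystem path. Sketch below assumes yardstick is installed.]

    # Sketch: resolve the shared SSH key bundled with the package.
    import pkg_resources
    key_filename = pkg_resources.resource_filename(
        'yardstick.resources', 'files/yardstick_key')
    # e.g. .../site-packages/yardstick/resources/files/yardstick_key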
diff --git a/yardstick/benchmark/runners/arithmetic.py b/yardstick/benchmark/runners/arithmetic.py
index 68c8bfdef..af2303479 100755
--- a/yardstick/benchmark/runners/arithmetic.py
+++ b/yardstick/benchmark/runners/arithmetic.py
@@ -22,7 +22,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -40,12 +40,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
LOG.info("worker START, step(%s, %d, %d, %d), class %s",
arg_name, start, stop, step, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -63,7 +64,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -129,8 +130,8 @@ class ArithmeticRunner(base.Runner):
__execution_type__ = 'Arithmetic'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/base.py b/yardstick/benchmark/runners/base.py
index cc8c93cb6..d443806a7 100755
--- a/yardstick/benchmark/runners/base.py
+++ b/yardstick/benchmark/runners/base.py
@@ -169,7 +169,6 @@ class Runner(object):
Runner.release(runner)
def __init__(self, config, queue):
- self.context = {}
self.config = config
self.periodic_action_process = None
self.result_queue = queue
@@ -189,7 +188,8 @@ class Runner(object):
log.debug("post-stop data: \n%s" % data)
self.result_queue.put({'post-stop-action-data': data})
- def run(self, scenario_type, scenario_cfg):
+ def run(self, scenario_cfg, context_cfg):
+ scenario_type = scenario_cfg["type"]
class_name = base_scenario.Scenario.get(scenario_type)
path_split = class_name.split(".")
module_path = ".".join(path_split[:-1])
@@ -228,7 +228,7 @@ class Runner(object):
self.result_queue))
self.periodic_action_process.start()
- self._run_benchmark(cls, "run", scenario_cfg)
+ self._run_benchmark(cls, "run", scenario_cfg, context_cfg)
def join(self):
self.process.join()
diff --git a/yardstick/benchmark/runners/duration.py b/yardstick/benchmark/runners/duration.py
index e4ad037af..40e0aa708 100644
--- a/yardstick/benchmark/runners/duration.py
+++ b/yardstick/benchmark/runners/duration.py
@@ -21,7 +21,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -33,7 +33,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
@@ -42,7 +42,8 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
sla_action = scenario_cfg["sla"].get("action", "assert")
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
start = time.time()
while True:
@@ -54,7 +55,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -109,8 +110,8 @@ If the scenario ends before the time has elapsed, it will be started again.
'''
__execution_type__ = 'Duration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/iteration.py b/yardstick/benchmark/runners/iteration.py
index b6d861d6c..077e0e813 100755
--- a/yardstick/benchmark/runners/iteration.py
+++ b/yardstick/benchmark/runners/iteration.py
@@ -21,7 +21,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -33,12 +33,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -53,7 +54,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -108,8 +109,8 @@ If the scenario ends before the time has elapsed, it will be started again.
'''
__execution_type__ = 'Iteration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/sequence.py b/yardstick/benchmark/runners/sequence.py
index 29f86e19c..a410eea0e 100644
--- a/yardstick/benchmark/runners/sequence.py
+++ b/yardstick/benchmark/runners/sequence.py
@@ -22,7 +22,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -42,12 +42,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
LOG.info("worker START, sequence_values(%s, %s), class %s",
arg_name, sequence_values, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -63,7 +64,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -121,8 +122,8 @@ class SequenceRunner(base.Runner):
__execution_type__ = 'Sequence'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
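[Editor's note: all four runners receive the same mechanical change; a hedged sketch of the shared worker wiring after this patch, simplified with error handling and the result queue omitted.]

    # Sketch of the common pattern across the runner worker processes.
    def _worker_sketch(cls, method_name, scenario_cfg, context_cfg):
        benchmark = cls(scenario_cfg, context_cfg)  # was: cls(runner_cfg)
        benchmark.setup()
        method = getattr(benchmark, method_name)
        data = {}
        method(data)                    # was: method(scenario_cfg, data)
        return data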
diff --git a/yardstick/benchmark/scenarios/compute/cyclictest.py b/yardstick/benchmark/scenarios/compute/cyclictest.py
index 595986f8a..e8fc63cf7 100644
--- a/yardstick/benchmark/scenarios/compute/cyclictest.py
+++ b/yardstick/benchmark/scenarios/compute/cyclictest.py
@@ -54,8 +54,9 @@ class Cyclictest(base.Scenario):
TARGET_SCRIPT = "cyclictest_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -63,13 +64,14 @@ class Cyclictest(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Cyclictest.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.debug("user:%s, host:%s", user, host)
+ LOG.debug("user:%s, host:%s", user, ip)
print "key_filename:" + key_filename
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -78,14 +80,14 @@ class Cyclictest(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-m -n -q"
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
affinity = options.get("affinity", 1)
interval = options.get("interval", 1000)
priority = options.get("priority", 99)
@@ -104,13 +106,14 @@ class Cyclictest(base.Scenario):
result.update(json.loads(stdout))
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for t, latency in result.items():
- if 'max_%s_latency' % t not in args['sla']:
+ if 'max_%s_latency' % t not in self.scenario_cfg['sla']:
continue
- sla_latency = int(args['sla']['max_%s_latency' % t])
+ sla_latency = int(self.scenario_cfg['sla'][
+ 'max_%s_latency' % t])
latency = int(latency)
if latency > sla_latency:
sla_error += "%s latency %d > sla:max_%s_latency(%d); " % \
@@ -123,16 +126,16 @@ def _test():
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "192.168.50.28",
- "user": "root",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- cyclictest = Cyclictest(ctx)
-
options = {
"affinity": 2,
"interval": 100,
@@ -150,8 +153,10 @@ def _test():
"options": options,
"sla": sla
}
+ result = {}
- result = cyclictest.run(args)
+ cyclictest = Cyclictest(args, ctx)
+ cyclictest.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/compute/lmbench.py b/yardstick/benchmark/scenarios/compute/lmbench.py
index d2558c936..03caff525 100644
--- a/yardstick/benchmark/scenarios/compute/lmbench.py
+++ b/yardstick/benchmark/scenarios/compute/lmbench.py
@@ -35,8 +35,9 @@ class Lmbench(base.Scenario):
TARGET_SCRIPT = "lmbench_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -44,12 +45,13 @@ class Lmbench(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Lmbench.TARGET_SCRIPT)
- user = self.context.get("user", "ubuntu")
- host = self.context.get("host", None)
- key_filename = self.context.get('key_filename', "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "ubuntu")
+ ip = host.get("ip", None)
+ key_filename = host.get('key_filename', "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -58,13 +60,13 @@ class Lmbench(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
stride = options.get('stride', 128)
stop_size = options.get('stop_size', 16)
@@ -75,11 +77,10 @@ class Lmbench(base.Scenario):
if status:
raise RuntimeError(stderr)
- result.update(json.loads(stdout))
-
- if "sla" in args:
+ result.update({"latencies": json.loads(stdout)})
+ if "sla" in self.scenario_cfg:
sla_error = ""
- sla_max_latency = int(args['sla']['max_latency'])
+ sla_max_latency = int(self.scenario_cfg['sla']['max_latency'])
for t_latency in result:
latency = t_latency['latency']
if latency > sla_max_latency:
@@ -92,20 +93,23 @@ def _test():
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Lmbench(ctx)
-
options = {'stride': 128, 'stop_size': 16}
-
args = {'options': options}
- result = p.run(args)
+ result = {}
+
+ p = Lmbench(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/compute/perf.py b/yardstick/benchmark/scenarios/compute/perf.py
index 281bd8e0c..f408e9cb4 100644
--- a/yardstick/benchmark/scenarios/compute/perf.py
+++ b/yardstick/benchmark/scenarios/compute/perf.py
@@ -36,20 +36,22 @@ class Perf(base.Scenario):
TARGET_SCRIPT = 'perf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
"""scenario setup"""
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -58,13 +60,13 @@ class Perf(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
events = options.get('events', ['task-clock'])
events_string = ""
@@ -72,7 +74,8 @@ class Perf(base.Scenario):
events_string += event + " "
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
@@ -98,10 +101,11 @@ class Perf(base.Scenario):
result.update(json.loads(stdout))
- if "sla" in args:
- metric = args['sla']['metric']
- exp_val = args['sla']['expected_value']
- smaller_than_exp = 'smaller_than_expected' in args['sla']
+ if "sla" in self.scenario_cfg:
+ metric = self.scenario_cfg['sla']['metric']
+ exp_val = self.scenario_cfg['sla']['expected_value']
+ smaller_than_exp = 'smaller_than_expected' \
+ in self.scenario_cfg['sla']
if metric not in result:
assert False, "Metric (%s) not found." % metric
@@ -118,20 +122,23 @@ def _test():
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Perf(ctx)
-
options = {'load': True}
args = {'options': options}
+ result = {}
- result = p.run(args)
+ p = Perf(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index a324c5b85..86610c88f 100644
--- a/yardstick/benchmark/scenarios/networking/iperf3.py
+++ b/yardstick/benchmark/scenarios/networking/iperf3.py
@@ -48,32 +48,39 @@ For more info see http://software.es.net/iperf
"""
__scenario_type__ = "Iperf3"
- def __init__(self, context):
- self.context = context
- self.user = context.get('user', 'ubuntu')
- self.host_ipaddr = context['host']
- self.target_ipaddr = context['target']
- self.key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
- LOG.debug("setup, key %s", self.key_filename)
- LOG.info("host:%s, user:%s", self.host_ipaddr, self.user)
- self.host = ssh.SSH(self.user, self.host_ipaddr,
- key_filename=self.key_filename)
- self.host.wait(timeout=600)
-
- LOG.info("target:%s, user:%s", self.target_ipaddr, self.user)
- self.target = ssh.SSH(self.user, self.target_ipaddr,
- key_filename=self.key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.target = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.target.wait(timeout=600)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.host = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
+ self.host.wait(timeout=600)
+
cmd = "iperf3 -s -D"
LOG.debug("Starting iperf3 server with command: %s", cmd)
status, _, stderr = self.target.execute(cmd)
if status:
raise RuntimeError(stderr)
+ self.setup_done = True
+
def teardown(self):
LOG.debug("teardown")
self.host.close()
@@ -82,14 +89,17 @@ For more info see http://software.es.net/iperf
LOG.warn(stderr)
self.target.close()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
+ if not self.setup_done:
+ self.setup()
# if run by a duration runner, get the duration time and setup as arg
- time = self.context.get('duration', None)
- options = args['options']
+ time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
+ options = self.scenario_cfg['options']
- cmd = "iperf3 -c %s --json" % (self.target_ipaddr)
+ cmd = "iperf3 -c %s --json" % (self.context_cfg['target']['ipaddr'])
# If there are no options specified
if not options:
@@ -124,8 +134,8 @@ For more info see http://software.es.net/iperf
result.update(json.loads(stdout))
- if "sla" in args:
- sla_iperf = args["sla"]
+ if "sla" in self.scenario_cfg:
+ sla_iperf = self.scenario_cfg["sla"]
if not use_UDP:
sla_bytes_per_second = int(sla_iperf["bytes_per_second"])
@@ -147,31 +157,32 @@ For more info see http://software.es.net/iperf
def _test():
'''internal test function'''
+ key_filename = pkg_resources.resource_filename('yardstick.resources',
+ 'files/yardstick_key')
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['host'] = '10.0.2.33'
- runner_cfg['target_ipaddr'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"bytes": 10000000000}
- scenario_args['sla'] = \
- {"bytes_per_second": 2900000000, "action": "monitor"}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Iperf3", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
+
+ p = Iperf(args, ctx)
+ p.run(result)
+ print result
if __name__ == '__main__':
_test()
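[Editor's note: the duration lookup above moves from the flat context to the runner section of scenario_cfg; a minimal sketch of that pattern with a made-up config.]

    # Sketch: scenarios now read the duration from scenario_cfg["runner"].
    scenario_cfg = {"runner": {"type": "Duration", "duration": 60},
                    "options": {}}
    duration = scenario_cfg["runner"].get("duration", None) \
        if "runner" in scenario_cfg else None
    assert duration == 60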
diff --git a/yardstick/benchmark/scenarios/networking/netperf.py b/yardstick/benchmark/scenarios/networking/netperf.py
index fb5497089..dcd4ef7b6 100755
--- a/yardstick/benchmark/scenarios/networking/netperf.py
+++ b/yardstick/benchmark/scenarios/networking/netperf.py
@@ -50,8 +50,9 @@ class Netperf(base.Scenario):
TARGET_SCRIPT = 'netperf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -59,18 +60,24 @@ class Netperf(base.Scenario):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Netperf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
# netserver start automatically during the vm boot
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -79,17 +86,18 @@ class Netperf(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
# get global options
- ipaddr = args.get("ipaddr", '127.0.0.1')
- options = args['options']
+ ipaddr = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
+ options = self.scenario_cfg['options']
testname = options.get("testname", 'TCP_STREAM')
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
arithmetic_time = options.get("duration", None)
if duration_time:
testlen = duration_time
@@ -125,8 +133,9 @@ class Netperf(base.Scenario):
# sla check
mean_latency = float(result['mean_latency'])
- if "sla" in args:
- sla_max_mean_latency = int(args["sla"]["mean_latency"])
+ if "sla" in self.scenario_cfg:
+ sla_max_mean_latency = int(
+ self.scenario_cfg["sla"]["mean_latency"])
assert mean_latency <= sla_max_mean_latency, \
"mean_latency %f > sla_max_mean_latency(%f); " % \
@@ -135,28 +144,35 @@ class Netperf(base.Scenario):
def _test():
'''internal test function'''
- logger = logging.getLogger('yardstick')
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename,
+ "ipaddr": "10.229.47.137"
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['clinet'] = '10.0.2.33'
- runner_cfg['server'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"testname": 'TCP_STREAM'}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Netperf", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {
+ "testname": 'TCP_STREAM'
+ }
+
+ args = {"options": options}
+ result = {}
+
+ netperf = Netperf(args, ctx)
+ netperf.run(result)
+ print result
if __name__ == '__main__':
_test()
diff --git a/yardstick/benchmark/scenarios/networking/ping.py b/yardstick/benchmark/scenarios/networking/ping.py
index 10964350b..34278b90f 100644
--- a/yardstick/benchmark/scenarios/networking/ping.py
+++ b/yardstick/benchmark/scenarios/networking/ping.py
@@ -32,28 +32,31 @@ class Ping(base.Scenario):
TARGET_SCRIPT = 'ping_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking', Ping.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
+ LOG.info("user:%s, host:%s", user, ip)
- self.connection = ssh.SSH(user, host, key_filename=key_filename)
+ self.connection = ssh.SSH(user, ip, key_filename=key_filename)
self.connection.wait()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
- if "options" in args:
- options = "-s %s" % args['options'].get("packetsize", '56')
+ if "options" in self.scenario_cfg:
+ options = "-s %s" % \
+ self.scenario_cfg['options'].get("packetsize", '56')
else:
options = ""
- destination = args.get("ipaddr", '127.0.0.1')
+ destination = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
LOG.debug("ping '%s' '%s'", options, destination)
@@ -66,7 +69,36 @@ class Ping(base.Scenario):
result["rtt"] = float(stdout)
- if "sla" in args:
- sla_max_rtt = int(args["sla"]["max_rtt"])
+ if "sla" in self.scenario_cfg:
+ sla_max_rtt = int(self.scenario_cfg["sla"]["max_rtt"])
assert result["rtt"] <= sla_max_rtt, "rtt %f > sla:max_rtt(%f); " % \
(result["rtt"], sla_max_rtt)
+
+
+def _test():
+ '''internal test function'''
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
+ logger.setLevel(logging.DEBUG)
+
+ args = {}
+ result = {}
+
+ p = Ping(args, ctx)
+ p.run(result)
+ print result
+
+if __name__ == '__main__':
+ _test()
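
The refactored run() fills the caller-supplied result dict in place and
signals an SLA breach with an AssertionError instead of a return value. A
hedged sketch of that behavior, assuming the ctx from _test() above points
at a reachable host; the packetsize and max_rtt values are made up:

    # run() mutates result and raises AssertionError on an SLA breach.
    scenario_cfg = {"options": {"packetsize": 100}, "sla": {"max_rtt": 10}}
    result = {}
    p = Ping(scenario_cfg, ctx)
    try:
        p.run(result)                 # fills result["rtt"] in place
    except AssertionError as err:
        print "SLA violated:", err    # e.g. rtt 12.3 > sla:max_rtt(10)
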
diff --git a/yardstick/benchmark/scenarios/networking/pktgen.py b/yardstick/benchmark/scenarios/networking/pktgen.py
index f373fd2ec..9dac4c90c 100644
--- a/yardstick/benchmark/scenarios/networking/pktgen.py
+++ b/yardstick/benchmark/scenarios/networking/pktgen.py
@@ -37,8 +37,9 @@ class Pktgen(base.Scenario):
TARGET_SCRIPT = 'pktgen_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -46,17 +47,23 @@ class Pktgen(base.Scenario):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Pktgen.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
-
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -86,19 +93,20 @@ class Pktgen(base.Scenario):
raise RuntimeError(stderr)
return int(stdout)
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- ipaddr = args.get("ipaddr", '127.0.0.1')
+ ipaddr = self.context_cfg["target"].get("ipaddr", '127.0.0.1')
- options = args['options']
+ options = self.scenario_cfg['options']
packetsize = options.get("packetsize", 60)
self.number_of_ports = options.get("number_of_ports", 10)
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
@@ -123,11 +131,11 @@ class Pktgen(base.Scenario):
result['packets_received'] = self._iptables_get_result()
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sent = result['packets_sent']
received = result['packets_received']
ppm = 1000000 * (sent - received) / sent
- sla_max_ppm = int(args["sla"]["max_ppm"])
+ sla_max_ppm = int(self.scenario_cfg["sla"]["max_ppm"])
assert ppm <= sla_max_ppm, "ppm %d > sla_max_ppm %d; " \
% (ppm, sla_max_ppm)
@@ -136,22 +144,29 @@ def _test():
'''internal test function'''
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Pktgen(ctx)
-
options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
- args = {'options': options,
- 'ipaddr': '192.168.111.31'}
- result = p.run(args)
+ p = Pktgen(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
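
The pktgen SLA above is expressed in lost packets per million (ppm). A short
worked example of the computation in run(), with made-up packet counts:

    # Worked example of the ppm check: 30 packets lost out of 600000
    # sent gives 50 ppm, so an "sla: {max_ppm: 1000}" entry would pass.
    sent, received = 600000, 599970
    ppm = 1000000 * (sent - received) / sent   # integer math, as in run()
    assert ppm == 50
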
diff --git a/yardstick/benchmark/scenarios/storage/fio.py b/yardstick/benchmark/scenarios/storage/fio.py
index af90b0703..8969472e9 100644
--- a/yardstick/benchmark/scenarios/storage/fio.py
+++ b/yardstick/benchmark/scenarios/storage/fio.py
@@ -48,8 +48,9 @@ class Fio(base.Scenario):
TARGET_SCRIPT = "fio_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -57,12 +58,13 @@ class Fio(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.storage",
Fio.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -71,7 +73,7 @@ class Fio(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-ioengine=libaio -direct=1 -group_reporting " \
"-numjobs=1 -time_based --output-format=json"
@@ -79,7 +81,7 @@ class Fio(base.Scenario):
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
filename = options.get("filename", "/home/ec2-user/data.raw")
bs = options.get("bs", "4k")
iodepth = options.get("iodepth", "1")
@@ -87,7 +89,8 @@ class Fio(base.Scenario):
ramp_time = options.get("ramp_time", 20)
name = "yardstick-fio"
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
@@ -122,20 +125,20 @@ class Fio(base.Scenario):
result["write_iops"] = raw_data["jobs"][0]["write"]["iops"]
result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for k, v in result.items():
- if k not in args['sla']:
+ if k not in self.scenario_cfg['sla']:
continue
if "lat" in k:
                    # For latency, a smaller value is better
- max_v = float(args['sla'][k])
+ max_v = float(self.scenario_cfg['sla'][k])
if v > max_v:
sla_error += "%s %f > sla:%s(%f); " % (k, v, k, max_v)
else:
                    # For bandwidth and iops, a bigger value is better
- min_v = int(args['sla'][k])
+ min_v = int(self.scenario_cfg['sla'][k])
if v < min_v:
sla_error += "%s %d < " \
"sla:%s(%d); " % (k, v, k, min_v)
@@ -148,16 +151,16 @@ def _test():
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "10.0.0.101",
- "user": "ec2-user",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- fio = Fio(ctx)
-
options = {
"filename": "/home/ec2-user/data.raw",
"bs": "4k",
@@ -166,9 +169,11 @@ def _test():
"ramp_time": 1,
"duration": 10
}
+ result = {}
args = {"options": options}
- result = fio.run(args)
+ fio = Fio(args, ctx)
+ fio.run(result)
print result
if __name__ == '__main__':
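
The fio SLA loop above treats latency keys as upper bounds and
bandwidth/iops keys as lower bounds ("lat" in the key name selects the
direction). A sketch of a matching sla section; the threshold numbers and
units are illustrative only:

    # "*_lat" keys fail when the measured value is higher; all other
    # keys (bandwidth, iops) fail when the measured value is lower.
    scenario_cfg["sla"] = {
        "read_lat": 500.0,   # upper bound on mean read latency
        "read_iops": 9000,   # lower bound on read iops
        "read_bw": 36000,    # lower bound on read bandwidth
    }
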
diff --git a/yardstick/cmd/commands/task.py b/yardstick/cmd/commands/task.py
index 5eb38989a..8db6e77e6 100755
--- a/yardstick/cmd/commands/task.py
+++ b/yardstick/cmd/commands/task.py
@@ -13,7 +13,6 @@ import sys
import os
import yaml
import atexit
-import pkg_resources
import ipaddress
from yardstick.benchmark.contexts.base import Context
@@ -242,42 +241,61 @@ def is_ip_addr(addr):
return False
-def run_one_scenario(scenario_cfg, output_file):
- '''run one scenario using context'''
- key_filename = pkg_resources.resource_filename(
- 'yardstick.resources', 'files/yardstick_key')
+def _is_same_heat_context(host_attr, target_attr):
+ '''check if two servers are in the same heat context
+ host_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ target_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ '''
+ host = None
+ target = None
+ for context in Context.list:
+ if context.__context_type__ != "Heat":
+ continue
+
+ host = context._get_server(host_attr)
+ if host is None:
+ continue
+
+ target = context._get_server(target_attr)
+ if target is None:
+ return False
+
+        # Both host and target are not None, so they are in the
+        # same heat context.
+ return True
+
+ return False
- # TODO support get multi hosts/vms info
- host = Context.get_server(scenario_cfg["host"])
+def run_one_scenario(scenario_cfg, output_file):
+ '''run one scenario using context'''
runner_cfg = scenario_cfg["runner"]
- runner_cfg['host'] = host.public_ip
- runner_cfg['user'] = host.context.user
- runner_cfg['key_filename'] = key_filename
runner_cfg['output_filename'] = output_file
+    # TODO: support getting info for multiple hosts/VMs
+ context_cfg = {}
+ context_cfg['host'] = Context.get_server(scenario_cfg["host"])
+
if "target" in scenario_cfg:
if is_ip_addr(scenario_cfg["target"]):
- scenario_cfg["ipaddr"] = scenario_cfg["target"]
+ context_cfg['target'] = {}
+ context_cfg['target']["ipaddr"] = scenario_cfg["target"]
else:
- target = Context.get_server(scenario_cfg["target"])
-
- # get public IP for target server, some scenarios require it
- if target.public_ip:
- runner_cfg['target'] = target.public_ip
-
- # TODO scenario_cfg["ipaddr"] is bad naming
- if host.context != target.context:
- # target is in another context, get its public IP
- scenario_cfg["ipaddr"] = target.public_ip
+ context_cfg['target'] = Context.get_server(scenario_cfg["target"])
+ if _is_same_heat_context(scenario_cfg["host"],
+ scenario_cfg["target"]):
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["private_ip"]
else:
- # target is in the same context, get its private IP
- scenario_cfg["ipaddr"] = target.private_ip
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["ip"]
runner = base_runner.Runner.get(runner_cfg)
print "Starting runner of type '%s'" % runner_cfg["type"]
- runner.run(scenario_cfg["type"], scenario_cfg)
+ runner.run(scenario_cfg, context_cfg)
return runner
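
Condensed, the address-selection rule run_one_scenario() now implements: a
literal IP in scenario_cfg["target"] is used as-is; otherwise the target's
private address is chosen when host and target resolve inside the same heat
context, and its public one when they do not. A sketch, assuming the target
names a server rather than an IP address:

    # Pick which target address the scenario should connect to.
    target = Context.get_server(scenario_cfg["target"])
    same = _is_same_heat_context(scenario_cfg["host"],
                                 scenario_cfg["target"])
    target["ipaddr"] = target["private_ip"] if same else target["ip"]
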