summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--samples/fio.yaml8
-rw-r--r--samples/iperf3-jitter.yaml45
-rw-r--r--samples/test_suite.yaml18
-rw-r--r--tests/opnfv/test_cases/opnfv_yardstick_tc002.yaml43
-rw-r--r--tests/opnfv/test_suites/pod2_daily.yaml10
-rw-r--r--tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json1
-rw-r--r--tests/unit/benchmark/scenarios/networking/test_iperf3.py46
-rw-r--r--tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json1
-rw-r--r--tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json (renamed from tests/unit/benchmark/scenarios/storage/fio_sample_output.json)0
-rw-r--r--tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json1
-rw-r--r--tests/unit/benchmark/scenarios/storage/test_fio.py170
-rw-r--r--yardstick/benchmark/scenarios/networking/iperf3.py30
-rw-r--r--yardstick/benchmark/scenarios/storage/fio.py32
-rwxr-xr-xyardstick/cmd/commands/task.py123
14 files changed, 469 insertions, 59 deletions
diff --git a/samples/fio.yaml b/samples/fio.yaml
index 083c57549..6e77f681a 100644
--- a/samples/fio.yaml
+++ b/samples/fio.yaml
@@ -26,6 +26,14 @@ scenarios:
type: Duration
duration: 60
interval: 1
+ sla:
+ read_bw: 6000
+ read_iops: 1500
+ read_lat: 500.1
+ write_bw: 6000
+ write_iops: 1500
+ write_lat: 500.1
+ action: monitor
context:
name: demo
diff --git a/samples/iperf3-jitter.yaml b/samples/iperf3-jitter.yaml
new file mode 100644
index 000000000..0544c4186
--- /dev/null
+++ b/samples/iperf3-jitter.yaml
@@ -0,0 +1,45 @@
+---
+# Sample benchmark task config file
+# measure packet delay variation (jitter) using iperf3
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+ type: Iperf3
+ options:
+ udp: udp
+ bandwidth: 20m
+ host: zeus.demo
+ target: hera.demo
+
+ runner:
+ type: Duration
+ duration: 3
+
+ sla:
+ jitter: 10
+ action: monitor
+
+context:
+ name: demo
+ image: yardstick-trusty-server
+ flavor: yardstick-flavor
+ user: ec2-user
+
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+
+ servers:
+ zeus:
+ floating_ip: true
+ placement: "pgrp1"
+ hera:
+ floating_ip: true
+ placement: "pgrp1"
+
+ networks:
+ test:
+ cidr: '10.0.1.0/24'
+
diff --git a/samples/test_suite.yaml b/samples/test_suite.yaml
new file mode 100644
index 000000000..1cb6d3ea0
--- /dev/null
+++ b/samples/test_suite.yaml
@@ -0,0 +1,18 @@
+---
+# Sample test suite file
+# Test cases listed in the suite file should be in the tests/opnfv/test_cases directory
+# or specified in test_cases_dir optional variable as done below
+
+schema: "yardstick:suite:0.1"
+
+name: "Sample test suite"
+test_cases_dir: "samples/"
+test_cases:
+-
+ file_name: ping.yaml
+-
+ file_name: ping-template.yaml
+ task_args: '{"packetsize": "200"}'
+-
+ file_name: ping-template.yaml
+ task_args_file: "/tmp/test-args-file.json"
diff --git a/tests/opnfv/test_cases/opnfv_yardstick_tc002.yaml b/tests/opnfv/test_cases/opnfv_yardstick_tc002.yaml
new file mode 100644
index 000000000..c0cff7d76
--- /dev/null
+++ b/tests/opnfv/test_cases/opnfv_yardstick_tc002.yaml
@@ -0,0 +1,43 @@
+---
+# measure network latency using ping
+
+schema: "yardstick:task:0.1"
+scenarios:
+{% for i in range(2) %}
+-
+ type: Ping
+ options:
+ packetsize: 100
+ host: athena.demo
+ target: ares.demo
+
+ runner:
+ type: Duration
+ duration: 600
+ interval: 10
+
+ sla:
+ max_rtt: 10
+ action: monitor
+{% endfor %}
+
+context:
+ name: demo
+ image: cirros-0.3.3
+ flavor: m1.tiny
+ user: cirros
+
+ placement_groups:
+ pgrp1:
+ policy: "availability"
+
+ servers:
+ athena:
+ floating_ip: true
+ placement: "pgrp1"
+ ares:
+ placement: "pgrp1"
+
+ networks:
+ test:
+ cidr: '10.0.1.0/24'
diff --git a/tests/opnfv/test_suites/pod2_daily.yaml b/tests/opnfv/test_suites/pod2_daily.yaml
new file mode 100644
index 000000000..15252bee8
--- /dev/null
+++ b/tests/opnfv/test_suites/pod2_daily.yaml
@@ -0,0 +1,10 @@
+---
+# LF POD 2 daily task suite
+
+schema: "yardstick:suite:0.1"
+
+name: "opnfv_pod2_daily"
+test_cases_dir: "tests/opnfv/test_cases/"
+test_cases:
+-
+ file_name: opnfv_yardstick_tc002.yaml
diff --git a/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json b/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
new file mode 100644
index 000000000..8173c8f64
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/iperf3_sample_output_udp.json
@@ -0,0 +1 @@
+{"start":{"connected":[{"socket":4, "local_host":"10.0.1.2", "local_port":46384, "remote_host":"172.16.9.195", "remote_port":5201}], "version":"iperf 3.0.7", "system_info":"Linux zeus 3.13.0-61-generic #100-Ubuntu SMP Wed Jul 29 11:21:34 UTC 2015 x86_64 x86_64 x86_64 GNU/Linux\n", "timestamp":{"time":"Tue, 29 Sep 2015 01:48:23 GMT", "timesecs":1443491303}, "connecting_to":{"host":"172.16.9.195", "port":5201}, "cookie":"zeus.1443491303.539703.3479129b58a5b", "test_start":{"protocol":"UDP", "num_streams":1, "blksize":8192, "omit":0, "duration":10, "bytes":0, "blocks":0, "reverse":0}}, "intervals":[{"streams":[{"socket":4, "start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}], "sum":{"start":0, "end":1.00022, "seconds":1.00022, "bytes":2252800, "bits_per_second":1.80184e+07, "packets":275, "omitted":false}}, {"streams":[{"socket":4, "start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}], "sum":{"start":1.00022, "end":2.00022, "seconds":0.999993, "bytes":2498560, "bits_per_second":1.99886e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":2.00022, "end":3.00022, "seconds":1, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}], "sum":{"start":3.00022, "end":4.00022, "seconds":1, "bytes":2498560, "bits_per_second":19988480, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, "bits_per_second":1.99889e+07, "packets":305, "omitted":false}], "sum":{"start":4.00022, "end":5.0002, "seconds":0.999977, "bytes":2498560, 
"bits_per_second":1.99889e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}], "sum":{"start":5.0002, "end":6.00024, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99877e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}], "sum":{"start":6.00024, "end":7.00023, "seconds":0.999998, "bytes":2498560, "bits_per_second":1.99885e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}], "sum":{"start":7.00023, "end":8.00023, "seconds":0.999999, "bytes":2506752, "bits_per_second":2.0054e+07, "packets":306, "omitted":false}}, {"streams":[{"socket":4, "start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}], "sum":{"start":8.00023, "end":9.00018, "seconds":0.999945, "bytes":2498560, "bits_per_second":1.99896e+07, "packets":305, "omitted":false}}, {"streams":[{"socket":4, "start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}], "sum":{"start":9.00018, "end":10.0002, "seconds":1.00004, "bytes":2498560, "bits_per_second":1.99876e+07, "packets":305, "omitted":false}}], "end":{"streams":[{"udp":{"socket":4, "start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}}], "sum":{"start":0, "end":10.0002, "seconds":10.0002, "bytes":24756224, "bits_per_second":1.98045e+07, "jitter_ms":0.0113579, "lost_packets":0, "packets":3022, "lost_percent":0}, "cpu_utilization_percent":{"host_total":0.647561, 
"host_user":0.146468, "host_system":0.501083, "remote_total":0.31751, "remote_user":0, "remote_system":0.31751}}}
diff --git a/tests/unit/benchmark/scenarios/networking/test_iperf3.py b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
index 239e46a1c..8b0da655b 100644
--- a/tests/unit/benchmark/scenarios/networking/test_iperf3.py
+++ b/tests/unit/benchmark/scenarios/networking/test_iperf3.py
@@ -21,6 +21,8 @@ from yardstick.benchmark.scenarios.networking import iperf3
@mock.patch('yardstick.benchmark.scenarios.networking.iperf3.ssh')
class IperfTestCase(unittest.TestCase):
+ output_name_tcp = 'iperf3_sample_output.json'
+ output_name_udp = 'iperf3_sample_output_udp.json'
def setUp(self):
self.ctx = {
@@ -66,7 +68,7 @@ class IperfTestCase(unittest.TestCase):
options = {}
args = {'options': options}
- sample_output = self._read_sample_output()
+ sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
result = p.run(args)
@@ -84,7 +86,7 @@ class IperfTestCase(unittest.TestCase):
'sla': {'bytes_per_second': 15000000}
}
- sample_output = self._read_sample_output()
+ sample_output = self._read_sample_output(self.output_name_tcp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
expected_result = json.loads(sample_output)
result = p.run(args)
@@ -102,7 +104,41 @@ class IperfTestCase(unittest.TestCase):
'sla': {'bytes_per_second': 25000000}
}
- sample_output = self._read_sample_output()
+ sample_output = self._read_sample_output(self.output_name_tcp)
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(AssertionError, p.run, args)
+
+ def test_iperf_successful_sla_jitter(self, mock_ssh):
+
+ p = iperf3.Iperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
+ options = {"udp":"udp","bandwidth":"20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 10}
+ }
+
+ sample_output = self._read_sample_output(self.output_name_udp)
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ expected_result = json.loads(sample_output)
+ result = p.run(args)
+ self.assertEqual(result, expected_result)
+
+ def test_iperf_unsuccessful_sla_jitter(self, mock_ssh):
+
+ p = iperf3.Iperf(self.ctx)
+ mock_ssh.SSH().execute.return_value = (0, '', '')
+ p.host = mock_ssh.SSH()
+
+ options = {"udp":"udp","bandwidth":"20m"}
+ args = {
+ 'options': options,
+ 'sla': {'jitter': 0.0001}
+ }
+
+ sample_output = self._read_sample_output(self.output_name_udp)
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
self.assertRaises(AssertionError, p.run, args)
@@ -118,9 +154,9 @@ class IperfTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, args)
- def _read_sample_output(self):
+ def _read_sample_output(self,filename):
curr_path = os.path.dirname(os.path.abspath(__file__))
- output = os.path.join(curr_path, 'iperf3_sample_output.json')
+ output = os.path.join(curr_path, filename)
with open(output) as f:
sample_output = f.read()
return sample_output
diff --git a/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json b/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
new file mode 100644
index 000000000..e9f642aba
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/storage/fio_read_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 2166860,"bw": 36113,"iops": 9028,"runtime": 60001,"slat": {"min": 7,"max": 1807,"mean": 10.49,"stddev": 3.00},"clat": {"min": 1,"max": 16902,"mean": 97.84,"stddev": 78.16,"percentile": {"1.000000": 84,"5.000000": 86,"10.000000": 87,"20.000000": 88,"30.000000": 89,"40.000000": 90,"50.000000": 91,"60.000000": 93,"70.000000": 98,"80.000000": 103,"90.000000": 111,"95.000000": 127,"99.000000": 161,"99.500000": 177,"99.900000": 215,"99.950000": 266,"99.990000": 4128,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 86,"max": 16912,"mean": 108.70,"stddev": 78.29},"bw_min": 0,"bw_max": 38128,"bw_agg": 35816.54,"bw_mean": 35816.54,"bw_dev": 3579.16},"write": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 4.86,"sys_cpu": 19.38,"ctx": 632024,"majf": 
0,"minf": 30,"iodepth_level": {"1": 116.58,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 72.60,"250": 27.34,"500": 0.04,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.01,"50": 0.00,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 631084,"write_ios": 212,"read_merges": 0,"write_merges": 232,"read_ticks": 57300,"write_ticks": 324,"in_queue": 57400,"util": 81.55}]}
diff --git a/tests/unit/benchmark/scenarios/storage/fio_sample_output.json b/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
index 4c7501818..4c7501818 100644
--- a/tests/unit/benchmark/scenarios/storage/fio_sample_output.json
+++ b/tests/unit/benchmark/scenarios/storage/fio_rw_sample_output.json
diff --git a/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json b/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
new file mode 100644
index 000000000..7c760e8bc
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/storage/fio_write_sample_output.json
@@ -0,0 +1 @@
+{"fioversion": "fio-2.1.3","jobs": [{"jobname": "yardstick-fio","groupid": 0,"error": 0,"read": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"write": {"io_bytes": 2106508,"bw": 35107,"iops": 8776,"runtime": 60001,"slat": {"min": 8,"max": 5166,"mean": 11.83,"stddev": 7.05},"clat": {"min": 1,"max": 23472,"mean": 99.54,"stddev": 44.23,"percentile": {"1.000000": 85,"5.000000": 87,"10.000000": 88,"20.000000": 89,"30.000000": 90,"40.000000": 91,"50.000000": 93,"60.000000": 99,"70.000000": 104,"80.000000": 107,"90.000000": 113,"95.000000": 127,"99.000000": 161,"99.500000": 179,"99.900000": 231,"99.950000": 286,"99.990000": 628,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 87,"max": 23486,"mean": 111.74,"stddev": 45.61},"bw_min": 0,"bw_max": 37288,"bw_agg": 34839.53,"bw_mean": 34839.53,"bw_dev": 3387.37},"trim": {"io_bytes": 0,"bw": 0,"iops": 0,"runtime": 0,"slat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"clat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00,"percentile": {"1.000000": 0,"5.000000": 0,"10.000000": 0,"20.000000": 0,"30.000000": 0,"40.000000": 0,"50.000000": 0,"60.000000": 0,"70.000000": 0,"80.000000": 0,"90.000000": 0,"95.000000": 0,"99.000000": 0,"99.500000": 0,"99.900000": 0,"99.950000": 0,"99.990000": 0,"0.00": 0,"0.00": 0,"0.00": 0}},"lat": {"min": 0,"max": 0,"mean": 0.00,"stddev": 0.00},"bw_min": 0,"bw_max": 0,"bw_agg": 0.00,"bw_mean": 0.00,"bw_dev": 0.00},"usr_cpu": 5.25,"sys_cpu": 19.72,"ctx": 616160,"majf": 
0,"minf": 27,"iodepth_level": {"1": 116.90,"2": 0.00,"4": 0.00,"8": 0.00,"16": 0.00,"32": 0.00,">=64": 0.00},"latency_us": {"2": 0.01,"4": 0.01,"10": 0.00,"20": 0.00,"50": 0.01,"100": 60.74,"250": 39.18,"500": 0.06,"750": 0.01,"1000": 0.01},"latency_ms": {"2": 0.01,"4": 0.01,"10": 0.01,"20": 0.00,"50": 0.01,"100": 0.00,"250": 0.00,"500": 0.00,"750": 0.00,"1000": 0.00,"2000": 0.00,">=2000": 0.00}}],"disk_util": [{"name": "vda","read_ios": 0,"write_ios": 615418,"read_merges": 0,"write_merges": 231,"read_ticks": 0,"write_ticks": 58284,"in_queue": 58024,"util": 82.45}]}
diff --git a/tests/unit/benchmark/scenarios/storage/test_fio.py b/tests/unit/benchmark/scenarios/storage/test_fio.py
index 45e8a9429..6d38e9c53 100644
--- a/tests/unit/benchmark/scenarios/storage/test_fio.py
+++ b/tests/unit/benchmark/scenarios/storage/test_fio.py
@@ -26,16 +26,21 @@ class FioTestCase(unittest.TestCase):
self.ctx = {
'host': '172.16.0.137',
'user': 'cirros',
- 'key_filename': "mykey.key"
+ 'key_filename': 'mykey.key'
+ }
+ self.sample_output = {
+ 'read': 'fio_read_sample_output.json',
+ 'write': 'fio_write_sample_output.json',
+ 'rw': 'fio_rw_sample_output.json'
}
def test_fio_successful_setup(self, mock_ssh):
p = fio.Fio(self.ctx)
options = {
- 'filename': "/home/ec2-user/data.raw",
- 'bs': "4k",
- 'rw': "rw",
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
'ramp_time': 10
}
args = {'options': options}
@@ -49,15 +54,134 @@ class FioTestCase(unittest.TestCase):
p = fio.Fio(self.ctx)
options = {
- 'filename': "/home/ec2-user/data.raw",
- 'bs': "4k",
- 'rw': "rw",
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ result = p.run(args)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = json.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_read_no_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': "read",
+ 'ramp_time': 10
+ }
+ args = {'options': options}
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['read'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ result = p.run(args)
+
+ expected_result = '{"read_bw": 36113, "read_iops": 9028,' \
+ '"read_lat": 108.7}'
+ expected_result = json.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_write_no_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'write',
'ramp_time': 10
}
args = {'options': options}
p.client = mock_ssh.SSH()
- sample_output = self._read_sample_output()
+ sample_output = self._read_sample_output(self.sample_output['write'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ result = p.run(args)
+
+ expected_result = '{"write_bw": 35107, "write_iops": 8776,'\
+ '"write_lat": 111.74}'
+ expected_result = json.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+ def test_fio_successful_lat_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 300.1}
+ }
+
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+
+ result = p.run(args)
+
+ expected_result = '{"read_bw": 83888, "read_iops": 20972,' \
+ '"read_lat": 236.8, "write_bw": 84182, "write_iops": 21045,'\
+ '"write_lat": 233.55}'
+ expected_result = json.loads(expected_result)
+ self.assertEqual(result, expected_result)
+
+
+ def test_fio_unsuccessful_lat_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'write_lat': 200.1}
+ }
+
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(AssertionError, p.run, args)
+
+ def test_fio_successful_bw_iops_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 20000}
+ }
+
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
mock_ssh.SSH().execute.return_value = (0, sample_output, '')
result = p.run(args)
@@ -68,13 +192,33 @@ class FioTestCase(unittest.TestCase):
expected_result = json.loads(expected_result)
self.assertEqual(result, expected_result)
+ def test_fio_unsuccessful_bw_iops_sla(self, mock_ssh):
+
+ p = fio.Fio(self.ctx)
+ options = {
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
+ 'ramp_time': 10
+ }
+ args = {
+ 'options': options,
+ 'sla': {'read_iops': 30000}
+ }
+
+ p.client = mock_ssh.SSH()
+
+ sample_output = self._read_sample_output(self.sample_output['rw'])
+ mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+ self.assertRaises(AssertionError, p.run, args)
+
def test_fio_unsuccessful_script_error(self, mock_ssh):
p = fio.Fio(self.ctx)
options = {
- 'filename': "/home/ec2-user/data.raw",
- 'bs': "4k",
- 'rw': "rw",
+ 'filename': '/home/ec2-user/data.raw',
+ 'bs': '4k',
+ 'rw': 'rw',
'ramp_time': 10
}
args = {'options': options}
@@ -83,9 +227,9 @@ class FioTestCase(unittest.TestCase):
mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
self.assertRaises(RuntimeError, p.run, args)
- def _read_sample_output(self):
+ def _read_sample_output(self, file_name):
curr_path = os.path.dirname(os.path.abspath(__file__))
- output = os.path.join(curr_path, 'fio_sample_output.json')
+ output = os.path.join(curr_path, file_name)
with open(output) as f:
sample_output = f.read()
return sample_output
diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index ff625de4d..e31a892d2 100644
--- a/yardstick/benchmark/scenarios/networking/iperf3.py
+++ b/yardstick/benchmark/scenarios/networking/iperf3.py
@@ -95,8 +95,12 @@ For more info see http://software.es.net/iperf
if not options:
options = ""
+ use_UDP = False
if "udp" in options:
cmd += " --udp"
+ use_UDP = True
+ if "bandwidth" in options:
+ cmd += " --bandwidth %s" % options["bandwidth"]
else:
# tcp obviously
if "nodelay" in options:
@@ -120,15 +124,25 @@ For more info see http://software.es.net/iperf
output = json.loads(stdout)
- # convert bits per second to bytes per second
- bytes_per_second = \
- int((output["end"]["sum_received"]["bits_per_second"])) / 8
-
if "sla" in args:
- sla_bytes_per_second = int(args["sla"]["bytes_per_second"])
- assert bytes_per_second >= sla_bytes_per_second, \
- "bytes_per_second %d < sla (%d)" % \
- (bytes_per_second, sla_bytes_per_second)
+ sla_iperf = args["sla"]
+ if not use_UDP:
+ sla_bytes_per_second = int(sla_iperf["bytes_per_second"])
+
+ # convert bits per second to bytes per second
+ bit_per_second = \
+ int(output["end"]["sum_received"]["bits_per_second"])
+ bytes_per_second = bit_per_second / 8
+ assert bytes_per_second >= sla_bytes_per_second, \
+ "bytes_per_second %d < sla:bytes_per_second (%d)" % \
+ (bytes_per_second, sla_bytes_per_second)
+ else:
+ sla_jitter = float(sla_iperf["jitter"])
+
+ jitter_ms = float(output["end"]["sum"]["jitter_ms"])
+ assert jitter_ms <= sla_jitter, \
+ "jitter_ms %f > sla:jitter %f" % \
+ (jitter_ms, sla_jitter)
return output
diff --git a/yardstick/benchmark/scenarios/storage/fio.py b/yardstick/benchmark/scenarios/storage/fio.py
index 42f159164..1107a8b2c 100644
--- a/yardstick/benchmark/scenarios/storage/fio.py
+++ b/yardstick/benchmark/scenarios/storage/fio.py
@@ -114,14 +114,30 @@ class Fio(base.Scenario):
raw_data = json.loads(stdout)
# The bandwidth unit is KB/s, and latency unit is us
- result["read_bw"] = raw_data["jobs"][0]["read"]["bw"]
- result["read_iops"] = raw_data["jobs"][0]["read"]["iops"]
- result["read_lat"] = raw_data["jobs"][0]["read"]["lat"]["mean"]
- result["write_bw"] = raw_data["jobs"][0]["write"]["bw"]
- result["write_iops"] = raw_data["jobs"][0]["write"]["iops"]
- result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]
-
- # TODO: add sla check
+ if rw in ["read", "randread", "rw", "randrw"]:
+ result["read_bw"] = raw_data["jobs"][0]["read"]["bw"]
+ result["read_iops"] = raw_data["jobs"][0]["read"]["iops"]
+ result["read_lat"] = raw_data["jobs"][0]["read"]["lat"]["mean"]
+ if rw in ["write", "randwrite", "rw", "randrw"]:
+ result["write_bw"] = raw_data["jobs"][0]["write"]["bw"]
+ result["write_iops"] = raw_data["jobs"][0]["write"]["iops"]
+ result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]
+
+ if "sla" in args:
+ for k, v in result.items():
+ if k not in args['sla']:
+ continue
+
+ if "lat" in k:
+                    # For latency a smaller value is better
+ max_v = float(args['sla'][k])
+ assert v <= max_v, "%s %f > " \
+ "sla:%s(%f)" % (k, v, k, max_v)
+ else:
+                    # For bandwidth and iops a bigger value is better
+ min_v = int(args['sla'][k])
+ assert v >= min_v, "%s %d < " \
+ "sla:%s(%d)" % (k, v, k, min_v)
return result
diff --git a/yardstick/cmd/commands/task.py b/yardstick/cmd/commands/task.py
index 6e117edd0..5c25c576a 100755
--- a/yardstick/cmd/commands/task.py
+++ b/yardstick/cmd/commands/task.py
@@ -22,6 +22,7 @@ from yardstick.common.task_template import TaskTemplate
from yardstick.common.utils import cliargs
output_file_default = "/tmp/yardstick.out"
+test_cases_dir_default = "tests/opnfv/test_cases/"
class TaskCommands(object):
@@ -30,7 +31,7 @@ class TaskCommands(object):
Set of commands to manage benchmark tasks.
'''
- @cliargs("taskfile", type=str, help="path to taskfile", nargs=1)
+ @cliargs("inputfile", type=str, help="path to task or suite file", nargs=1)
@cliargs("--task-args", dest="task_args",
help="Input task args (dict in json). These args are used"
"to render input task that is jinja2 template.")
@@ -40,18 +41,33 @@ class TaskCommands(object):
"task that is jinja2 template.")
@cliargs("--keep-deploy", help="keep context deployed in cloud",
action="store_true")
- @cliargs("--parse-only", help="parse the benchmark config file and exit",
+ @cliargs("--parse-only", help="parse the config file and exit",
action="store_true")
@cliargs("--output-file", help="file where output is stored, default %s" %
output_file_default, default=output_file_default)
+ @cliargs("--suite", help="process test suite file instead of a task file",
+ action="store_true")
def do_start(self, args):
'''Start a benchmark scenario.'''
atexit.register(atexit_handler)
- parser = TaskParser(args.taskfile[0])
- scenarios, run_in_parallel = parser.parse(args.task_args,
- args.task_args_file)
+ parser = TaskParser(args.inputfile[0])
+
+ suite_params = {}
+ if args.suite:
+ suite_params = parser.parse_suite()
+ test_cases_dir = suite_params["test_cases_dir"]
+ if test_cases_dir[-1] != os.sep:
+ test_cases_dir += os.sep
+ task_files = [test_cases_dir + task
+ for task in suite_params["task_fnames"]]
+ else:
+ task_files = [parser.path]
+
+ task_args = suite_params.get("task_args", [args.task_args])
+ task_args_fnames = suite_params.get("task_args_fnames",
+ [args.task_args_file])
if args.parse_only:
sys.exit(0)
@@ -59,34 +75,45 @@ class TaskCommands(object):
if os.path.isfile(args.output_file):
os.remove(args.output_file)
+ for i in range(0, len(task_files)):
+ parser.path = task_files[i]
+ scenarios, run_in_parallel = parser.parse_task(task_args[i],
+ task_args_fnames[i])
+
+ self._run(scenarios, run_in_parallel, args.output_file)
+
+ if args.keep_deploy:
+ # keep deployment, forget about stack
+ # (hide it for exit handler)
+ Context.list = []
+ else:
+ for context in Context.list:
+ context.undeploy()
+ Context.list = []
+
+ print "Done, exiting"
+
+ def _run(self, scenarios, run_in_parallel, output_file):
+ '''Deploys context and calls runners'''
for context in Context.list:
context.deploy()
runners = []
if run_in_parallel:
for scenario in scenarios:
- runner = run_one_scenario(scenario, args.output_file)
+ runner = run_one_scenario(scenario, output_file)
runners.append(runner)
# Wait for runners to finish
for runner in runners:
runner_join(runner)
- print "Runner ended, output in", args.output_file
+ print "Runner ended, output in", output_file
else:
# run serially
for scenario in scenarios:
- runner = run_one_scenario(scenario, args.output_file)
+ runner = run_one_scenario(scenario, output_file)
runner_join(runner)
- print "Runner ended, output in", args.output_file
-
- if args.keep_deploy:
- # keep deployment, forget about stack (hide it for exit handler)
- Context.list = []
- else:
- for context in Context.list:
- context.undeploy()
-
- print "Done, exiting"
+ print "Runner ended, output in", output_file
# TODO: Move stuff below into TaskCommands class !?
@@ -96,7 +123,47 @@ class TaskParser(object):
def __init__(self, path):
self.path = path
- def parse(self, task_args=None, task_args_file=None):
+ def parse_suite(self):
+ '''parse the suite file and return a list of task config file paths
+ and lists of optional parameters if present'''
+ print "Parsing suite file:", self.path
+
+ try:
+ with open(self.path) as stream:
+ cfg = yaml.load(stream)
+ except IOError as ioerror:
+ sys.exit(ioerror)
+
+ self._check_schema(cfg["schema"], "suite")
+ print "Starting suite:", cfg["name"]
+
+ test_cases_dir = cfg.get("test_cases_dir", test_cases_dir_default)
+ task_fnames = []
+ task_args = []
+ task_args_fnames = []
+
+ for task in cfg["test_cases"]:
+ task_fnames.append(task["file_name"])
+ if "task_args" in task:
+ task_args.append(task["task_args"])
+ else:
+ task_args.append(None)
+
+ if "task_args_file" in task:
+ task_args_fnames.append(task["task_args_file"])
+ else:
+ task_args_fnames.append(None)
+
+ suite_params = {
+ "test_cases_dir": test_cases_dir,
+ "task_fnames": task_fnames,
+ "task_args": task_args,
+ "task_args_fnames": task_args_fnames
+ }
+
+ return suite_params
+
+ def parse_task(self, task_args=None, task_args_file=None):
'''parses the task file and return an context and scenario instances'''
print "Parsing task config:", self.path
@@ -124,9 +191,7 @@ class TaskParser(object):
except IOError as ioerror:
sys.exit(ioerror)
- if cfg["schema"] != "yardstick:task:0.1":
- sys.exit("error: file %s has unknown schema %s" % (self.path,
- cfg["schema"]))
+ self._check_schema(cfg["schema"], "task")
# TODO: support one or many contexts? Many would simpler and precise
if "context" in cfg:
@@ -136,9 +201,10 @@ class TaskParser(object):
for cfg_attrs in context_cfgs:
# config external_network based on env var
- for _, attrs in cfg_attrs["networks"].items():
- attrs["external_network"] = os.environ.get('EXTERNAL_NETWORK',
- 'net04_ext')
+ if "networks" in cfg_attrs:
+ for _, attrs in cfg_attrs["networks"].items():
+ attrs["external_network"] = os.environ.get(
+ 'EXTERNAL_NETWORK', 'net04_ext')
context = Context()
context.init(cfg_attrs)
@@ -147,6 +213,13 @@ class TaskParser(object):
# TODO we need something better here, a class that represent the file
return cfg["scenarios"], run_in_parallel
+ def _check_schema(self, cfg_schema, schema_type):
+ '''Check if config file is using the correct schema type'''
+
+ if cfg_schema != "yardstick:" + schema_type + ":0.1":
+ sys.exit("error: file %s has unknown schema %s" % (self.path,
+ cfg_schema))
+
def atexit_handler():
'''handler for process termination'''