author	kubi <jean.gaoliang@huawei.com>	2015-10-13 15:22:20 +0800
committer	kubi <jean.gaoliang@huawei.com>	2015-10-15 09:15:37 +0800
commit	070076aea759b0ee1fff37572e75cba13a7bd714 (patch)
tree	f8ce4116089dfbb5b18b28351da4a043cb7bdc7b
parent	a1378b700a9234e7fcbf77c7890030b306bc65ea (diff)
Support for netperf
As agreed in the IRC meeting, iperf is used for YARDSTICK-112; support for
netperf is also included in the framework (YARDSTICK-121). Bulk data tests
and request/response tests are supported by netperf.

JIRA: YARDSTICK-121

Change-Id: I0e934dc067802c2792d751a19a187cd00ad8741b
Signed-off-by: kubi <jean.gaoliang@huawei.com>
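The sample task added below is launched through the yardstick CLI, which deploys the demo context and runs both scenarios; as an illustration (invocation assumed from typical usage and may differ between versions):

    yardstick task start samples/netperf.yaml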
-rwxr-xr-x	samples/netperf.yaml	69
-rwxr-xr-x	tests/unit/benchmark/scenarios/networking/netperf_sample_output.json	1
-rwxr-xr-x	tests/unit/benchmark/scenarios/networking/test_netperf.py	116
-rwxr-xr-x	tools/ubuntu-server-cloudimg-modify.sh	1
-rwxr-xr-x	yardstick/benchmark/scenarios/networking/netperf.py	163
-rwxr-xr-x	yardstick/benchmark/scenarios/networking/netperf_benchmark.bash	47
6 files changed, 397 insertions, 0 deletions
diff --git a/samples/netperf.yaml b/samples/netperf.yaml
new file mode 100755
index 000000000..6d1dd8ec9
--- /dev/null
+++ b/samples/netperf.yaml
@@ -0,0 +1,69 @@
+---
+# Sample benchmark task config file
+# Measure network latency and throughput using netperf.
+# There are two sample scenarios: a bulk test and a request/response test.
+# In the bulk test, UDP_STREAM and TCP_STREAM can be used;
+# send_msg_size and recv_msg_size are options of the bulk test.
+# In the req/rsp test, TCP_RR, TCP_CRR and UDP_RR can be used;
+# req_rsp_size is an option of the req/rsp test.
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: Netperf
+  options:
+    testname: 'UDP_STREAM'
+    send_msg_size: 1024
+    duration: 20
+
+  host: Chang'e.demo
+  target: Houyi.demo
+
+  runner:
+    type: Iteration
+    iterations: 1
+    interval: 1
+  sla:
+    mean_latency: 100
+    action: monitor
+-
+  type: Netperf
+  options:
+    testname: 'TCP_RR'
+    req_rsp_size: '32,1024'
+
+  host: Chang'e.demo
+  target: Houyi.demo
+
+  runner:
+    type: Duration
+    duration: 20
+    interval: 1
+  sla:
+    mean_latency: 300
+    action: monitor
+
+context:
+  name: demo
+  image: yardstick-trusty-server
+  flavor: yardstick-flavor
+  user: ec2-user
+
+  placement_groups:
+    pgrp1:
+      policy: "availability"
+
+  servers:
+    Chang'e:
+      floating_ip: true
+      placement: "pgrp1"
+    Houyi:
+      floating_ip: true
+      placement: "pgrp1"
+
+  networks:
+    test:
+      cidr: '10.0.1.0/24'
+
+
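As a rough sketch of what the first scenario above translates to (derived from how run() in netperf.py below assembles the command; the target IP is a placeholder):

    # executed on the host VM for the UDP_STREAM scenario
    sudo bash netperf.sh -H 10.0.1.5 -l 20 -t UDP_STREAM \
        -- -O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY' -m 1024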
diff --git a/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json b/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
new file mode 100755
index 000000000..bba76cfa5
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/netperf_sample_output.json
@@ -0,0 +1 @@
+{"mean_latency":"9.49","throughput":"823.77","throughput_unit":"10^6bits/s"}
\ No newline at end of file
diff --git a/tests/unit/benchmark/scenarios/networking/test_netperf.py b/tests/unit/benchmark/scenarios/networking/test_netperf.py
new file mode 100755
index 000000000..d5c19918b
--- /dev/null
+++ b/tests/unit/benchmark/scenarios/networking/test_netperf.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Unittest for yardstick.benchmark.scenarios.networking.netperf.Netperf
+
+import mock
+import unittest
+import os
+import json
+
+from yardstick.benchmark.scenarios.networking import netperf
+
+
+@mock.patch('yardstick.benchmark.scenarios.networking.netperf.ssh')
+class NetperfTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.ctx = {
+            'host': '172.16.0.137',
+            'target': '172.16.0.138',
+            'user': 'cirros',
+            'key_filename': "mykey.key"
+        }
+
+    def test_netperf_successful_setup(self, mock_ssh):
+
+        p = netperf.Netperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+
+        p.setup()
+        self.assertIsNotNone(p.server)
+        self.assertIsNotNone(p.client)
+        self.assertEqual(p.setup_done, True)
+
+    def test_netperf_successful_no_sla(self, mock_ssh):
+
+        p = netperf.Netperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {'options': options}
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        expected_result = json.loads(sample_output)
+        result = p.run(args)
+        self.assertEqual(result, expected_result)
+
+    def test_netperf_successful_sla(self, mock_ssh):
+
+        p = netperf.Netperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {
+            'options': options,
+            'sla': {'mean_latency': 100}
+        }
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        expected_result = json.loads(sample_output)
+        result = p.run(args)
+        self.assertEqual(result, expected_result)
+
+    def test_netperf_unsuccessful_sla(self, mock_ssh):
+
+        p = netperf.Netperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {
+            'options': options,
+            'sla': {'mean_latency': 5}
+        }
+
+        sample_output = self._read_sample_output()
+        mock_ssh.SSH().execute.return_value = (0, sample_output, '')
+        self.assertRaises(AssertionError, p.run, args)
+
+    def test_netperf_unsuccessful_script_error(self, mock_ssh):
+
+        p = netperf.Netperf(self.ctx)
+        mock_ssh.SSH().execute.return_value = (0, '', '')
+        p.host = mock_ssh.SSH()
+
+        options = {}
+        args = {'options': options}
+
+        mock_ssh.SSH().execute.return_value = (1, '', 'FOOBAR')
+        self.assertRaises(RuntimeError, p.run, args)
+
+    def _read_sample_output(self):
+        curr_path = os.path.dirname(os.path.abspath(__file__))
+        output = os.path.join(curr_path, 'netperf_sample_output.json')
+        with open(output) as f:
+            sample_output = f.read()
+        return sample_output
+
+
+def main():
+    unittest.main()
+
+if __name__ == '__main__':
+    main()
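Apart from the mock dependency, the test module is self-contained, so a stand-alone run from the repository root should work along these lines (a sketch, not the project's official test entry point):

    python -m unittest \
        tests.unit.benchmark.scenarios.networking.test_netperf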
diff --git a/tools/ubuntu-server-cloudimg-modify.sh b/tools/ubuntu-server-cloudimg-modify.sh
index 6e750e097..93f2d307b 100755
--- a/tools/ubuntu-server-cloudimg-modify.sh
+++ b/tools/ubuntu-server-cloudimg-modify.sh
@@ -39,6 +39,7 @@ apt-get install -y \
     linux-tools-common \
     linux-tools-generic \
     lmbench \
+    netperf \
     stress

 # restore symlink
diff --git a/yardstick/benchmark/scenarios/networking/netperf.py b/yardstick/benchmark/scenarios/networking/netperf.py
new file mode 100755
index 000000000..3121fdaf2
--- /dev/null
+++ b/yardstick/benchmark/scenarios/networking/netperf.py
@@ -0,0 +1,163 @@
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+# Bulk data tests and request/response tests are supported.
+import pkg_resources
+import logging
+import json
+
+import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios import base
+
+LOG = logging.getLogger(__name__)
+
+
+class Netperf(base.Scenario):
+    """Execute netperf between two hosts
+
+    Parameters
+    testname - specify the test you wish to perform;
+        valid testnames are TCP_STREAM, TCP_RR, UDP_STREAM, UDP_RR
+        type:    string
+        unit:    na
+        default: TCP_STREAM
+    send_msg_size - set the local send size to the given number of bytes.
+        type:    int
+        unit:    bytes
+        default: na
+    recv_msg_size - set the receive size for the remote system.
+        type:    int
+        unit:    bytes
+        default: na
+    req_rsp_size - set the request and/or response sizes based on sizespec.
+        type:    string
+        unit:    na
+        default: na
+    duration - duration of the test
+        type:    int
+        unit:    seconds
+        default: 20
+
+    see the link below for more details on netperf arguments:
+    http://www.netperf.org/netperf/training/Netperf.html
+    """
+    __scenario_type__ = "Netperf"
+
+    TARGET_SCRIPT = 'netperf_benchmark.bash'
+
+    def __init__(self, context):
+        self.context = context
+        self.setup_done = False
+
+    def setup(self):
+        '''scenario setup'''
+        self.target_script = pkg_resources.resource_filename(
+            'yardstick.benchmark.scenarios.networking',
+            Netperf.TARGET_SCRIPT)
+        user = self.context.get('user', 'ubuntu')
+        host = self.context.get('host', None)
+        target = self.context.get('target', None)
+        key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+
+        # netserver is started automatically during VM boot
+        LOG.info("user:%s, target:%s", user, target)
+        self.server = ssh.SSH(user, target, key_filename=key_filename)
+        self.server.wait(timeout=600)
+
+        LOG.info("user:%s, host:%s", user, host)
+        self.client = ssh.SSH(user, host, key_filename=key_filename)
+        self.client.wait(timeout=600)
+
+        # copy the benchmark script to the host VM
+        self.client.run("cat > ~/netperf.sh",
+                        stdin=open(self.target_script, "rb"))
+
+        self.setup_done = True
+
+    def run(self, args):
+        """execute the benchmark"""
+
+        if not self.setup_done:
+            self.setup()
+
+        # get global options
+        ipaddr = args.get("ipaddr", '127.0.0.1')
+        options = args['options']
+        testname = options.get("testname", 'TCP_STREAM')
+        duration_time = self.context.get("duration", None)
+        arithmetic_time = options.get("duration", None)
+        if duration_time:
+            testlen = duration_time
+        elif arithmetic_time:
+            testlen = arithmetic_time
+        else:
+            testlen = 20
+
+        cmd_args = "-H %s -l %s -t %s" % (ipaddr, testlen, testname)
+
+        # get test specific options
+        default_args = "-O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY'"
+        cmd_args += " -- %s" % default_args
+        option_pair_list = [("send_msg_size", "-m"),
+                            ("recv_msg_size", "-M"),
+                            ("req_rsp_size", "-r")]
+        for option_pair in option_pair_list:
+            if option_pair[0] in options:
+                cmd_args += " %s %s" % (option_pair[1],
+                                        options[option_pair[0]])
+
+        cmd = "sudo bash netperf.sh %s" % (cmd_args)
+        LOG.debug("Executing command: %s", cmd)
+        status, stdout, stderr = self.client.execute(cmd)
+
+        if status:
+            raise RuntimeError(stderr)
+
+        data = json.loads(stdout)
+        if data['mean_latency'] == '':
+            raise RuntimeError(stdout)
+
+        # sla check
+        mean_latency = float(data['mean_latency'])
+        if "sla" in args:
+            sla_max_mean_latency = int(args["sla"]["mean_latency"])
+
+            assert mean_latency <= sla_max_mean_latency, \
+                "mean_latency %f > sla_max_mean_latency(%f)" % \
+                (mean_latency, sla_max_mean_latency)
+
+        return data
+
+
+def _test():
+    '''internal test function'''
+    logger = logging.getLogger('yardstick')
+    logger.setLevel(logging.DEBUG)
+
+    key_filename = pkg_resources.resource_filename('yardstick.resources',
+                                                   'files/yardstick_key')
+    runner_cfg = {}
+    runner_cfg['type'] = 'Duration'
+    runner_cfg['duration'] = 5
+    runner_cfg['client'] = '10.0.2.33'
+    runner_cfg['server'] = '10.0.2.53'
+    runner_cfg['user'] = 'ubuntu'
+    runner_cfg['output_filename'] = "/tmp/yardstick.out"
+    runner_cfg['key_filename'] = key_filename
+
+    scenario_args = {}
+    scenario_args['options'] = {"testname": 'TCP_STREAM'}
+
+    from yardstick.benchmark.runners import base as base_runner
+    runner = base_runner.Runner.get(runner_cfg)
+    runner.run("Netperf", scenario_args)
+    runner.join()
+    base_runner.Runner.release(runner)
+
+if __name__ == '__main__':
+    _test()
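setup() assumes netserver was started on the target VM at boot. A quick manual check on the target, as a sketch (12865 is netperf's default control port):

    # start netserver only if it is not already running
    pgrep netserver > /dev/null || sudo netserver -p 12865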
diff --git a/yardstick/benchmark/scenarios/networking/netperf_benchmark.bash b/yardstick/benchmark/scenarios/networking/netperf_benchmark.bash
new file mode 100755
index 000000000..a425c5df0
--- /dev/null
+++ b/yardstick/benchmark/scenarios/networking/netperf_benchmark.bash
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -e
+
+# Commandline arguments
+OPTIONS="$@"
+OUTPUT_FILE=/tmp/netperf-out.log
+
+# run netperf test
+run_netperf()
+{
+    netperf $OPTIONS > $OUTPUT_FILE
+}
+
+# write the result to stdout in json format
+output_json()
+{
+    mean=$(awk '/\/s/{print $3}' $OUTPUT_FILE)
+    throughput=$(awk '/\/s/{print $1}' $OUTPUT_FILE)
+    unit=$(awk '/\/s/{print $2}' $OUTPUT_FILE)
+    echo -e "{ \
+        \"mean_latency\":\"$mean\", \
+        \"throughput\":\"$throughput\", \
+        \"throughput_unit\":\"$unit\" \
+    }"
+}
+
+# main entry
+main()
+{
+    # run the test
+    run_netperf
+
+    # output result
+    output_json
+}
+
+main
\ No newline at end of file
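For reference, with the -O 'THROUGHPUT,THROUGHPUT_UNITS,MEAN_LATENCY' selector netperf ends its output with a single data line such as (values illustrative):

    823.77   10^6bits/s   9.49

The awk patterns above key on the "/s" in the units column of that line: $1 is the throughput, $2 the unit and $3 the mean latency, matching the fields in the sample JSON used by the unit tests.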