author | Kristian Hunt <kristian.hunt@gmail.com> | 2015-07-14 13:13:47 +0200
---|---|---
committer | Jörgen Karlsson <jorgen.w.karlsson@ericsson.com> | 2015-08-13 13:23:55 +0000
commit | eca1e81cac34d7569fa5dcb15e5df10a6583559b (patch) |
tree | b33491e7192b1f8a6d8293927fba6dae34d0b7d4 |
parent | ce671dba3dafe0d7d80d4a7eea1f9395bc4d6d7d (diff) |
Add perf scenario and sample
Supports using perf software event counters; no hardware events are
currently supported. Load can be put on the system with the load
parameter; otherwise the system sleeps while the measurements are
collected.
An SLA can be configured for any measured event.
Change-Id: Ic413d940093aadd10dc32888ea416aa94316a6fe
JIRA: YARDSTICK-51
Signed-off-by: Kristian Hunt <kristian.hunt@gmail.com>
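
As a quick illustration of the behaviour described in the commit message, the sketch below shows the payload choice (I/O load vs. plain sleep) and a "smaller_than_expected" SLA check on a measured event. It is not part of this change-set: the helper names `build_payload` and `check_sla` and the measured values are hypothetical, and the real logic lives in `yardstick/benchmark/scenarios/compute/perf.py` further down.

```python
# Illustration only (not part of this commit): payload selection and the
# "smaller_than_expected" SLA check applied to a measured perf event.
# Helper names and the parsed counter values below are hypothetical.


def build_payload(load, duration):
    """Return the command run while perf collects the counters."""
    if load:
        # generate some I/O load for the whole measurement window
        return "dd if=/dev/urandom of=/dev/null"
    # no load requested: just idle for the duration
    return "sleep %d" % duration


def check_sla(output, metric, expected_value, smaller_than_expected=False):
    """Raise AssertionError if the measured event violates the SLA."""
    assert metric in output, "Metric (%s) not found." % metric
    if smaller_than_expected:
        assert output[metric] < expected_value, "%s too high" % metric
    else:
        assert output[metric] >= expected_value, "%s too low" % metric


if __name__ == '__main__':
    print(build_payload(load=False, duration=30))   # -> "sleep 30"
    measured = {'context-switches': 120, 'cpu-migrations': 3}
    check_sla(measured, 'context-switches', 300, smaller_than_expected=True)
```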
-rw-r--r-- | samples/perf.yaml | 43
-rwxr-xr-x | tools/ubuntu-server-cloudimg-modify.sh | 2
-rw-r--r-- | yardstick/benchmark/scenarios/compute/perf.py | 140
-rw-r--r-- | yardstick/benchmark/scenarios/compute/perf_benchmark.bash | 68
4 files changed, 253 insertions, 0 deletions
diff --git a/samples/perf.yaml b/samples/perf.yaml
new file mode 100644
index 000000000..e7ba2d0e7
--- /dev/null
+++ b/samples/perf.yaml
@@ -0,0 +1,43 @@
+---
+# Sample benchmark task config file
+# use perf to perform Linux performance measurements
+# this sample demonstrates measurements of various software perf events
+
+schema: "yardstick:task:0.1"
+
+scenarios:
+-
+  type: Perf
+  options:
+    load: true
+    events:
+    - task-clock
+    - cpu-clock
+    - context-switches
+    - page-faults
+    - cpu-migrations
+  host: hades.demo
+
+  runner:
+    type: Duration
+    duration: 30
+
+  sla:
+    metric: context-switches
+    smaller_than_expected: true
+    expected_value: 300
+    action: monitor
+
+context:
+  name: demo
+  image: yardstick-trusty-server
+  flavor: yardstick-flavor
+  user: ec2-user
+
+  servers:
+    hades:
+      floating_ip: true
+  networks:
+    test:
+      cidr: "10.0.1.0/24"
+      external_network: "net04_ext"
diff --git a/tools/ubuntu-server-cloudimg-modify.sh b/tools/ubuntu-server-cloudimg-modify.sh
index 96447d8d9..41d654a08 100755
--- a/tools/ubuntu-server-cloudimg-modify.sh
+++ b/tools/ubuntu-server-cloudimg-modify.sh
@@ -29,6 +29,8 @@ apt-get update
 apt-get install -y \
     fio \
     iperf3 \
+    linux-tools-common \
+    linux-tools-generic \
     lmbench \
     stress
diff --git a/yardstick/benchmark/scenarios/compute/perf.py b/yardstick/benchmark/scenarios/compute/perf.py
new file mode 100644
index 000000000..62b4297e3
--- /dev/null
+++ b/yardstick/benchmark/scenarios/compute/perf.py
@@ -0,0 +1,140 @@
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+import pkg_resources
+import logging
+import json
+
+import yardstick.ssh as ssh
+from yardstick.benchmark.scenarios import base
+
+LOG = logging.getLogger(__name__)
+LOG.setLevel(logging.DEBUG)
+
+
+class Perf(base.Scenario):
+    """Execute perf benchmark in a host
+
+    Parameters
+        events - perf tool software, hardware or tracepoint events
+            type:       [str]
+            unit:       na
+            default:    ['task-clock']
+        load - simulate load on the host by doing IO operations
+            type:       bool
+            unit:       na
+            default:    false
+
+    For more info about perf and perf events see https://perf.wiki.kernel.org
+    """
+
+    __scenario_type__ = "Perf"
+
+    TARGET_SCRIPT = 'perf_benchmark.bash'
+
+    def __init__(self, context):
+        self.context = context
+        self.setup_done = False
+
+    def setup(self):
+        """scenario setup"""
+        self.target_script = pkg_resources.resource_filename(
+            'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
+        user = self.context.get('user', 'ubuntu')
+        host = self.context.get('host', None)
+        key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+
+        LOG.debug("user:%s, host:%s", user, host)
+        self.client = ssh.SSH(user, host, key_filename=key_filename)
+        self.client.wait(timeout=600)
+
+        # copy script to host
+        self.client.run("cat > ~/perf_benchmark.sh",
+                        stdin=open(self.target_script, "rb"))
+
+        self.setup_done = True
+
+    def run(self, args):
+        """execute the benchmark"""
+
+        if not self.setup_done:
+            self.setup()
+
+        options = args['options']
+        events = options.get('events', ['task-clock'])
+
+        events_string = ""
+        for event in events:
+            events_string += event + " "
+
+        # if run by a duration runner
+        duration_time = self.context.get("duration", None)
+        # if run by an arithmetic runner
+        arithmetic_time = options.get("duration", None)
+        if duration_time:
+            duration = duration_time
+        elif arithmetic_time:
+            duration = arithmetic_time
+        else:
+            duration = 30
+
+        if 'load' in options:
+            load = "dd if=/dev/urandom of=/dev/null"
+        else:
+            load = "sleep %d" % duration
+
+        cmd = "sudo bash perf_benchmark.sh '%s' %d %s" \
+            % (load, duration, events_string)
+
+        LOG.debug("Executing command: %s", cmd)
+        status, stdout, stderr = self.client.execute(cmd)
+
+        if status:
+            raise RuntimeError(stdout)
+
+        output = json.loads(stdout)
+
+        if "sla" in args:
+            metric = args['sla']['metric']
+            exp_val = args['sla']['expected_value']
+            smaller_than_exp = 'smaller_than_expected' in args['sla']
+
+            if metric not in output:
+                assert False, "Metric (%s) not found." % metric
+            else:
+                if smaller_than_exp:
+                    assert output[metric] < exp_val, "%s %d >= %d (sla)" \
+                        % (metric, output[metric], exp_val)
+                else:
+                    assert output[metric] >= exp_val, "%s %d < %d (sla)" \
+                        % (metric, output[metric], exp_val)
+        return output
+
+
+def _test():
+    """internal test function"""
+    key_filename = pkg_resources.resource_filename('yardstick.resources',
+                                                   'files/yardstick_key')
+    ctx = {'host': '172.16.0.137',
+           'user': 'ubuntu',
+           'key_filename': key_filename
+           }
+
+    logger = logging.getLogger('yardstick')
+    logger.setLevel(logging.DEBUG)
+
+    p = Perf(ctx)
+
+    options = {'load': True}
+    args = {'options': options}
+
+    result = p.run(args)
+    print result
+
+if __name__ == '__main__':
+    _test()
diff --git a/yardstick/benchmark/scenarios/compute/perf_benchmark.bash b/yardstick/benchmark/scenarios/compute/perf_benchmark.bash
new file mode 100644
index 000000000..5ae107a52
--- /dev/null
+++ b/yardstick/benchmark/scenarios/compute/perf_benchmark.bash
@@ -0,0 +1,68 @@
+#!/bin/sh
+
+##############################################################################
+# Copyright (c) 2015 Ericsson AB and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+set -e
+
+# Commandline arguments
+PAYLOAD_OP=$1
+shift
+DURATION=$1
+shift
+EVENTS=("$@")
+OUTPUT_FILE=/tmp/perfout.txt
+
+# run perf test
+run_perf()
+{
+    COMMA_SEP_E=$( IFS=$','; echo "${EVENTS[*]}" )
+
+    if [[ $PAYLOAD_OP == dd* ]]
+    then
+        sudo perf stat -o $OUTPUT_FILE -e ${COMMA_SEP_E[@]} $PAYLOAD_OP &
+        sleep $DURATION
+        sudo killall -q -u root dd
+    else
+        sudo perf stat -o $OUTPUT_FILE -e ${COMMA_SEP_E[@]} $PAYLOAD_OP
+    fi
+}
+
+# write the result to stdout in json format
+output_json()
+{
+    EVENTS+=('time')
+
+    last_pos=$(( ${#EVENTS[*]} - 1 ))
+    last=${EVENTS[$last_pos]}
+
+    echo -n {
+    for EVENT in ${EVENTS[@]}
+    do
+        value=$(cat $OUTPUT_FILE | grep $EVENT | awk 'match($0,/[0-9]+|[0-9]+\.[0-9]*/, a) { print a[0]}')
+
+        if [[ $EVENT != $last ]]
+        then
+            echo -n \"$EVENT\": $value,
+        else
+            echo -n \"$EVENT\": $value
+        fi
+    done
+    echo }
+}
+
+# main entry
+main()
+{
+    run_perf > /dev/null 2>&1
+    sleep 1
+    output_json
+}
+
+main
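
As a rough companion to output_json in perf_benchmark.bash above, the sketch below does the same flattening of a perf stat output file into an event-to-value mapping in Python. It is an illustration only, not part of this commit: the path mirrors the script's /tmp/perfout.txt default, the regular expression approximates the awk pattern, and the sample event list is an assumption rather than output from an actual run.

```python
# Illustration only: a rough Python counterpart to output_json in
# perf_benchmark.bash. It picks each event's line out of the perf stat
# output file and keeps the first number found, like the awk pattern above.
import json
import re

OUTPUT_FILE = '/tmp/perfout.txt'  # same default path as the bash script


def parse_perf_output(events, path=OUTPUT_FILE):
    """Return {event: value} for each event (plus 'time') found in the file."""
    result = {}
    with open(path) as f:
        lines = f.read().splitlines()
    for event in list(events) + ['time']:
        for line in lines:
            if event in line:
                # first decimal or integer on the line, as in the awk match()
                found = re.search(r'[0-9]+\.[0-9]*|[0-9]+', line)
                if found:
                    result[event] = float(found.group(0))
                break
    return result


if __name__ == '__main__':
    # hypothetical invocation after a run that measured these software events
    print(json.dumps(parse_perf_output(['task-clock', 'context-switches'])))
```

Like the bash version, this simply takes the first number on each matching line, so values that perf prints with thousands separators (for example 1,234) would need extra handling before being compared against an SLA.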