author     houjingwen <houjingwen@huawei.com>   2015-10-19 15:37:06 +0800
committer  Hou Jingwen <houjingwen@huawei.com>  2015-10-22 00:55:25 +0000
commit     e4e8688e0633ef22b2ff0ea8ba739313d5299ecc (patch)
tree       0fee27d4e504b36e9eb24530cf85a0d33fd0b463 /yardstick/benchmark/scenarios
parent     9816c5aa786f7ec831c549b8ed4b5e8ef485da64 (diff)
Update sla check for scenarios
This patch fixes the issue that the SLA check result is not complete: instead of
asserting on the first violated SLA metric, each scenario now collects all
violations before asserting.

JIRA: YARDSTICK-172

Change-Id: I10438390baee92caf00dbfcdbdb833823ff8ce31
Signed-off-by: houjingwen <houjingwen@huawei.com>
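The change follows one pattern across all eight scenarios, which is easier to
read in isolation than in the hunks below: run() now takes the runner-owned
result dict as a second argument, fills it in place, and accumulates all SLA
violations into a single error string before one final assert. A minimal
sketch of that pattern, assuming a hypothetical exec transport (the client
object and command here are placeholders, not part of this patch):

import json


class ExampleScenario(object):
    """Illustrative sketch only; mirrors the run(args, result) shape below."""

    def __init__(self, client):
        # placeholder SSH/exec transport, not part of the patch
        self.client = client

    def run(self, args, result):
        """execute the benchmark"""
        status, stdout, stderr = self.client.execute("some_benchmark_cmd")
        if status:
            raise RuntimeError(stderr)

        # Fill the runner-owned dict in place instead of returning data, so
        # the metrics are recorded even when the SLA assert below fires.
        result.update(json.loads(stdout))

        if "sla" in args:
            sla_error = ""
            # Accumulate every violation rather than stopping at the first,
            # so the SLA check result is complete (YARDSTICK-172).
            for metric, value in result.items():
                if metric not in args["sla"]:
                    continue
                if float(value) > float(args["sla"][metric]):
                    sla_error += "%s %s > sla:%s(%s); " % \
                        (metric, value, metric, args["sla"][metric])
            assert sla_error == "", sla_error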
Diffstat (limited to 'yardstick/benchmark/scenarios')
-rw-r--r--  yardstick/benchmark/scenarios/compute/cyclictest.py    15
-rw-r--r--  yardstick/benchmark/scenarios/compute/lmbench.py       17
-rw-r--r--  yardstick/benchmark/scenarios/compute/perf.py          15
-rw-r--r--  yardstick/benchmark/scenarios/networking/iperf3.py     14
-rwxr-xr-x  yardstick/benchmark/scenarios/networking/netperf.py    13
-rw-r--r--  yardstick/benchmark/scenarios/networking/ping.py       10
-rw-r--r--  yardstick/benchmark/scenarios/networking/pktgen.py     14
-rw-r--r--  yardstick/benchmark/scenarios/storage/fio.py           15
8 files changed, 54 insertions(+), 59 deletions(-)
diff --git a/yardstick/benchmark/scenarios/compute/cyclictest.py b/yardstick/benchmark/scenarios/compute/cyclictest.py
index aaa98b881..595986f8a 100644
--- a/yardstick/benchmark/scenarios/compute/cyclictest.py
+++ b/yardstick/benchmark/scenarios/compute/cyclictest.py
@@ -78,7 +78,7 @@ class Cyclictest(base.Scenario):
         self.setup_done = True

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""
         default_args = "-m -n -q"
@@ -102,19 +102,20 @@ class Cyclictest(base.Scenario):
         if status:
             raise RuntimeError(stderr)

-        data = json.loads(stdout)
+        result.update(json.loads(stdout))

         if "sla" in args:
-            for t, latency in data.items():
+            sla_error = ""
+            for t, latency in result.items():
                 if 'max_%s_latency' % t not in args['sla']:
                     continue
                 sla_latency = int(args['sla']['max_%s_latency' % t])
                 latency = int(latency)
-                assert latency <= sla_latency, "%s latency %d > " \
-                    "sla:max_%s_latency(%d)" % (t, latency, t, sla_latency)
-
-        return data
+                if latency > sla_latency:
+                    sla_error += "%s latency %d > sla:max_%s_latency(%d); " % \
+                        (t, latency, t, sla_latency)
+            assert sla_error == "", sla_error


 def _test():
diff --git a/yardstick/benchmark/scenarios/compute/lmbench.py b/yardstick/benchmark/scenarios/compute/lmbench.py
index 367739128..d2558c936 100644
--- a/yardstick/benchmark/scenarios/compute/lmbench.py
+++ b/yardstick/benchmark/scenarios/compute/lmbench.py
@@ -58,7 +58,7 @@ class Lmbench(base.Scenario):
         self.setup_done = True

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""

         if not self.setup_done:
@@ -75,16 +75,17 @@ class Lmbench(base.Scenario):
         if status:
             raise RuntimeError(stderr)

-        data = json.loads(stdout)
+        result.update(json.loads(stdout))

         if "sla" in args:
+            sla_error = ""
             sla_max_latency = int(args['sla']['max_latency'])
-            for result in data:
-                latency = result['latency']
-                assert latency <= sla_max_latency, "latency %f > " \
-                    "sla:max_latency(%f)" % (latency, sla_max_latency)
-
-        return data
+            for t_latency in result:
+                latency = t_latency['latency']
+                if latency > sla_max_latency:
+                    sla_error += "latency %f > sla:max_latency(%f); " \
+                        % (latency, sla_max_latency)
+            assert sla_error == "", sla_error


 def _test():
diff --git a/yardstick/benchmark/scenarios/compute/perf.py b/yardstick/benchmark/scenarios/compute/perf.py
index a874ea94c..281bd8e0c 100644
--- a/yardstick/benchmark/scenarios/compute/perf.py
+++ b/yardstick/benchmark/scenarios/compute/perf.py
@@ -58,7 +58,7 @@ class Perf(base.Scenario):
         self.setup_done = True

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""

         if not self.setup_done:
@@ -96,23 +96,22 @@ class Perf(base.Scenario):
         if status:
             raise RuntimeError(stdout)

-        output = json.loads(stdout)
+        result.update(json.loads(stdout))

         if "sla" in args:
             metric = args['sla']['metric']
             exp_val = args['sla']['expected_value']
             smaller_than_exp = 'smaller_than_expected' in args['sla']

-            if metric not in output:
+            if metric not in result:
                 assert False, "Metric (%s) not found." % metric
             else:
                 if smaller_than_exp:
-                    assert output[metric] < exp_val, "%s %d >= %d (sla)" \
-                        % (metric, output[metric], exp_val)
+                    assert result[metric] < exp_val, "%s %d >= %d (sla); " \
+                        % (metric, result[metric], exp_val)
                 else:
-                    assert output[metric] >= exp_val, "%s %d < %d (sla)" \
-                        % (metric, output[metric], exp_val)
-        return output
+                    assert result[metric] >= exp_val, "%s %d < %d (sla); " \
+                        % (metric, result[metric], exp_val)


 def _test():
diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index e31a892d2..a324c5b85 100644
--- a/yardstick/benchmark/scenarios/networking/iperf3.py
+++ b/yardstick/benchmark/scenarios/networking/iperf3.py
@@ -82,7 +82,7 @@ For more info see http://software.es.net/iperf
             LOG.warn(stderr)
         self.target.close()

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""
         # if run by a duration runner, get the duration time and setup as arg
@@ -122,7 +122,7 @@ For more info see http://software.es.net/iperf
             # error cause in json dict on stdout
             raise RuntimeError(stdout)

-        output = json.loads(stdout)
+        result.update(json.loads(stdout))

         if "sla" in args:
             sla_iperf = args["sla"]
@@ -131,21 +131,19 @@ For more info see http://software.es.net/iperf
                 # convert bits per second to bytes per second
                 bit_per_second = \
-                    int(output["end"]["sum_received"]["bits_per_second"])
+                    int(result["end"]["sum_received"]["bits_per_second"])
                 bytes_per_second = bit_per_second / 8
                 assert bytes_per_second >= sla_bytes_per_second, \
-                    "bytes_per_second %d < sla:bytes_per_second (%d)" % \
+                    "bytes_per_second %d < sla:bytes_per_second (%d); " % \
                     (bytes_per_second, sla_bytes_per_second)
             else:
                 sla_jitter = float(sla_iperf["jitter"])
-                jitter_ms = float(output["end"]["sum"]["jitter_ms"])
+                jitter_ms = float(result["end"]["sum"]["jitter_ms"])
                 assert jitter_ms <= sla_jitter, \
-                    "jitter_ms %f > sla:jitter %f" % \
+                    "jitter_ms %f > sla:jitter %f; " % \
                     (jitter_ms, sla_jitter)

-        return output
-

 def _test():
     '''internal test function'''
diff --git a/yardstick/benchmark/scenarios/networking/netperf.py b/yardstick/benchmark/scenarios/networking/netperf.py
index 3121fdaf2..fb5497089 100755
--- a/yardstick/benchmark/scenarios/networking/netperf.py
+++ b/yardstick/benchmark/scenarios/networking/netperf.py
@@ -79,7 +79,7 @@ class Netperf(base.Scenario):
         self.setup_done = True

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""

         if not self.setup_done:
@@ -118,21 +118,20 @@ class Netperf(base.Scenario):
         if status:
             raise RuntimeError(stderr)

-        data = json.loads(stdout)
-        if data['mean_latency'] == '':
+        result.update(json.loads(stdout))
+
+        if result['mean_latency'] == '':
             raise RuntimeError(stdout)

         # sla check
-        mean_latency = float(data['mean_latency'])
+        mean_latency = float(result['mean_latency'])
         if "sla" in args:
             sla_max_mean_latency = int(args["sla"]["mean_latency"])
             assert mean_latency <= sla_max_mean_latency, \
-                "mean_latency %f > sla_max_mean_latency(%f)" % \
+                "mean_latency %f > sla_max_mean_latency(%f); " % \
                 (mean_latency, sla_max_mean_latency)

-        return data
-

 def _test():
     '''internal test function'''
diff --git a/yardstick/benchmark/scenarios/networking/ping.py b/yardstick/benchmark/scenarios/networking/ping.py
index 41395d8d6..10964350b 100644
--- a/yardstick/benchmark/scenarios/networking/ping.py
+++ b/yardstick/benchmark/scenarios/networking/ping.py
@@ -45,7 +45,7 @@ class Ping(base.Scenario):
         self.connection = ssh.SSH(user, host, key_filename=key_filename)
         self.connection.wait()

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""

         if "options" in args:
@@ -64,11 +64,9 @@ class Ping(base.Scenario):
         if exit_status != 0:
             raise RuntimeError(stderr)

-        rtt = float(stdout)
+        result["rtt"] = float(stdout)

         if "sla" in args:
             sla_max_rtt = int(args["sla"]["max_rtt"])
-            assert rtt <= sla_max_rtt, "rtt %f > sla:max_rtt(%f)" % \
-                (rtt, sla_max_rtt)
-
-        return rtt
+            assert result["rtt"] <= sla_max_rtt, "rtt %f > sla:max_rtt(%f); " % \
+                (result["rtt"], sla_max_rtt)
diff --git a/yardstick/benchmark/scenarios/networking/pktgen.py b/yardstick/benchmark/scenarios/networking/pktgen.py
index cc28b514a..f373fd2ec 100644
--- a/yardstick/benchmark/scenarios/networking/pktgen.py
+++ b/yardstick/benchmark/scenarios/networking/pktgen.py
@@ -86,7 +86,7 @@ class Pktgen(base.Scenario):
             raise RuntimeError(stderr)
         return int(stdout)

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""

         if not self.setup_done:
@@ -119,20 +119,18 @@ class Pktgen(base.Scenario):
         if status:
             raise RuntimeError(stderr)

-        data = json.loads(stdout)
+        result.update(json.loads(stdout))

-        data['packets_received'] = self._iptables_get_result()
+        result['packets_received'] = self._iptables_get_result()

         if "sla" in args:
-            sent = data['packets_sent']
-            received = data['packets_received']
+            sent = result['packets_sent']
+            received = result['packets_received']
             ppm = 1000000 * (sent - received) / sent
             sla_max_ppm = int(args["sla"]["max_ppm"])
-            assert ppm <= sla_max_ppm, "ppm %d > sla_max_ppm %d" \
+            assert ppm <= sla_max_ppm, "ppm %d > sla_max_ppm %d; " \
                 % (ppm, sla_max_ppm)

-        return data
-

 def _test():
     '''internal test function'''
diff --git a/yardstick/benchmark/scenarios/storage/fio.py b/yardstick/benchmark/scenarios/storage/fio.py
index 1107a8b2c..af90b0703 100644
--- a/yardstick/benchmark/scenarios/storage/fio.py
+++ b/yardstick/benchmark/scenarios/storage/fio.py
@@ -71,11 +71,10 @@ class Fio(base.Scenario):
         self.setup_done = True

-    def run(self, args):
+    def run(self, args, result):
         """execute the benchmark"""
         default_args = "-ioengine=libaio -direct=1 -group_reporting " \
             "-numjobs=1 -time_based --output-format=json"
-        result = {}

         if not self.setup_done:
             self.setup()
@@ -124,6 +123,7 @@ class Fio(base.Scenario):
             result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]

         if "sla" in args:
+            sla_error = ""
             for k, v in result.items():
                 if k not in args['sla']:
                     continue
@@ -131,15 +131,16 @@ class Fio(base.Scenario):
                 if "lat" in k:
                     # For lattency small value is better
                     max_v = float(args['sla'][k])
-                    assert v <= max_v, "%s %f > " \
-                        "sla:%s(%f)" % (k, v, k, max_v)
+                    if v > max_v:
+                        sla_error += "%s %f > sla:%s(%f); " % (k, v, k, max_v)
                 else:
                     # For bandwidth and iops big value is better
                     min_v = int(args['sla'][k])
-                    assert v >= min_v, "%s %d < " \
-                        "sla:%s(%d)" % (k, v, k, min_v)
+                    if v < min_v:
+                        sla_error += "%s %d < " \
+                            "sla:%s(%d); " % (k, v, k, min_v)

-        return result
+            assert sla_error == "", sla_error


 def _test():