aboutsummaryrefslogtreecommitdiffstats
path: root/yardstick
diff options
context:
space:
mode:
authorQiLiang <liangqi1@huawei.com>2015-10-21 12:29:53 +0000
committerQiLiang <liangqi1@huawei.com>2015-10-27 03:34:28 +0000
commit2e1094d4aee93180126d3ce86db3cc7df2e87bc5 (patch)
tree221e98fd325ff6fcb4fbbb3e656a3789f3a77342 /yardstick
parent884926d05f435217c7dac038b3bfbd7e9d05826b (diff)
Heat context code refactor part 2
Heat context code refactor to cater for the evolution of the Yardstick framework. Refactor runner_cfg host/target info handle, as specified at https://etherpad.opnfv.org/p/yardstick_framework step 4. Get general Context info (use Context.get). Before this refactor host and target vm must have the same user name and ssh key, that is not general enough for later extension. test_case.yaml do NOT need to change. JIRA: YARDSTICK-168 Change-Id: I5cfe868f3c6f633214ef550bc9676fe1de0709db Signed-off-by: QiLiang <liangqi1@huawei.com>
Diffstat (limited to 'yardstick')
-rw-r--r--yardstick/benchmark/contexts/base.py8
-rw-r--r--yardstick/benchmark/contexts/heat.py23
-rwxr-xr-xyardstick/benchmark/runners/arithmetic.py13
-rwxr-xr-xyardstick/benchmark/runners/base.py6
-rw-r--r--yardstick/benchmark/runners/duration.py13
-rwxr-xr-xyardstick/benchmark/runners/iteration.py13
-rw-r--r--yardstick/benchmark/runners/sequence.py13
-rw-r--r--yardstick/benchmark/scenarios/compute/cyclictest.py41
-rw-r--r--yardstick/benchmark/scenarios/compute/lmbench.py46
-rw-r--r--yardstick/benchmark/scenarios/compute/perf.py49
-rw-r--r--yardstick/benchmark/scenarios/networking/iperf3.py95
-rwxr-xr-xyardstick/benchmark/scenarios/networking/netperf.py88
-rw-r--r--yardstick/benchmark/scenarios/networking/ping.py58
-rw-r--r--yardstick/benchmark/scenarios/networking/pktgen.py69
-rw-r--r--yardstick/benchmark/scenarios/storage/fio.py45
-rwxr-xr-xyardstick/cmd/commands/task.py66
16 files changed, 390 insertions, 256 deletions
diff --git a/yardstick/benchmark/contexts/base.py b/yardstick/benchmark/contexts/base.py
index ae860accd..76a828811 100644
--- a/yardstick/benchmark/contexts/base.py
+++ b/yardstick/benchmark/contexts/base.py
@@ -48,12 +48,12 @@ class Context(object):
@abc.abstractmethod
def _get_server(self, attr_name):
- '''get server object by name from context
+ '''get server info by name from context
'''
@staticmethod
def get_server(attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
@@ -64,7 +64,7 @@ class Context(object):
break
if server is None:
- raise ValueError("context not found for server '%s'" %
- attr_name["name"])
+ raise ValueError("context not found for server '%r'" %
+ attr_name)
return server
diff --git a/yardstick/benchmark/contexts/heat.py b/yardstick/benchmark/contexts/heat.py
index 9cf29981b..7bd430bc5 100644
--- a/yardstick/benchmark/contexts/heat.py
+++ b/yardstick/benchmark/contexts/heat.py
@@ -8,6 +8,7 @@
##############################################################################
import sys
+import pkg_resources
from yardstick.benchmark.contexts.base import Context
from yardstick.benchmark.contexts.model import Server
@@ -195,10 +196,13 @@ class HeatContext(Context):
print "Context '%s' undeployed" % self.name
def _get_server(self, attr_name):
- '''lookup server object by name from context
+ '''lookup server info by name from context
attr_name: either a name for a server created by yardstick or a dict
with attribute name mapping when using external heat templates
'''
+ key_filename = pkg_resources.resource_filename(
+ 'yardstick.resources', 'files/yardstick_key')
+
if type(attr_name) is dict:
cname = attr_name["name"].split(".")[1]
if cname != self.name:
@@ -216,8 +220,21 @@ class HeatContext(Context):
server = Server(attr_name["name"].split(".")[0], self, {})
server.public_ip = public_ip
server.private_ip = private_ip
- return server
else:
if attr_name not in self._server_map:
return None
- return self._server_map[attr_name]
+ server = self._server_map[attr_name]
+
+ if server is None:
+ return None
+
+ result = {
+ "user": server.context.user,
+ "key_filename": key_filename,
+ "private_ip": server.private_ip
+ }
+ # Target server may only have private_ip
+ if server.public_ip:
+ result["ip"] = server.public_ip
+
+ return result
diff --git a/yardstick/benchmark/runners/arithmetic.py b/yardstick/benchmark/runners/arithmetic.py
index 68c8bfdef..af2303479 100755
--- a/yardstick/benchmark/runners/arithmetic.py
+++ b/yardstick/benchmark/runners/arithmetic.py
@@ -22,7 +22,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -40,12 +40,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
LOG.info("worker START, step(%s, %d, %d, %d), class %s",
arg_name, start, stop, step, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -63,7 +64,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -129,8 +130,8 @@ class ArithmeticRunner(base.Runner):
__execution_type__ = 'Arithmetic'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/base.py b/yardstick/benchmark/runners/base.py
index cc8c93cb6..d443806a7 100755
--- a/yardstick/benchmark/runners/base.py
+++ b/yardstick/benchmark/runners/base.py
@@ -169,7 +169,6 @@ class Runner(object):
Runner.release(runner)
def __init__(self, config, queue):
- self.context = {}
self.config = config
self.periodic_action_process = None
self.result_queue = queue
@@ -189,7 +188,8 @@ class Runner(object):
log.debug("post-stop data: \n%s" % data)
self.result_queue.put({'post-stop-action-data': data})
- def run(self, scenario_type, scenario_cfg):
+ def run(self, scenario_cfg, context_cfg):
+ scenario_type = scenario_cfg["type"]
class_name = base_scenario.Scenario.get(scenario_type)
path_split = class_name.split(".")
module_path = ".".join(path_split[:-1])
@@ -228,7 +228,7 @@ class Runner(object):
self.result_queue))
self.periodic_action_process.start()
- self._run_benchmark(cls, "run", scenario_cfg)
+ self._run_benchmark(cls, "run", scenario_cfg, context_cfg)
def join(self):
self.process.join()
diff --git a/yardstick/benchmark/runners/duration.py b/yardstick/benchmark/runners/duration.py
index e4ad037af..40e0aa708 100644
--- a/yardstick/benchmark/runners/duration.py
+++ b/yardstick/benchmark/runners/duration.py
@@ -21,7 +21,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -33,7 +33,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
@@ -42,7 +42,8 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
sla_action = scenario_cfg["sla"].get("action", "assert")
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
start = time.time()
while True:
@@ -54,7 +55,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -109,8 +110,8 @@ If the scenario ends before the time has elapsed, it will be started again.
'''
__execution_type__ = 'Duration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/iteration.py b/yardstick/benchmark/runners/iteration.py
index b6d861d6c..077e0e813 100755
--- a/yardstick/benchmark/runners/iteration.py
+++ b/yardstick/benchmark/runners/iteration.py
@@ -21,7 +21,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -33,12 +33,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
runner_cfg['runner_id'] = os.getpid()
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -53,7 +54,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -108,8 +109,8 @@ If the scenario ends before the time has elapsed, it will be started again.
'''
__execution_type__ = 'Iteration'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/runners/sequence.py b/yardstick/benchmark/runners/sequence.py
index 29f86e19c..a410eea0e 100644
--- a/yardstick/benchmark/runners/sequence.py
+++ b/yardstick/benchmark/runners/sequence.py
@@ -22,7 +22,7 @@ from yardstick.benchmark.runners import base
LOG = logging.getLogger(__name__)
-def _worker_process(queue, cls, method_name, scenario_cfg):
+def _worker_process(queue, cls, method_name, scenario_cfg, context_cfg):
sequence = 1
@@ -42,12 +42,13 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
LOG.info("worker START, sequence_values(%s, %s), class %s",
arg_name, sequence_values, cls)
- benchmark = cls(runner_cfg)
+ benchmark = cls(scenario_cfg, context_cfg)
benchmark.setup()
method = getattr(benchmark, method_name)
queue.put({'runner_id': runner_cfg['runner_id'],
- 'scenario_cfg': scenario_cfg})
+ 'scenario_cfg': scenario_cfg,
+ 'context_cfg': context_cfg})
sla_action = None
if "sla" in scenario_cfg:
@@ -63,7 +64,7 @@ def _worker_process(queue, cls, method_name, scenario_cfg):
errors = ""
try:
- method(scenario_cfg, data)
+ method(data)
except AssertionError as assertion:
# SLA validation failed in scenario, determine what to do now
if sla_action == "assert":
@@ -121,8 +122,8 @@ class SequenceRunner(base.Runner):
__execution_type__ = 'Sequence'
- def _run_benchmark(self, cls, method, scenario_cfg):
+ def _run_benchmark(self, cls, method, scenario_cfg, context_cfg):
self.process = multiprocessing.Process(
target=_worker_process,
- args=(self.result_queue, cls, method, scenario_cfg))
+ args=(self.result_queue, cls, method, scenario_cfg, context_cfg))
self.process.start()
diff --git a/yardstick/benchmark/scenarios/compute/cyclictest.py b/yardstick/benchmark/scenarios/compute/cyclictest.py
index 595986f8a..e8fc63cf7 100644
--- a/yardstick/benchmark/scenarios/compute/cyclictest.py
+++ b/yardstick/benchmark/scenarios/compute/cyclictest.py
@@ -54,8 +54,9 @@ class Cyclictest(base.Scenario):
TARGET_SCRIPT = "cyclictest_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -63,13 +64,14 @@ class Cyclictest(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Cyclictest.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.debug("user:%s, host:%s", user, host)
+ LOG.debug("user:%s, host:%s", user, ip)
print "key_filename:" + key_filename
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -78,14 +80,14 @@ class Cyclictest(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-m -n -q"
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
affinity = options.get("affinity", 1)
interval = options.get("interval", 1000)
priority = options.get("priority", 99)
@@ -104,13 +106,14 @@ class Cyclictest(base.Scenario):
result.update(json.loads(stdout))
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for t, latency in result.items():
- if 'max_%s_latency' % t not in args['sla']:
+ if 'max_%s_latency' % t not in self.scenario_cfg['sla']:
continue
- sla_latency = int(args['sla']['max_%s_latency' % t])
+ sla_latency = int(self.scenario_cfg['sla'][
+ 'max_%s_latency' % t])
latency = int(latency)
if latency > sla_latency:
sla_error += "%s latency %d > sla:max_%s_latency(%d); " % \
@@ -123,16 +126,16 @@ def _test():
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "192.168.50.28",
- "user": "root",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- cyclictest = Cyclictest(ctx)
-
options = {
"affinity": 2,
"interval": 100,
@@ -150,8 +153,10 @@ def _test():
"options": options,
"sla": sla
}
+ result = {}
- result = cyclictest.run(args)
+ cyclictest = Cyclictest(args, ctx)
+ cyclictest.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/compute/lmbench.py b/yardstick/benchmark/scenarios/compute/lmbench.py
index d2558c936..03caff525 100644
--- a/yardstick/benchmark/scenarios/compute/lmbench.py
+++ b/yardstick/benchmark/scenarios/compute/lmbench.py
@@ -35,8 +35,9 @@ class Lmbench(base.Scenario):
TARGET_SCRIPT = "lmbench_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -44,12 +45,13 @@ class Lmbench(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.compute",
Lmbench.TARGET_SCRIPT)
- user = self.context.get("user", "ubuntu")
- host = self.context.get("host", None)
- key_filename = self.context.get('key_filename', "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "ubuntu")
+ ip = host.get("ip", None)
+ key_filename = host.get('key_filename', "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -58,13 +60,13 @@ class Lmbench(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
stride = options.get('stride', 128)
stop_size = options.get('stop_size', 16)
@@ -75,11 +77,10 @@ class Lmbench(base.Scenario):
if status:
raise RuntimeError(stderr)
- result.update(json.loads(stdout))
-
- if "sla" in args:
+ result.update({"latencies": json.loads(stdout)})
+ if "sla" in self.scenario_cfg:
sla_error = ""
- sla_max_latency = int(args['sla']['max_latency'])
+ sla_max_latency = int(self.scenario_cfg['sla']['max_latency'])
for t_latency in result:
latency = t_latency['latency']
if latency > sla_max_latency:
@@ -92,20 +93,23 @@ def _test():
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Lmbench(ctx)
-
options = {'stride': 128, 'stop_size': 16}
-
args = {'options': options}
- result = p.run(args)
+ result = {}
+
+ p = Lmbench(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/compute/perf.py b/yardstick/benchmark/scenarios/compute/perf.py
index 281bd8e0c..f408e9cb4 100644
--- a/yardstick/benchmark/scenarios/compute/perf.py
+++ b/yardstick/benchmark/scenarios/compute/perf.py
@@ -36,20 +36,22 @@ class Perf(base.Scenario):
TARGET_SCRIPT = 'perf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
"""scenario setup"""
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.compute', Perf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -58,13 +60,13 @@ class Perf(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- options = args['options']
+ options = self.scenario_cfg['options']
events = options.get('events', ['task-clock'])
events_string = ""
@@ -72,7 +74,8 @@ class Perf(base.Scenario):
events_string += event + " "
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
@@ -98,10 +101,11 @@ class Perf(base.Scenario):
result.update(json.loads(stdout))
- if "sla" in args:
- metric = args['sla']['metric']
- exp_val = args['sla']['expected_value']
- smaller_than_exp = 'smaller_than_expected' in args['sla']
+ if "sla" in self.scenario_cfg:
+ metric = self.scenario_cfg['sla']['metric']
+ exp_val = self.scenario_cfg['sla']['expected_value']
+ smaller_than_exp = 'smaller_than_expected' \
+ in self.scenario_cfg['sla']
if metric not in result:
assert False, "Metric (%s) not found." % metric
@@ -118,20 +122,23 @@ def _test():
"""internal test function"""
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Perf(ctx)
-
options = {'load': True}
args = {'options': options}
+ result = {}
- result = p.run(args)
+ p = Perf(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/networking/iperf3.py b/yardstick/benchmark/scenarios/networking/iperf3.py
index a324c5b85..86610c88f 100644
--- a/yardstick/benchmark/scenarios/networking/iperf3.py
+++ b/yardstick/benchmark/scenarios/networking/iperf3.py
@@ -48,32 +48,39 @@ For more info see http://software.es.net/iperf
"""
__scenario_type__ = "Iperf3"
- def __init__(self, context):
- self.context = context
- self.user = context.get('user', 'ubuntu')
- self.host_ipaddr = context['host']
- self.target_ipaddr = context['target']
- self.key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
- LOG.debug("setup, key %s", self.key_filename)
- LOG.info("host:%s, user:%s", self.host_ipaddr, self.user)
- self.host = ssh.SSH(self.user, self.host_ipaddr,
- key_filename=self.key_filename)
- self.host.wait(timeout=600)
-
- LOG.info("target:%s, user:%s", self.target_ipaddr, self.user)
- self.target = ssh.SSH(self.user, self.target_ipaddr,
- key_filename=self.key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.target = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.target.wait(timeout=600)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.host = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
+ self.host.wait(timeout=600)
+
cmd = "iperf3 -s -D"
LOG.debug("Starting iperf3 server with command: %s", cmd)
status, _, stderr = self.target.execute(cmd)
if status:
raise RuntimeError(stderr)
+ self.setup_done = True
+
def teardown(self):
LOG.debug("teardown")
self.host.close()
@@ -82,14 +89,17 @@ For more info see http://software.es.net/iperf
LOG.warn(stderr)
self.target.close()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
+ if not self.setup_done:
+ self.setup()
# if run by a duration runner, get the duration time and setup as arg
- time = self.context.get('duration', None)
- options = args['options']
+ time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
+ options = self.scenario_cfg['options']
- cmd = "iperf3 -c %s --json" % (self.target_ipaddr)
+ cmd = "iperf3 -c %s --json" % (self.context_cfg['target']['ipaddr'])
# If there are no options specified
if not options:
@@ -124,8 +134,8 @@ For more info see http://software.es.net/iperf
result.update(json.loads(stdout))
- if "sla" in args:
- sla_iperf = args["sla"]
+ if "sla" in self.scenario_cfg:
+ sla_iperf = self.scenario_cfg["sla"]
if not use_UDP:
sla_bytes_per_second = int(sla_iperf["bytes_per_second"])
@@ -147,31 +157,32 @@ For more info see http://software.es.net/iperf
def _test():
'''internal test function'''
+ key_filename = pkg_resources.resource_filename('yardstick.resources',
+ 'files/yardstick_key')
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['host'] = '10.0.2.33'
- runner_cfg['target_ipaddr'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"bytes": 10000000000}
- scenario_args['sla'] = \
- {"bytes_per_second": 2900000000, "action": "monitor"}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Iperf3", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
+
+ p = Iperf(args, ctx)
+ p.run(result)
+ print result
if __name__ == '__main__':
_test()
diff --git a/yardstick/benchmark/scenarios/networking/netperf.py b/yardstick/benchmark/scenarios/networking/netperf.py
index fb5497089..dcd4ef7b6 100755
--- a/yardstick/benchmark/scenarios/networking/netperf.py
+++ b/yardstick/benchmark/scenarios/networking/netperf.py
@@ -50,8 +50,9 @@ class Netperf(base.Scenario):
TARGET_SCRIPT = 'netperf_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -59,18 +60,24 @@ class Netperf(base.Scenario):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Netperf.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
# netserver start automatically during the vm boot
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -79,17 +86,18 @@ class Netperf(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
# get global options
- ipaddr = args.get("ipaddr", '127.0.0.1')
- options = args['options']
+ ipaddr = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
+ options = self.scenario_cfg['options']
testname = options.get("testname", 'TCP_STREAM')
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
arithmetic_time = options.get("duration", None)
if duration_time:
testlen = duration_time
@@ -125,8 +133,9 @@ class Netperf(base.Scenario):
# sla check
mean_latency = float(result['mean_latency'])
- if "sla" in args:
- sla_max_mean_latency = int(args["sla"]["mean_latency"])
+ if "sla" in self.scenario_cfg:
+ sla_max_mean_latency = int(
+ self.scenario_cfg["sla"]["mean_latency"])
assert mean_latency <= sla_max_mean_latency, \
"mean_latency %f > sla_max_mean_latency(%f); " % \
@@ -135,28 +144,35 @@ class Netperf(base.Scenario):
def _test():
'''internal test function'''
- logger = logging.getLogger('yardstick')
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename,
+ "ipaddr": "10.229.47.137"
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- key_filename = pkg_resources.resource_filename('yardstick.resources',
- 'files/yardstick_key')
- runner_cfg = {}
- runner_cfg['type'] = 'Duration'
- runner_cfg['duration'] = 5
- runner_cfg['clinet'] = '10.0.2.33'
- runner_cfg['server'] = '10.0.2.53'
- runner_cfg['user'] = 'ubuntu'
- runner_cfg['output_filename'] = "/tmp/yardstick.out"
- runner_cfg['key_filename'] = key_filename
-
- scenario_args = {}
- scenario_args['options'] = {"testname": 'TCP_STREAM'}
-
- from yardstick.benchmark.runners import base as base_runner
- runner = base_runner.Runner.get(runner_cfg)
- runner.run("Netperf", scenario_args)
- runner.join()
- base_runner.Runner.release(runner)
+ options = {
+ "testname": 'TCP_STREAM'
+ }
+
+ args = {"options": options}
+ result = {}
+
+ netperf = Netperf(args, ctx)
+ netperf.run(result)
+ print result
if __name__ == '__main__':
_test()
diff --git a/yardstick/benchmark/scenarios/networking/ping.py b/yardstick/benchmark/scenarios/networking/ping.py
index 10964350b..34278b90f 100644
--- a/yardstick/benchmark/scenarios/networking/ping.py
+++ b/yardstick/benchmark/scenarios/networking/ping.py
@@ -32,28 +32,31 @@ class Ping(base.Scenario):
TARGET_SCRIPT = 'ping_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking', Ping.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
+ host = self.context_cfg['host']
+ user = host.get('user', 'ubuntu')
+ ip = host.get('ip', None)
+ key_filename = host.get('key_filename', '~/.ssh/id_rsa')
- LOG.info("user:%s, host:%s", user, host)
+ LOG.info("user:%s, host:%s", user, ip)
- self.connection = ssh.SSH(user, host, key_filename=key_filename)
+ self.connection = ssh.SSH(user, ip, key_filename=key_filename)
self.connection.wait()
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
- if "options" in args:
- options = "-s %s" % args['options'].get("packetsize", '56')
+ if "options" in self.scenario_cfg:
+ options = "-s %s" % \
+ self.scenario_cfg['options'].get("packetsize", '56')
else:
options = ""
- destination = args.get("ipaddr", '127.0.0.1')
+ destination = self.context_cfg['target'].get("ipaddr", '127.0.0.1')
LOG.debug("ping '%s' '%s'", options, destination)
@@ -66,7 +69,36 @@ class Ping(base.Scenario):
result["rtt"] = float(stdout)
- if "sla" in args:
- sla_max_rtt = int(args["sla"]["max_rtt"])
+ if "sla" in self.scenario_cfg:
+ sla_max_rtt = int(self.scenario_cfg["sla"]["max_rtt"])
assert result["rtt"] <= sla_max_rtt, "rtt %f > sla:max_rtt(%f); " % \
(result["rtt"], sla_max_rtt)
+
+
+def _test():
+ '''internal test function'''
+ key_filename = pkg_resources.resource_filename("yardstick.resources",
+ "files/yardstick_key")
+ ctx = {
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ },
+ "target": {
+ "ipaddr": "10.229.17.105",
+ }
+ }
+
+ logger = logging.getLogger("yardstick")
+ logger.setLevel(logging.DEBUG)
+
+ args = {}
+ result = {}
+
+ p = Ping(args, ctx)
+ p.run(result)
+ print result
+
+if __name__ == '__main__':
+ _test()
diff --git a/yardstick/benchmark/scenarios/networking/pktgen.py b/yardstick/benchmark/scenarios/networking/pktgen.py
index f373fd2ec..9dac4c90c 100644
--- a/yardstick/benchmark/scenarios/networking/pktgen.py
+++ b/yardstick/benchmark/scenarios/networking/pktgen.py
@@ -37,8 +37,9 @@ class Pktgen(base.Scenario):
TARGET_SCRIPT = 'pktgen_benchmark.bash'
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -46,17 +47,23 @@ class Pktgen(base.Scenario):
self.target_script = pkg_resources.resource_filename(
'yardstick.benchmark.scenarios.networking',
Pktgen.TARGET_SCRIPT)
- user = self.context.get('user', 'ubuntu')
- host = self.context.get('host', None)
- target = self.context.get('target', None)
- key_filename = self.context.get('key_filename', '~/.ssh/id_rsa')
-
- LOG.info("user:%s, target:%s", user, target)
- self.server = ssh.SSH(user, target, key_filename=key_filename)
+ host = self.context_cfg['host']
+ host_user = host.get('user', 'ubuntu')
+ host_ip = host.get('ip', None)
+ host_key_filename = host.get('key_filename', '~/.ssh/id_rsa')
+ target = self.context_cfg['target']
+ target_user = target.get('user', 'ubuntu')
+ target_ip = target.get('ip', None)
+ target_key_filename = target.get('key_filename', '~/.ssh/id_rsa')
+
+ LOG.info("user:%s, target:%s", target_user, target_ip)
+ self.server = ssh.SSH(target_user, target_ip,
+ key_filename=target_key_filename)
self.server.wait(timeout=600)
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", host_user, host_ip)
+ self.client = ssh.SSH(host_user, host_ip,
+ key_filename=host_key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -86,19 +93,20 @@ class Pktgen(base.Scenario):
raise RuntimeError(stderr)
return int(stdout)
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
if not self.setup_done:
self.setup()
- ipaddr = args.get("ipaddr", '127.0.0.1')
+ ipaddr = self.context_cfg["target"].get("ipaddr", '127.0.0.1')
- options = args['options']
+ options = self.scenario_cfg['options']
packetsize = options.get("packetsize", 60)
self.number_of_ports = options.get("number_of_ports", 10)
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
@@ -123,11 +131,11 @@ class Pktgen(base.Scenario):
result['packets_received'] = self._iptables_get_result()
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sent = result['packets_sent']
received = result['packets_received']
ppm = 1000000 * (sent - received) / sent
- sla_max_ppm = int(args["sla"]["max_ppm"])
+ sla_max_ppm = int(self.scenario_cfg["sla"]["max_ppm"])
assert ppm <= sla_max_ppm, "ppm %d > sla_max_ppm %d; " \
% (ppm, sla_max_ppm)
@@ -136,22 +144,29 @@ def _test():
'''internal test function'''
key_filename = pkg_resources.resource_filename('yardstick.resources',
'files/yardstick_key')
- ctx = {'host': '172.16.0.137',
- 'target': '172.16.0.138',
- 'user': 'ubuntu',
- 'key_filename': key_filename
- }
+ ctx = {
+ 'host': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename
+ },
+ 'target': {
+ 'ip': '10.229.47.137',
+ 'user': 'root',
+ 'key_filename': key_filename,
+ 'ipaddr': '10.229.47.137',
+ }
+ }
logger = logging.getLogger('yardstick')
logger.setLevel(logging.DEBUG)
- p = Pktgen(ctx)
-
options = {'packetsize': 120}
+ args = {'options': options}
+ result = {}
- args = {'options': options,
- 'ipaddr': '192.168.111.31'}
- result = p.run(args)
+ p = Pktgen(args, ctx)
+ p.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/benchmark/scenarios/storage/fio.py b/yardstick/benchmark/scenarios/storage/fio.py
index af90b0703..8969472e9 100644
--- a/yardstick/benchmark/scenarios/storage/fio.py
+++ b/yardstick/benchmark/scenarios/storage/fio.py
@@ -48,8 +48,9 @@ class Fio(base.Scenario):
TARGET_SCRIPT = "fio_benchmark.bash"
- def __init__(self, context):
- self.context = context
+ def __init__(self, scenario_cfg, context_cfg):
+ self.scenario_cfg = scenario_cfg
+ self.context_cfg = context_cfg
self.setup_done = False
def setup(self):
@@ -57,12 +58,13 @@ class Fio(base.Scenario):
self.target_script = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.storage",
Fio.TARGET_SCRIPT)
- user = self.context.get("user", "root")
- host = self.context.get("host", None)
- key_filename = self.context.get("key_filename", "~/.ssh/id_rsa")
+ host = self.context_cfg["host"]
+ user = host.get("user", "root")
+ ip = host.get("ip", None)
+ key_filename = host.get("key_filename", "~/.ssh/id_rsa")
- LOG.info("user:%s, host:%s", user, host)
- self.client = ssh.SSH(user, host, key_filename=key_filename)
+ LOG.info("user:%s, host:%s", user, ip)
+ self.client = ssh.SSH(user, ip, key_filename=key_filename)
self.client.wait(timeout=600)
# copy script to host
@@ -71,7 +73,7 @@ class Fio(base.Scenario):
self.setup_done = True
- def run(self, args, result):
+ def run(self, result):
"""execute the benchmark"""
default_args = "-ioengine=libaio -direct=1 -group_reporting " \
"-numjobs=1 -time_based --output-format=json"
@@ -79,7 +81,7 @@ class Fio(base.Scenario):
if not self.setup_done:
self.setup()
- options = args["options"]
+ options = self.scenario_cfg["options"]
filename = options.get("filename", "/home/ec2-user/data.raw")
bs = options.get("bs", "4k")
iodepth = options.get("iodepth", "1")
@@ -87,7 +89,8 @@ class Fio(base.Scenario):
ramp_time = options.get("ramp_time", 20)
name = "yardstick-fio"
# if run by a duration runner
- duration_time = self.context.get("duration", None)
+ duration_time = self.scenario_cfg["runner"].get("duration", None) \
+ if "runner" in self.scenario_cfg else None
# if run by an arithmetic runner
arithmetic_time = options.get("duration", None)
if duration_time:
@@ -122,20 +125,20 @@ class Fio(base.Scenario):
result["write_iops"] = raw_data["jobs"][0]["write"]["iops"]
result["write_lat"] = raw_data["jobs"][0]["write"]["lat"]["mean"]
- if "sla" in args:
+ if "sla" in self.scenario_cfg:
sla_error = ""
for k, v in result.items():
- if k not in args['sla']:
+ if k not in self.scenario_cfg['sla']:
continue
if "lat" in k:
                     # For latency small value is better
- max_v = float(args['sla'][k])
+ max_v = float(self.scenario_cfg['sla'][k])
if v > max_v:
sla_error += "%s %f > sla:%s(%f); " % (k, v, k, max_v)
else:
# For bandwidth and iops big value is better
- min_v = int(args['sla'][k])
+ min_v = int(self.scenario_cfg['sla'][k])
if v < min_v:
sla_error += "%s %d < " \
"sla:%s(%d); " % (k, v, k, min_v)
@@ -148,16 +151,16 @@ def _test():
key_filename = pkg_resources.resource_filename("yardstick.resources",
"files/yardstick_key")
ctx = {
- "host": "10.0.0.101",
- "user": "ec2-user",
- "key_filename": key_filename
+ "host": {
+ "ip": "10.229.47.137",
+ "user": "root",
+ "key_filename": key_filename
+ }
}
logger = logging.getLogger("yardstick")
logger.setLevel(logging.DEBUG)
- fio = Fio(ctx)
-
options = {
"filename": "/home/ec2-user/data.raw",
"bs": "4k",
@@ -166,9 +169,11 @@ def _test():
"ramp_time": 1,
"duration": 10
}
+ result = {}
args = {"options": options}
- result = fio.run(args)
+ fio = Fio(args, ctx)
+ fio.run(result)
print result
if __name__ == '__main__':
diff --git a/yardstick/cmd/commands/task.py b/yardstick/cmd/commands/task.py
index 5eb38989a..8db6e77e6 100755
--- a/yardstick/cmd/commands/task.py
+++ b/yardstick/cmd/commands/task.py
@@ -13,7 +13,6 @@ import sys
import os
import yaml
import atexit
-import pkg_resources
import ipaddress
from yardstick.benchmark.contexts.base import Context
@@ -242,42 +241,61 @@ def is_ip_addr(addr):
return False
-def run_one_scenario(scenario_cfg, output_file):
- '''run one scenario using context'''
- key_filename = pkg_resources.resource_filename(
- 'yardstick.resources', 'files/yardstick_key')
+def _is_same_heat_context(host_attr, target_attr):
+ '''check if two servers are in the same heat context
+ host_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ target_attr: either a name for a server created by yardstick or a dict
+ with attribute name mapping when using external heat templates
+ '''
+ host = None
+ target = None
+ for context in Context.list:
+ if context.__context_type__ != "Heat":
+ continue
+
+ host = context._get_server(host_attr)
+ if host is None:
+ continue
+
+ target = context._get_server(target_attr)
+ if target is None:
+ return False
+
+            # Both host and target are not None, so they are in the
+            # same heat context.
+ return True
+
+ return False
- # TODO support get multi hosts/vms info
- host = Context.get_server(scenario_cfg["host"])
+def run_one_scenario(scenario_cfg, output_file):
+ '''run one scenario using context'''
runner_cfg = scenario_cfg["runner"]
- runner_cfg['host'] = host.public_ip
- runner_cfg['user'] = host.context.user
- runner_cfg['key_filename'] = key_filename
runner_cfg['output_filename'] = output_file
+ # TODO support get multi hosts/vms info
+ context_cfg = {}
+ context_cfg['host'] = Context.get_server(scenario_cfg["host"])
+
if "target" in scenario_cfg:
if is_ip_addr(scenario_cfg["target"]):
- scenario_cfg["ipaddr"] = scenario_cfg["target"]
+ context_cfg['target'] = {}
+ context_cfg['target']["ipaddr"] = scenario_cfg["target"]
else:
- target = Context.get_server(scenario_cfg["target"])
-
- # get public IP for target server, some scenarios require it
- if target.public_ip:
- runner_cfg['target'] = target.public_ip
-
- # TODO scenario_cfg["ipaddr"] is bad naming
- if host.context != target.context:
- # target is in another context, get its public IP
- scenario_cfg["ipaddr"] = target.public_ip
+ context_cfg['target'] = Context.get_server(scenario_cfg["target"])
+ if _is_same_heat_context(scenario_cfg["host"],
+ scenario_cfg["target"]):
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["private_ip"]
else:
- # target is in the same context, get its private IP
- scenario_cfg["ipaddr"] = target.private_ip
+ context_cfg["target"]["ipaddr"] = \
+ context_cfg["target"]["ip"]
runner = base_runner.Runner.get(runner_cfg)
print "Starting runner of type '%s'" % runner_cfg["type"]
- runner.run(scenario_cfg["type"], scenario_cfg)
+ runner.run(scenario_cfg, context_cfg)
return runner