author      liyin <liyin11@huawei.com>      2017-09-20 09:06:34 +0800
committer   Ace Lee <liyin11@huawei.com>    2017-10-10 01:26:48 +0000
commit      c0dc0572a4bf4bfc308b9d9ff5072468b9995d3c (patch)
tree        d88c1db7869ac3041d9f712c6837ab2d27c99c9c
parent      ad12c5707ac1191063af5cd33d54dd30ff649787 (diff)
Bottlenecks testpmd scale-up test case.

JIRA: BOTTLENECK-205

This is the script for the testpmd scale-up feature test case.
The dashboard will be debugged in a follow-up change.

Change-Id: I4a6de0e7e1ea7c04639ece78a7b11b75128d8d2f
Signed-off-by: liyin <liyin11@huawei.com>
-rw-r--r--	testsuites/posca/testcase_cfg/posca_feature_testpmd_scale_up.yaml	39
-rw-r--r--	testsuites/posca/testcase_script/posca_factor_system_bandwidth.py	1
-rw-r--r--	testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py	141
-rw-r--r--	utils/env_prepare/stack_prepare.py	2
-rw-r--r--	utils/infra_setup/runner/docker_env.py	3
-rw-r--r--	utils/infra_setup/runner/yardstick.py	12
-rw-r--r--	utils/parser.py	9
7 files changed, 204 insertions(+), 3 deletions(-)
diff --git a/testsuites/posca/testcase_cfg/posca_feature_testpmd_scale_up.yaml b/testsuites/posca/testcase_cfg/posca_feature_testpmd_scale_up.yaml
new file mode 100644
index 00000000..a686b9db
--- /dev/null
+++ b/testsuites/posca/testcase_cfg/posca_feature_testpmd_scale_up.yaml
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2017 HUAWEI TECHNOLOGIES CO.,LTD and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+load_manager:
+ scenarios:
+ tool: testpmd
+ # The number of seconds to run at each step of the binary search for a throughput test.
+ search_interval: 60
+ # Note: cpus and mems must contain the same number of entries.
+ cpus: 6
+ mems: 12
+ # Packet sizes (in bytes) for Moongen to generate.
+ # pkt_size: 64,128,256,1024,1518
+ pkt_size: 64,128,256,1024
+ # Number of simultaneous flows (multistream) for Moongen to generate.
+ multistream: 1, 1000
+
+ runners:
+ stack_create: yardstick
+ Debug: False
+ yardstick_test_dir: "samples"
+ yardstick_testcase: "netperf_bottlenecks"
+
+ runner_extra:
+ # Note: this section lets yardstick do some extra environment preparation.
+ installation_method: yardstick
+ installation_type: testpmd
+
+contexts:
+ # Note: if a dashboard is configured, its data dashboard will be created.
+ dashboard: "Bottlenecks-ELK"
+ yardstick: "yardstick_pmd"
+ yardstick_envpre: False
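
For reference, the comma-separated scale-up values above are split into lists and paired index by index, so cpus[i] and mems[i] define one scale-up step. A minimal sketch of that expansion, mirroring the pairing done in run() in the script added below (the GB-to-MB conversion is an assumption based on the "* 1024" in that function):

    # Sketch: how the YAML scale-up values expand into per-step test configs.
    cpus = str(6).split(',')                 # "cpus: 6" -> ['6']
    mems = str(12).split(',')                # "mems: 12" -> ['12'], same length as cpus
    pkt_size = "64,128,256,1024".split(',')  # -> ['64', '128', '256', '1024']
    for cpu, mem in zip(cpus, mems):
        case_config = {"vcpu": cpu,
                       "memory": int(mem) * 1024,  # assumed GB in config, MB for yardstick
                       "pktsize": pkt_size}
        print(case_config)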
diff --git a/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py b/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
index 05ea61e2..1a54554c 100644
--- a/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
+++ b/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
@@ -94,6 +94,7 @@ def do_test(test_config, Use_Dashboard, context_conf):
def run(test_config):
con_dic = test_config["load_manager"]
+ Use_Dashboard = False
env_pre(None)
if test_config["contexts"]["yardstick_ip"] is None:
con_dic["contexts"]["yardstick_ip"] =\
diff --git a/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py b/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py
new file mode 100644
index 00000000..830ff73f
--- /dev/null
+++ b/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file implements the testpmd scale-up test case.
+It contains two parts: the first runs the test script,
+and the second is the algorithm that judges the bottleneck.
+This test uses yardstick as the tool to drive the test.'''
+
+import os
+import time
+import uuid
+import json
+import utils.logger as log
+from utils.parser import Parser as conf_parser
+import utils.env_prepare.stack_prepare as stack_prepare
+import utils.infra_setup.runner.docker_env as docker_env
+import utils.infra_setup.runner.yardstick as yardstick_task
+
+# --------------------------------------------------
+# logging configuration
+# --------------------------------------------------
+LOG = log.Logger(__name__).getLogger()
+
+testfile = os.path.basename(__file__)
+testcase, file_format = os.path.splitext(testfile)
+cidr = "/home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml"
+runner_DEBUG = True
+
+
+def env_pre(con_dic):
+ LOG.info("yardstick environment prepare!")
+ stack_prepare._prepare_env_daemon(True)
+
+
+def config_to_result(test_config, test_result):
+ final_data = []
+ print(test_result)
+ out_data = test_result["result"]["testcases"]
+ test_data = out_data["pvp_throughput_bottlenecks"]["tc_data"]
+ for result in test_data:
+ testdata = {}
+ testdata["vcpu"] = test_config["vcpu"]
+ testdata["memory"] = test_config["memory"]
+ testdata["nrFlows"] = result["data"]["nrFlows"]
+ testdata["packet_size"] = result["data"]["packet_size"]
+ testdata["throughput"] = result["data"]["throughput_rx_mbps"]
+ final_data.append(testdata)
+ return final_data
+
+
+def testcase_parser(out_file="yardstick.out", **parameter_info):
+ cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
+ cidr=cidr,
+ outfile=out_file,
+ parameter=parameter_info)
+ return cmd
+
+
+def do_test(test_config, Use_Dashboard, context_conf):
+ yardstick_container = docker_env.yardstick_info['container']
+ out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+ cmd = testcase_parser(out_file=out_file, **test_config)
+ print(cmd)
+ stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ LOG.info(stdout)
+ # Poll the yardstick output file for up to two minutes (60 x 2s).
+ loop_value = 0
+ while loop_value < 60:
+ time.sleep(2)
+ loop_value = loop_value + 1
+ with open(out_file) as f:
+ data = json.load(f)
+ if data["status"] == 1:
+ LOG.info("yardstick run success")
+ break
+ elif data["status"] == 2:
+ LOG.error("yardstick error exit")
+ exit()
+ # data = json.load(output)
+
+ save_data = config_to_result(test_config, data)
+ if Use_Dashboard is True:
+ print("use dashboard")
+ # DashBoard.dashboard_send_data(context_conf, save_data)
+
+ # return save_data["data_body"]
+ return save_data
+
+
+def run(test_config):
+ load_config = test_config["load_manager"]
+ scenarios_conf = load_config["scenarios"]
+ Use_Dashboard = False
+
+ env_pre(None)
+ if test_config["contexts"]["yardstick_ip"] is None:
+ load_config["contexts"]["yardstick_ip"] =\
+ conf_parser.ip_parser("yardstick_test_ip")
+
+ if "dashboard" in test_config["contexts"].keys():
+ if test_config["contexts"]["dashboard_ip"] is None:
+ test_config["contexts"]["dashboard_ip"] =\
+ conf_parser.ip_parser("dashboard")
+ LOG.info("Create Dashboard data")
+ Use_Dashboard = True
+ # DashBoard.dashboard_system_bandwidth(test_config["contexts"])
+
+ cpus = conf_parser.str_to_list(scenarios_conf["cpus"])
+ mems = conf_parser.str_to_list(scenarios_conf["mems"])
+ pkt_size = conf_parser.str_to_list(scenarios_conf["pkt_size"])
+ multistream = conf_parser.str_to_list(scenarios_conf["multistream"])
+ search_interval = scenarios_conf["search_interval"]
+
+ load_config["result_file"] = os.path.dirname(
+ os.path.abspath(__file__)) + "/test_case/result"
+
+ if len(cpus) != len(mems):
+ LOG.error("the cpus and mems config data number is not same!")
+ os._exit()
+
+ result = []
+
+ for i in range(0, len(cpus)):
+ case_config = {"vcpu": cpus[i],
+ "memory": int(mems[i]) * 1024,
+ "multistreams": multistream,
+ "pktsize": pkt_size,
+ "search_interval": search_interval}
+
+ data_reply = do_test(case_config, Use_Dashboard,
+ test_config["contexts"])
+ result.append(data_reply)
+
+ LOG.info("Finished bottlenecks testcase")
+ LOG.info("The result data is %s", result)
+ return result
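
Each entry that run() appends to result comes from config_to_result() and has the following shape; the None values below are placeholders for fields read from the yardstick output, not measured data:

    # Shape of one entry in the returned result list (placeholders, not real data).
    example_entry = {
        "vcpu": "6",          # from case_config
        "memory": 12288,      # from case_config, after the * 1024 conversion
        "nrFlows": None,      # result["data"]["nrFlows"]
        "packet_size": None,  # result["data"]["packet_size"]
        "throughput": None,   # result["data"]["throughput_rx_mbps"]
    }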
diff --git a/utils/env_prepare/stack_prepare.py b/utils/env_prepare/stack_prepare.py
index c7dae390..5de6218f 100644
--- a/utils/env_prepare/stack_prepare.py
+++ b/utils/env_prepare/stack_prepare.py
@@ -44,7 +44,7 @@ def _prepare_env_daemon(test_yardstick):
file_after)
docker_env.docker_exec_cmd(yardstick_contain,
cmd)
- cmd = "sed -i '12s/http/file/g' /etc/yardstick/yardstick.conf"
+ cmd = "sed -i '13s/http/file/g' /etc/yardstick/yardstick.conf"
docker_env.docker_exec_cmd(yardstick_contain,
cmd)
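
The sed change switches yardstick's output dispatcher from http to file; this patch implies that the relevant line in yardstick.conf moved from line 12 to line 13 in the image now being used. A rough Python equivalent of the new command, for illustration only:

    # Sketch: Python equivalent of
    #   sed -i '13s/http/file/g' /etc/yardstick/yardstick.conf
    # Assumes line 13 of the file holds the dispatcher setting, as the patch implies.
    path = "/etc/yardstick/yardstick.conf"
    with open(path) as f:
        lines = f.readlines()
    lines[12] = lines[12].replace("http", "file")  # line 13, zero-indexed
    with open(path, "w") as f:
        f.writelines(lines)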
diff --git a/utils/infra_setup/runner/docker_env.py b/utils/infra_setup/runner/docker_env.py
index 64d049ba..6e9c78a0 100644
--- a/utils/infra_setup/runner/docker_env.py
+++ b/utils/infra_setup/runner/docker_env.py
@@ -46,8 +46,7 @@ def env_yardstick(docker_name):
yardstick_tag = os.getenv("Yardstick_TAG")
if yardstick_tag is None:
yardstick_tag = "danube.3.1"
- env_docker = client.containers.run(image="opnfv/yardstick:%s"
- % yardstick_tag,
+ env_docker = client.containers.run(image="yardstick_pmd",
privileged=True,
tty=True,
detach=True,
diff --git a/utils/infra_setup/runner/yardstick.py b/utils/infra_setup/runner/yardstick.py
index 35b89ae8..559b9c10 100644
--- a/utils/infra_setup/runner/yardstick.py
+++ b/utils/infra_setup/runner/yardstick.py
@@ -24,6 +24,18 @@ headers = {"Content-Type": "application/json"}
LOG = logger.Logger(__name__).getLogger()
+def yardstick_command_parser(debug, cidr, outfile, parameter):
+ cmd = "yardstick"
+ if debug:
+ cmd += " -d"
+ cmd += " task start "
+ cmd += str(cidr)
+ cmd += " --output-file " + outfile
+ if parameter is not None:
+ cmd += " --task-args " + '"' + str(parameter) + '"'
+ return cmd
+
+
def Get_Reply(test_config, task_id, time_test=1):
reply_url = ("http://%s/yardstick/results?task_id=%s"
% (test_config['yardstick_test_ip'], task_id))
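
For illustration, calling the new yardstick_command_parser() with the values used by posca_feature_testpmd_scale_up.py builds a command like the following (the parameter dict is a sample, and its key order in the output may vary):

    # Sample call; the parameter dict is illustrative.
    cmd = yardstick_command_parser(
        debug=True,
        cidr="/home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml",
        outfile="/tmp/yardstick_example.out",
        parameter={"vcpu": "6", "memory": 12288})
    # cmd is roughly:
    #   yardstick -d task start
    #     /home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml
    #     --output-file /tmp/yardstick_example.out
    #     --task-args "{'vcpu': '6', 'memory': 12288}"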
diff --git a/utils/parser.py b/utils/parser.py
index ecd6badd..b46a3b91 100644
--- a/utils/parser.py
+++ b/utils/parser.py
@@ -127,6 +127,15 @@ class Parser():
f.write(json.dumps(data))
f.write("\n")
+ @staticmethod
+ def str_to_list(str_org):
+ try:
+ data = str_org.split(',')
+ except AttributeError:
+ data = []
+ data.append(str_org)
+ return data
+
class HeatTemplate_Parser():
"""parser a Heat template and a method to deploy template to a stack"""