author     Yang Yu <Gabriel.yuyang@huawei.com>          2018-04-16 17:19:44 +0800
committer  Yu Yang (Gabriel) <Gabriel.yuyang@huawei.com>  2018-04-25 08:43:39 +0000
commit     a8636748fbf45bb3d8faa4f12e6b34cbd96ed599 (patch)
tree       815ce68f8a3a63253f425da8bf902877deb6a0d1
parent     4828a4c36083c9409c97a4652d04a2d5f974d456 (diff)
Lazy creation of VM pairs for soak throughputs tests
Add the lazy creation process of VM pairs while testing dataplane traffic using netperf

Change-Id: I3228ff444ea074b99cb1afaec138191358064e40
Signed-off-by: Yang Yu <Gabriel.yuyang@huawei.com>
(cherry picked from commit 61aafcb0ca6cab9c25d2df7d409f4fa8716ebe5f)
-rwxr-xr-x  run_tests.sh                                                        4
-rw-r--r--  testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml    6
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_soak_throughputs.py  92
-rw-r--r--  utils/infra_setup/runner/yardstick.py                               2
4 files changed, 82 insertions, 22 deletions
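
The heart of the change is the scheduling logic added to run() in posca_factor_soak_throughputs.py: VM pairs are created lazily, one every vim_pair_lazy_cre_delay seconds, each pair carrying netperf traffic for vim_pair_ttl seconds, until the overall test_duration_hours window is covered. A minimal sketch of the pair-count calculation used below, assuming all three inputs come from the testcase YAML (the helper name is illustrative):

    # Minimal sketch (illustrative): how many VM pairs the soak test will
    # lazily create, given the three scenario parameters from the YAML.
    import math

    def planned_vim_pair_num(test_duration_hours, vim_pair_ttl,
                             vim_pair_lazy_cre_delay):
        test_duration = float(test_duration_hours) * 3600
        # A new pair is started every vim_pair_lazy_cre_delay seconds; the
        # last pair must still get its full vim_pair_ttl inside the window.
        return int(math.ceil(
            (test_duration - float(vim_pair_ttl)) /
            float(vim_pair_lazy_cre_delay)) + 1)
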
diff --git a/run_tests.sh b/run_tests.sh
index 2463edbf..3bf18c62 100755
--- a/run_tests.sh
+++ b/run_tests.sh
@@ -17,12 +17,12 @@ where:
-h|--help show the help text
-s|--teststory run specific test story
<test story> one of the following:
- (rubbos, vstf, posca_factor_test)
+ (posca_factor_test)
user can also define their own test story and pass as var to this file,
please refer to testsuites/posca/testsuite_story/ for details
-c|--testcase run specific test case
<test case> one of the following:
- (posca_factor_system_bandwidth, posca_factor_ping)
+ (posca_factor_ping, posca_factor_soak_throughputs, ...)
--cleanup cleanup test dockers runing when test is done (false by default)
--report push results to DB (false by default)
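
With the help text updated, the soak test can be selected directly by case name, e.g. ./run_tests.sh -c posca_factor_soak_throughputs, optionally combined with --cleanup or --report as listed above.
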
diff --git a/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml b/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml
index 346c4106..983b7d76 100644
--- a/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml
+++ b/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml
@@ -15,9 +15,9 @@
load_manager:
scenarios:
tool: netperf
- test_duration_hours: 1
- vim_pair_ttl: 300
- vim_pair_lazy_cre_delay: 2
+ test_duration_hours: 0.1
+ vim_pair_ttl: 10
+ vim_pair_lazy_cre_delay: 120
package_size:
threshhold:
package_loss: 0%
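
The new defaults turn the scenario into a much shorter run. Plugging them into the pair-count formula added in run() (next file), 0.1 h = 360 s of test time, a 10 s pair TTL and a 120 s creation delay give four VM pairs:

    >>> import math
    >>> int(math.ceil((0.1 * 3600 - 10) / 120) + 1)
    4
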
diff --git a/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py b/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py
index b45ce932..2fd35006 100644
--- a/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py
+++ b/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py
@@ -28,18 +28,16 @@ import utils.logger as log
import uuid
import json
import os
-import sys
import time
-# import threading
-# import datetime
+import threading
+import datetime
import Queue
-# from utils.parser import Parser as conf_parser
+from utils.parser import Parser as conf_parser
import utils.env_prepare.quota_prepare as quota_prepare
import utils.env_prepare.stack_prepare as stack_prepare
import utils.infra_setup.runner.yardstick as runner_yardstick
-
-# import testsuites.posca.testcase_dashboard.posca_factor_throughputs as DashBoard # noqa
import utils.infra_setup.runner.docker_env as docker_env
+import math
# --------------------------------------------------
# logging configuration
@@ -76,7 +74,6 @@ def env_pre(test_config):
def do_test(con_dic):
- func_name = sys._getframe().f_code.co_name
out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
parameter_info = dict(test_time=con_dic["scenarios"]["vim_pair_ttl"])
yardstick_container = docker_env.yardstick_info['container']
@@ -93,25 +90,30 @@ def do_test(con_dic):
loop_value = loop_value + 1
with open(out_file) as f:
data = json.load(f)
- if data["status"] == 1:
+ if data["result"]["criteria"] == "PASS":
LOG.info("Success run yardstick netperf_soak test!")
out_value = 1
break
- elif data["status"] == 2:
+ elif data["result"]["criteria"] == "FAIL":
LOG.error("Failed run yardstick netperf_soak test!")
out_value = 0
break
- q.put((out_value, func_name))
+ q.put((out_value, data["result"]["testcases"]))
return out_value
-def config_to_result(num, out_num, during_date, result):
+def config_to_result(
+ test_duration, added_duration, vim_pair_ttl,
+ vim_pair_lazy_cre_delay,
+ vim_pair_num, vim_pair_success_num, result):
testdata = {}
test_result = {}
- test_result["number_of_stacks"] = float(num)
- test_result["success_times"] = out_num
- test_result["success_rate"] = out_num / num
- test_result["duration_time"] = during_date
+ test_result["test_duration"] = test_duration
+ test_result["sum_duration"] = added_duration
+ test_result["vim_pair_ttl"] = vim_pair_ttl
+ test_result["vim_pair_cre_interval"] = vim_pair_lazy_cre_delay
+ test_result["vim_pair_num"] = vim_pair_num
+ test_result["vim_pair_success_num"] = vim_pair_success_num
test_result["result"] = result
testdata["data_body"] = test_result
testdata["testcase"] = testcase
@@ -129,4 +131,62 @@ def run(test_config):
env_pre(test_config)
LOG.info("yardstick environment prepare done!")
- return func_run(con_dic)
+ test_duration = float(
+ con_dic["scenarios"]["test_duration_hours"]) * 3600
+ vim_pair_ttl = float(
+ con_dic["scenarios"]["vim_pair_ttl"])
+ vim_pair_lazy_cre_delay = float(
+ con_dic["scenarios"]["vim_pair_lazy_cre_delay"])
+ vim_pair_num = int(math.ceil(
+ (test_duration - vim_pair_ttl) / vim_pair_lazy_cre_delay
+ ) + 1)
+
+ threadings = []
+ result = []
+ vim_pair_success_num = 0
+
+ start_time = datetime.datetime.now()
+
+ LOG.info("Data-path test duration are %i seconds", test_duration)
+ LOG.info("TTL of each VM pair are %i seconds", vim_pair_ttl)
+ LOG.info("Creation delay between VM pairs are %i seconds",
+ vim_pair_lazy_cre_delay)
+ LOG.info("Number of VM pairs to be created are %i", vim_pair_num)
+
+ for vim_pair_index in xrange(0, vim_pair_num):
+ index_thread = threading.Thread(target=func_run,
+ args=(con_dic,))
+ threadings.append(index_thread)
+ index_thread.start()
+ vim_pair_error = False
+ for wait_time in xrange(0, int(vim_pair_lazy_cre_delay)):
+ time.sleep(1)
+ while not q.empty():
+ result.append(q.get())
+ for one_result in result:
+ if 0 == one_result[0]:
+ vim_pair_error = True
+ break
+ if vim_pair_error:
+ break
+ for one_thread in threadings:
+ one_thread.join()
+ while not q.empty():
+ result.append(q.get())
+ for item in result:
+ vim_pair_success_num += int(item[0])
+
+ end_time = datetime.datetime.now()
+ added_duration = (end_time - start_time).seconds
+ LOG.info("Number of success VM pairs/threads are %s out %s ",
+ vim_pair_success_num, vim_pair_num)
+
+ return_result = config_to_result(
+ test_duration, added_duration, vim_pair_ttl,
+ vim_pair_lazy_cre_delay,
+ vim_pair_num, vim_pair_success_num, result
+ )
+
+ conf_parser.result_to_file(return_result, test_config["out_file"])
+
+ return vim_pair_error
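
The loop above follows a simple worker/collector pattern: each VM pair gets its own thread running func_run(), results come back through the module-level Queue q, and the main thread sleeps one second at a time during the creation delay so it can poll the queue and stop launching new pairs as soon as one reports failure; already-started threads are still joined so their yardstick runs finish cleanly. A stripped-down, runnable sketch of that pattern (Python 2, matching the script; everything except q and func_run is illustrative):

    import threading
    import time
    import Queue  # Python 2 module, as used by the script above

    q = Queue.Queue()

    def func_run(pair_index):
        # Stand-in for the yardstick netperf_soak run of one VM pair.
        time.sleep(1)
        q.put((1, "pair-%d finished" % pair_index))

    threadings = []
    result = []
    vim_pair_error = False
    for pair_index in xrange(0, 4):        # e.g. 4 pairs, as in the smoke test
        t = threading.Thread(target=func_run, args=(pair_index,))
        threadings.append(t)
        t.start()
        for _ in xrange(0, 2):             # creation delay, polled per second
            time.sleep(1)
            while not q.empty():
                result.append(q.get())
            if any(status == 0 for status, _ in result):
                vim_pair_error = True
                break
        if vim_pair_error:
            break
    for t in threadings:
        t.join()
    while not q.empty():
        result.append(q.get())
    print len(result), "pairs reported"
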
diff --git a/utils/infra_setup/runner/yardstick.py b/utils/infra_setup/runner/yardstick.py
index 616bcc52..3eeeee6b 100644
--- a/utils/infra_setup/runner/yardstick.py
+++ b/utils/infra_setup/runner/yardstick.py
@@ -45,7 +45,7 @@ def yardstick_command_parser(debug, cidr, outfile, parameter):
cmd += " --output-file " + outfile
image_name = config.bottlenecks_config["yardstick_image_name"]
parameter["image_name"] = image_name
- print parameter
+ LOG.info(parameter)
if parameter is not None:
cmd += " --task-args " + '"' + str(parameter) + '"'
return cmd
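
Routing the task arguments through LOG.info keeps them in the Bottlenecks log stream instead of raw stdout. For the soak test, the dictionary logged here is the one built in do_test() and extended with the image name just above, so it looks roughly like this (illustrative values; the image name is whatever bottlenecks_config provides):

    # Illustrative only: the parameter dict passed to yardstick_command_parser()
    # by posca_factor_soak_throughputs.do_test().
    parameter = {
        "test_time": 10,                  # vim_pair_ttl from the testcase YAML
        "image_name": "yardstick-image",  # config.bottlenecks_config["yardstick_image_name"]
    }
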