Diffstat (limited to 'testsuites/posca')
-rw-r--r--  testsuites/posca/testcase_cfg/posca_factor_multistack_storage.yaml | 35
-rw-r--r--  testsuites/posca/testcase_cfg/posca_factor_multistack_storage_parallel.yaml | 33
-rw-r--r--  testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml | 35
-rw-r--r--  testsuites/posca/testcase_cfg/posca_feature_moon_resources.yaml | 21
-rw-r--r--  testsuites/posca/testcase_cfg/posca_feature_moon_tenants.yaml | 29
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon.py | 121
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon_dashboard.json | 13
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon_index_pattern.json | 4
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon_resources_histogram.json | 11
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_discover.json | 23
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_histogram.json | 11
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_moon_resources.py | 36
-rw-r--r--  testsuites/posca/testcase_dashboard/posca_moon_tenants.py | 36
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_multistack_storage.py | 236
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_multistack_storage_parallel.py | 164
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_ping.py | 18
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_soak_throughputs.py | 132
-rw-r--r--  testsuites/posca/testcase_script/posca_factor_system_bandwidth.py | 4
-rw-r--r--  testsuites/posca/testcase_script/posca_feature_moon_resources.py | 59
-rw-r--r--  testsuites/posca/testcase_script/posca_feature_moon_tenants.py | 112
-rw-r--r--  testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py | 5
-rw-r--r--  testsuites/posca/testcase_script/posca_feature_vnf_scale_out.py | 10
22 files changed, 1021 insertions, 127 deletions
diff --git a/testsuites/posca/testcase_cfg/posca_factor_multistack_storage.yaml b/testsuites/posca/testcase_cfg/posca_factor_multistack_storage.yaml
new file mode 100644
index 00000000..e2f48438
--- /dev/null
+++ b/testsuites/posca/testcase_cfg/posca_factor_multistack_storage.yaml
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2017 HUAWEI TECHNOLOGIES CO.,LTD and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+load_manager:
+ scenarios:
+ tool: fio
+ test_times: 10
+    # this "rw" value can be write, read, rw, rr or randrw
+ rw: 'randrw'
+ bs: '4k'
+ size: '20g'
+ rwmixwrite: '50'
+ num_thread: 1, 2
+ num_stack: 1
+ volume_num: '1'
+ num_jobs: '1'
+ direct: '1'
+ volume_size: 50
+
+ runners:
+ stack_create: yardstick
+ flavor:
+ yardstick_test_dir: "samples"
+ yardstick_testcase: "storage_bottlenecks"
+
+contexts:
+ dashboard: "Bottlenecks-ELK"
+ yardstick: "Bottlenecks-Yardstick"
+ yardstick_envpre: True
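
For orientation, a minimal sketch (not part of the patch) of how the scenarios block above is consumed. It mirrors the per-thread case_config built in posca_factor_multistack_storage.py added later in this change; yaml.safe_load and the inline comma-splitting stand in for the repo's conf_parser helpers, and the path assumes the repository root as working directory.

# Sketch only: key names follow the YAML above.
import yaml

with open("testsuites/posca/testcase_cfg/"
          "posca_factor_multistack_storage.yaml") as f:
    cfg = yaml.safe_load(f)

scenarios = cfg["load_manager"]["scenarios"]
# "num_thread: 1, 2" is a comma-separated ramp; each value is one load level
thread_levels = [int(v) for v in str(scenarios["num_thread"]).split(",")]
case_config = {"stack_num": int(scenarios["num_stack"]),
               "volume_num": scenarios["volume_num"],
               "rw": scenarios["rw"],
               "bs": scenarios["bs"],
               "size": scenarios["size"],
               "rwmixwrite": scenarios["rwmixwrite"],
               "numjobs": scenarios["num_jobs"],
               "direct": scenarios["direct"],
               "volume_size": int(scenarios["volume_size"])}
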
diff --git a/testsuites/posca/testcase_cfg/posca_factor_multistack_storage_parallel.yaml b/testsuites/posca/testcase_cfg/posca_factor_multistack_storage_parallel.yaml
new file mode 100644
index 00000000..b55b826c
--- /dev/null
+++ b/testsuites/posca/testcase_cfg/posca_factor_multistack_storage_parallel.yaml
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2017 HUAWEI TECHNOLOGIES CO.,LTD and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+load_manager:
+ scenarios:
+ tool: fio
+ # for this option we provide " write, read, rw, rr"
+ rw: "randrw"
+ bs: "4k"
+ size: "20g"
+ rwmixwrite: "50"
+ num_stack: 1, 3
+ volume_num: "1"
+ num_jobs: "1"
+ direct: "1"
+ volume_size: 50
+
+ runners:
+ stack_create: yardstick
+ flavor:
+ yardstick_test_dir: "samples"
+ yardstick_testcase: "storage_bottlenecks"
+
+contexts:
+ dashboard: "Bottlenecks-ELK"
+ yardstick: "Bottlenecks-Yardstick"
+ yardstick_envpre: True
diff --git a/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml b/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml
new file mode 100644
index 00000000..346c4106
--- /dev/null
+++ b/testsuites/posca/testcase_cfg/posca_factor_soak_throughputs.yaml
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2017 HUAWEI TECHNOLOGIES CO.,LTD and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+# Sample config file for the life-cycle throughputs baseline test.
+# Each VM pair has its ttl (time to live) and creation delay
+# (lazy creation delay) specified.
+# Multiple contexts are used to specify the host and target VMs.
+
+load_manager:
+ scenarios:
+ tool: netperf
+ test_duration_hours: 1
+ vim_pair_ttl: 300
+ vim_pair_lazy_cre_delay: 2
+ package_size:
+ threshhold:
+ package_loss: 0%
+ latency: 300
+
+ runners:
+ stack_create: yardstick
+ flavor:
+ yardstick_test_dir: "samples"
+ yardstick_testcase: "netperf_soak"
+
+contexts:
+ dashboard: "Bottlenecks-ELK"
+ yardstick: "Bottlenecks-Yardstick"
+ yardstick_envpre: True
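
The vim_pair_ttl and vim_pair_lazy_cre_delay values above control how long each VM pair lives and how pair creation is staggered. Below is a minimal sketch (not part of the patch) of how the ttl reaches yardstick, mirroring do_test() in posca_factor_soak_throughputs.py added later in this change; the output-file path is a placeholder.

# Sketch only: mirrors do_test() in posca_factor_soak_throughputs.py below.
import utils.infra_setup.runner.yardstick as runner_yardstick

scenarios = {"vim_pair_ttl": 300, "vim_pair_lazy_cre_delay": 2}
parameter_info = dict(test_time=scenarios["vim_pair_ttl"])
cmd = runner_yardstick.yardstick_command_parser(
    debug=True,
    cidr="/home/opnfv/repos/yardstick/samples/netperf_soak.yaml",
    outfile="/tmp/yardstick_soak_example.out",   # placeholder path
    parameter=parameter_info)
# cmd is then run inside the yardstick container via
# docker_env.docker_exec_cmd(yardstick_container, cmd)
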
diff --git a/testsuites/posca/testcase_cfg/posca_feature_moon_resources.yaml b/testsuites/posca/testcase_cfg/posca_feature_moon_resources.yaml
index d6b325f7..bbf65ba7 100644
--- a/testsuites/posca/testcase_cfg/posca_feature_moon_resources.yaml
+++ b/testsuites/posca/testcase_cfg/posca_feature_moon_resources.yaml
@@ -11,20 +11,27 @@ load_manager:
scenarios:
tool: https request
# info that the cpus and memes have the same number of data.
+ pdp_name: pdp
+ policy_name: "MLS Policy example"
+ model_name: MLS
tenants: 1,5,10,20
+ subject_number: 10
+ object_number: 10
+ timeout: 0.2
runners:
stack_create: yardstick
Debug: False
yardstick_test_dir: "samples"
- yardstick_testcase: "bottlenecks_moon_resources"
-
- runner_exta:
- # info this section is for yardstick do some exta env prepare.
- installation_method: yardstick
- installation_type: testpmd
+ yardstick_testcase: "moon_resource"
contexts:
# info that dashboard if have data, we will create the data dashboard.
dashboard: "Bottlenecks-ELK"
-  yardstick: "Bottlenecks-yardstick"
\ No newline at end of file
+ yardstick: "Bottlenecks-yardstick"
+ moon_monitoring: True
+ moon_environment:
+ ip: "192.168.37.205"
+ user: "root"
+ password: "root"
+ consul_port: 30005
diff --git a/testsuites/posca/testcase_cfg/posca_feature_moon_tenants.yaml b/testsuites/posca/testcase_cfg/posca_feature_moon_tenants.yaml
index 3b621a99..7feb6e4e 100644
--- a/testsuites/posca/testcase_cfg/posca_feature_moon_tenants.yaml
+++ b/testsuites/posca/testcase_cfg/posca_feature_moon_tenants.yaml
@@ -11,25 +11,30 @@ load_manager:
scenarios:
tool: https request
# info that the cpus and memes have the same number of data.
- resources: 20
- initial: 0
- threshhold: 5
- timeout: 30
+ pdp_name: pdp
+ policy_name: "MLS Policy example"
+ model_name: MLS
+ subject_number: 20
+ object_number: 20
+ timeout: 0.003
+ initial_tenants: 0
+ steps_tenants: 1
+ tolerate_time: 20
SLA: 5
-
runners:
stack_create: yardstick
Debug: False
yardstick_test_dir: "samples"
- yardstick_testcase: "bottlenecks_moon_tenants"
-
- runner_exta:
- # info this section is for yardstick do some exta env prepare.
- installation_method: yardstick
- installation_type: testpmd
+ yardstick_testcase: "moon_tenant"
contexts:
# info that dashboard if have data, we will create the data dashboard.
dashboard: "Bottlenecks-ELK"
-  yardstick: "Bottlenecks-yardstick"
\ No newline at end of file
+ yardstick: "Bottlenecks-yardstick"
+ moon_monitoring: True
+ moon_environment:
+ ip: "192.168.37.205"
+ user: "root"
+ password: "root"
+ consul_port: 30005
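
The initial_tenants, steps_tenants and tolerate_time values above drive the tenant ramp in posca_feature_moon_tenants.py, modified later in this change. A minimal standalone sketch (not part of the patch) of that ramp, with the multiprocessing worker pool replaced by a plain counter and a simulated failure flag:

# Sketch only: the real loop spawns multiprocessing workers, and a shared
# Manager value flips when Moon can no longer serve a new tenant.
import time

initial_tenants, steps_tenants, tolerate_time = 0, 1, 20
switch_value = 0      # stands in for the shared multiprocessing.Manager flag
result = 0
tenant_number = steps_tenants if initial_tenants == 0 else initial_tenants
while switch_value == 0:
    # spawn tenant_number workers here (multiprocessing.Process in the script)
    result += tenant_number
    tenant_number = steps_tenants
    time.sleep(tolerate_time)    # wait tolerate_time seconds between steps
    switch_value = 1             # demo only: pretend a worker reported failure
print("tenant_max %d" % result)
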
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon.py b/testsuites/posca/testcase_dashboard/posca_feature_moon.py
new file mode 100644
index 00000000..6819ea84
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon.py
@@ -0,0 +1,121 @@
+#!/usr/bin/python
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file realizes the function of creating dashboards for Moon tests'''
+import ConfigParser
+from elasticsearch import Elasticsearch
+import json
+import os
+import utils.logger as log
+from utils.parser import Parser as conf_parser
+
+LOG = log.Logger(__name__).getLogger()
+config = ConfigParser.ConfigParser()
+es = Elasticsearch()
+dashboard_path = os.path.join(conf_parser.test_dir,
+ "posca",
+ "testcase_dashboard")
+dashboard_dir = dashboard_path + "/"
+
+
+def dashboard_send_data(runner_config, test_data):
+ global es
+ print runner_config
+ es_ip = runner_config['dashboard_ip'].split(':')
+ es = Elasticsearch([{'host': es_ip[0]}])
+ print test_data["test_body"]
+ res = es.index(index="bottlenecks",
+ doc_type=test_data["testcase"],
+ body=test_data["test_body"][0])
+    if not res['created']:
+        LOG.error("data sent to kibana has errors %s", test_data["data_body"])
+
+
+def posca_moon_init(runner_config):
+ global es
+ es_ip = runner_config['dashboard_ip'].split(':')
+ es = Elasticsearch([{'host': es_ip[0]}])
+ # Create bottlenecks index
+ with open(dashboard_dir + 'posca_feature_moon_index_pattern.json')\
+ as index_pattern:
+ doc = json.load(index_pattern)
+ res = es.index(
+ index=".kibana",
+ doc_type="index-pattern",
+ id="bottlenecks",
+ body=doc)
+    if res['created']:
+        LOG.info("bottlenecks index-pattern has been created")
+    else:
+        LOG.info("bottlenecks index-pattern already exists")
+
+ with open(dashboard_dir + 'posca_system_bandwidth_config.json')\
+ as index_config:
+ doc = json.load(index_config)
+ res = es.index(index=".kibana", doc_type="config", id="4.6.1", body=doc)
+    if res['created']:
+        LOG.info("bottlenecks config has been created")
+    else:
+        LOG.info("bottlenecks config already exists")
+
+ # Configure discover panel
+ with open(dashboard_dir + 'posca_feature_moon_discover.json')\
+ as index_discover:
+ doc = json.load(index_discover)
+ res = es.index(
+ index=".kibana",
+ doc_type="search",
+ id="moon",
+ body=doc)
+    if res['created']:
+        LOG.info("moon testcase search has been created")
+    else:
+        LOG.info("moon testcase search already exists")
+
+ # Create testing data in line graph
+ with open(dashboard_dir + 'posca_feature_moon_resources_histogram.json')\
+ as line_data:
+ doc = json.load(line_data)
+ res = es.index(
+ index=".kibana",
+ doc_type="visualization",
+ id="resources",
+ body=doc)
+    if res['created']:
+        LOG.info("moon resources visualization has been created")
+    else:
+        LOG.info("moon resources visualization already exists")
+
+ # Create comparison results in line chart
+ with open(dashboard_dir + 'posca_feature_moon_tenants_histogram.json')\
+ as line_char:
+ doc = json.load(line_char)
+ res = es.index(
+ index=".kibana",
+ doc_type="visualization",
+ id="tenants",
+ body=doc)
+    if res['created']:
+        LOG.info("moon tenants visualization has been created")
+    else:
+        LOG.info("moon tenants visualization already exists")
+
+ # Create dashboard
+ with open(dashboard_dir + 'posca_feature_moon_dashboard.json')\
+ as dashboard:
+ doc = json.load(dashboard)
+ res = es.index(
+ index=".kibana",
+ doc_type="dashboard",
+ id="moon",
+ body=doc)
+    if res['created']:
+        LOG.info("moon testcases dashboard has been created")
+    else:
+        LOG.info("moon testcases dashboard already exists")
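
A minimal usage sketch (not part of the patch) of the two entry points above, following the calls made from the moon testcase scripts modified later in this change. The dashboard address is a placeholder (only the host part is used), and the Kibana JSON templates referenced by posca_moon_init are assumed to be present next to this module.

# Sketch only: mirrors the calls in posca_feature_moon_resources.py below.
import testsuites.posca.testcase_dashboard.posca_feature_moon as DashBoard

contexts = {"dashboard_ip": "192.168.37.205:9200"}   # placeholder address
DashBoard.posca_moon_init(contexts)    # create index-pattern, search, charts

save_data = {"testcase": "posca_factor_moon_resources",
             "test_body": [{"tenant_number": 1, "max_user": 10}]}
DashBoard.dashboard_send_data(contexts, save_data)
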
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon_dashboard.json b/testsuites/posca/testcase_dashboard/posca_feature_moon_dashboard.json
new file mode 100644
index 00000000..53a4a750
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon_dashboard.json
@@ -0,0 +1,13 @@
+{
+ "title": "moon",
+ "hits": 0,
+ "description": "",
+ "panelsJSON": "[{\"id\":\"resources\",\"type\":\"visualization\",\"panelIndex\":1,\"size_x\":8,\"size_y\":7,\"col\":1,\"row\":1},{\"id\":\"tenants\",\"type\":\"visualization\",\"panelIndex\":2,\"size_x\":3,\"size_y\":7,\"col\":9,\"row\":1}]",
+ "optionsJSON": "{\"darkTheme\":false}",
+ "uiStateJSON": "{}",
+ "version": 1,
+ "timeRestore": false,
+ "kibanaSavedObjectMeta": {
+ "searchSourceJSON": "{\"filter\":[{\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}}}]}"
+ }
+}
\ No newline at end of file
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon_index_pattern.json b/testsuites/posca/testcase_dashboard/posca_feature_moon_index_pattern.json
new file mode 100644
index 00000000..2bff871a
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon_index_pattern.json
@@ -0,0 +1,4 @@
+{
+ "title": "bottlenecks",
+ "fields": "[{\"name\":\"_index\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"indexed\":false,\"analyzed\":false,\"doc_values\":false},{\"name\":\"tenant_max\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"indexed\":true,\"analyzed\":false,\"doc_values\":true},{\"name\":\"_source\",\"type\":\"_source\",\"count\":0,\"scripted\":false,\"indexed\":false,\"analyzed\":false,\"doc_values\":false},{\"name\":\"max_user\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"indexed\":true,\"analyzed\":false,\"doc_values\":true},{\"name\":\"tenant_number\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"indexed\":true,\"analyzed\":false,\"doc_values\":true},{\"name\":\"_id\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"indexed\":false,\"analyzed\":false,\"doc_values\":false},{\"name\":\"_type\",\"type\":\"string\",\"count\":0,\"scripted\":false,\"indexed\":false,\"analyzed\":false,\"doc_values\":false},{\"name\":\"_score\",\"type\":\"number\",\"count\":0,\"scripted\":false,\"indexed\":false,\"analyzed\":false,\"doc_values\":false}]"
+}
\ No newline at end of file
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon_resources_histogram.json b/testsuites/posca/testcase_dashboard/posca_feature_moon_resources_histogram.json
new file mode 100644
index 00000000..c8977a72
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon_resources_histogram.json
@@ -0,0 +1,11 @@
+{
+ "title": "resources",
+ "visState": "{\"title\":\"New Visualization\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"type\":\"sum\",\"schema\":\"metric\",\"params\":{\"field\":\"max_user\"}},{\"id\":\"2\",\"type\":\"terms\",\"schema\":\"segment\",\"params\":{\"field\":\"tenant_number\",\"size\":5,\"order\":\"asc\",\"orderBy\":\"_term\"}}],\"listeners\":{}}",
+ "uiStateJSON": "{}",
+ "description": "",
+ "savedSearchId": "moon",
+ "version": 1,
+ "kibanaSavedObjectMeta": {
+ "searchSourceJSON": "{\"filter\":[]}"
+ }
+}
\ No newline at end of file
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_discover.json b/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_discover.json
new file mode 100644
index 00000000..03360f89
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_discover.json
@@ -0,0 +1,23 @@
+{
+ "_index": ".kibana",
+ "_type": "search",
+ "_id": "tenants",
+ "_version": 1,
+ "found": true,
+ "_source": {
+ "title": "tenants",
+ "description": "",
+ "hits": 0,
+ "columns": [
+ "_source"
+ ],
+ "sort": [
+ "_score",
+ "desc"
+ ],
+ "version": 1,
+ "kibanaSavedObjectMeta": {
+ "searchSourceJSON": "{\"index\":\"bottlenecks\",\"filter\":[],\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"require_field_match\":false,\"fragment_size\":2147483647},\"query\":{\"query_string\":{\"query\":\"_type:posca_factor_moon_tenants\",\"analyze_wildcard\":true}}}"
+ }
+ }
+}
\ No newline at end of file
diff --git a/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_histogram.json b/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_histogram.json
new file mode 100644
index 00000000..a731acfc
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_feature_moon_tenants_histogram.json
@@ -0,0 +1,11 @@
+{
+ "title": "tenants",
+ "visState": "{\"title\":\"New Visualization\",\"type\":\"histogram\",\"params\":{\"shareYAxis\":true,\"addTooltip\":true,\"addLegend\":true,\"scale\":\"linear\",\"mode\":\"stacked\",\"times\":[],\"addTimeMarker\":false,\"defaultYExtents\":false,\"setYExtents\":false,\"yAxis\":{}},\"aggs\":[{\"id\":\"1\",\"type\":\"sum\",\"schema\":\"metric\",\"params\":{\"field\":\"tenant_max\"}}],\"listeners\":{}}",
+ "uiStateJSON": "{}",
+ "description": "",
+ "savedSearchId": "moon",
+ "version": 1,
+ "kibanaSavedObjectMeta": {
+ "searchSourceJSON": "{\"filter\":[]}"
+ }
+}
\ No newline at end of file
diff --git a/testsuites/posca/testcase_dashboard/posca_moon_resources.py b/testsuites/posca/testcase_dashboard/posca_moon_resources.py
new file mode 100644
index 00000000..446faccc
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_moon_resources.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file sends the Moon resources test data to the dashboard'''
+import ConfigParser
+from elasticsearch import Elasticsearch
+import os
+import utils.logger as log
+from utils.parser import Parser as conf_parser
+
+LOG = log.Logger(__name__).getLogger()
+config = ConfigParser.ConfigParser()
+es = Elasticsearch()
+dashboard_path = os.path.join(conf_parser.test_dir,
+ "posca",
+ "testcase_dashboard")
+dashboard_dir = dashboard_path + "/"
+
+
+def dashboard_send_data(runner_config, test_data):
+ global es
+ print runner_config
+ es_ip = runner_config['dashboard_ip'].split(':')
+ es = Elasticsearch([{'host': es_ip[0]}])
+ print test_data["test_body"]
+ res = es.index(index="bottlenecks",
+ doc_type=test_data["testcase"],
+ body=test_data["test_body"][0])
+    if not res['created']:
+        LOG.error("data sent to kibana has errors %s", test_data["data_body"])
diff --git a/testsuites/posca/testcase_dashboard/posca_moon_tenants.py b/testsuites/posca/testcase_dashboard/posca_moon_tenants.py
new file mode 100644
index 00000000..446faccc
--- /dev/null
+++ b/testsuites/posca/testcase_dashboard/posca_moon_tenants.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+##############################################################################
+# Copyright (c) 2015 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file sends the Moon tenants test data to the dashboard'''
+import ConfigParser
+from elasticsearch import Elasticsearch
+import os
+import utils.logger as log
+from utils.parser import Parser as conf_parser
+
+LOG = log.Logger(__name__).getLogger()
+config = ConfigParser.ConfigParser()
+es = Elasticsearch()
+dashboard_path = os.path.join(conf_parser.test_dir,
+ "posca",
+ "testcase_dashboard")
+dashboard_dir = dashboard_path + "/"
+
+
+def dashboard_send_data(runner_config, test_data):
+ global es
+ print runner_config
+ es_ip = runner_config['dashboard_ip'].split(':')
+ es = Elasticsearch([{'host': es_ip[0]}])
+ print test_data["test_body"]
+ res = es.index(index="bottlenecks",
+ doc_type=test_data["testcase"],
+ body=test_data["test_body"][0])
+    if not res['created']:
+        LOG.error("data sent to kibana has errors %s", test_data["data_body"])
diff --git a/testsuites/posca/testcase_script/posca_factor_multistack_storage.py b/testsuites/posca/testcase_script/posca_factor_multistack_storage.py
new file mode 100644
index 00000000..34ee225c
--- /dev/null
+++ b/testsuites/posca/testcase_script/posca_factor_multistack_storage.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file realizes the POSCA multistack storage stress test.
+It contains several parts; the first part creates a script that
+runs several test threads in parallel.'''
+
+import utils.logger as log
+import uuid
+import json
+import os
+import time
+import threading
+import datetime
+import Queue
+from utils.parser import Parser as conf_parser
+import utils.env_prepare.quota_prepare as quota_prepare
+import utils.env_prepare.stack_prepare as stack_prepare
+import utils.infra_setup.runner.yardstick as yardstick_task
+
+import utils.infra_setup.runner.docker_env as docker_env
+
+# --------------------------------------------------
+# logging configuration
+# --------------------------------------------------
+LOG = log.Logger(__name__).getLogger()
+
+test_dict = {
+ "action": "runTestCase",
+ "args": {
+ "opts": {
+ "task-args": {}
+ },
+ "testcase": "multistack_storage_bottlenecks"
+ }
+}
+testfile = os.path.basename(__file__)
+testcase, file_format = os.path.splitext(testfile)
+cidr = "/home/opnfv/repos/yardstick/samples/storage_bottlenecks.yaml"
+runner_DEBUG = True
+q = Queue.Queue()
+final_result = Queue.Queue()
+
+
+def env_pre(test_config):
+ test_yardstick = False
+ if "yardstick" in test_config["contexts"].keys():
+ test_yardstick = True
+ stack_prepare._prepare_env_daemon(test_yardstick)
+ quota_prepare.quota_env_prepare()
+ LOG.info("yardstick environment prepare!")
+ if(test_config["contexts"]['yardstick_envpre']):
+ stdout = yardstick_task.yardstick_image_prepare()
+ LOG.debug(stdout)
+
+
+def testcase_parser(out_file="yardstick.out", **parameter_info):
+ cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
+ cidr=cidr,
+ outfile=out_file,
+ parameter=parameter_info)
+ return cmd
+
+
+def do_test(test_config):
+ out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+ yardstick_container = docker_env.yardstick_info['container']
+ cmd = testcase_parser(out_file=out_file, **test_config)
+ print(cmd)
+ stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ LOG.info(stdout)
+ loop_value = 0
+ while loop_value < 60:
+ time.sleep(2)
+ loop_value = loop_value + 1
+ with open(out_file) as f:
+ data = json.load(f)
+ if data["result"]["criteria"] == "PASS":
+ LOG.info("yardstick run success")
+ LOG.info("%s" % data["result"]["testcases"])
+ break
+ else:
+ LOG.error("yardstick error exit")
+ break
+
+ save_data = final_config_to_result(test_config, data)
+ return save_data
+
+
+def config_to_result(num, out_num, during_date):
+ testdata = {}
+ test_result = {}
+ final_data = {}
+
+ final_data["read_iops"] = 0
+ final_data["read_bw"] = 0
+ final_data["read_lat"] = 0
+ final_data["write_iops"] = 0
+ final_data["write_bw"] = 0
+ final_data["write_lat"] = 0
+
+ test_result["number_of_stacks"] = float(num)
+ test_result["success_times"] = out_num
+ test_result["success_rate"] = out_num / num
+ test_result["duration_time"] = during_date
+ testdata["data_body"] = test_result
+ testdata["testcase"] = testcase
+
+ while not final_result.empty():
+ data = final_result.get()
+ final_data["read_iops"] += data["read_iops"]
+ final_data["read_bw"] += data["read_bw"]
+ if final_data["read_lat"] is 0:
+ final_data["read_lat"] = data["read_lat"]
+ else:
+ final_data["read_lat"] += data["read_lat"]
+ final_data["read_lat"] = final_data["read_lat"]/2
+ final_data["write_iops"] += data["write_iops"]
+ final_data["write_bw"] += data["read_iops"]
+ if final_data["write_lat"] is 0:
+ final_data["write_lat"] = data["write_lat"]
+ else:
+ final_data["write_lat"] += data["write_lat"]
+ final_data["write_lat"] = final_data["write_lat"]/2
+
+ testdata["test_value"] = final_data
+ LOG.info("Final testdata is %s" % testdata)
+ return testdata
+
+
+def final_config_to_result(test_config, test_result):
+ out_data = test_result["result"]["testcases"]
+ test_data = out_data["storage_bottlenecks"]["tc_data"]
+ testdata = {}
+ testdata["read_iops"] = 0
+ testdata["read_bw"] = 0
+ testdata["read_lat"] = 0
+ testdata["write_iops"] = 0
+ testdata["write_bw"] = 0
+ testdata["write_lat"] = 0
+ print(testdata["read_iops"])
+ for result in test_data:
+ testdata["read_iops"] += result["data"]["read_iops"]
+ testdata["read_bw"] += result["data"]["read_bw"]
+ if testdata["read_lat"] is 0:
+ testdata["read_lat"] = result["data"]["read_lat"]
+ else:
+ testdata["read_lat"] += result["data"]["read_lat"]
+ testdata["read_lat"] = testdata["read_lat"]/2
+ testdata["write_iops"] += result["data"]["write_iops"]
+ testdata["write_bw"] += result["data"]["write_bw"]
+ if testdata["write_lat"] is 0:
+ testdata["write_lat"] = result["data"]["write_lat"]
+ else:
+ testdata["write_lat"] += result["data"]["write_lat"]
+ testdata["write_lat"] = testdata["write_lat"]/2
+ final_result.put(testdata)
+ q.put(1)
+ return testdata
+
+
+def func_run(con_dic):
+ test_date = do_test(con_dic)
+ return test_date
+
+
+def run(test_config):
+ con_dic = test_config["load_manager"]
+ scenarios_conf = con_dic["scenarios"]
+
+ if test_config["contexts"]["yardstick_ip"] is None:
+ con_dic["contexts"]["yardstick_ip"] =\
+ conf_parser.ip_parser("yardstick_test_ip")
+
+ env_pre(test_config)
+ LOG.info("yardstick environment prepare done!")
+
+ stack_num = scenarios_conf["num_stack"]
+ test_num = conf_parser.str_to_list(scenarios_conf["num_thread"])
+ rw = scenarios_conf["rw"]
+ bs = scenarios_conf["bs"]
+ size = scenarios_conf["size"]
+ rwmixwrite = scenarios_conf["rwmixwrite"]
+ numjobs = scenarios_conf["num_jobs"]
+ direct = scenarios_conf["direct"]
+ volume_num = scenarios_conf["volume_num"]
+ volume_size = scenarios_conf["volume_size"]
+
+ for value in test_num:
+ result = []
+ out_num = 0
+ num = int(value)
+ # pool = multiprocessing.Pool(processes=num)
+ threadings = []
+ LOG.info("begin to run %s thread" % num)
+
+ starttime = datetime.datetime.now()
+
+ for i in xrange(0, num):
+ case_config = {"stack_num": int(stack_num),
+ "volume_num": volume_num,
+ "rw": rw,
+ "bs": bs,
+ "size": size,
+ "rwmixwrite": rwmixwrite,
+ "numjobs": numjobs,
+ "direct": direct,
+ "volume_size": int(volume_size)}
+ tmp_thread = threading.Thread(target=func_run, args=(case_config,))
+ threadings.append(tmp_thread)
+ tmp_thread.start()
+
+ for one_thread in threadings:
+ one_thread.join()
+ while not q.empty():
+ result.append(q.get())
+ for item in result:
+ out_num = out_num + float(item)
+
+ print(result)
+
+ endtime = datetime.datetime.now()
+ LOG.info("%s thread success %d times" % (num, out_num))
+ during_date = (endtime - starttime).seconds
+
+ data_reply = config_to_result(num, out_num, during_date)
+ conf_parser.result_to_file(data_reply, test_config["out_file"])
+
+ LOG.info('END POSCA stress multistack storage test')
+ return data_reply
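
For reference, a sketch (not part of the patch, values illustrative) of the record that config_to_result() above assembles and that conf_parser.result_to_file() writes out:

# Sketch only: field names follow config_to_result() above; values are made up.
example_reply = {
    "testcase": "posca_factor_multistack_storage",
    "data_body": {"number_of_stacks": 2.0,
                  "success_times": 2.0,
                  "success_rate": 1.0,
                  "duration_time": 120},
    "test_value": {"read_iops": 0, "read_bw": 0, "read_lat": 0,
                   "write_iops": 0, "write_bw": 0, "write_lat": 0},
}
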
diff --git a/testsuites/posca/testcase_script/posca_factor_multistack_storage_parallel.py b/testsuites/posca/testcase_script/posca_factor_multistack_storage_parallel.py
new file mode 100644
index 00000000..8c623d41
--- /dev/null
+++ b/testsuites/posca/testcase_script/posca_factor_multistack_storage_parallel.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file realizes the POSCA multistack storage stress test
+(parallel variant). It runs the storage test case over an increasing
+number of stacks as configured by "num_stack".'''
+
+import utils.logger as log
+import uuid
+import json
+import os
+import time
+from utils.parser import Parser as conf_parser
+import utils.env_prepare.quota_prepare as quota_prepare
+import utils.env_prepare.stack_prepare as stack_prepare
+import utils.infra_setup.runner.yardstick as yardstick_task
+
+import utils.infra_setup.runner.docker_env as docker_env
+
+# --------------------------------------------------
+# logging configuration
+# --------------------------------------------------
+LOG = log.Logger(__name__).getLogger()
+
+test_dict = {
+ "action": "runTestCase",
+ "args": {
+ "opts": {
+ "task-args": {}
+ },
+ "testcase": "multistack_storage_bottlenecks_parallel"
+ }
+}
+testfile = os.path.basename(__file__)
+testcase, file_format = os.path.splitext(testfile)
+cidr = "/home/opnfv/repos/yardstick/samples/storage_bottlenecks.yaml"
+runner_DEBUG = True
+
+
+def env_pre(test_config):
+ test_yardstick = False
+ if "yardstick" in test_config["contexts"].keys():
+ test_yardstick = True
+ stack_prepare._prepare_env_daemon(test_yardstick)
+ quota_prepare.quota_env_prepare()
+ if(test_config["contexts"]['yardstick_envpre']):
+ LOG.info("yardstick environment prepare!")
+ stdout = yardstick_task.yardstick_image_prepare()
+ LOG.debug(stdout)
+
+
+def testcase_parser(out_file="yardstick.out", **parameter_info):
+ cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
+ cidr=cidr,
+ outfile=out_file,
+ parameter=parameter_info)
+ return cmd
+
+
+def do_test(test_config):
+ out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+ yardstick_container = docker_env.yardstick_info['container']
+ cmd = testcase_parser(out_file=out_file, **test_config)
+ print(cmd)
+ stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ LOG.info(stdout)
+ loop_value = 0
+ while loop_value < 60:
+ time.sleep(2)
+ loop_value = loop_value + 1
+ with open(out_file) as f:
+ data = json.load(f)
+ if data["result"]["criteria"] == "PASS":
+ LOG.info("yardstick run success")
+ LOG.info("%s" % data["result"]["testcases"])
+ break
+ else:
+ LOG.error("yardstick error exit")
+ break
+
+ save_data = config_to_result(test_config, data)
+ LOG.info(save_data)
+ return save_data
+
+
+def config_to_result(test_config, test_result):
+ print(test_result)
+ out_data = test_result["result"]["testcases"]
+ test_data = out_data["storage_bottlenecks"]["tc_data"]
+ testdata = {}
+ testdata["read_iops"] = 0
+ testdata["read_bw"] = 0
+ testdata["read_lat"] = 0
+ testdata["write_iops"] = 0
+ testdata["write_bw"] = 0
+ testdata["write_lat"] = 0
+ print(testdata["read_iops"])
+ for result in test_data:
+ testdata["read_iops"] += result["data"]["read_iops"]
+ testdata["read_bw"] += result["data"]["read_bw"]
+ if testdata["read_lat"] is 0:
+ testdata["read_lat"] = result["data"]["read_lat"]
+ else:
+ testdata["read_lat"] += result["data"]["read_lat"]
+ testdata["read_lat"] = testdata["read_lat"]/2
+ testdata["write_iops"] += result["data"]["write_iops"]
+ testdata["write_bw"] += result["data"]["write_bw"]
+ if testdata["write_lat"] is 0:
+ testdata["write_lat"] = result["data"]["write_lat"]
+ else:
+ testdata["write_lat"] += result["data"]["write_lat"]
+ testdata["write_lat"] = testdata["write_lat"]/2
+ return testdata
+
+
+def run(test_config):
+ con_dic = test_config["load_manager"]
+ scenarios_conf = con_dic["scenarios"]
+
+ if test_config["contexts"]["yardstick_ip"] is None:
+ con_dic["contexts"]["yardstick_ip"] =\
+ conf_parser.ip_parser("yardstick_test_ip")
+
+ env_pre(test_config)
+ LOG.info("yardstick environment prepare done!")
+
+ test_num = conf_parser.str_to_list(scenarios_conf["num_stack"])
+ rw = scenarios_conf["rw"]
+ bs = scenarios_conf["bs"]
+ size = scenarios_conf["size"]
+ rwmixwrite = scenarios_conf["rwmixwrite"]
+ numjobs = scenarios_conf["num_jobs"]
+ direct = scenarios_conf["direct"]
+ volume_num = scenarios_conf["volume_num"]
+ volume_size = scenarios_conf["volume_size"]
+
+ result = []
+
+ for value in test_num:
+ case_config = {"stack_num": int(value),
+ "volume_num": volume_num,
+ "rw": rw,
+ "bs": bs,
+ "size": size,
+ "rwmixwrite": rwmixwrite,
+ "numjobs": numjobs,
+ "direct": direct,
+ "volume_size": int(volume_size)}
+ data_reply = do_test(case_config)
+ result.append(data_reply)
+
+ LOG.info("%s stack successful run" % (value))
+
+ conf_parser.result_to_file(data_reply, test_config["out_file"])
+
+ LOG.info('END POSCA stress multistack storage parallel testcase')
+ LOG.info("The result data is %s", result)
+ return result
diff --git a/testsuites/posca/testcase_script/posca_factor_ping.py b/testsuites/posca/testcase_script/posca_factor_ping.py
index 3a2277cf..4ee43964 100644
--- a/testsuites/posca/testcase_script/posca_factor_ping.py
+++ b/testsuites/posca/testcase_script/posca_factor_ping.py
@@ -23,6 +23,7 @@ import Queue
from utils.parser import Parser as conf_parser
import utils.env_prepare.quota_prepare as quota_prepare
import utils.env_prepare.stack_prepare as stack_prepare
+import utils.infra_setup.runner.yardstick as runner_yardstick
import testsuites.posca.testcase_dashboard.posca_stress_ping as DashBoard
import utils.infra_setup.runner.docker_env as docker_env
@@ -42,6 +43,8 @@ test_dict = {
}
testfile = os.path.basename(__file__)
testcase, file_format = os.path.splitext(testfile)
+cidr = "/home/opnfv/repos/yardstick/samples/ping_bottlenecks.yaml"
+runner_DEBUG = True
q = Queue.Queue()
@@ -52,20 +55,21 @@ def env_pre(test_config):
test_yardstick = True
stack_prepare._prepare_env_daemon(test_yardstick)
quota_prepare.quota_env_prepare()
- cmd = ('yardstick env prepare')
LOG.info("yardstick environment prepare!")
if(test_config["contexts"]['yardstick_envpre']):
- yardstick_container = docker_env.yardstick_info['container']
- stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ stdout = runner_yardstick.yardstick_image_prepare()
LOG.debug(stdout)
def do_test():
func_name = sys._getframe().f_code.co_name
out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+ parameter_info = {}
yardstick_container = docker_env.yardstick_info['container']
- cmd = ('yardstick task start /home/opnfv/repos/yardstick/'
- 'samples/ping_bottlenecks.yaml --output-file ' + out_file)
+ cmd = runner_yardstick.yardstick_command_parser(debug=runner_DEBUG,
+ cidr=cidr,
+ outfile=out_file,
+ parameter=parameter_info)
stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
LOG.info(stdout)
out_value = 0
@@ -75,11 +79,11 @@ def do_test():
loop_value = loop_value + 1
with open(out_file) as f:
data = json.load(f)
- if data["status"] == 1:
+ if data["result"]["criteria"] == "PASS":
LOG.info("yardstick run success")
out_value = 1
break
- elif data["status"] == 2:
+ else:
LOG.error("yardstick error exit")
out_value = 0
break
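
The replacement check above reads result.criteria from the yardstick output file instead of the old status field. A sketch (not part of the patch) of the minimal JSON shape this assumes; the ping_bottlenecks key name is inferred from the sample file name and may differ.

# Sketch only: minimal assumed shape of the yardstick --output-file JSON.
example_out = {
    "result": {
        "criteria": "PASS",                      # checked by do_test() above
        "testcases": {
            "ping_bottlenecks": {"tc_data": []}  # name inferred, may differ
        }
    }
}
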
diff --git a/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py b/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py
new file mode 100644
index 00000000..b45ce932
--- /dev/null
+++ b/testsuites/posca/testcase_script/posca_factor_soak_throughputs.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+##############################################################################
+# Copyright (c) 2017 Huawei Technologies Co.,Ltd and others.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+'''This file performs a data-plane baseline test for
+VM pair life-cycle events using netperf.
+Testing steps are summarized below:
+1. run_test load testcase configuration
+2. Bottlenecks eliminates the environments limits/constraints
+3. Bottlenecks tells Yardstick to prepare environment
+4. Bottlenecks tells Yardstick to run test
+    4.1 to create stack
+    4.2 to install netperf
+    4.3 to send/forward packets for t2 seconds
+    4.4 record results and destroy stack
+    4.5 after every t1 seconds goto 4.1 and repeat the workflow
+5. Bottlenecks collects testing results from Yardstick
+6. Bottlenecks tells Yardstick to stop when time ends
+ or system fails the test
+7. Bottlenecks sends testing data to bottlenecks-elk'''
+
+import utils.logger as log
+import uuid
+import json
+import os
+import sys
+import time
+# import threading
+# import datetime
+import Queue
+# from utils.parser import Parser as conf_parser
+import utils.env_prepare.quota_prepare as quota_prepare
+import utils.env_prepare.stack_prepare as stack_prepare
+import utils.infra_setup.runner.yardstick as runner_yardstick
+
+# import testsuites.posca.testcase_dashboard.posca_factor_throughputs as DashBoard # noqa
+import utils.infra_setup.runner.docker_env as docker_env
+
+# --------------------------------------------------
+# logging configuration
+# --------------------------------------------------
+LOG = log.Logger(__name__).getLogger()
+
+test_dict = {
+ "action": "runTestCase",
+ "args": {
+ "opts": {
+ "task-args": {}
+ },
+ "testcase": "netperf_bottlenecks"
+ }
+}
+testfile = os.path.basename(__file__)
+testcase, file_format = os.path.splitext(testfile)
+cidr = "/home/opnfv/repos/yardstick/samples/netperf_soak.yaml"
+runner_DEBUG = True
+
+q = Queue.Queue()
+
+
+def env_pre(test_config):
+ test_yardstick = False
+ if "yardstick" in test_config["contexts"].keys():
+ test_yardstick = True
+ stack_prepare._prepare_env_daemon(test_yardstick)
+ quota_prepare.quota_env_prepare()
+ LOG.info("yardstick environment prepare!")
+ if(test_config["contexts"]['yardstick_envpre']):
+ stdout = runner_yardstick.yardstick_image_prepare()
+ LOG.debug(stdout)
+
+
+def do_test(con_dic):
+ func_name = sys._getframe().f_code.co_name
+ out_file = ("/tmp/yardstick_" + str(uuid.uuid4()) + ".out")
+ parameter_info = dict(test_time=con_dic["scenarios"]["vim_pair_ttl"])
+ yardstick_container = docker_env.yardstick_info['container']
+ cmd = runner_yardstick.yardstick_command_parser(debug=runner_DEBUG,
+ cidr=cidr,
+ outfile=out_file,
+ parameter=parameter_info)
+ stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ LOG.info(stdout)
+ out_value = 0
+ loop_value = 0
+ while loop_value < 60:
+ time.sleep(2)
+ loop_value = loop_value + 1
+ with open(out_file) as f:
+ data = json.load(f)
+ if data["status"] == 1:
+ LOG.info("Success run yardstick netperf_soak test!")
+ out_value = 1
+ break
+ elif data["status"] == 2:
+ LOG.error("Failed run yardstick netperf_soak test!")
+ out_value = 0
+ break
+ q.put((out_value, func_name))
+ return out_value
+
+
+def config_to_result(num, out_num, during_date, result):
+ testdata = {}
+ test_result = {}
+ test_result["number_of_stacks"] = float(num)
+ test_result["success_times"] = out_num
+ test_result["success_rate"] = out_num / num
+ test_result["duration_time"] = during_date
+ test_result["result"] = result
+ testdata["data_body"] = test_result
+ testdata["testcase"] = testcase
+ return testdata
+
+
+def func_run(con_dic):
+ test_date = do_test(con_dic)
+ return test_date
+
+
+def run(test_config):
+ con_dic = test_config["load_manager"]
+
+ env_pre(test_config)
+ LOG.info("yardstick environment prepare done!")
+
+ return func_run(con_dic)
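
A minimal driver sketch (not part of the patch) for the module above, wiring it to the posca_factor_soak_throughputs.yaml added earlier in this change; in practice this wiring is presumably done by the Bottlenecks test runner inside a deployed Bottlenecks/Yardstick environment.

# Sketch only: assumes the repo root as working directory and a deployed
# Bottlenecks/Yardstick environment (env_pre touches OpenStack quotas/stacks).
import yaml
import testsuites.posca.testcase_script.posca_factor_soak_throughputs as soak

with open("testsuites/posca/testcase_cfg/"
          "posca_factor_soak_throughputs.yaml") as f:
    test_config = yaml.safe_load(f)

out_value = soak.run(test_config)   # 1 on PASS, 0 on failure (see do_test)
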
diff --git a/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py b/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
index 1a54554c..9d8b0ec6 100644
--- a/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
+++ b/testsuites/posca/testcase_script/posca_factor_system_bandwidth.py
@@ -79,10 +79,10 @@ def do_test(test_config, Use_Dashboard, context_conf):
with open(out_file) as f:
data = json.load(f)
if data["status"] == 1:
- LOG.info("yardstick run success")
+ LOG.info("Success run yardstick netperf_bottlenecks test!")
break
elif data["status"] == 2:
- LOG.error("yardstick error exit")
+ LOG.error("Failed to run yardstick netperf_bottlenecks test!")
exit()
save_data = config_to_result(test_config, data['result'][1])
diff --git a/testsuites/posca/testcase_script/posca_feature_moon_resources.py b/testsuites/posca/testcase_script/posca_feature_moon_resources.py
index 3c66c7b9..8b23824c 100644
--- a/testsuites/posca/testcase_script/posca_feature_moon_resources.py
+++ b/testsuites/posca/testcase_script/posca_feature_moon_resources.py
@@ -18,8 +18,9 @@ import uuid
import json
import utils.logger as log
from utils.parser import Parser as conf_parser
-import utils.env_prepare.stack_prepare as stack_prepare
+import utils.env_prepare.moon_prepare as moon_env
import utils.infra_setup.runner.docker_env as docker_env
+import testsuites.posca.testcase_dashboard.posca_feature_moon as DashBoard
import utils.infra_setup.runner.yardstick as yardstick_task
# --------------------------------------------------
@@ -29,35 +30,35 @@ LOG = log.Logger(__name__).getLogger()
testfile = os.path.basename(__file__)
testcase, file_format = os.path.splitext(testfile)
-# cidr = "/home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml"
runner_DEBUG = True
-def env_pre(con_dic):
- LOG.info("yardstick environment prepare!")
- stack_prepare._prepare_env_daemon(True)
+def env_pre(test_config):
+ if "moon_monitoring" in test_config["contexts"].keys():
+ if test_config["contexts"]['moon_envpre'] is True:
+ moon_environment = test_config["contexts"]['moon_environment']
+ moon_env.moon_envprepare(moon_environment)
+ LOG.info("moon environment prepare!")
def config_to_result(test_config, test_result):
- final_data = []
- print(test_result)
+ final_data = {}
+ final_data["testcase"] = "posca_factor_moon_resources"
+ final_data["test_body"] = []
out_data = test_result["result"]["testcases"]
- test_data = out_data["pvp_throughput_bottlenecks"]["tc_data"]
+ test_data = out_data["moon_resource"]["tc_data"]
for result in test_data:
testdata = {}
- testdata["vcpu"] = test_config["vcpu"]
- testdata["memory"] = test_config["memory"]
- testdata["nrFlows"] = result["data"]["nrFlows"]
- testdata["packet_size"] = result["data"]["packet_size"]
- testdata["throughput"] = result["data"]["throughput_rx_mbps"]
- final_data.append(testdata)
+ testdata["tenant_number"] = int(test_config["tenant_number"])
+ testdata["max_user"] = result["data"]["max_user"]
+ final_data["test_body"].append(testdata)
return final_data
def testcase_parser(runner_conf, out_file="yardstick.out", **parameter_info):
cidr = "/home/opnfv/repos/yardstick/" + \
- runner_conf["yardstick_test_dir"] + \
- runner_conf["yardstick_testcase"]
+ runner_conf["yardstick_test_dir"] + "/" + \
+ runner_conf["yardstick_testcase"] + ".yaml"
cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
cidr=cidr,
outfile=out_file,
@@ -84,24 +85,21 @@ def do_test(runner_conf, test_config, Use_Dashboard, context_conf):
elif data["status"] == 2:
LOG.error("yardstick error exit")
exit()
- # data = json.load(output)
save_data = config_to_result(test_config, data)
if Use_Dashboard is True:
print("use dashboard")
- # DashBoard.dashboard_send_data(context_conf, save_data)
-
- # return save_data["data_body"]
+ DashBoard.dashboard_send_data(context_conf, save_data)
return save_data
def run(test_config):
load_config = test_config["load_manager"]
scenarios_conf = load_config["scenarios"]
- runner_conf = test_config["runners"]
+ runner_conf = load_config["runners"]
+ contexts_conf = test_config["contexts"]
Use_Dashboard = False
-
- env_pre(None)
+ env_pre(test_config)
if test_config["contexts"]["yardstick_ip"] is None:
load_config["contexts"]["yardstick_ip"] =\
conf_parser.ip_parser("yardstick_test_ip")
@@ -112,9 +110,14 @@ def run(test_config):
conf_parser.ip_parser("dashboard")
LOG.info("Create Dashboard data")
Use_Dashboard = True
- # DashBoard.dashboard_system_bandwidth(test_config["contexts"])
+ DashBoard.posca_moon_init(test_config["contexts"])
tenants_conf = conf_parser.str_to_list(scenarios_conf["tenants"])
+ subject_number = int(scenarios_conf["subject_number"])
+ object_number = int(scenarios_conf["object_number"])
+ timeout = scenarios_conf["timeout"]
+ consul_host = contexts_conf["moon_environment"]["ip"]
+ consul_port = contexts_conf["moon_environment"]["consul_port"]
load_config["result_file"] = os.path.dirname(
os.path.abspath(__file__)) + "/test_case/result"
@@ -122,7 +125,13 @@ def run(test_config):
result = []
for tenants in tenants_conf:
- case_config = {"tenants": tenants}
+ print tenants
+ case_config = {"tenant_number": tenants,
+ "subject_number": subject_number,
+ "object_number": object_number,
+ "timeout": timeout,
+ "consul_host": consul_host,
+ "consul_port": consul_port}
data_reply = do_test(runner_conf, case_config,
Use_Dashboard, test_config["contexts"])
diff --git a/testsuites/posca/testcase_script/posca_feature_moon_tenants.py b/testsuites/posca/testcase_script/posca_feature_moon_tenants.py
index 8f4061df..e932575c 100644
--- a/testsuites/posca/testcase_script/posca_feature_moon_tenants.py
+++ b/testsuites/posca/testcase_script/posca_feature_moon_tenants.py
@@ -15,14 +15,15 @@ This test is using yardstick as a tool to begin test.'''
import os
import time
import uuid
-import json
import Queue
import multiprocessing
import utils.logger as log
from utils.parser import Parser as conf_parser
-import utils.env_prepare.stack_prepare as stack_prepare
+import utils.env_prepare.moon_prepare as moon_env
import utils.infra_setup.runner.docker_env as docker_env
+
import utils.infra_setup.runner.yardstick as yardstick_task
+import testsuites.posca.testcase_dashboard.posca_feature_moon as DashBoard
# --------------------------------------------------
# logging configuration
@@ -31,36 +32,31 @@ LOG = log.Logger(__name__).getLogger()
testfile = os.path.basename(__file__)
testcase, file_format = os.path.splitext(testfile)
-# cidr = "/home/opnfv/repos/yardstick/samples/pvp_throughput_bottlenecks.yaml"
-runner_switch = True
runner_DEBUG = True
+manager = multiprocessing.Manager()
+switch = manager.Value('tmp', 0)
-def env_pre(con_dic):
+def env_pre(test_config):
+ if "moon_monitoring" in test_config["contexts"].keys():
+ if test_config["contexts"]['moon_envpre'] is True:
+ moon_environment = test_config["contexts"]['moon_environment']
+ moon_env.moon_envprepare(moon_environment)
LOG.info("yardstick environment prepare!")
- stack_prepare._prepare_env_daemon(True)
-
-
-def config_to_result(test_config, test_result):
- final_data = []
- print(test_result)
- out_data = test_result["result"]["testcases"]
- test_data = out_data["pvp_throughput_bottlenecks"]["tc_data"]
- for result in test_data:
- testdata = {}
- testdata["vcpu"] = test_config["vcpu"]
- testdata["memory"] = test_config["memory"]
- testdata["nrFlows"] = result["data"]["nrFlows"]
- testdata["packet_size"] = result["data"]["packet_size"]
- testdata["throughput"] = result["data"]["throughput_rx_mbps"]
- final_data.append(testdata)
+
+
+def config_to_result(test_result):
+ final_data = {}
+ final_data["testcase"] = "posca_factor_moon_tenants"
+ final_data["test_body"] = []
+ final_data["test_body"].append(test_result)
return final_data
def testcase_parser(runner_conf, out_file="yardstick.out", **parameter_info):
cidr = "/home/opnfv/repos/yardstick/" + \
- runner_conf["yardstick_test_dir"] + \
- runner_conf["yardstick_testcase"]
+ runner_conf["yardstick_test_dir"] + "/" + \
+ runner_conf["yardstick_testcase"] + ".yaml"
cmd = yardstick_task.yardstick_command_parser(debug=runner_DEBUG,
cidr=cidr,
outfile=out_file,
@@ -75,36 +71,19 @@ def do_test(runner_conf, test_config, Use_Dashboard, context_conf):
print(cmd)
stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
LOG.info(stdout)
- loop_value = 0
- while loop_value < 60:
- time.sleep(2)
- loop_value = loop_value + 1
- with open(out_file) as f:
- data = json.load(f)
- if data["status"] == 1:
- LOG.info("yardstick run success")
- break
- elif data["status"] == 2:
- LOG.error("yardstick error exit")
- exit()
- # data = json.load(output)
-
- save_data = config_to_result(test_config, data)
- if Use_Dashboard is True:
- print("use dashboard")
- # DashBoard.dashboard_send_data(context_conf, save_data)
-
- # return save_data["data_body"]
- return save_data
+ switch.value += 1
+ save_date = []
+ return save_date
def run(test_config):
load_config = test_config["load_manager"]
scenarios_conf = load_config["scenarios"]
- runner_conf = test_config["runners"]
+ contexts_conf = test_config["contexts"]
+ runner_conf = load_config["runners"]
Use_Dashboard = False
- env_pre(None)
+ env_pre(test_config)
if test_config["contexts"]["yardstick_ip"] is None:
load_config["contexts"]["yardstick_ip"] =\
conf_parser.ip_parser("yardstick_test_ip")
@@ -115,15 +94,22 @@ def run(test_config):
conf_parser.ip_parser("dashboard")
LOG.info("Create Dashboard data")
Use_Dashboard = True
- # DashBoard.dashboard_system_bandwidth(test_config["contexts"])
-
- resources = conf_parser.str_to_list(scenarios_conf["resources"])
- initial = conf_parser.str_to_list(scenarios_conf["initial"])
- threshhold = conf_parser.str_to_list(scenarios_conf["threshhold"])
- timeout = conf_parser.str_to_list(scenarios_conf["timeout"])
- SLA = conf_parser.str_to_list(scenarios_conf["SLA"])
- case_config = {"SLA": SLA,
- "resources": resources}
+ DashBoard.posca_moon_init(test_config["contexts"])
+
+ subject_number = int(scenarios_conf["subject_number"])
+ object_number = int(scenarios_conf["object_number"])
+ timeout = scenarios_conf["timeout"]
+ consul_host = contexts_conf["moon_environment"]["ip"]
+ consul_port = contexts_conf["moon_environment"]["consul_port"]
+
+ initial = scenarios_conf["initial_tenants"]
+ threshhold = scenarios_conf["steps_tenants"]
+ tolerate_time = scenarios_conf["tolerate_time"]
+ case_config = {"subject_number": subject_number,
+ "object_number": object_number,
+ "timeout": timeout,
+ "consul_host": consul_host,
+ "consul_port": consul_port}
process_queue = Queue.Queue()
@@ -136,8 +122,8 @@ def run(test_config):
tenant_number = threshhold
else:
tenant_number = initial
-
- while runner_switch is True:
+ while switch.value == 0:
+ LOG.info("Start %d process", tenant_number)
for tenant in range(0, tenant_number):
process = multiprocessing.Process(target=do_test,
args=(runner_conf,
@@ -150,7 +136,7 @@ def run(test_config):
result = result + tenant_number
tenant_number = threshhold
- time.sleep(timeout)
+ time.sleep(tolerate_time)
while process_queue.qsize():
process = process_queue.get()
@@ -161,6 +147,12 @@ def run(test_config):
else:
result = result - threshhold
+ testdate = {"tenant_max": result}
+ testresult = config_to_result(testdate)
LOG.info("Finished bottlenecks testcase")
- LOG.info("The result data is %s", result)
- return result
+ LOG.info("The result data is %d", result)
+ if Use_Dashboard is True:
+ print "Use Dashboard"
+ DashBoard.dashboard_send_data(test_config["contexts"], testresult)
+
+ return testresult
diff --git a/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py b/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py
index 830ff73f..08c4cbe9 100644
--- a/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py
+++ b/testsuites/posca/testcase_script/posca_feature_testpmd_scale_up.py
@@ -81,14 +81,10 @@ def do_test(test_config, Use_Dashboard, context_conf):
elif data["status"] == 2:
LOG.error("yardstick error exit")
exit()
- # data = json.load(output)
save_data = config_to_result(test_config, data)
if Use_Dashboard is True:
print("use dashboard")
- # DashBoard.dashboard_send_data(context_conf, save_data)
-
- # return save_data["data_body"]
return save_data
@@ -108,7 +104,6 @@ def run(test_config):
conf_parser.ip_parser("dashboard")
LOG.info("Create Dashboard data")
Use_Dashboard = True
- # DashBoard.dashboard_system_bandwidth(test_config["contexts"])
cpus = conf_parser.str_to_list(scenarios_conf["cpus"])
mems = conf_parser.str_to_list(scenarios_conf["mems"])
diff --git a/testsuites/posca/testcase_script/posca_feature_vnf_scale_out.py b/testsuites/posca/testcase_script/posca_feature_vnf_scale_out.py
index 6d53515f..417cf2b9 100644
--- a/testsuites/posca/testcase_script/posca_feature_vnf_scale_out.py
+++ b/testsuites/posca/testcase_script/posca_feature_vnf_scale_out.py
@@ -46,12 +46,9 @@ def env_pre(test_config):
print(test_yardstick)
stack_prepare._prepare_env_daemon(test_yardstick)
quota_prepare.quota_env_prepare()
- cmd = ('yardstick env prepare')
LOG.info("yardstick environment prepare!")
- print docker_env.yardstick_info['container']
if(test_config["contexts"]['yardstick_envpre']):
- yardstick_container = docker_env.yardstick_info['container']
- stdout = docker_env.docker_exec_cmd(yardstick_container, cmd)
+ stdout = yardstick_task.yardstick_image_prepare()
LOG.debug(stdout)
@@ -99,16 +96,11 @@ def do_test(test_config, Use_Dashboard, context_conf):
elif data["status"] == 2:
LOG.error("yardstick error exit")
exit()
- # data = json.load(output)
save_data = config_to_result(test_config, data)
- print("^^^^^^^^^^^^^^^^^^^^^^^^^")
- print save_data
if Use_Dashboard is True:
print("use dashboard")
- # DashBoard.dashboard_send_data(context_conf, save_data)
- # return save_data["data_body"]
return save_data