summaryrefslogtreecommitdiffstats
path: root/testcases/OpenStack
diff options
context:
space:
mode:
authorMorgan Richomme <morgan.richomme@orange.com>2016-06-08 15:56:54 +0200
committerMorgan Richomme <morgan.richomme@orange.com>2016-06-08 16:43:47 +0200
commit03fc68820d1badd600832de3b7c6dd72368dd198 (patch)
tree0ece513a5ea3acd1296e299fd1e09a331f80ad03 /testcases/OpenStack
parent4c971bff1c2e4665bbf389f84acfa4d2c81f9b55 (diff)
Adapt functest testcase to API refactoring
JIRA: FUNCTEST-303 Change-Id: Ia276d9ca6e8d62b496c3b5f81561b14b02c43fd7 Signed-off-by: Morgan Richomme <morgan.richomme@orange.com>
Diffstat (limited to 'testcases/OpenStack')
-rwxr-xr-xtestcases/OpenStack/rally/run_rally-cert.py47
-rw-r--r--testcases/OpenStack/tempest/run_tempest.py48
-rw-r--r--testcases/OpenStack/vPing/vPing_ssh.py58
-rw-r--r--testcases/OpenStack/vPing/vPing_userdata.py51
4 files changed, 89 insertions, 115 deletions
diff --git a/testcases/OpenStack/rally/run_rally-cert.py b/testcases/OpenStack/rally/run_rally-cert.py
index c3dd304ac..6bb29b8e0 100755
--- a/testcases/OpenStack/rally/run_rally-cert.py
+++ b/testcases/OpenStack/rally/run_rally-cert.py
@@ -18,7 +18,6 @@ import iniparse
import json
import os
import re
-import requests
import subprocess
import time
import yaml
@@ -125,26 +124,6 @@ CINDER_VOLUME_TYPE_NAME = "volume_test"
SUMMARY = []
-def push_results_to_db(case, payload, criteria):
-
- url = TEST_DB + "/results"
- installer = functest_utils.get_installer_type(logger)
- scenario = functest_utils.get_scenario(logger)
- version = functest_utils.get_version(logger)
- pod_name = functest_utils.get_pod_name(logger)
-
- # evalutate success criteria
-
- params = {"project_name": "functest", "case_name": case,
- "pod_name": pod_name, "installer": installer,
- "version": version, "scenario": scenario,
- "criteria": criteria, "details": payload}
-
- headers = {'Content-Type': 'application/json'}
- r = requests.post(url, data=json.dumps(params), headers=headers)
- logger.debug(r)
-
-
def get_task_id(cmd_raw):
"""
get task id from command rally result
@@ -303,6 +282,8 @@ def run_task(test_name):
#
global SUMMARY
logger.info('Starting test scenario "{}" ...'.format(test_name))
+ start_time = time.time()
+ stop_time = start_time
task_file = '{}task.yaml'.format(RALLY_DIR)
if not os.path.exists(task_file):
@@ -376,13 +357,23 @@ def run_task(test_name):
# Push results in payload of testcase
if args.report:
- logger.debug("Push result into DB")
- push_results_to_db("Rally_details", json_data, status)
+ stop_time = time.time()
+ logger.debug("Push Rally detailed results into DB")
+ functest_utils.push_results_to_db("functest",
+ "Rally_details",
+ logger,
+ start_time,
+ stop_time,
+ status,
+ json_data)
def main():
global SUMMARY
global network_dict
+ start_time = time.time()
+ stop_time = start_time
+
# configure script
if not (args.test_name in tests):
logger.error('argument not valid')
@@ -482,6 +473,7 @@ def main():
"+===================+============+===============+===========+"
"\n")
payload = []
+ stop_time = time.time()
# for each scenario we draw a row for the table
total_duration = 0.0
@@ -538,8 +530,13 @@ def main():
if args.report:
logger.debug("Pushing Rally summary into DB...")
- push_results_to_db("Rally", payload, status)
-
+ functest_utils.push_results_to_db("functest",
+ "Rally",
+ logger,
+ start_time,
+ stop_time,
+ status,
+ payload)
if args.noclean:
exit(0)
diff --git a/testcases/OpenStack/tempest/run_tempest.py b/testcases/OpenStack/tempest/run_tempest.py
index d8a8a1acb..46b01898f 100644
--- a/testcases/OpenStack/tempest/run_tempest.py
+++ b/testcases/OpenStack/tempest/run_tempest.py
@@ -14,12 +14,11 @@
# http://www.apache.org/licenses/LICENSE-2.0
#
import argparse
-import json
import os
import re
-import requests
import shutil
import subprocess
+import sys
import time
import yaml
import ConfigParser
@@ -114,27 +113,6 @@ def get_info(file_result):
logger.debug("duration:" + duration)
-def push_results_to_db(case, payload, criteria):
-
- # TODO move DB creds into config file
- url = TEST_DB + "/results"
- installer = ft_utils.get_installer_type(logger)
- scenario = ft_utils.get_scenario(logger)
- version = ft_utils.get_version(logger)
- pod_name = ft_utils.get_pod_name(logger)
-
- logger.info("Pushing results to DB: '%s'." % url)
-
- params = {"project_name": "functest", "case_name": case,
- "pod_name": str(pod_name), 'installer': installer,
- "version": version, "scenario": scenario, "criteria": criteria,
- 'details': payload}
- headers = {'Content-Type': 'application/json'}
-
- r = requests.post(url, data=json.dumps(params), headers=headers)
- logger.debug(r)
-
-
def create_tempest_resources():
ks_creds = os_utils.get_credentials("keystone")
logger.debug("Creating tenant and user for Tempest suite")
@@ -253,6 +231,8 @@ def run_tempest(OPTION):
# :return: void
#
logger.info("Starting Tempest test suite: '%s'." % OPTION)
+ start_time = time.time()
+ stop_time = start_time
cmd_line = "rally verify start " + OPTION + " --system-wide"
header = ("Tempest environment:\n"
@@ -293,11 +273,12 @@ def run_tempest(OPTION):
dur_sec_float = float(duration.split(':')[2])
dur_sec_int = int(round(dur_sec_float, 0))
dur_sec_int = dur_sec_int + 60 * dur_min
-
+ stop_time = time.time()
# Push results in payload of testcase
if args.report:
+ logger.debug("Pushing tempest results into DB...")
# Note criteria hardcoded...TODO move to testcase.yaml
- status = "failed"
+ status = "FAIL"
try:
diff = (int(num_tests) - int(num_failures))
success_rate = 100 * diff / int(num_tests)
@@ -306,7 +287,7 @@ def run_tempest(OPTION):
# For Tempest we assume that the success rate is above 90%
if success_rate >= 90:
- status = "passed"
+ status = "PASS"
# add the test in error in the details sections
# should be possible to do it during the test
@@ -322,9 +303,18 @@ def run_tempest(OPTION):
"tests": int(num_tests), "failures": int(num_failures),
"errors": error_logs}
logger.info("Results: " + str(json_results))
-
- logger.debug("Push result into DB")
- push_results_to_db("Tempest", json_results, status)
+ # TODO split Tempest smoke and full
+ try:
+ ft_utils.push_results_to_db("functest",
+ "Tempest",
+ logger,
+ start_time,
+ stop_time,
+ status,
+ json_results)
+ except:
+ logger.error("Error pushing results into Database '%s'"
+ % sys.exc_info()[0])
def main():
diff --git a/testcases/OpenStack/vPing/vPing_ssh.py b/testcases/OpenStack/vPing/vPing_ssh.py
index 11887845c..2a417eb3c 100644
--- a/testcases/OpenStack/vPing/vPing_ssh.py
+++ b/testcases/OpenStack/vPing/vPing_ssh.py
@@ -11,7 +11,7 @@
# Later, the VM2 boots then execute cloud-init to ping VM1.
# After successful ping, both the VMs are deleted.
# 0.2: measure test duration and publish results under json format
-#
+# 0.3: adapt push to DB after Test API refactoring
#
import argparse
import datetime
@@ -19,6 +19,7 @@ import os
import paramiko
import pprint
import re
+import sys
import time
import yaml
from scp import SCPClient
@@ -176,30 +177,6 @@ def create_security_group(neutron_client):
return sg_id
-def push_results(start_time_ts, duration, status):
- try:
- logger.debug("Pushing result into DB...")
- scenario = functest_utils.get_scenario(logger)
- version = functest_utils.get_version(logger)
- criteria = "failed"
- test_criteria = functest_utils.get_criteria_by_test("vping_ssh")
- if eval(test_criteria): # evaluates the regex 'status == "PASS"'
- criteria = "passed"
- pod_name = functest_utils.get_pod_name(logger)
- build_tag = functest_utils.get_build_tag(logger)
- functest_utils.push_results_to_db(TEST_DB,
- "functest",
- "vPing",
- logger, pod_name, version, scenario,
- criteria, build_tag,
- payload={'timestart': start_time_ts,
- 'duration': duration,
- 'status': status})
- except:
- logger.error("Error pushing results into Database '%s'"
- % sys.exc_info()[0])
-
-
def main():
creds_nova = openstack_utils.get_credentials("nova")
@@ -268,10 +245,10 @@ def main():
server.delete()
# boot VM 1
- start_time_ts = time.time()
- end_time_ts = start_time_ts
+ start_time = time.time()
+ stop_time = start_time
logger.info("vPing Start Time:'%s'" % (
- datetime.datetime.fromtimestamp(start_time_ts).strftime(
+ datetime.datetime.fromtimestamp(start_time).strftime(
'%Y-%m-%d %H:%M:%S')))
logger.info("Creating instance '%s'..." % NAME_VM_1)
@@ -409,10 +386,12 @@ def main():
logger.info("Waiting for ping...")
sec = 0
+ stop_time = time.time()
duration = 0
cmd = '~/ping.sh ' + test_ip
flag = False
+
while True:
time.sleep(1)
(stdin, stdout, stderr) = ssh.exec_command(cmd)
@@ -423,8 +402,8 @@ def main():
logger.info("vPing detected!")
# we consider start time at VM1 booting
- end_time_ts = time.time()
- duration = round(end_time_ts - start_time_ts, 1)
+ stop_time = time.time()
+ duration = round(stop_time - start_time, 1)
logger.info("vPing duration:'%s' s." % duration)
EXIT_CODE = 0
flag = True
@@ -440,7 +419,9 @@ def main():
sec += 1
test_status = "FAIL"
- if EXIT_CODE == 0:
+ test_criteria = functest_utils.get_criteria_by_test("vping_ssh")
+
+ if eval(test_criteria):
logger.info("vPing OK")
test_status = "PASS"
else:
@@ -448,7 +429,20 @@ def main():
logger.error("vPing FAILED")
if args.report:
- push_results(start_time_ts, duration, test_status)
+ try:
+ logger.debug("Pushing vPing SSH results into DB...")
+ functest_utils.push_results_to_db("functest",
+ "vPing",
+ logger,
+ start_time,
+ stop_time,
+ test_status,
+ details={'timestart': start_time,
+ 'duration': duration,
+ 'status': test_status})
+ except:
+ logger.error("Error pushing results into Database '%s'"
+ % sys.exc_info()[0])
exit(EXIT_CODE)
diff --git a/testcases/OpenStack/vPing/vPing_userdata.py b/testcases/OpenStack/vPing/vPing_userdata.py
index 2b2963144..5b7d2d90f 100644
--- a/testcases/OpenStack/vPing/vPing_userdata.py
+++ b/testcases/OpenStack/vPing/vPing_userdata.py
@@ -11,6 +11,7 @@
# Later, the VM2 boots then execute cloud-init to ping VM1.
# After successful ping, both the VMs are deleted.
# 0.2: measure test duration and publish results under json format
+# 0.3: adapt push to DB after Test API refactoring
#
#
@@ -18,6 +19,7 @@ import argparse
import datetime
import os
import pprint
+import sys
import time
import yaml
@@ -174,29 +176,6 @@ def create_security_group(neutron_client):
return sg_id
-def push_results(start_time_ts, duration, test_status):
- try:
- logger.debug("Pushing result into DB...")
- scenario = functest_utils.get_scenario(logger)
- version = functest_utils.get_version(logger)
- criteria = "failed"
- if test_status == "OK":
- criteria = "passed"
- pod_name = functest_utils.get_pod_name(logger)
- build_tag = functest_utils.get_build_tag(logger)
- functest_utils.push_results_to_db(TEST_DB,
- "functest",
- "vPing_userdata",
- logger, pod_name, version, scenario,
- criteria, build_tag,
- payload={'timestart': start_time_ts,
- 'duration': duration,
- 'status': test_status})
- except:
- logger.error("Error pushing results into Database '%s'"
- % sys.exc_info()[0])
-
-
def main():
creds_nova = openstack_utils.get_credentials("nova")
@@ -268,10 +247,10 @@ def main():
# tune (e.g. flavor, images, network) to your specific
# openstack configuration here
# we consider start time at VM1 booting
- start_time_ts = time.time()
- end_time_ts = start_time_ts
+ start_time = time.time()
+ stop_time = start_time
logger.info("vPing Start Time:'%s'" % (
- datetime.datetime.fromtimestamp(start_time_ts).strftime(
+ datetime.datetime.fromtimestamp(start_time).strftime(
'%Y-%m-%d %H:%M:%S')))
# create VM
@@ -336,6 +315,7 @@ def main():
metadata_tries = 0
console_log = vm2.get_console_output()
duration = 0
+ stop_time = time.time()
while True:
time.sleep(1)
@@ -346,8 +326,8 @@ def main():
logger.info("vPing detected!")
# we consider start time at VM1 booting
- end_time_ts = time.time()
- duration = round(end_time_ts - start_time_ts, 1)
+ stop_time = time.time()
+ duration = round(stop_time - start_time, 1)
logger.info("vPing duration:'%s'" % duration)
EXIT_CODE = 0
break
@@ -379,7 +359,20 @@ def main():
logger.error("vPing FAILED")
if args.report:
- push_results(start_time_ts, duration, test_status)
+ try:
+ logger.debug("Pushing vPing userdata results into DB...")
+ functest_utils.push_results_to_db("functest",
+ "vPing_userdata",
+ logger,
+ start_time,
+ stop_time,
+ test_status,
+ details={'timestart': start_time,
+ 'duration': duration,
+ 'status': test_status})
+ except:
+ logger.error("Error pushing results into Database '%s'"
+ % sys.exc_info()[0])
exit(EXIT_CODE)