path: root/testcases/Controllers/ONOS
Diffstat (limited to 'testcases/Controllers/ONOS')
 -rw-r--r--  testcases/Controllers/ONOS/Sfc/Sfc.py               46
 -rwxr-xr-x  testcases/Controllers/ONOS/Teston/onosfunctest.py    9
2 files changed, 51 insertions, 4 deletions
diff --git a/testcases/Controllers/ONOS/Sfc/Sfc.py b/testcases/Controllers/ONOS/Sfc/Sfc.py
index cba726c7a..6b1973ef0 100644
--- a/testcases/Controllers/ONOS/Sfc/Sfc.py
+++ b/testcases/Controllers/ONOS/Sfc/Sfc.py
@@ -22,7 +22,10 @@
# Testcase 7 : Cleanup
# ###########################################################################
#
+
import functest.utils.functest_logger as ft_logger
+import functest.utils.functest_utils as functest_utils
+import time
from Sfc_fun import Sfc_fun
@@ -30,6 +33,8 @@ class Sfc:
"""Script to Test the SFC scenarios in ONOS."""
logger = ft_logger.Logger("sfc").getLogger()
Sfc_obj = Sfc_fun()
+ start_time = time.time()
+ status = "PASS"
print("################################################################")
print(" OPNFV SFC Script ")
print("################################################################")
@@ -39,18 +44,21 @@ class Sfc:
if (Sfc_obj.getToken() == 200):
logger.info("\t\tCreation of Token is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of Token is NOT successfull")
#########################################################################
logger.info("\t1.2 Creation of Network")
if (Sfc_obj.createNetworks() == 201):
logger.info("\t\tCreation of network is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of network is NOT successfull")
#########################################################################
logger.info("\t1.3 Creation of Subnetwork")
if (Sfc_obj.createSubnets() == 201):
logger.info("\t\tCreation of Subnetwork is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of Subnetwork is NOT successfull")
print ("\n###########################################################\n")
########################################################################
@@ -60,36 +68,42 @@ class Sfc:
if (Sfc_obj.createPorts() == 201):
logger.info("\t\tCreation of Port is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of Port is NOT successfull")
#########################################################################
logger.info("\t2.2 Creation of VM-Compute-Node")
if (Sfc_obj.createVm() == 202):
logger.info("\t\tCreation of VM is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of VM is NOT successfull")
#########################################################################
logger.info("\t2.3 Check VM Status")
if (Sfc_obj.checkVmState() == 200):
logger.info("\t\tVM are in active state")
else:
+ status = "FAIL"
logger.error("\t\t : VM is NOT Active")
#########################################################################
logger.info("\t\t2.4 Router Creation")
if (Sfc_obj.createRouter() == 201):
logger.info("\t\t Router Creation is Successful")
else:
+ status = "FAIL"
logger.error("\t\t : Router Creation is NOT Successful")
#########################################################################
logger.info("\t\t2.5 Attachement of Interface to VM")
if (Sfc_obj.attachInterface() == 200):
logger.info("\t\t Interface attached to VM")
else:
+ status = "FAIL"
logger.error("\t\t : Interface NOT attached to VM")
#########################################################################
logger.info("\t\t2.6 Attachement of FLoating Ip to VM")
if (Sfc_obj.addFloatingIp() == 202):
logger.info("\t\t Floating Ip attached to VM SUccessful")
else:
+ status = "FAIL"
logger.error("\t\t : Floating Ip NOT attached to VM ")
print ("\n###########################################################\n")
########################################################################
@@ -100,6 +114,7 @@ class Sfc:
if (Sfc_obj.createPortPair() == 201):
logger.info("\t\tCreation of Port pair is successful")
else:
+ status = "FAIL"
logger.error("\t\t : Creation of Port pair is NOT successful")
#########################################################################
@@ -107,6 +122,7 @@ class Sfc:
if (Sfc_obj.getPortPair() == 200):
logger.info("\t\tSuccessfully got Port Pair ID")
else:
+ status = "FAIL"
logger.error("\t\t : UnSuccessfully got Port Pair ID")
#########################################################################
@@ -114,6 +130,7 @@ class Sfc:
if (Sfc_obj.createPortGroup() == 201):
logger.info("\t\tPort Pair Group successfully Created")
else:
+ status = "FAIL"
logger.error("\t\t : Port Pair Group NOT successfully Created")
#########################################################################
@@ -122,6 +139,7 @@ class Sfc:
if (Sfc_obj.getPortGroup() == 200):
logger.info("\t\tPort Pair Group ID successfully received")
else:
+ status = "FAIL"
logger.error("\t\t : Port Pair Group ID NOT successfully received")
#########################################################################
@@ -129,6 +147,7 @@ class Sfc:
if (Sfc_obj.createFlowClassifier() == 201):
logger.info("\t\tFlow Classifier successfully Created")
else:
+ status = "FAIL"
logger.error("\t\t : Flow Classifier NOT successfully Created")
print ("\n###########################################################\n")
########################################################################
@@ -139,6 +158,7 @@ class Sfc:
if (Sfc_obj.createPortChain() == 201):
logger.info("\t\tPortChain successfully Created")
else:
+ status = "FAIL"
logger.error("\t\tPortChain NOT successfully Created")
print ("\n###########################################################\n")
#########################################################################
@@ -146,6 +166,7 @@ class Sfc:
if (Sfc_obj.loginToVM() == "1"):
logger.info("\t\tSFC function Working")
else:
+ status = "FAIL"
logger.error("\t\t : SFC function not working")
print ("\n###########################################################\n")
#########################################################################
@@ -154,6 +175,7 @@ class Sfc:
if (Sfc_obj.loginToVM() == "0"):
logger.info("\t\tSFC function is removed Successfully")
else:
+ status = "FAIL"
logger.error("\t\t:SFC function not Removed.Have some problem")
if (Sfc_obj.deleteFlowClassifier() == 204):
if (Sfc_obj.deletePortGroup() == 204):
@@ -161,15 +183,19 @@ class Sfc:
logger.info(
"\t\tSFC configuration is deleted successfully")
else:
+ status = "FAIL"
logger.error("\t\t : Port pair configuration is NOT\
deleted successfully")
else:
+ status = "FAIL"
logger.error("\t\t : Port Group configuration is NOT \
deleted successfully")
else:
+ status = "FAIL"
logger.error("\t\t : Flow classifier configuration is NOT \
deleted successfully")
else:
+ status = "FAIL"
logger.error("\t\t:PortChain configuration is NOT deleted \
successfully")
print ("\n###########################################################n")
@@ -178,6 +204,26 @@ class Sfc:
if (Sfc_obj.cleanup() == 204):
logger.info("\t\tCleanUp is successfull")
else:
+ status = "FAIL"
logger.error("\t\t : CleanUp is NOT successfull")
print ("###############################################################")
+ logger.info("Summary :")
+ try:
+ logger.debug("Push ONOS SFC results into DB")
+ stop_time = time.time()
+
+ # ONOS SFC success criteria = all tests OK
+ duration = round(stop_time - start_time, 1)
+ logger.info("Result is " + status)
+ functest_utils.push_results_to_db("functest",
+ "onos_sfc",
+ logger,
+ start_time,
+ stop_time,
+ status,
+ details={'timestart': start_time,
+ 'duration': duration,
+ 'status': status})
+ except:
+ logger.error("Error pushing results into Database")
print("############################END OF SCRIPT ######################")
diff --git a/testcases/Controllers/ONOS/Teston/onosfunctest.py b/testcases/Controllers/ONOS/Teston/onosfunctest.py
index 23fb62d47..35ced61c5 100755
--- a/testcases/Controllers/ONOS/Teston/onosfunctest.py
+++ b/testcases/Controllers/ONOS/Teston/onosfunctest.py
@@ -227,10 +227,6 @@ def main():
SetOnosIp()
RunScript("FUNCvirNetNB")
RunScript("FUNCvirNetNBL3")
- if DEPLOY_SCENARIO == "os-onos-sfc-ha":
- CreateImage()
- SetSfcConf()
- SfcTest()
try:
logger.debug("Push ONOS results into DB")
# TODO check path result for the file
@@ -258,6 +254,11 @@ def main():
except:
logger.error("Error pushing results into Database")
+ if DEPLOY_SCENARIO == "os-onos-sfc-ha":
+ CreateImage()
+ SetSfcConf()
+ SfcTest()
+
# CleanOnosTest()
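
For onosfunctest.py, the change only reorders main(): the base ONOS results are pushed to the DB first, and the SFC-specific steps (CreateImage, SetSfcConf, SfcTest) now run afterwards, and only for the os-onos-sfc-ha scenario. A rough sketch of that flow, with print placeholders standing in for the real helpers:

import os


def push_base_results():
    # Placeholder for the block that pushes the base ONOS results to the DB.
    print("Push ONOS results into DB")


def run_sfc_steps():
    # Placeholders for CreateImage(), SetSfcConf() and SfcTest().
    print("CreateImage / SetSfcConf / SfcTest")


def main():
    deploy_scenario = os.environ.get("DEPLOY_SCENARIO", "")
    try:
        push_base_results()
    except Exception:
        print("Error pushing results into Database")
    # SFC steps are now executed after the base results are reported.
    if deploy_scenario == "os-onos-sfc-ha":
        run_sfc_steps()


if __name__ == '__main__':
    main()
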
"># Authors: # morgan.richomme@orange.com # jose.lausuch@ericsson.com # viktor.tikkanen@nokia.com # # All rights reserved. This program and the accompanying materials # are made available under the terms of the Apache License, Version 2.0 # which accompanies this distribution, and is available at # http://www.apache.org/licenses/LICENSE-2.0 # import argparse import json import logging import os import re import requests import shutil import subprocess import sys import time import yaml import keystoneclient.v2_0.client as ksclient from neutronclient.v2_0 import client as neutronclient modes = ['full', 'smoke', 'baremetal', 'compute', 'data_processing', 'identity', 'image', 'network', 'object_storage', 'orchestration', 'telemetry', 'volume', 'custom'] """ tests configuration """ parser = argparse.ArgumentParser() parser.add_argument("-d", "--debug", help="Debug mode", action="store_true") parser.add_argument("-m", "--mode", help="Tempest test mode [smoke, all]", default="smoke") parser.add_argument("-r", "--report", help="Create json result file", action="store_true") args = parser.parse_args() """ logging configuration """ logger = logging.getLogger('run_tempest') logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() if args.debug: ch.setLevel(logging.DEBUG) else: ch.setLevel(logging.INFO) formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') ch.setFormatter(formatter) logger.addHandler(ch) REPO_PATH=os.environ['repos_dir']+'/functest/' if not os.path.exists(REPO_PATH): logger.error("Functest repository directory not found '%s'" % REPO_PATH) exit(-1) sys.path.append(REPO_PATH + "testcases/") import functest_utils with open("/home/opnfv/functest/conf/config_functest.yaml") as f: functest_yaml = yaml.safe_load(f) f.close() TEST_DB = functest_yaml.get("results").get("test_db_url") MODE = "smoke" TENANT_NAME = functest_yaml.get("tempest").get("identity").get("tenant_name") TENANT_DESCRIPTION = functest_yaml.get("tempest").get("identity").get("tenant_description") USER_NAME = functest_yaml.get("tempest").get("identity").get("user_name") USER_PASSWORD = functest_yaml.get("tempest").get("identity").get("user_password") DEPLOYMENT_MAME = functest_yaml.get("rally").get("deployment_name") RALLY_INSTALLATION_DIR = functest_yaml.get("general").get("directories").get("dir_rally_inst") RESULTS_DIR = functest_yaml.get("general").get("directories").get("dir_results") TEMPEST_RESULTS_DIR = RESULTS_DIR + '/tempest' def get_info(file_result): test_run = "" duration = "" test_failed = "" p = subprocess.Popen('cat tempest.log', shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) for line in p.stdout.readlines(): # print line, if (len(test_run) < 1): test_run = re.findall("[0-9]*\.[0-9]*s", line) if (len(duration) < 1): duration = re.findall("[0-9]*\ tests", line) regexp = r"(failures=[0-9]+)" if (len(test_failed) < 1): test_failed = re.findall(regexp, line) retval = p.wait() logger.debug("test_run:"+test_run) logger.debug("duration:"+duration) def push_results_to_db(payload, module, pod_name): # TODO move DB creds into config file url = TEST_DB + "/results" installer = functest_utils.get_installer_type(logger) scenario = functest_utils.get_scenario(logger) logger.info("Pushing results to DB: '%s'." 
% url) params = {"project_name": "functest", "case_name": "Tempest", "pod_name": str(pod_name), 'installer': installer, "version": scenario, 'details': payload} headers = {'Content-Type': 'application/json'} r = requests.post(url, data=json.dumps(params), headers=headers) logger.debug(r) def create_tempest_resources(): ks_creds = functest_utils.get_credentials("keystone") logger.info("Creating tenant and user for Tempest suite") keystone = ksclient.Client(**ks_creds) tenant_id = functest_utils.create_tenant(keystone, TENANT_NAME, TENANT_DESCRIPTION) if tenant_id == '': logger.error("Error : Failed to create %s tenant" %TENANT_NAME) user_id = functest_utils.create_user(keystone, USER_NAME, USER_PASSWORD, None, tenant_id) if user_id == '': logger.error("Error : Failed to create %s user" %USER_NAME) def free_tempest_resources(): ks_creds = functest_utils.get_credentials("keystone") logger.info("Deleting tenant and user for Tempest suite)") keystone = ksclient.Client(**ks_creds) user_id = functest_utils.get_user_id(keystone, USER_NAME) if user_id == '': logger.error("Error : Failed to get id of %s user" % USER_NAME) else: if not functest_utils.delete_user(keystone, user_id): logger.error("Error : Failed to delete %s user" % USER_NAME) tenant_id = functest_utils.get_tenant_id(keystone, TENANT_NAME) if tenant_id == '': logger.error("Error : Failed to get id of %s tenant" % TENANT_NAME) else: if not functest_utils.delete_tenant(keystone, tenant_id): logger.error("Error : Failed to delete %s tenant" % TENANT_NAME) def configure_tempest(): """ Add/update needed parameters into tempest.conf file generated by Rally """ logger.debug("Generating tempest.conf file...") cmd = "rally verify genconfig" functest_utils.execute_command(cmd,logger) logger.debug("Resolving deployment UUID...") cmd = "rally deployment list | awk '/"+DEPLOYMENT_MAME+"/ {print $2}'" p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT); deployment_uuid = p.stdout.readline().rstrip() if deployment_uuid == "": logger.debug(" Rally deployment NOT found") return False logger.debug("Finding tempest.conf file...") tempest_conf_file = RALLY_INSTALLATION_DIR+"/tempest/for-deployment-" \ +deployment_uuid+"/tempest.conf" if not os.path.isfile(tempest_conf_file): logger.error(" Tempest configuration file %s NOT found." 
% tempest_conf_file) return False logger.debug(" Updating fixed_network_name...") private_net_name = "" creds_neutron = functest_utils.get_credentials("neutron") neutron_client = neutronclient.Client(**creds_neutron) private_net = functest_utils.get_private_net(neutron_client) if private_net is None: logger.error("No shared private networks found.") else: private_net_name = private_net['name'] cmd = "crudini --set "+tempest_conf_file+" compute fixed_network_name " \ +private_net_name functest_utils.execute_command(cmd,logger) logger.debug(" Updating non-admin credentials...") cmd = "crudini --set "+tempest_conf_file+" identity tenant_name " \ +TENANT_NAME functest_utils.execute_command(cmd,logger) cmd = "crudini --set "+tempest_conf_file+" identity username " \ +USER_NAME functest_utils.execute_command(cmd,logger) cmd = "crudini --set "+tempest_conf_file+" identity password " \ +USER_PASSWORD functest_utils.execute_command(cmd,logger) # Copy tempest.conf to /home/opnfv/functest/results/tempest/ print shutil.copyfile(tempest_conf_file,TEMPEST_RESULTS_DIR+'/tempest.conf') return True def run_tempest(OPTION): # # the "main" function of the script which launches Rally to run Tempest # :param option: tempest option (smoke, ..) # :return: void # logger.info("Starting Tempest test suite: '%s'." % OPTION) cmd_line = "rally verify start "+OPTION logger.debug('Executing command : {}'.format(cmd_line)) CI_DEBUG = os.environ.get("CI_DEBUG") if CI_DEBUG == "true" or CI_DEBUG == "True": subprocess.call(cmd_line, shell=True, stderr=subprocess.STDOUT) else: header = "Tempest environment:\n"\ " Installer: %s\n Scenario: %s\n Node: %s\n Date: %s\n" % \ (os.getenv('INSTALLER_TYPE','Unknown'), \ os.getenv('DEPLOY_SCENARIO','Unknown'), \ os.getenv('NODE_NAME','Unknown'), \ time.strftime("%a %b %d %H:%M:%S %Z %Y")) f_stdout = open(TEMPEST_RESULTS_DIR+"/tempest.log", 'w+') f_stderr = open(TEMPEST_RESULTS_DIR+"/tempest-error.log", 'w+') f_env = open(TEMPEST_RESULTS_DIR+"/environment.log", 'w+') f_env.write(header) subprocess.call(cmd_line, shell=True, stdout=f_stdout, stderr=f_stderr) f_stdout.close() f_stderr.close() f_env.close() cmd_line = "rally verify show" subprocess.call(cmd_line, shell=True) cmd_line = "rally verify list" logger.debug('Executing command : {}'.format(cmd_line)) cmd = os.popen(cmd_line) output = (((cmd.read()).splitlines()[3]).replace(" ", "")).split("|") # Format: # | UUID | Deployment UUID | smoke | tests | failures | Created at | # Duration | Status | num_tests = output[4] num_failures = output[5] time_start = output[6] duration = output[7] # Compute duration (lets assume it does not take more than 60 min) dur_min=int(duration.split(':')[1]) dur_sec_float=float(duration.split(':')[2]) dur_sec_int=int(round(dur_sec_float,0)) dur_sec_int = dur_sec_int + 60 * dur_min # Generate json results for DB json_results = {"timestart": time_start, "duration": dur_sec_int, "tests": int(num_tests), "failures": int(num_failures)} logger.info("Results: "+str(json_results)) pod_name = functest_utils.get_pod_name(logger) # Push results in payload of testcase if args.report: logger.debug("Push result into DB") push_results_to_db(json_results, MODE, pod_name) def main(): global MODE if not (args.mode): MODE = "smoke" elif not (args.mode in modes): logger.error("Tempest mode not valid. 
Possible values are:\n" + str(modes)) exit(-1) elif (args.mode == 'custom'): MODE = "--tests-file "+REPO_PATH+"testcases/VIM/OpenStack/CI/custom_tests/test_list.txt" else: MODE = "--set "+args.mode if not os.path.exists(TEMPEST_RESULTS_DIR): os.makedirs(TEMPEST_RESULTS_DIR) create_tempest_resources() configure_tempest() run_tempest(MODE) free_tempest_resources() if __name__ == '__main__': main()