#!/usr/bin/env python3 # Copyright 2015-2017 Intel Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """VSPERF main script. """ import logging import os import sys import argparse import re import time import csv import datetime import shutil import unittest import locale import copy import glob import subprocess import ast import xmlrunner from tabulate import tabulate from conf import merge_spec from conf import settings import core.component_factory as component_factory from core.loader import Loader from testcases import PerformanceTestCase from testcases import IntegrationTestCase from testcases import K8sPerformanceTestCase from tools import tasks from tools import networkcard from tools import functions from tools.pkt_gen import trafficgen from tools.opnfvdashboard import opnfvdashboard from tools.os_deploy_tgen import osdt sys.dont_write_bytecode = True VERBOSITY_LEVELS = { 'debug': logging.DEBUG, 'info': logging.INFO, 'warning': logging.WARNING, 'error': logging.ERROR, 'critical': logging.CRITICAL } _CURR_DIR = os.path.dirname(os.path.realpath(__file__)) _TEMPLATE_RST = {'head' : os.path.join(_CURR_DIR, 'tools/report/report_head.rst'), 'foot' : os.path.join(_CURR_DIR, 'tools/report/report_foot.rst'), 'final' : 'test_report.rst', 'tmp' : os.path.join(_CURR_DIR, 'tools/report/report_tmp_caption.rst') } _TEMPLATE_MATRIX = "Performance Matrix\n------------------\n\n"\ "The following performance matrix was generated with the results of all the\n"\ "currently run tests. The metric used for comparison is {}.\n\n{}\n\n" _LOGGER = logging.getLogger() logging.getLogger('matplotlib').setLevel(logging.ERROR) def parse_param_string(values): """ Parse and split a single '--test-params' argument. This expects either 'x=y', 'x=y,z' or 'x' (implicit true) values. For multiple overrides use a ; separated list for e.g. --test-params 'x=z; y=(a,b)' """ results = {} if values == '': return {} for param, _, value in re.findall('([^;=]+)(=([^;]+))?', values): param = param.strip() value = value.strip() if param: if value: # values are passed inside string from CLI, so we must retype them accordingly try: results[param] = ast.literal_eval(value) except ValueError: # for backward compatibility, we have to accept strings without quotes _LOGGER.warning("Adding missing quotes around string value: %s = %s", param, str(value)) results[param] = str(value) else: results[param] = True return results def parse_arguments(): """ Parse command line arguments. """ class _SplitTestParamsAction(argparse.Action): """ Parse and split '--test-params' arguments. This expects either a single list of ; separated overrides as 'x=y', 'x=y,z' or 'x' (implicit true) values. e.g. --test-params 'x=z; y=(a,b)' Or a list of these ; separated lists with overrides for multiple tests. e.g. --test-params "['x=z; y=(a,b)','x=z']" """ def __call__(self, parser, namespace, values, option_string=None): if values[0] == '[': input_list = ast.literal_eval(values) parameter_list = [] for test_params in input_list: parameter_list.append(parse_param_string(test_params)) else: parameter_list = parse_param_string(values) results = {'_PARAMS_LIST':parameter_list} setattr(namespace, self.dest, results) class _ValidateFileAction(argparse.Action): """Validate a file can be read from before using it. """ def __call__(self, parser, namespace, values, option_string=None): if not os.path.isfile(values): raise argparse.ArgumentTypeError( 'the path \'%s\' is not a valid path' % values) elif not os.access(values, os.R_OK): raise argparse.ArgumentTypeError( 'the path \'%s\' is not accessible' % values) setattr(namespace, self.dest, values) class _ValidateDirAction(argparse.Action): """Validate a directory can be written to before using it. """ def __call__(self, parser, namespace, values, option_string=None): if not os.path.isdir(values): raise argparse.ArgumentTypeError( 'the path \'%s\' is not a valid path' % values) elif not os.access(values, os.W_OK): raise argparse.ArgumentTypeError( 'the path \'%s\' is not accessible' % values) setattr(namespace, self.dest, values) def list_logging_levels(): """Give a summary of all available logging levels. :return: List of verbosity level names in decreasing order of verbosity """ return sorted(VERBOSITY_LEVELS.keys(), key=lambda x: VERBOSITY_LEVELS[x]) parser = argparse.ArgumentParser(prog=__file__, formatter_class= argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('--version', action='version', version='%(prog)s 0.2') parser.add_argument('--list', '--list-tests', action='store_true', help='list all tests and exit') parser.add_argument('--list-trafficgens', action='store_true', help='list all traffic generators and exit') parser.add_argument('--list-collectors', action='store_true', help='list all system metrics loggers and exit') parser.add_argument('--list-vswitches', action='store_true', help='list all system vswitches and exit') parser.add_argument('--list-fwdapps', action='store_true', help='list all system forwarding applications and exit') parser.add_argument('--list-vnfs', action='store_true', help='list all system vnfs and exit') parser.add_argument('--list-loadgens', action='store_true', help='list all background load generators') parser.add_argument('--list-pods', action='store_true', help='list all system pods') parser.add_argument('--list-settings', action='store_true', help='list effective settings configuration and exit') parser.add_argument('exact_test_name', nargs='*', help='Exact names of\ tests to run. E.g "vsperf phy2phy_tput phy2phy_cont"\ runs only the two tests with those exact names.\ To run all tests omit both positional args and --tests arg.') group = parser.add_argument_group('test selection options') group.add_argument('-m', '--mode', help='vsperf mode of operation;\ Values: "normal" - execute vSwitch, VNF and traffic generator;\ "trafficgen" - execute only traffic generator; "trafficgen-off" \ - execute vSwitch and VNF; trafficgen-pause - execute vSwitch \ and VNF but pause before traffic transmission ', default='normal') group.add_argument('-f', '--test-spec', help='test specification file') group.add_argument('-d', '--test-dir', help='directory containing tests') group.add_argument('-t', '--tests', help='Comma-separated list of terms \ indicating tests to run. e.g. "RFC2544,!p2p" - run all tests whose\ name contains RFC2544 less those containing "p2p"; "!back2back" - \ run all tests except those containing back2back') group.add_argument('--verbosity', choices=list_logging_levels(), help='debug level') group.add_argument('--integration', action='store_true', help='execute integration tests') group.add_argument('--k8s', action='store_true', help='execute Kubernetes tests') group.add_argument('--openstack', action='store_true', help='Run VSPERF with openstack') group.add_argument('--trafficgen', help='traffic generator to use') group.add_argument('--vswitch', help='vswitch implementation to use') group.add_argument('--fwdapp', help='packet forwarding application to use') group.add_argument('--vnf', help='vnf to use') group.add_argument('--loadgen', help='loadgen to use') group.add_argument('--sysmetrics', help='system metrics logger to use') group = parser.add_argument_group('test behavior options') group.add_argument('--xunit', action='store_true', help='enable xUnit-formatted output') group.add_argument('--xunit-dir', action=_ValidateDirAction, help='output directory of xUnit-formatted output') group.add_argument('--load-env', action='store_true', help='enable loading of settings from the environment') group.add_argument('--conf-file', action=_ValidateFileAction, help='settings file') group.add_argument('--test-params', action=_SplitTestParamsAction, help='csv list of test parameters: key=val; e.g. ' 'TRAFFICGEN_PKT_SIZES=(64,128);TRAFFICGEN_DURATION=30; ' 'GUEST_LOOPBACK=["l2fwd"] ...' ' or a list of csv lists of test parameters: key=val; e.g. ' '[\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(128,)\',' '\'TRAFFICGEN_DURATION=10;TRAFFICGEN_PKT_SIZES=(64,)\']') group.add_argument('--opnfvpod', help='name of POD in opnfv') group.add_argument('--matrix', help='enable performance matrix analysis', action='store_true', default=False) args = vars(parser.parse_args()) return args def configure_logging(level): """Configure logging. """ name, ext = os.path.splitext(settings.getValue('LOG_FILE_DEFAULT')) rename_default = "{name}_{uid}{ex}".format(name=name, uid=settings.getValue( 'LOG_TIMESTAMP'), ex=ext) log_file_default = os.path.join( settings.getValue('RESULTS_PATH'), rename_default) name, ext = os.path.splitext(settings.getValue('LOG_FILE_HOST_CMDS')) rename_hostcmd = "{name}_{uid}{ex}".format(name=name, uid=settings.getValue( 'LOG_TIMESTAMP'), ex=ext) log_file_host_cmds = os.path.join( settings.getValue('RESULTS_PATH'), rename_hostcmd) name, ext = os.path.splitext(settings.getValue('LOG_FILE_TRAFFIC_GEN')) rename_traffic = "{name}_{uid}{ex}".format(name=name, uid=settings.getValue( 'LOG_TIMESTAMP'), ex=ext) log_file_traffic_gen = os.path.join( settings.getValue('RESULTS_PATH'), rename_traffic) metrics_file = (settings.getValue('LOG_FILE_INFRA_METRICS_PFX') + settings.getValue('LOG_TIMESTAMP') + '.log') log_file_infra_metrics = os.path.join(settings.getValue('LOG_DIR'), metrics_file) _LOGGER.setLevel(logging.DEBUG) stream_logger = logging.StreamHandler(sys.stdout) stream_logger.setLevel(VERBOSITY_LEVELS[level]) stream_logger.setFormatter(logging.Formatter( '[%(levelname)-5s] %(asctime)s : (%(name)s) - %(message)s')) _LOGGER.addHandler(stream_logger) file_logger = logging.FileHandler(filename=log_file_default) file_logger.setLevel(logging.DEBUG) file_logger.setFormatter(logging.Formatter( '%(asctime)s : %(message)s')) _LOGGER.addHandler(file_logger) class CommandFilter(logging.Filter): """Filter out strings beginning with 'cmd :'""" def filter(self, record): return record.getMessage().startswith(tasks.CMD_PREFIX) class TrafficGenCommandFilter(logging.Filter): """Filter out strings beginning with 'gencmd :'""" def filter(self, record): return record.getMessage().startswith(trafficgen.CMD_PREFIX) class CollectdMetricsFilter(logging.Filter): """Filter out strings beginning with 'COLLECTD' :'""" def filter(self, record): return record.getMessage().startswith('COLLECTD') cmd_logger = logging.FileHandler(filename=log_file_host_cmds) cmd_logger.setLevel(logging.DEBUG) cmd_logger.addFilter(CommandFilter()) _LOGGER.addHandler(cmd_logger) gen_logger = logging.FileHandler(filename=log_file_traffic_gen) gen_logger.setLevel(logging.DEBUG) gen_logger.addFilter(TrafficGenCommandFilter()) _LOGGER.addHandler(gen_logger) if settings.getValue('COLLECTOR') == 'Collectd': met_logger = logging.FileHandler(filename=log_file_infra_metrics) met_logger.setLevel(logging.DEBUG) met_logger.addFilter(CollectdMetricsFilter()) _LOGGER.addHandler(met_logger) def apply_filter(tests, tc_filter): """Allow a subset of tests to be conveniently selected :param tests: The list of Tests from which to select. :param tc_filter: A case-insensitive string of comma-separated terms indicating the Tests to select. e.g. 'RFC' - select all tests whose name contains 'RFC' e.g. 'RFC,burst' - select all tests whose name contains 'RFC' or 'burst' e.g. 'RFC,burst,!p2p' - select all tests whose name contains 'RFC' or 'burst' and from these remove any containing 'p2p'. e.g. '' - empty string selects all tests. :return: A list of the selected Tests. """ # if negative filter is first we have to start with full list of tests if tc_filter.strip()[0] == '!': result = tests else: result = [] if tc_filter is None: tc_filter = "" for term in [x.strip() for x in tc_filter.lower().split(",")]: if not term or term[0] != '!': # Add matching tests from 'tests' into results result.extend([test for test in tests \ if test['Name'].lower().find(term) >= 0]) else: # Term begins with '!' so we remove matching tests result = [test for test in result \ if test['Name'].lower().find(term[1:]) < 0] return result def check_and_set_locale(): """ Function will check locale settings. In case, that it isn't configured properly, then default values specified by DEFAULT_LOCALE will be used. """ system_locale = locale.getdefaultlocale() if None in system_locale: os.environ['LC_ALL'] = settings.getValue('DEFAULT_LOCALE') _LOGGER.warning("Locale was not properly configured. Default values were set. Old locale: %s, New locale: %s", system_locale, locale.getdefaultlocale()) def get_vswitch_names(rst_files): """ Function will return a list of vSwitches detected in given ``rst_files``. """ vswitch_names = set() if rst_files: try: output = subprocess.check_output(['grep', '-h', '^* vSwitch'] + rst_files).decode().splitlines() for line in output: match = re.search(r'^\* vSwitch: ([^,]+)', str(line)) if match: vswitch_names.add(match.group(1)) if vswitch_names: return list(vswitch_names) except subprocess.CalledProcessError: _LOGGER.warning('Cannot detect vSwitches used during testing.') # fallback to the default value return ['vSwitch'] def get_build_tag(): """ Function will return a Jenkins job ID environment variable. """ try: build_tag = os.environ['BUILD_TAG'] except KeyError: _LOGGER.warning('Cannot detect Jenkins job ID') build_tag = "none" return build_tag def generate_final_report(): """ Function will check if partial test results are available and generates final report in rst format. """ path = settings.getValue('RESULTS_PATH') # check if there are any results in rst format rst_results = glob.glob(os.path.join(path, 'result*rst')) pkt_processors = get_vswitch_names(rst_results) if rst_results: try: test_report = os.path.join(path, '{}_{}'.format('_'.join(pkt_processors), _TEMPLATE_RST['final'])) # create report caption directly - it is not worth to execute jinja machinery report_caption = '{}\n{} {}\n{}\n\n'.format( '============================================================', 'Performance report for', ', '.join(pkt_processors), '============================================================') with open(_TEMPLATE_RST['tmp'], 'w') as file_: file_.write(report_caption) retval = subprocess.call('cat {} {} {} {} > {}'.format(_TEMPLATE_RST['tmp'], _TEMPLATE_RST['head'], ' '.join(rst_results), _TEMPLATE_RST['foot'], test_report), shell=True) if retval == 0 and os.path.isfile(test_report): _LOGGER.info('Overall test report written to "%s"', test_report) else: _LOGGER.error('Generation of overall test report has failed.') # remove temporary file os.remove(_TEMPLATE_RST['tmp']) except subprocess.CalledProcessError: _LOGGER.error('Generatrion of overall test report has failed.') def generate_performance_matrix(selected_tests, results_path): """ Loads the results of all the currently run tests, compares them based on the MATRIX_METRIC, outputs and saves the generated table. :selected_tests: list of currently run test :results_path: directory path to the results of current tests """ _LOGGER.info('Performance Matrix:') test_list = [] for test in selected_tests: test_name = test.get('Name', '') test_deployment = test.get('Deployment', '') test_list.append({'test_name':test_name, 'test_deployment':test_deployment, 'csv_data':False}) test_params = {} output = [] all_params = settings.getValue('_PARAMS_LIST') for i in range(len(selected_tests)): test = test_list[i] if isinstance(all_params, list): list_index = i if i >= len(all_params): list_index = len(all_params) - 1 if settings.getValue('CUMULATIVE_PARAMS') and (i > 0): test_params.update(all_params[list_index]) else: test_params = all_params[list_index] else: test_params = all_params settings.setValue('TEST_PARAMS', test_params) test['test_params'] = copy.deepcopy(test_params) try: with open("{}/result_{}_{}_{}.csv".format(results_path, str(i), test['test_name'], test['test_deployment'])) as csvfile: reader = list(csv.DictReader(csvfile)) test['csv_data'] = reader[0] # pylint: disable=broad-except except (Exception) as ex: _LOGGER.error("Result file not found: %s", ex) metric = settings.getValue('MATRIX_METRIC') change = {} output_header = ("ID", "Name", metric, "Change [%]", "Parameters, "\ "CUMULATIVE_PARAMS = {}".format(settings.getValue('CUMULATIVE_PARAMS'))) if not test_list[0]['csv_data'] or float(test_list[0]['csv_data'][metric]) == 0: _LOGGER.error("Incorrect format of test results") return for i, test in enumerate(test_list): if test['csv_data']: change[i] = float(test['csv_data'][metric])/\ (float(test_list[0]['csv_data'][metric]) / 100) - 100 output.append([i, test['test_name'], float(test['csv_data'][metric]), change[i], str(test['test_params'])[1:-1]]) else: change[i] = 0 output.append([i, test['test_name'], "Test Failed", 0, test['test_params']]) print(tabulate(output, headers=output_header, tablefmt="grid", floatfmt="0.3f")) with open(results_path + '/result_performance_matrix.rst', 'w+') as output_file: output_file.write(_TEMPLATE_MATRIX.format(metric, tabulate(output, headers=output_header, tablefmt="rst", floatfmt="0.3f"))) _LOGGER.info('Performance matrix written to: "%s/result_performance_matrix.rst"', results_path) def enable_sriov(nic_list): """ Enable SRIOV for given enhanced PCI IDs :param nic_list: A list of enhanced PCI IDs """ # detect if sriov is required sriov_nic = {} for nic in nic_list: if networkcard.is_sriov_nic(nic): tmp_nic = nic.split('|') if tmp_nic[0] in sriov_nic: if int(tmp_nic[1][2:]) > sriov_nic[tmp_nic[0]]: sriov_nic[tmp_nic[0]] = int(tmp_nic[1][2:]) else: sriov_nic.update({tmp_nic[0] : int(tmp_nic[1][2:])}) # sriov is required for some NICs if sriov_nic: for nic in sriov_nic: # check if SRIOV is supported and enough virt interfaces are available if not networkcard.is_sriov_supported(nic) \ or networkcard.get_sriov_numvfs(nic) <= sriov_nic[nic]: # if not, enable and set appropriate number of VFs if not networkcard.set_sriov_numvfs(nic, sriov_nic[nic] + 1): raise RuntimeError('SRIOV cannot be enabled for NIC {}'.format(nic)) else: _LOGGER.debug("SRIOV enabled for NIC %s", nic) # ensure that path to the bind tool is valid functions.settings_update_paths() # WORKAROUND: it has been observed with IXGBE(VF) driver, # that NIC doesn't correclty dispatch traffic to VFs based # on their MAC address. Unbind and bind to the same driver # solves this issue. networkcard.reinit_vfs(nic) # After SRIOV is enabled it takes some time until network drivers # properly initialize all cards. # Wait also in case, that SRIOV was already configured as it can be # configured automatically just before vsperf execution. time.sleep(2) return True return False def disable_sriov(nic_list): """ Disable SRIOV for given PCI IDs :param nic_list: A list of enhanced PCI IDs """ for nic in nic_list: if networkcard.is_sriov_nic(nic): if not networkcard.set_sriov_numvfs(nic.split('|')[0], 0): raise RuntimeError('SRIOV cannot be disabled for NIC {}'.format(nic)) else: _LOGGER.debug("SRIOV disabled for NIC %s", nic.split('|')[0]) def handle_list_options(args): """ Process --list cli arguments if needed :param args: A dictionary with all CLI arguments """ if args['list_trafficgens']: print(Loader().get_trafficgens_printable()) sys.exit(0) if args['list_collectors']: print(Loader().get_collectors_printable()) sys.exit(0) if args['list_vswitches']: print(Loader().get_vswitches_printable()) sys.exit(0) if args['list_vnfs']: print(Loader().get_vnfs_printable()) sys.exit(0) if args['list_fwdapps']: print(Loader().get_pktfwds_printable()) sys.exit(0) if args['list_loadgens']: print(Loader().get_loadgens_printable()) sys.exit(0) if args['list_pods']: print(Loader().get_pods_printable()) sys.exit(0) if args['list_settings']: print(str(settings)) sys.exit(0) if args['list']: list_testcases(args) sys.exit(0) def list_testcases(args): """ Print list of testcases requested by --list CLI argument :param args: A dictionary with all CLI arguments """ # configure tests if args['integration']: testcases = settings.getValue('INTEGRATION_TESTS') elif args['k8s']: testcases = settings.getValue('K8SPERFORMANCE_TESTS') else: testcases = settings.getValue('PERFORMANCE_TESTS') print("Available Tests:") print("================") for test in testcases: description = functions.format_description(test['Description'], 70) if len(test['Name']) < 40: print('* {:40} {}'.format('{}:'.format(test['Name']), description[0])) else: print('* {}'.format('{}:'.format(test['Name']))) print(' {:40} {}'.format('', description[0])) for i in range(1, len(description)): print(' {:40} {}'.format('', description[i])) def vsperf_finalize(): """ Clean up before exit """ # remove directory if no result files were created try: results_path = settings.getValue('RESULTS_PATH') if os.path.exists(results_path): files_list = os.listdir(results_path) if files_list == []: _LOGGER.info("Removing empty result directory: %s", results_path) shutil.rmtree(results_path) except AttributeError: # skip it if parameter doesn't exist pass # disable SRIOV if needed try: if settings.getValue('SRIOV_ENABLED'): disable_sriov(settings.getValue('WHITELIST_NICS_ORIG')) except AttributeError: # skip it if parameter doesn't exist pass class MockTestCase(unittest.TestCase): """Allow use of xmlrunner to generate Jenkins compatible output without using xmlrunner to actually run tests. Usage: suite = unittest.TestSuite() suite.addTest(MockTestCase('Test1 passed ', True, 'Test1')) suite.addTest(MockTestCase('Test2 failed because...', False, 'Test2')) xmlrunner.XMLTestRunner(...).run(suite) """ def __init__(self, msg, is_pass, test_name): #remember the things self.msg = msg self.is_pass = is_pass #dynamically create a test method with the right name #but point the method at our generic test method setattr(MockTestCase, test_name, self.generic_test) super(MockTestCase, self).__init__(test_name) def generic_test(self): """Provide a generic function that raises or not based on how self.is_pass was set in the constructor""" self.assertTrue(self.is_pass, self.msg) # pylint: disable=too-many-locals, too-many-branches, too-many-statements def main(): """Main function. """ args = parse_arguments() # configure settings settings.load_from_dir(os.path.join(_CURR_DIR, 'conf')) # define the timestamp to be used by logs and results date = datetime.datetime.fromtimestamp(time.time()) timestamp = date.strftime('%Y-%m-%d_%H-%M-%S') settings.setValue('LOG_TIMESTAMP', timestamp) # generate results directory name # integration test use vswitchd log in test step assertions, ensure that # correct value will be set before loading integration test configuration results_dir = "results_" + timestamp results_path = os.path.join(settings.getValue('LOG_DIR'), results_dir) settings.setValue('RESULTS_PATH', results_path) # create results directory if not os.path.exists(results_path): os.makedirs(results_path) # load non performance/integration tests if args['integration']: settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/integration')) if args['k8s']: settings.load_from_dir(os.path.join(_CURR_DIR, 'conf/kubernetes')) # load command line parameters first in case there are settings files # to be used settings.load_from_dict(args) if args['conf_file']: settings.load_from_file(args['conf_file']) if args['load_env']: settings.load_from_env() # reload command line parameters since these should take higher priority # than both a settings file and environment variables settings.load_from_dict(args) settings.setValue('mode', args['mode']) if args['k8s']: settings.setValue('K8S', True) else: settings.setValue('K8S', False) if args['openstack']: result = osdt.deploy_testvnf() if result: _LOGGER.info('TestVNF successfully deployed on Openstack') settings.setValue('mode', 'trafficgen') else: _LOGGER.error('Failed to deploy TestVNF in Openstac') sys.exit(1) # update paths to trafficgens if required if settings.getValue('mode') == 'trafficgen': functions.settings_update_paths() # if required, handle list-* operations handle_list_options(args) configure_logging(settings.getValue('VERBOSITY')) # CI build support _LOGGER.info("Creating result directory: %s", results_path) # check and fix locale check_and_set_locale() # configure trafficgens if args['trafficgen']: trafficgens = Loader().get_trafficgens() if args['trafficgen'] not in trafficgens: _LOGGER.error('There are no trafficgens matching \'%s\' found in' ' \'%s\'. Exiting...', args['trafficgen'], settings.getValue('TRAFFICGEN_DIR')) sys.exit(1) # configuration validity checks if args['vswitch']: vswitch_none = args['vswitch'].strip().lower() == 'none' if vswitch_none: settings.setValue('VSWITCH', 'none') else: vswitches = Loader().get_vswitches() if args['vswitch'] not in vswitches: _LOGGER.error('There are no vswitches matching \'%s\' found in' ' \'%s\'. Exiting...', args['vswitch'], settings.getValue('VSWITCH_DIR')) sys.exit(1) if args['fwdapp']: settings.setValue('PKTFWD', args['fwdapp']) fwdapps = Loader().get_pktfwds() if args['fwdapp'] not in fwdapps: _LOGGER.error('There are no forwarding application' ' matching \'%s\' found in' ' \'%s\'. Exiting...', args['fwdapp'], settings.getValue('PKTFWD_DIR')) sys.exit(1) if args['vnf']: vnfs = Loader().get_vnfs() if args['vnf'] not in vnfs: _LOGGER.error('there are no vnfs matching \'%s\' found in' ' \'%s\'. exiting...', args['vnf'], settings.getValue('VNF_DIR')) sys.exit(1) if args['loadgen']: loadgens = Loader().get_loadgens() if args['loadgen'] not in loadgens: _LOGGER.error('There are no loadgens matching \'%s\' found in' ' \'%s\'. Exiting...', args['loadgen'], settings.getValue('LOADGEN_DIR')) sys.exit(1) if args['exact_test_name'] and args['tests']: _LOGGER.error("Cannot specify tests with both positional args and --test.") sys.exit(1) # modify NIC configuration to decode enhanced PCI IDs wl_nics_orig = list(networkcard.check_pci(pci) for pci in settings.getValue('WHITELIST_NICS')) settings.setValue('WHITELIST_NICS_ORIG', wl_nics_orig) # sriov handling is performed on checked/expanded PCI IDs settings.setValue('SRIOV_ENABLED', enable_sriov(wl_nics_orig)) nic_list = [] for nic in wl_nics_orig: tmp_nic = networkcard.get_nic_info(nic) if tmp_nic: nic_list.append({'pci' : tmp_nic, 'type' : 'vf' if networkcard.get_sriov_pf(tmp_nic) else 'pf', 'mac' : networkcard.get_mac(tmp_nic), 'driver' : networkcard.get_driver(tmp_nic), 'device' : networkcard.get_device_name(tmp_nic)}) else: vsperf_finalize() raise RuntimeError("Invalid network card PCI ID: '{}'".format(nic)) settings.setValue('NICS', nic_list) # for backward compatibility settings.setValue('WHITELIST_NICS', list(nic['pci'] for nic in nic_list)) # pylint: disable=too-many-nested-blocks if settings.getValue('mode') == 'trafficgen': # execute only traffic generator _LOGGER.debug("Executing traffic generator:") loader = Loader() # set traffic details, so they can be passed to traffic ctl traffic = copy.deepcopy(settings.getValue('TRAFFIC')) traffic = functions.check_traffic(traffic) traffic_ctl = component_factory.create_traffic( traffic['traffic_type'], loader.get_trafficgen_class()) with traffic_ctl: traffic_ctl.send_traffic(traffic) _LOGGER.debug("Traffic Results:") traffic_ctl.print_results() # write results into CSV file result_file = os.path.join(results_path, "result.csv") PerformanceTestCase.write_result_to_file(traffic_ctl.get_results(), result_file) else: # configure tests if args['integration']: testcases = settings.getValue('INTEGRATION_TESTS') elif args['k8s']: testcases = settings.getValue('K8SPERFORMANCE_TESTS') else: testcases = settings.getValue('PERFORMANCE_TESTS') if args['exact_test_name']: exact_names = args['exact_test_name'] # positional args => exact matches only selected_tests = [] for test_name in exact_names: for test in testcases: if test['Name'] == test_name: selected_tests.append(test) elif args['tests']: # --tests => apply filter to select requested tests selected_tests = apply_filter(testcases, args['tests']) else: # Default - run all tests selected_tests = testcases if not selected_tests: _LOGGER.error("No tests matched --tests option or positional args. Done.") vsperf_finalize() sys.exit(1) suite = unittest.TestSuite() settings_snapshot = copy.deepcopy(settings.__dict__) for i, cfg in enumerate(selected_tests): settings.setValue('_TEST_INDEX', i) test_name = cfg.get('Name', '') try: test_params = settings.getValue('_PARAMS_LIST') if isinstance(test_params, list): list_index = i if i >= len(test_params): list_index = len(test_params) - 1 test_params = test_params[list_index] if settings.getValue('CUMULATIVE_PARAMS'): test_params = merge_spec(settings.getValue('TEST_PARAMS'), test_params) settings.setValue('TEST_PARAMS', test_params) if args['integration']: test = IntegrationTestCase(cfg) elif args['k8s']: test = K8sPerformanceTestCase(cfg) else: test = PerformanceTestCase(cfg) test.run() suite.addTest(MockTestCase('', True, test.name)) # pylint: disable=broad-except except (Exception) as ex: _LOGGER.exception("Failed to run test: %s", test_name) suite.addTest(MockTestCase(str(ex), False, test_name)) _LOGGER.info("Continuing with next test...") finally: if not settings.getValue('CUMULATIVE_PARAMS'): settings.restore_from_dict(settings_snapshot) settings.restore_from_dict(settings_snapshot) # Generate and printout Performance Matrix if args['matrix']: generate_performance_matrix(selected_tests, results_path) # generate final rst report with results of all executed TCs generate_final_report() if settings.getValue('XUNIT'): xmlrunner.XMLTestRunner( output=settings.getValue('XUNIT_DIR'), outsuffix="", verbosity=0).run(suite) if args['opnfvpod'] or settings.getValue('OPNFVPOD'): pod_name = (args['opnfvpod'] if args['opnfvpod'] else settings.getValue('OPNFVPOD')) installer_name = str(settings.getValue('OPNFV_INSTALLER')).lower() opnfv_url = settings.getValue('OPNFV_URL') pkg_list = settings.getValue('PACKAGE_LIST') int_data = {'pod': pod_name, 'build_tag': get_build_tag(), 'installer': installer_name, 'pkg_list': pkg_list, 'db_url': opnfv_url, # pass vswitch name from configuration to be used for failed # TCs; In case of successful TCs it is safer to use vswitch # name from CSV as TC can override global configuration 'vswitch': str(settings.getValue('VSWITCH')).lower()} tc_names = [tc['Name'] for tc in selected_tests] opnfvdashboard.results2opnfv_dashboard(tc_names, results_path, int_data) # cleanup before exit vsperf_finalize() if __name__ == "__main__": main()