Diffstat (limited to 'tools')
-rw-r--r--   tools/functions.py                                    124
-rw-r--r--   tools/module_manager.py                                50
-rw-r--r--   tools/networkcard.py                                    2
-rw-r--r--   tools/pkt_gen/moongen/moongen.py                      118
-rw-r--r--   tools/pkt_gen/testcenter/testcenter-rfc2889-rest.py   304
-rw-r--r--   tools/pkt_gen/testcenter/testcenter.py                 86
-rw-r--r--   tools/report/report.py                                  5
-rw-r--r--   tools/systeminfo.py                                   104
8 files changed, 671 insertions(+), 122 deletions(-)
diff --git a/tools/functions.py b/tools/functions.py
index 60ed0802..3bd8cc4d 100644
--- a/tools/functions.py
+++ b/tools/functions.py
@@ -15,20 +15,126 @@
"""Various helper functions
"""
-from conf import settings
+import os
+import logging
+import glob
+import shutil
+from conf import settings as S
#
# Support functions
#
def settings_update_paths():
- """ Configure paths to OVS and DPDK based on VSWITCH and VNF values
+ """ Configure paths to OVS, DPDK and QEMU sources and binaries based on
+ selected vswitch type and src/binary switch. Data are taken from
+ PATHS dictionary and after their processing they are stored inside TOOLS.
+ PATHS dictionary has specific section for 'vswitch', 'qemu' and 'dpdk'
+ Following processing is done for every item:
+ item 'type' - string, which defines the type of paths ('src' or 'bin') to be selected
+ for a given section:
+ 'src' means, that VSPERF will use OVS, DPDK or QEMU built from sources
+ e.g. by execution of systems/build_base_machine.sh script during VSPERF
+ installation
+ 'bin' means, that VSPERF will use OVS, DPDK or QEMU binaries installed
+ in the OS, e.g. via OS specific packaging system
+ item 'path' - string with valid path; Its content is checked for existence, prefixed
+ with section name and stored into TOOLS for later use
+ e.g. TOOLS['dpdk_src'] or TOOLS['vswitch_src']
+ item 'modules' - list of strings; Every value from given list is checked for '.ko'
+ suffix. In case it matches and it is not an absolute path to the module, then
+ module name is prefixed with 'path' defined for the same section
+ e.g. TOOLS['vswitch_modules'] = [
+ '/tmp/vsperf/src_vanilla/ovs/ovs/datapath/linux/openvswitch.ko']
+ all other items - string - if given string is a relative path and item 'path'
+ is defined for a given section, then item content will be prefixed with
+ content of the 'path'. Otherwise tool name will be searched within
+ standard system directories. Also any OS filename wildcards will be
+ expanded to the real path. At the end of processing, every absolute
+ path is checked for its existence. In case that temporary path (i.e. path
+ with '_tmp' suffix) doesn't exist, then log will be written and vsperf will
+ continue. If any other path will not exist, then vsperf execution will
+ be terminated with runtime error.
+
+ Note: In case that 'bin' type is set for DPDK, then TOOLS['dpdk_src'] will be set to
+ the value of PATHS['dpdk']['src']['path']. The reason is, that VSPERF uses downloaded
+ DPDK sources to copy DPDK and testpmd into the GUEST, where testpmd is built. In case,
+ that DPDK sources are not available, then vsperf will continue with test execution,
+ but testpmd can't be used as a guest loopback. This is useful in case, that other guest
+ loopback applications (e.g. buildin) are used by CI jobs, etc.
"""
# set dpdk and ovs paths according to VNF and VSWITCH
- if settings.getValue('VSWITCH').endswith('Vanilla'):
- # settings paths for Vanilla
- settings.setValue('OVS_DIR', (settings.getValue('OVS_DIR_VANILLA')))
- else:
- # default - set to VHOST USER but can be changed during enhancement
- settings.setValue('RTE_SDK', (settings.getValue('RTE_SDK_USER')))
- settings.setValue('OVS_DIR', (settings.getValue('OVS_DIR_USER')))
+ paths = {}
+ vswitch_type = S.getValue('PATHS')['vswitch'][S.getValue('VSWITCH')]['type']
+ paths['vswitch'] = S.getValue('PATHS')['vswitch'][S.getValue('VSWITCH')][vswitch_type]
+ paths['dpdk'] = S.getValue('PATHS')['dpdk'][S.getValue('PATHS')['dpdk']['type']]
+ paths['qemu'] = S.getValue('PATHS')['qemu'][S.getValue('PATHS')['qemu']['type']]
+ paths['paths'] = {}
+ paths['paths']['ovs_var_tmp'] = S.getValue('PATHS')['vswitch']['ovs_var_tmp']
+ paths['paths']['ovs_etc_tmp'] = S.getValue('PATHS')['vswitch']['ovs_etc_tmp']
+
+ tools = {}
+ for path_class in paths:
+ for tool in paths[path_class]:
+ tmp_tool = paths[path_class][tool]
+
+ # store valid path of given class into tools dict
+ if tool == 'path':
+ if os.path.isdir(tmp_tool):
+ tools['{}_src'.format(path_class)] = tmp_tool
+ continue
+ else:
+ raise RuntimeError('Path {} does not exist.'.format(tmp_tool))
+
+ # store list of modules of given class into tools dict
+ if tool == 'modules':
+ tmp_modules = []
+ for module in tmp_tool:
+ # add path to the .ko modules and check it for existence
+ if module.endswith('.ko') and not os.path.isabs(module):
+ module = os.path.join(paths[path_class]['path'], module)
+ if not os.path.exists(module):
+ raise RuntimeError('Cannot locate module {}'.format(module))
+
+ tmp_modules.append(module)
+
+ tools['{}_modules'.format(path_class)] = tmp_modules
+ continue
+
+ # if the path to the tool is relative, it will be prefixed with 'path';
+ # in case 'path' is not defined, the tool will be searched for
+ # within standard system paths
+ if not os.path.isabs(tmp_tool):
+ if 'path' in paths[path_class]:
+ tmp_tool = os.path.join(paths[path_class]['path'], tmp_tool)
+ elif shutil.which(tmp_tool):
+ tmp_tool = shutil.which(tmp_tool)
+ else:
+ raise RuntimeError('Cannot locate tool {}'.format(tmp_tool))
+
+ # expand OS wildcards in paths if needed
+ if glob.has_magic(tmp_tool):
+ tmp_glob = glob.glob(tmp_tool)
+ if len(tmp_glob) == 0:
+ raise RuntimeError('Path to the {} is not valid: {}.'.format(tool, tmp_tool))
+ elif len(tmp_glob) > 1:
+ raise RuntimeError('Path to the {} is ambiguous {}'.format(tool, tmp_glob))
+ elif len(tmp_glob) == 1:
+ tmp_tool = tmp_glob[0]
+ elif not os.path.exists(tmp_tool):
+ if tool.endswith('_tmp'):
+ logging.getLogger().debug('Temporary path to the {} does not '
+ 'exist: {}.'.format(tool, tmp_tool))
+ else:
+ raise RuntimeError('Path to the {} is not valid: {}'.format(tool, tmp_tool))
+
+ tools[tool] = tmp_tool
+
+ # ensure that dpdk_src for the 'bin' type is set to the downloaded DPDK sources, so they can
+ # be copied to the guest share dir and used by the GUEST to build and run testpmd
+ # Validity of the path is not checked on purpose, so the user can use VSPERF without
+ # downloading DPDK sources. In that case the guest loopback can't be set to 'testpmd'
+ if S.getValue('PATHS')['dpdk']['type'] == 'bin':
+ tools['dpdk_src'] = S.getValue('PATHS')['dpdk']['src']['path']
+
+ S.setValue('TOOLS', tools)
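
A minimal, hypothetical sketch of the path resolution described in the docstring
above. It is not the VSPERF implementation itself; the section layout ('path',
'modules', tool names) follows the docstring, while the concrete values are
placeholders:

    import glob
    import os
    import shutil

    def resolve_section(name, section):
        """Resolve one PATHS section (e.g. 'vswitch') into TOOLS entries."""
        tools = {}
        base = section.get('path')
        if base:
            tools['{}_src'.format(name)] = base          # e.g. TOOLS['vswitch_src']
        for key, value in section.items():
            if key in ('path', 'modules'):
                continue
            if not os.path.isabs(value):
                # relative tools are prefixed with 'path', otherwise searched in $PATH
                value = os.path.join(base, value) if base else (shutil.which(value) or value)
            if glob.has_magic(value):
                matches = glob.glob(value)
                if len(matches) == 1:                     # ambiguity would be an error
                    value = matches[0]
            tools[key] = value
        return tools

    # illustrative input only; real values come from the VSPERF configuration files
    example = {'path': '/tmp/vsperf/src/ovs/ovs/',
               'ovs-vswitchd': 'vswitchd/ovs-vswitchd',
               'ovs-vsctl': 'utilities/ovs-vsctl'}
    print(resolve_section('vswitch', example))
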
diff --git a/tools/module_manager.py b/tools/module_manager.py
index 2eb4c63d..911f7252 100644
--- a/tools/module_manager.py
+++ b/tools/module_manager.py
@@ -31,30 +31,40 @@ class ModuleManager(object):
"""
self._modules = []
- def insert_module(self, module):
+ def insert_module(self, module, auto_remove=True):
"""Method inserts given module.
In case that module name ends with .ko suffix then insmod will
be used for its insertion. Otherwise modprobe will be called.
:param module: a name of kernel module
+ :param auto_remove: if True (the default), the module will be
+ automatically removed by the remove_modules() method
"""
module_base_name = os.path.basename(os.path.splitext(module)[0])
if self.is_module_inserted(module):
self._logger.info('Module already loaded \'%s\'.', module_base_name)
# add it to internal list, so we can try to remove it at the end
- self._modules.append(module)
+ if auto_remove:
+ self._modules.append(module)
return
try:
if module.endswith('.ko'):
+ # load module dependencies first, but suppress their automatic
+ # removal at the end, just in case the module
+ # depends on a generic module
+ for depmod in self.get_module_dependencies(module):
+ self.insert_module(depmod, auto_remove=False)
+
tasks.run_task(['sudo', 'insmod', module], self._logger,
'Insmod module \'%s\'...' % module_base_name, True)
else:
tasks.run_task(['sudo', 'modprobe', module], self._logger,
'Modprobe module \'%s\'...' % module_base_name, True)
- self._modules.append(module)
+ if auto_remove:
+ self._modules.append(module)
except subprocess.CalledProcessError:
# in case of error, show full module name
self._logger.error('Unable to insert module \'%s\'.', module)
@@ -68,17 +78,6 @@ class ModuleManager(object):
for module in modules:
self.insert_module(module)
- def insert_module_group(self, module_group, path_prefix):
- """Ensure all modules in a group are inserted into the system.
-
- :param module_group: A name of configuration item containing a list
- of module names
- :param path_prefix: A name of directory which contains given
- group of modules
- """
- for (path_suffix, module) in module_group:
- self.insert_module(os.path.join(path_prefix, path_suffix, '%s.ko' % module))
-
def remove_module(self, module):
"""Removes a single module.
@@ -143,3 +142,26 @@ class ModuleManager(object):
return line
return None
+
+ def get_module_dependencies(self, module):
+ """Return a list of modules which must be loaded before the module itself
+
+ :param module: a name of a kernel module
+ :returns: a list of module names in case the module has any dependencies,
+ otherwise an empty list, i.e. []
+ """
+ deps = ''
+ try:
+ # get the list of module dependencies from the kernel
+ deps = subprocess.check_output('modinfo -F depends {}'.format(module),
+ shell=True).decode().rstrip('\n')
+ except subprocess.CalledProcessError:
+ # in case of error, show the full module name...
+ self._logger.info('Unable to get the list of dependencies for module \'%s\'.', module)
+ # ...and try to continue, just in case the dependencies are already loaded
+
+ if len(deps):
+ return deps.split(',')
+ else:
+ return []
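
For reference, a standalone sketch of the dependency lookup added above; it assumes
the 'modinfo' utility (from kmod) is available on the host and the example output
is purely illustrative:

    import subprocess

    def module_dependencies(module):
        """Return the list of modules that must be loaded before 'module'."""
        try:
            deps = subprocess.check_output(['modinfo', '-F', 'depends', module]).decode().strip()
        except subprocess.CalledProcessError:
            return []
        return deps.split(',') if deps else []

    # e.g. module_dependencies('/path/to/openvswitch.ko') might return
    # ['nf_conntrack', 'libcrc32c'], while a module without dependencies returns []
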
diff --git a/tools/networkcard.py b/tools/networkcard.py
index 8d704fd5..945534be 100644
--- a/tools/networkcard.py
+++ b/tools/networkcard.py
@@ -249,7 +249,7 @@ def reinit_vfs(pf_pci_handle):
:param pf_pci_handle: PCI slot identifier of PF with domain part.
"""
- rte_pci_tool = glob.glob(os.path.join(settings.getValue('RTE_SDK'), 'tools', 'dpdk*bind.py'))[0]
+ rte_pci_tool = settings.getValue('TOOLS')['bind-tool']
for vf_nic in get_sriov_vfs_list(pf_pci_handle):
nic_driver = get_driver(vf_nic)
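
The bind tool itself is now resolved by settings_update_paths(); a hypothetical
PATHS entry such as 'bind-tool': 'tools/dpdk*bind.py' under the 'dpdk' section
would be expanded roughly like this (the dpdk source path below is a placeholder):

    import glob
    import os

    dpdk_src = '/tmp/vsperf/src/dpdk/dpdk'          # assumed PATHS['dpdk']['src']['path']
    matches = glob.glob(os.path.join(dpdk_src, 'tools', 'dpdk*bind.py'))
    # settings_update_paths() stores a single match as TOOLS['bind-tool'];
    # zero or multiple matches raise a RuntimeError instead
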
diff --git a/tools/pkt_gen/moongen/moongen.py b/tools/pkt_gen/moongen/moongen.py
index 6ae52f61..fe3aca52 100644
--- a/tools/pkt_gen/moongen/moongen.py
+++ b/tools/pkt_gen/moongen/moongen.py
@@ -308,55 +308,38 @@ class Moongen(ITrafficGenerator):
collected_results = Moongen.run_moongen_and_collect_results(self,
test_run=1)
- total_throughput_rx_fps = (
- float(collected_results[ResultsConstants.THROUGHPUT_RX_FPS]))
-
- total_throughput_rx_mbps = (
- float(collected_results[ResultsConstants.THROUGHPUT_RX_MBPS]))
-
- total_throughput_rx_pct = (
- float(collected_results[ResultsConstants.THROUGHPUT_RX_PERCENT]))
-
- total_throughput_tx_fps = (
- float(collected_results[ResultsConstants.TX_RATE_FPS]))
-
- total_throughput_tx_mbps = (
- float(collected_results[ResultsConstants.TX_RATE_MBPS]))
-
- total_throughput_tx_pct = (
- float(collected_results[ResultsConstants.TX_RATE_PERCENT]))
-
- total_min_latency_ns = 0
- total_max_latency_ns = 0
- total_avg_latency_ns = 0
-
results = OrderedDict()
+
results[ResultsConstants.THROUGHPUT_RX_FPS] = (
- '{:.6f}'.format(total_throughput_rx_fps))
+ '{:.6f}'.format(
+ float(collected_results[ResultsConstants.THROUGHPUT_RX_FPS])))
results[ResultsConstants.THROUGHPUT_RX_MBPS] = (
- '{:.3f}'.format(total_throughput_rx_mbps))
+ '{:.3f}'.format(
+ float(collected_results[ResultsConstants.THROUGHPUT_RX_MBPS])))
results[ResultsConstants.THROUGHPUT_RX_PERCENT] = (
- '{:.3f}'.format(total_throughput_rx_pct))
+ '{:.3f}'.format(
+ float(
+ collected_results[ResultsConstants.THROUGHPUT_RX_PERCENT])))
results[ResultsConstants.TX_RATE_FPS] = (
- '{:.6f}'.format(total_throughput_tx_fps))
+ '{:.6f}'.format(
+ float(collected_results[ResultsConstants.TX_RATE_FPS])))
results[ResultsConstants.TX_RATE_MBPS] = (
- '{:.3f}'.format(total_throughput_tx_mbps))
+ '{:.3f}'.format(
+ float(collected_results[ResultsConstants.TX_RATE_MBPS])))
results[ResultsConstants.TX_RATE_PERCENT] = (
- '{:.3f}'.format(total_throughput_tx_pct))
+ '{:.3f}'.format(
+ float(collected_results[ResultsConstants.TX_RATE_PERCENT])))
- results[ResultsConstants.MIN_LATENCY_NS] = (
- '{:.3f}'.format(total_min_latency_ns))
+ results[ResultsConstants.MIN_LATENCY_NS] = 0
- results[ResultsConstants.MAX_LATENCY_NS] = (
- '{:.3f}'.format(total_max_latency_ns))
+ results[ResultsConstants.MAX_LATENCY_NS] = 0
- results[ResultsConstants.AVG_LATENCY_NS] = (
- '{:.3f}'.format(total_avg_latency_ns))
+ results[ResultsConstants.AVG_LATENCY_NS] = 0
return results
@@ -572,70 +555,75 @@ class Moongen(ITrafficGenerator):
duration=duration,
acceptable_loss_pct=lossrate)
- total_throughput_rx_fps = 0
- total_throughput_rx_mbps = 0
- total_throughput_rx_pct = 0
- total_throughput_tx_fps = 0
- total_throughput_tx_mbps = 0
- total_throughput_tx_pct = 0
- total_min_latency_ns = 0
- total_max_latency_ns = 0
- total_avg_latency_ns = 0
+ # Initialize RFC 2544 throughput specific results
+ results = OrderedDict()
+ results[ResultsConstants.THROUGHPUT_RX_FPS] = 0
+ results[ResultsConstants.THROUGHPUT_RX_MBPS] = 0
+ results[ResultsConstants.THROUGHPUT_RX_PERCENT] = 0
+ results[ResultsConstants.TX_RATE_FPS] = 0
+ results[ResultsConstants.TX_RATE_MBPS] = 0
+ results[ResultsConstants.TX_RATE_PERCENT] = 0
+ results[ResultsConstants.MIN_LATENCY_NS] = 0
+ results[ResultsConstants.MAX_LATENCY_NS] = 0
+ results[ResultsConstants.AVG_LATENCY_NS] = 0
for test_run in range(1, tests+1):
collected_results = (
Moongen.run_moongen_and_collect_results(self, test_run=test_run))
- total_throughput_rx_fps += (
+ results[ResultsConstants.THROUGHPUT_RX_FPS] += (
float(collected_results[ResultsConstants.THROUGHPUT_RX_FPS]))
- total_throughput_rx_mbps += (
+ results[ResultsConstants.THROUGHPUT_RX_MBPS] += (
float(collected_results[ResultsConstants.THROUGHPUT_RX_MBPS]))
- total_throughput_rx_pct += (
+ results[ResultsConstants.THROUGHPUT_RX_PERCENT] += (
float(collected_results[ResultsConstants.THROUGHPUT_RX_PERCENT]))
- total_throughput_tx_fps += (
+ results[ResultsConstants.TX_RATE_FPS] += (
float(collected_results[ResultsConstants.TX_RATE_FPS]))
- total_throughput_tx_mbps += (
+ results[ResultsConstants.TX_RATE_MBPS] += (
float(collected_results[ResultsConstants.TX_RATE_MBPS]))
- total_throughput_tx_pct += (
+ results[ResultsConstants.TX_RATE_PERCENT] += (
float(collected_results[ResultsConstants.TX_RATE_PERCENT]))
- # Latency not supported now, leaving as placeholder
- total_min_latency_ns = 0
- total_max_latency_ns = 0
- total_avg_latency_ns = 0
-
- results = OrderedDict()
results[ResultsConstants.THROUGHPUT_RX_FPS] = (
- '{:.6f}'.format(total_throughput_rx_fps / tests))
+ '{:.6f}'.format(results[ResultsConstants.THROUGHPUT_RX_FPS] /
+ tests))
results[ResultsConstants.THROUGHPUT_RX_MBPS] = (
- '{:.3f}'.format(total_throughput_rx_mbps / tests))
+ '{:.3f}'.format(results[ResultsConstants.THROUGHPUT_RX_MBPS] /
+ tests))
results[ResultsConstants.THROUGHPUT_RX_PERCENT] = (
- '{:.3f}'.format(total_throughput_rx_pct / tests))
+ '{:.3f}'.format(results[ResultsConstants.THROUGHPUT_RX_PERCENT] /
+ tests))
results[ResultsConstants.TX_RATE_FPS] = (
- '{:.6f}'.format(total_throughput_tx_fps / tests))
+ '{:.6f}'.format(results[ResultsConstants.TX_RATE_FPS] /
+ tests))
results[ResultsConstants.TX_RATE_MBPS] = (
- '{:.3f}'.format(total_throughput_tx_mbps / tests))
+ '{:.3f}'.format(results[ResultsConstants.TX_RATE_MBPS] /
+ tests))
results[ResultsConstants.TX_RATE_PERCENT] = (
- '{:.3f}'.format(total_throughput_tx_pct / tests))
+ '{:.3f}'.format(results[ResultsConstants.TX_RATE_PERCENT] /
+ tests))
results[ResultsConstants.MIN_LATENCY_NS] = (
- '{:.3f}'.format(total_min_latency_ns / tests))
+ '{:.3f}'.format(results[ResultsConstants.MIN_LATENCY_NS] /
+ tests))
results[ResultsConstants.MAX_LATENCY_NS] = (
- '{:.3f}'.format(total_max_latency_ns / tests))
+ '{:.3f}'.format(results[ResultsConstants.MAX_LATENCY_NS] /
+ tests))
results[ResultsConstants.AVG_LATENCY_NS] = (
- '{:.3f}'.format(total_avg_latency_ns / tests))
+ '{:.3f}'.format(results[ResultsConstants.AVG_LATENCY_NS] /
+ tests))
return results
@@ -671,6 +659,7 @@ class Moongen(ITrafficGenerator):
Back to Back Count (frames), Frame Loss (frames), Frame Loss (%)
:rtype: :class:`Back2BackResult`
"""
+ self._logger.info("In moongen send_rfc2544_back2back method")
self._params.clear()
self._params['traffic'] = self.traffic_defaults.copy()
@@ -683,6 +672,7 @@ class Moongen(ITrafficGenerator):
duration=duration,
acceptable_loss_pct=lossrate)
+ # Initialize RFC 2544 B2B specific results
results = OrderedDict()
results[ResultsConstants.B2B_RX_FPS] = 0
results[ResultsConstants.B2B_TX_FPS] = 0
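
The RFC 2544 throughput refactoring above folds the per-run totals directly into
the results dictionary; a minimal sketch of that accumulate-then-average pattern
(keys and values are illustrative only):

    from collections import OrderedDict

    runs = [{'rx_fps': 100.0, 'tx_fps': 110.0},
            {'rx_fps': 102.0, 'tx_fps': 108.0}]
    results = OrderedDict((key, 0) for key in ('rx_fps', 'tx_fps'))
    for run in runs:
        for key in results:
            results[key] += float(run[key])
    for key in results:
        results[key] = '{:.6f}'.format(results[key] / len(runs))
    # results == OrderedDict([('rx_fps', '101.000000'), ('tx_fps', '109.000000')])
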
diff --git a/tools/pkt_gen/testcenter/testcenter-rfc2889-rest.py b/tools/pkt_gen/testcenter/testcenter-rfc2889-rest.py
new file mode 100644
index 00000000..cfa425e8
--- /dev/null
+++ b/tools/pkt_gen/testcenter/testcenter-rfc2889-rest.py
@@ -0,0 +1,304 @@
+# Copyright 2016 Spirent Communications.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+@author Spirent Communications
+
+This script automates RFC2889 tests using the Spirent
+TestCenter REST APIs. It supports Python 3.4
+
+'''
+import argparse
+import logging
+import os
+
+# Logger Configuration
+logger = logging.getLogger(__name__)
+
+
+def create_dir(path):
+ """Create the directory as specified in path """
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ logger.error("Failed to create directory %s: %s", path, str(e))
+ raise
+
+
+def write_query_results_to_csv(results_path, csv_results_file_prefix,
+ query_results):
+ """ Write the results of the query to the CSV """
+ create_dir(results_path)
+ filec = os.path.join(results_path, csv_results_file_prefix + ".csv")
+ with open(filec, "w") as f:
+ f.write(query_results["Columns"].replace(" ", ",") + "\n")
+ for row in (query_results["Output"].replace("} {", ",").
+ replace("{", "").replace("}", "").split(",")):
+ f.write(row.replace(" ", ",") + "\n")
+
+
+def positive_int(value):
+ """ Positive Integer type for Arguments """
+ ivalue = int(value)
+ if ivalue <= 0:
+ raise argparse.ArgumentTypeError(
+ "%s is an invalid positive int value" % value)
+ return ivalue
+
+
+def percent_float(value):
+ """ Floating type for Arguments """
+ pvalue = float(value)
+ if pvalue < 0.0 or pvalue > 100.0:
+ raise argparse.ArgumentTypeError(
+ "%s not in range [0.0, 100.0]" % pvalue)
+ return pvalue
+
+
+def main():
+ """ Read the arguments, Invoke Test and Return the results"""
+ parser = argparse.ArgumentParser()
+ # Required parameters
+ required_named = parser.add_argument_group("required named arguments")
+ required_named.add_argument("--lab_server_addr",
+ required=True,
+ help=("The IP address of the "
+ "Spirent Lab Server"),
+ dest="lab_server_addr")
+ required_named.add_argument("--license_server_addr",
+ required=True,
+ help=("The IP address of the Spirent "
+ "License Server"),
+ dest="license_server_addr")
+ required_named.add_argument("--location_list",
+ required=True,
+ help=("A comma-delimited list of test port "
+ "locations"),
+ dest="location_list")
+ # Optional parameters
+ optional_named = parser.add_argument_group("optional named arguments")
+ optional_named.add_argument("--metric",
+ required=False,
+ help=("One among - Forwarding,\
+ Address Caching and Congestion"),
+ choices=["forwarding", "caching",
+ "congestion"],
+ default="forwarding",
+ dest="metric")
+ optional_named.add_argument("--test_session_name",
+ required=False,
+ default="Rfc2889Ses",
+ help=("The friendly name to identify "
+ "the Spirent Lab Server test session"),
+ dest="test_session_name")
+
+ optional_named.add_argument("--test_user_name",
+ required=False,
+ default="Rfc2889Usr",
+ help=("The friendly name to identify the "
+ "Spirent Lab Server test user"),
+ dest="test_user_name")
+ optional_named.add_argument("--results_dir",
+ required=False,
+ default="./Results",
+ help="The directory to copy results to",
+ dest="results_dir")
+ optional_named.add_argument("--csv_results_file_prefix",
+ required=False,
+ default="Rfc2889MaxFor",
+ help="The prefix for the CSV results files",
+ dest="csv_results_file_prefix")
+ optional_named.add_argument("--num_trials",
+ type=positive_int,
+ required=False,
+ default=1,
+ help=("The number of trials to execute during "
+ "the test"),
+ dest="num_trials")
+ optional_named.add_argument("--trial_duration_sec",
+ type=positive_int,
+ required=False,
+ default=60,
+ help=("The duration of each trial executed "
+ "during the test"),
+ dest="trial_duration_sec")
+ optional_named.add_argument("--traffic_pattern",
+ required=False,
+ choices=["BACKBONE", "MESH", "PAIR"],
+ default="MESH",
+ help="The traffic pattern between endpoints",
+ dest="traffic_pattern")
+ optional_named.add_argument("--frame_size_list",
+ type=lambda s: [int(item)
+ for item in s.split(',')],
+ required=False,
+ default=[256],
+ help="A comma-delimited list of frame sizes",
+ dest="frame_size_list")
+ parser.add_argument("-v",
+ "--verbose",
+ required=False,
+ default=False,
+ help="More output during operation when present",
+ action="store_true",
+ dest="verbose")
+ args = parser.parse_args()
+
+ if args.verbose:
+ logger.debug("Creating results directory")
+ create_dir(args.results_dir)
+ locationList = [str(item) for item in args.location_list.split(',')]
+
+ session_name = args.test_session_name
+ user_name = args.test_user_name
+
+ try:
+ # Load Spirent REST Library
+ from stcrestclient import stchttp
+
+ stc = stchttp.StcHttp(args.lab_server_addr)
+ session_id = stc.new_session(user_name, session_name)
+ stc.join_session(session_id)
+ except RuntimeError as e:
+ logger.error(e)
+ raise
+
+ # Retrieve and display the server information
+ if args.verbose:
+ logger.debug("SpirentTestCenter system version: %s",
+ stc.get("system1", "version"))
+
+ try:
+ if args.verbose:
+ logger.debug("Bring up license server")
+ license_mgr = stc.get("system1", "children-licenseservermanager")
+ if args.verbose:
+ logger.debug("license_mgr = %s", license_mgr)
+ stc.create("LicenseServer", under=license_mgr, attributes={
+ "server": args.license_server_addr})
+
+ # Create the root project object
+ if args.verbose:
+ logger.debug("Creating project ...")
+ project = stc.get("System1", "children-Project")
+
+ # Create ports
+ if args.verbose:
+ logger.debug("Creating ports ...")
+
+ for location in locationList:
+ stc.perform("CreateAndReservePorts", params={"locationList":
+ location,
+ "RevokeOwner":
+ "FALSE"})
+
+ port_list_get = stc.get("System1.project", "children-port")
+
+ if args.verbose:
+ logger.debug("Adding Host Gen PArams")
+ gen_params = stc.create("EmulatedDeviceGenParams",
+ under=project,
+ attributes={"Port": port_list_get})
+
+ # Create the DeviceGenEthIIIfParams object
+ stc.create("DeviceGenEthIIIfParams",
+ under=gen_params)
+ # Configuring Ipv4 interfaces
+ stc.create("DeviceGenIpv4IfParams",
+ under=gen_params)
+
+ stc.perform("DeviceGenConfigExpand",
+ params={"DeleteExisting": "No",
+ "GenParams": gen_params})
+
+ if args.verbose:
+ logger.debug("Set up the RFC2889 Forwarding test...")
+ stc.perform("Rfc2889SetupMaxForwardingRateTestCommand",
+ params={"Duration": args.trial_duration_sec,
+ "FrameSizeList": args.frame_size_list,
+ "NumOfTrials": args.num_trials,
+ "TrafficPattern": args.traffic_pattern})
+
+ # Save the configuration
+ stc.perform("SaveToTcc", params={"Filename": "2889.tcc"})
+ # Connect to the hardware...
+ stc.perform("AttachPorts", params={"portList": stc.get(
+ "system1.project", "children-port"), "autoConnect": "TRUE"})
+ # Apply configuration.
+ if args.verbose:
+ logger.debug("Apply configuration...")
+ stc.apply()
+
+ if args.verbose:
+ logger.debug("Starting the sequencer...")
+ stc.perform("SequencerStart")
+
+ # Wait for sequencer to finish
+ logger.info(
+ "Starting test... Please wait for the test to complete...")
+ stc.wait_until_complete()
+ logger.info("The test has completed... Saving results...")
+
+ # Determine what the results database filename is...
+ lab_server_resultsdb = stc.get(
+ "system1.project.TestResultSetting", "CurrentResultFileName")
+
+ if args.verbose:
+ logger.debug("The lab server results database is %s",
+ lab_server_resultsdb)
+
+ stc.perform("CSSynchronizeFiles",
+ params={"DefaultDownloadDir": args.results_dir})
+
+ resultsdb = args.results_dir + \
+ lab_server_resultsdb.split("/Results")[1]
+
+ logger.info(
+ "The local summary DB file has been saved to %s", resultsdb)
+
+ resultsdict = (
+ stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString":
+ resultsdb,
+ "ResultPath":
+ ("RFC2889MaxForwardingRateTestResultDetailed"
+ "SummaryView")}))
+ if args.verbose:
+ logger.debug("resultsdict[\"Columns\"]: %s",
+ resultsdict["Columns"])
+ logger.debug("resultsdict[\"Output\"]: %s", resultsdict["Output"])
+ logger.debug("Result paths: %s",
+ stc.perform("GetTestResultSettingPaths"))
+
+ # Write results to csv
+ if args.verbose:
+ logger.debug("Writing CSV file to results directory %s",
+ args.results_dir)
+ write_query_results_to_csv(
+ args.results_dir, args.csv_results_file_prefix, resultsdict)
+
+ except RuntimeError as err:
+ logger.error(err)
+
+ if args.verbose:
+ logger.debug("Destroy session on lab server")
+
+ stc.end_session()
+
+ logger.info("Test complete!")
+
+if __name__ == "__main__":
+ main()
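
For orientation, the shape of the data handled by write_query_results_to_csv()
above; the column names below are only illustrative and do not come from a real
result database:

    # hypothetical QueryResult payload returned by stc.perform("QueryResult", ...)
    query_results = {
        "Columns": "PortName FrameSize OfferedLoad(fps) RxFrameCount",
        "Output": "{Port1 256 1000 59999} {Port2 256 1000 60001}",
    }
    # the resulting CSV rows:
    #   PortName,FrameSize,OfferedLoad(fps),RxFrameCount
    #   Port1,256,1000,59999
    #   Port2,256,1000,60001
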
diff --git a/tools/pkt_gen/testcenter/testcenter.py b/tools/pkt_gen/testcenter/testcenter.py
index a6cea2e1..701d451c 100644
--- a/tools/pkt_gen/testcenter/testcenter.py
+++ b/tools/pkt_gen/testcenter/testcenter.py
@@ -115,6 +115,25 @@ def get_rfc2544_custom_settings(framesize, custom_tr, tests):
return args
+def get_rfc2889_settings(framesize, tests, duration):
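+ """ Return the command line arguments used to launch the external RFC2889 test script """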
+ args = [settings.getValue("TRAFFICGEN_STC_PYTHON2_PATH"),
+ os.path.join(
+ settings.getValue("TRAFFICGEN_STC_TESTCENTER_PATH"),
+ settings.getValue(
+ "TRAFFICGEN_STC_RFC2889_TEST_FILE_NAME")),
+ "--lab_server_addr",
+ settings.getValue("TRAFFICGEN_STC_LAB_SERVER_ADDR"),
+ "--license_server_addr",
+ settings.getValue("TRAFFICGEN_STC_LICENSE_SERVER_ADDR"),
+ "--location_list",
+ settings.getValue("TRAFFICGEN_STC_RFC2889_LOCATIONS"),
+ "--frame_size_list",
+ str(framesize),
+ "--num_trials",
+ str(tests)]
+ return args
+
+
class TestCenter(trafficgen.ITrafficGenerator):
"""
Spirent TestCenter
@@ -139,6 +158,48 @@ class TestCenter(trafficgen.ITrafficGenerator):
"""
return None
+ def send_rfc2889_congestion(self, traffic=None, tests=1, duration=20):
+ """
+ Do nothing.
+ """
+ return None
+
+ def send_rfc2889_caching(self, traffic=None, tests=1, duration=20):
+ """
+ Do nothing.
+ """
+ return None
+
+ def get_rfc2889_results(self, filename):
+ """
+ Reads the CSV file and return the results
+ """
+ result = {}
+ with open(filename, "r") as csvfile:
+ csvreader = csv.DictReader(csvfile)
+ for row in csvreader:
+ self._logger.info("Row: %s", row)
+ duration = int((float(row["TxSignatureFrameCount"])) /
+ (float(row["OfferedLoad(fps)"])))
+ tx_fps = (float(row["OfferedLoad(fps)"]))
+ rx_fps = float((float(row["RxFrameCount"])) /
+ float(duration))
+ tx_mbps = ((tx_fps * float(row["FrameSize"])) /
+ (1000000.0))
+ rx_mbps = ((rx_fps * float(row["FrameSize"])) /
+ (1000000.0))
+ result[ResultsConstants.TX_RATE_FPS] = tx_fps
+ result[ResultsConstants.THROUGHPUT_RX_FPS] = rx_fps
+ result[ResultsConstants.TX_RATE_MBPS] = tx_mbps
+ result[ResultsConstants.THROUGHPUT_RX_MBPS] = rx_mbps
+ result[ResultsConstants.TX_RATE_PERCENT] = float(
+ row["OfferedLoad(%)"])
+ result[ResultsConstants.FRAME_LOSS_PERCENT] = float(
+ row["PercentFrameLoss(%)"])
+ result[ResultsConstants.FORWARDING_RATE_FPS] = float(
+ row["ForwardingRate(fps)"])
+ return result
+
def get_rfc2544_results(self, filename):
"""
Reads the CSV file and return the results
@@ -211,6 +272,31 @@ class TestCenter(trafficgen.ITrafficGenerator):
return self.get_rfc2544_results(filec)
+ def send_rfc2889_forwarding(self, traffic=None, tests=1, duration=20):
+ """
+ Send traffic per RFC2889 forwarding test specifications.
+ """
+ framesize = settings.getValue("TRAFFICGEN_STC_FRAME_SIZE")
+ if traffic and 'l2' in traffic:
+ if 'framesize' in traffic['l2']:
+ framesize = traffic['l2']['framesize']
+ args = get_rfc2889_settings(framesize, tests, duration)
+ verbose = settings.getValue("TRAFFICGEN_STC_VERBOSE") == "True"
+ if verbose:
+ args.append("--verbose")
+ self._logger.debug("Arguments used to call test: %s", args)
+ subprocess.check_call(args)
+
+ filec = os.path.join(settings.getValue("TRAFFICGEN_STC_RESULTS_DIR"),
+ settings.getValue(
+ "TRAFFICGEN_STC_CSV_RESULTS_FILE_PREFIX") +
+ ".csv")
+
+ if verbose:
+ self._logger.debug("file: %s", filec)
+
+ return self.get_rfc2889_results(filec)
+
def send_rfc2544_throughput(self, traffic=None, tests=1, duration=20,
lossrate=0.0):
"""
diff --git a/tools/report/report.py b/tools/report/report.py
index 7d991011..1115f052 100644
--- a/tools/report/report.py
+++ b/tools/report/report.py
@@ -20,8 +20,8 @@ Generate reports in format defined by X.
import sys
import os
-import jinja2
import logging
+import jinja2
from core.results.results_constants import ResultsConstants
from conf import settings as S
@@ -64,7 +64,8 @@ def _get_env(result):
if result[ResultsConstants.DEPLOYMENT].count('v'):
env.update({'vnf': systeminfo.get_version(S.getValue('VNF')),
'guest_image': S.getValue('GUEST_IMAGE'),
- 'loopback_app': list(map(systeminfo.get_version, S.getValue('GUEST_LOOPBACK'))),
+ 'loopback_app': list(map(systeminfo.get_loopback_version,
+ S.getValue('GUEST_LOOPBACK'))),
})
return env
diff --git a/tools/systeminfo.py b/tools/systeminfo.py
index 50dc17e0..fb1616d4 100644
--- a/tools/systeminfo.py
+++ b/tools/systeminfo.py
@@ -19,6 +19,7 @@ import os
import platform
import subprocess
import locale
+import re
from conf import settings as S
@@ -176,6 +177,40 @@ def pid_isalive(pid):
"""
return os.path.isdir('/proc/' + str(pid))
+def get_bin_version(binary, regex):
+ """ get version of given binary selected by given regex
+
+ :returns: version string or None
+ """
+ try:
+ output = subprocess.check_output(binary, shell=True).decode().rstrip('\n')
+ except subprocess.CalledProcessError:
+ return None
+
+ versions = re.findall(regex, output)
+ if len(versions):
+ return versions[0]
+ else:
+ return None
+
+def get_git_tag(path):
+ """ get tag of recent commit from repository located at 'path'
+
+ :returns: git tag in form of string with commit hash or None if there
+ isn't any git repository at given path
+ """
+ try:
+ if os.path.isdir(path):
+ return subprocess.check_output('cd {}; git rev-parse HEAD'.format(path), shell=True,
+ stderr=subprocess.DEVNULL).decode().rstrip('\n')
+ elif os.path.isfile(path):
+ return subprocess.check_output('cd $(dirname {}); git log -1 --pretty="%H" {}'.format(path, path),
+ shell=True, stderr=subprocess.DEVNULL).decode().rstrip('\n')
+ else:
+ return None
+ except subprocess.CalledProcessError:
+ return None
+
# This function uses a long if/elif chain on purpose, so let us suppress the pylint warning too-many-branches
# pylint: disable=R0912
def get_version(app_name):
@@ -186,45 +221,38 @@ def get_version(app_name):
"""
app_version_file = {
- 'ovs' : os.path.join(S.getValue('OVS_DIR'), 'include/openvswitch/version.h'),
- 'dpdk' : os.path.join(S.getValue('RTE_SDK'), 'lib/librte_eal/common/include/rte_version.h'),
- 'qemu' : os.path.join(S.getValue('QEMU_DIR'), 'VERSION'),
- 'l2fwd' : os.path.join(S.getValue('ROOT_DIR'), 'src/l2fwd/l2fwd.c'),
+ 'ovs' : r'Open vSwitch\) ([0-9.]+)',
+ 'testpmd' : r'RTE Version: \'\S+ ([0-9.]+)',
+ 'qemu' : r'QEMU emulator version ([0-9.]+)',
+ 'loopback_l2fwd' : os.path.join(S.getValue('ROOT_DIR'), 'src/l2fwd/l2fwd.c'),
+ 'loopback_testpmd' : os.path.join(S.getValue('TOOLS')['dpdk_src'],
+ 'lib/librte_eal/common/include/rte_version.h'),
'ixnet' : os.path.join(S.getValue('TRAFFICGEN_IXNET_LIB_PATH'), 'pkgIndex.tcl'),
}
- def get_git_tag(path):
- """ get tag of recent commit from repository located at 'path'
-
- :returns: git tag in form of string with commit hash or None if there
- isn't any git repository at given path
- """
- try:
- if os.path.isdir(path):
- return subprocess.check_output('cd {}; git rev-parse HEAD'.format(path), shell=True,
- stderr=subprocess.DEVNULL).decode().rstrip('\n')
- elif os.path.isfile(path):
- return subprocess.check_output('cd $(dirname {}); git log -1 --pretty="%H" {}'.format(path, path),
- shell=True, stderr=subprocess.DEVNULL).decode().rstrip('\n')
- else:
- return None
- except subprocess.CalledProcessError:
- return None
-
app_version = None
app_git_tag = None
if app_name.lower().startswith('ovs'):
- app_version = match_line(app_version_file['ovs'], '#define OVS_PACKAGE_VERSION')
- if app_version:
- app_version = app_version.split('"')[1]
- app_git_tag = get_git_tag(S.getValue('OVS_DIR'))
+ app_version = get_bin_version('{} --version'.format(S.getValue('TOOLS')['ovs-vswitchd']),
+ app_version_file['ovs'])
+ if 'vswitch_src' in S.getValue('TOOLS'):
+ app_git_tag = get_git_tag(S.getValue('TOOLS')['vswitch_src'])
elif app_name.lower() in ['dpdk', 'testpmd']:
+ app_version = get_bin_version('{} -v -h'.format(S.getValue('TOOLS')['testpmd']),
+ app_version_file['testpmd'])
+ # we have to consult the PATHS settings to be sure that dpdk/testpmd
+ # were built from the sources
+ if S.getValue('PATHS')[app_name.lower()]['type'] == 'src':
+ app_git_tag = get_git_tag(S.getValue('TOOLS')['dpdk_src'])
+ elif app_name.lower() == 'loopback_testpmd':
+ # testpmd inside the guest is compiled from the downloaded sources
+ # stored in the TOOLS['dpdk_src'] directory
tmp_ver = ['', '', '']
dpdk_16 = False
- with open(app_version_file['dpdk']) as file_:
+ with open(app_version_file['loopback_testpmd']) as file_:
for line in file_:
if not line.strip():
continue
@@ -263,10 +291,12 @@ def get_version(app_name):
if len(tmp_ver[0]):
app_version = '.'.join(tmp_ver)
- app_git_tag = get_git_tag(S.getValue('RTE_SDK'))
+ app_git_tag = get_git_tag(S.getValue('TOOLS')['dpdk_src'])
elif app_name.lower().startswith('qemu'):
- app_version = match_line(app_version_file['qemu'], '')
- app_git_tag = get_git_tag(S.getValue('QEMU_DIR'))
+ app_version = get_bin_version('{} --version'.format(S.getValue('TOOLS')['qemu-system']),
+ app_version_file['qemu'])
+ if 'qemu_src' in S.getValue('TOOLS'):
+ app_git_tag = get_git_tag(S.getValue('TOOLS')['qemu_src'])
elif app_name.lower() == 'ixnet':
app_version = match_line(app_version_file['ixnet'], 'package provide IxTclNetwork')
if app_version:
@@ -283,13 +313,23 @@ def get_version(app_name):
elif app_name.lower() == 'vswitchperf':
app_git_tag = get_git_tag(S.getValue('ROOT_DIR'))
elif app_name.lower() == 'l2fwd':
- app_version = match_line(app_version_file[app_name], 'MODULE_VERSION')
+ app_version = match_line(app_version_file['loopback_l2fwd'], 'MODULE_VERSION')
if app_version:
app_version = app_version.split('"')[1]
- app_git_tag = get_git_tag(app_version_file[app_name])
+ app_git_tag = get_git_tag(app_version_file['loopback_l2fwd'])
elif app_name.lower() in ['linux_bridge', 'buildin']:
# without login into running VM, it is not possible to check bridge_utils version
app_version = 'NA'
app_git_tag = 'NA'
return {'name' : app_name, 'version' : app_version, 'git_tag' : app_git_tag}
+
+def get_loopback_version(loopback_app_name):
+ """ Get version of given guest loopback application and its git tag
+
+ :returns: dictionary {'name' : app_name, 'version' : app_version, 'git_tag' : app_git_tag}; in case
+ the version or git tag is not known or not applicable, None is returned for that value
+ """
+ version = get_version("loopback_{}".format(loopback_app_name))
+ version['name'] = loopback_app_name
+ return version
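
An illustrative use of the new helpers (it assumes a configured VSPERF environment,
and the path passed to get_git_tag() is a placeholder):

    from tools.systeminfo import get_bin_version, get_git_tag

    # 'ovs-vswitchd (Open vSwitch) 2.5.0' -> '2.5.0'
    version = get_bin_version('ovs-vswitchd --version', r'Open vSwitch\) ([0-9.]+)')

    # commit hash of the working copy, or None if it is not a git repository
    commit = get_git_tag('/tmp/vsperf/src/ovs/ovs')
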