Diffstat (limited to 'tools/pkt_gen/testcenter')
-rw-r--r--  tools/pkt_gen/testcenter/testcenter-rfc2544-rest.py | 558
-rw-r--r--  tools/pkt_gen/testcenter/testcenter.py              | 183
2 files changed, 713 insertions(+), 28 deletions(-)
diff --git a/tools/pkt_gen/testcenter/testcenter-rfc2544-rest.py b/tools/pkt_gen/testcenter/testcenter-rfc2544-rest.py
new file mode 100644
index 00000000..13f68fb7
--- /dev/null
+++ b/tools/pkt_gen/testcenter/testcenter-rfc2544-rest.py
@@ -0,0 +1,558 @@
+# Copyright 2016 Spirent Communications.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+@author Spirent Communications
+
+This script automates RFC 2544 tests using the Spirent
+TestCenter REST APIs. It supports Python 3.4.
+
+'''
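+
+# A minimal stand-alone invocation might look like the sketch below. Every
+# address, slot and port number here is a placeholder, not a value taken
+# from this repository; the script is normally launched by the testcenter.py
+# wrapper using the interpreter configured in TRAFFICGEN_STC_PYTHON2_PATH.
+#
+#   python testcenter-rfc2544-rest.py \
+#       --lab_server_addr 10.1.1.10 \
+#       --license_server_addr 10.1.1.11 \
+#       --east_chassis_addr 10.1.1.20 --east_slot_num 1 --east_port_num 1 \
+#       --west_chassis_addr 10.1.1.20 --west_slot_num 1 --west_port_num 2 \
+#       --metric throughput --frame_size_list 64,128,256,512 --verbose
+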
+import argparse
+import logging
+import os
+
+# Load Spirent REST Library
+from stcrestclient import stchttp
+
+logger = logging.getLogger(__name__)
+
+
+def create_dir(path):
+ """Create the directory as specified in path """
+ if not os.path.exists(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ logger.error("Failed to create directory %s: %s", path, str(e))
+ raise
+
+
+def write_query_results_to_csv(results_path, csv_results_file_prefix,
+ query_results):
+ """ Write the results of the query to the CSV """
+ create_dir(results_path)
+ filec = os.path.join(results_path, csv_results_file_prefix + ".csv")
+    with open(filec, "w") as f:
+ f.write(query_results["Columns"].replace(" ", ",") + "\n")
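+        # The loop below assumes (based on the string handling here, not on
+        # any documented Spirent format) that "Output" packs each result row
+        # in braces with space-separated values, e.g.
+        # "{64 99.82 ...} {128 99.51 ...}".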
+ for row in (query_results["Output"].replace("} {", ",").
+ replace("{", "").replace("}", "").split(",")):
+ f.write(row.replace(" ", ",") + "\n")
+
+
+def positive_int(value):
+ """ Positive Integer type for Arguments """
+ ivalue = int(value)
+ if ivalue <= 0:
+ raise argparse.ArgumentTypeError(
+ "%s is an invalid positive int value" % value)
+ return ivalue
+
+
+def percent_float(value):
+ """ Floating type for Arguments """
+ pvalue = float(value)
+ if pvalue < 0.0 or pvalue > 100.0:
+ raise argparse.ArgumentTypeError(
+ "%s not in range [0.0, 100.0]" % pvalue)
+ return pvalue
+
+
+def main():
+ """ Read the arguments, Invoke Test and Return the results"""
+ parser = argparse.ArgumentParser()
+ # Required parameters
+ required_named = parser.add_argument_group("required named arguments")
+ required_named.add_argument("--lab_server_addr",
+ required=True,
+                                help=("The IP address of the "
+                                      "Spirent Lab Server"),
+ dest="lab_server_addr")
+ required_named.add_argument("--license_server_addr",
+ required=True,
+                                help=("The IP address of the Spirent "
+                                      "License Server"),
+ dest="license_server_addr")
+ required_named.add_argument("--east_chassis_addr",
+ required=True,
+                                help=("The TestCenter chassis IP address to "
+                                      "use for the east test port"),
+ dest="east_chassis_addr")
+ required_named.add_argument("--east_slot_num",
+ type=positive_int,
+ required=True,
+                                help=("The TestCenter slot number to "
+                                      "use for the east test port"),
+ dest="east_slot_num")
+ required_named.add_argument("--east_port_num",
+ type=positive_int,
+ required=True,
+                                help=("The TestCenter port number to use "
+                                      "for the east test port"),
+ dest="east_port_num")
+ required_named.add_argument("--west_chassis_addr",
+ required=True,
+                                help=("The TestCenter chassis IP address "
+                                      "to use for the west test port"),
+ dest="west_chassis_addr")
+ required_named.add_argument("--west_slot_num",
+ type=positive_int,
+ required=True,
+                                help=("The TestCenter slot number to use "
+                                      "for the west test port"),
+ dest="west_slot_num")
+ required_named.add_argument("--west_port_num",
+ type=positive_int,
+ required=True,
+                                help=("The TestCenter port number to "
+                                      "use for the west test port"),
+ dest="west_port_num")
+ # Optional parameters
+ optional_named = parser.add_argument_group("optional named arguments")
+ optional_named.add_argument("--metric",
+ required=False,
+                                help=("One of throughput, latency, "
+                                      "backtoback and frameloss"),
+ choices=["throughput", "latency",
+ "backtoback", "frameloss"],
+ default="throughput",
+ dest="metric")
+ optional_named.add_argument("--test_session_name",
+ required=False,
+ default="RFC2544 East-West Throughput",
+                                help=("The friendly name to identify "
+                                      "the Spirent Lab Server test session"),
+ dest="test_session_name")
+
+ optional_named.add_argument("--test_user_name",
+ required=False,
+ default="RFC2544 East-West User",
+                                help=("The friendly name to identify the "
+                                      "Spirent Lab Server test user"),
+ dest="test_user_name")
+ optional_named.add_argument("--results_dir",
+ required=False,
+ default="./Results",
+ help="The directory to copy results to",
+ dest="results_dir")
+ optional_named.add_argument("--csv_results_file_prefix",
+ required=False,
+ default="Rfc2544Tput",
+ help="The prefix for the CSV results files",
+ dest="csv_results_file_prefix")
+ optional_named.add_argument("--num_trials",
+ type=positive_int,
+ required=False,
+ default=1,
+                                help=("The number of trials to execute "
+                                      "during the test"),
+ dest="num_trials")
+ optional_named.add_argument("--trial_duration_sec",
+ type=positive_int,
+ required=False,
+ default=60,
+                                help=("The duration of each trial executed "
+                                      "during the test"),
+ dest="trial_duration_sec")
+ optional_named.add_argument("--traffic_pattern",
+ required=False,
+ choices=["BACKBONE", "MESH", "PAIR"],
+ default="PAIR",
+ help="The traffic pattern between endpoints",
+ dest="traffic_pattern")
+ optional_named.add_argument("--search_mode",
+ required=False,
+ choices=["COMBO", "STEP", "BINARY"],
+ default="BINARY",
+                                help=("The search mode used to find the "
+                                      "throughput rate"),
+ dest="search_mode")
+    optional_named.add_argument("--learning_mode",
+                                required=False,
+                                choices=["AUTO", "L2_LEARNING",
+                                         "L3_LEARNING", "NONE"],
+                                default="AUTO",
+                                help=("The learning mode used during the "
+                                      "test; the default is 'AUTO'"),
+                                dest="learning_mode")
+    # The back-to-back, frame-loss and latency branches below use
+    # args.latency_type, so the option must be defined here; the choice
+    # names and default are an assumption about Spirent's LatencyType values.
+    optional_named.add_argument("--latency_type",
+                                required=False,
+                                choices=["LIFO", "LILO", "FIFO"],
+                                default="LIFO",
+                                help=("The latency measurement type to use "
+                                      "during the latency test"),
+                                dest="latency_type")
+    optional_named.add_argument("--rate_lower_limit_pct",
+                                type=percent_float,
+                                required=False,
+                                default=1.0,
+                                help=("The minimum percent line rate that "
+                                      "will be used during the test"),
+                                dest="rate_lower_limit_pct")
+ optional_named.add_argument("--rate_upper_limit_pct",
+ type=percent_float,
+ required=False,
+ default=99.0,
+                                help=("The maximum percent line rate that "
+                                      "will be used during the test"),
+ dest="rate_upper_limit_pct")
+ optional_named.add_argument("--rate_initial_pct",
+ type=percent_float,
+ required=False,
+ default=99.0,
+                                help=("If Search Mode is BINARY, the percent "
+                                      "line rate that will be used at the "
+                                      "start of the test"),
+ dest="rate_initial_pct")
+ optional_named.add_argument("--rate_step_pct",
+ type=percent_float,
+ required=False,
+ default=10.0,
+                                help=("If Search Mode is STEP, the percent "
+                                      "load increase per step"),
+ dest="rate_step_pct")
+ optional_named.add_argument("--resolution_pct",
+ type=percent_float,
+ required=False,
+ default=1.0,
+                                help=("The minimum percentage of load "
+                                      "adjustment between iterations"),
+ dest="resolution_pct")
+ optional_named.add_argument("--frame_size_list",
+ type=lambda s: [int(item)
+ for item in s.split(',')],
+ required=False,
+ default=[256],
+ help="A comma-delimited list of frame sizes",
+ dest="frame_size_list")
+ optional_named.add_argument("--acceptable_frame_loss_pct",
+ type=percent_float,
+ required=False,
+ default=0.0,
+                                help=("The maximum acceptable frame loss "
+                                      "percent in any iteration"),
+ dest="acceptable_frame_loss_pct")
+ optional_named.add_argument("--east_intf_addr",
+ required=False,
+ default="192.85.1.3",
+                                help=("The address to assign to the first "
+                                      "emulated device interface on the "
+                                      "first east port"),
+ dest="east_intf_addr")
+ optional_named.add_argument("--east_intf_gateway_addr",
+ required=False,
+ default="192.85.1.53",
+                                help=("The gateway address to assign to the "
+                                      "first emulated device interface on "
+                                      "the first east port"),
+ dest="east_intf_gateway_addr")
+ optional_named.add_argument("--west_intf_addr",
+ required=False,
+ default="192.85.1.53",
+                                help=("The address to assign to the first "
+                                      "emulated device interface on the "
+                                      "first west port"),
+ dest="west_intf_addr")
+ optional_named.add_argument("--west_intf_gateway_addr",
+ required=False,
+ default="192.85.1.53",
+                                help=("The gateway address to assign to "
+                                      "the first emulated device interface "
+                                      "on the first west port"),
+ dest="west_intf_gateway_addr")
+ parser.add_argument("-v",
+ "--verbose",
+ required=False,
+                        default=False,
+ help="More output during operation when present",
+ action="store_true",
+ dest="verbose")
+ args = parser.parse_args()
+
+ if args.verbose:
+ logger.debug("Creating results directory")
+ create_dir(args.results_dir)
+
+ session_name = args.test_session_name
+ user_name = args.test_user_name
+
+ try:
+ stc = stchttp.StcHttp(args.lab_server_addr)
+ session_id = stc.new_session(user_name, session_name)
+ stc.join_session(session_id)
+ except RuntimeError as e:
+ logger.error(e)
+ raise
+
+    # Build the port location strings ("//chassis/slot/port").
+ tx_port_loc = "//%s/%s/%s" % (args.east_chassis_addr,
+ args.east_slot_num,
+ args.east_port_num)
+ rx_port_loc = "//%s/%s/%s" % (args.west_chassis_addr,
+ args.west_slot_num,
+ args.west_port_num)
+
+ # Retrieve and display the server information
+ if args.verbose:
+ logger.debug("SpirentTestCenter system version: %s",
+ stc.get("system1", "version"))
+
+ try:
+ device_list = []
+ port_list = []
+ if args.verbose:
+ logger.debug("Bring up license server")
+ license_mgr = stc.get("system1", "children-licenseservermanager")
+ if args.verbose:
+ logger.debug("license_mgr = %s", license_mgr)
+ stc.create("LicenseServer", under=license_mgr, attributes={
+ "server": args.license_server_addr})
+
+    # Get the root Project object
+    if args.verbose:
+        logger.debug("Retrieving the root project ...")
+ project = stc.get("System1", "children-Project")
+
+ # Create ports
+ if args.verbose:
+ logger.debug("Creating ports ...")
+ east_chassis_port = stc.create('port', project)
+ if args.verbose:
+ logger.debug("Configuring TX port ...")
+ stc.config(east_chassis_port, {'location': tx_port_loc})
+ port_list.append(east_chassis_port)
+
+ west_chassis_port = stc.create('port', project)
+ if args.verbose:
+ logger.debug("Configuring RX port ...")
+ stc.config(west_chassis_port, {'location': rx_port_loc})
+ port_list.append(west_chassis_port)
+
+ # Create emulated genparam for east port
+ east_device_gen_params = stc.create("EmulatedDeviceGenParams",
+ under=project,
+ attributes={"Port":
+ east_chassis_port})
+ # Create the DeviceGenEthIIIfParams object
+ stc.create("DeviceGenEthIIIfParams",
+ under=east_device_gen_params)
+ # Configuring Ipv4 interfaces
+ stc.create("DeviceGenIpv4IfParams",
+ under=east_device_gen_params,
+ attributes={"Addr": args.east_intf_addr,
+ "Gateway": args.east_intf_gateway_addr})
+ # Create Devices using the Device Wizard
+ device_gen_config = stc.perform("DeviceGenConfigExpand",
+ params={"DeleteExisting": "No",
+ "GenParams":
+ east_device_gen_params})
+ # Append to the device list
+ device_list.append(device_gen_config['ReturnList'])
+
+ # Create emulated genparam for west port
+ west_device_gen_params = stc.create("EmulatedDeviceGenParams",
+ under=project,
+ attributes={"Port":
+ west_chassis_port})
+ # Create the DeviceGenEthIIIfParams object
+ stc.create("DeviceGenEthIIIfParams",
+ under=west_device_gen_params)
+ # Configuring Ipv4 interfaces
+ stc.create("DeviceGenIpv4IfParams",
+ under=west_device_gen_params,
+ attributes={"Addr": args.west_intf_addr,
+ "Gateway": args.west_intf_gateway_addr})
+ # Create Devices using the Device Wizard
+ device_gen_config = stc.perform("DeviceGenConfigExpand",
+ params={"DeleteExisting": "No",
+ "GenParams":
+ west_device_gen_params})
+ # Append to the device list
+ device_list.append(device_gen_config['ReturnList'])
+ if args.verbose:
+ logger.debug(device_list)
+
+        # Set up the RFC 2544 test for the selected metric
+ if args.metric == "throughput":
+ if args.verbose:
+ logger.debug("Set up the RFC2544 throughput test...")
+ stc.perform("Rfc2544SetupThroughputTestCommand",
+ params={"AcceptableFrameLoss":
+ args.acceptable_frame_loss_pct,
+ "Duration": args.trial_duration_sec,
+ "FrameSizeList": args.frame_size_list,
+ "LearningMode": args.learning_mode,
+ "NumOfTrials": args.num_trials,
+ "RateInitial": args.rate_initial_pct,
+ "RateLowerLimit": args.rate_lower_limit_pct,
+ "RateStep": args.rate_step_pct,
+ "RateUpperLimit": args.rate_upper_limit_pct,
+ "Resolution": args.resolution_pct,
+ "SearchMode": args.search_mode,
+ "TrafficPattern": args.traffic_pattern})
+ elif args.metric == "backtoback":
+ stc.perform("Rfc2544SetupBackToBackTestCommand",
+ params={"AcceptableFrameLoss":
+ args.acceptable_frame_loss_pct,
+ "Duration": args.trial_duration_sec,
+ "FrameSizeList": args.frame_size_list,
+ "LearningMode": args.learning_mode,
+ "LatencyType": args.latency_type,
+ "NumOfTrials": args.num_trials,
+ "RateInitial": args.rate_initial_pct,
+ "RateLowerLimit": args.rate_lower_limit_pct,
+ "RateStep": args.rate_step_pct,
+ "RateUpperLimit": args.rate_upper_limit_pct,
+ "Resolution": args.resolution_pct,
+ "SearchMode": args.search_mode,
+ "TrafficPattern": args.traffic_pattern})
+ elif args.metric == "frameloss":
+ stc.perform("Rfc2544SetupFrameLossTestCommand",
+ params={"AcceptableFrameLoss":
+ args.acceptable_frame_loss_pct,
+ "Duration": args.trial_duration_sec,
+ "FrameSizeList": args.frame_size_list,
+ "LearningMode": args.learning_mode,
+ "LatencyType": args.latency_type,
+ "NumOfTrials": args.num_trials,
+ "RateInitial": args.rate_initial_pct,
+ "RateLowerLimit": args.rate_lower_limit_pct,
+ "RateStep": args.rate_step_pct,
+ "RateUpperLimit": args.rate_upper_limit_pct,
+ "Resolution": args.resolution_pct,
+ "SearchMode": args.search_mode,
+ "TrafficPattern": args.traffic_pattern})
+ elif args.metric == "latency":
+ stc.perform("Rfc2544SetupLatencyTestCommand",
+ params={"AcceptableFrameLoss":
+ args.acceptable_frame_loss_pct,
+ "Duration": args.trial_duration_sec,
+ "FrameSizeList": args.frame_size_list,
+ "LearningMode": args.learning_mode,
+ "LatencyType": args.latency_type,
+ "NumOfTrials": args.num_trials,
+ "RateInitial": args.rate_initial_pct,
+ "RateLowerLimit": args.rate_lower_limit_pct,
+ "RateStep": args.rate_step_pct,
+ "RateUpperLimit": args.rate_upper_limit_pct,
+ "Resolution": args.resolution_pct,
+ "SearchMode": args.search_mode,
+ "TrafficPattern": args.traffic_pattern})
+
+ # Save the configuration
+ stc.perform("SaveToTcc", params={"Filename": "2544.tcc"})
+ # Connect to the hardware...
+ stc.perform("AttachPorts", params={"portList": stc.get(
+ "system1.project", "children-port"), "autoConnect": "TRUE"})
+ # Apply configuration.
+ if args.verbose:
+ logger.debug("Apply configuration...")
+ stc.apply()
+
+ if args.verbose:
+ logger.debug("Starting the sequencer...")
+ stc.perform("SequencerStart")
+
+ # Wait for sequencer to finish
+ logger.info(
+ "Starting test... Please wait for the test to complete...")
+ stc.wait_until_complete()
+ logger.info("The test has completed... Saving results...")
+
+ # Determine what the results database filename is...
+ lab_server_resultsdb = stc.get(
+ "system1.project.TestResultSetting", "CurrentResultFileName")
+
+ if args.verbose:
+ logger.debug("The lab server results database is %s",
+ lab_server_resultsdb)
+
+ stc.perform("CSSynchronizeFiles",
+ params={"DefaultDownloadDir": args.results_dir})
+
+ resultsdb = args.results_dir + \
+ lab_server_resultsdb.split("/Results")[1]
+
+ logger.info(
+ "The local summary DB file has been saved to %s", resultsdb)
+
+        # This returns the "RFC2544ThroughputTestResultDetailedSummaryView"
+ # table view from the results database.
+ # There are other views available.
+
+ if args.metric == "throughput":
+ resultsdict = (
+ stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString":
+ resultsdb,
+ "ResultPath":
+ ("RFC2544ThroughputTestResultDetailed"
+ "SummaryView")}))
+
+        # This returns the "RFC2544Back2BackTestResultDetailedSummaryView"
+ # table view from the results database.
+ # There are other views available.
+ elif args.metric == "backtoback":
+ resultsdict = (
+ stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString":
+ resultsdb,
+ "ResultPath":
+ ("RFC2544Back2BackTestResultDetailed"
+ "SummaryView")}))
+
+        # This returns the "RFC2544LatencyTestResultDetailedSummaryView"
+ # table view from the results database.
+ # There are other views available.
+ elif args.metric == "latency":
+ resultsdict = (
+ stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString":
+ resultsdb,
+ "ResultPath":
+ ("RFC2544LatencyTestResultDetailed"
+ "SummaryView")}))
+
+        # This returns the "RFC2544FrameLossTestResultDetailedSummaryView"
+ # table view from the results database.
+ # There are other views available.
+ elif args.metric == "frameloss":
+ resultsdict = (
+ stc.perform("QueryResult",
+ params={
+ "DatabaseConnectionString":
+ resultsdb,
+ "ResultPath":
+ ("RFC2544FrameLossTestResultDetailed"
+ "SummaryView")}))
+ if args.verbose:
+ logger.debug("resultsdict[\"Columns\"]: %s",
+ resultsdict["Columns"])
+ logger.debug("resultsdict[\"Output\"]: %s", resultsdict["Output"])
+ logger.debug("Result paths: %s",
+ stc.perform("GetTestResultSettingPaths"))
+
+ # Write results to csv
+ logger.debug("Writing CSV file to results directory %s",
+ args.results_dir)
+ write_query_results_to_csv(
+ args.results_dir, args.csv_results_file_prefix, resultsdict)
+
+ except RuntimeError as e:
+ logger.error(e)
+
+ if args.verbose:
+ logger.debug("Destroy session on lab server")
+ stc.end_session()
+
+ logger.info("Test complete!")
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/pkt_gen/testcenter/testcenter.py b/tools/pkt_gen/testcenter/testcenter.py
index f670612c..b1351155 100644
--- a/tools/pkt_gen/testcenter/testcenter.py
+++ b/tools/pkt_gen/testcenter/testcenter.py
@@ -1,4 +1,4 @@
-# Copyright 2015 Spirent Communications.
+# Copyright 2016 Spirent Communications.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,20 +19,22 @@ Provides a model for Spirent TestCenter as a test tool for implementing
various performance tests of a virtual switch.
"""
-from __future__ import print_function
-
-from tools.pkt_gen import trafficgen
-from core.results.results_constants import ResultsConstants
-import subprocess
-import os
import csv
+import logging
+import os
+import subprocess
+
from conf import settings
+from core.results.results_constants import ResultsConstants
+from tools.pkt_gen import trafficgen
class TestCenter(trafficgen.ITrafficGenerator):
"""
Spirent TestCenter
"""
+ _logger = logging.getLogger(__name__)
+
def connect(self):
"""
Do nothing.
@@ -45,29 +47,33 @@ class TestCenter(trafficgen.ITrafficGenerator):
"""
pass
- def send_burst_traffic(self, traffic=None, numpkts=100, duration=20, framerate=100):
+ def send_burst_traffic(self, traffic=None, numpkts=100, duration=20):
"""
Do nothing.
"""
return None
- def send_cont_traffic(self, traffic=None, duration=30, framerate=0,
- multistream=False):
+ def send_cont_traffic(self, traffic=None, duration=30):
"""
Do nothing.
"""
return None
def send_rfc2544_throughput(self, traffic=None, trials=3, duration=20,
- lossrate=0.0, multistream=False):
+ lossrate=0.0):
"""
Send traffic per RFC2544 throughput test specifications.
"""
verbose = False
-
+ framesize = settings.getValue("TRAFFICGEN_STC_FRAME_SIZE")
+ if traffic and 'l2' in traffic:
+ if 'framesize' in traffic['l2']:
+ framesize = traffic['l2']['framesize']
args = [settings.getValue("TRAFFICGEN_STC_PYTHON2_PATH"),
- os.path.join(settings.getValue("TRAFFICGEN_STC_TESTCENTER_PATH"),
- settings.getValue("TRAFFICGEN_STC_RFC2544_TPUT_TEST_FILE_NAME")),
+ os.path.join(
+ settings.getValue("TRAFFICGEN_STC_TESTCENTER_PATH"),
+ settings.getValue(
+ "TRAFFICGEN_STC_RFC2544_TPUT_TEST_FILE_NAME")),
"--lab_server_addr",
settings.getValue("TRAFFICGEN_STC_LAB_SERVER_ADDR"),
"--license_server_addr",
@@ -111,7 +117,7 @@ class TestCenter(trafficgen.ITrafficGenerator):
"--resolution_pct",
settings.getValue("TRAFFICGEN_STC_RESOLUTION_PCT"),
"--frame_size_list",
- settings.getValue("TRAFFICGEN_STC_FRAME_SIZE"),
+ str(framesize),
"--acceptable_frame_loss_pct",
settings.getValue("TRAFFICGEN_STC_ACCEPTABLE_FRAME_LOSS_PCT"),
"--east_intf_addr",
@@ -122,33 +128,154 @@ class TestCenter(trafficgen.ITrafficGenerator):
settings.getValue("TRAFFICGEN_STC_WEST_INTF_ADDR"),
"--west_intf_gateway_addr",
settings.getValue("TRAFFICGEN_STC_WEST_INTF_GATEWAY_ADDR")]
+
if settings.getValue("TRAFFICGEN_STC_VERBOSE") is "True":
args.append("--verbose")
verbose = True
- print("Arguments used to call test: %s" % args)
+ self._logger.debug("Arguments used to call test: %s", args)
+ subprocess.check_call(args)
+
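+        # The REST script above writes its summary CSV into
+        # TRAFFICGEN_STC_RESULTS_DIR; read it back here and map its columns
+        # onto the framework's ResultsConstants.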
+ filec = os.path.join(settings.getValue("TRAFFICGEN_STC_RESULTS_DIR"),
+ settings.getValue(
+ "TRAFFICGEN_STC_CSV_RESULTS_FILE_PREFIX") +
+ ".csv")
+
+ if verbose:
+ self._logger.info("file: %s", filec)
+
+ result = {}
+
+ with open(filec, "r") as csvfile:
+ csvreader = csv.DictReader(csvfile)
+ for row in csvreader:
+ self._logger.info("Row: %s", row)
+ result[ResultsConstants.TX_RATE_FPS] = 0.0
+ result[ResultsConstants.THROUGHPUT_RX_FPS] = 0.0
+ result[ResultsConstants.TX_RATE_MBPS] = 0.0
+ result[ResultsConstants.THROUGHPUT_RX_MBPS] = 0.0
+ result[ResultsConstants.TX_RATE_PERCENT] = float(
+ row["OfferedLoad(%)"])
+ result[ResultsConstants.THROUGHPUT_RX_PERCENT] = float(
+ row["Throughput(%)"])
+ result[ResultsConstants.MIN_LATENCY_NS] = float(
+ row["MinimumLatency(us)"]) * 1000
+ result[ResultsConstants.MAX_LATENCY_NS] = float(
+ row["MaximumLatency(us)"]) * 1000
+ result[ResultsConstants.AVG_LATENCY_NS] = float(
+ row["AverageLatency(us)"]) * 1000
+ result[ResultsConstants.FRAME_LOSS_PERCENT] = float(
+ row["PercentLoss"])
+ return result
+
+ def send_rfc2544_back2back(self, traffic=None, trials=1, duration=20,
+ lossrate=0.0):
+ """
+ Send traffic per RFC2544 BacktoBack test specifications.
+ """
+ verbose = False
+ framesize = settings.getValue("TRAFFICGEN_STC_FRAME_SIZE")
+ if traffic and 'l2' in traffic:
+ if 'framesize' in traffic['l2']:
+ framesize = traffic['l2']['framesize']
+ args = [settings.getValue("TRAFFICGEN_STC_PYTHON2_PATH"),
+ os.path.join(
+ settings.getValue("TRAFFICGEN_STC_TESTCENTER_PATH"),
+ settings.getValue(
+ "TRAFFICGEN_STC_RFC2544_B2B_TEST_FILE_NAME")),
+ "--metric",
+ settings.getValue("TRAFFICGEN_STC_RFC2544_METRIC"),
+ "--lab_server_addr",
+ settings.getValue("TRAFFICGEN_STC_LAB_SERVER_ADDR"),
+ "--license_server_addr",
+ settings.getValue("TRAFFICGEN_STC_LICENSE_SERVER_ADDR"),
+ "--east_chassis_addr",
+ settings.getValue("TRAFFICGEN_STC_EAST_CHASSIS_ADDR"),
+ "--east_slot_num",
+ settings.getValue("TRAFFICGEN_STC_EAST_SLOT_NUM"),
+ "--east_port_num",
+ settings.getValue("TRAFFICGEN_STC_EAST_PORT_NUM"),
+ "--west_chassis_addr",
+ settings.getValue("TRAFFICGEN_STC_WEST_CHASSIS_ADDR"),
+ "--west_slot_num",
+ settings.getValue("TRAFFICGEN_STC_WEST_SLOT_NUM"),
+ "--west_port_num",
+ settings.getValue("TRAFFICGEN_STC_WEST_PORT_NUM"),
+ "--test_session_name",
+ settings.getValue("TRAFFICGEN_STC_TEST_SESSION_NAME"),
+ "--results_dir",
+ settings.getValue("TRAFFICGEN_STC_RESULTS_DIR"),
+ "--csv_results_file_prefix",
+ settings.getValue("TRAFFICGEN_STC_CSV_RESULTS_FILE_PREFIX"),
+ "--num_trials",
+ settings.getValue("TRAFFICGEN_STC_NUMBER_OF_TRIALS"),
+ "--trial_duration_sec",
+ settings.getValue("TRAFFICGEN_STC_TRIAL_DURATION_SEC"),
+ "--traffic_pattern",
+ settings.getValue("TRAFFICGEN_STC_TRAFFIC_PATTERN"),
+ "--search_mode",
+ settings.getValue("TRAFFICGEN_STC_SEARCH_MODE"),
+ "--learning_mode",
+ settings.getValue("TRAFFICGEN_STC_LEARNING_MODE"),
+ "--latency_type",
+ settings.getValue("TRAFFICGEN_STC_LATENCY_TYPE"),
+ "--rate_lower_limit_pct",
+ settings.getValue("TRAFFICGEN_STC_RATE_LOWER_LIMIT_PCT"),
+ "--rate_upper_limit_pct",
+ settings.getValue("TRAFFICGEN_STC_RATE_UPPER_LIMIT_PCT"),
+ "--rate_initial_pct",
+ settings.getValue("TRAFFICGEN_STC_RATE_INITIAL_PCT"),
+ "--rate_step_pct",
+ settings.getValue("TRAFFICGEN_STC_RATE_STEP_PCT"),
+ "--resolution_pct",
+ settings.getValue("TRAFFICGEN_STC_RESOLUTION_PCT"),
+ "--frame_size_list",
+ str(framesize),
+ "--acceptable_frame_loss_pct",
+ settings.getValue("TRAFFICGEN_STC_ACCEPTABLE_FRAME_LOSS_PCT"),
+ "--east_intf_addr",
+ settings.getValue("TRAFFICGEN_STC_EAST_INTF_ADDR"),
+ "--east_intf_gateway_addr",
+ settings.getValue("TRAFFICGEN_STC_EAST_INTF_GATEWAY_ADDR"),
+ "--west_intf_addr",
+ settings.getValue("TRAFFICGEN_STC_WEST_INTF_ADDR"),
+ "--west_intf_gateway_addr",
+ settings.getValue("TRAFFICGEN_STC_WEST_INTF_GATEWAY_ADDR")]
- subprocess.check_call(map(os.path.expanduser, args))
+        if settings.getValue("TRAFFICGEN_STC_VERBOSE") == "True":
+ args.append("--verbose")
+ verbose = True
+        self._logger.debug("Arguments used to call test: %s", args)
+ subprocess.check_call(args)
- file = os.path.join(settings.getValue("TRAFFICGEN_STC_RESULTS_DIR"),
- settings.getValue("TRAFFICGEN_STC_CSV_RESULTS_FILE_PREFIX") + ".csv")
+ filecs = os.path.join(settings.getValue("TRAFFICGEN_STC_RESULTS_DIR"),
+ settings.getValue(
+ "TRAFFICGEN_STC_CSV_RESULTS_FILE_PREFIX") +
+ ".csv")
if verbose:
- print("file: %s" % file)
+ self._logger.debug("file: %s", filecs)
result = {}
- with open(file, "r") as csvfile:
+ with open(filecs, "r") as csvfile:
csvreader = csv.DictReader(csvfile)
for row in csvreader:
- print("Row: %s" % row)
+ self._logger.info("Row: %s", row)
result[ResultsConstants.TX_RATE_FPS] = 0.0
result[ResultsConstants.THROUGHPUT_RX_FPS] = 0.0
result[ResultsConstants.TX_RATE_MBPS] = 0.0
result[ResultsConstants.THROUGHPUT_RX_MBPS] = 0.0
- result[ResultsConstants.TX_RATE_PERCENT] = float(row["OfferedLoad(%)"])
- result[ResultsConstants.THROUGHPUT_RX_PERCENT] = float(row["Throughput(%)"])
- result[ResultsConstants.MIN_LATENCY_NS] = float(row["MinimumLatency(us)"]) * 1000
- result[ResultsConstants.MAX_LATENCY_NS] = float(row["MaximumLatency(us)"]) * 1000
- result[ResultsConstants.AVG_LATENCY_NS] = float(row["AverageLatency(us)"]) * 1000
+ result[ResultsConstants.TX_RATE_PERCENT] = float(
+ row["OfferedLoad(%)"])
+ result[ResultsConstants.THROUGHPUT_RX_PERCENT] = float(
+ row["Throughput(%)"])
+ result[ResultsConstants.MIN_LATENCY_NS] = float(
+ row["MinimumLatency(us)"]) * 1000
+ result[ResultsConstants.MAX_LATENCY_NS] = float(
+ row["MaximumLatency(us)"]) * 1000
+ result[ResultsConstants.AVG_LATENCY_NS] = float(
+ row["AverageLatency(us)"]) * 1000
+ result[ResultsConstants.FRAME_LOSS_PERCENT] = float(
+ row["PercentLoss"])
return result
if __name__ == '__main__':
@@ -159,6 +286,6 @@ if __name__ == '__main__':
'dstip': '90.90.90.90',
},
}
-
with TestCenter() as dev:
print(dev.send_rfc2544_throughput(traffic=TRAFFIC))
+        print(dev.send_rfc2544_back2back(traffic=TRAFFIC))