Diffstat (limited to 'yardstick')
-rw-r--r--  yardstick/benchmark/contexts/heat.py | 4
-rw-r--r--  yardstick/benchmark/core/report.py | 122
-rw-r--r--  yardstick/benchmark/scenarios/availability/monitor/monitor_multi.py | 27
-rw-r--r--  yardstick/benchmark/scenarios/availability/monitor/monitor_process.py | 12
-rw-r--r--  yardstick/common/exceptions.py | 4
-rw-r--r--  yardstick/common/nsb_report.css | 19
-rw-r--r--  yardstick/common/nsb_report.html.j2 | 52
-rw-r--r--  yardstick/common/nsb_report.js | 73
-rw-r--r--  yardstick/common/utils.py | 11
-rw-r--r--  yardstick/network_services/libs/ixia_libs/ixnet/ixnet_api.py | 3
-rw-r--r--  yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py | 90
-rw-r--r--  yardstick/ssh.py | 8
-rw-r--r--  yardstick/tests/functional/benchmark/core/__init__.py | 0
-rw-r--r--  yardstick/tests/functional/benchmark/core/test_report.py | 114
-rw-r--r--  yardstick/tests/unit/benchmark/core/test_report.py | 139
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py | 17
-rw-r--r--  yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py | 16
-rw-r--r--  yardstick/tests/unit/common/test_utils.py | 9
-rw-r--r--  yardstick/tests/unit/network_services/libs/ixia_libs/test_ixnet_api.py | 6
-rw-r--r--  yardstick/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py | 105
-rw-r--r--  yardstick/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py | 100
21 files changed, 701 insertions, 230 deletions
diff --git a/yardstick/benchmark/contexts/heat.py b/yardstick/benchmark/contexts/heat.py
index c3c5451bd..f4c48f4a5 100644
--- a/yardstick/benchmark/contexts/heat.py
+++ b/yardstick/benchmark/contexts/heat.py
@@ -508,10 +508,12 @@ class HeatContext(Context):
pkey = pkg_resources.resource_string(
'yardstick.resources',
h_join('files/yardstick_key', self.name)).decode('utf-8')
-
+ key_filename = pkg_resources.resource_filename('yardstick.resources',
+ h_join('files/yardstick_key', self.name))
result = {
"user": server.context.user,
"pkey": pkey,
+ "key_filename": key_filename,
"private_ip": server.private_ip,
"interfaces": server.interfaces,
"routing_table": self.generate_routing_table(server),
diff --git a/yardstick/benchmark/core/report.py b/yardstick/benchmark/core/report.py
index 0bc392fe5..0819cd497 100644
--- a/yardstick/benchmark/core/report.py
+++ b/yardstick/benchmark/core/report.py
@@ -10,13 +10,12 @@
""" Handler for yardstick command 'report' """
-import ast
import re
+import six
import uuid
import jinja2
from api.utils import influx
-from oslo_utils import encodeutils
from oslo_utils import uuidutils
from yardstick.common import constants as consts
from yardstick.common.utils import cliargs
@@ -55,11 +54,9 @@ class JSTree(object):
def format_for_jstree(self, data):
"""Format the data into the required format for jsTree.
- The data format expected is a list of key-value pairs which represent
- the data and label for each metric e.g.:
+ The data format expected is a list of metric names e.g.:
- [{'data': [0, ], 'label': 'tg__0.DropPackets'},
- {'data': [548, ], 'label': 'tg__0.LatencyAvg.5'},]
+ ['tg__0.DropPackets', 'tg__0.LatencyAvg.5']
This data is converted into the format required for jsTree to group and
display the metrics in a hierarchial fashion, including creating a
@@ -76,8 +73,8 @@ class JSTree(object):
self._created_nodes = ['#']
self.jstree_data = []
- for item in data:
- self._create_node(item["label"])
+ for metric in data:
+ self._create_node(metric)
return self.jstree_data
@@ -115,10 +112,10 @@ class Report(object):
else:
raise KeyError("Test case not found.")
- def _get_tasks(self):
- task_cmd = "select * from \"%s\" where task_id= '%s'"
- task_query = task_cmd % (self.yaml_name, self.task_id)
- query_exec = influx.query(task_query)
+ def _get_metrics(self):
+ metrics_cmd = "select * from \"%s\" where task_id = '%s'"
+ metrics_query = metrics_cmd % (self.yaml_name, self.task_id)
+ query_exec = influx.query(metrics_query)
if query_exec:
return query_exec
else:
@@ -132,38 +129,72 @@ class Report(object):
"""
self._validate(args.yaml_name[0], args.task_id[0])
- self.db_fieldkeys = self._get_fieldkeys()
+ db_fieldkeys = self._get_fieldkeys()
+ # list of dicts of:
+ # - PY2: unicode key and unicode value
+ # - PY3: str key and str value
- self.db_task = self._get_tasks()
+ db_metrics = self._get_metrics()
+ # list of dicts of:
+ # - PY2: unicode key and { None | unicode | float | long | int } value
+ # - PY3: str key and { None | str | float | int } value
- field_keys = []
- datasets = []
- table_vals = {}
+ # extract fieldKey entries, and convert them to str where needed
+ field_keys = [key if isinstance(key, str) # PY3: already str
+ else key.encode('utf8') # PY2: unicode to str
+ for key in
+ [field['fieldKey']
+ for field in db_fieldkeys]]
- field_keys = [encodeutils.to_utf8(field['fieldKey'])
- for field in self.db_fieldkeys]
+ # extract timestamps
+ self.Timestamp = []
+ for metric in db_metrics:
+ metric_time = metric['time'] # in RFC3339 format
+ if not isinstance(metric_time, str):
+ metric_time = metric_time.encode('utf8') # PY2: unicode to str
+ metric_time = metric_time[11:] # skip date, keep time
+ head, _, tail = metric_time.partition('.') # split HH:MM:SS and nsZ
+ metric_time = head + '.' + tail[:6] # join HH:MM:SS and .us
+ self.Timestamp.append(metric_time) # HH:MM:SS.micros
+
+ # prepare return values
+ datasets = []
+ table_vals = {'Timestamp': self.Timestamp}
+ # extract and convert field values
for key in field_keys:
- self.Timestamp = []
values = []
- for task in self.db_task:
- task_time = encodeutils.to_utf8(task['time'])
- if not isinstance(task_time, str):
- task_time = str(task_time, 'utf8')
- if not isinstance(key, str):
- key = str(key, 'utf8')
- task_time = task_time[11:]
- head, _, tail = task_time.partition('.')
- task_time = head + "." + tail[:6]
- self.Timestamp.append(task_time)
- if task[key] is None:
- values.append(None)
- elif isinstance(task[key], (int, float)):
- values.append(task[key])
+ for metric in db_metrics:
+ val = metric.get(key, None)
+ if val is None:
+ # keep explicit None or missing entry as is
+ pass
+ elif isinstance(val, (int, float)):
+ # keep plain int or float as is
+ pass
+ elif six.PY2 and isinstance(val,
+ long): # pylint: disable=undefined-variable
+ # PY2: long value would be rendered with trailing L,
+ # which JS does not support, so convert it to float
+ val = float(val)
+ elif isinstance(val, six.string_types):
+ s = val
+ if not isinstance(s, str):
+ s = s.encode('utf8') # PY2: unicode to str
+ try:
+ # convert until failure
+ val = s
+ val = float(s)
+ val = int(s)
+ if six.PY2 and isinstance(val,
+ long): # pylint: disable=undefined-variable
+ val = float(val) # PY2: long to float
+ except ValueError:
+ pass
else:
- values.append(ast.literal_eval(task[key]))
+ raise ValueError("Cannot convert %r" % val)
+ values.append(val)
datasets.append({'label': key, 'data': values})
- table_vals['Timestamp'] = self.Timestamp
table_vals[key] = values
return datasets, table_vals
@@ -197,8 +228,14 @@ class Report(object):
@cliargs("yaml_name", type=str, help=" Yaml file Name", nargs=1)
def generate_nsb(self, args):
"""Start NSB report generation."""
- datasets, table_vals = self._generate_common(args)
- jstree_data = JSTree().format_for_jstree(datasets)
+ _, report_data = self._generate_common(args)
+ report_time = report_data.pop('Timestamp')
+ report_keys = sorted(report_data, key=str.lower)
+ report_tree = JSTree().format_for_jstree(report_keys)
+ report_meta = {
+ "testcase": self.yaml_name,
+ "task_id": self.task_id,
+ }
template_dir = consts.YARDSTICK_ROOT_PATH + "yardstick/common"
template_environment = jinja2.Environment(
@@ -207,10 +244,11 @@ class Report(object):
lstrip_blocks=True)
context = {
- "Timestamps": self.Timestamp,
- "task_id": self.task_id,
- "table": table_vals,
- "jstree_nodes": jstree_data,
+ "report_meta": report_meta,
+ "report_data": report_data,
+ "report_time": report_time,
+ "report_keys": report_keys,
+ "report_tree": report_tree,
}
template_html = template_environment.get_template("nsb_report.html.j2")
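The conversion loop above turns whatever InfluxDB hands back into values the JS report can chart: None and numbers pass through, numeric-looking strings become int or float, and anything else stays a string. Below is a minimal Python 3 sketch of that conversion (the six/PY2 long handling dropped, helper name illustrative only):

    def convert_metric_value(val):
        """Best-effort conversion of one InfluxDB field value for rendering."""
        if val is None or isinstance(val, (int, float)):
            return val                      # keep None, int and float as-is
        if isinstance(val, str):
            try:
                return int(val)             # '1011' -> 1011
            except ValueError:
                pass
            try:
                return float(val)           # '7.89' -> 7.89
            except ValueError:
                return val                  # non-numeric strings stay strings
        raise ValueError("Cannot convert %r" % val)

Under Python 3 this keeps large integer strings such as '9876543210123456789' as exact ints, which is what the updated unit test expects.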
diff --git a/yardstick/benchmark/scenarios/availability/monitor/monitor_multi.py b/yardstick/benchmark/scenarios/availability/monitor/monitor_multi.py
index 971bae1e9..8f1f53cde 100644
--- a/yardstick/benchmark/scenarios/availability/monitor/monitor_multi.py
+++ b/yardstick/benchmark/scenarios/availability/monitor/monitor_multi.py
@@ -62,20 +62,19 @@ class MultiMonitor(basemonitor.BaseMonitor):
outage_time = (
last_outage - first_outage if last_outage > first_outage else 0
)
+ self._result = {"outage_time": outage_time}
LOG.debug("outage_time is: %f", outage_time)
max_outage_time = 0
- if "max_outage_time" in self._config["sla"]:
- max_outage_time = self._config["sla"]["max_outage_time"]
- elif "max_recover_time" in self._config["sla"]:
- max_outage_time = self._config["sla"]["max_recover_time"]
- else:
- raise RuntimeError("'max_outage_time' or 'max_recover_time' "
- "config is not found")
- self._result = {"outage_time": outage_time}
-
- if outage_time > max_outage_time:
- LOG.error("SLA failure: %f > %f", outage_time, max_outage_time)
- return False
- else:
- return True
+ if self._config.get("sla"):
+ if "max_outage_time" in self._config["sla"]:
+ max_outage_time = self._config["sla"]["max_outage_time"]
+ elif "max_recover_time" in self._config["sla"]:
+ max_outage_time = self._config["sla"]["max_recover_time"]
+ else:
+ raise RuntimeError("'max_outage_time' or 'max_recover_time' "
+ "config is not found")
+ if outage_time > max_outage_time:
+ LOG.error("SLA failure: %f > %f", outage_time, max_outage_time)
+ return False
+ return True
diff --git a/yardstick/benchmark/scenarios/availability/monitor/monitor_process.py b/yardstick/benchmark/scenarios/availability/monitor/monitor_process.py
index 8d2f2633c..280e5811d 100644
--- a/yardstick/benchmark/scenarios/availability/monitor/monitor_process.py
+++ b/yardstick/benchmark/scenarios/availability/monitor/monitor_process.py
@@ -46,12 +46,12 @@ class MonitorProcess(basemonitor.BaseMonitor):
def verify_SLA(self):
outage_time = self._result.get('outage_time', None)
- max_outage_time = self._config["sla"]["max_recover_time"]
- if outage_time > max_outage_time:
- LOG.info("SLA failure: %f > %f", outage_time, max_outage_time)
- return False
- else:
- return True
+ if self._config.get("sla"):
+ max_outage_time = self._config["sla"]["max_recover_time"]
+ if outage_time > max_outage_time:
+ LOG.info("SLA failure: %f > %f", outage_time, max_outage_time)
+ return False
+ return True
def _test(): # pragma: no cover
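Both monitor changes above apply the same rule: the SLA threshold is only enforced when the scenario config actually carries an "sla" section; otherwise verify_SLA() simply passes. A compact standalone sketch of that guard (just the decision logic, not the BaseMonitor API), using the monitor_multi key names:

    def verify_sla(outage_time, config):
        sla = config.get("sla")
        if sla:
            # accept either key, as monitor_multi does
            max_outage_time = sla.get("max_outage_time",
                                      sla.get("max_recover_time"))
            if max_outage_time is None:
                raise RuntimeError("'max_outage_time' or 'max_recover_time' "
                                   "config is not found")
            if outage_time > max_outage_time:
                return False                # SLA configured and violated
        return True                         # no SLA section, or SLA met

    assert verify_sla(10, {}) is True
    assert verify_sla(10, {"sla": {"max_recover_time": 5}}) is False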
diff --git a/yardstick/common/exceptions.py b/yardstick/common/exceptions.py
index 5e0df973a..f62f02f18 100644
--- a/yardstick/common/exceptions.py
+++ b/yardstick/common/exceptions.py
@@ -405,6 +405,10 @@ class IxNetworkFieldNotPresentInStackItem(YardstickException):
message = 'Field "%(field_name)s" not present in stack item %(stack_item)s'
+class IncorrectFlowOption(YardstickException):
+ message = 'Flow option {option} for {link} is incorrect'
+
+
class SLAValidationError(YardstickException):
message = '%(case_name)s SLA validation failed. Error: %(error_msg)s'
diff --git a/yardstick/common/nsb_report.css b/yardstick/common/nsb_report.css
index 2beb91c53..667f865a5 100644
--- a/yardstick/common/nsb_report.css
+++ b/yardstick/common/nsb_report.css
@@ -9,21 +9,26 @@
******************************************************************************/
body {
- font-size: 16pt;
+ font-family: Frutiger, "Helvetica Neue", Helvetica, Arial, sans-serif;
+}
+
+header {
+ padding-top: 5px;
+ text-align: center;
+ font-weight: bold;
}
-table {
+#tblMetrics {
overflow-y: scroll;
height: 360px;
display: block;
}
-header {
- font-family: Frutiger, "Helvetica Neue", Helvetica, Arial, sans-serif;
- clear: left;
- text-align: center;
+#cnvGraph {
+ width: 100%;
+ height: 500px;
}
-.control-pane {
+#divTree {
font-size: 10pt;
}
diff --git a/yardstick/common/nsb_report.html.j2 b/yardstick/common/nsb_report.html.j2
index a3087d746..aa90253f8 100644
--- a/yardstick/common/nsb_report.html.j2
+++ b/yardstick/common/nsb_report.html.j2
@@ -29,53 +29,45 @@
</head>
<body>
- <div class="container" style="width:80%">
+ <div class="container-fluid">
<div class="row">
- <header class="jumbotron">
- <h1>Yardstick User Interface</h1>
- <h4>Report of {{task_id}} Generated</h4>
+ <header>
+ Testcase: {{report_meta.testcase}}<br>
+ Task-ID: {{report_meta.task_id}}<br>
</header>
</div>
<div class="row">
- <div class="col-md-2 control-pane">
- <div id="data_selector"></div>
+ <div class="col-md-2">
+ <div id="divTree"></div>
</div>
- <div class="col-md-10 data-pane">
- <canvas id="cnvGraph" style="width: 100%; height: 500px"></canvas>
+ <div class="col-md-10">
+ <canvas id="cnvGraph"></canvas>
</div>
</div>
<div class="row">
<div class="col-md-12 table-responsive">
- <table class="table table-hover"></table>
+ <table id="tblMetrics" class="table table-condensed table-hover"></table>
</div>
</div>
</div>
<script>
- var arr, jstree_data, timestamps;
- arr = {{table|safe}};
- timestamps = {{Timestamps|safe}};
- jstree_data = {{jstree_nodes|safe}};
+ // Injected metrics, timestamps, keys and hierarchy
+ var report_data = {{report_data|safe}};
+ var report_time = {{report_time|safe}};
+ var report_keys = {{report_keys|safe}};
+ var report_tree = {{report_tree|safe}};
+ // Wait for DOM to be loaded
$(function() {
- create_table(arr);
- create_tree(jstree_data);
- var objGraph = create_graph($('#cnvGraph'), timestamps);
+ var tblMetrics = $('#tblMetrics');
+ var cnvGraph = $('#cnvGraph');
+ var divTree = $('#divTree');
- $('#data_selector').on('check_node.jstree uncheck_node.jstree', function(e, data) {
- var selected_datasets = [];
- for (var i = 0; i < data.selected.length; i++) {
- var node = data.instance.get_node(data.selected[i]);
- if (node.children.length == 0) {
- var dataset = {
- label: node.id,
- data: arr[node.id],
- };
- selected_datasets.push(dataset);
- }
- }
- update_graph(objGraph, selected_datasets);
- });
+ create_table(tblMetrics, report_data, report_time, report_keys);
+ var objGraph = create_graph(cnvGraph, report_time);
+ create_tree(divTree, report_tree);
+ handle_tree(divTree, tblMetrics, objGraph, report_data, report_time);
});
</script>
</body>
diff --git a/yardstick/common/nsb_report.js b/yardstick/common/nsb_report.js
index cc5e14ee7..4de1c8e78 100644
--- a/yardstick/common/nsb_report.js
+++ b/yardstick/common/nsb_report.js
@@ -10,9 +10,9 @@
var None = null;
-function create_tree(jstree_data)
+function create_tree(divTree, jstree_data)
{
- $('#data_selector').jstree({
+ divTree.jstree({
plugins: ['checkbox'],
checkbox: {
three_state: false,
@@ -29,34 +29,33 @@ function create_tree(jstree_data)
});
}
-// may need to pass timestamps too...
-function create_table(table_data)
+function create_table(tblMetrics, table_data, timestamps, table_keys)
{
- var tab, tr, td, tn, tbody, keys, key, curr_data, val;
- // create table
- tab = document.getElementsByTagName('table')[0];
- tbody = document.createElement('tbody');
+ var tbody = $('<tbody></tbody>');
+ var tr0 = $('<tr></tr>');
+ var th0 = $('<th></th>');
+ var td0 = $('<td></td>');
+ var tr;
+
+ // create table headings using timestamps
+ tr = tr0.clone().append(th0.clone().text('Timestamp'));
+ timestamps.forEach(function(t) {
+ tr.append(th0.clone().text(t));
+ });
+ tbody.append(tr);
+
// for each metric
- keys = Object.keys(table_data);
- for (var i = 0; i < keys.length; i++) {
- key = keys[i];
- tr = document.createElement('tr');
- td = document.createElement('td');
- tn = document.createTextNode(key);
- td.appendChild(tn);
- tr.appendChild(td);
+ table_keys.forEach(function(key) {
+ tr = tr0.clone().append(td0.clone().text(key));
// add each piece of data as its own column
- curr_data = table_data[key];
- for (var j = 0; j < curr_data.length; j++) {
- val = curr_data[j];
- td = document.createElement('td');
- tn = document.createTextNode(val === None ? '' : val);
- td.appendChild(tn);
- tr.appendChild(td);
- }
- tbody.appendChild(tr);
- }
- tab.appendChild(tbody);
+ table_data[key].forEach(function(val) {
+ tr.append(td0.clone().text(val === None ? '' : val));
+ });
+ tbody.append(tr);
+ });
+
+ // re-create table
+ tblMetrics.empty().append(tbody);
}
function create_graph(cnvGraph, timestamps)
@@ -144,3 +143,23 @@ function update_graph(objGraph, datasets)
objGraph.data.datasets = datasets;
objGraph.update();
}
+
+function handle_tree(divTree, tblMetrics, objGraph, table_data, timestamps)
+{
+ divTree.on('check_node.jstree uncheck_node.jstree', function(e, data) {
+ var selected_keys = [];
+ var selected_datasets = [];
+ data.selected.forEach(function(sel) {
+ var node = data.instance.get_node(sel);
+ if (node.children.length == 0) {
+ selected_keys.push(node.id);
+ selected_datasets.push({
+ label: node.id,
+ data: table_data[node.id],
+ });
+ }
+ });
+ create_table(tblMetrics, table_data, timestamps, selected_keys);
+ update_graph(objGraph, selected_datasets);
+ });
+}
diff --git a/yardstick/common/utils.py b/yardstick/common/utils.py
index 51313ef47..9eba896e2 100644
--- a/yardstick/common/utils.py
+++ b/yardstick/common/utils.py
@@ -293,6 +293,17 @@ def make_ipv4_address(ip_addr):
return ipaddress.IPv4Address(six.text_type(ip_addr))
+def get_ip_range_count(iprange):
+ start_range, end_range = iprange.split("-")
+ start = int(make_ipv4_address(start_range))
+ end = int(make_ipv4_address(end_range))
+ return end - start
+
+
+def get_ip_range_start(iprange):
+ return str(make_ipv4_address(iprange.split("-")[0]))
+
+
def safe_ip_address(ip_addr):
""" get ip address version v6 or v4 """
try:
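The two range helpers split an "A-B" string and lean on IPv4Address's integer conversion, so the count is simply end minus start. A standalone sketch using the stdlib ipaddress module instead of yardstick's make_ipv4_address wrapper (function names illustrative), checked against the same values as the new unit test:

    import ipaddress

    def ip_range_count(iprange):
        # number of steps from start to end (the start address is not counted)
        start_str, end_str = iprange.split("-")
        return (int(ipaddress.IPv4Address(end_str))
                - int(ipaddress.IPv4Address(start_str)))

    def ip_range_start(iprange):
        return str(ipaddress.IPv4Address(iprange.split("-")[0]))

    assert ip_range_count("192.168.0.1-192.168.0.25") == 24
    assert ip_range_start("192.168.0.1-192.168.0.25") == "192.168.0.1"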
diff --git a/yardstick/network_services/libs/ixia_libs/ixnet/ixnet_api.py b/yardstick/network_services/libs/ixia_libs/ixnet/ixnet_api.py
index d41dd028f..cc627ef78 100644
--- a/yardstick/network_services/libs/ixia_libs/ixnet/ixnet_api.py
+++ b/yardstick/network_services/libs/ixia_libs/ixnet/ixnet_api.py
@@ -152,6 +152,9 @@ class IxNextgen(object): # pragma: no cover
vports = self.ixnet.getList(self.ixnet.getRoot(), 'vport')
return vports
+ def get_static_interface(self, vport):
+ return self.ixnet.getList(vport, 'interface')
+
def _get_config_element_by_flow_group_name(self, flow_group_name):
"""Get a config element using the flow group name
diff --git a/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py b/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
index 1d37f8f6f..ae95843a5 100644
--- a/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
+++ b/yardstick/network_services/vnf_generic/vnf/tg_rfc2544_ixia.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@ import logging
import six
from yardstick.common import utils
+from yardstick.common import exceptions
from yardstick.network_services.libs.ixia_libs.ixnet import ixnet_api
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTrafficGen
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ClientResourceHelper
@@ -31,6 +32,8 @@ WAIT_PROTOCOLS_STARTED = 360
class IxiaBasicScenario(object):
+ """Ixia Basic scenario for flow from port to port"""
+
def __init__(self, client, context_cfg, ixia_cfg):
self.client = client
@@ -43,6 +46,12 @@ class IxiaBasicScenario(object):
def apply_config(self):
pass
+ def run_protocols(self):
+ pass
+
+ def stop_protocols(self):
+ pass
+
def create_traffic_model(self, traffic_profile=None):
# pylint: disable=unused-argument
vports = self.client.get_vports()
@@ -51,11 +60,81 @@ class IxiaBasicScenario(object):
self.client.create_traffic_model(self._uplink_vports,
self._downlink_vports)
- def run_protocols(self):
- pass
- def stop_protocols(self):
- pass
+class IxiaL3Scenario(IxiaBasicScenario):
+ """Ixia scenario for L3 flow between static ip's"""
+
+ def _add_static_ips(self):
+ vports = self.client.get_vports()
+ uplink_intf_vport = [(self.client.get_static_interface(vport), vport)
+ for vport in vports[::2]]
+ downlink_intf_vport = [(self.client.get_static_interface(vport), vport)
+ for vport in vports[1::2]]
+
+ for index in range(len(uplink_intf_vport)):
+ intf, vport = uplink_intf_vport[index]
+ try:
+ iprange = self.ixia_cfg['flow'].get('src_ip')[index]
+ start_ip = utils.get_ip_range_start(iprange)
+ count = utils.get_ip_range_count(iprange)
+ self.client.add_static_ipv4(intf, vport, start_ip, count, '32')
+ except IndexError:
+ raise exceptions.IncorrectFlowOption(
+ option="src_ip", link="uplink_{}".format(index))
+
+ intf, vport = downlink_intf_vport[index]
+ try:
+ iprange = self.ixia_cfg['flow'].get('dst_ip')[index]
+ start_ip = utils.get_ip_range_start(iprange)
+ count = utils.get_ip_range_count(iprange)
+ self.client.add_static_ipv4(intf, vport, start_ip, count, '32')
+ except IndexError:
+ raise exceptions.IncorrectFlowOption(
+ option="dst_ip", link="downlink_{}".format(index))
+
+ def _add_interfaces(self):
+ vports = self.client.get_vports()
+ uplink_vports = (vport for vport in vports[::2])
+ downlink_vports = (vport for vport in vports[1::2])
+
+ ix_node = next(node for _, node in self.context_cfg['nodes'].items()
+ if node['role'] == 'IxNet')
+
+ for intf in ix_node['interfaces'].values():
+ ip = intf.get('local_ip')
+ mac = intf.get('local_mac')
+ gateway = None
+ try:
+ gateway = next(route.get('gateway')
+ for route in ix_node.get('routing_table')
+ if route.get('if') == intf.get('ifname'))
+ except StopIteration:
+ LOG.debug("Gateway not provided")
+
+ if 'uplink' in intf.get('vld_id'):
+ self.client.add_interface(next(uplink_vports),
+ ip, mac, gateway)
+ else:
+ self.client.add_interface(next(downlink_vports),
+ ip, mac, gateway)
+
+ def apply_config(self):
+ self._add_interfaces()
+ self._add_static_ips()
+
+ def create_traffic_model(self, traffic_profile=None):
+ # pylint: disable=unused-argument
+ vports = self.client.get_vports()
+ self._uplink_vports = vports[::2]
+ self._downlink_vports = vports[1::2]
+
+ uplink_endpoints = [port + '/protocols/static'
+ for port in self._uplink_vports]
+ downlink_endpoints = [port + '/protocols/static'
+ for port in self._downlink_vports]
+
+ self.client.create_ipv4_traffic_model(uplink_endpoints,
+ downlink_endpoints)
class IxiaPppoeClientScenario(object):
@@ -370,6 +449,7 @@ class IxiaResourceHelper(ClientResourceHelper):
self._ixia_scenarios = {
"IxiaBasic": IxiaBasicScenario,
+ "IxiaL3": IxiaL3Scenario,
"IxiaPppoeClient": IxiaPppoeClientScenario,
}
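IxiaL3Scenario reuses IxiaBasicScenario's port convention: even-indexed vports feed the uplink side, odd-indexed vports the downlink side, and the L3 traffic model points its endpoints at each vport's static protocol stack. A small illustration of that slicing; the vport handle strings here are made up for the example:

    vports = ['/vport:1', '/vport:2', '/vport:3', '/vport:4']   # illustrative handles

    uplink_vports = vports[::2]     # ['/vport:1', '/vport:3']
    downlink_vports = vports[1::2]  # ['/vport:2', '/vport:4']

    uplink_endpoints = [port + '/protocols/static' for port in uplink_vports]
    downlink_endpoints = [port + '/protocols/static' for port in downlink_vports]
    # -> ['/vport:1/protocols/static', '/vport:3/protocols/static'] and
    #    ['/vport:2/protocols/static', '/vport:4/protocols/static']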
diff --git a/yardstick/ssh.py b/yardstick/ssh.py
index 8bdc32c7c..2ebf40e98 100644
--- a/yardstick/ssh.py
+++ b/yardstick/ssh.py
@@ -89,14 +89,6 @@ def convert_key_to_str(key):
return k.getvalue()
-# class SSHError(Exception):
-# pass
-#
-#
-# class SSHTimeout(SSHError):
-# pass
-
-
class SSH(object):
"""Represent ssh connection."""
diff --git a/yardstick/tests/functional/benchmark/core/__init__.py b/yardstick/tests/functional/benchmark/core/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/core/__init__.py
diff --git a/yardstick/tests/functional/benchmark/core/test_report.py b/yardstick/tests/functional/benchmark/core/test_report.py
new file mode 100644
index 000000000..5f060dd1e
--- /dev/null
+++ b/yardstick/tests/functional/benchmark/core/test_report.py
@@ -0,0 +1,114 @@
+##############################################################################
+# Copyright (c) 2018 Intel Corporation.
+#
+# All rights reserved. This program and the accompanying materials
+# are made available under the terms of the Apache License, Version 2.0
+# which accompanies this distribution, and is available at
+# http://www.apache.org/licenses/LICENSE-2.0
+##############################################################################
+
+import ast
+import tempfile
+import unittest
+
+import mock
+from six.moves import configparser
+
+from yardstick.benchmark import core
+from yardstick.benchmark.core import report
+from yardstick.cmd.commands import change_osloobj_to_paras
+
+
+GOOD_YAML_NAME = 'fake_name'
+GOOD_TASK_ID = "9cbe74b6-df09-4535-8bdc-dc3a43b8a4e2"
+GOOD_DB_FIELDKEYS = [
+ {u'fieldKey': u'metric1', u'fieldType': u'integer'},
+ {u'fieldKey': u'metric4', u'fieldType': u'integer'},
+ {u'fieldKey': u'metric2', u'fieldType': u'integer'},
+ {u'fieldKey': u'metric3', u'fieldType': u'integer'},
+]
+GOOD_DB_METRICS = [
+ {u'time': u'2018-08-20T16:49:26.372662016Z',
+ u'metric1': 1, u'metric2': 0, u'metric3': 8, u'metric4': 5},
+ {u'time': u'2018-08-20T16:49:27.374208000Z',
+ u'metric1': 1, u'metric2': 1, u'metric3': 5, u'metric4': 4},
+ {u'time': u'2018-08-20T16:49:28.375742976Z',
+ u'metric1': 2, u'metric2': 2, u'metric3': 3, u'metric4': 3},
+ {u'time': u'2018-08-20T16:49:29.377299968Z',
+ u'metric1': 3, u'metric2': 3, u'metric3': 2, u'metric4': 2},
+ {u'time': u'2018-08-20T16:49:30.378252032Z',
+ u'metric1': 5, u'metric2': 4, u'metric3': 1, u'metric4': 1},
+ {u'time': u'2018-08-20T16:49:30.379359421Z',
+ u'metric1': 8, u'metric2': 5, u'metric3': 1, u'metric4': 0},
+]
+
+yardstick_config = """
+[DEFAULT]
+dispatcher = influxdb
+"""
+
+
+def my_query(query_sql):
+ get_fieldkeys_cmd = 'show field keys'
+ get_metrics_cmd = 'select * from'
+
+ if get_fieldkeys_cmd in query_sql:
+ return GOOD_DB_FIELDKEYS
+ elif get_metrics_cmd in query_sql:
+ return GOOD_DB_METRICS
+ return []
+
+
+class ReportTestCase(unittest.TestCase):
+
+ @mock.patch.object(report.influx, 'query', new=my_query)
+ @mock.patch.object(configparser.ConfigParser,
+ 'read', side_effect=mock.mock_open(read_data=yardstick_config))
+ def test_report_generate_nsb_simple(self, *args):
+ tmpfile = tempfile.NamedTemporaryFile(delete=True)
+
+ args = core.Param({"task_id": [GOOD_TASK_ID], "yaml_name": [GOOD_YAML_NAME]})
+ params = change_osloobj_to_paras(args)
+
+ with mock.patch.object(report.consts, 'DEFAULT_HTML_FILE', tmpfile.name):
+ report.Report().generate_nsb(params)
+
+ data_act = None
+ time_act = None
+ keys_act = None
+ tree_act = None
+ with open(tmpfile.name) as f:
+ for l in f.readlines():
+ if "var report_data = {" in l:
+ data_act = ast.literal_eval(l.strip()[18:-1])
+ elif "var report_time = [" in l:
+ time_act = ast.literal_eval(l.strip()[18:-1])
+ elif "var report_keys = [" in l:
+ keys_act = ast.literal_eval(l.strip()[18:-1])
+ elif "var report_tree = [" in l:
+ tree_act = ast.literal_eval(l.strip()[18:-1])
+
+ data_exp = {
+ 'metric1': [1, 1, 2, 3, 5, 8],
+ 'metric2': [0, 1, 2, 3, 4, 5],
+ 'metric3': [8, 5, 3, 2, 1, 1],
+ 'metric4': [5, 4, 3, 2, 1, 0],
+ }
+ time_exp = [
+ '16:49:26.372662', '16:49:27.374208', '16:49:28.375742',
+ '16:49:29.377299', '16:49:30.378252', '16:49:30.379359',
+ ]
+ keys_exp = [
+ 'metric1', 'metric2', 'metric3', 'metric4',
+ ]
+ tree_exp = [
+ {'parent': '#', 'text': 'metric1', 'id': 'metric1'},
+ {'parent': '#', 'text': 'metric2', 'id': 'metric2'},
+ {'parent': '#', 'text': 'metric3', 'id': 'metric3'},
+ {'parent': '#', 'text': 'metric4', 'id': 'metric4'},
+ ]
+
+ self.assertEqual(data_exp, data_act)
+ self.assertEqual(time_exp, time_act)
+ self.assertEqual(keys_exp, keys_act)
+ self.assertEqual(tree_exp, tree_act)
diff --git a/yardstick/tests/unit/benchmark/core/test_report.py b/yardstick/tests/unit/benchmark/core/test_report.py
index 11d017ff0..4683c26b0 100644
--- a/yardstick/tests/unit/benchmark/core/test_report.py
+++ b/yardstick/tests/unit/benchmark/core/test_report.py
@@ -9,6 +9,7 @@
##############################################################################
import mock
+import six
import unittest
import uuid
@@ -19,13 +20,82 @@ from yardstick.cmd.commands import change_osloobj_to_paras
GOOD_YAML_NAME = 'fake_name'
GOOD_TASK_ID = str(uuid.uuid4())
GOOD_DB_FIELDKEYS = [{'fieldKey': 'fake_key'}]
-GOOD_DB_TASK = [{
+GOOD_DB_METRICS = [{
'fake_key': 1.234,
'time': '0000-00-00T12:34:56.789012Z',
}]
GOOD_TIMESTAMP = ['12:34:56.789012']
BAD_YAML_NAME = 'F@KE_NAME'
BAD_TASK_ID = 'aaaaaa-aaaaaaaa-aaaaaaaaaa-aaaaaa'
+MORE_DB_FIELDKEYS = [
+ {'fieldKey': 'fake_key'},
+ {'fieldKey': 'str_str'},
+ {'fieldKey': u'str_unicode'},
+ {u'fieldKey': 'unicode_str'},
+ {u'fieldKey': u'unicode_unicode'},
+ ]
+MORE_DB_METRICS = [{
+ 'fake_key': None,
+ 'time': '0000-00-00T00:00:00.000000Z',
+ }, {
+ 'fake_key': 123,
+ 'time': '0000-00-00T00:00:01.000000Z',
+ }, {
+ 'fake_key': 4.56,
+ 'time': '0000-00-00T00:00:02.000000Z',
+ }, {
+ 'fake_key': 9876543210987654321,
+ 'time': '0000-00-00T00:00:03.000000Z',
+ }, {
+ 'fake_key': 'str_str value',
+ 'time': '0000-00-00T00:00:04.000000Z',
+ }, {
+ 'fake_key': u'str_unicode value',
+ 'time': '0000-00-00T00:00:05.000000Z',
+ }, {
+ u'fake_key': 'unicode_str value',
+ 'time': '0000-00-00T00:00:06.000000Z',
+ }, {
+ u'fake_key': u'unicode_unicode value',
+ 'time': '0000-00-00T00:00:07.000000Z',
+ }, {
+ 'fake_key': '7.89',
+ 'time': '0000-00-00T00:00:08.000000Z',
+ }, {
+ 'fake_key': '1011',
+ 'time': '0000-00-00T00:00:09.000000Z',
+ }, {
+ 'fake_key': '9876543210123456789',
+ 'time': '0000-00-00T00:00:10.000000Z',
+ }]
+MORE_TIMESTAMP = ['00:00:%02d.000000' % n for n in range(len(MORE_DB_METRICS))]
+MORE_EMPTY_DATA = [None] * len(MORE_DB_METRICS)
+MORE_EXPECTED_TABLE_VALS = {
+ 'Timestamp': MORE_TIMESTAMP,
+ 'fake_key': [
+ None,
+ 123,
+ 4.56,
+ 9876543210987654321 if six.PY3 else 9.876543210987655e+18,
+ 'str_str value',
+ 'str_unicode value',
+ 'unicode_str value',
+ 'unicode_unicode value',
+ 7.89,
+ 1011,
+ 9876543210123456789 if six.PY3 else 9.876543210123457e+18,
+ ],
+ 'str_str': MORE_EMPTY_DATA,
+ 'str_unicode': MORE_EMPTY_DATA,
+ 'unicode_str': MORE_EMPTY_DATA,
+ 'unicode_unicode': MORE_EMPTY_DATA,
+ }
+MORE_EXPECTED_DATASETS = [{
+ 'label': key,
+ 'data': MORE_EXPECTED_TABLE_VALS[key],
+ }
+ for key in map(str, [field['fieldKey'] for field in MORE_DB_FIELDKEYS])
+ ]
class JSTreeTestCase(unittest.TestCase):
@@ -47,23 +117,15 @@ class JSTreeTestCase(unittest.TestCase):
def test_format_for_jstree(self):
data = [
- {'data': [0, ], 'label': 'tg__0.DropPackets'},
- {'data': [548, ], 'label': 'tg__0.LatencyAvg.5'},
- {'data': [1172, ], 'label': 'tg__0.LatencyAvg.6'},
- {'data': [1001, ], 'label': 'tg__0.LatencyMax.5'},
- {'data': [1468, ], 'label': 'tg__0.LatencyMax.6'},
- {'data': [18.11, ], 'label': 'tg__0.RxThroughput'},
- {'data': [18.11, ], 'label': 'tg__0.TxThroughput'},
- {'data': [0, ], 'label': 'tg__1.DropPackets'},
- {'data': [548, ], 'label': 'tg__1.LatencyAvg.5'},
- {'data': [1172, ], 'label': 'tg__1.LatencyAvg.6'},
- {'data': [1001, ], 'label': 'tg__1.LatencyMax.5'},
- {'data': [1468, ], 'label': 'tg__1.LatencyMax.6'},
- {'data': [18.1132084505, ], 'label': 'tg__1.RxThroughput'},
- {'data': [18.1157260383, ], 'label': 'tg__1.TxThroughput'},
- {'data': [9057888, ], 'label': 'vnf__0.curr_packets_in'},
- {'data': [0, ], 'label': 'vnf__0.packets_dropped'},
- {'data': [617825443, ], 'label': 'vnf__0.packets_fwd'},
+ 'tg__0.DropPackets',
+ 'tg__0.LatencyAvg.5', 'tg__0.LatencyAvg.6',
+ 'tg__0.LatencyMax.5', 'tg__0.LatencyMax.6',
+ 'tg__0.RxThroughput', 'tg__0.TxThroughput',
+ 'tg__1.DropPackets',
+ 'tg__1.LatencyAvg.5', 'tg__1.LatencyAvg.6',
+ 'tg__1.LatencyMax.5', 'tg__1.LatencyMax.6',
+ 'tg__1.RxThroughput', 'tg__1.TxThroughput',
+ 'vnf__0.curr_packets_in', 'vnf__0.packets_dropped', 'vnf__0.packets_fwd',
]
expected_output = [
@@ -117,11 +179,11 @@ class ReportTestCase(unittest.TestCase):
self.assertEqual(GOOD_TASK_ID, str(self.rep.task_id))
def test__validate_invalid_yaml_name(self):
- with self.assertRaisesRegexp(ValueError, "yaml*"):
+ with six.assertRaisesRegex(self, ValueError, "yaml*"):
self.rep._validate(BAD_YAML_NAME, GOOD_TASK_ID)
def test__validate_invalid_task_id(self):
- with self.assertRaisesRegexp(ValueError, "task*"):
+ with six.assertRaisesRegex(self, ValueError, "task*"):
self.rep._validate(GOOD_YAML_NAME, BAD_TASK_ID)
@mock.patch.object(influx, 'query')
@@ -141,42 +203,51 @@ class ReportTestCase(unittest.TestCase):
mock_query.return_value = []
self.rep.yaml_name = GOOD_YAML_NAME
self.rep.task_id = GOOD_TASK_ID
- self.assertRaisesRegexp(KeyError, "Test case", self.rep._get_fieldkeys)
+ six.assertRaisesRegex(self, KeyError, "Test case", self.rep._get_fieldkeys)
@mock.patch.object(influx, 'query')
- def test__get_tasks(self, mock_query):
- mock_query.return_value = GOOD_DB_TASK
+ def test__get_metrics(self, mock_query):
+ mock_query.return_value = GOOD_DB_METRICS
self.rep.yaml_name = GOOD_YAML_NAME
self.rep.task_id = GOOD_TASK_ID
- self.assertEqual(GOOD_DB_TASK, self.rep._get_tasks())
+ self.assertEqual(GOOD_DB_METRICS, self.rep._get_metrics())
@mock.patch.object(influx, 'query')
- def test__get_tasks_task_not_found(self, mock_query):
+ def test__get_metrics_task_not_found(self, mock_query):
mock_query.return_value = []
self.rep.yaml_name = GOOD_YAML_NAME
self.rep.task_id = GOOD_TASK_ID
- self.assertRaisesRegexp(KeyError, "Task ID", self.rep._get_tasks)
+ six.assertRaisesRegex(self, KeyError, "Task ID", self.rep._get_metrics)
- @mock.patch.object(report.Report, '_get_tasks')
+ @mock.patch.object(report.Report, '_get_metrics')
+ @mock.patch.object(report.Report, '_get_fieldkeys')
+ def test__generate_common(self, mock_keys, mock_metrics):
+ mock_metrics.return_value = MORE_DB_METRICS
+ mock_keys.return_value = MORE_DB_FIELDKEYS
+ datasets, table_vals = self.rep._generate_common(self.param)
+ self.assertEqual(MORE_EXPECTED_DATASETS, datasets)
+ self.assertEqual(MORE_EXPECTED_TABLE_VALS, table_vals)
+
+ @mock.patch.object(report.Report, '_get_metrics')
@mock.patch.object(report.Report, '_get_fieldkeys')
@mock.patch.object(report.Report, '_validate')
- def test_generate(self, mock_valid, mock_keys, mock_tasks):
- mock_tasks.return_value = GOOD_DB_TASK
+ def test_generate(self, mock_valid, mock_keys, mock_metrics):
+ mock_metrics.return_value = GOOD_DB_METRICS
mock_keys.return_value = GOOD_DB_FIELDKEYS
self.rep.generate(self.param)
mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
- mock_tasks.assert_called_once_with()
+ mock_metrics.assert_called_once_with()
mock_keys.assert_called_once_with()
self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
- @mock.patch.object(report.Report, '_get_tasks')
+ @mock.patch.object(report.Report, '_get_metrics')
@mock.patch.object(report.Report, '_get_fieldkeys')
@mock.patch.object(report.Report, '_validate')
- def test_generate_nsb(self, mock_valid, mock_keys, mock_tasks):
- mock_tasks.return_value = GOOD_DB_TASK
+ def test_generate_nsb(self, mock_valid, mock_keys, mock_metrics):
+ mock_metrics.return_value = GOOD_DB_METRICS
mock_keys.return_value = GOOD_DB_FIELDKEYS
self.rep.generate_nsb(self.param)
mock_valid.assert_called_once_with(GOOD_YAML_NAME, GOOD_TASK_ID)
- mock_tasks.assert_called_once_with()
+ mock_metrics.assert_called_once_with()
mock_keys.assert_called_once_with()
self.assertEqual(GOOD_TIMESTAMP, self.rep.Timestamp)
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
index e9c680257..dc3a4b99a 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_multi.py
@@ -63,3 +63,20 @@ class MultiMonitorServiceTestCase(unittest.TestCase):
ins.start_monitor()
ins.wait_monitor()
ins.verify_SLA()
+
+ def test__monitor_multi_no_sla(self, mock_open, mock_ssh):
+ monitor_cfg = {
+ 'monitor_type': 'general-monitor',
+ 'monitor_number': 3,
+ 'key': 'service-status',
+ 'monitor_key': 'service-status',
+ 'host': 'node1',
+ 'monitor_time': 0.1,
+ 'parameter': {'serviceName': 'haproxy'}
+ }
+ ins = monitor_multi.MultiMonitor(
+ monitor_cfg, self.context, {"nova-api": 10})
+ mock_ssh.SSH.from_node().execute.return_value = (0, "running", '')
+ ins.start_monitor()
+ ins.wait_monitor()
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
index a6d2ca398..8c73bf221 100644
--- a/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
+++ b/yardstick/tests/unit/benchmark/scenarios/availability/test_monitor_process.py
@@ -55,3 +55,19 @@ class MonitorProcessTestCase(unittest.TestCase):
ins.monitor_func()
ins._result = {"outage_time": 10}
ins.verify_SLA()
+
+ def test__monitor_process_no_sla(self, mock_ssh):
+
+ monitor_cfg = {
+ 'monitor_type': 'process',
+ 'process_name': 'nova-api',
+ 'host': "node1",
+ 'monitor_time': 1,
+ }
+ ins = monitor_process.MonitorProcess(monitor_cfg, self.context, {"nova-api": 10})
+
+ mock_ssh.SSH.from_node().execute.return_value = (0, "0", '')
+ ins.setup()
+ ins.monitor_func()
+ ins._result = {"outage_time": 10}
+ self.assertTrue(ins.verify_SLA())
diff --git a/yardstick/tests/unit/common/test_utils.py b/yardstick/tests/unit/common/test_utils.py
index c0c928916..6b8d81907 100644
--- a/yardstick/tests/unit/common/test_utils.py
+++ b/yardstick/tests/unit/common/test_utils.py
@@ -1135,6 +1135,15 @@ class TestUtilsIpAddrMethods(ut_base.BaseUnitTestCase):
for addr in addr_list:
self.assertRaises(Exception, utils.make_ipv4_address, addr)
+ def test_get_ip_range_count(self):
+ iprange = "192.168.0.1-192.168.0.25"
+ count = utils.get_ip_range_count(iprange)
+ self.assertEqual(count, 24)
+
+ def test_get_ip_range_start(self):
+ iprange = "192.168.0.1-192.168.0.25"
+ start = utils.get_ip_range_start(iprange)
+ self.assertEqual(start, "192.168.0.1")
def test_safe_ip_address(self):
addr_list = self.GOOD_IP_V4_ADDRESS_STR_LIST
diff --git a/yardstick/tests/unit/network_services/libs/ixia_libs/test_ixnet_api.py b/yardstick/tests/unit/network_services/libs/ixia_libs/test_ixnet_api.py
index 5b32a4297..110224742 100644
--- a/yardstick/tests/unit/network_services/libs/ixia_libs/test_ixnet_api.py
+++ b/yardstick/tests/unit/network_services/libs/ixia_libs/test_ixnet_api.py
@@ -103,6 +103,12 @@ class TestIxNextgen(unittest.TestCase):
self.ixnet.getRoot.return_value = 'my_root'
self.ixnet_gen = ixnet_api.IxNextgen()
self.ixnet_gen._ixnet = self.ixnet
+ self._mock_log = mock.patch.object(ixnet_api.log, 'info')
+ self.mock_log = self._mock_log.start()
+ self.addCleanup(self._stop_mocks)
+
+ def _stop_mocks(self):
+ self.mock_log.stop()
def test_get_config(self):
tg_cfg = {
diff --git a/yardstick/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py b/yardstick/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
index 43682dd07..a1802aa55 100644
--- a/yardstick/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
+++ b/yardstick/tests/unit/network_services/vnf_generic/vnf/test_sample_vnf.py
@@ -17,12 +17,14 @@ from copy import deepcopy
import unittest
import mock
+import paramiko
+
from yardstick.common import exceptions as y_exceptions
from yardstick.common import utils
from yardstick.network_services.nfvi.resource import ResourceProfile
from yardstick.network_services.vnf_generic.vnf.base import VnfdHelper
from yardstick.network_services.vnf_generic.vnf import sample_vnf
-from yardstick.network_services.vnf_generic.vnf.vnf_ssh_helper import VnfSshHelper
+from yardstick.network_services.vnf_generic.vnf import vnf_ssh_helper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFDeployHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ScenarioHelper
from yardstick.network_services.vnf_generic.vnf.sample_vnf import ResourceHelper
@@ -34,6 +36,7 @@ from yardstick.network_services.vnf_generic.vnf.sample_vnf import SampleVNFTraff
from yardstick.network_services.vnf_generic.vnf.sample_vnf import DpdkVnfSetupEnvHelper
from yardstick.tests.unit.network_services.vnf_generic.vnf import test_base
from yardstick.benchmark.contexts import base as ctx_base
+from yardstick import ssh
class MockError(Exception):
@@ -82,10 +85,11 @@ class TestVnfSshHelper(unittest.TestCase):
'virtual-interface': {
'dst_mac': '00:00:00:00:00:03',
'vpci': '0000:05:00.0',
+ 'dpdk_port_num': 0,
+ 'driver': 'i40e',
'local_ip': '152.16.100.19',
'type': 'PCI-PASSTHROUGH',
'netmask': '255.255.255.0',
- 'dpdk_port_num': 0,
'bandwidth': '10 Gbps',
'dst_ip': '152.16.100.20',
'local_mac': '00:00:00:00:00:01',
@@ -99,10 +103,11 @@ class TestVnfSshHelper(unittest.TestCase):
'virtual-interface': {
'dst_mac': '00:00:00:00:00:04',
'vpci': '0000:05:00.1',
+ 'dpdk_port_num': 1,
+ 'driver': 'ixgbe',
'local_ip': '152.16.40.19',
'type': 'PCI-PASSTHROUGH',
'netmask': '255.255.255.0',
- 'dpdk_port_num': 1,
'bandwidth': '10 Gbps',
'dst_ip': '152.16.40.20',
'local_mac': '00:00:00:00:00:02',
@@ -151,90 +156,88 @@ class TestVnfSshHelper(unittest.TestCase):
}
}
+ def setUp(self):
+ self.ssh_helper = vnf_ssh_helper.VnfSshHelper(
+ self.VNFD_0['mgmt-interface'], 'my/bin/path')
+ self.ssh_helper._run = mock.Mock()
+
def assertAll(self, iterable, message=None):
self.assertTrue(all(iterable), message)
def test_get_class(self):
- self.assertIs(VnfSshHelper.get_class(), VnfSshHelper)
+ self.assertIs(vnf_ssh_helper.VnfSshHelper.get_class(),
+ vnf_ssh_helper.VnfSshHelper)
- @mock.patch('yardstick.ssh.paramiko')
+ @mock.patch.object(ssh, 'paramiko')
def test_copy(self, _):
- ssh_helper = VnfSshHelper(self.VNFD_0['mgmt-interface'], 'my/bin/path')
- ssh_helper._run = mock.Mock()
-
- ssh_helper.execute('ls')
- self.assertTrue(ssh_helper.is_connected)
- result = ssh_helper.copy()
- self.assertIsInstance(result, VnfSshHelper)
+ self.ssh_helper.execute('ls')
+ self.assertTrue(self.ssh_helper.is_connected)
+ result = self.ssh_helper.copy()
+ self.assertIsInstance(result, vnf_ssh_helper.VnfSshHelper)
self.assertFalse(result.is_connected)
- self.assertEqual(result.bin_path, ssh_helper.bin_path)
- self.assertEqual(result.host, ssh_helper.host)
- self.assertEqual(result.port, ssh_helper.port)
- self.assertEqual(result.user, ssh_helper.user)
- self.assertEqual(result.password, ssh_helper.password)
- self.assertEqual(result.key_filename, ssh_helper.key_filename)
-
- @mock.patch('yardstick.ssh.paramiko')
+ self.assertEqual(result.bin_path, self.ssh_helper.bin_path)
+ self.assertEqual(result.host, self.ssh_helper.host)
+ self.assertEqual(result.port, self.ssh_helper.port)
+ self.assertEqual(result.user, self.ssh_helper.user)
+ self.assertEqual(result.password, self.ssh_helper.password)
+ self.assertEqual(result.key_filename, self.ssh_helper.key_filename)
+
+ @mock.patch.object(paramiko, 'SSHClient')
def test_upload_config_file(self, mock_paramiko):
- ssh_helper = VnfSshHelper(self.VNFD_0['mgmt-interface'], 'my/bin/path')
- ssh_helper._run = mock.MagicMock()
+ self.assertFalse(self.ssh_helper.is_connected)
+ cfg_file = self.ssh_helper.upload_config_file('/my/prefix', 'my content')
+ self.assertTrue(self.ssh_helper.is_connected)
+ mock_paramiko.assert_called_once()
+ self.assertEqual(cfg_file, '/my/prefix')
- self.assertFalse(ssh_helper.is_connected)
- cfg_file = ssh_helper.upload_config_file('my/prefix', 'my content')
- self.assertTrue(ssh_helper.is_connected)
- mock_paramiko.SSHClient.assert_called_once()
+ @mock.patch.object(paramiko, 'SSHClient')
+ def test_upload_config_file_path_does_not_exist(self, mock_paramiko):
+ self.assertFalse(self.ssh_helper.is_connected)
+ cfg_file = self.ssh_helper.upload_config_file('my/prefix', 'my content')
+ self.assertTrue(self.ssh_helper.is_connected)
+ mock_paramiko.assert_called_once()
self.assertTrue(cfg_file.startswith('/tmp'))
- cfg_file = ssh_helper.upload_config_file('/my/prefix', 'my content')
- self.assertTrue(ssh_helper.is_connected)
- mock_paramiko.SSHClient.assert_called_once()
- self.assertEqual(cfg_file, '/my/prefix')
-
def test_join_bin_path(self):
- ssh_helper = VnfSshHelper(self.VNFD_0['mgmt-interface'], 'my/bin/path')
-
expected_start = 'my'
expected_middle_list = ['bin']
expected_end = 'path'
- result = ssh_helper.join_bin_path()
+ result = self.ssh_helper.join_bin_path()
self.assertTrue(result.startswith(expected_start))
self.assertAll(middle in result for middle in expected_middle_list)
self.assertTrue(result.endswith(expected_end))
expected_middle_list.append(expected_end)
expected_end = 'some_file.sh'
- result = ssh_helper.join_bin_path('some_file.sh')
+ result = self.ssh_helper.join_bin_path('some_file.sh')
self.assertTrue(result.startswith(expected_start))
self.assertAll(middle in result for middle in expected_middle_list)
self.assertTrue(result.endswith(expected_end))
expected_middle_list.append('some_dir')
expected_end = 'some_file.sh'
- result = ssh_helper.join_bin_path('some_dir', 'some_file.sh')
+ result = self.ssh_helper.join_bin_path('some_dir', 'some_file.sh')
self.assertTrue(result.startswith(expected_start))
self.assertAll(middle in result for middle in expected_middle_list)
self.assertTrue(result.endswith(expected_end))
- @mock.patch('yardstick.ssh.paramiko')
- @mock.patch('yardstick.ssh.provision_tool')
+ @mock.patch.object(paramiko, 'SSHClient')
+ @mock.patch.object(ssh, 'provision_tool')
def test_provision_tool(self, mock_provision_tool, mock_paramiko):
- ssh_helper = VnfSshHelper(self.VNFD_0['mgmt-interface'], 'my/bin/path')
- ssh_helper._run = mock.MagicMock()
-
- self.assertFalse(ssh_helper.is_connected)
- ssh_helper.provision_tool()
- self.assertTrue(ssh_helper.is_connected)
- mock_paramiko.SSHClient.assert_called_once()
+ self.assertFalse(self.ssh_helper.is_connected)
+ self.ssh_helper.provision_tool()
+ self.assertTrue(self.ssh_helper.is_connected)
+ mock_paramiko.assert_called_once()
mock_provision_tool.assert_called_once()
- ssh_helper.provision_tool(tool_file='my_tool.sh')
- self.assertTrue(ssh_helper.is_connected)
- mock_paramiko.SSHClient.assert_called_once()
+ self.ssh_helper.provision_tool(tool_file='my_tool.sh')
+ self.assertTrue(self.ssh_helper.is_connected)
+ mock_paramiko.assert_called_once()
self.assertEqual(mock_provision_tool.call_count, 2)
- ssh_helper.provision_tool('tool_path', 'my_tool.sh')
- self.assertTrue(ssh_helper.is_connected)
- mock_paramiko.SSHClient.assert_called_once()
+ self.ssh_helper.provision_tool('tool_path', 'my_tool.sh')
+ self.assertTrue(self.ssh_helper.is_connected)
+ mock_paramiko.assert_called_once()
self.assertEqual(mock_provision_tool.call_count, 3)
diff --git a/yardstick/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py b/yardstick/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
index 65bf56f1e..42cd71b16 100644
--- a/yardstick/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
+++ b/yardstick/tests/unit/network_services/vnf_generic/vnf/test_tg_rfc2544_ixia.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2016-2017 Intel Corporation
+# Copyright (c) 2016-2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -432,7 +432,6 @@ class TestIXIATrafficGen(unittest.TestCase):
class TestIxiaBasicScenario(unittest.TestCase):
-
def setUp(self):
self._mock_IxNextgen = mock.patch.object(ixnet_api, 'IxNextgen')
self.mock_IxNextgen = self._mock_IxNextgen.start()
@@ -450,15 +449,15 @@ class TestIxiaBasicScenario(unittest.TestCase):
self.assertIsInstance(self.scenario, tg_rfc2544_ixia.IxiaBasicScenario)
self.assertEqual(self.scenario.client, self.mock_IxNextgen)
- def test_apply_config(self):
- self.assertIsNone(self.scenario.apply_config())
-
def test_create_traffic_model(self):
self.mock_IxNextgen.get_vports.return_value = [1, 2, 3, 4]
self.scenario.create_traffic_model()
self.scenario.client.get_vports.assert_called_once()
self.scenario.client.create_traffic_model.assert_called_once_with([1, 3], [2, 4])
+ def test_apply_config(self):
+ self.assertIsNone(self.scenario.apply_config())
+
def test_run_protocols(self):
self.assertIsNone(self.scenario.run_protocols())
@@ -466,6 +465,97 @@ class TestIxiaBasicScenario(unittest.TestCase):
self.assertIsNone(self.scenario.stop_protocols())
+class TestIxiaL3Scenario(TestIxiaBasicScenario):
+ IXIA_CFG = {
+ 'flow': {
+ 'src_ip': ['192.168.0.1-192.168.0.50'],
+ 'dst_ip': ['192.168.1.1-192.168.1.150']
+ }
+ }
+
+ CONTEXT_CFG = {
+ 'nodes': {
+ 'tg__0': {
+ 'role': 'IxNet',
+ 'interfaces': {
+ 'xe0': {
+ 'vld_id': 'uplink_0',
+ 'local_ip': '10.1.1.1',
+ 'local_mac': 'aa:bb:cc:dd:ee:ff',
+ 'ifname': 'xe0'
+ },
+ 'xe1': {
+ 'vld_id': 'downlink_0',
+ 'local_ip': '20.2.2.2',
+ 'local_mac': 'bb:bb:cc:dd:ee:ee',
+ 'ifname': 'xe1'
+ }
+ },
+ 'routing_table': [{
+ 'network': "152.16.100.20",
+ 'netmask': '255.255.0.0',
+ 'gateway': '152.16.100.21',
+ 'if': 'xe0'
+ }]
+ }
+ }
+ }
+
+ def setUp(self):
+ super(TestIxiaL3Scenario, self).setUp()
+ self.ixia_cfg = self.IXIA_CFG
+ self.context_cfg = self.CONTEXT_CFG
+ self.scenario = tg_rfc2544_ixia.IxiaL3Scenario(self.mock_IxNextgen,
+ self.context_cfg,
+ self.ixia_cfg)
+
+ def test___init___(self):
+ self.assertIsInstance(self.scenario, tg_rfc2544_ixia.IxiaL3Scenario)
+ self.assertEqual(self.scenario.client, self.mock_IxNextgen)
+
+ def test_create_traffic_model(self):
+ self.mock_IxNextgen.get_vports.return_value = ['1', '2']
+ self.scenario.create_traffic_model()
+ self.scenario.client.get_vports.assert_called_once()
+ self.scenario.client.create_ipv4_traffic_model.\
+ assert_called_once_with(['1/protocols/static'],
+ ['2/protocols/static'])
+
+ def test_apply_config(self):
+ self.scenario._add_interfaces = mock.Mock()
+ self.scenario._add_static_ips = mock.Mock()
+ self.assertIsNone(self.scenario.apply_config())
+
+ def test__add_static(self):
+ self.mock_IxNextgen.get_vports.return_value = ['1', '2']
+ self.mock_IxNextgen.get_static_interface.side_effect = ['intf1',
+ 'intf2']
+
+ self.scenario._add_static_ips()
+
+ self.mock_IxNextgen.get_static_interface.assert_any_call('1')
+ self.mock_IxNextgen.get_static_interface.assert_any_call('2')
+
+ self.scenario.client.add_static_ipv4.assert_any_call(
+ 'intf1', '1', '192.168.0.1', 49, '32')
+ self.scenario.client.add_static_ipv4.assert_any_call(
+ 'intf2', '2', '192.168.1.1', 149, '32')
+
+ def test__add_interfaces(self):
+ self.mock_IxNextgen.get_vports.return_value = ['1', '2']
+
+ self.scenario._add_interfaces()
+
+ self.mock_IxNextgen.add_interface.assert_any_call('1',
+ '10.1.1.1',
+ 'aa:bb:cc:dd:ee:ff',
+ '152.16.100.21')
+ self.mock_IxNextgen.add_interface.assert_any_call('2',
+ '20.2.2.2',
+ 'bb:bb:cc:dd:ee:ee',
+ None)
+
+
class TestIxiaPppoeClientScenario(unittest.TestCase):
IXIA_CFG = {